From 3251b00f3e113e2f2d311c64b40fe25b372c00c6 Mon Sep 17 00:00:00 2001
From: Yu-Han Liu
Date: Wed, 19 Jul 2023 09:23:30 -0700
Subject: [PATCH] chore: remove migrated snippets (#479)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* docs: remove migrated snippets

* 🦉 Updates from OwlBot post-processor

  See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot
---
 .../samples/snippets/__init__.py              |  13 --
 .../samples/snippets/conftest.py              | 156 ---------------
 .../samples/snippets/copy_dataset.py          |  54 -----
 .../samples/snippets/copy_dataset_test.py     |  66 -------
 .../snippets/manage_transfer_configs.py       | 185 ------------------
 .../snippets/manage_transfer_configs_test.py  |  68 -------
 .../samples/snippets/noxfile_config.py        |  38 ----
 .../samples/snippets/quickstart.py            |  49 -----
 .../samples/snippets/quickstart_test.py       |  25 ---
 .../samples/snippets/run_notification.py      |  44 -----
 .../samples/snippets/run_notification_test.py |  27 ---
 .../samples/snippets/scheduled_query.py       |  80 --------
 .../samples/snippets/scheduled_query_test.py  |  22 ---
 13 files changed, 827 deletions(-)
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/__init__.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/conftest.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset_test.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs_test.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/noxfile_config.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification_test.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query.py
 delete mode 100644 packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query_test.py

diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/__init__.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/__init__.py
deleted file mode 100644
index c6334245aea5..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/conftest.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/conftest.py
deleted file mode 100644
index 1248a9407f79..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/conftest.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import os
-import random
-import uuid
-
-from google.api_core import client_options
-import google.api_core.exceptions
-import google.auth
-from google.cloud import bigquery, bigquery_datatransfer, pubsub_v1
-import pytest
-
-RESOURCE_PREFIX = "python_bigquery_datatransfer_samples_snippets"
-RESOURCE_DATE_FORMAT = "%Y%m%d%H%M%S"
-RESOURCE_DATE_LENGTH = 4 + 2 + 2 + 2 + 2 + 2
-
-
-def resource_prefix() -> str:
-    timestamp = datetime.datetime.utcnow().strftime(RESOURCE_DATE_FORMAT)
-    random_string = hex(random.randrange(1000000))[2:]
-    return f"{RESOURCE_PREFIX}_{timestamp}_{random_string}"
-
-
-def resource_name_to_date(resource_name: str):
-    start_date = len(RESOURCE_PREFIX) + 1
-    date_string = resource_name[start_date : start_date + RESOURCE_DATE_LENGTH]
-    parsed_date = datetime.datetime.strptime(date_string, RESOURCE_DATE_FORMAT)
-    return parsed_date
-
-
-@pytest.fixture(scope="session", autouse=True)
-def cleanup_pubsub_topics(pubsub_client: pubsub_v1.PublisherClient, project_id):
-    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=1)
-    for topic in pubsub_client.list_topics(project=f"projects/{project_id}"):
-        topic_id = topic.name.split("/")[-1]
-        if (
-            topic_id.startswith(RESOURCE_PREFIX)
-            and resource_name_to_date(topic_id) < yesterday
-        ):
-            pubsub_client.delete_topic(topic=topic.name)
-
-
-def temp_suffix():
-    now = datetime.datetime.now()
-    return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
-
-
-@pytest.fixture(scope="session")
-def bigquery_client(default_credentials):
-    credentials, project_id = default_credentials
-    return bigquery.Client(credentials=credentials, project=project_id)
-
-
-@pytest.fixture(scope="session")
-def pubsub_client(default_credentials):
-    credentials, _ = default_credentials
-    return pubsub_v1.PublisherClient(credentials=credentials)
-
-
-@pytest.fixture(scope="session")
-def pubsub_topic(pubsub_client: pubsub_v1.PublisherClient, project_id):
-    topic_id = resource_prefix()
-    topic_path = pubsub_v1.PublisherClient.topic_path(project_id, topic_id)
-    pubsub_client.create_topic(name=topic_path)
-    yield topic_path
-    pubsub_client.delete_topic(topic=topic_path)
-
-
-@pytest.fixture(scope="session")
-def dataset_id(bigquery_client, project_id):
-    dataset_id = f"bqdts_{temp_suffix()}"
-    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
-    yield dataset_id
-    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
-
-
-@pytest.fixture(scope="session")
-def default_credentials():
-    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
-
-
-@pytest.fixture(scope="session")
-def project_id():
-    return os.environ["GOOGLE_CLOUD_PROJECT"]
-
-
-@pytest.fixture(scope="session")
-def service_account_name(default_credentials):
-    credentials, _ = default_credentials
-    # The service_account_email attribute is not available when running with
-    # user account credentials, but should be available when running from our
-    # continuous integration tests.
-    return getattr(credentials, "service_account_email", None)
-
-
-@pytest.fixture(scope="session")
-def transfer_client(default_credentials, project_id):
-    credentials, _ = default_credentials
-    options = client_options.ClientOptions(quota_project_id=project_id)
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient(
-        credentials=credentials, client_options=options
-    )
-
-    # Ensure quota is always attributed to the correct project.
-    bigquery_datatransfer.DataTransferServiceClient = lambda: transfer_client
-
-    return transfer_client
-
-
-@pytest.fixture(scope="session")
-def transfer_config_name(transfer_client, project_id, dataset_id, service_account_name):
-    from . import manage_transfer_configs, scheduled_query
-
-    # Use the transfer_client fixture so we know quota is attributed to the
-    # correct project.
-    assert transfer_client is not None
-
-    # To conserve limited BQ-DTS quota, this fixture creates only one transfer
-    # config for a whole session and is used to test the scheduled_query.py and
-    # the delete operation in manage_transfer_configs.py.
-    transfer_config = scheduled_query.create_scheduled_query(
-        {
-            "project_id": project_id,
-            "dataset_id": dataset_id,
-            "service_account_name": service_account_name,
-        }
-    )
-    yield transfer_config.name
-    manage_transfer_configs.delete_config(
-        {"transfer_config_name": transfer_config.name}
-    )
-
-
-@pytest.fixture
-def to_delete_configs(transfer_client):
-    to_delete = []
-    yield to_delete
-    for config_name in to_delete:
-        try:
-            transfer_client.delete_transfer_config(name=config_name)
-        except google.api_core.exceptions.GoogleAPICallError:
-            pass
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset.py
deleted file mode 100644
index 084ab733034b..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset.py
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def copy_dataset(override_values={}):
-    # [START bigquerydatatransfer_copy_dataset]
-    from google.cloud import bigquery_datatransfer
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    destination_project_id = "my-destination-project"
-    destination_dataset_id = "my_destination_dataset"
-    source_project_id = "my-source-project"
-    source_dataset_id = "my_source_dataset"
-    # [END bigquerydatatransfer_copy_dataset]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    destination_project_id = override_values.get(
-        "destination_project_id", destination_project_id
-    )
-    destination_dataset_id = override_values.get(
-        "destination_dataset_id", destination_dataset_id
-    )
-    source_project_id = override_values.get("source_project_id", source_project_id)
-    source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
-    # [START bigquerydatatransfer_copy_dataset]
-    transfer_config = bigquery_datatransfer.TransferConfig(
-        destination_dataset_id=destination_dataset_id,
-        display_name="Your Dataset Copy Name",
-        data_source_id="cross_region_copy",
-        params={
-            "source_project_id": source_project_id,
-            "source_dataset_id": source_dataset_id,
-        },
-        schedule="every 24 hours",
-    )
-    transfer_config = transfer_client.create_transfer_config(
-        parent=transfer_client.common_project_path(destination_project_id),
-        transfer_config=transfer_config,
-    )
-    print(f"Created transfer config: {transfer_config.name}")
-    # [END bigquerydatatransfer_copy_dataset]
-    return transfer_config
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset_test.py
deleted file mode 100644
index 349f05cef5f8..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/copy_dataset_test.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import uuid
-
-import pytest
-
-from . import copy_dataset
-
-
-def temp_suffix():
-    now = datetime.datetime.now()
-    return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
-
-
-@pytest.fixture(scope="module")
-def destination_dataset_id(bigquery_client, project_id):
-    dataset_id = f"bqdts_dest_{temp_suffix()}"
-    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
-    yield dataset_id
-    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
-
-
-@pytest.fixture(scope="module")
-def source_dataset_id(bigquery_client, project_id):
-    dataset_id = f"bqdts_src_{temp_suffix()}"
-    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
-    yield dataset_id
-    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
-
-
-def test_copy_dataset(
-    capsys,
-    transfer_client,
-    project_id,
-    destination_dataset_id,
-    source_dataset_id,
-    to_delete_configs,
-):
-    # Use the transfer_client fixture so we know quota is attributed to the
-    # correct project.
-    assert transfer_client is not None
-
-    transfer_config = copy_dataset.copy_dataset(
-        {
-            "destination_project_id": project_id,
-            "destination_dataset_id": destination_dataset_id,
-            "source_project_id": project_id,
-            "source_dataset_id": source_dataset_id,
-        }
-    )
-    to_delete_configs.append(transfer_config.name)
-    out, _ = capsys.readouterr()
-    assert transfer_config.name in out
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs.py
deleted file mode 100644
index cd865455c10d..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def list_configs(override_values={}):
-    # [START bigquerydatatransfer_list_configs]
-    from google.cloud import bigquery_datatransfer
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    project_id = "my-project"
-    # [END bigquerydatatransfer_list_configs]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    project_id = override_values.get("project_id", project_id)
-    # [START bigquerydatatransfer_list_configs]
-    parent = transfer_client.common_project_path(project_id)
-
-    configs = transfer_client.list_transfer_configs(parent=parent)
-    print("Got the following configs:")
-    for config in configs:
-        print(f"\tID: {config.name}, Schedule: {config.schedule}")
-    # [END bigquerydatatransfer_list_configs]
-
-
-def update_config(override_values={}):
-    # [START bigquerydatatransfer_update_config]
-    from google.cloud import bigquery_datatransfer
-    from google.protobuf import field_mask_pb2
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
-    new_display_name = "My Transfer Config"
-    # [END bigquerydatatransfer_update_config]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    new_display_name = override_values.get("new_display_name", new_display_name)
-    transfer_config_name = override_values.get(
-        "transfer_config_name", transfer_config_name
-    )
-    # [START bigquerydatatransfer_update_config]
-
-    transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name)
-    transfer_config.display_name = new_display_name
-
-    transfer_config = transfer_client.update_transfer_config(
-        {
-            "transfer_config": transfer_config,
-            "update_mask": field_mask_pb2.FieldMask(paths=["display_name"]),
-        }
-    )
-
-    print(f"Updated config: '{transfer_config.name}'")
-    print(f"New display name: '{transfer_config.display_name}'")
-    # [END bigquerydatatransfer_update_config]
-    # Return the config name for testing purposes, so that it can be deleted.
-    return transfer_config
-
-
-def update_credentials_with_service_account(override_values={}):
-    # [START bigquerydatatransfer_update_credentials]
-    from google.cloud import bigquery_datatransfer
-    from google.protobuf import field_mask_pb2
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    service_account_name = "abcdef-test-sa@abcdef-test.iam.gserviceaccount.com"
-    transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
-    # [END bigquerydatatransfer_update_credentials]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    service_account_name = override_values.get(
-        "service_account_name", service_account_name
-    )
-    transfer_config_name = override_values.get(
-        "transfer_config_name", transfer_config_name
-    )
-    # [START bigquerydatatransfer_update_credentials]
-
-    transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name)
-
-    transfer_config = transfer_client.update_transfer_config(
-        {
-            "transfer_config": transfer_config,
-            "update_mask": field_mask_pb2.FieldMask(paths=["service_account_name"]),
-            "service_account_name": service_account_name,
-        }
-    )
-
-    print("Updated config: '{}'".format(transfer_config.name))
-    # [END bigquerydatatransfer_update_credentials]
-    # Return the config name for testing purposes, so that it can be deleted.
-    return transfer_config
-
-
-def schedule_backfill_manual_transfer(override_values={}):
-    # [START bigquerydatatransfer_schedule_backfill]
-    import datetime
-
-    from google.cloud.bigquery_datatransfer_v1 import (
-        DataTransferServiceClient,
-        StartManualTransferRunsRequest,
-    )
-
-    # Create a client object
-    client = DataTransferServiceClient()
-
-    # Replace with your transfer configuration name
-    transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
-    # [END bigquerydatatransfer_schedule_backfill]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    transfer_config_name = override_values.get(
-        "transfer_config_name", transfer_config_name
-    )
-    # [START bigquerydatatransfer_schedule_backfill]
-    now = datetime.datetime.now(datetime.timezone.utc)
-    start_time = now - datetime.timedelta(days=5)
-    end_time = now - datetime.timedelta(days=2)
-
-    # Some data sources, such as scheduled_query, only support daily runs.
-    # Truncate start_time and end_time to midnight (00:00 UTC).
-    start_time = datetime.datetime(
-        start_time.year, start_time.month, start_time.day, tzinfo=datetime.timezone.utc
-    )
-    end_time = datetime.datetime(
-        end_time.year, end_time.month, end_time.day, tzinfo=datetime.timezone.utc
-    )
-
-    requested_time_range = StartManualTransferRunsRequest.TimeRange(
-        start_time=start_time,
-        end_time=end_time,
-    )
-
-    # Initialize request argument(s)
-    request = StartManualTransferRunsRequest(
-        parent=transfer_config_name,
-        requested_time_range=requested_time_range,
-    )
-
-    # Make the request
-    response = client.start_manual_transfer_runs(request=request)
-
-    # Handle the response
-    print("Started manual transfer runs:")
-    for run in response.runs:
-        print(f"backfill: {run.run_time} run: {run.name}")
-    # [END bigquerydatatransfer_schedule_backfill]
-    return response.runs
-
-
-def delete_config(override_values={}):
-    # [START bigquerydatatransfer_delete_transfer]
-    import google.api_core.exceptions
-    from google.cloud import bigquery_datatransfer
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
-    # [END bigquerydatatransfer_delete_transfer]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    transfer_config_name = override_values.get(
-        "transfer_config_name", transfer_config_name
-    )
-    # [START bigquerydatatransfer_delete_transfer]
-    try:
-        transfer_client.delete_transfer_config(name=transfer_config_name)
-    except google.api_core.exceptions.NotFound:
-        print("Transfer config not found.")
-    else:
-        print(f"Deleted transfer config: {transfer_config_name}")
-    # [END bigquerydatatransfer_delete_transfer]
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs_test.py
deleted file mode 100644
index 5504f19cbf91..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/manage_transfer_configs_test.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from . import manage_transfer_configs
-
-
-def test_list_configs(capsys, project_id, transfer_config_name):
-    manage_transfer_configs.list_configs({"project_id": project_id})
-    out, _ = capsys.readouterr()
-    assert "Got the following configs:" in out
-    assert transfer_config_name in out
-
-
-def test_update_config(capsys, transfer_config_name):
-    manage_transfer_configs.update_config(
-        {
-            "new_display_name": "name from test_update_config",
-            "transfer_config_name": transfer_config_name,
-        }
-    )
-    out, _ = capsys.readouterr()
-    assert "Updated config:" in out
-    assert transfer_config_name in out
-    assert "name from test_update_config" in out
-
-
-def test_update_credentials_with_service_account(
-    capsys, project_id, service_account_name, transfer_config_name
-):
-    manage_transfer_configs.update_credentials_with_service_account(
-        {
-            "project_id": project_id,
-            "service_account_name": service_account_name,
-            "transfer_config_name": transfer_config_name,
-        }
-    )
-    out, _ = capsys.readouterr()
-    assert "Updated config:" in out
-    assert transfer_config_name in out
-
-
-def test_schedule_backfill_manual_transfer(capsys, transfer_config_name):
-    runs = manage_transfer_configs.schedule_backfill_manual_transfer(
-        {"transfer_config_name": transfer_config_name}
-    )
-    out, _ = capsys.readouterr()
-    assert "Started manual transfer runs:" in out
-    # Run IDs should include the transfer name in their path.
-    assert transfer_config_name in out
-    # Check that there are three runs for between 2 and 5 days ago.
-    assert len(runs) == 3
-
-
-def test_delete_config(capsys, transfer_config_name):
-    # transfer_config_name fixture in conftest.py calls the delete config
-    # sample. To conserve limited BQ-DTS quota we only make basic checks.
-    assert len(transfer_config_name) != 0
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/noxfile_config.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/noxfile_config.py
deleted file mode 100644
index 57b25e58e396..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/noxfile_config.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Default TEST_CONFIG_OVERRIDE for python repos.
-
-# You can copy this file into your directory, then it will be imported from
-# the noxfile.py.
-
-# The source of truth:
-# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py
-
-TEST_CONFIG_OVERRIDE = {
-    # You can opt out from the test for specific Python versions.
-    "ignored_versions": ["2.7"],
-    # Old samples are opted out of enforcing Python type hints.
-    # All new samples should feature them.
-    "enforce_type_hints": False,
-    # An envvar key for determining the project id to use. Change it
-    # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in to using a
-    # build-specific Cloud project. You can also use your own string
-    # to use your own Cloud project.
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT", - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py deleted file mode 100644 index de8d05e52428..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import sys - - -def run_quickstart(override_values={}): - # [START bigquerydatatransfer_quickstart] - from google.cloud import bigquery_datatransfer - - client = bigquery_datatransfer.DataTransferServiceClient() - - # TODO: Update to your project ID. - project_id = "my-project" - # [END bigquerydatatransfer_quickstart] - # To facilitate testing, we replace values with alternatives - # provided by the testing harness. - project_id = override_values.get("project_id", project_id) - # [START bigquerydatatransfer_quickstart] - - # Get the full path to your project. - parent = client.common_project_path(project_id) - - print("Supported Data Sources:") - - # Iterate over all possible data sources. - for data_source in client.list_data_sources(parent=parent): - print("{}:".format(data_source.display_name)) - print("\tID: {}".format(data_source.data_source_id)) - print("\tFull path: {}".format(data_source.name)) - print("\tDescription: {}".format(data_source.description)) - # [END bigquerydatatransfer_quickstart] - - -if __name__ == "__main__": - run_quickstart(override_values={"project_id": sys.argv[1]}) diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py deleted file mode 100644 index 46398b0f939f..000000000000 --- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/quickstart_test.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import quickstart - - -def test_quickstart(capsys, transfer_client, project_id): - # Use the transfer_client fixture so we know quota is attributed to the - # correct project. 
-    assert transfer_client is not None
-
-    quickstart.run_quickstart(override_values={"project_id": project_id})
-    out, _ = capsys.readouterr()
-    assert "Supported Data Sources:" in out
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification.py
deleted file mode 100644
index 44f1bf12451c..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def run_notification(transfer_config_name, pubsub_topic):
-    orig_transfer_config_name = transfer_config_name
-    orig_pubsub_topic = pubsub_topic
-    # [START bigquerydatatransfer_run_notification]
-    transfer_config_name = "projects/1234/locations/us/transferConfigs/abcd"
-    pubsub_topic = "projects/PROJECT-ID/topics/TOPIC-ID"
-    # [END bigquerydatatransfer_run_notification]
-    transfer_config_name = orig_transfer_config_name
-    pubsub_topic = orig_pubsub_topic
-
-    # [START bigquerydatatransfer_run_notification]
-    from google.cloud import bigquery_datatransfer
-    from google.protobuf import field_mask_pb2
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    transfer_config = bigquery_datatransfer.TransferConfig(name=transfer_config_name)
-    transfer_config.notification_pubsub_topic = pubsub_topic
-    update_mask = field_mask_pb2.FieldMask(paths=["notification_pubsub_topic"])
-
-    transfer_config = transfer_client.update_transfer_config(
-        {"transfer_config": transfer_config, "update_mask": update_mask}
-    )
-
-    print(f"Updated config: '{transfer_config.name}'")
-    print(f"Notification Pub/Sub topic: '{transfer_config.notification_pubsub_topic}'")
-    # [END bigquerydatatransfer_run_notification]
-    # Return the config name for testing purposes, so that it can be deleted.
-    return transfer_config
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification_test.py
deleted file mode 100644
index 02f24266cdf6..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/run_notification_test.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from . import run_notification
-
-
-def test_run_notification(capsys, transfer_config_name, pubsub_topic):
-    run_notification.run_notification(
-        transfer_config_name=transfer_config_name,
-        pubsub_topic=pubsub_topic,
-    )
-    out, _ = capsys.readouterr()
-    assert "Updated config:" in out
-    assert transfer_config_name in out
-    assert "Notification Pub/Sub topic:" in out
-    assert pubsub_topic in out
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query.py
deleted file mode 100644
index ab85c5152baa..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def create_scheduled_query(override_values={}):
-    # [START bigquerydatatransfer_create_scheduled_query]
-    # [START bigquerydatatransfer_create_scheduled_query_with_service_account]
-    from google.cloud import bigquery_datatransfer
-
-    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
-
-    # The project where the query job runs is the same as the project
-    # containing the destination dataset.
-    project_id = "your-project-id"
-    dataset_id = "your_dataset_id"
-
-    # This service account will be used to execute the scheduled queries. Omit
-    # this request parameter to run the query as the user with the credentials
-    # associated with this client.
-    service_account_name = "abcdef-test-sa@abcdef-test.iam.gserviceaccount.com"
-    # [END bigquerydatatransfer_create_scheduled_query_with_service_account]
-    # [END bigquerydatatransfer_create_scheduled_query]
-    # To facilitate testing, we replace values with alternatives
-    # provided by the testing harness.
-    project_id = override_values.get("project_id", project_id)
-    dataset_id = override_values.get("dataset_id", dataset_id)
-    service_account_name = override_values.get(
-        "service_account_name", service_account_name
-    )
-    # [START bigquerydatatransfer_create_scheduled_query]
-    # [START bigquerydatatransfer_create_scheduled_query_with_service_account]
-
-    # Use standard SQL syntax for the query.
-    query_string = """
-    SELECT
-      CURRENT_TIMESTAMP() as current_time,
-      @run_time as intended_run_time,
-      @run_date as intended_run_date,
-      17 as some_integer
-    """
-
-    parent = transfer_client.common_project_path(project_id)
-
-    transfer_config = bigquery_datatransfer.TransferConfig(
-        destination_dataset_id=dataset_id,
-        display_name="Your Scheduled Query Name",
-        data_source_id="scheduled_query",
-        params={
-            "query": query_string,
-            "destination_table_name_template": "your_table_{run_date}",
-            "write_disposition": "WRITE_TRUNCATE",
-            "partitioning_field": "",
-        },
-        schedule="every 24 hours",
-    )
-
-    transfer_config = transfer_client.create_transfer_config(
-        bigquery_datatransfer.CreateTransferConfigRequest(
-            parent=parent,
-            transfer_config=transfer_config,
-            service_account_name=service_account_name,
-        )
-    )
-
-    print("Created scheduled query '{}'".format(transfer_config.name))
-    # [END bigquerydatatransfer_create_scheduled_query_with_service_account]
-    # [END bigquerydatatransfer_create_scheduled_query]
-    # Return the config name for testing purposes, so that it can be deleted.
-    return transfer_config
diff --git a/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query_test.py b/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query_test.py
deleted file mode 100644
index ef8418241924..000000000000
--- a/packages/google-cloud-bigquery-datatransfer/samples/snippets/scheduled_query_test.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-def test_create_scheduled_query(transfer_config_name):
-    from . import scheduled_query
-
-    # transfer_config_name fixture in conftest.py calls the scheduled query
-    # sample. To conserve limited BQ-DTS quota we only make basic checks.
-    assert hasattr(scheduled_query, "create_scheduled_query")
-    assert len(transfer_config_name) != 0