diff --git a/bigquery-datatransfer/snippets/__init__.py b/bigquery-datatransfer/snippets/__init__.py
new file mode 100644
index 000000000000..c6334245aea5
--- /dev/null
+++ b/bigquery-datatransfer/snippets/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/bigquery-datatransfer/snippets/copy_dataset.py b/bigquery-datatransfer/snippets/copy_dataset.py
new file mode 100644
index 000000000000..084ab733034b
--- /dev/null
+++ b/bigquery-datatransfer/snippets/copy_dataset.py
@@ -0,0 +1,67 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def copy_dataset(override_values=None):
+    """Create a transfer config that copies a BigQuery dataset cross-region.
+
+    Args:
+        override_values: Optional mapping used by the test harness to
+            replace the hard-coded sample project/dataset IDs.
+
+    Returns:
+        The created transfer config.
+    """
+    # Use a None sentinel instead of a mutable {} default to avoid the
+    # shared mutable-default-argument pitfall.
+    if override_values is None:
+        override_values = {}
+    # [START bigquerydatatransfer_copy_dataset]
+    from google.cloud import bigquery_datatransfer
+
+    transfer_client = bigquery_datatransfer.DataTransferServiceClient()
+
+    destination_project_id = "my-destination-project"
+    destination_dataset_id = "my_destination_dataset"
+    source_project_id = "my-source-project"
+    source_dataset_id = "my_source_dataset"
+    # [END bigquerydatatransfer_copy_dataset]
+    # To facilitate testing, we replace values with alternatives
+    # provided by the testing harness.
+    destination_project_id = override_values.get(
+        "destination_project_id", destination_project_id
+    )
+    destination_dataset_id = override_values.get(
+        "destination_dataset_id", destination_dataset_id
+    )
+    source_project_id = override_values.get("source_project_id", source_project_id)
+    source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
+    # [START bigquerydatatransfer_copy_dataset]
+    transfer_config = bigquery_datatransfer.TransferConfig(
+        destination_dataset_id=destination_dataset_id,
+        display_name="Your Dataset Copy Name",
+        data_source_id="cross_region_copy",
+        params={
+            "source_project_id": source_project_id,
+            "source_dataset_id": source_dataset_id,
+        },
+        schedule="every 24 hours",
+    )
+    transfer_config = transfer_client.create_transfer_config(
+        parent=transfer_client.common_project_path(destination_project_id),
+        transfer_config=transfer_config,
+    )
+    print(f"Created transfer config: {transfer_config.name}")
+    # [END bigquerydatatransfer_copy_dataset]
+    return transfer_config
diff --git a/bigquery-datatransfer/snippets/copy_dataset_test.py b/bigquery-datatransfer/snippets/copy_dataset_test.py
new file mode 100644
index 000000000000..6ff702372171
--- /dev/null
+++ b/bigquery-datatransfer/snippets/copy_dataset_test.py
@@ -0,0 +1,107 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import uuid
+
+import google.api_core.exceptions
+import google.auth
+from google.cloud import bigquery
+from google.cloud import bigquery_datatransfer
+import pytest
+
+from . import copy_dataset
+
+
+def temp_suffix():
+    """Return a timestamped, random suffix for test resource names."""
+    now = datetime.datetime.now()
+    return f"{now.strftime('%Y%m%d%H%M%S')}_{uuid.uuid4().hex[:8]}"
+
+
+@pytest.fixture(scope="session")
+def default_credentials():
+    """Application-default credentials with the cloud-platform scope."""
+    return google.auth.default(["https://www.googleapis.com/auth/cloud-platform"])
+
+
+@pytest.fixture(scope="session")
+def project_id(default_credentials):
+    """Project ID resolved from the default credentials."""
+    _, project_id = default_credentials
+    return project_id
+
+
+@pytest.fixture(scope="session")
+def bigquery_client(default_credentials):
+    """BigQuery client shared by all tests in the session."""
+    credentials, project_id = default_credentials
+    return bigquery.Client(credentials=credentials, project=project_id)
+
+
+@pytest.fixture(scope="session")
+def transfer_client(default_credentials):
+    """Data Transfer Service client shared by all tests in the session."""
+    credentials, _ = default_credentials
+    return bigquery_datatransfer.DataTransferServiceClient(credentials=credentials)
+
+
+@pytest.fixture
+def to_delete_configs(transfer_client):
+    """Collect transfer config names and delete them after each test."""
+    to_delete = []
+    yield to_delete
+    for config_name in to_delete:
+        try:
+            transfer_client.delete_transfer_config(name=config_name)
+        except google.api_core.exceptions.GoogleAPICallError:
+            # Best-effort cleanup; the config may already be gone.
+            pass
+
+
+@pytest.fixture(scope="module")
+def destination_dataset_id(bigquery_client, project_id):
+    """Create (and later delete) the copy destination dataset."""
+    dataset_id = f"bqdts_dest_{temp_suffix()}"
+    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
+    yield dataset_id
+    # not_found_ok keeps teardown from raising if the dataset is already gone.
+    bigquery_client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
+
+
+@pytest.fixture(scope="module")
+def source_dataset_id(bigquery_client, project_id):
+    """Create (and later delete) the copy source dataset."""
+    dataset_id = f"bqdts_src_{temp_suffix()}"
+    bigquery_client.create_dataset(f"{project_id}.{dataset_id}")
+    yield dataset_id
+    # not_found_ok keeps teardown from raising if the dataset is already gone.
+    bigquery_client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
+
+
+def test_copy_dataset(
+    capsys, project_id, destination_dataset_id, source_dataset_id, to_delete_configs
+):
+    """copy_dataset prints and returns the created transfer config."""
+    transfer_config = copy_dataset.copy_dataset(
+        {
+            "destination_project_id": project_id,
+            "destination_dataset_id": destination_dataset_id,
+            "source_project_id": project_id,
+            "source_dataset_id": source_dataset_id,
+        }
+    )
+    to_delete_configs.append(transfer_config.name)
+    out, _ = capsys.readouterr()
+    assert transfer_config.name in out
diff --git a/bigquery-datatransfer/snippets/quickstart_test.py b/bigquery-datatransfer/snippets/quickstart_test.py
index 387c2e8b6066..5b873c63bea7 100644
--- a/bigquery-datatransfer/snippets/quickstart_test.py
+++ b/bigquery-datatransfer/snippets/quickstart_test.py
@@ -16,10 +16,10 @@
 
 import pytest
 
-import quickstart
+from . import quickstart
 
 
-PROJECT = os.environ['GOOGLE_CLOUD_PROJECT']
+PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
 
 
 @pytest.fixture
@@ -32,4 +32,4 @@ def mock_project_id():
 def test_quickstart(capsys, mock_project_id):
     quickstart.run_quickstart(mock_project_id)
     out, _ = capsys.readouterr()
-    assert 'Supported Data Sources:' in out
+    assert "Supported Data Sources:" in out
diff --git a/bigquery-datatransfer/snippets/requirements-test.txt b/bigquery-datatransfer/snippets/requirements-test.txt
index 2466e25087c6..fff09f56d36c 100644
--- a/bigquery-datatransfer/snippets/requirements-test.txt
+++ b/bigquery-datatransfer/snippets/requirements-test.txt
@@ -1,2 +1,3 @@
+google-cloud-bigquery==2.6.0
 pytest==6.0.1
 mock==4.0.2