From 237d97ad134e6e84ec202ce4df2ed6c7c5aef866 Mon Sep 17 00:00:00 2001
From: Torry Yang <sirtorry@users.noreply.github.com>
Date: Fri, 20 Jul 2018 16:24:34 -0700
Subject: [PATCH] automl beta
 [(#1575)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1575)

* automl initial commit

* lint

* fix import groupings

* add requirements.txt

* address review comments
---
 automl/snippets/automl_translation_dataset.py | 278 ++++++++++++++++
 automl/snippets/automl_translation_model.py   | 300 ++++++++++++++++++
 automl/snippets/automl_translation_predict.py | 109 +++++++
 automl/snippets/dataset_test.py               |  69 ++++
 automl/snippets/model_test.py                 |  78 +++++
 automl/snippets/predict_test.py               |  31 ++
 6 files changed, 865 insertions(+)
 create mode 100755 automl/snippets/automl_translation_dataset.py
 create mode 100755 automl/snippets/automl_translation_model.py
 create mode 100644 automl/snippets/automl_translation_predict.py
 create mode 100644 automl/snippets/dataset_test.py
 create mode 100644 automl/snippets/model_test.py
 create mode 100644 automl/snippets/predict_test.py

diff --git a/automl/snippets/automl_translation_dataset.py b/automl/snippets/automl_translation_dataset.py
new file mode 100755
index 000000000000..e579ac358a21
--- /dev/null
+++ b/automl/snippets/automl_translation_dataset.py
@@ -0,0 +1,278 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to perform basic operations on dataset
+with the Google AutoML Translation API.
+
+For more information, see the documentation at
+https://cloud.google.com/translate/automl/docs
+"""
+
+import argparse
+import os
+
+
+def create_dataset(project_id, compute_region, dataset_name, source, target):
+    """Create a dataset."""
+    # [START automl_translation_create_dataset]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # dataset_name = 'DATASET_NAME_HERE'
+    # source = 'LANGUAGE_CODE_OF_SOURCE_LANGUAGE'
+    # target = 'LANGUAGE_CODE_OF_TARGET_LANGUAGE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # A resource that represents a Google Cloud Platform location.
+    project_location = client.location_path(project_id, compute_region)
+
+    # Specify the source and target language.
+    dataset_metadata = {
+        "source_language_code": source,
+        "target_language_code": target,
+    }
+    # Set the dataset name and metadata.
+    my_dataset = {
+        "display_name": dataset_name,
+        "translation_dataset_metadata": dataset_metadata,
+    }
+
+    # Create a dataset with the dataset metadata in the region.
+    dataset = client.create_dataset(project_location, my_dataset)
+
+    # Display the dataset information
+    print("Dataset name: {}".format(dataset.name))
+    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
+    print("Dataset display name: {}".format(dataset.display_name))
+    print("Translation dataset Metadata:")
+    print(
+        "\tsource_language_code: {}".format(
+            dataset.translation_dataset_metadata.source_language_code
+        )
+    )
+    print(
+        "\ttarget_language_code: {}".format(
+            dataset.translation_dataset_metadata.target_language_code
+        )
+    )
+    print("Dataset create time:")
+    print("\tseconds: {}".format(dataset.create_time.seconds))
+    print("\tnanos: {}".format(dataset.create_time.nanos))
+
+    # [END automl_translation_create_dataset]
+
+
+def list_datasets(project_id, compute_region, filter_):
+    """List Datasets."""
+    # [START automl_translation_list_datasets]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # filter_ = 'filter expression here'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # A resource that represents a Google Cloud Platform location.
+    project_location = client.location_path(project_id, compute_region)
+
+    # List all the datasets available in the region by applying filter.
+    response = client.list_datasets(project_location, filter_)
+
+    print("List of datasets:")
+    for dataset in response:
+        # Display the dataset information
+        print("Dataset name: {}".format(dataset.name))
+        print("Dataset id: {}".format(dataset.name.split("/")[-1]))
+        print("Dataset display name: {}".format(dataset.display_name))
+        print("Translation dataset metadata:")
+        print(
+            "\tsource_language_code: {}".format(
+                dataset.translation_dataset_metadata.source_language_code
+            )
+        )
+        print(
+            "\ttarget_language_code: {}".format(
+                dataset.translation_dataset_metadata.target_language_code
+            )
+        )
+        print("Dataset create time:")
+        print("\tseconds: {}".format(dataset.create_time.seconds))
+        print("\tnanos: {}".format(dataset.create_time.nanos))
+
+    # [END automl_translation_list_datasets]
+
+
+def get_dataset(project_id, compute_region, dataset_id):
+    """Get the dataset."""
+    # [START automl_translation_get_dataset]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # dataset_id = 'DATASET_ID_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the dataset
+    dataset_full_id = client.dataset_path(
+        project_id, compute_region, dataset_id
+    )
+
+    # Get complete detail of the dataset.
+    dataset = client.get_dataset(dataset_full_id)
+
+    # Display the dataset information
+    print("Dataset name: {}".format(dataset.name))
+    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
+    print("Dataset display name: {}".format(dataset.display_name))
+    print("Translation dataset metadata:")
+    print(
+        "\tsource_language_code: {}".format(
+            dataset.translation_dataset_metadata.source_language_code
+        )
+    )
+    print(
+        "\ttarget_language_code: {}".format(
+            dataset.translation_dataset_metadata.target_language_code
+        )
+    )
+    print("Dataset create time:")
+    print("\tseconds: {}".format(dataset.create_time.seconds))
+    print("\tnanos: {}".format(dataset.create_time.nanos))
+
+    # [END automl_translation_get_dataset]
+
+
+def import_data(project_id, compute_region, dataset_id, path):
+    """Import sentence pairs to the dataset."""
+    # [START automl_translation_import_data]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # dataset_id = 'DATASET_ID_HERE'
+    # path = 'gs://path/to/file.csv'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the dataset.
+    dataset_full_id = client.dataset_path(
+        project_id, compute_region, dataset_id
+    )
+
+    # The path may contain one or more comma-separated Google Cloud Storage
+    # URIs, e.g. "gs://bucket/a.csv,gs://bucket/b.csv".
+    input_uris = path.split(",")
+    input_config = {"gcs_source": {"input_uris": input_uris}}
+
+    # Import data from the input URIs.
+    response = client.import_data(dataset_full_id, input_config)
+
+    print("Processing import...")
+    # synchronous check of operation status
+    print("Data imported. {}".format(response.result()))
+
+    # [END automl_translation_import_data]
+
+
+def delete_dataset(project_id, compute_region, dataset_id):
+    """Delete a dataset."""
+    # [START automl_translation_delete_dataset]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # dataset_id = 'DATASET_ID_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the dataset.
+    dataset_full_id = client.dataset_path(
+        project_id, compute_region, dataset_id
+    )
+
+    # Delete a dataset.
+    response = client.delete_dataset(dataset_full_id)
+
+    # synchronous check of operation status
+    print("Dataset deleted. {}".format(response.result()))
+
+    # [END automl_translation_delete_dataset]
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    subparsers = parser.add_subparsers(dest="command")
+
+    create_dataset_parser = subparsers.add_parser(
+        "create_dataset", help=create_dataset.__doc__
+    )
+    create_dataset_parser.add_argument("dataset_name")
+    create_dataset_parser.add_argument("source")
+    create_dataset_parser.add_argument("target")
+
+    list_datasets_parser = subparsers.add_parser(
+        "list_datasets", help=list_datasets.__doc__
+    )
+    list_datasets_parser.add_argument("filter", nargs="?", default="")
+
+    import_data_parser = subparsers.add_parser(
+        "import_data", help=import_data.__doc__
+    )
+    import_data_parser.add_argument("dataset_id")
+    import_data_parser.add_argument("path")
+
+    delete_dataset_parser = subparsers.add_parser(
+        "delete_dataset", help=delete_dataset.__doc__
+    )
+    delete_dataset_parser.add_argument("dataset_id")
+
+    get_dataset_parser = subparsers.add_parser(
+        "get_dataset", help=get_dataset.__doc__
+    )
+    get_dataset_parser.add_argument("dataset_id")
+
+    project_id = os.environ["PROJECT_ID"]
+    compute_region = os.environ["REGION_NAME"]
+
+    args = parser.parse_args()
+
+    if args.command == "create_dataset":
+        create_dataset(
+            project_id,
+            compute_region,
+            args.dataset_name,
+            args.source,
+            args.target,
+        )
+    if args.command == "list_datasets":
+        list_datasets(project_id, compute_region, args.filter)
+    if args.command == "get_dataset":
+        get_dataset(project_id, compute_region, args.dataset_id)
+    if args.command == "import_data":
+        import_data(project_id, compute_region, args.dataset_id, args.path)
+    if args.command == "delete_dataset":
+        delete_dataset(project_id, compute_region, args.dataset_id)
diff --git a/automl/snippets/automl_translation_model.py b/automl/snippets/automl_translation_model.py
new file mode 100755
index 000000000000..0b9b6f53acaa
--- /dev/null
+++ b/automl/snippets/automl_translation_model.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to perform basic operations on model
+with the Google AutoML Translation API.
+
+For more information, see the documentation at
+https://cloud.google.com/translate/automl/docs
+"""
+
+import argparse
+import os
+
+
+def create_model(project_id, compute_region, dataset_id, model_name):
+    """Create a model."""
+    # [START automl_translation_create_model]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # dataset_id = 'DATASET_ID_HERE'
+    # model_name = 'MODEL_NAME_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # A resource that represents a Google Cloud Platform location.
+    project_location = client.location_path(project_id, compute_region)
+
+    # Set model name and dataset.
+    my_model = {
+        "display_name": model_name,
+        "dataset_id": dataset_id,
+        "translation_model_metadata": {"base_model": ""},
+    }
+
+    # Create a model with the model metadata in the region.
+    response = client.create_model(project_location, my_model)
+
+    print("Training operation name: {}".format(response.operation.name))
+    print("Training started...")
+
+    # [END automl_translation_create_model]
+
+
+def list_models(project_id, compute_region, filter_):
+    """List all models."""
+    # [START automl_translation_list_models]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # filter_ = 'filter expression here'
+
+    from google.cloud import automl_v1beta1 as automl
+    from google.cloud.automl_v1beta1 import enums
+
+    client = automl.AutoMlClient()
+
+    # A resource that represents a Google Cloud Platform location.
+    project_location = client.location_path(project_id, compute_region)
+
+    # List all the models available in the region by applying filter.
+    response = client.list_models(project_location, filter_)
+
+    print("List of models:")
+    for model in response:
+        # Display the model information.
+        if model.deployment_state == enums.Model.DeploymentState.DEPLOYED:
+            deployment_state = "deployed"
+        else:
+            deployment_state = "undeployed"
+
+        print("Model name: {}".format(model.name))
+        print("Model id: {}".format(model.name.split("/")[-1]))
+        print("Model display name: {}".format(model.display_name))
+        print("Model create time:")
+        print("\tseconds: {}".format(model.create_time.seconds))
+        print("\tnanos: {}".format(model.create_time.nanos))
+        print("Model deployment state: {}".format(deployment_state))
+
+    # [END automl_translation_list_models]
+
+
+def get_model(project_id, compute_region, model_id):
+    """Get model details."""
+    # [START automl_translation_get_model]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # model_id = 'MODEL_ID_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+    from google.cloud.automl_v1beta1 import enums
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the model.
+    model_full_id = client.model_path(project_id, compute_region, model_id)
+
+    # Get complete detail of the model.
+    model = client.get_model(model_full_id)
+
+    # Retrieve deployment state.
+    if model.deployment_state == enums.Model.DeploymentState.DEPLOYED:
+        deployment_state = "deployed"
+    else:
+        deployment_state = "undeployed"
+
+    # Display the model information.
+    print("Model name: {}".format(model.name))
+    print("Model id: {}".format(model.name.split("/")[-1]))
+    print("Model display name: {}".format(model.display_name))
+    print("Model create time:")
+    print("\tseconds: {}".format(model.create_time.seconds))
+    print("\tnanos: {}".format(model.create_time.nanos))
+    print("Model deployment state: {}".format(deployment_state))
+
+    # [END automl_translation_get_model]
+
+
+def list_model_evaluations(project_id, compute_region, model_id, filter_):
+    """List model evaluations."""
+    # [START automl_translation_list_model_evaluations]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # model_id = 'MODEL_ID_HERE'
+    # filter_ = 'filter expression here'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the model.
+    model_full_id = client.model_path(project_id, compute_region, model_id)
+
+    print("List of model evaluations:")
+    for element in client.list_model_evaluations(model_full_id, filter_):
+        print(element)
+
+    # [END automl_translation_list_model_evaluations]
+
+
+def get_model_evaluation(
+    project_id, compute_region, model_id, model_evaluation_id
+):
+    """Get model evaluation."""
+    # [START automl_translation_get_model_evaluation]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # model_id = 'MODEL_ID_HERE'
+    # model_evaluation_id = 'MODEL_EVALUATION_ID_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the model evaluation.
+    model_evaluation_full_id = client.model_evaluation_path(
+        project_id, compute_region, model_id, model_evaluation_id
+    )
+
+    # Get complete detail of the model evaluation.
+    response = client.get_model_evaluation(model_evaluation_full_id)
+
+    print(response)
+
+    # [END automl_translation_get_model_evaluation]
+
+
+def delete_model(project_id, compute_region, model_id):
+    """Delete a model."""
+    # [START automl_translation_delete_model]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # model_id = 'MODEL_ID_HERE'
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the full path of the model.
+    model_full_id = client.model_path(project_id, compute_region, model_id)
+
+    # Delete a model.
+    response = client.delete_model(model_full_id)
+
+    # synchronous check of operation status.
+    print("Model deleted. {}".format(response.result()))
+
+    # [END automl_translation_delete_model]
+
+
+def get_operation_status(operation_full_id):
+    """Get operation status."""
+    # [START automl_translation_get_operation_status]
+    # TODO(developer): Uncomment and set the following variables
+    # operation_full_id =
+    #   'projects/<projectId>/locations/<region>/operations/<operationId>'
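+    # The full operation ID is the "Training operation name" printed by
+    # create_model in this file.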
+
+    from google.cloud import automl_v1beta1 as automl
+
+    client = automl.AutoMlClient()
+
+    # Get the latest state of a long-running operation.
+    response = client.transport._operations_client.get_operation(
+        operation_full_id
+    )
+
+    print("Operation status: {}".format(response))
+
+    # [END automl_translation_get_operation_status]
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    subparsers = parser.add_subparsers(dest="command")
+
+    create_model_parser = subparsers.add_parser(
+        "create_model", help=create_model.__doc__
+    )
+    create_model_parser.add_argument("dataset_id")
+    create_model_parser.add_argument("model_name")
+
+    list_model_evaluations_parser = subparsers.add_parser(
+        "list_model_evaluations", help=list_model_evaluations.__doc__
+    )
+    list_model_evaluations_parser.add_argument("model_id")
+    list_model_evaluations_parser.add_argument("filter", nargs="?", default="")
+
+    get_model_evaluation_parser = subparsers.add_parser(
+        "get_model_evaluation", help=get_model_evaluation.__doc__
+    )
+    get_model_evaluation_parser.add_argument("model_id")
+    get_model_evaluation_parser.add_argument("model_evaluation_id")
+
+    get_model_parser = subparsers.add_parser(
+        "get_model", help=get_model.__doc__
+    )
+    get_model_parser.add_argument("model_id")
+
+    get_operation_status_parser = subparsers.add_parser(
+        "get_operation_status", help=get_operation_status.__doc__
+    )
+    get_operation_status_parser.add_argument("operation_full_id")
+
+    list_models_parser = subparsers.add_parser(
+        "list_models", help=list_models.__doc__
+    )
+    list_models_parser.add_argument("filter", nargs="?", default="")
+
+    delete_model_parser = subparsers.add_parser(
+        "delete_model", help=delete_model.__doc__
+    )
+    delete_model_parser.add_argument("model_id")
+
+    project_id = os.environ["PROJECT_ID"]
+    compute_region = os.environ["REGION_NAME"]
+
+    args = parser.parse_args()
+
+    if args.command == "create_model":
+        create_model(
+            project_id, compute_region, args.dataset_id, args.model_name
+        )
+    if args.command == "list_models":
+        list_models(project_id, compute_region, args.filter)
+    if args.command == "get_model":
+        get_model(project_id, compute_region, args.model_id)
+    if args.command == "list_model_evaluations":
+        list_model_evaluations(
+            project_id, compute_region, args.model_id, args.filter
+        )
+    if args.command == "get_model_evaluation":
+        get_model_evaluation(
+            project_id, compute_region, args.model_id, args.model_evaluation_id
+        )
+    if args.command == "delete_model":
+        delete_model(project_id, compute_region, args.model_id)
+    if args.command == "get_operation_status":
+        get_operation_status(args.operation_full_id)
diff --git a/automl/snippets/automl_translation_predict.py b/automl/snippets/automl_translation_predict.py
new file mode 100644
index 000000000000..1dac70b78d55
--- /dev/null
+++ b/automl/snippets/automl_translation_predict.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This application demonstrates how to perform basic operations on prediction
+with the Google AutoML Translation API.
+
+For more information, see the documentation at
+https://cloud.google.com/translate/automl/docs
+"""
+
+import argparse
+import os
+
+
+def predict(
+    project_id,
+    compute_region,
+    model_id,
+    file_path,
+    translation_allow_fallback=False,
+):
+    """Translate the content."""
+    # [START automl_translation_predict]
+    # TODO(developer): Uncomment and set the following variables
+    # project_id = 'PROJECT_ID_HERE'
+    # compute_region = 'COMPUTE_REGION_HERE'
+    # model_id = 'MODEL_ID_HERE'
+    # file_path = '/local/path/to/file'
+    # translation_allow_fallback = True allows fallback to Google Translate
+
+    from google.cloud import automl_v1beta1 as automl
+
+    automl_client = automl.AutoMlClient()
+
+    # Create client for prediction service.
+    prediction_client = automl.PredictionServiceClient()
+
+    # Get the full path of the model.
+    model_full_id = automl_client.model_path(
+        project_id, compute_region, model_id
+    )
+
+    # Read the file content for translation.
+    with open(file_path, "rb") as content_file:
+        content = content_file.read()
+    # Decode the bytes into a UTF-8 string for the text snippet payload.
+    content = content.decode("utf-8")
+
+    # Set the payload by giving the content of the file.
+    payload = {"text_snippet": {"content": content}}
+
+    # params holds additional domain-specific parameters.
+    # translation_allow_fallback allows the prediction to fall back to the
+    # base Google Translation model.
+    params = {}
+    if translation_allow_fallback:
+        params = {"translation_allow_fallback": "True"}
+
+    response = prediction_client.predict(model_full_id, payload, params)
+    translated_content = response.payload[0].translation.translated_content
+
+    print(u"Translated content: {}".format(translated_content.content))
+
+    # [END automl_translation_predict]
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    subparsers = parser.add_subparsers(dest="command")
+
+    predict_parser = subparsers.add_parser("predict", help=predict.__doc__)
+    predict_parser.add_argument("model_id")
+    predict_parser.add_argument("file_path")
+    predict_parser.add_argument(
+        "translation_allow_fallback",
+        nargs="?",
+        choices=["False", "True"],
+        default="False",
+    )
+
+    project_id = os.environ["PROJECT_ID"]
+    compute_region = os.environ["REGION_NAME"]
+
+    args = parser.parse_args()
+
+    if args.command == "predict":
+        translation_allow_fallback = args.translation_allow_fallback == "True"
+        predict(
+            project_id,
+            compute_region,
+            args.model_id,
+            args.file_path,
+            translation_allow_fallback,
+        )
diff --git a/automl/snippets/dataset_test.py b/automl/snippets/dataset_test.py
new file mode 100644
index 000000000000..29e3e5c9fe97
--- /dev/null
+++ b/automl/snippets/dataset_test.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import os
+
+import pytest
+
+import automl_translation_dataset
+
+project_id = os.environ["GCLOUD_PROJECT"]
+compute_region = "us-central1"
+
+
+@pytest.mark.slow
+def test_dataset_create_import_delete(capsys):
+    # create dataset
+    dataset_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+    automl_translation_dataset.create_dataset(
+        project_id, compute_region, dataset_name, "en", "ja"
+    )
+    out, _ = capsys.readouterr()
+    create_dataset_output = out.splitlines()
+    assert "Dataset id: " in create_dataset_output[1]
+
+    # import data
+    dataset_id = create_dataset_output[1].split()[2]
+    data = "gs://{}-vcm/en-ja.csv".format(project_id)
+    automl_translation_dataset.import_data(
+        project_id, compute_region, dataset_id, data
+    )
+    out, _ = capsys.readouterr()
+    assert "Data imported." in out
+
+    # delete dataset
+    automl_translation_dataset.delete_dataset(
+        project_id, compute_region, dataset_id
+    )
+    out, _ = capsys.readouterr()
+    assert "Dataset deleted." in out
+
+
+def test_dataset_list_get(capsys):
+    # list datasets
+    automl_translation_dataset.list_datasets(project_id, compute_region, "")
+    out, _ = capsys.readouterr()
+    list_dataset_output = out.splitlines()
+    assert "Dataset id: " in list_dataset_output[2]
+
+    # get dataset
+    dataset_id = list_dataset_output[2].split()[2]
+    automl_translation_dataset.get_dataset(
+        project_id, compute_region, dataset_id
+    )
+    out, _ = capsys.readouterr()
+    assert "Dataset name: " in out
diff --git a/automl/snippets/model_test.py b/automl/snippets/model_test.py
new file mode 100644
index 000000000000..7f915c5dddf3
--- /dev/null
+++ b/automl/snippets/model_test.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import os
+
+from google.cloud import automl_v1beta1 as automl
+
+import automl_translation_model
+
+project_id = os.environ["GCLOUD_PROJECT"]
+compute_region = "us-central1"
+
+
+def test_model_create_status_delete(capsys):
+    # create model
+    client = automl.AutoMlClient()
+    model_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+    project_location = client.location_path(project_id, compute_region)
+    my_model = {
+        "display_name": model_name,
+        "dataset_id": "3876092572857648864",
+        "translation_model_metadata": {"base_model": ""},
+    }
+    response = client.create_model(project_location, my_model)
+    operation_name = response.operation.name
+    assert operation_name
+
+    # get operation status
+    automl_translation_model.get_operation_status(operation_name)
+    out, _ = capsys.readouterr()
+    assert "Operation status: " in out
+
+    # cancel operation
+    response.cancel()
+
+
+def test_model_list_get_evaluate(capsys):
+    # list models
+    automl_translation_model.list_models(project_id, compute_region, "")
+    out, _ = capsys.readouterr()
+    list_models_output = out.splitlines()
+    assert "Model id: " in list_models_output[2]
+
+    # get model
+    model_id = list_models_output[2].split()[2]
+    automl_translation_model.get_model(project_id, compute_region, model_id)
+    out, _ = capsys.readouterr()
+    assert "Model name: " in out
+
+    # list model evaluations
+    automl_translation_model.list_model_evaluations(
+        project_id, compute_region, model_id, ""
+    )
+    out, _ = capsys.readouterr()
+    list_evals_output = out.splitlines()
+    assert "name: " in list_evals_output[1]
+
+    # get model evaluation
+    model_evaluation_id = list_evals_output[1].split("/")[-1][:-1]
+    automl_translation_model.get_model_evaluation(
+        project_id, compute_region, model_id, model_evaluation_id
+    )
+    out, _ = capsys.readouterr()
+    assert "evaluation_metric" in out
diff --git a/automl/snippets/predict_test.py b/automl/snippets/predict_test.py
new file mode 100644
index 000000000000..87aea8faa5b0
--- /dev/null
+++ b/automl/snippets/predict_test.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import automl_translation_predict
+
+project_id = os.environ["GCLOUD_PROJECT"]
+compute_region = "us-central1"
+
+
+def test_predict(capsys):
+    model_id = "3128559826197068699"
+    automl_translation_predict.predict(
+        project_id, compute_region, model_id, "resources/input.txt", False
+    )
+    out, _ = capsys.readouterr()
+    assert "Translated content: " in out