diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_gcs.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_gcs.py
index 0c56987f305a..2b548add5657 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_gcs.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_gcs.py
@@ -40,12 +40,10 @@ def main(bucket_name):
     # TODO: Developer set the bucket_name
     # bucket_name = 'user_events_bucket'
 
-    default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-        project_id
-    )
+    default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
 
-    gcs_bucket = "gs://{}".format(bucket_name)
-    gcs_errors_bucket = "{}/error".format(gcs_bucket)
+    gcs_bucket = f"gs://{bucket_name}"
+    gcs_errors_bucket = f"{gcs_bucket}/error"
     gcs_events_object = "user_events.json"
 
     # TO CHECK ERROR HANDLING USE THE JSON WITH INVALID PRODUCT
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_inline.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_inline.py
index bdbaa72a3bfc..197d6175fc14 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_inline.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/import_user_events_inline.py
@@ -35,9 +35,7 @@
 
 project_id = google.auth.default()[1]
 
-default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-    project_id
-)
+default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
 
 
 # get user events for import
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/purge_user_event.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/purge_user_event.py
index 256ecde1e3a5..1be1d4b3d33c 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/purge_user_event.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/purge_user_event.py
@@ -24,9 +24,8 @@
 
 project_id = google.auth.default()[1]
 
-default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-    project_id
-)
+default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
+
 visitor_id = "test_visitor_id"
 
 
@@ -34,7 +33,7 @@
 def get_purge_user_event_request():
     purge_user_event_request = PurgeUserEventsRequest()
     # TO CHECK ERROR HANDLING SET INVALID FILTER HERE:
-    purge_user_event_request.filter = 'visitorId="{}"'.format(visitor_id)
+    purge_user_event_request.filter = f'visitorId="{visitor_id}"'
     purge_user_event_request.parent = default_catalog
     purge_user_event_request.force = True
     print("---purge user events request---")
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/rejoin_user_event.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/rejoin_user_event.py
index 67e4caa585be..598955e9c6a2 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/rejoin_user_event.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/rejoin_user_event.py
@@ -24,16 +24,15 @@
 
 project_id = google.auth.default()[1]
 
-default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-    project_id
-)
+default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
+
 visitor_id = "test_visitor_id"
 
 
 # get rejoin user event request
 def get_rejoin_user_event_request():
     # TO CHECK THE ERROR HANDLING TRY TO PASS INVALID CATALOG:
-    # default_catalog = "projects/{0}/locations/global/catalogs/invalid_catalog".format(project_number)
+    # default_catalog = f"projects/{project_id}/locations/global/catalogs/invalid_catalog"
     rejoin_user_event_request = RejoinUserEventsRequest()
     rejoin_user_event_request.parent = default_catalog
     rejoin_user_event_request.user_event_rejoin_scope = (
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py
index 0ccf67852f24..07b30763e038 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/events_create_gcs_bucket.py
@@ -20,10 +20,10 @@
 
 project_id = google.auth.default()[1]
 timestamp_ = datetime.datetime.now().timestamp().__round__()
-bucket_name = "{}_events_{}".format(project_id, timestamp_)
+bucket_name = f"{project_id}_events_{timestamp_}"
 
 create_bucket(bucket_name)
 upload_blob(bucket_name, "../resources/user_events.json")
 upload_blob(bucket_name, "../resources/user_events_some_invalid.json")
 
-print("\nThe gcs bucket {} was created".format(bucket_name))
+print(f"\nThe gcs bucket {bucket_name} was created")
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/setup_cleanup.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/setup_cleanup.py
index d1d0fe3507e5..4c4f843aba52 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/setup_cleanup.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/setup_cleanup.py
@@ -31,8 +31,7 @@
 from google.protobuf.timestamp_pb2 import Timestamp
 
 project_id = google.auth.default()[1]
-default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-    project_id)
+default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
 
 
 # get user event
@@ -71,7 +70,7 @@ def write_user_event(visitor_id):
 # purge user event
 def purge_user_event(visitor_id):
     purge_user_event_request = PurgeUserEventsRequest()
-    purge_user_event_request.filter = 'visitorId="{}"'.format(visitor_id)
+    purge_user_event_request.filter = f'visitorId="{visitor_id}"'
     purge_user_event_request.parent = default_catalog
     purge_user_event_request.force = True
     purge_operation = UserEventServiceClient().purge_user_events(
@@ -93,17 +92,14 @@ def create_bucket(bucket_name: str):
     print("Creating new bucket:" + bucket_name)
     buckets_in_your_project = list_buckets()
     if bucket_name in buckets_in_your_project:
-        print("Bucket {} already exists".format(bucket_name))
+        print(f"Bucket {bucket_name} already exists")
     else:
         storage_client = storage.Client()
         bucket = storage_client.bucket(bucket_name)
         bucket.storage_class = "STANDARD"
         new_bucket = storage_client.create_bucket(bucket, location="us")
         print(
-            "Created bucket {} in {} with storage class {}".format(
-                new_bucket.name, new_bucket.location, new_bucket.storage_class
-            )
-        )
+            f"Created bucket {new_bucket.name} in {new_bucket.location} with storage class {new_bucket.storage_class}")
         return new_bucket
 
 
@@ -118,9 +114,9 @@ def delete_bucket(bucket_name: str):
             blob.delete()
         bucket = storage_client.get_bucket(bucket_name)
         bucket.delete()
-        print("Bucket {} is deleted".format(bucket.name))
+        print(f"Bucket {bucket.name} is deleted")
     else:
-        print("Bucket {} is not found".format(bucket_name))
+        print(f"Bucket {bucket_name} is not found")
 
 
 def list_buckets():
@@ -137,17 +133,14 @@ def upload_blob(bucket_name, source_file_name):
     """Uploads a file to the bucket."""
     # The path to your file to upload
     # source_file_name = "local/path/to/file"
-    print("Uploading data form {} to the bucket {}".format(source_file_name,
-                                                           bucket_name))
+    print(f"Uploading data from {source_file_name} to the bucket {bucket_name}")
     storage_client = storage.Client()
     bucket = storage_client.bucket(bucket_name)
     object_name = re.search('resources/(.*?)$', source_file_name).group(1)
     blob = bucket.blob(object_name)
     blob.upload_from_filename(source_file_name)
     print(
-        "File {} uploaded to {}.".format(
-            source_file_name, object_name
-        )
+        f"File {source_file_name} uploaded to {object_name}."
     )
 
 
@@ -190,7 +183,7 @@ def delete_bq_table(dataset, table_name):
     full_table_id = f"{project_id}.{dataset}.{table_name}"
     bq = bigquery.Client()
     bq.delete_table(full_table_id, not_found_ok=True)
-    print("Table '{}' is deleted.".format(full_table_id))
+    print(f"Table '{full_table_id}' is deleted.")
 
 
 def upload_data_to_bq_table(dataset, table_name, source, schema_file_path):
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/update_user_events_json.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/update_user_events_json.py
index f04b352e47ad..b0e79950065e 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/update_user_events_json.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/setup_events/update_user_events_json.py
@@ -21,4 +21,4 @@ def update_events_timestamp(json_file):
     # Write the file out again
     with open(json_file, 'w') as file:
         file.write(filedata)
-    print("The {} is updated".format(json_file))
+    print(f"The {json_file} is updated")
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/events/write_user_event.py b/packages/google-cloud-retail/samples/interactive-tutorials/events/write_user_event.py
index cd02d7c9fb56..50e8dca0f3b4 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/events/write_user_event.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/events/write_user_event.py
@@ -28,9 +28,8 @@
 
 project_id = google.auth.default()[1]
 
-default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
-    project_id
-)
+default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"
+
 visitor_id = "test_visitor_id"
 
 
@@ -51,8 +50,7 @@ def get_user_event():
 # get write user event request
 def get_write_event_request(user_event):
     # TO CHECK THE ERROR HANDLING TRY TO PASS INVALID CATALOG:
-    # default_catalog = "projects/{0}/locations/global/catalogs/invalid_catalog"
-    # .format(project_number)
+    # default_catalog = f"projects/{project_id}/locations/global/catalogs/invalid_catalog"
     write_user_event_request = WriteUserEventRequest()
     write_user_event_request.user_event = user_event
     write_user_event_request.parent = default_catalog
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/noxfile.py b/packages/google-cloud-retail/samples/interactive-tutorials/noxfile.py
index 1e30748a29ae..fa7787dadf90 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/noxfile.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/noxfile.py
@@ -64,7 +64,7 @@
     sys.path.append(".")
     from noxfile_config import TEST_CONFIG_OVERRIDE
 except ImportError as e:
-    print("No user noxfile_config found: detail: {}".format(e))
+    print(f"No user noxfile_config found: detail: {e}")
     TEST_CONFIG_OVERRIDE = {}
 
 # Update the TEST_CONFIG with the user supplied values.
@@ -222,9 +222,7 @@ def py(session: nox.sessions.Session) -> None:
     if session.python in TESTED_VERSIONS:
         _session_tests(session)
     else:
-        session.skip(
-            "SKIPPED: {} tests are disabled for this sample.".format(session.python)
-        )
+        session.skip(f"SKIPPED: {session.python} tests are disabled for this sample.")
 
 
 #
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/product/add_fulfillment_places.py b/packages/google-cloud-retail/samples/interactive-tutorials/product/add_fulfillment_places.py
index 1a791c6ff5fb..3f2eb8cd5853 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/product/add_fulfillment_places.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/product/add_fulfillment_places.py
@@ -70,7 +70,7 @@ def add_fulfillment_places(product_name: str, timestamp, place_id):
 
 
 create_product(product_id)
-print("------add fulfilment places with current date: {}-----".format(current_date))
+print(f"------add fulfilment places with current date: {current_date}-----")
 add_fulfillment_places(product_name, current_date, "store2")
 get_product(product_name)
 delete_product(product_name)
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/product/crud_product.py b/packages/google-cloud-retail/samples/interactive-tutorials/product/crud_product.py
index 726371946d46..45f58e53e00a 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/product/crud_product.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/product/crud_product.py
@@ -38,7 +38,7 @@
     + "/locations/global/catalogs/default_catalog/branches/default_branch"
 )
 product_id = "".join(random.sample(string.ascii_lowercase, 8))
-product_name = "{}/products/{}".format(default_branch_name, product_id)
+product_name = f"{default_branch_name}/products/{product_id}"
 
 
 # generate product for create
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/product/remove_fulfillment_places.py b/packages/google-cloud-retail/samples/interactive-tutorials/product/remove_fulfillment_places.py
index 654daffcd58c..1397f560e7c3 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/product/remove_fulfillment_places.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/product/remove_fulfillment_places.py
@@ -70,7 +70,7 @@ def remove_fulfillment_places(product_name: str, timestamp, store_id):
 
 
 create_product(product_id)
-print("------remove fulfilment places with current date: {}-----".format(current_date))
+print(f"------remove fulfilment places with current date: {current_date}-----")
 remove_fulfillment_places(product_name, current_date, "store0")
 get_product(product_name)
 delete_product(product_name)
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/products_create_gcs_bucket.py b/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/products_create_gcs_bucket.py
index 3d84635b3781..fe976d7a5602 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/products_create_gcs_bucket.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/products_create_gcs_bucket.py
@@ -20,10 +20,10 @@
 
 project_id = google.auth.default()[1]
 timestamp_ = datetime.datetime.now().timestamp().__round__()
-bucket_name = "{}_products_{}".format(project_id, timestamp_)
+bucket_name = f"{project_id}_products_{timestamp_}"
 
 create_bucket(bucket_name)
 upload_blob(bucket_name, "../resources/products.json")
 upload_blob(bucket_name, "../resources/products_some_invalid.json")
 
-print("\nThe gcs bucket {} was created".format(bucket_name))
+print(f"\nThe gcs bucket {bucket_name} was created")
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/setup_cleanup.py b/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/setup_cleanup.py
index 686163b96618..31a6a4f9fb90 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/setup_cleanup.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/product/setup_product/setup_cleanup.py
@@ -101,16 +101,14 @@ def create_bucket(bucket_name: str):
     print("Creating new bucket:" + bucket_name)
     buckets_in_your_project = list_buckets()
     if bucket_name in buckets_in_your_project:
-        print("Bucket {} already exists".format(bucket_name))
+        print(f"Bucket {bucket_name} already exists")
     else:
         storage_client = storage.Client()
         bucket = storage_client.bucket(bucket_name)
         bucket.storage_class = "STANDARD"
         new_bucket = storage_client.create_bucket(bucket, location="us")
         print(
-            "Created bucket {} in {} with storage class {}".format(
-                new_bucket.name, new_bucket.location, new_bucket.storage_class
-            )
+            f"Created bucket {new_bucket.name} in {new_bucket.location} with storage class {new_bucket.storage_class}"
         )
         return new_bucket
 
@@ -126,9 +124,9 @@ def delete_bucket(bucket_name: str):
             blob.delete()
         bucket = storage_client.get_bucket(bucket_name)
         bucket.delete()
-        print("Bucket {} is deleted".format(bucket.name))
+        print(f"Bucket {bucket.name} is deleted")
     else:
-        print("Bucket {} is not found".format(bucket_name))
+        print(f"Bucket {bucket_name} is not found")
 
 
 def list_buckets():
@@ -145,8 +143,7 @@ def upload_blob(bucket_name, source_file_name):
     """Uploads a file to the bucket."""
     # The path to your file to upload
     # source_file_name = "local/path/to/file"
-    print("Uploading data from {} to the bucket {}".format(source_file_name,
-                                                           bucket_name))
+    print(f"Uploading data from {source_file_name} to the bucket {bucket_name}")
     storage_client = storage.Client()
     bucket = storage_client.bucket(bucket_name)
     object_name = re.search('resources/(.*?)$', source_file_name).group(1)
@@ -154,9 +151,7 @@ def upload_blob(bucket_name, source_file_name):
     blob.upload_from_filename(source_file_name)
 
     print(
-        "File {} uploaded to {}.".format(
-            source_file_name, object_name
-        )
+        f"File {source_file_name} uploaded to {object_name}."
     )
 
 
@@ -199,7 +194,7 @@ def delete_bq_table(dataset, table_name):
     full_table_id = f"{project_id}.{dataset}.{table_name}"
     bq = bigquery.Client()
     bq.delete_table(full_table_id, not_found_ok=True)
-    print("Table '{}' is deleted.".format(full_table_id))
+    print(f"Table '{full_table_id}' is deleted.")
 
 
 def upload_data_to_bq_table(dataset, table_name, source, schema_file_path):
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/create_test_resources.py b/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/create_test_resources.py
index b81f84b3196d..756bb4febe51 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/create_test_resources.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/create_test_resources.py
@@ -50,16 +50,14 @@ def create_bucket(bucket_name: str) -> Bucket:
     print("Creating new bucket:" + bucket_name)
     bucket_exists = check_if_bucket_exists(bucket_name)
     if bucket_exists:
-        print("Bucket {} already exists".format(bucket_name))
+        print(f"Bucket {bucket_name} already exists")
         return storage_client.bucket(bucket_name)
     else:
         bucket = storage_client.bucket(bucket_name)
         bucket.storage_class = "STANDARD"
         new_bucket = storage_client.create_bucket(bucket, location="us")
         print(
-            "Created bucket {} in {} with storage class {}".format(
-                new_bucket.name, new_bucket.location, new_bucket.storage_class
-            )
+            f"Created bucket {new_bucket.name} in {new_bucket.location} with storage class {new_bucket.storage_class}"
         )
         return new_bucket
 
@@ -79,17 +77,16 @@ def upload_data_to_bucket(bucket: Bucket):
     """Upload data to a GCS bucket"""
     blob = bucket.blob(object_name)
     blob.upload_from_filename(product_resource_file)
-    print("Data from {} has being uploaded to {}".format(product_resource_file,
-                                                         bucket.name))
+    print(f"Data from {product_resource_file} has being uploaded to {bucket.name}")
 
 
 def get_import_products_gcs_request():
     """Get import products from gcs request"""
-    gcs_bucket = "gs://{}".format(products_bucket_name)
-    gcs_errors_bucket = "{}/error".format(gcs_bucket)
+    gcs_bucket = f"gs://{products_bucket_name}"
+    gcs_errors_bucket = f"{gcs_bucket}/error"
 
     gcs_source = GcsSource()
-    gcs_source.input_uris = ["{0}/{1}".format(gcs_bucket, object_name)]
+    gcs_source.input_uris = [f"{gcs_bucket}/{object_name}"]
 
     input_config = ProductInputConfig()
     input_config.gcs_source = gcs_source
@@ -115,7 +112,7 @@ def import_products_from_gcs():
     gcs_operation = ProductServiceClient().import_products(
         import_gcs_request)
     print(
-        "Import operation is started: {}".format(gcs_operation.operation.name))
+        f"Import operation is started: {gcs_operation.operation.name}")
 
     while not gcs_operation.done():
         print("Please wait till operation is completed")
@@ -137,13 +134,12 @@ def import_products_from_gcs():
 
 def create_bq_dataset(dataset_name):
     """Create a BigQuery dataset"""
-    print("Creating dataset {}".format(dataset_name))
+    print(f"Creating dataset {dataset_name}")
     try:
         list_bq_dataset(project_id, dataset_name)
-        print("dataset {} already exists".format(dataset_name))
+        print(f"dataset {dataset_name} already exists")
     except subprocess.CalledProcessError:
-        create_dataset_command = 'bq --location=US mk -d --default_table_expiration 3600 --description "This is my dataset." {}:{}'.format(
-            project_id, dataset_name)
+        create_dataset_command = f'bq --location=US mk -d --default_table_expiration 3600 --description "This is my dataset." {project_id}:{dataset_name}'
         subprocess.check_output(shlex.split(create_dataset_command))
         print("dataset is created")
 
@@ -157,32 +153,27 @@ def list_bq_dataset(project_id: str, dataset_name: str):
 
 def create_bq_table(dataset, table_name, schema):
     """Create a BigQuery table"""
-    print("Creating BigQuery table {}".format(table_name))
+    print(f"Creating BigQuery table {table_name}")
     if table_name not in list_bq_tables(dataset):
-        create_table_command = "bq mk --table {}:{}.{} {}".format(
-            project_id,
-            dataset,
-            table_name, schema)
+        create_table_command = f"bq mk --table {project_id}:{dataset}.{table_name} {schema}"
         output = subprocess.check_output(shlex.split(create_table_command))
         print(output)
         print("table is created")
     else:
-        print("table {} already exists".format(table_name))
+        print(f"table {table_name} already exists")
 
 
 def list_bq_tables(dataset):
     """List BigQuery tables in the dataset"""
-    list_tables_command = "bq ls {}:{}".format(project_id, dataset)
+    list_tables_command = f"bq ls {project_id}:{dataset}"
     tables = subprocess.check_output(shlex.split(list_tables_command))
     return str(tables)
 
 
 def upload_data_to_bq_table(dataset, table_name, source, schema):
     """Upload data to the table from specified source file"""
-    print("Uploading data from {} to the table {}.{}".format(source, dataset,
-                                                             table_name))
-    upload_data_command = "bq load --source_format=NEWLINE_DELIMITED_JSON {}:{}.{} {} {}".format(
-        project_id, dataset, table_name, source, schema)
+    print(f"Uploading data from {source} to the table {dataset}.{table_name}")
+    upload_data_command = f"bq load --source_format=NEWLINE_DELIMITED_JSON {project_id}:{dataset}.{table_name} {source} {schema}"
     output = subprocess.check_output(shlex.split(upload_data_command))
     print(output)
 
diff --git a/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py b/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py
index 0d2247dece68..63e6d64ffe04 100644
--- a/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py
+++ b/packages/google-cloud-retail/samples/interactive-tutorials/test_resources_recovery/remove_test_resources.py
@@ -41,11 +41,11 @@ def delete_bucket(bucket_name):
     try:
         bucket = storage_client.get_bucket(bucket_name)
     except NotFound:
-        print("Bucket {} does not exists".format(bucket_name))
+        print(f"Bucket {bucket_name} does not exists")
     else:
         delete_object_from_bucket(bucket)
         bucket.delete()
-        print("bucket {} is deleted".format(bucket_name))
+        print(f"bucket {bucket_name} is deleted")
 
 
 def delete_object_from_bucket(bucket: Bucket):
@@ -53,7 +53,7 @@ def delete_object_from_bucket(bucket: Bucket):
     blobs = bucket.list_blobs()
     for blob in blobs:
         blob.delete()
-    print("all objects are deleted from GCS bucket {}".format(bucket.name))
+    print(f"all objects are deleted from GCS bucket {bucket.name}")
 
 
 def delete_all_products():
@@ -78,7 +78,7 @@ def delete_all_products():
 
 def delete_bq_dataset_with_tables(dataset):
     """Delete a BigQuery dataset with all tables"""
-    delete_dataset_command = "bq rm -r -d -f {}".format(dataset)
+    delete_dataset_command = f"bq rm -r -d -f {dataset}"
     output = subprocess.check_output(shlex.split(delete_dataset_command))
     print(output)