diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py
index 6e7f774..efe0d6a 100644
--- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py
+++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py
@@ -260,7 +260,7 @@ async def create_migration_workflow(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent, migration_workflow])
         if request is not None and has_flattened_params:
@@ -335,7 +335,7 @@ async def get_migration_workflow(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -418,7 +418,7 @@ async def list_migration_workflows(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent])
         if request is not None and has_flattened_params:
@@ -499,7 +499,7 @@ async def delete_migration_workflow(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -568,7 +568,7 @@ async def start_migration_workflow(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -652,7 +652,7 @@ async def get_migration_subtask(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -734,7 +734,7 @@ async def list_migration_subtasks(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent])
         if request is not None and has_flattened_params:
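
Note: the comment rename above ("Sanity check" -> "Quick check") does not change behavior; the guard it describes is the generated clients' check that a full request object and flattened keyword arguments are not mixed. A minimal standalone sketch of that pattern (hypothetical function, not the library's exact code):

    def example_method(request=None, *, parent=None, migration_workflow=None):
        # Quick check: if a request object was passed, reject any flattened
        # keyword arguments that would also map into that same request.
        has_flattened_params = any([parent, migration_workflow])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # ...otherwise the request is built from the flattened params.
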
diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
index 4e30027..a0774b1 100644
--- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
+++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
@@ -467,7 +467,7 @@ def create_migration_workflow(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent, migration_workflow])
         if request is not None and has_flattened_params:
@@ -544,7 +544,7 @@ def get_migration_workflow(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -618,7 +618,7 @@ def list_migration_workflows(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent])
         if request is not None and has_flattened_params:
@@ -690,7 +690,7 @@ def delete_migration_workflow(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -761,7 +761,7 @@ def start_migration_workflow(
                 sent along with the request as metadata.
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -836,7 +836,7 @@ def get_migration_subtask(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([name])
         if request is not None and has_flattened_params:
@@ -909,7 +909,7 @@ def list_migration_subtasks(
 
         """
         # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
+        # Quick check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
         has_flattened_params = any([parent])
         if request is not None and has_flattened_params:
diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py
index 19f31f8..2e80842 100644
--- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py
+++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py
@@ -160,8 +160,11 @@ def __init__(
         if not self._grpc_channel:
             self._grpc_channel = type(self).create_channel(
                 self._host,
+                # use the credentials which are saved
                 credentials=self._credentials,
-                credentials_file=credentials_file,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
                 scopes=self._scopes,
                 ssl_credentials=self._ssl_channel_credentials,
                 quota_project_id=quota_project_id,
diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
index 5eacdd0..c06da35 100644
--- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
+++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
@@ -205,8 +205,11 @@ def __init__(
         if not self._grpc_channel:
             self._grpc_channel = type(self).create_channel(
                 self._host,
+                # use the credentials which are saved
                 credentials=self._credentials,
-                credentials_file=credentials_file,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
                 scopes=self._scopes,
                 ssl_credentials=self._ssl_channel_credentials,
                 quota_project_id=quota_project_id,
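
Note: in both the sync and asyncio transports, ``credentials_file`` is now pinned to ``None`` when the channel is created, because the base transport has already resolved any credentials file into a credentials object that is saved on the instance. A hedged sketch of that resolution step (simplified; ``_resolve_credentials`` is a hypothetical helper, not the library's exact code):

    import google.auth

    def _resolve_credentials(credentials=None, credentials_file=None, scopes=None):
        # If a credentials file was supplied, load it once here...
        if credentials_file:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file, scopes=scopes
            )
        elif credentials is None:
            # ...otherwise fall back to Application Default Credentials.
            credentials, _ = google.auth.default(scopes=scopes)
        # The gRPC channel is then built with these saved credentials and
        # credentials_file=None, so the file is never loaded a second time.
        return credentials
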
diff --git a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py
index 55daf49..0254518 100644
--- a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py
+++ b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py
@@ -105,8 +105,8 @@ class MigrationTask(proto.Message):
             This field is a member of `oneof`_ ``task_details``.
         id (str):
             Output only. Immutable. The unique identifier
-            for the migration task. The ID is server-
-            generated.
+            for the migration task. The ID is
+            server-generated.
         type_ (str):
             The type of the task. This must be a
             supported task type.
diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py
index 67b7975..323f238 100644
--- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py
+++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py
@@ -534,21 +534,28 @@ def test_migration_service_client_client_options_scopes(
 
 
 @pytest.mark.parametrize(
-    "client_class,transport_class,transport_name",
+    "client_class,transport_class,transport_name,grpc_helpers",
     [
-        (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"),
+        (
+            MigrationServiceClient,
+            transports.MigrationServiceGrpcTransport,
+            "grpc",
+            grpc_helpers,
+        ),
         (
             MigrationServiceAsyncClient,
             transports.MigrationServiceGrpcAsyncIOTransport,
             "grpc_asyncio",
+            grpc_helpers_async,
         ),
     ],
 )
 def test_migration_service_client_client_options_credentials_file(
-    client_class, transport_class, transport_name
+    client_class, transport_class, transport_name, grpc_helpers
 ):
     # Check the case credentials file is provided.
     options = client_options.ClientOptions(credentials_file="credentials.json")
+
     with mock.patch.object(transport_class, "__init__") as patched:
         patched.return_value = None
         client = client_class(client_options=options, transport=transport_name)
@@ -584,6 +591,72 @@ def test_migration_service_client_client_options_from_dict():
         )
 
 
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,grpc_helpers",
+    [
+        (
+            MigrationServiceClient,
+            transports.MigrationServiceGrpcTransport,
+            "grpc",
+            grpc_helpers,
+        ),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            grpc_helpers_async,
+        ),
+    ],
+)
+def test_migration_service_client_create_channel_credentials_file(
+    client_class, transport_class, transport_name, grpc_helpers
+):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(credentials_file="credentials.json")
+
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "bigquerymigration.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=None,
+            default_host="bigquerymigration.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
 @pytest.mark.parametrize(
     "request_type", [migration_service.CreateMigrationWorkflowRequest, dict,]
 )
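
Note: the new ``test_migration_service_client_create_channel_credentials_file`` test asserts that the credentials loaded from the file, rather than the file path itself, are what reach ``create_channel``. For reference, a minimal client-side configuration that exercises this path (assumes a local ``credentials.json`` and the package-level client export; not part of this diff):

    from google.api_core import client_options
    from google.cloud import bigquery_migration_v2alpha

    # The credentials file is passed through ClientOptions; the transport
    # loads it once and hands the resulting credentials to the channel.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    client = bigquery_migration_v2alpha.MigrationServiceClient(client_options=options)
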