From f4eeff4eaf12b97f6b91c25ec64e7799a0e1639d Mon Sep 17 00:00:00 2001
From: Obada Alabbadi <76101898+obada-ab@users.noreply.github.com>
Date: Fri, 23 Feb 2024 23:16:55 +0100
Subject: [PATCH] add `remote_function_options` to bigquery_routine (#9893)

* Add dataGovernanceType and remoteFunctionOptions to bigquery_routine

* add function-source.zip to bigquery fixtures

* fix resource names in TestAccBigQueryRoutine

* add bigquery routine remote function example
---
 mmv1/products/bigquery/Routine.yaml           |  50 +++++-
 ...c.tf.erb => bigquery_routine_basic.tf.erb} |   0
 ...on.tf.erb => bigquery_routine_json.tf.erb} |   0
 ...tf.erb => bigquery_routine_pyspark.tf.erb} |   0
 ... bigquery_routine_pyspark_mainfile.tf.erb} |   0
 .../bigquery_routine_remote_function.tf.erb   |  27 +++
 ....erb => bigquery_routine_spark_jar.tf.erb} |   0
 ...tvf.tf.erb => bigquery_routine_tvf.tf.erb} |   0
 .../resource_bigquery_routine_test.go         | 168 ++++++++++++++++++
 .../test-fixtures/function-source.zip         | Bin 0 -> 458 bytes
 10 files changed, 239 insertions(+), 6 deletions(-)
 rename mmv1/templates/terraform/examples/{big_query_routine_basic.tf.erb => bigquery_routine_basic.tf.erb} (100%)
 rename mmv1/templates/terraform/examples/{big_query_routine_json.tf.erb => bigquery_routine_json.tf.erb} (100%)
 rename mmv1/templates/terraform/examples/{big_query_routine_pyspark.tf.erb => bigquery_routine_pyspark.tf.erb} (100%)
 rename mmv1/templates/terraform/examples/{big_query_routine_pyspark_mainfile.tf.erb => bigquery_routine_pyspark_mainfile.tf.erb} (100%)
 create mode 100644 mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb
 rename mmv1/templates/terraform/examples/{big_query_routine_spark_jar.tf.erb => bigquery_routine_spark_jar.tf.erb} (100%)
 rename mmv1/templates/terraform/examples/{big_query_routine_tvf.tf.erb => bigquery_routine_tvf.tf.erb} (100%)
 create mode 100644 mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip

diff --git a/mmv1/products/bigquery/Routine.yaml b/mmv1/products/bigquery/Routine.yaml
index 513a80b503d4..3dbbfd3b0577 100644
--- a/mmv1/products/bigquery/Routine.yaml
+++ b/mmv1/products/bigquery/Routine.yaml
@@ -26,7 +26,7 @@ import_format:
   ['projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}']
 examples:
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_basic'
+    name: 'bigquery_routine_basic'
     primary_resource_id: 'sproc'
     primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\",
       context[\"random_suffix\"\
@@ -35,7 +35,7 @@ examples:
       dataset_id: 'dataset_id'
       routine_id: 'routine_id'
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_json'
+    name: 'bigquery_routine_json'
     primary_resource_id: 'sproc'
     primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\",
       context[\"random_suffix\"\
@@ -44,7 +44,7 @@ examples:
       dataset_id: 'dataset_id'
       routine_id: 'routine_id'
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_tvf'
+    name: 'bigquery_routine_tvf'
     primary_resource_id: 'sproc'
     primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\",
       context[\"random_suffix\"\
@@ -53,26 +53,34 @@ examples:
       dataset_id: 'dataset_id'
       routine_id: 'routine_id'
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_pyspark'
+    name: 'bigquery_routine_pyspark'
     primary_resource_id: 'pyspark'
     vars:
       dataset_id: 'dataset_id'
       connection_id: 'connection_id'
       routine_id: 'routine_id'
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_pyspark_mainfile'
+    name: 'bigquery_routine_pyspark_mainfile'
     primary_resource_id: 'pyspark_mainfile'
     vars:
       dataset_id: 'dataset_id'
       connection_id: 'connection_id'
       routine_id: 'routine_id'
   - !ruby/object:Provider::Terraform::Examples
-    name: 'big_query_routine_spark_jar'
+    name: 'bigquery_routine_spark_jar'
     primary_resource_id: 'spark_jar'
     vars:
       dataset_id: 'dataset_id'
       connection_id: 'connection_id'
       routine_id: 'routine_id'
+  - !ruby/object:Provider::Terraform::Examples
+    skip_test: true
+    name: 'bigquery_routine_remote_function'
+    primary_resource_id: 'remote_function'
+    vars:
+      dataset_id: 'dataset_id'
+      connection_id: 'connection_id'
+      routine_id: 'routine_id'
 properties:
   - !ruby/object:Api::Type::NestedObject
     name: routineReference
@@ -283,3 +291,33 @@ properties:
         description: |
           The fully qualified name of a class in jarUris, for example, com.example.wordcount.
           Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type.
+  - !ruby/object:Api::Type::NestedObject
+    name: 'remoteFunctionOptions'
+    description: Remote function specific options.
+    properties:
+      - !ruby/object:Api::Type::String
+        name: 'endpoint'
+        description: |
+          Endpoint of the user-provided remote service, e.g.
+          `https://us-east1-my_gcf_project.cloudfunctions.net/remote_add`
+      - !ruby/object:Api::Type::String
+        name: 'connection'
+        description: |
+          Fully qualified name of the user-provided connection object which holds
+          the authentication information to send requests to the remote service.
+          Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}"
+      - !ruby/object:Api::Type::KeyValuePairs
+        name: 'userDefinedContext'
+        description: |
+          User-defined context as a set of key/value pairs, which will be sent as function
+          invocation context together with batched arguments in the requests to the remote
+          service. The total number of bytes of keys and values must be less than 8KB.
+
+          An object containing a list of "key": value pairs. Example:
+          `{ "name": "wrench", "mass": "1.3kg", "count": "3" }`.
+        default_from_api: true
+      - !ruby/object:Api::Type::String
+        name: 'maxBatchingRows'
+        description: |
+          Max number of rows in each batch sent to the remote service. If absent or if 0,
+          BigQuery dynamically decides the number of rows in a batch.
diff --git a/mmv1/templates/terraform/examples/big_query_routine_basic.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_basic.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_basic.tf.erb
diff --git a/mmv1/templates/terraform/examples/big_query_routine_json.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_json.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_json.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_json.tf.erb
diff --git a/mmv1/templates/terraform/examples/big_query_routine_pyspark.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_pyspark.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_pyspark.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_pyspark.tf.erb
diff --git a/mmv1/templates/terraform/examples/big_query_routine_pyspark_mainfile.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_pyspark_mainfile.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_pyspark_mainfile.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_pyspark_mainfile.tf.erb
diff --git a/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb
new file mode 100644
index 000000000000..9ef5b2a0dd2a
--- /dev/null
+++ b/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb
@@ -0,0 +1,27 @@
+resource "google_bigquery_dataset" "test" {
+  dataset_id = "<%= ctx[:vars]['dataset_id'] %>"
+}
+
+resource "google_bigquery_connection" "test" {
+  connection_id = "<%= ctx[:vars]['connection_id'] %>"
+  location      = "US"
+  cloud_resource { }
+}
+
+resource "google_bigquery_routine" "<%= ctx[:primary_resource_id] %>" {
+  dataset_id = google_bigquery_dataset.test.dataset_id
+  routine_id = "<%= ctx[:vars]['routine_id'] %>"
+  routine_type = "SCALAR_FUNCTION"
+  definition_body = ""
+
+  return_type = "{\"typeKind\" :  \"STRING\"}"
+
+  remote_function_options {
+    endpoint = "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add"
+    connection = google_bigquery_connection.test.name
+    max_batching_rows = "10"
+    user_defined_context = {
+      "z": "1.5",
+    }
+  }
+}
\ No newline at end of file
diff --git a/mmv1/templates/terraform/examples/big_query_routine_spark_jar.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_spark_jar.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_spark_jar.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_spark_jar.tf.erb
diff --git a/mmv1/templates/terraform/examples/big_query_routine_tvf.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_tvf.tf.erb
similarity index 100%
rename from mmv1/templates/terraform/examples/big_query_routine_tvf.tf.erb
rename to mmv1/templates/terraform/examples/bigquery_routine_tvf.tf.erb
diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go
index bd106a6f079b..00f5763b505b 100644
--- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go
+++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go
@@ -173,3 +173,171 @@ resource "google_bigquery_routine" "spark_jar" {
 }
 `, context)
 }
+
+func TestAccBigQueryRoutine_bigQueryRoutineRemoteFunction(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"random_suffix": acctest.RandString(t, 10),
+		"zip_path":      "./test-fixtures/function-source.zip",
+	}
+
+	acctest.VcrTest(t, resource.TestCase{
+		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
+		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+		CheckDestroy:             testAccCheckBigQueryRoutineDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context),
+			},
+			{
+				ResourceName:      "google_bigquery_routine.remote_function_routine",
+				ImportState:       true,
+				ImportStateVerify: true,
+			},
+			{
+				Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context),
+			},
+			{
+				ResourceName:      "google_bigquery_routine.remote_function_routine",
+				ImportState:       true,
+				ImportStateVerify: true,
+			},
+		},
+	})
+}
+
+func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context map[string]interface{}) string {
+	return acctest.Nprintf(`
+resource "google_storage_bucket" "default" {
+  name                        = "%{random_suffix}-gcf-source"
+  location                    = "US"
+  uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_object" "object" {
+  name   = "function-source.zip"
+  bucket = google_storage_bucket.default.name
+  source = "%{zip_path}"
+}
+
+resource "google_cloudfunctions2_function" "default" {
+  name        = "function-v2-0"
+  location    = "us-central1"
+  description = "a new function"
+
+  build_config {
+    runtime     = "nodejs18"
+    entry_point = "helloHttp"
+    source {
+      storage_source {
+        bucket = google_storage_bucket.default.name
+        object = google_storage_bucket_object.object.name
+      }
+    }
+  }
+
+  service_config {
+    max_instance_count = 1
+    available_memory   = "256M"
+    timeout_seconds    = 60
+  }
+}
+
+resource "google_bigquery_connection" "test" {
+  connection_id = "tf_test_connection_id%{random_suffix}"
+  location      = "US"
+  cloud_resource { }
+}
+
+resource "google_bigquery_dataset" "test" {
+  dataset_id = "tf_test_dataset_id%{random_suffix}"
+}
+
+resource "google_bigquery_routine" "remote_function_routine" {
+  dataset_id = "${google_bigquery_dataset.test.dataset_id}"
+  routine_id = "tf_test_routine_id%{random_suffix}"
+  routine_type = "SCALAR_FUNCTION"
+  definition_body = ""
+
+  return_type = "{\"typeKind\" :  \"STRING\"}"
+
+  remote_function_options {
+    endpoint = google_cloudfunctions2_function.default.service_config[0].uri
+    connection = "${google_bigquery_connection.test.name}"
+    max_batching_rows = "10"
+    user_defined_context = {
+      "z": "1.5",
+    }
+  }
+}
+`, context)
+}
+
+func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context map[string]interface{}) string {
+	return acctest.Nprintf(`
+resource "google_storage_bucket" "default" {
+  name                        = "%{random_suffix}-gcf-source"
+  location                    = "US"
+  uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_object" "object" {
+  name   = "function-source.zip"
+  bucket = google_storage_bucket.default.name
+  source = "%{zip_path}"
+}
+
+resource "google_cloudfunctions2_function" "default2" {
+  name        = "function-v2-1"
+  location    = "us-central1"
+  description = "a new new function"
+
+  build_config {
+    runtime     = "nodejs18"
+    entry_point = "helloHttp"
+    source {
+      storage_source {
+        bucket = google_storage_bucket.default.name
+        object = google_storage_bucket_object.object.name
+      }
+    }
+  }
+
+  service_config {
+    max_instance_count = 1
+    available_memory   = "256M"
+    timeout_seconds    = 60
+  }
+}
+
+resource "google_bigquery_connection" "test2" {
+  connection_id = "tf_test_connection2_id%{random_suffix}"
+  location      = "US"
+  cloud_resource { }
+}
+
+resource "google_bigquery_dataset" "test" {
+  dataset_id = "tf_test_dataset_id%{random_suffix}"
+}
+
+resource "google_bigquery_routine" "remote_function_routine" {
+  dataset_id = "${google_bigquery_dataset.test.dataset_id}"
+  routine_id = "tf_test_routine_id%{random_suffix}"
+  routine_type = "SCALAR_FUNCTION"
+  definition_body = ""
+
+  return_type = "{\"typeKind\" :  \"STRING\"}"
+
+  remote_function_options {
+    endpoint = google_cloudfunctions2_function.default2.service_config[0].uri
+    connection = "${google_bigquery_connection.test2.name}"
+    max_batching_rows = "5"
+    user_defined_context = {
+      "z": "1.2",
+      "w": "test",
+    }
+  }
+}
+`, context)
+}
diff --git a/mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip b/mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip
new file mode 100644
index 0000000000000000000000000000000000000000..1cb571888ef575c261c2c42e8315daddbb653b5a
GIT binary patch
literal 458
zcmWIWW@Zs#U|`^2Fbc5`5s?gR?g8?)fJGP>GV@YWEA+C8U3-1GnhgZpzFQs@;p$xQ
zlJUr;rRFcLUF)$oIeX<#Kvz}y@!q?(c5?5tPquQz7UjxFE@bKtma^5VeBD{q?V7DQ
z^YlWIx%GQKdjC`19Mq9~-ML3zm3_;!EVr4>kHXF!oZwj68u?}gv(@5RRo5TbY+ks$
z<==AI@WjY(^?y}9%RiaB$F#`S^w0LRr8gJ?P+Yt`ex_Fd(6xR*%mc&)iOJcC>8U_B
z=jVx@-ph4Vfyd$D-Q^rNx7sH!xAR<>A+pm<GwNl*$Gcl!AGfS$ZRpEOa?eo~z4Ku6
z!fFA|&Mm${cb{nAR_79b`?>O>_D}O5o3PC@B#&PV36>4;W@Hj!z#UpZ>yZE~?9g?g
eyBMKm6Of7Q{s3=QHjpe65EcMw9Yzoj7XSctwvFxp

literal 0
HcmV?d00001