Enhance BigQuery table schema input validation (hashicorp#8460)

* Enhance BigQuery table schema input validation

* skip TestAccBigQueryTable_invalidSchemas in VCR test

Signed-off-by: Modular Magician <[email protected]>
modular-magician committed Jul 28, 2023
1 parent 651aae3 commit 55d3909
Showing 3 changed files with 165 additions and 2 deletions.
3 changes: 3 additions & 0 deletions .changelog/8460.txt
@@ -0,0 +1,3 @@
```release-note:breaking-change
added more input validations for BigQuery table schema
```
134 changes: 134 additions & 0 deletions google/resource_bigquery_table_test.go
@@ -588,6 +588,36 @@ func TestAccBigQueryExternalDataTable_CSV(t *testing.T) {
})
}

func TestAccBigQueryExternalDataTable_CSV_WithSchema_InvalidSchemas(t *testing.T) {
t.Parallel()

bucketName := testBucketName(t)
objectName := fmt.Sprintf("tf_test_%s.csv", RandString(t, 10))

datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10))

VcrTest(t, resource.TestCase{
PreCheck: func() { acctest.AccTestPreCheck(t) },
ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryTableFromGCSWithExternalDataConfigSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_INVALID_SCHEMA_NOT_JSON),
ExpectError: regexp.MustCompile("contains an invalid JSON"),
},
{
Config: testAccBigQueryTableFromGCSWithExternalDataConfigSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_INVALID_SCHEMA_NOT_JSON_LIST),
ExpectError: regexp.MustCompile("\"schema\" is not a JSON array"),
},
{
Config: testAccBigQueryTableFromGCSWithExternalDataConfigSchema(datasetID, tableID, bucketName, objectName, TEST_SIMPLE_CSV, TEST_INVALID_SCHEMA_JSON_LIST_WITH_NULL_ELEMENT),
ExpectError: regexp.MustCompile("\"schema\" contains a nil element"),
},
},
})
}

func TestAccBigQueryExternalDataTable_CSV_WithSchemaAndConnectionID_UpdateNoConnectionID(t *testing.T) {
t.Parallel()

@@ -919,6 +949,35 @@ func TestAccBigQueryTable_emptySchema(t *testing.T) {
})
}

func TestAccBigQueryTable_invalidSchemas(t *testing.T) {
t.Parallel()
// Not an acceptance test.
acctest.SkipIfVcr(t)

datasetID := fmt.Sprintf("tf_test_%s", RandString(t, 10))
tableID := fmt.Sprintf("tf_test_%s", RandString(t, 10))

VcrTest(t, resource.TestCase{
PreCheck: func() { acctest.AccTestPreCheck(t) },
ProtoV5ProviderFactories: ProtoV5ProviderFactories(t),
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryTableWithSchema(datasetID, tableID, TEST_INVALID_SCHEMA_NOT_JSON),
ExpectError: regexp.MustCompile("contains an invalid JSON"),
},
{
Config: testAccBigQueryTableWithSchema(datasetID, tableID, TEST_INVALID_SCHEMA_NOT_JSON_LIST),
ExpectError: regexp.MustCompile("\"schema\" is not a JSON array"),
},
{
Config: testAccBigQueryTableWithSchema(datasetID, tableID, TEST_INVALID_SCHEMA_JSON_LIST_WITH_NULL_ELEMENT),
ExpectError: regexp.MustCompile("\"schema\" contains a nil element"),
},
},
})
}

func testAccCheckBigQueryExtData(t *testing.T, expectedQuoteChar string) resource.TestCheckFunc {
return func(s *terraform.State) error {
for _, rs := range s.RootModule().Resources {
@@ -1986,6 +2045,45 @@ resource "google_bigquery_table" "test" {
`, datasetID, bucketName, objectName, content, tableID, schema)
}

func testAccBigQueryTableFromGCSWithExternalDataConfigSchema(datasetID, tableID, bucketName, objectName, content, schema string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_storage_bucket" "test" {
name = "%s"
location = "US"
force_destroy = true
}
resource "google_storage_bucket_object" "test" {
name = "%s"
content = <<EOF
%s
EOF
bucket = google_storage_bucket.test.name
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
external_data_configuration {
autodetect = false
source_format = "CSV"
csv_options {
encoding = "UTF-8"
quote = ""
}
source_uris = [
"gs://${google_storage_bucket.test.name}/${google_storage_bucket_object.test.name}",
]
schema = <<EOF
%s
EOF
}
}
`, datasetID, bucketName, objectName, content, tableID, schema)
}

func testAccBigQueryTableFromGCSWithSchema_UpdatAllowQuotedNewlines(datasetID, tableID, bucketName, objectName, content, schema string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
@@ -2438,6 +2536,23 @@ resource "google_bigquery_table" "test" {
`, datasetID, tableID)
}

func testAccBigQueryTableWithSchema(datasetID, tableID, schema string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
deletion_protection = false
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
schema = <<EOF
%s
EOF
}
`, datasetID, tableID, schema)
}

var TEST_CSV = `lifelock,LifeLock,,web,Tempe,AZ,1-May-07,6850000,USD,b
lifelock,LifeLock,,web,Tempe,AZ,1-Oct-06,6000000,USD,a
lifelock,LifeLock,,web,Tempe,AZ,1-Jan-08,25000000,USD,c
@@ -2466,3 +2581,22 @@ var TEST_SIMPLE_CSV_SCHEMA = `[
"type": "INT64"
}
]`
var TEST_INVALID_SCHEMA_NOT_JSON = `
not a valid table schema
`
var TEST_INVALID_SCHEMA_NOT_JSON_LIST = `
{
"name": "country",
"type": "STRING"
}`
var TEST_INVALID_SCHEMA_JSON_LIST_WITH_NULL_ELEMENT = `[
{
"name": "country",
"type": "STRING"
},
null,
{
"name": "price",
"type": "INT64"
}
]`
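
For contrast, a schema like the following hypothetical constant (not part of this commit) is a valid JSON array with no null elements and would pass the new validation:

```go
// Hypothetical example, not added by the commit: a well-formed schema is a
// JSON array of column objects with no null entries.
var EXAMPLE_VALID_SCHEMA = `[
  {
    "name": "country",
    "type": "STRING"
  },
  {
    "name": "price",
    "type": "INT64"
  }
]`
```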
30 changes: 28 additions & 2 deletions google/services/bigquery/resource_bigquery_table.go
@@ -365,6 +365,32 @@ func resourceBigQueryTableSchemaCustomizeDiff(_ context.Context, d *schema.Resou
return resourceBigQueryTableSchemaCustomizeDiffFunc(d)
}

func validateBigQueryTableSchema(v interface{}, k string) (warnings []string, errs []error) {
if v == nil {
return
}

if _, e := validation.StringIsJSON(v, k); e != nil {
errs = append(errs, e...)
return
}

var jsonList []interface{}
if err := json.Unmarshal([]byte(v.(string)), &jsonList); err != nil {
errs = append(errs, fmt.Errorf("\"schema\" is not a JSON array: %s", err))
return
}

for _, v := range jsonList {
if v == nil {
errs = append(errs, errors.New("\"schema\" contains a nil element"))
return
}
}

return
}
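
For illustration only (not part of the commit), the sketch below re-implements the same three checks — valid JSON, JSON array, no null elements — using only the standard library. The sample inputs mirror the TEST_INVALID_SCHEMA_* constants added in the test file above; the error strings are assumptions modeled on the validator's messages.

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// checkSchema mirrors the order of checks in validateBigQueryTableSchema:
// the value must be valid JSON, must decode into a JSON array, and the
// array must not contain null elements.
func checkSchema(s string) error {
	if !json.Valid([]byte(s)) {
		return errors.New(`"schema" contains an invalid JSON`)
	}
	var fields []interface{}
	if err := json.Unmarshal([]byte(s), &fields); err != nil {
		return fmt.Errorf(`"schema" is not a JSON array: %s`, err)
	}
	for _, f := range fields {
		if f == nil {
			return errors.New(`"schema" contains a nil element`)
		}
	}
	return nil
}

func main() {
	samples := []string{
		`not a valid table schema`,                      // rejected: not JSON
		`{"name": "country", "type": "STRING"}`,         // rejected: JSON object, not an array
		`[{"name": "country", "type": "STRING"}, null]`, // rejected: array contains null
		`[{"name": "country", "type": "STRING"}]`,       // accepted
	}
	for _, s := range samples {
		fmt.Printf("%q -> %v\n", s, checkSchema(s))
	}
}
```

In the provider itself the first check is delegated to validation.StringIsJSON, so the exact wording of that error comes from the SDK rather than the sketch above.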

func ResourceBigQueryTable() *schema.Resource {
return &schema.Resource{
Create: resourceBigQueryTableCreate,
@@ -473,7 +499,7 @@ func ResourceBigQueryTable() *schema.Resource {
Optional: true,
Computed: true,
ForceNew: true,
ValidateFunc: validation.StringIsJSON,
ValidateFunc: validateBigQueryTableSchema,
StateFunc: func(v interface{}) string {
json, _ := structure.NormalizeJsonString(v)
return json
@@ -741,7 +767,7 @@ func ResourceBigQueryTable() *schema.Resource {
Type: schema.TypeString,
Optional: true,
Computed: true,
ValidateFunc: validation.StringIsJSON,
ValidateFunc: validateBigQueryTableSchema,
StateFunc: func(v interface{}) string {
json, _ := structure.NormalizeJsonString(v)
return json
