Skip to content

Commit

Permalink
Stop sending external data configuration schema when updating google_bigquery_table (#11739) (#8234)
Browse files Browse the repository at this point in the history

[upstream:a864502459b19991660ef35f570f62468ac99c3c]

Signed-off-by: Modular Magician <[email protected]>
  • Loading branch information
modular-magician authored Sep 20, 2024
1 parent fe3a61c commit 99850a0
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .changelog/11739.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:bug
bigquery: fixed a bug where both `schema` and `external_data_configuration.schema` were sent when updating `google_bigquery_table`
```
5 changes: 5 additions & 0 deletions google-beta/services/bigquery/resource_bigquery_table.go
Original file line number Diff line number Diff line change
Expand Up @@ -1897,6 +1897,11 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error
return err
}

if table.ExternalDataConfiguration != nil && table.ExternalDataConfiguration.Schema != nil {
log.Printf("[INFO] Removing ExternalDataConfiguration.Schema when updating BigQuery table %s", d.Id())
table.ExternalDataConfiguration.Schema = nil
}

log.Printf("[INFO] Updating BigQuery table: %s", d.Id())

project, err := tpgresource.GetProject(d, config)
Expand Down
23 changes: 18 additions & 5 deletions google-beta/services/bigquery/resource_bigquery_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -230,7 +230,7 @@ func TestAccBigQueryTable_HivePartitioning(t *testing.T) {
})
}

func TestAccBigQueryTable_HivePartitioningCustomSchema(t *testing.T) {
func TestAccBigQueryTable_HivePartitioningCustomSchema_update(t *testing.T) {
t.Parallel()
bucketName := acctest.TestBucketName(t)
resourceName := "google_bigquery_table.test"
Expand All @@ -243,13 +243,22 @@ func TestAccBigQueryTable_HivePartitioningCustomSchema(t *testing.T) {
CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID),
Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "old-label"),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "deletion_protection"},
ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"},
},
{
Config: testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, "new-label"),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"external_data_configuration.0.schema", "labels", "deletion_protection"},
},
},
})
Expand Down Expand Up @@ -2131,7 +2140,7 @@ resource "google_bigquery_table" "test" {
`, bucketName, datasetID, tableID)
}

func testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID string) string {
func testAccBigQueryTableHivePartitioningCustomSchema(bucketName, datasetID, tableID, tableLabel string) string {
return fmt.Sprintf(`
resource "google_storage_bucket" "test" {
name = "%s"
Expand All @@ -2154,6 +2163,10 @@ resource "google_bigquery_table" "test" {
table_id = "%s"
dataset_id = google_bigquery_dataset.test.dataset_id
labels = {
label = "%s"
}
external_data_configuration {
source_format = "NEWLINE_DELIMITED_JSON"
autodetect = false
Expand All @@ -2180,7 +2193,7 @@ EOH
}
depends_on = ["google_storage_bucket_object.test"]
}
`, bucketName, datasetID, tableID)
`, bucketName, datasetID, tableID, tableLabel)
}

func testAccBigQueryTableAvroPartitioning(bucketName, avroFilePath, datasetID, tableID string) string {
Expand Down

0 comments on commit 99850a0

Please sign in to comment.