Skip to content

Commit

Permalink
Allow in-place column drop for bigquery table (#10170) (#17777)
Browse files Browse the repository at this point in the history
[upstream:e94c56025ee1215d53e4e3ca83dd4568349dac7b]

Signed-off-by: Modular Magician <[email protected]>
  • Loading branch information
modular-magician authored Apr 4, 2024
1 parent 678449a commit 088ff36
Show file tree
Hide file tree
Showing 4 changed files with 225 additions and 22 deletions.
3 changes: 3 additions & 0 deletions .changelog/10170.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added in-place column drop support for `bigquery_table`
```
90 changes: 78 additions & 12 deletions google/services/bigquery/resource_bigquery_table.go
Original file line number Diff line number Diff line change
Expand Up @@ -258,19 +258,17 @@ func bigQueryTableNormalizePolicyTags(val interface{}) interface{} {

// Compares two existing schema implementations and decides if
// it is changeable. Pairs with a ForceNew when not changeable.
func resourceBigQueryTableSchemaIsChangeable(old, new interface{}) (bool, error) {
func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTable bool, topLevel bool) (bool, error) {
switch old.(type) {
case []interface{}:
arrayOld := old.([]interface{})
arrayNew, ok := new.([]interface{})
sameNameColumns := 0
droppedColumns := 0
if !ok {
// if not both arrays not changeable
return false, nil
}
if len(arrayOld) > len(arrayNew) {
// if not growing not changeable
return false, nil
}
if err := bigQueryTablecheckNameExists(arrayOld); err != nil {
return false, err
}
Expand All @@ -291,16 +289,28 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}) (bool, error)
}
}
for key := range mapOld {
// all old keys should be represented in the new config
// dropping top level columns can happen in-place
// but this doesn't apply to external tables
if _, ok := mapNew[key]; !ok {
return false, nil
if !topLevel || isExternalTable {
return false, nil
}
droppedColumns += 1
continue
}
if isChangable, err :=
resourceBigQueryTableSchemaIsChangeable(mapOld[key], mapNew[key]); err != nil || !isChangable {

isChangable, err := resourceBigQueryTableSchemaIsChangeable(mapOld[key], mapNew[key], isExternalTable, false)
if err != nil || !isChangable {
return false, err
} else if isChangable && topLevel {
// top level column that exists in the new schema
sameNameColumns += 1
}
}
return true, nil
// in-place column dropping alongside column additions is not allowed
// as of now because user intention can be ambiguous (e.g. column renaming)
newColumns := len(arrayNew) - sameNameColumns
return (droppedColumns == 0) || (newColumns == 0), nil
case map[string]interface{}:
objectOld := old.(map[string]interface{})
objectNew, ok := new.(map[string]interface{})
Expand Down Expand Up @@ -339,7 +349,7 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}) (bool, error)
return false, nil
}
case "fields":
return resourceBigQueryTableSchemaIsChangeable(valOld, valNew)
return resourceBigQueryTableSchemaIsChangeable(valOld, valNew, isExternalTable, false)

// other parameters: description, policyTags and
// policyTags.names[] are changeable
Expand Down Expand Up @@ -378,7 +388,8 @@ func resourceBigQueryTableSchemaCustomizeDiffFunc(d tpgresource.TerraformResourc
// same as above
log.Printf("[DEBUG] unable to unmarshal json customized diff - %v", err)
}
isChangeable, err := resourceBigQueryTableSchemaIsChangeable(old, new)
_, isExternalTable := d.GetOk("external_data_configuration")
isChangeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, isExternalTable, true)
if err != nil {
return err
}
Expand Down Expand Up @@ -1712,6 +1723,12 @@ func resourceBigQueryTableRead(d *schema.ResourceData, meta interface{}) error {
return nil
}

// TableReference bundles the fully qualified identity of a BigQuery table
// (project, dataset and table IDs) so it can be passed around as one value.
type TableReference struct {
	// project is the GCP project ID that owns the dataset.
	project string
	// datasetID identifies the dataset containing the table.
	datasetID string
	// tableID identifies the table within the dataset.
	tableID string
}

func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error {
config := meta.(*transport_tpg.Config)
userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent)
Expand All @@ -1734,13 +1751,62 @@ func resourceBigQueryTableUpdate(d *schema.ResourceData, meta interface{}) error
datasetID := d.Get("dataset_id").(string)
tableID := d.Get("table_id").(string)

tableReference := &TableReference{
project: project,
datasetID: datasetID,
tableID: tableID,
}

if err = resourceBigQueryTableColumnDrop(config, userAgent, table, tableReference); err != nil {
return err
}

if _, err = config.NewBigQueryClient(userAgent).Tables.Update(project, datasetID, tableID, table).Do(); err != nil {
return err
}

return resourceBigQueryTableRead(d, meta)
}

// resourceBigQueryTableColumnDrop detects top-level columns that exist on the
// live table but were removed from the desired schema in `table`, and drops
// them in-place with an ALTER TABLE ... DROP COLUMN DDL query. This avoids a
// destructive ForceNew table recreation for simple column removals. It is a
// no-op when no columns were removed or when either schema is absent.
func resourceBigQueryTableColumnDrop(config *transport_tpg.Config, userAgent string, table *bigquery.Table, tableReference *TableReference) error {
	oldTable, err := config.NewBigQueryClient(userAgent).Tables.Get(tableReference.project, tableReference.datasetID, tableReference.tableID).Do()
	if err != nil {
		return err
	}

	// Guard against a missing schema on either side (e.g. a table defined
	// without one); there is nothing to diff in that case. The original code
	// would dereference nil here.
	if table.Schema == nil || oldTable.Schema == nil {
		return nil
	}

	// Index the column names present in the desired (new) schema.
	newTableFields := make(map[string]bool, len(table.Schema.Fields))
	for _, field := range table.Schema.Fields {
		newTableFields[field.Name] = true
	}

	// Any live column not present in the desired schema must be dropped.
	droppedColumns := []string{}
	for _, field := range oldTable.Schema.Fields {
		if !newTableFields[field.Name] {
			droppedColumns = append(droppedColumns, field.Name)
		}
	}

	if len(droppedColumns) == 0 {
		return nil
	}

	// Build a single DDL statement dropping all removed columns at once,
	// e.g. ALTER TABLE `p.d.t` DROP COLUMN a, DROP COLUMN b.
	droppedColumnsString := strings.Join(droppedColumns, ", DROP COLUMN ")
	dropColumnsDDL := fmt.Sprintf("ALTER TABLE `%s.%s.%s` DROP COLUMN %s", tableReference.project, tableReference.datasetID, tableReference.tableID, droppedColumnsString)
	log.Printf("[INFO] Dropping columns in-place: %s", dropColumnsDDL)

	useLegacySQL := false
	req := &bigquery.QueryRequest{
		Query:        dropColumnsDDL,
		UseLegacySql: &useLegacySQL,
	}

	if _, err := config.NewBigQueryClient(userAgent).Jobs.Query(tableReference.project, req).Do(); err != nil {
		return err
	}

	return nil
}

func resourceBigQueryTableDelete(d *schema.ResourceData, meta interface{}) error {
if d.Get("deletion_protection").(bool) {
return fmt.Errorf("cannot destroy instance without setting deletion_protection=false and running `terraform apply`")
Expand Down
65 changes: 55 additions & 10 deletions google/services/bigquery/resource_bigquery_table_internal_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -391,10 +391,11 @@ func TestBigQueryTableSchemaDiffSuppress(t *testing.T) {
}

// testUnitBigQueryDataTableJSONChangeableTestCase describes one
// schema-change scenario: an old and a new JSON schema, whether the table is
// an external table, and whether the change is expected to be applicable
// in-place (changeable) rather than forcing recreation.
type testUnitBigQueryDataTableJSONChangeableTestCase struct {
	name            string // test case identifier
	jsonOld         string // schema before the change, as a JSON array
	jsonNew         string // schema after the change, as a JSON array
	isExternalTable bool   // true when the table has external_data_configuration
	changeable      bool   // expected result of resourceBigQueryTableSchemaIsChangeable
}

func (testcase *testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testing.T) {
Expand All @@ -405,7 +406,7 @@ func (testcase *testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testin
if err := json.Unmarshal([]byte(testcase.jsonNew), &new); err != nil {
t.Fatalf("unable to unmarshal json - %v", err)
}
changeable, err := resourceBigQueryTableSchemaIsChangeable(old, new)
changeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, testcase.isExternalTable, true)
if err != nil {
t.Errorf("%s failed unexpectedly: %s", testcase.name, err)
}
Expand All @@ -421,6 +422,11 @@ func (testcase *testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testin
d.Before["schema"] = testcase.jsonOld
d.After["schema"] = testcase.jsonNew

if testcase.isExternalTable {
d.Before["external_data_configuration"] = ""
d.After["external_data_configuration"] = ""
}

err = resourceBigQueryTableSchemaCustomizeDiffFunc(d)
if err != nil {
t.Errorf("error on testcase %s - %v", testcase.name, err)
Expand All @@ -430,7 +436,7 @@ func (testcase *testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testin
}
}

var testUnitBigQueryDataTableIsChangableTestCases = []testUnitBigQueryDataTableJSONChangeableTestCase{
var testUnitBigQueryDataTableIsChangeableTestCases = []testUnitBigQueryDataTableJSONChangeableTestCase{
{
name: "defaultEquality",
jsonOld: "[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
Expand All @@ -447,7 +453,14 @@ var testUnitBigQueryDataTableIsChangableTestCases = []testUnitBigQueryDataTableJ
name: "arraySizeDecreases",
jsonOld: "[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }, {\"name\": \"asomeValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
jsonNew: "[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
changeable: false,
changeable: true,
},
{
name: "externalArraySizeDecreases",
jsonOld: "[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }, {\"name\": \"asomeValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
jsonNew: "[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
isExternalTable: true,
changeable: false,
},
{
name: "descriptionChanges",
Expand Down Expand Up @@ -525,6 +538,24 @@ var testUnitBigQueryDataTableIsChangableTestCases = []testUnitBigQueryDataTableJ
jsonNew: "[{\"name\": \"value3\", \"type\" : \"BOOLEAN\", \"mode\" : \"NULLABLE\", \"description\" : \"newVal\" }, {\"name\": \"value1\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
changeable: false,
},
{
name: "renameRequiredColumn",
jsonOld: "[{\"name\": \"value1\", \"type\" : \"INTEGER\", \"mode\" : \"REQUIRED\", \"description\" : \"someVal\" }]",
jsonNew: "[{\"name\": \"value3\", \"type\" : \"INTEGER\", \"mode\" : \"REQUIRED\", \"description\" : \"someVal\" }]",
changeable: false,
},
{
name: "renameNullableColumn",
jsonOld: "[{\"name\": \"value1\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
jsonNew: "[{\"name\": \"value3\", \"type\" : \"INTEGER\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
changeable: false,
},
{
name: "typeModeReqToNullAndColumnDropped",
jsonOld: "[{\"name\": \"someValue\", \"type\" : \"BOOLEAN\", \"mode\" : \"REQUIRED\", \"description\" : \"someVal\" }, {\"name\": \"someValue2\", \"type\" : \"BOOLEAN\", \"mode\" : \"NULLABLE\", \"description\" : \"someVal\" }]",
jsonNew: "[{\"name\": \"someValue\", \"type\" : \"BOOLEAN\", \"mode\" : \"NULLABLE\", \"description\" : \"some new value\" }]",
changeable: true,
},
{
name: "policyTags",
jsonOld: `[
Expand All @@ -550,15 +581,29 @@ var testUnitBigQueryDataTableIsChangableTestCases = []testUnitBigQueryDataTableJ
},
}

func TestUnitBigQueryDataTable_schemaIsChangable(t *testing.T) {
func TestUnitBigQueryDataTable_schemaIsChangeable(t *testing.T) {
t.Parallel()
for _, testcase := range testUnitBigQueryDataTableIsChangableTestCases {
for _, testcase := range testUnitBigQueryDataTableIsChangeableTestCases {
testcase.check(t)
}
}

func TestUnitBigQueryDataTable_schemaIsChangeableNested(t *testing.T) {
t.Parallel()
// Only top level column drops are changeable
customNestedValues := map[string]bool{"arraySizeDecreases": false, "typeModeReqToNullAndColumnDropped": false}
for _, testcase := range testUnitBigQueryDataTableIsChangeableTestCases {
changeable := testcase.changeable
if overrideValue, ok := customNestedValues[testcase.name]; ok {
changeable = overrideValue
}

testcaseNested := &testUnitBigQueryDataTableJSONChangeableTestCase{
testcase.name + "Nested",
fmt.Sprintf("[{\"name\": \"someValue\", \"type\" : \"INTEGER\", \"fields\" : %s }]", testcase.jsonOld),
fmt.Sprintf("[{\"name\": \"someValue\", \"type\" : \"INT64\", \"fields\" : %s }]", testcase.jsonNew),
testcase.changeable,
testcase.isExternalTable,
changeable,
}
testcaseNested.check(t)
}
Expand Down
89 changes: 89 additions & 0 deletions google/services/bigquery/resource_bigquery_table_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,39 @@ func TestAccBigQueryTable_Basic(t *testing.T) {
})
}

// TestAccBigQueryTable_DropColumns verifies that removing top-level columns
// from a table's schema is applied in-place (no table recreation) and that
// the resulting state survives an import round-trip after each apply.
func TestAccBigQueryTable_DropColumns(t *testing.T) {
	t.Parallel()

	datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))
	tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10))

	// Both apply steps are followed by the same import verification step.
	importStep := func() resource.TestStep {
		return resource.TestStep{
			ResourceName:            "google_bigquery_table.test",
			ImportState:             true,
			ImportStateVerify:       true,
			ImportStateVerifyIgnore: []string{"deletion_protection"},
		}
	}

	acctest.VcrTest(t, resource.TestCase{
		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
		CheckDestroy:             testAccCheckBigQueryTableDestroyProducer(t),
		Steps: []resource.TestStep{
			{
				Config: testAccBigQueryTableTimePartitioningDropColumns(datasetID, tableID),
			},
			importStep(),
			{
				Config: testAccBigQueryTableTimePartitioningDropColumnsUpdate(datasetID, tableID),
			},
			importStep(),
		},
	})
}

func TestAccBigQueryTable_Kms(t *testing.T) {
t.Parallel()
resourceName := "google_bigquery_table.test"
Expand Down Expand Up @@ -1761,6 +1794,62 @@ EOH
`, datasetID, tableID, partitioningType)
}

// testAccBigQueryTableTimePartitioningDropColumns returns a Terraform config
// defining a dataset and a table whose schema has three top-level columns
// (ts, some_string, some_int). Paired with
// testAccBigQueryTableTimePartitioningDropColumnsUpdate, which removes two
// of those columns to exercise in-place column dropping.
func testAccBigQueryTableTimePartitioningDropColumns(datasetID, tableID string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
  deletion_protection = false
  table_id   = "%s"
  dataset_id = google_bigquery_dataset.test.dataset_id
  schema = <<EOH
[
  {
    "name": "ts",
    "type": "TIMESTAMP"
  },
  {
    "name": "some_string",
    "type": "STRING"
  },
  {
    "name": "some_int",
    "type": "INTEGER"
  }
]
EOH
}
`, datasetID, tableID)
}

// testAccBigQueryTableTimePartitioningDropColumnsUpdate returns the updated
// Terraform config for the drop-columns acceptance test: the same dataset and
// table, with the schema reduced to the single "ts" column so that
// some_string and some_int must be dropped in-place.
func testAccBigQueryTableTimePartitioningDropColumnsUpdate(datasetID, tableID string) string {
	return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
  dataset_id = "%s"
}
resource "google_bigquery_table" "test" {
  deletion_protection = false
  table_id   = "%s"
  dataset_id = google_bigquery_dataset.test.dataset_id
  schema = <<EOH
[
  {
    "name": "ts",
    "type": "TIMESTAMP"
  }
]
EOH
}
`, datasetID, tableID)
}

func testAccBigQueryTableKms(cryptoKeyName, datasetID, tableID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
Expand Down

0 comments on commit 088ff36

Please sign in to comment.