From 5fe624fbd8ee962c88e0048b596c4a28ec742bf8 Mon Sep 17 00:00:00 2001 From: Sarah French <15078782+SarahFrench@users.noreply.github.com> Date: Mon, 7 Oct 2024 14:18:40 +0100 Subject: [PATCH 01/19] Add TeamCity testing project for ephemeral resources feature branch, clean up imports (#11847) --- .../components/builds/build_steps.kt | 1 - .../FEATURE-BRANCH-ephemeral-resource.kt | 102 ++++++++++++++++++ .../components/projects/reused/mm_upstream.kt | 7 +- .../projects/reused/vcr_recording.kt | 3 +- .../components/projects/root_project.kt | 5 + .../terraform/.teamcity/settings.kts | 2 +- .../FEATURE-BRANCH-ephemeral-resource.kt | 79 ++++++++++++++ .../terraform/.teamcity/tests/sweepers.kt | 4 +- 8 files changed, 192 insertions(+), 11 deletions(-) create mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-ephemeral-resource.kt create mode 100644 mmv1/third_party/terraform/.teamcity/tests/FEATURE-BRANCH-ephemeral-resource.kt diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt index 68d9aa0a0206..2f1ce2a79ad5 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt @@ -7,7 +7,6 @@ package builds -import DefaultTerraformCoreVersion import jetbrains.buildServer.configs.kotlin.BuildSteps import jetbrains.buildServer.configs.kotlin.buildSteps.ScriptBuildStep diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-ephemeral-resource.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-ephemeral-resource.kt new file mode 100644 index 000000000000..de3e5f426229 --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-ephemeral-resource.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) HashiCorp, Inc. + * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. Any changes to this file in the downstream will be overwritten. + +package projects.feature_branches + +import ProviderNameBeta +import ProviderNameGa +import SharedResourceNameBeta +import SharedResourceNameGa +import SharedResourceNameVcr +import builds.* +import generated.ServicesListBeta +import generated.ServicesListGa +import jetbrains.buildServer.configs.kotlin.Project +import replaceCharsId +import vcs_roots.HashiCorpVCSRootBeta +import vcs_roots.HashiCorpVCSRootGa +import vcs_roots.ModularMagicianVCSRootBeta +import vcs_roots.ModularMagicianVCSRootGa + +const val featureBranchEphemeralResources = "FEATURE-BRANCH-ephemeral-resource" +const val EphemeralResourcesTfCoreVersion = "1.10.0-alpha20240926" // TODO - update with correct release + +// featureBranchEphemeralResourcesSubProject creates a project just for testing ephemeral resources. +// We know that all ephemeral resources we're adding are part of the Resource Manager service, so we only include those builds. +// We create builds for testing the resourcemanager service: +// - Against the GA hashicorp repo +// - Against the GA modular-magician repo +// - Against the Beta hashicorp repo +// - Against the Beta modular-magician repo +// These resemble existing projects present in TeamCity, but these all use a more recent version of Terraform including +// the new ephemeral values feature. 
+fun featureBranchEphemeralResourcesSubProject(allConfig: AllContextParameters): Project { + + val projectId = replaceCharsId(featureBranchEphemeralResources) + + val packageName = "resourcemanager" // All ephemeral resources will be in the resourcemanager package + val vcrConfig = getVcrAcceptanceTestConfig(allConfig) // Reused below for both MM testing build configs + val trigger = NightlyTriggerConfiguration( + branch = "refs/heads/$featureBranchEphemeralResources" // triggered builds must test the feature branch + ) + + + // GA + val gaConfig = getGaAcceptanceTestConfig(allConfig) + // How to make only build configuration to the relevant package(s) + val resourceManagerPackageGa = ServicesListGa.getValue(packageName) + + // Enable testing using hashicorp/terraform-provider-google + var parentId = "${projectId}_HC_GA" + val buildConfigHashiCorpGa = BuildConfigurationForSinglePackage(packageName, resourceManagerPackageGa.getValue("path"), "Ephemeral resources in $packageName (GA provider, HashiCorp downstream)", ProviderNameGa, parentId, HashiCorpVCSRootGa, listOf(SharedResourceNameGa), gaConfig) + buildConfigHashiCorpGa.addTrigger(trigger) + + // Enable testing using modular-magician/terraform-provider-google + parentId = "${projectId}_MM_GA" + val buildConfigModularMagicianGa = BuildConfigurationForSinglePackage(packageName, resourceManagerPackageGa.getValue("path"), "Ephemeral resources in $packageName (GA provider, MM upstream)", ProviderNameGa, parentId, ModularMagicianVCSRootGa, listOf(SharedResourceNameVcr), vcrConfig) + // No trigger added here (MM upstream is manual only) + + // Beta + val betaConfig = getBetaAcceptanceTestConfig(allConfig) + val resourceManagerPackageBeta = ServicesListBeta.getValue(packageName) + + // Enable testing using hashicorp/terraform-provider-google-beta + parentId = "${projectId}_HC_BETA" + val buildConfigHashiCorpBeta = BuildConfigurationForSinglePackage(packageName, resourceManagerPackageBeta.getValue("path"), "Ephemeral resources in $packageName (Beta provider, HashiCorp downstream)", ProviderNameBeta, parentId, HashiCorpVCSRootBeta, listOf(SharedResourceNameBeta), betaConfig) + buildConfigHashiCorpBeta.addTrigger(trigger) + + // Enable testing using modular-magician/terraform-provider-google-beta + parentId = "${projectId}_MM_BETA" + val buildConfigModularMagicianBeta = BuildConfigurationForSinglePackage(packageName, resourceManagerPackageBeta.getValue("path"), "Ephemeral resources in $packageName (Beta provider, MM upstream)", ProviderNameBeta, parentId, ModularMagicianVCSRootBeta, listOf(SharedResourceNameVcr), vcrConfig) + // No trigger added here (MM upstream is manual only) + + + // ------ + + // Make all builds use a 1.10.0-ish version of TF core + val allBuildConfigs = listOf(buildConfigHashiCorpGa, buildConfigModularMagicianGa, buildConfigHashiCorpBeta, buildConfigModularMagicianBeta) + allBuildConfigs.forEach{ b -> + b.overrideTerraformCoreVersion(EphemeralResourcesTfCoreVersion) + } + + // ------ + + return Project{ + id(projectId) + name = featureBranchEphemeralResources + description = "Subproject for testing feature branch $featureBranchEphemeralResources" + + // Register all build configs in the project + allBuildConfigs.forEach{ b -> + buildType(b) + } + + params { + readOnlySettings() + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/mm_upstream.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/mm_upstream.kt index 288df583bf25..7d1d79b15b63 100644 
--- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/mm_upstream.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/mm_upstream.kt @@ -14,12 +14,7 @@ import ServiceSweeperCronName import ServiceSweeperManualName import SharedResourceNameVcr import builds.* -import generated.PackagesListBeta -import generated.PackagesListGa -import generated.ServicesListBeta -import generated.ServicesListGa -import generated.SweepersListBeta -import generated.SweepersListGa +import generated.* import jetbrains.buildServer.configs.kotlin.BuildType import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/vcr_recording.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/vcr_recording.kt index 8df65299015f..89d6ffa04431 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/vcr_recording.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/vcr_recording.kt @@ -9,7 +9,8 @@ package projects.reused import SharedResourceNameVcr import VcrRecordingProjectId -import builds.* +import builds.AccTestConfiguration +import builds.VcrDetails import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot import replaceCharsId diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt index 7130a9c35ea8..d0a4308a2702 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt @@ -18,6 +18,7 @@ import generated.ServicesListBeta import generated.ServicesListGa import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.sharedResource +import projects.feature_branches.featureBranchEphemeralResourcesSubProject // googleCloudRootProject returns a root project that contains a subprojects for the GA and Beta version of the // Google provider. There are also resources to help manage the test projects used for acceptance tests. @@ -62,6 +63,10 @@ fun googleCloudRootProject(allConfig: AllContextParameters): Project { subProject(googleSubProjectBeta(allConfig)) subProject(projectSweeperSubProject(allConfig)) + // Feature branch-testing projects - these will be added and removed as needed + subProject(featureBranchEphemeralResourcesSubProject(allConfig)) + + params { readOnlySettings() } diff --git a/mmv1/third_party/terraform/.teamcity/settings.kts b/mmv1/third_party/terraform/.teamcity/settings.kts index 518323d7bdef..ac329c4bd6b3 100644 --- a/mmv1/third_party/terraform/.teamcity/settings.kts +++ b/mmv1/third_party/terraform/.teamcity/settings.kts @@ -5,9 +5,9 @@ // This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. Any changes to this file in the downstream will be overwritten. 
-import projects.googleCloudRootProject import builds.AllContextParameters import jetbrains.buildServer.configs.kotlin.* +import projects.googleCloudRootProject version = "2024.03" diff --git a/mmv1/third_party/terraform/.teamcity/tests/FEATURE-BRANCH-ephemeral-resource.kt b/mmv1/third_party/terraform/.teamcity/tests/FEATURE-BRANCH-ephemeral-resource.kt new file mode 100644 index 000000000000..9b52c2c9d2fe --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/tests/FEATURE-BRANCH-ephemeral-resource.kt @@ -0,0 +1,79 @@ +/* + * Copyright (c) HashiCorp, Inc. + * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is maintained in the GoogleCloudPlatform/magic-modules repository and copied into the downstream provider repositories. Any changes to this file in the downstream will be overwritten. + +package tests + +import jetbrains.buildServer.configs.kotlin.triggers.ScheduleTrigger +import org.junit.Assert +import org.junit.Test +import projects.feature_branches.featureBranchEphemeralResources +import projects.googleCloudRootProject + +class FeatureBranchEphemeralResourcesSubProject { + @Test + fun buildsUsingHashiCorpReposAreOnSchedule() { + val root = googleCloudRootProject(testContextParameters()) + + // Find feature branch project + val project = getSubProject(root, featureBranchEphemeralResources) + + // All builds using the HashiCorp owned GitHub repos + val hashiBuilds = project.buildTypes.filter { bt -> + bt.name.contains("HashiCorp downstream") + } + + hashiBuilds.forEach{bt -> + Assert.assertTrue( + "Build configuration `${bt.name}` should contain at least one trigger", + bt.triggers.items.isNotEmpty() + ) + // Look for at least one CRON trigger + var found = false + lateinit var schedulingTrigger: ScheduleTrigger + for (item in bt.triggers.items){ + if (item.type == "schedulingTrigger") { + schedulingTrigger = item as ScheduleTrigger + found = true + break + } + } + + Assert.assertTrue( + "Build configuration `${bt.name}` should contain a CRON/'schedulingTrigger' trigger", + found + ) + + // Check that triggered builds are being run on the feature branch + val isCorrectBranch: Boolean = schedulingTrigger.branchFilter == "+:refs/heads/$featureBranchEphemeralResources" + + Assert.assertTrue( + "Build configuration `${bt.name}` is using the $featureBranchEphemeralResources branch filter", + isCorrectBranch + ) + } + } + + @Test + fun buildsUsingModularMagicianReposAreNotTriggered() { + val root = googleCloudRootProject(testContextParameters()) + + // Find feature branch project + val project = getSubProject(root, featureBranchEphemeralResources) + + // All builds using the HashiCorp owned GitHub repos + val magicianBuilds = project.buildTypes.filter { bt -> + bt.name.contains("MM upstream") + } + + magicianBuilds.forEach{bt -> + Assert.assertTrue( + "Build configuration `${bt.name}` should not have any triggers", + bt.triggers.items.isEmpty() + ) + } + } +} diff --git a/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt b/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt index f6babaa4807a..6fd5c9e0efc0 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt @@ -8,9 +8,9 @@ package tests import ProjectSweeperName -import ServiceSweeperName import ServiceSweeperCronName import ServiceSweeperManualName +import ServiceSweeperName import jetbrains.buildServer.configs.kotlin.BuildType import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.triggers.ScheduleTrigger 
@@ -136,7 +136,7 @@ class SweeperTests { // Find Project sweeper project's build val projectSweeperProject = getSubProject(root, projectSweeperProjectName) - val projectSweeper: BuildType = getBuildFromProject(projectSweeperProject!!, ProjectSweeperName) + val projectSweeper: BuildType = getBuildFromProject(projectSweeperProject, ProjectSweeperName) // Check only one schedule trigger is on the builds in question assertTrue(sweeperGa.triggers.items.size == 1) From 64d9dd8b5388a936338247d6ffa1148f6ba18de5 Mon Sep 17 00:00:00 2001 From: karolgorc Date: Mon, 7 Oct 2024 18:22:13 +0200 Subject: [PATCH 02/19] Clarify doc for `google_compute_instance` and related resources regarding `access_config` block (#11921) Co-authored-by: Sam Levenick --- .../terraform/website/docs/r/compute_instance.html.markdown | 2 +- .../website/docs/r/compute_instance_template.html.markdown | 2 +- .../docs/r/compute_region_instance_template.html.markdown | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown index ab0e0f752037..9f51d2dfa18f 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown @@ -386,7 +386,7 @@ is desired, you will need to modify your state file manually using is not accessible from the Internet. If omitted, ssh provisioners will not work unless Terraform can send traffic to the instance's network (e.g. via tunnel or because it is running on another cloud instance on that network). - This block can be repeated multiple times. Structure [documented below](#nested_access_config). + This block can be specified once per `network_interface`. Structure [documented below](#nested_access_config). * `alias_ip_range` - (Optional) An array of alias IP ranges for this network interface. Can only be specified for network diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index e02439fe1233..afe75854b4e5 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -553,7 +553,7 @@ The following arguments are supported: is not accessible from the Internet (this means that ssh provisioners will not work unless you are running Terraform can send traffic to the instance's network (e.g. via tunnel or because it is running on another cloud instance - on that network). This block can be repeated multiple times. Structure [documented below](#nested_access_config). + on that network). This block can be specified once per `network_interface`. Structure [documented below](#nested_access_config). * `alias_ip_range` - (Optional) An array of alias IP ranges for this network interface. 
Can only be specified for network diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index 84cbcae282cd..facff74df4b6 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -519,7 +519,7 @@ The following arguments are supported: is not accessible from the Internet (this means that ssh provisioners will not work unless you are running Terraform can send traffic to the instance's network (e.g. via tunnel or because it is running on another cloud instance - on that network). This block can be repeated multiple times. Structure [documented below](#nested_access_config). + on that network). This block can be specified once per `network_interface`. Structure [documented below](#nested_access_config). * `alias_ip_range` - (Optional) An array of alias IP ranges for this network interface. Can only be specified for network From 1c7cde219fcd5a3568735db9e132b6cdc563ff05 Mon Sep 17 00:00:00 2001 From: Feng Zhe Date: Mon, 7 Oct 2024 10:58:53 -0700 Subject: [PATCH 03/19] tests: add a acc test for CAS instance creation. (#11931) --- .../resource_sql_database_instance_test.go | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go index ee76c51dd43d..40cd23f6eec8 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go @@ -2488,6 +2488,50 @@ func TestAccSqlDatabaseInstance_useInternalCaByDefault(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_useCasBasedServerCa(t *testing.T) { + t.Parallel() + + databaseName := "tf-test-" + acctest.RandString(t, 10) + resourceName := "google_sql_database_instance.instance" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstance_setCasServerCa(databaseName, "GOOGLE_MANAGED_CAS_CA"), + Check: resource.ComposeTestCheckFunc(resource.TestCheckResourceAttr(resourceName, "settings.0.ip_configuration.0.server_ca_mode", "GOOGLE_MANAGED_CAS_CA")), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testGoogleSqlDatabaseInstance_setCasServerCa(databaseName, serverCaMode string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "POSTGRES_15" + deletion_protection = false + settings { + tier = "db-f1-micro" + ip_configuration { + ipv4_enabled = "true" + server_ca_mode = "%s" + } + } +} +`, databaseName, serverCaMode) +} + func testGoogleSqlDatabaseInstance_setSslOptionsForPostgreSQL(databaseName string, databaseVersion string, sslMode string) string { return fmt.Sprintf(` resource "google_sql_database_instance" "instance" { From 55d45c535ff09145223f3f5f62c52fb1053de30b Mon Sep 17 00:00:00 2001 From: sawaiba-s <114277441+sawaiba-s@users.noreply.github.com> 
Date: Mon, 7 Oct 2024 15:29:37 -0400 Subject: [PATCH 04/19] adding new google_secure_source_manager_branch_rule resource (#11709) Co-authored-by: Zhenhua Li --- .../securesourcemanager/BranchRule.yaml | 142 ++++++++++++++++++ ...e_source_manager_branch_rule_basic.tf.tmpl | 26 ++++ ...ce_manager_branch_rule_with_fields.tf.tmpl | 32 ++++ ..._source_manager_branch_rule_update_test.go | 112 ++++++++++++++ 4 files changed, 312 insertions(+) create mode 100644 mmv1/products/securesourcemanager/BranchRule.yaml create mode 100644 mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml new file mode 100644 index 000000000000..5d5bd25580f9 --- /dev/null +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -0,0 +1,142 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'BranchRule' +description: 'BranchRule is the protection rule to enforce pre-defined rules on designated branches within a repository.' 
+references: + guides: + 'Official Documentation': 'https://cloud.google.com/secure-source-manager/docs/overview' +docs: +id_format: 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/branchRules/{{branch_rule_id}}' +base_url: 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/branchRules?branch_rule_id={{branch_rule_id}}' +self_link: 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/branchRules/{{branch_rule_id}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/branchRules/{{branch_rule_id}}' + - '{{branch_rule_id}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +update_verb: 'PATCH' +update_mask: true +async: + actions: ['create', 'delete'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + wait_ms: 1000 + result: + path: 'response' + resource_inside_response: false + error: + path: 'error' + message: 'message' +custom_code: +examples: + - name: 'secure_source_manager_branch_rule_basic' + primary_resource_id: 'basic' + vars: + branch_rule_id: 'my-basic-branchrule' + repository_id: 'my-basic-repository' + instance_id: 'my-basic-instance' + prevent_destroy: 'true' + test_vars_overrides: + 'prevent_destroy': 'false' + oics_vars_overrides: + 'prevent_destroy': 'false' + - name: 'secure_source_manager_branch_rule_with_fields' + primary_resource_id: 'default' + vars: + branch_rule_id: 'my-initial-branchrule' + repository_id: 'my-initial-repository' + instance_id: 'my-initial-instance' + prevent_destroy: 'true' + test_vars_overrides: + 'prevent_destroy': 'false' + oics_vars_overrides: + 'prevent_destroy': 'false' +parameters: + - name: 'branch_rule_id' + type: String + description: | + The ID for the BranchRule. + url_param_only: true + required: true + - name: 'location' + type: String + description: | + The location for the Repository. + url_param_only: true + required: true + - name: 'repository_id' + type: String + description: | + The ID for the Repository. + url_param_only: true + required: true +properties: + - name: 'name' + type: String + description: | + The resource name for the BranchRule. + output: true + - name: 'uid' + type: String + description: | + Unique identifier of the BranchRule. + output: true + - name: 'createTime' + type: Time + description: | + Time the BranchRule was created in UTC. + output: true + - name: 'updateTime' + type: Time + description: | + Time the BranchRule was updated in UTC. + output: true + - name: 'includePattern' + type: String + description: | + The BranchRule matches branches based on the specified regular expression. Use .* to match all branches. + required: true + - name: 'disabled' + type: Boolean + description: | + Determines if the branch rule is disabled or not. + - name: 'requirePullRequest' + type: Boolean + description: | + Determines if the branch rule requires a pull request or not. + - name: 'minimumReviewsCount' + type: Integer + description: | + The minimum number of reviews required for the branch rule to be matched. + - name: 'minimumApprovalsCount' + type: Integer + description: | + The minimum number of approvals required for the branch rule to be matched. + - name: 'requireCommentsResolved' + type: Boolean + description: | + Determines if require comments resolved before merging to the branch. + - name: 'allowStaleReviews' + type: Boolean + description: | + Determines if allow stale reviews or approvals before merging to the branch. 
+ - name: 'requireLinearHistory' + type: Boolean + description: | + Determines if require linear history before merging to the branch. diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl new file mode 100644 index 000000000000..395020b4a1be --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_basic.tf.tmpl @@ -0,0 +1,26 @@ +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "{{index $.Vars "instance_id"}}" + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "{{index $.Vars "prevent_destroy"}}" + } +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "{{index $.Vars "repository_id"}}" + location = google_secure_source_manager_instance.instance.location + instance = google_secure_source_manager_instance.instance.name + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "{{index $.Vars "prevent_destroy"}}" + } +} + +resource "google_secure_source_manager_branch_rule" "basic" { + branch_rule_id = "{{index $.Vars "branch_rule_id"}}" + repository_id = google_secure_source_manager_repository.repository.repository_id + location = google_secure_source_manager_repository.repository.location + # This field is required for BranchRule creation + include_pattern = "main" +} diff --git a/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl new file mode 100644 index 000000000000..be6b581b660a --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_branch_rule_with_fields.tf.tmpl @@ -0,0 +1,32 @@ +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "{{index $.Vars "instance_id"}}" + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "{{index $.Vars "prevent_destroy"}}" + } +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "{{index $.Vars "repository_id"}}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. 
+ lifecycle { + prevent_destroy = "{{index $.Vars "prevent_destroy"}}" + } +} + +resource "google_secure_source_manager_branch_rule" "default" { + branch_rule_id = "{{index $.Vars "branch_rule_id"}}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + include_pattern = "test" + minimum_approvals_count = 2 + minimum_reviews_count = 2 + require_comments_resolved = true + require_linear_history = true + require_pull_request = true + disabled = false + allow_stale_reviews = false +} diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go new file mode 100644 index 000000000000..3e219e5659b7 --- /dev/null +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go @@ -0,0 +1,112 @@ +package securesourcemanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithFieldsExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "prevent_destroy": false, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithFieldsExample_full(context), + }, + { + ResourceName: "google_secure_source_manager_branch_rule.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"branch_rule_id", "location", "repository_id"}, + }, + { + Config: testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithFieldsExample_update(context), + }, + { + ResourceName: "google_secure_source_manager_branch_rule.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"branch_rule_id", "location", "repository_id"}, + }, + }, + }) +} + +func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithFieldsExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-initial-instance%{random_suffix}" + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "tf-test-my-initial-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. 
+ lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_branch_rule" "default" { + branch_rule_id = "tf-test-my-initial-branchrule%{random_suffix}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + include_pattern = "test" + minimum_approvals_count = 2 + minimum_reviews_count = 2 + require_comments_resolved = true + require_linear_history = true + require_pull_request = true + disabled = false + allow_stale_reviews = false +} +`, context) +} + +func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithFieldsExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-initial-instance%{random_suffix}" + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "tf-test-my-initial-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. + lifecycle { + prevent_destroy = "%{prevent_destroy}" + } +} + +resource "google_secure_source_manager_branch_rule" "default" { + branch_rule_id = "tf-test-my-initial-branchrule%{random_suffix}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + include_pattern = "test" + minimum_approvals_count = 1 + minimum_reviews_count = 1 + require_linear_history = false +} +`, context) +} From b8be56910e7d8667d276af632607d93e7b858eed Mon Sep 17 00:00:00 2001 From: Sarah French <15078782+SarahFrench@users.noreply.github.com> Date: Mon, 7 Oct 2024 23:09:03 +0100 Subject: [PATCH 05/19] Update examples about how to make major release test projects in TeamCity (#11888) --- mmv1/third_party/terraform/.teamcity/CONTRIBUTION_GUIDE.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/CONTRIBUTION_GUIDE.md b/mmv1/third_party/terraform/.teamcity/CONTRIBUTION_GUIDE.md index 01da8c75c787..b8e5b3d281d6 100644 --- a/mmv1/third_party/terraform/.teamcity/CONTRIBUTION_GUIDE.md +++ b/mmv1/third_party/terraform/.teamcity/CONTRIBUTION_GUIDE.md @@ -89,7 +89,12 @@ If you want to test a feature branch on a schedule ahead of a release you can up First, make sure that the feature branch `FEATURE-BRANCH-major-release-X.0.0` is created in the downstream TPG and TPGB repositories, where X is the major version. -See this PR as an example of adding a major release testing project: https://github.com/SarahFrench/magic-modules/pull/9/files +See these PRs as examples of adding a major release testing project: +- v6.0.0: + - https://github.com/GoogleCloudPlatform/magic-modules/pull/11104 + - https://github.com/GoogleCloudPlatform/magic-modules/pull/11143 (a fix to the one above) +- v7.0.0: + - https://github.com/GoogleCloudPlatform/magic-modules/pull/11887 That PR creates a new file at `.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-X.0.0.kt` (replacing `X` with the version number). This file defines a new project that will contain all the builds run against the feature branch. 
See [FEATURE-BRANCH-major-release-6.0.0.kt](https://github.com/GoogleCloudPlatform/magic-modules/blob/main/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-6.0.0.kt) as an example. From 862a4608eae0216ab1a5300af9638d51e0b12ae4 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Tue, 8 Oct 2024 12:44:50 -0700 Subject: [PATCH 06/19] Add major release number to `dev` ua (#11938) --- mmv1/third_party/terraform/version/version.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/version/version.go b/mmv1/third_party/terraform/version/version.go index b2d946a5d206..f21ad9d7203c 100644 --- a/mmv1/third_party/terraform/version/version.go +++ b/mmv1/third_party/terraform/version/version.go @@ -2,5 +2,5 @@ package version var ( // ProviderVersion is set during the release process to the release version of the binary - ProviderVersion = "dev" + ProviderVersion = "dev6" ) From 991966b4652bc7d10cc7545349f33b2a0fceed2a Mon Sep 17 00:00:00 2001 From: Sam Levenick Date: Tue, 8 Oct 2024 17:07:43 -0400 Subject: [PATCH 07/19] Force send internal_ip_only (#11923) --- .../dataproc/resource_dataproc_cluster.go | 1 + .../resource_dataproc_cluster_test.go.tmpl | 28 ++++++++++++------- .../resource_dataproc_job_test.go.tmpl | 1 + 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go index 66e53985a69c..e6b7c6e27d1a 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster.go @@ -2177,6 +2177,7 @@ func expandGceClusterConfig(d *schema.ResourceData, config *transport_tpg.Config } if v, ok := cfg["internal_ip_only"]; ok { conf.InternalIpOnly = v.(bool) + conf.ForceSendFields = append(conf.ForceSendFields, "InternalIpOnly") } if v, ok := cfg["metadata"]; ok { conf.Metadata = tpgresource.ConvertStringMap(v.(map[string]interface{})) diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl index 962b8f521239..7cb8c98bd466 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_cluster_test.go.tmpl @@ -28,7 +28,7 @@ func TestAccDataprocCluster_missingZoneGlobalRegion1(t *testing.T) { rnd := acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -44,7 +44,7 @@ func TestAccDataprocCluster_missingZoneGlobalRegion2(t *testing.T) { rnd := acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), Steps: []resource.TestStep{ { @@ -559,7 +559,6 @@ func TestAccDataprocCluster_spotWithAuxiliaryNodeGroups(t *testing.T) { resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group.0.node_group_config.0.accelerators.0.accelerator_count", "1"), 
resource.TestCheckResourceAttr("google_dataproc_cluster.with_auxiliary_node_groups", "cluster_config.0.auxiliary_node_groups.0.node_group_id", "node-group-id"), testAccCheckDataprocAuxiliaryNodeGroupAccelerator(&cluster, project), - ), }, }, @@ -707,7 +706,7 @@ func TestAccDataprocCluster_withServiceAcc(t *testing.T) { ExternalProviders: map[string]resource.ExternalProvider{ "time": {}, }, - CheckDestroy: testAccCheckDataprocClusterDestroy(t), + CheckDestroy: testAccCheckDataprocClusterDestroy(t), Steps: []resource.TestStep{ { Config: testAccDataprocCluster_withServiceAcc(sa, rnd, subnetworkName), @@ -827,13 +826,13 @@ func TestAccDataprocCluster_withLifecycleConfigAutoDeletion(t *testing.T) { CheckDestroy: testAccCheckDataprocClusterDestroy(t), Steps: []resource.TestStep{ { - Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour * 10).Format(fmtString), subnetworkName), + Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour*10).Format(fmtString), subnetworkName), Check: resource.ComposeTestCheckFunc( testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_lifecycle_config", &cluster), ), }, { - Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour * 20).Format(fmtString), subnetworkName), + Config: testAccDataprocCluster_withLifecycleConfigAutoDeletionTime(rnd, now.Add(time.Hour*20).Format(fmtString), subnetworkName), Check: resource.ComposeTestCheckFunc( testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_lifecycle_config", &cluster), ), @@ -1046,7 +1045,7 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) { updateServiceId := "tf-test-metastore-srv-update-" + acctest.RandString(t, 10) msName_basic := fmt.Sprintf("projects/%s/locations/us-central1/services/%s", pid, basicServiceId) msName_update := fmt.Sprintf("projects/%s/locations/us-central1/services/%s", pid, updateServiceId) - + var cluster dataproc.Cluster clusterName := "tf-test-" + acctest.RandString(t, 10) acctest.VcrTest(t, resource.TestCase{ @@ -1058,8 +1057,7 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) { Config: testAccDataprocCluster_withMetastoreConfig(clusterName, basicServiceId), Check: resource.ComposeTestCheckFunc( testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_metastore_config", &cluster), - resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service",msName_basic), - + resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service", msName_basic), ), }, { @@ -1067,7 +1065,6 @@ func TestAccDataprocCluster_withMetastoreConfig(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckDataprocClusterExists(t, "google_dataproc_cluster.with_metastore_config", &cluster), resource.TestCheckResourceAttr("google_dataproc_cluster.with_metastore_config", "cluster_config.0.metastore_config.0.dataproc_metastore_service", msName_update), - ), }, }, @@ -1417,6 +1414,10 @@ resource "google_dataproc_cluster" "accelerated_cluster" { region = "us-central1" cluster_config { + software_config { + image_version = "2.0.35-debian10" + } + gce_cluster_config { subnetwork = "%s" zone = "%s" @@ -1652,6 +1653,9 @@ resource "google_dataproc_cluster" "basic" { region = "us-central1" cluster_config { + software_config { + image_version = "2.0.35-debian10" + } gce_cluster_config { subnetwork = 
"%s" zone = "us-central1-f" @@ -1764,6 +1768,7 @@ resource "google_dataproc_cluster" "with_init_action" { # Keep the costs down with smallest config we can get away with software_config { + image_version = "2.0.35-debian10" override_properties = { "dataproc:dataproc.allow.zero.workers" = "true" } @@ -2028,6 +2033,7 @@ resource "google_dataproc_cluster" "with_bucket" { # Keep the costs down with smallest config we can get away with software_config { + image_version = "2.0.35-debian10" override_properties = { "dataproc:dataproc.allow.zero.workers" = "true" } @@ -2061,6 +2067,7 @@ resource "google_dataproc_cluster" "with_bucket" { # Keep the costs down with smallest config we can get away with software_config { + image_version = "2.0.35-debian10" override_properties = { "dataproc:dataproc.allow.zero.workers" = "true" } @@ -2250,6 +2257,7 @@ resource "google_dataproc_cluster" "with_service_account" { cluster_config { # Keep the costs down with smallest config we can get away with software_config { + image_version = "2.0.35-debian10" override_properties = { "dataproc:dataproc.allow.zero.workers" = "true" } diff --git a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl index afb392cf6ec2..247e6b5c834b 100644 --- a/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataproc/resource_dataproc_job_test.go.tmpl @@ -862,6 +862,7 @@ resource "google_dataproc_cluster" "basic" { cluster_config { # Keep the costs down with smallest config we can get away with software_config { + image_version = "2.0.35-debian10" override_properties = { "dataproc:dataproc.allow.zero.workers" = "true" } From b20c8e5eda439c3c958a213d2cfabbc6b8f44662 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 8 Oct 2024 14:09:58 -0700 Subject: [PATCH 08/19] Added information about VCR tests (#11943) --- docs/content/contribute/create-pr.md | 3 ++- docs/content/develop/test/test.md | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docs/content/contribute/create-pr.md b/docs/content/contribute/create-pr.md index 3b2617016c25..4c8c24b95138 100644 --- a/docs/content/contribute/create-pr.md +++ b/docs/content/contribute/create-pr.md @@ -17,8 +17,9 @@ weight: 10 ## Code review 1. A reviewer will automatically be assigned to your PR. -1. Creating a new pull request or pushing a new commit automatically triggers our CI pipelines and workflows. After CI starts, downstream diff generation takes about 10 minutes; VCR tests can take up to 2 hours. If you are a community contributor, some tests will only run after approval from a reviewer. +1. Creating a new pull request or pushing a new commit automatically triggers our CI pipelines and workflows. After CI starts, downstream diff generation takes about 10 minutes; [VCR tests]({{< ref "/develop/test/test.md" >}}) can take up to 2 hours. If you are a community contributor, some tests will only run after approval from a reviewer. - While convenient, relying on CI to test iterative changes to PRs often adds extreme latency to reviews if there are errors in test configurations or at runtime. We **strongly** recommend you [test your changes locally before pushing]({{< ref "/develop/test/run-tests" >}}) even after the initial change. + - VCR tests will first attempt to play back recorded HTTP requests (REPLAYING mode). 
If any tests fail, they will run in RECORDING mode to generate a new cassette; then, the same tests will run again in REPLAYING mode to detect any nondeterministic behavior in the test (which can cause flaky tests.) 1. If your assigned reviewer does not respond to changes on a pull request within two US business days, ping them on the pull request. {{< hint info >}} diff --git a/docs/content/develop/test/test.md b/docs/content/develop/test/test.md index 38754fdff36a..69f97e9dd679 100644 --- a/docs/content/develop/test/test.md +++ b/docs/content/develop/test/test.md @@ -15,6 +15,13 @@ aliases: This page describes how to add tests to a new resource in the `google` or `google-beta` Terraform provider. +The providers have two basic types of tests: + +- Unit tests: test specific functions thoroughly. Unit tests do not interact with GCP APIs. +- Acceptance tests (aka VCR tests, or create and update tests): test that resources interact as expected with the APIs. Acceptance tests interact with GCP APIs, but should only test the provider's behavior in constructing the API requests and parsing the responses. + +Acceptance tests are also called "VCR tests" because they use [`go-vcr`](https://github.com/dnaeon/go-vcr) to record and play back HTTP requests. This allows tests to run more quickly on PRs because the resources don't actually need to be created, updated, or destroyed by the live API. + For more information about testing, see the [official Terraform documentation](https://developer.hashicorp.com/terraform/plugin/sdkv2/testing/acceptance-tests). ## Before you begin From 2c219b6d3cf2720fde86ff8b1d65abca8752f7d8 Mon Sep 17 00:00:00 2001 From: Avinash Navada Date: Tue, 8 Oct 2024 18:33:53 -0400 Subject: [PATCH 09/19] pubsub: fix permadiff with configuring an empty retry_policy. (#11834) --- mmv1/products/pubsub/Subscription.yaml | 2 + .../resource_pubsub_subscription_test.go | 40 +++++++++++++++++++ .../data/example_pubsub_subscription.json | 3 +- ...ample_pubsub_subscription_iam_binding.json | 3 +- ...xample_pubsub_subscription_iam_member.json | 3 +- ...xample_pubsub_subscription_iam_policy.json | 3 +- 6 files changed, 50 insertions(+), 4 deletions(-) diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index 5ed3ea7988db..637be094da13 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -471,6 +471,8 @@ properties: A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". 
default_from_api: true diff_suppress_func: 'tpgresource.DurationDiffSuppress' + send_empty_value: true + allow_empty_object: true - name: 'enableMessageOrdering' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go index af40a3411333..403a732ded4d 100644 --- a/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go +++ b/mmv1/third_party/terraform/services/pubsub/resource_pubsub_subscription_test.go @@ -36,6 +36,30 @@ func TestAccPubsubSubscription_emptyTTL(t *testing.T) { }) } +func TestAccPubsubSubscription_emptyRetryPolicy(t *testing.T) { + t.Parallel() + + topic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) + subscription := fmt.Sprintf("tf-test-sub-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckPubsubSubscriptionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccPubsubSubscription_emptyRetryPolicy(topic, subscription), + }, + { + ResourceName: "google_pubsub_subscription.foo", + ImportStateId: subscription, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func TestAccPubsubSubscription_basic(t *testing.T) { t.Parallel() @@ -494,6 +518,22 @@ resource "google_pubsub_subscription" "foo" { `, topic, subscription) } +func testAccPubsubSubscription_emptyRetryPolicy(topic, subscription string) string { + return fmt.Sprintf(` +resource "google_pubsub_topic" "foo" { + name = "%s" +} + +resource "google_pubsub_subscription" "foo" { + name = "%s" + topic = google_pubsub_topic.foo.id + + retry_policy { + } +} +`, topic, subscription) +} + func testAccPubsubSubscription_push(topicFoo, saAccount, subscription string) string { return fmt.Sprintf(` data "google_project" "project" { } diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription.json b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription.json index bdc3d1f93e56..2fa16e2aee4f 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription.json @@ -48,8 +48,9 @@ "pushConfig": { "pushEndpoint": "https://example.com/push" }, + "retryPolicy": null, "topic": "projects/{{.Provider.project}}/topics/example-pubsub-topic" } } } -] \ No newline at end of file +] diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_binding.json b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_binding.json index dd012c01a7af..7a753fb0f81a 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_binding.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_binding.json @@ -20,6 +20,7 @@ "pushConfig": { "pushEndpoint": "https://example.com/push" }, + "retryPolicy": null, "topic": "projects/{{.Provider.project}}/topics/example-pubsub-topic" } }, @@ -34,4 +35,4 @@ ] } } -] \ No newline at end of file +] diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_member.json b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_member.json index dd012c01a7af..7a753fb0f81a 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_member.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_member.json @@ -20,6 +20,7 @@ "pushConfig": 
{ "pushEndpoint": "https://example.com/push" }, + "retryPolicy": null, "topic": "projects/{{.Provider.project}}/topics/example-pubsub-topic" } }, @@ -34,4 +35,4 @@ ] } } -] \ No newline at end of file +] diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_policy.json b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_policy.json index dd012c01a7af..7a753fb0f81a 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_policy.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_subscription_iam_policy.json @@ -20,6 +20,7 @@ "pushConfig": { "pushEndpoint": "https://example.com/push" }, + "retryPolicy": null, "topic": "projects/{{.Provider.project}}/topics/example-pubsub-topic" } }, @@ -34,4 +35,4 @@ ] } } -] \ No newline at end of file +] From 47f6ff1f4da285bf47ffbb0584a4cf476324f18e Mon Sep 17 00:00:00 2001 From: Brock Mammen Date: Wed, 9 Oct 2024 08:08:13 -0700 Subject: [PATCH 10/19] Add example of a Workload Identity Pool configuration for GitHub Actions (#11936) --- .../iambeta/WorkloadIdentityPoolProvider.yaml | 5 ++++ ...ntity_pool_provider_github_actions.tf.tmpl | 26 +++++++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 mmv1/templates/terraform/examples/iam_workload_identity_pool_provider_github_actions.tf.tmpl diff --git a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml index f45e2771b16d..13400e181be5 100644 --- a/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml +++ b/mmv1/products/iambeta/WorkloadIdentityPoolProvider.yaml @@ -59,6 +59,11 @@ examples: vars: workload_identity_pool_id: 'example-pool' workload_identity_pool_provider_id: 'example-prvdr' + - name: 'iam_workload_identity_pool_provider_github_actions' + primary_resource_id: 'example' + vars: + workload_identity_pool_id: 'example-pool' + workload_identity_pool_provider_id: 'example-prvdr' - name: 'iam_workload_identity_pool_provider_oidc_basic' primary_resource_id: 'example' vars: diff --git a/mmv1/templates/terraform/examples/iam_workload_identity_pool_provider_github_actions.tf.tmpl b/mmv1/templates/terraform/examples/iam_workload_identity_pool_provider_github_actions.tf.tmpl new file mode 100644 index 000000000000..a891a7c59417 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workload_identity_pool_provider_github_actions.tf.tmpl @@ -0,0 +1,26 @@ +resource "google_iam_workload_identity_pool" "pool" { + workload_identity_pool_id = "{{index $.Vars "workload_identity_pool_id"}}" +} + +resource "google_iam_workload_identity_pool_provider" "{{$.PrimaryResourceId}}" { + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_provider_id = "{{index $.Vars "workload_identity_pool_provider_id"}}" + display_name = "Name of provider" + description = "GitHub Actions identity pool provider for automated test" + disabled = true + attribute_condition = < Date: Wed, 9 Oct 2024 16:41:49 +0100 Subject: [PATCH 11/19] Add missing test for `data.google_firebase_web_app_config`, clean up plugin-framework Firebase code a bit (#11927) --- ...google_firebase_android_app_config.go.tmpl | 18 ++++--- ...e_google_firebase_apple_app_config.go.tmpl | 18 ++++--- ...rce_google_firebase_web_app_config.go.tmpl | 28 +++++----- ...oogle_firebase_web_app_config_test.go.tmpl | 51 +++++++++++++++++++ 4 files changed, 87 insertions(+), 28 deletions(-) create mode 100644 
mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config_test.go.tmpl diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl index 261a700a2a10..cb8aef4a8a2d 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl @@ -75,6 +75,7 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Schema(ctx context.Context, r Computed: true, }, + // This is included for backwards compatibility with the original, SDK-implemented data source. "id": schema.StringAttribute{ Description: "Firebase Android App Config identifier", MarkdownDescription: "Firebase Android App Config identifier", @@ -116,25 +117,26 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req return } - d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) - - client := firebase.NewProjectsAndroidAppsService(d.client) - // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) if resp.Diagnostics.HasError() { return } + // Use provider_meta to set User-Agent + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } - appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) - data.Id = types.StringValue(appName) - clientResp, err := client.GetConfig(appName).Do() + + // GET Request + service := firebase.NewProjectsAndroidAppsService(d.client) + appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) + clientResp, err := service.GetConfig(appName).Do() if err != nil { fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { @@ -144,6 +146,8 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req tflog.Trace(ctx, "read firebase android app config data source") + // Put data in model + data.Id = types.StringValue(appName) data.ConfigFilename = types.StringValue(clientResp.ConfigFilename) data.ConfigFileContents = types.StringValue(clientResp.ConfigFileContents) diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl index f7f5b137fafb..69d1da3451ea 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl @@ -75,6 +75,7 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Schema(ctx context.Context, req Computed: true, }, + // This is included for backwards compatibility with the original, SDK-implemented data source. 
"id": schema.StringAttribute{ Description: "Firebase Apple App Config identifier", MarkdownDescription: "Firebase Apple App Config identifier", @@ -116,25 +117,24 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req d return } - d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) - - client := firebase.NewProjectsIosAppsService(d.client) - // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) if resp.Diagnostics.HasError() { return } + // Use provider_meta to set User-Agent + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } + // GET Request + service := firebase.NewProjectsIosAppsService(d.client) appName := fmt.Sprintf("projects/%s/iosApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) - data.Id = types.StringValue(appName) - - clientResp, err := client.GetConfig(appName).Do() + clientResp, err := service.GetConfig(appName).Do() if err != nil { fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { @@ -144,10 +144,12 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req d tflog.Trace(ctx, "read firebase apple app config data source") + // Put data in model + data.Id = types.StringValue(appName) data.ConfigFilename = types.StringValue(clientResp.ConfigFilename) data.ConfigFileContents = types.StringValue(clientResp.ConfigFileContents) // Save data into Terraform state resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl index 7479139af4f4..8782d08fdbc7 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl @@ -87,17 +87,17 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Schema(ctx context.Context, req d }, "location_id": schema.StringAttribute{ - Description: "The ID of the project's default GCP resource location. The location is one of the available GCP resource locations. " + - "This field is omitted if the default GCP resource location has not been finalized yet. To set your project's " + + Description: "The ID of the project's default GCP resource location. The location is one of the available GCP resource locations. " + + "This field is omitted if the default GCP resource location has not been finalized yet. To set your project's " + "default GCP resource location, call defaultLocation.finalize after you add Firebase services to your project.", MarkdownDescription: "The ID of the project's default GCP resource location. The location is one of the available GCP resource locations. " + - "This field is omitted if the default GCP resource location has not been finalized yet. To set your project's " + + "This field is omitted if the default GCP resource location has not been finalized yet. 
To set your project's " + "default GCP resource location, call defaultLocation.finalize after you add Firebase services to your project.", - Computed: true, + Computed: true, }, "measurement_id": schema.StringAttribute{ - Description: "The unique Google-assigned identifier of the Google Analytics web stream associated with the Firebase Web App. " + + Description: "The unique Google-assigned identifier of the Google Analytics web stream associated with the Firebase Web App. " + "Firebase SDKs use this ID to interact with Google Analytics APIs. " + "This field is only present if the App is linked to a web stream in a Google Analytics App + Web property. " + "Learn more about this ID and Google Analytics web streams in the Analytics documentation. " + @@ -107,7 +107,7 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Schema(ctx context.Context, req d "This field is only present if the App is linked to a web stream in a Google Analytics App + Web property. " + "Learn more about this ID and Google Analytics web streams in the Analytics documentation. " + "To generate a measurementId and link the Web App with a Google Analytics web stream, call projects.addGoogleAnalytics.", - Computed: true, + Computed: true, }, "messaging_sender_id": schema.StringAttribute{ @@ -122,6 +122,7 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Schema(ctx context.Context, req d Computed: true, }, + // This is included for backwards compatibility with the original, SDK-implemented data source. "id": schema.StringAttribute{ Description: "Firebase Web App Config identifier", MarkdownDescription: "Firebase Web App Config identifier", @@ -163,25 +164,24 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req dat return } - d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) - - client := firebase.NewProjectsWebAppsService(d.client) - // Read Terraform configuration data into the model resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
if resp.Diagnostics.HasError() { return } + // Use provider_meta to set User-Agent + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) if resp.Diagnostics.HasError() { return } + // GET Request + service := firebase.NewProjectsWebAppsService(d.client) appName := fmt.Sprintf("projects/%s/webApps/%s/config", data.Project.ValueString(), data.WebAppId.ValueString()) - data.Id = data.WebAppId - - clientResp, err := client.GetConfig(appName).Do() + clientResp, err := service.GetConfig(appName).Do() if err != nil { fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { @@ -191,6 +191,8 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req dat tflog.Trace(ctx, "read firebase web app config data source") + // Put data in model + data.Id = data.WebAppId data.ApiKey = types.StringValue(clientResp.ApiKey) data.AuthDomain = types.StringValue(clientResp.AuthDomain) data.DatabaseUrl = types.StringValue(clientResp.DatabaseURL) diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config_test.go.tmpl new file mode 100644 index 000000000000..6358866e69ed --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config_test.go.tmpl @@ -0,0 +1,51 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccDataSourceGoogleFirebaseWebAppConfig(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14158 + acctest.SkipIfVcr(t) + t.Parallel() + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "display_name": "tf_test Display Name WebApp DataSource", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + CheckDestroy: testAccCheckFirebaseWebAppDestroyProducer(t), + Steps: []resource.TestStep{ + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Config: testAccDataSourceGoogleFirebaseWebAppConfig(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.my_app_config", "api_key"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.my_app_config", "auth_domain"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.my_app_config", "database_url"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.my_app_config", "storage_bucket"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseWebAppConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_web_app" "my_app" { + project = "%{project_id}" + display_name = "%{display_name}" +} + +data "google_firebase_web_app_config" "my_app_config" { + web_app_id = google_firebase_web_app.my_app.app_id +} +`, context) +} +{{- end }} \ No newline at end of file From 162ba7ca6d7650a2839f8be9eae696347cd47ecf Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Norbert=20Kami=C5=84ski?= Date: Wed, 9 Oct 2024 18:07:54 +0200 Subject: [PATCH 12/19] Clarify data source attached_disk documentation (#11956) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Norbert Kamiński --- .../terraform/website/docs/d/compute_instance.html.markdown | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/website/docs/d/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_instance.html.markdown index 743fc9f1aae0..b6c32b263ff2 100644 --- a/mmv1/third_party/terraform/website/docs/d/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/compute_instance.html.markdown @@ -139,7 +139,7 @@ The following arguments are supported: The `attached_disk` block supports: -* `source` - The name or self_link of the disk attached to this instance. +* `source` - The self_link of the disk attached to this instance. * `device_name` - Name with which the attached disk is accessible under `/dev/disk/by-id/` From 34be5c13974ef948af01f5ed654e7646d2688e44 Mon Sep 17 00:00:00 2001 From: Arnab Dasgupta <150280802+arnabadg-google@users.noreply.github.com> Date: Wed, 9 Oct 2024 18:56:08 +0200 Subject: [PATCH 13/19] Allow sending `false` for `enabled` field under IAP message for resource google_compute_backend_service and resource google_compute_region_backend_service (#11907) --- mmv1/products/compute/BackendService.yaml | 1 + .../compute/RegionBackendService.yaml | 1 + ...ource_compute_backend_service_test.go.tmpl | 67 +++++++++++++++++ ...ompute_region_backend_service_test.go.tmpl | 71 +++++++++++++++++++ 4 files changed, 140 insertions(+) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 5bb6764e9975..931ccc5cc50f 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -754,6 +754,7 @@ properties: type: Boolean description: Whether the serving infrastructure will authenticate and authorize all incoming requests. required: true + send_empty_value: true - name: 'oauth2ClientId' type: String description: | diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 76f9dbf223be..316921eaa92e 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -759,6 +759,7 @@ properties: type: Boolean description: Whether the serving infrastructure will authenticate and authorize all incoming requests. 
required: true + send_empty_value: true - name: 'oauth2ClientId' type: String description: | diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index c918404dfa9e..229198175489 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -146,6 +146,39 @@ func TestAccComputeBackendService_withBackendAndIAP(t *testing.T) { }) } +func TestAccComputeBackendService_updateIAPEnabled(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_withIAPEnabled( + serviceName, 10), + }, + { + ResourceName: "google_compute_backend_service.lipsum", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeBackendService_withIAPDisabled( + serviceName, 10), + }, + { + ResourceName: "google_compute_backend_service.lipsum", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret"}, + }, + }, + }) +} + func TestAccComputeBackendService_updatePreservesOptionalParameters(t *testing.T) { t.Parallel() @@ -1370,6 +1403,40 @@ resource "google_compute_http_health_check" "default" { `, serviceName, timeout, igName, itName, checkName) } +func testAccComputeBackendService_withIAPEnabled( + serviceName string, timeout int64) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "lipsum" { + name = "%s" + description = "Hello World 1234" + port_name = "http" + protocol = "HTTP" + timeout_sec = %v + + iap { + enabled = true + } +} +`, serviceName, timeout) +} + +func testAccComputeBackendService_withIAPDisabled( + serviceName string, timeout int64) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "lipsum" { + name = "%s" + description = "Hello World 1234" + port_name = "http" + protocol = "HTTP" + timeout_sec = %v + + iap { + enabled = false + } +} +`, serviceName, timeout) +} + func testAccComputeBackendService_withSessionAffinity(serviceName, checkName, description, affinityName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index 9d3d9e1bfbce..74ceab88911a 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -283,6 +283,39 @@ func TestAccComputeRegionBackendService_withBackendAndIAP(t *testing.T) { }) } +func TestAccComputeRegionBackendService_updateIAPEnabled(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("foo-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionBackendService_withIAPEnabled( + serviceName, 10), + }, + { + ResourceName: "google_compute_region_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRegionBackendService_withIAPDisabled( + serviceName, 10), + }, + { + ResourceName: "google_compute_region_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"iap.0.oauth2_client_secret"}, + }, + }, + }) +} + func TestAccComputeRegionBackendService_UDPFailOverPolicyUpdate(t *testing.T) { t.Parallel() @@ -1021,6 +1054,44 @@ resource "google_compute_health_check" "zero" { `, serviceName, drainingTimeout, checkName) } +func testAccComputeRegionBackendService_withIAPEnabled( + serviceName string, timeout int64) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + name = "%s" + description = "Hello World 1234" + port_name = "http" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + region = "us-central1" + timeout_sec = %v + + iap { + enabled = true + } +} +`, serviceName, timeout) +} + +func testAccComputeRegionBackendService_withIAPDisabled( + serviceName string, timeout int64) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + name = "%s" + description = "Hello World 1234" + port_name = "http" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + region = "us-central1" + timeout_sec = %v + + iap { + enabled = false + } +} +`, serviceName, timeout) +} + func testAccComputeRegionBackendService_ilbBasicwithIAP(serviceName, checkName string) string { return fmt.Sprintf(` resource "google_compute_region_backend_service" "foobar" { From 765b5e376e30ce8665acd15d7e61b8289c8ac504 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Wed, 9 Oct 2024 12:08:52 -0700 Subject: [PATCH 14/19] Update TestAccNetworkConnectivitySpoke_networkConnectivitySpokeVpnTunnelBasicExample skip reason (#11952) --- mmv1/products/networkconnectivity/Spoke.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/networkconnectivity/Spoke.yaml b/mmv1/products/networkconnectivity/Spoke.yaml index a2ffef94adff..05152cff4ec2 100644 --- a/mmv1/products/networkconnectivity/Spoke.yaml +++ b/mmv1/products/networkconnectivity/Spoke.yaml @@ -75,7 +75,7 @@ examples: hub_name: 'basic-hub1' vpn_tunnel_1_spoke_name: 'vpn-tunnel-1-spoke' vpn_tunnel_2_spoke_name: 'vpn-tunnel-2-spoke' - # Failing in replaying mode (https://github.com/hashicorp/terraform-provider-google/issues/19592) + # Skip due to multiple fine-grained resources skip_vcr: true - name: 'network_connectivity_spoke_interconnect_attachment_basic' primary_resource_id: 'primary' From 577cf5d47b998e9a936d76147d38b987aca39128 Mon Sep 17 00:00:00 2001 From: tulika-aakriti Date: Thu, 10 Oct 2024 00:52:31 +0530 Subject: [PATCH 15/19] Feat: Add google_oracle_database_cloud_exadata_infrastructure resource (#11879) Co-authored-by: Thomas Rodgers --- .../CloudExadataInfrastructure.yaml | 296 ++++++++++++++++++ mmv1/products/oracledatabase/product.yaml | 22 ++ ...cloud_exadata_infrastructure_basic.tf.tmpl | 11 + ..._cloud_exadata_infrastructure_full.tf.tmpl | 30 ++ .../components/inputs/services_beta.kt | 5 + .../components/inputs/services_ga.kt | 5 + 6 files changed, 369 insertions(+) create mode 100644 mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml create mode 
100644 mmv1/products/oracledatabase/product.yaml create mode 100644 mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_full.tf.tmpl diff --git a/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml new file mode 100644 index 000000000000..cfcf4923f23e --- /dev/null +++ b/mmv1/products/oracledatabase/CloudExadataInfrastructure.yaml @@ -0,0 +1,296 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +--- +name: 'CloudExadataInfrastructure' +description: A CloudExadataInfrastructure resource. +references: + guides: + 'Create Exadata Infrastructure instances': 'https://cloud.google.com/oracle/database/docs/create-instances' + api: 'https://cloud.google.com/oracle/database/docs/reference/rest/v1/projects.locations.cloudExadataInfrastructures' +docs: +id_format: 'projects/{{project}}/locations/{{location}}/cloudExadataInfrastructures/{{cloud_exadata_infrastructure_id}}' +base_url: 'projects/{{project}}/locations/{{location}}/cloudExadataInfrastructures' +self_link: 'projects/{{project}}/locations/{{location}}/cloudExadataInfrastructures/{{cloud_exadata_infrastructure_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/cloudExadataInfrastructures?cloudExadataInfrastructureId={{cloud_exadata_infrastructure_id}}' +immutable: true +import_format: + - 'projects/{{project}}/locations/{{location}}/cloudExadataInfrastructures/{{cloud_exadata_infrastructure_id}}' +timeouts: + insert_minutes: 240 + update_minutes: 120 + delete_minutes: 120 +autogen_async: true +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + path: 'name' + wait_ms: 1000 + timeouts: + insert_minutes: 240 + update_minutes: 120 + delete_minutes: 120 + result: + path: 'response' + resource_inside_response: true + error: + path: 'error' + message: 'message' +custom_code: +examples: + - name: 'oracledatabase_cloud_exadata_infrastructure_basic' + primary_resource_id: 'my-cloud-exadata' + vars: + project: 'my-project' + test_vars_overrides: + 'project': '"oci-terraform-testing"' + - name: 'oracledatabase_cloud_exadata_infrastructure_full' + primary_resource_id: 'my-cloud-exadata' + vars: + project: 'my-project' + test_vars_overrides: + 'project': '"oci-terraform-testing"' +parameters: + - name: 'location' + type: String + description: 'Resource ID segment making up resource `name`. See documentation + for resource type `oracledatabase.googleapis.com/DbServer`. ' + url_param_only: true + required: true + immutable: true + - name: 'cloudExadataInfrastructureId' + type: String + description: "The ID of the Exadata Infrastructure to create. This value + is restricted\nto (^[a-z]([a-z0-9-]{0,61}[a-z0-9])?$) and must be a maximum of + 63\ncharacters in length. The value must start with a letter and end with\na letter + or a number. 
" + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: "Identifier. The name of the Exadata Infrastructure resource with the + following format:\nprojects/{project}/locations/{region}/cloudExadataInfrastructures/{cloud_exadata_infrastructure} " + output: true + - name: 'displayName' + type: String + description: 'User friendly name for this resource. ' + - name: 'gcpOracleZone' + type: String + description: 'GCP location where Oracle Exadata is hosted. ' + default_from_api: true + - name: 'entitlementId' + type: String + description: "Entitlement ID of the private offer against which this + infrastructure\nresource is provisioned. " + output: true + - name: 'properties' + type: NestedObject + description: 'Various properties of Exadata Infrastructure. ' + properties: + - name: 'ocid' + type: String + description: "OCID of created infra.\nhttps://docs.oracle.com/en-us/iaas/Content/General/Concepts/identifiers.htm#Oracle " + output: true + - name: 'computeCount' + type: Integer + description: 'The number of compute servers for the Exadata Infrastructure. ' + - name: 'storageCount' + type: Integer + description: 'The number of Cloud Exadata storage servers for the Exadata + Infrastructure. ' + - name: 'totalStorageSizeGb' + type: Integer + description: "The total storage allocated to the Exadata Infrastructure\nresource, + in gigabytes (GB). " + default_from_api: true + - name: 'availableStorageSizeGb' + type: Integer + description: "The available storage can be allocated to the Exadata + Infrastructure\nresource, in gigabytes (GB). " + output: true + - name: 'maintenanceWindow' + type: NestedObject + description: "Maintenance window as defined by Oracle.\nhttps://docs.oracle.com/en-us/iaas/api/#/en/database/20160918/datatypes/MaintenanceWindow " + default_from_api: true + properties: + - name: 'preference' + type: String + description: "The maintenance window scheduling preference. \n + Possible values:\n MAINTENANCE_WINDOW_PREFERENCE_UNSPECIFIED\nCUSTOM_PREFERENCE\nNO_PREFERENCE" + default_from_api: true + - name: 'months' + type: Array + description: 'Months during the year when maintenance should be performed. ' + default_from_api: true + item_type: + type: String + - name: 'weeksOfMonth' + type: Array + description: "Weeks during the month when maintenance should be performed. + Weeks start on\nthe 1st, 8th, 15th, and 22nd days of the month, and have a + duration of 7\ndays. Weeks start and end based on calendar dates, not days + of the week. " + default_from_api: true + item_type: + type: Integer + - name: 'daysOfWeek' + type: Array + description: 'Days during the week when maintenance should be performed. ' + default_from_api: true + item_type: + type: String + - name: 'hoursOfDay' + type: Array + description: "The window of hours during the day when maintenance + should be performed.\nThe window is a 4 hour slot. Valid values are:\n 0 + - represents time slot 0:00 - 3:59 UTC\n 4 - represents time slot 4:00 - + 7:59 UTC\n 8 - represents time slot 8:00 - 11:59 UTC\n 12 - represents time + slot 12:00 - 15:59 UTC\n 16 - represents time slot 16:00 - 19:59 UTC\n 20 + - represents time slot 20:00 - 23:59 UTC " + default_from_api: true + item_type: + type: Integer + - name: 'leadTimeWeek' + type: Integer + description: "Lead time window allows user to set a lead time to prepare + for a down time.\nThe lead time is in weeks and valid value is between 1 to + 4. 
" + default_from_api: true + - name: 'patchingMode' + type: String + description: "Cloud CloudExadataInfrastructure node patching method, + either \"ROLLING\"\n or \"NONROLLING\". Default value is ROLLING. \n + Possible values:\n PATCHING_MODE_UNSPECIFIED\nROLLING\nNON_ROLLING" + default_from_api: true + - name: 'customActionTimeoutMins' + type: Integer + description: "Determines the amount of time the system will wait before + the start of each\ndatabase server patching operation. Custom action timeout + is in minutes and\nvalid value is between 15 to 120 (inclusive). " + default_from_api: true + - name: 'isCustomActionTimeoutEnabled' + type: Boolean + description: "If true, enables the configuration of a custom action + timeout (waiting\nperiod) between database server patching operations. " + default_from_api: true + - name: 'state' + type: String + description: "The current lifecycle state of the Exadata Infrastructure. + \n Possible values:\n STATE_UNSPECIFIED\nPROVISIONING\nAVAILABLE\nUPDATING\nTERMINATING\nTERMINATED\nFAILED\nMAINTENANCE_IN_PROGRESS" + output: true + - name: 'shape' + type: String + description: "The shape of the Exadata Infrastructure. The shape determines + the\namount of CPU, storage, and memory resources allocated to the instance. " + required: true + - name: 'ociUrl' + type: String + description: 'Deep link to the OCI console to view this resource. ' + output: true + - name: 'cpuCount' + type: Integer + description: 'The number of enabled CPU cores. ' + output: true + - name: 'maxCpuCount' + type: Integer + description: 'The total number of CPU cores available. ' + output: true + - name: 'memorySizeGb' + type: Integer + description: 'The memory allocated in GBs. ' + output: true + - name: 'maxMemoryGb' + type: Integer + description: 'The total memory available in GBs. ' + output: true + - name: 'dbNodeStorageSizeGb' + type: Integer + description: 'The local node storage allocated in GBs. ' + output: true + - name: 'maxDbNodeStorageSizeGb' + type: Integer + description: 'The total local node storage available in GBs. ' + output: true + - name: 'dataStorageSizeTb' + type: Double + description: 'Size, in terabytes, of the DATA disk group. ' + output: true + - name: 'maxDataStorageTb' + type: Double + description: 'The total available DATA disk group size. ' + output: true + - name: 'activatedStorageCount' + type: Integer + description: "The requested number of additional storage servers + activated for the\nExadata Infrastructure. " + output: true + - name: 'additionalStorageCount' + type: Integer + description: "The requested number of additional storage servers + for the Exadata\nInfrastructure. " + output: true + - name: 'dbServerVersion' + type: String + description: "The software version of the database servers (dom0) + in the Exadata\nInfrastructure. " + output: true + - name: 'storageServerVersion' + type: String + description: "The software version of the storage servers (cells) + in the Exadata\nInfrastructure. " + output: true + - name: 'nextMaintenanceRunId' + type: String + description: 'The OCID of the next maintenance run. ' + output: true + - name: 'nextMaintenanceRunTime' + type: String + description: 'The time when the next maintenance run will occur. ' + output: true + - name: 'nextSecurityMaintenanceRunTime' + type: String + description: 'The time when the next security maintenance run will + occur. ' + output: true + - name: 'customerContacts' + type: Array + description: 'The list of customer contacts. 
' + item_type: + type: NestedObject + properties: + - name: 'email' + type: String + description: "The email address used by Oracle to send notifications + regarding databases\nand infrastructure. " + required: true + - name: 'monthlyStorageServerVersion' + type: String + description: "The monthly software version of the storage servers + (cells)\nin the Exadata Infrastructure. Example: 20.1.15 " + output: true + - name: 'monthlyDbServerVersion' + type: String + description: "The monthly software version of the database servers + (dom0)\nin the Exadata Infrastructure. Example: 20.1.15 " + output: true + - name: 'labels' + type: KeyValueLabels + description: 'Labels or tags associated with the resource. ' + - name: 'createTime' + type: String + description: 'The date and time that the Exadata Infrastructure was + created. ' + output: true diff --git a/mmv1/products/oracledatabase/product.yaml b/mmv1/products/oracledatabase/product.yaml new file mode 100644 index 000000000000..9356d79a4a2e --- /dev/null +++ b/mmv1/products/oracledatabase/product.yaml @@ -0,0 +1,22 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +--- +name: 'OracleDatabase' +display_name: 'OracleDatabase' +versions: + - name: 'ga' + base_url: 'https://oracledatabase.googleapis.com/v1/' + - name: 'beta' + base_url: 'https://oracledatabase.googleapis.com/v1/' +scopes: + - 'https://www.googleapis.com/auth/cloud-platform' diff --git a/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_basic.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_basic.tf.tmpl new file mode 100644 index 000000000000..21f914337cd8 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_basic.tf.tmpl @@ -0,0 +1,11 @@ +resource "google_oracle_database_cloud_exadata_infrastructure" "{{$.PrimaryResourceId}}"{ + display_name = "OFake exadata displayname" + cloud_exadata_infrastructure_id = "ofake-exadata" + location = "us-east4" + project = "{{index $.Vars "project"}}" + properties { + shape = "Exadata.X9M" + compute_count= "2" + storage_count= "3" + } +} diff --git a/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_full.tf.tmpl b/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_full.tf.tmpl new file mode 100644 index 000000000000..965d13f08281 --- /dev/null +++ b/mmv1/templates/terraform/examples/oracledatabase_cloud_exadata_infrastructure_full.tf.tmpl @@ -0,0 +1,30 @@ +resource "google_oracle_database_cloud_exadata_infrastructure" "{{$.PrimaryResourceId}}"{ + display_name = "OFake exadata displayname" + cloud_exadata_infrastructure_id = "ofake-exadata-id" + location = "us-east4" + project = "{{index $.Vars "project"}}" + gcp_oracle_zone = "us-east4-b-r1" + properties { + shape = "Exadata.X9M" + compute_count= "2" + storage_count= "3" + customer_contacts { + email = "xyz@example.com" + } + maintenance_window { + custom_action_timeout_mins = "20" + days_of_week = 
["SUNDAY"] + hours_of_day = [4] + is_custom_action_timeout_enabled = "0" + lead_time_week = "1" + months = ["JANUARY","APRIL","MAY","OCTOBER"] + patching_mode = "ROLLING" + preference = "CUSTOM_PREFERENCE" + weeks_of_month = [4] + } + total_storage_size_gb = "196608" + } + labels = { + "label-one" = "value-one" + } +} diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 26258f910ad3..5835aa5bf549 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -546,6 +546,11 @@ var ServicesListBeta = mapOf( "displayName" to "Notebooks", "path" to "./google-beta/services/notebooks" ), + "oracledatabase" to mapOf( + "name" to "oracledatabase", + "displayName" to "OracleDatabase", + "path" to "./google-beta/services/oracledatabase" + ), "orgpolicy" to mapOf( "name" to "orgpolicy", "displayName" to "Orgpolicy", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 1cfbab121519..80a77899b4dd 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -541,6 +541,11 @@ var ServicesListGa = mapOf( "displayName" to "Notebooks", "path" to "./google/services/notebooks" ), + "oracledatabase" to mapOf( + "name" to "oracledatabase", + "displayName" to "OracleDatabase", + "path" to "./google/services/oracledatabase" + ), "orgpolicy" to mapOf( "name" to "orgpolicy", "displayName" to "Orgpolicy", From fc795a1e76f8b78be3b9553066856d67a20d10ef Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Wed, 9 Oct 2024 13:47:32 -0700 Subject: [PATCH 16/19] Increase timeout time to 20 minutes for cloudfunctions function (#11949) --- .../cloudfunctions/resource_cloudfunctions_function.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 0be8a90f4676..afb8458f2974 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -136,10 +136,10 @@ func ResourceCloudFunctionsFunction() *schema.Resource { }, Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(5 * time.Minute), - Read: schema.DefaultTimeout(5 * time.Minute), - Update: schema.DefaultTimeout(5 * time.Minute), - Delete: schema.DefaultTimeout(5 * time.Minute), + Create: schema.DefaultTimeout(20 * time.Minute), + Read: schema.DefaultTimeout(20 * time.Minute), + Update: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), }, CustomizeDiff: customdiff.All( From a3338d784fdab9a825be2ae7ba033d7349a9553e Mon Sep 17 00:00:00 2001 From: Philip Sabri Date: Wed, 9 Oct 2024 22:56:49 +0200 Subject: [PATCH 17/19] gkehub: remove `google_gke_hub_membership` deprecated `description` field from example (#11808) --- .../examples/gkehub_feature_multi_cluster_ingress.tf.tmpl | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/templates/terraform/examples/gkehub_feature_multi_cluster_ingress.tf.tmpl 
b/mmv1/templates/terraform/examples/gkehub_feature_multi_cluster_ingress.tf.tmpl index 3a286936114d..dcd8b57517aa 100644 --- a/mmv1/templates/terraform/examples/gkehub_feature_multi_cluster_ingress.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkehub_feature_multi_cluster_ingress.tf.tmpl @@ -11,7 +11,6 @@ resource "google_gke_hub_membership" "membership" { resource_link = "//container.googleapis.com/${google_container_cluster.cluster.id}" } } - description = "Membership" } resource "google_gke_hub_feature" "feature" { From db994558105fe3703175e7d255a24941c1afa3bb Mon Sep 17 00:00:00 2001 From: Aleksandr Averbukh Date: Wed, 9 Oct 2024 23:19:10 +0200 Subject: [PATCH 18/19] Add logging_settings and speech_settings to dialogflow_cx_agent/flow (#11940) --- mmv1/products/dialogflowcx/Agent.yaml | 52 +++++++++++++++++++ mmv1/products/dialogflowcx/Flow.yaml | 49 +++++++++++++++++ .../examples/dialogflowcx_agent_full.tf.tmpl | 19 ++++++- .../examples/dialogflowcx_flow_full.tf.tmpl | 17 +++++- .../resource_dialogflowcx_agent_test.go.tmpl | 25 +++++++-- .../resource_dialogflowcx_flow_test.go | 29 ++++++++--- 6 files changed, 177 insertions(+), 14 deletions(-) diff --git a/mmv1/products/dialogflowcx/Agent.yaml b/mmv1/products/dialogflowcx/Agent.yaml index 85270a9abc33..8d3f2b006552 100644 --- a/mmv1/products/dialogflowcx/Agent.yaml +++ b/mmv1/products/dialogflowcx/Agent.yaml @@ -40,6 +40,8 @@ examples: bucket_name: 'dialogflowcx-bucket' ignore_read_extra: - 'git_integration_settings.0.github_settings.0.access_token' + - 'enable_stackdriver_logging' + - 'advanced_settings.0.logging_settings' parameters: properties: - name: 'name' @@ -113,8 +115,11 @@ properties: Name of the SecuritySettings reference for the agent. Format: projects//locations//securitySettings/. - name: 'enableStackdriverLogging' type: Boolean + # Ignore read, field is deprecated and conflicts with advanced_settings.logging_settings.enable_stackdriver_logging when it's set + ignore_read: true description: | Determines whether this agent should log conversation queries. + deprecation_message: '`enable_stackdriver_logging` is deprecated and will be removed in a future major release. Please use `advanced_settings.logging_settings.enable_stackdriver_logging`instead.' - name: 'enableSpellCorrection' type: Boolean description: | @@ -138,6 +143,33 @@ properties: description: | The Google Cloud Storage URI for the exported objects. Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. Format: gs://bucket/object-name-or-prefix + - name: 'speechSettings' + type: NestedObject + description: | + Settings for speech to text detection. Exposed at the following levels: + * Agent level + * Flow level + * Page level + * Parameter level + properties: + - name: 'endpointerSensitivity' + type: Integer + description: | + Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. + - name: 'noSpeechTimeout' + type: String + description: | + Timeout before detecting no speech. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". + - name: 'useTimeoutBasedEndpointing' + type: Boolean + description: | + Use timeout based endpointing, interpreting endpointer sensitivy as seconds of timeout value. + - name: 'models' + type: KeyValuePairs + description: | + Mapping from language to Speech-to-Text model. The mapped Speech-to-Text model will be selected for requests from its corresponding language. 
For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models). + An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**. - name: 'dtmfSettings' type: NestedObject description: | @@ -159,6 +191,26 @@ properties: type: String description: | The digit that terminates a DTMF digit sequence. + - name: 'loggingSettings' + type: NestedObject + # Due to inconsistent API behaviour http://b/303056144, ignore read can be removed once fixed + ignore_read: true + description: | + Settings for logging. Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: + * Agent level + properties: + - name: 'enableStackdriverLogging' + type: Boolean + description: | + Enables Google Cloud Logging. + - name: 'enableInteractionLogging' + type: Boolean + description: | + Enables DF Interaction logging. + - name: 'enableConsentBasedRedaction' + type: Boolean + description: | + Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. - name: 'gitIntegrationSettings' type: NestedObject description: | diff --git a/mmv1/products/dialogflowcx/Flow.yaml b/mmv1/products/dialogflowcx/Flow.yaml index 22c11a4ac68b..c08b57be66bf 100644 --- a/mmv1/products/dialogflowcx/Flow.yaml +++ b/mmv1/products/dialogflowcx/Flow.yaml @@ -42,6 +42,8 @@ examples: primary_resource_id: 'basic_flow' vars: agent_name: 'dialogflowcx-agent' + ignore_read_extra: + - 'advanced_settings.0.logging_settings' - name: 'dialogflowcx_flow_full' primary_resource_id: 'basic_flow' vars: @@ -564,6 +566,33 @@ properties: description: | The Google Cloud Storage URI for the exported objects. Whether a full object name, or just a prefix, its usage depends on the Dialogflow operation. Format: gs://bucket/object-name-or-prefix + - name: 'speechSettings' + type: NestedObject + description: | + Settings for speech to text detection. Exposed at the following levels: + * Agent level + * Flow level + * Page level + * Parameter level + properties: + - name: 'endpointerSensitivity' + type: Integer + description: | + Sensitivity of the speech model that detects the end of speech. Scale from 0 to 100. + - name: 'noSpeechTimeout' + type: String + description: | + Timeout before detecting no speech. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". + - name: 'useTimeoutBasedEndpointing' + type: Boolean + description: | + Use timeout based endpointing, interpreting endpointer sensitivy as seconds of timeout value. + - name: 'models' + type: KeyValuePairs + description: | + Mapping from language to Speech-to-Text model. The mapped Speech-to-Text model will be selected for requests from its corresponding language. For more information, see [Speech models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models). + An object containing a list of **"key": value** pairs. Example: **{ "name": "wrench", "mass": "1.3kg", "count": "3" }**. - name: 'dtmfSettings' type: NestedObject description: | @@ -585,3 +614,23 @@ properties: type: String description: | The digit that terminates a DTMF digit sequence. + - name: 'loggingSettings' + type: NestedObject + ignore_read: true + # Ignore read as API does not return loggingSettings back, only accepts in the /create/update API call + description: | + Settings for logging. 
Settings for Dialogflow History, Contact Center messages, StackDriver logs, and speech logging. Exposed at the following levels: + * Agent level + properties: + - name: 'enableStackdriverLogging' + type: Boolean + description: | + Enables Google Cloud Logging. + - name: 'enableInteractionLogging' + type: Boolean + description: | + Enables DF Interaction logging. + - name: 'enableConsentBasedRedaction' + type: Boolean + description: | + Enables consent-based end-user input redaction, if true, a pre-defined session parameter **$session.params.conversation-redaction** will be used to determine if the utterance should be redacted. diff --git a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl index c58d83218701..4e76cd8876e5 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_agent_full.tf.tmpl @@ -21,11 +21,26 @@ resource "google_dialogflow_cx_agent" "{{$.PrimaryResourceId}}" { audio_export_gcs_destination { uri = "${google_storage_bucket.bucket.url}/prefix-" } + speech_settings { + endpointer_sensitivity = 30 + no_speech_timeout = "3.500s" + use_timeout_based_endpointing = true + models = { + name : "wrench" + mass : "1.3kg" + count : "3" + } + } dtmf_settings { - enabled = true - max_digits = 1 + enabled = true + max_digits = 1 finish_digit = "#" } + logging_settings { + enable_stackdriver_logging = true + enable_interaction_logging = true + enable_consent_based_redaction = true + } } git_integration_settings { github_settings { diff --git a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl index 0ed8216c4812..774be2521980 100644 --- a/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/dialogflowcx_flow_full.tf.tmpl @@ -273,10 +273,25 @@ resource "google_dialogflow_cx_flow" "{{$.PrimaryResourceId}}" { audio_export_gcs_destination { uri = "${google_storage_bucket.bucket.url}/prefix-" } + speech_settings { + endpointer_sensitivity = 30 + no_speech_timeout = "3.500s" + use_timeout_based_endpointing = true + models = { + name : "wrench" + mass : "1.3kg" + count : "3" + } + } dtmf_settings { enabled = true max_digits = 1 finish_digit = "#" } + logging_settings { + enable_stackdriver_logging = true + enable_interaction_logging = true + enable_consent_based_redaction = true + } } -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl index d6d7f1b16f17..9e4965209503 100644 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_agent_test.go.tmpl @@ -29,7 +29,7 @@ func TestAccDialogflowCXAgent_update(t *testing.T) { ResourceName: "google_dialogflow_cx_agent.foobar", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token"}, + ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token", "enable_stackdriver_logging", "advanced_settings.0.logging_settings"}, }, { Config: testAccDialogflowCXAgent_full(context), @@ -38,7 +38,7 @@ func TestAccDialogflowCXAgent_update(t *testing.T) { ResourceName: 
"google_dialogflow_cx_agent.foobar", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token"}, + ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token", "enable_stackdriver_logging", "advanced_settings.0.logging_settings"}, }, { Config: testAccDialogflowCXAgent_removeSettings(context), @@ -47,7 +47,7 @@ func TestAccDialogflowCXAgent_update(t *testing.T) { ResourceName: "google_dialogflow_cx_agent.foobar", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token"}, + ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token", "enable_stackdriver_logging", "advanced_settings.0.logging_settings"}, }, }, }) @@ -92,11 +92,26 @@ func testAccDialogflowCXAgent_full(context map[string]interface{}) string { audio_export_gcs_destination { uri = "${google_storage_bucket.bucket.url}/prefix-" } + speech_settings { + endpointer_sensitivity = 30 + no_speech_timeout = "3.500s" + use_timeout_based_endpointing = true + models = { + name : "wrench" + mass : "1.3kg" + count : "3" + } + } dtmf_settings { - enabled = true - max_digits = 1 + enabled = true + max_digits = 1 finish_digit = "#" } + logging_settings { + enable_stackdriver_logging = true + enable_interaction_logging = true + enable_consent_based_redaction = true + } } git_integration_settings { github_settings { diff --git a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go index e4a7b516aa15..b3745305614e 100644 --- a/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go +++ b/mmv1/third_party/terraform/services/dialogflowcx/resource_dialogflowcx_flow_test.go @@ -25,17 +25,19 @@ func TestAccDialogflowCXFlow_update(t *testing.T) { Config: testAccDialogflowCXFlow_basic(context), }, { - ResourceName: "google_dialogflow_cx_flow.my_flow", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_dialogflow_cx_flow.my_flow", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"advanced_settings.0.logging_settings"}, }, { Config: testAccDialogflowCXFlow_full(context), }, { - ResourceName: "google_dialogflow_cx_flow.my_flow", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_dialogflow_cx_flow.my_flow", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"advanced_settings.0.logging_settings"}, }, }, }) @@ -341,11 +343,26 @@ func testAccDialogflowCXFlow_full(context map[string]interface{}) string { audio_export_gcs_destination { uri = "${google_storage_bucket.bucket.url}/prefix-" } + speech_settings { + endpointer_sensitivity = 30 + no_speech_timeout = "3.500s" + use_timeout_based_endpointing = true + models = { + name : "wrench" + mass : "1.3kg" + count : "3" + } + } dtmf_settings { enabled = true max_digits = 1 finish_digit = "#" } + logging_settings { + enable_stackdriver_logging = true + enable_interaction_logging = true + enable_consent_based_redaction = true + } } } `, context) From 54e6e59f3f51bd6a4013b864655dab4b3f7626ec Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Wed, 9 Oct 2024 14:55:17 -0700 Subject: [PATCH 19/19] Break best practices pages out into standalone pages (#11962) Co-authored-by: Stephen Lewis (Burrows) --- docs/content/best-practices/_index.md | 191 
------------------ .../best-practices/deletion-behaviors.md | 35 ++++ .../best-practices/immutable-fields.md | 25 +++ .../best-practices/labels-and-annotations.md | 158 +++++++++++++++ 4 files changed, 218 insertions(+), 191 deletions(-) create mode 100644 docs/content/best-practices/deletion-behaviors.md create mode 100644 docs/content/best-practices/immutable-fields.md create mode 100644 docs/content/best-practices/labels-and-annotations.md diff --git a/docs/content/best-practices/_index.md b/docs/content/best-practices/_index.md index 86683b8c510a..19c18f2d9d98 100644 --- a/docs/content/best-practices/_index.md +++ b/docs/content/best-practices/_index.md @@ -1,195 +1,4 @@ --- title: "Best practices" weight: 25 -aliases: - - /docs/best-practices --- - -# Best practices - -The following is a list of best practices that contributions are expected to follow in order to ensure a consistent UX for the Terraform provider for Google Cloud internally and also compared to other Terraform providers. - -## ForceNew - -[`ForceNew`](https://developer.hashicorp.com/terraform/intro#how-does-terraform-work) in a Terraform resource schema attribute that indicates that a field is immutable – that is, that a change to the field requires the resource to be destroyed and recreated. - -This is necessary and required for cases where a field can't be updated in-place, so that [Terraform's core workflow](https://developer.hashicorp.com/terraform/intro#how-does-terraform-work) of aligning real infrastructure with configuration can be achieved. If a field or resource can never be updated in-place and is not marked with `ForceNew`, that is considered a bug in the provider. - -Some fields or resources may be possible to update in place, but only under specific conditions. In these cases, you can treat the field as updatable - that is, do not mark it as ForceNew; instead, implement standard update functionality. Then, call `diff.ForceNew` inside a [`CustomizeDiff`](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/customizing-differences) if the appropriate conditions to allow update in place are not met. Any `CustomizeDiff` function like this must be thoroughly unit tested. Making a field conditionally updatable like this is considered a good and useful enhancement in cases where recreation is costly and conditional updates do not introduce undue complexity. - -In complex cases, it is better to mark the field `ForceNew` to ensure that users can apply their configurations successfully. - -### Mitigating data loss risk via deletion_protection {#deletion_protection} - -Some resources, such as databases, have a significant risk of unrecoverable data loss if the resource is accidentally deleted due to a change to a ForceNew field. For these resources, the best practice is to add a `deletion_protection` field that defaults to `true`, which prevents the resource from being deleted if enabled. Although it is a small breaking change, for users, the benefits of `deletion_protection` defaulting to `true` outweigh the cost. - -APIs also sometimes add `deletion_protection` fields, which will generally default to `false` for backwards-compatibility reasons. Any `deletion_protection` API field added to an existing Terraform resource must match the API default initially. The default may be set to `true` in the next major release. For new Terraform resources, any `deletion_protection` field should default to `true` in Terraform regardless of the API default. 
When creating the corresponding Terraform field, the name -should match the API field name (i.e. it need not literally be named `deletion_protection` if the API uses something different) and should be the same field type (example: if the API field is an enum, so should the Terraform field). - -A resource can have up to two `deletion_protection` fields (with different names): one that represents a field in the API, and one that is only in Terraform. This could happen because the API added its field after `deletion_protection` already existed in Terraform; it could also happen because a separate field was added in Terraform to make sure that `deletion_protection` is enabled by default. In either case, they should be reconciled into a single field (that defaults to enabled and whose name matches the API field) in the next major release. - -Resources that do not have a significant risk of unrecoverable data loss or similar critical concern will not be given `deletion_protection` fields. - -See [Client-side fields]({{< ref "/develop/client-side-fields" >}}) for information about adding `deletion_protection` fields. - -{{< hint info >}} -**Note:** The previous best practice was a field called `force_delete` that defaulted to `false`. This is still present on some resources for backwards-compatibility reasons, but `deletion_protection` is preferred going forward. -{{< /hint >}} - -## Deletion policy {#deletion_policy} - -Some resources need to let users control the actions taken add deletion time. For these resources, the best practice is to add a `deletion_policy` enum field that defaults to an empty string and allows special values that control the deletion behavior. - -One common example is `ABANDON`, which is useful if the resource is safe to delete from Terraform but could cause problems if deleted from the API - for example, `google_bigtable_gc_policy` deletion can fail in replicated instances. `ABANDON` indicates that attempts to delete the resource should remove it from state without actually deleting it. - -See [Client-side fields]({{< ref "/develop/client-side-fields" >}}) for information about adding `deletion_policy` fields. - -## Add labels and annotations support - -The new labels model and the new annotations model are introduced in [Terraform provider for Google Cloud 5.0.0](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade#provider). - -There are now three label-related fields with the new labels model: -* The `labels` field is now non-authoritative and only manages the label keys defined in your configuration for the resource. -* The `terraform_labels` cannot be specified directly by the user. It merges the labels defined in the resource's configuration and the default labels configured in the provider block. If the same label key exists on both the resource level and provider level, the value on the resource will override the provider-level default. -* The output-only `effective_labels` will list all the labels present on the resource in GCP, including the labels configured through Terraform, the system, and other clients. - -There are now two annotation-related fields with the new annotations model: -* The `annotations` field is now non-authoritative and only manages the annotation keys defined in your configuration for the resource. -* The output-only `effective_annotations` will list all the annotations present on the resource in GCP, including the annotations configured through Terraform, the system, and other clients. 
- -This document describes how to add `labels` and `annotations` field to resources to support the new models. - -### Labels support -When adding a new `labels` field, please make the changes below to support the new labels model. Otherwise, it has to wait for the next major release to make the changes. - -#### MMv1 resources - -1. Use the type `KeyValueLabels` for the standard resource `labels` field. The standard resource `labels` field could be the top level `labels` field or the nested `labels` field inside the top level `metadata` field. Don't add `default_from_api: true` to this field or don't use this type for other `labels` fields in the resource. `KeyValueLabels` will add all of changes required for the new model automatically. - -```yaml - - name: 'labels' - type: KeyValueLabels - description: | - The labels associated with this dataset. You can use these to - organize and group your datasets. -``` -2. In the handwritten acceptance tests, add `labels` and `terraform_labels` to `ImportStateVerifyIgnore` if `labels` field is in the configuration. - -```go -ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, -``` -3. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceLabels(d)` to make `labels` and `terraform_labels` have all of the labels on the resource. - -```go -err = resourceArtifactRegistryRepositoryRead(d, meta) -if err != nil { - return err -} - -if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err -} -``` - -#### Handwritten resources - -1. Add `tpgresource.SetLabelsDiff` to `CustomizeDiff` of the resource. -```go -CustomizeDiff: customdiff.All( - tpgresource.SetLabelsDiff, -), -``` -2. Add `labels` field and add more attributes (such as `ForceNew: true,`, `Set: schema.HashString,`) to this field if necessary. -```go -"labels": { - Type: schema.TypeMap, - Optional: true, - Elem: &schema.Schema{Type: schema.TypeString}, - Description: `A set of key/value label pairs to assign to the project. - - **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. - Please refer to the field 'effective_labels' for all of the labels present on the resource.`, -}, -``` -3. Add output only field `terraform_labels` and add more attributes (such as `Set: schema.HashString,`) to this field if necessary. Don't add `ForceNew:true,` to this field. -```go -"terraform_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, - Elem: &schema.Schema{Type: schema.TypeString}, -}, -``` -4. Add output only field `effective_labels` and add more attributes (such as `ForceNew: true,`, `Set: schema.HashString,`) to this field if necessary. -```go -"effective_labels": { - Type: schema.TypeMap, - Computed: true, - Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, - Elem: &schema.Schema{Type: schema.TypeString}, -}, -``` -5. In the create method, use the value of `effective_labels` in API request. -6. In the update method, use the value of `effective_labels` in API request. -7. In the read mehtod, set `labels`, `terraform_labels` and `effective_labels` to state. 
-```go -if err := tpgresource.SetLabels(res.Labels, d, "labels"); err != nil { - return fmt.Errorf("Error setting labels: %s", err) -} -if err := tpgresource.SetLabels(res.Labels, d, "terraform_labels"); err != nil { - return fmt.Errorf("Error setting terraform_labels: %s", err) -} -if err := d.Set("effective_labels", res.Labels); err != nil { - return fmt.Errorf("Error setting effective_labels: %s", err) -} -``` -8. In the handwritten acceptance tests, add `labels` and `terraform_labels` to `ImportStateVerifyIgnore`. -9. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceLabels(d)` to make `labels` and `terraform_labels` have all of the labels on the resource. -10. Add the documentation for these label-related fields. - -### Annotations support -When adding a new `annotations` field, please make the changes below below to support the new annotations model. Otherwise, it has to wait for the next major release to make the breaking changes. - -#### MMv1 resources - -1. Use the type `KeyValueAnnotations` for the standard resource `annotations` field. The standard resource `annotations` field could be the top level `annotations` field or the nested `annotations` field inside the top level `metadata` field. Don't add `default_from_api: true` to this field or don't use this type for other `annotations` fields in the resource. `KeyValueAnnotations` will add all of changes required for the new model automatically. - -```yaml -- name: 'annotations' - type: KeyValueAnnotations - description: | - Client-specified annotations. This is distinct from labels. -``` -2. In the handwritten acceptance tests, add `annotations` to `ImportStateVerifyIgnore` if `annotations` field is in the configuration. - -```go -ImportStateVerifyIgnore: []string{"annotations"}, -``` -3. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceAnnotations(d)` to make `annotations` have all of the annotations on the resource. - -```go -err = resourceSecretManagerSecretRead(d, meta) -if err != nil { - return err -} - -if err := tpgresource.SetDataSourceLabels(d); err != nil { - return err -} - -if err := tpgresource.SetDataSourceAnnotations(d); err != nil { - return err -} -``` - -#### Handwritten resources - -1. Add `tpgresource.SetAnnotationsDiff` to `CustomizeDiff` of the resource. -2. Add `annotations` field and add more attributes (such as `ForceNew: true,`, `Set: schema.HashString,`) to this field if necessary. -3. Add output only field `effective_annotations` and add more attributes (such as `ForceNew: true,`, `Set: schema.HashString,`) to this field if necessary. -4. In the create method, use the value of `effective_annotations` in API request. -5. In the update method, use the value of `effective_annotations` in API request. -6. In the read mehtod, set `annotations`, and `effective_annotations` to state. -7. In the handwritten acceptance tests, add `annotations` to `ImportStateVerifyIgnore`. -8. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceAnnotations(d)` to make `annotations` have all of the labels on the resource. -9. Add the documentation for these annotation-related fields. 
diff --git a/docs/content/best-practices/deletion-behaviors.md b/docs/content/best-practices/deletion-behaviors.md
new file mode 100644
index 000000000000..f88ae7da51bb
--- /dev/null
+++ b/docs/content/best-practices/deletion-behaviors.md
@@ -0,0 +1,35 @@
+---
+title: "Deletion behaviors"
+weight: 20
+---
+
+# Deletion behaviors
+
+{{< hint info >}}
+**Note:** This page covers best practices guidance for the Terraform provider for Google Cloud, which is used to ensure a consistent UX for Terraform users across providers or GCP users across the Google provider. Generally, this guidance should be followed and exceptions should be clearly demarcated / discussed.
+{{< /hint >}}
+
+## Mitigating data loss risk via deletion_protection {#deletion_protection}
+
+Some resources, such as databases, have a significant risk of unrecoverable data loss if the resource is accidentally deleted due to a change to a ForceNew field. For these resources, the best practice is to add a `deletion_protection` field that defaults to `true`, which prevents the resource from being deleted if enabled. Although it is a small breaking change, the benefits to users of `deletion_protection` defaulting to `true` outweigh the cost.
+
+APIs also sometimes add `deletion_protection` fields, which will generally default to `false` for backwards-compatibility reasons. Any `deletion_protection` API field added to an existing Terraform resource must match the API default initially. The default may be set to `true` in the next major release. For new Terraform resources, any `deletion_protection` field should default to `true` in Terraform regardless of the API default. When creating the corresponding Terraform field, the name
+should match the API field name (i.e. it need not literally be named `deletion_protection` if the API uses something different) and it should be the same field type (for example, if the API field is an enum, the Terraform field should be too).
+
+A resource can have up to two `deletion_protection` fields (with different names): one that represents a field in the API, and one that is only in Terraform. This could happen because the API added its field after `deletion_protection` already existed in Terraform; it could also happen because a separate field was added in Terraform to make sure that `deletion_protection` is enabled by default. In either case, they should be reconciled into a single field (that defaults to enabled and whose name matches the API field) in the next major release.
+
+Resources that do not have a significant risk of unrecoverable data loss or similar critical concern will not be given `deletion_protection` fields.
+
+See [Client-side fields]({{< ref "/develop/client-side-fields" >}}) for information about adding `deletion_protection` fields.
+
+{{< hint info >}}
+**Note:** The previous best practice was a field called `force_delete` that defaulted to `false`. This is still present on some resources for backwards-compatibility reasons, but `deletion_protection` is preferred going forward.
+{{< /hint >}}
+
+## Deletion policy {#deletion_policy}
+
+Some resources need to let users control the actions taken at deletion time. For these resources, the best practice is to add a `deletion_policy` enum field that defaults to an empty string and allows special values that control the deletion behavior.
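+
+A minimal sketch of such a field in a handwritten resource is shown here, using the `ABANDON` value discussed below. The exact schema attributes, the `validation.StringInSlice` check, and the delete-time early return are illustrative assumptions (the usual SDKv2 imports such as `helper/schema`, `helper/validation`, and `log` are assumed), not a definitive implementation; generated (MMv1) resources wire this up differently.
+
+```go
+// In the resource schema: an enum-style string field that defaults to "".
+"deletion_policy": {
+    Type:         schema.TypeString,
+    Optional:     true,
+    Default:      "",
+    ValidateFunc: validation.StringInSlice([]string{"", "ABANDON"}, false),
+    Description:  `Policy applied when the resource is destroyed. Setting "ABANDON" removes the resource from state without deleting it.`,
+},
+```
+
+```go
+// At the top of the resource's delete method: honor the policy before calling the API.
+if d.Get("deletion_policy").(string) == "ABANDON" {
+    log.Printf("[WARN] deletion_policy is ABANDON; removing %q from state without deleting it", d.Id())
+    d.SetId("")
+    return nil
+}
+```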
+
+One common example is `ABANDON`, which is useful if the resource is safe to delete from Terraform but could cause problems if deleted from the API - for example, `google_bigtable_gc_policy` deletion can fail in replicated instances. `ABANDON` indicates that attempts to delete the resource should remove it from state without actually deleting it.
+
+See [Client-side fields]({{< ref "/develop/client-side-fields" >}}) for information about adding `deletion_policy` fields.
diff --git a/docs/content/best-practices/immutable-fields.md b/docs/content/best-practices/immutable-fields.md
new file mode 100644
index 000000000000..294e124cacbe
--- /dev/null
+++ b/docs/content/best-practices/immutable-fields.md
@@ -0,0 +1,25 @@
+---
+title: "Immutable fields"
+weight: 10
+aliases:
+  - /docs/best-practices
+  - /best-practices
+---
+
+# Immutable fields
+
+{{< hint info >}}
+**Note:** This page covers best practices guidance for the Terraform provider for Google Cloud, which is used to ensure a consistent UX for Terraform users across providers or GCP users across the Google provider. Generally, this guidance should be followed and exceptions should be clearly demarcated / discussed.
+{{< /hint >}}
+
+[`ForceNew`](https://developer.hashicorp.com/terraform/intro#how-does-terraform-work) is a Terraform resource schema attribute that indicates that a field is immutable – that is, that a change to the field requires the resource to be destroyed and recreated.
+
+This is required for cases where a field can't be updated in-place, so that [Terraform's core workflow](https://developer.hashicorp.com/terraform/intro#how-does-terraform-work) of aligning real infrastructure with configuration can be achieved. If a field or resource can never be updated in-place and is not marked with `ForceNew`, that is considered a bug in the provider.
+
+Some fields or resources may be possible to update in place, but only under specific conditions. In these cases, you can treat the field as updatable - that is, do not mark it as `ForceNew`; instead, implement standard update functionality. Then, call `diff.ForceNew` inside a [`CustomizeDiff`](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/customizing-differences) if the appropriate conditions to allow update in place are not met. Any `CustomizeDiff` function like this must be thoroughly unit tested. Making a field conditionally updatable like this is considered a good and useful enhancement in cases where recreation is costly and conditional updates do not introduce undue complexity.
+
+In complex cases, it is better to mark the field `ForceNew` to ensure that users can apply their configurations successfully.
+
+## Safeguarding against deletion
+
+See [Deletion behaviors]({{< ref "/best-practices/deletion-behaviors" >}}) for mitigations against accidental deletion and other deletion safeguards.
diff --git a/docs/content/best-practices/labels-and-annotations.md b/docs/content/best-practices/labels-and-annotations.md
new file mode 100644
index 000000000000..16382bde1811
--- /dev/null
+++ b/docs/content/best-practices/labels-and-annotations.md
@@ -0,0 +1,158 @@
+---
+title: "Labels and annotations"
+weight: 30
+---
+
+# Add labels and annotations support
+
+{{< hint info >}}
+**Note:** This page covers best practices guidance for the Terraform provider for Google Cloud, which is used to ensure a consistent UX for Terraform users across providers or GCP users across the Google provider. Generally, this guidance should be followed and exceptions should be clearly demarcated / discussed.
+{{< /hint >}}
+
+The new labels model and the new annotations model are introduced in [Terraform provider for Google Cloud 5.0.0](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade#provider).
+
+There are now three label-related fields with the new labels model:
+* The `labels` field is now non-authoritative and only manages the label keys defined in your configuration for the resource.
+* The `terraform_labels` field cannot be specified directly by the user. It merges the labels defined in the resource's configuration and the default labels configured in the provider block. If the same label key exists on both the resource level and provider level, the value on the resource will override the provider-level default.
+* The output-only `effective_labels` field will list all the labels present on the resource in GCP, including the labels configured through Terraform, the system, and other clients.
+
+There are now two annotation-related fields with the new annotations model:
+* The `annotations` field is now non-authoritative and only manages the annotation keys defined in your configuration for the resource.
+* The output-only `effective_annotations` field will list all the annotations present on the resource in GCP, including the annotations configured through Terraform, the system, and other clients.
+
+This document describes how to add `labels` and `annotations` fields to resources to support the new models.
+
+## Labels support
+When adding a new `labels` field, please make the changes below to support the new labels model. Otherwise, the changes have to wait for the next major release.
+
+### MMv1 resources
+
+1. Use the type `KeyValueLabels` for the standard resource `labels` field. The standard resource `labels` field could be the top level `labels` field or the nested `labels` field inside the top level `metadata` field. Don't add `default_from_api: true` to this field, and don't use this type for other `labels` fields in the resource. `KeyValueLabels` will add all of the changes required for the new model automatically.
+
+```yaml
+  - name: 'labels'
+    type: KeyValueLabels
+    description: |
+      The labels associated with this dataset. You can use these to
+      organize and group your datasets.
+```
+2. In the handwritten acceptance tests, add `labels` and `terraform_labels` to `ImportStateVerifyIgnore` if the `labels` field is in the configuration.
+
+```go
+ImportStateVerifyIgnore: []string{"labels", "terraform_labels"},
+```
+3. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceLabels(d)` to make `labels` and `terraform_labels` have all of the labels on the resource.
+
+```go
+err = resourceArtifactRegistryRepositoryRead(d, meta)
+if err != nil {
+    return err
+}
+
+if err := tpgresource.SetDataSourceLabels(d); err != nil {
+    return err
+}
+```
+
+### Handwritten resources
+
+1. Add `tpgresource.SetLabelsDiff` to the `CustomizeDiff` of the resource.
+```go
+CustomizeDiff: customdiff.All(
+    tpgresource.SetLabelsDiff,
+),
+```
+2. Add the `labels` field, and add more attributes (such as `ForceNew: true,` or `Set: schema.HashString,`) to this field if necessary.
+```go
+"labels": {
+    Type:     schema.TypeMap,
+    Optional: true,
+    Elem:     &schema.Schema{Type: schema.TypeString},
+    Description: `A set of key/value label pairs to assign to the project.
+
+    **Note**: This field is non-authoritative, and will only manage the labels present in your configuration.
+    Please refer to the field 'effective_labels' for all of the labels present on the resource.`,
+},
+```
+3. Add the output-only field `terraform_labels` and add more attributes (such as `Set: schema.HashString,`) to this field if necessary. Don't add `ForceNew: true,` to this field.
+```go
+"terraform_labels": {
+    Type:     schema.TypeMap,
+    Computed: true,
+    Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`,
+    Elem:     &schema.Schema{Type: schema.TypeString},
+},
+```
+4. Add the output-only field `effective_labels` and add more attributes (such as `ForceNew: true,` or `Set: schema.HashString,`) to this field if necessary.
+```go
+"effective_labels": {
+    Type:     schema.TypeMap,
+    Computed: true,
+    Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`,
+    Elem:     &schema.Schema{Type: schema.TypeString},
+},
+```
+5. In the create method, use the value of `effective_labels` in the API request.
+6. In the update method, use the value of `effective_labels` in the API request.
+7. In the read method, set `labels`, `terraform_labels` and `effective_labels` to state.
+```go
+if err := tpgresource.SetLabels(res.Labels, d, "labels"); err != nil {
+    return fmt.Errorf("Error setting labels: %s", err)
+}
+if err := tpgresource.SetLabels(res.Labels, d, "terraform_labels"); err != nil {
+    return fmt.Errorf("Error setting terraform_labels: %s", err)
+}
+if err := d.Set("effective_labels", res.Labels); err != nil {
+    return fmt.Errorf("Error setting effective_labels: %s", err)
+}
+```
+8. In the handwritten acceptance tests, add `labels` and `terraform_labels` to `ImportStateVerifyIgnore`.
+9. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceLabels(d)` to make `labels` and `terraform_labels` have all of the labels on the resource.
+10. Add the documentation for these label-related fields.
+
+## Annotations support
+When adding a new `annotations` field, please make the changes below to support the new annotations model. Otherwise, the breaking changes have to wait for the next major release.
+
+### MMv1 resources
+
+1. Use the type `KeyValueAnnotations` for the standard resource `annotations` field. The standard resource `annotations` field could be the top level `annotations` field or the nested `annotations` field inside the top level `metadata` field. Don't add `default_from_api: true` to this field, and don't use this type for other `annotations` fields in the resource. `KeyValueAnnotations` will add all of the changes required for the new model automatically.
+
+```yaml
+- name: 'annotations'
+  type: KeyValueAnnotations
+  description: |
+    Client-specified annotations. This is distinct from labels.
+```
+2. In the handwritten acceptance tests, add `annotations` to `ImportStateVerifyIgnore` if the `annotations` field is in the configuration.
+
+```go
+ImportStateVerifyIgnore: []string{"annotations"},
+```
+3. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceAnnotations(d)` to make `annotations` have all of the annotations on the resource.
+
+```go
+err = resourceSecretManagerSecretRead(d, meta)
+if err != nil {
+    return err
+}
+
+if err := tpgresource.SetDataSourceLabels(d); err != nil {
+    return err
+}
+
+if err := tpgresource.SetDataSourceAnnotations(d); err != nil {
+    return err
+}
+```
+
+### Handwritten resources
+
+1. Add `tpgresource.SetAnnotationsDiff` to the `CustomizeDiff` of the resource.
+2. Add the `annotations` field, and add more attributes (such as `ForceNew: true,` or `Set: schema.HashString,`) to this field if necessary (see the sketch after this list).
+3. Add the output-only field `effective_annotations` and add more attributes (such as `ForceNew: true,` or `Set: schema.HashString,`) to this field if necessary.
+4. In the create method, use the value of `effective_annotations` in the API request.
+5. In the update method, use the value of `effective_annotations` in the API request.
+6. In the read method, set `annotations` and `effective_annotations` to state.
+7. In the handwritten acceptance tests, add `annotations` to `ImportStateVerifyIgnore`.
+8. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceAnnotations(d)` to make `annotations` have all of the annotations on the resource.
+9. Add the documentation for these annotation-related fields.
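+
+For reference, a minimal sketch of steps 1-3 for a handwritten resource is shown below. Only `tpgresource.SetAnnotationsDiff` is named by the steps above; the schema attributes and the description strings are illustrative assumptions to be adapted to the resource at hand, not a definitive implementation.
+
+```go
+// Step 1: register the annotations diff helper on the resource.
+CustomizeDiff: customdiff.All(
+    tpgresource.SetAnnotationsDiff,
+),
+```
+
+```go
+// Steps 2 and 3: the user-managed field plus the output-only effective field.
+"annotations": {
+    Type:     schema.TypeMap,
+    Optional: true,
+    Elem:     &schema.Schema{Type: schema.TypeString},
+    Description: `Client-specified annotations.
+
+    **Note**: This field is non-authoritative, and will only manage the annotations present in your configuration.
+    Please refer to the field 'effective_annotations' for all of the annotations present on the resource.`,
+},
+"effective_annotations": {
+    Type:     schema.TypeMap,
+    Computed: true,
+    Elem:     &schema.Schema{Type: schema.TypeString},
+    Description: `All of the annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Terraform, other clients and services.`,
+},
+```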