diff --git a/.changelog/3608.txt b/.changelog/3608.txt
new file mode 100644
index 00000000000..7b2868e75ef
--- /dev/null
+++ b/.changelog/3608.txt
@@ -0,0 +1,9 @@
+```release-note:new-resource
+`google_bigquery_dataset_iam_binding`
+```
+```release-note:new-resource
+`google_bigquery_dataset_iam_member`
+```
+```release-note:new-resource
+`google_bigquery_dataset_iam_policy`
+```
diff --git a/google/iam_bigquery_dataset.go b/google/iam_bigquery_dataset.go
new file mode 100644
index 00000000000..4041da43261
--- /dev/null
+++ b/google/iam_bigquery_dataset.go
@@ -0,0 +1,236 @@
+package google
+
+import (
+    "errors"
+    "fmt"
+    "strings"
+
+    "github.com/hashicorp/errwrap"
+    "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+    "google.golang.org/api/cloudresourcemanager/v1"
+)
+
+var IamBigqueryDatasetSchema = map[string]*schema.Schema{
+    "dataset_id": {
+        Type:     schema.TypeString,
+        Required: true,
+        ForceNew: true,
+    },
+    "project": {
+        Type:     schema.TypeString,
+        Optional: true,
+        Computed: true,
+        ForceNew: true,
+    },
+}
+
+var bigqueryAccessPrimitiveToRoleMap = map[string]string{
+    "OWNER":  "roles/bigquery.dataOwner",
+    "WRITER": "roles/bigquery.dataEditor",
+    "READER": "roles/bigquery.dataViewer",
+}
+
+type BigqueryDatasetIamUpdater struct {
+    project   string
+    datasetId string
+    Config    *Config
+}
+
+func NewBigqueryDatasetIamUpdater(d *schema.ResourceData, config *Config) (ResourceIamUpdater, error) {
+    project, err := getProject(d, config)
+    if err != nil {
+        return nil, err
+    }
+
+    d.Set("project", project)
+
+    return &BigqueryDatasetIamUpdater{
+        project:   project,
+        datasetId: d.Get("dataset_id").(string),
+        Config:    config,
+    }, nil
+}
+
+func BigqueryDatasetIdParseFunc(d *schema.ResourceData, config *Config) error {
+    fv, err := parseProjectFieldValue("datasets", d.Id(), "project", d, config, false)
+    if err != nil {
+        return err
+    }
+
+    d.Set("project", fv.Project)
+    d.Set("dataset_id", fv.Name)
+
+    // Explicitly set the id so imported resources have the same ID format as non-imported ones.
+    d.SetId(fv.RelativeLink())
+    return nil
+}
+
+func (u *BigqueryDatasetIamUpdater) GetResourceIamPolicy() (*cloudresourcemanager.Policy, error) {
+    url := fmt.Sprintf("%s%s", u.Config.BigQueryBasePath, u.GetResourceId())
+
+    res, err := sendRequest(u.Config, "GET", u.project, url, nil)
+    if err != nil {
+        return nil, errwrap.Wrapf(fmt.Sprintf("Error retrieving IAM policy for %s: {{err}}", u.DescribeResource()), err)
+    }
+
+    policy, err := accessToPolicy(res["access"])
+    if err != nil {
+        return nil, err
+    }
+    return policy, nil
+}
+
+func (u *BigqueryDatasetIamUpdater) SetResourceIamPolicy(policy *cloudresourcemanager.Policy) error {
+    url := fmt.Sprintf("%s%s", u.Config.BigQueryBasePath, u.GetResourceId())
+
+    access, err := policyToAccess(policy)
+    if err != nil {
+        return err
+    }
+    obj := map[string]interface{}{
+        "access": access,
+    }
+
+    _, err = sendRequest(u.Config, "PATCH", u.project, url, obj)
+    if err != nil {
+        return fmt.Errorf("Error creating DatasetAccess: %s", err)
+    }
+
+    return nil
+}
+
+func accessToPolicy(access interface{}) (*cloudresourcemanager.Policy, error) {
+    if access == nil {
+        return nil, nil
+    }
+    roleToBinding := make(map[string]*cloudresourcemanager.Binding)
+
+    accessArr := access.([]interface{})
+    for _, v := range accessArr {
+        memberRole := v.(map[string]interface{})
+        rawRole, ok := memberRole["role"]
+        if !ok {
+            // "view" allows role to not be defined. It is a special dataset access construct, so ignore
+            // If a user wants to manage "view" access they should use the `bigquery_dataset_access` resource
+            continue
+        }
+        role := rawRole.(string)
+        if iamRole, ok := bigqueryAccessPrimitiveToRoleMap[role]; ok {
+            // API changes certain IAM roles to legacy roles. Revert these changes
+            role = iamRole
+        }
+        member, err := accessToIamMember(memberRole)
+        if err != nil {
+            return nil, err
+        }
+        // We have to combine bindings manually
+        binding, ok := roleToBinding[role]
+        if !ok {
+            binding = &cloudresourcemanager.Binding{Role: role, Members: []string{}}
+        }
+        binding.Members = append(binding.Members, member)
+
+        roleToBinding[role] = binding
+    }
+    bindings := make([]*cloudresourcemanager.Binding, 0)
+    for _, v := range roleToBinding {
+        bindings = append(bindings, v)
+    }
+
+    return &cloudresourcemanager.Policy{Bindings: bindings}, nil
+}
+
+func policyToAccess(policy *cloudresourcemanager.Policy) ([]map[string]interface{}, error) {
+    res := make([]map[string]interface{}, 0)
+    if len(policy.AuditConfigs) != 0 {
+        return nil, errors.New("Access policies not allowed on BigQuery Dataset IAM policies")
+    }
+    for _, binding := range policy.Bindings {
+        if binding.Condition != nil {
+            return nil, errors.New("IAM conditions not allowed on BigQuery Dataset IAM")
+        }
+        if fullRole, ok := bigqueryAccessPrimitiveToRoleMap[binding.Role]; ok {
+            return nil, fmt.Errorf("BigQuery Dataset legacy role %s is not allowed when using google_bigquery_dataset_iam resources. Please use the full form: %s", binding.Role, fullRole)
+        }
+        for _, member := range binding.Members {
+            access := map[string]interface{}{
+                "role": binding.Role,
+            }
+            memberType, member, err := iamMemberToAccess(member)
+            if err != nil {
+                return nil, err
+            }
+            access[memberType] = member
+            res = append(res, access)
+        }
+    }
+
+    return res, nil
+}
+
+// Returns the member access type and member for an IAM member.
+// Dataset access uses different member types to identify groups, domains, etc.
+// These types are used as keys in the access JSON payload.
+func iamMemberToAccess(member string) (string, string, error) {
+    pieces := strings.SplitN(member, ":", 2)
+    if len(pieces) > 1 {
+        switch pieces[0] {
+        case "group":
+            return "groupByEmail", pieces[1], nil
+        case "domain":
+            return "domain", pieces[1], nil
+        case "user":
+            return "userByEmail", pieces[1], nil
+        case "serviceAccount":
+            return "userByEmail", pieces[1], nil
+        default:
+            return "", "", fmt.Errorf("Failed to parse BigQuery Dataset IAM member type: %s", member)
+        }
+    }
+    if member == "projectOwners" || member == "projectReaders" || member == "projectWriters" || member == "allAuthenticatedUsers" {
+        // These are special BigQuery Dataset permissions
+        return "specialGroup", member, nil
+    }
+    return "iamMember", member, nil
+}
+
+func accessToIamMember(access map[string]interface{}) (string, error) {
+    // One of the fields must be set, we have to find which IAM member type this maps to
+    if member, ok := access["groupByEmail"]; ok {
+        return fmt.Sprintf("group:%s", member.(string)), nil
+    }
+    if member, ok := access["domain"]; ok {
+        return fmt.Sprintf("domain:%s", member.(string)), nil
+    }
+    if member, ok := access["specialGroup"]; ok {
+        return member.(string), nil
+    }
+    if member, ok := access["iamMember"]; ok {
+        return member.(string), nil
+    }
+    if _, ok := access["view"]; ok {
+        // view does not map to an IAM member, use access instead
+        return "", fmt.Errorf("Failed to convert BigQuery Dataset access to IAM member. To use views with a dataset, please use dataset_access")
+    }
+    if member, ok := access["userByEmail"]; ok {
+        // Service accounts have "gserviceaccount" in their email; this is a best guess due to lost information
+        if strings.Contains(member.(string), "gserviceaccount") {
+            return fmt.Sprintf("serviceAccount:%s", member.(string)), nil
+        }
+        return fmt.Sprintf("user:%s", member.(string)), nil
+    }
+    return "", fmt.Errorf("Failed to identify IAM member from BigQuery Dataset access: %v", access)
+}
+
+func (u *BigqueryDatasetIamUpdater) GetResourceId() string {
+    return fmt.Sprintf("projects/%s/datasets/%s", u.project, u.datasetId)
+}
+
+// Matches the mutex of google_bigquery_dataset_access
+func (u *BigqueryDatasetIamUpdater) GetMutexKey() string {
+    return u.datasetId
+}
+
+func (u *BigqueryDatasetIamUpdater) DescribeResource() string {
+    return fmt.Sprintf("Bigquery Dataset %s/%s", u.project, u.datasetId)
+}
diff --git a/google/provider.go b/google/provider.go
index 8961f4af6e4..c6cdd6f2d5f 100644
--- a/google/provider.go
+++ b/google/provider.go
@@ -785,6 +785,9 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) {
 			"google_bigtable_instance_iam_member":  ResourceIamMember(IamBigtableInstanceSchema, NewBigtableInstanceUpdater, BigtableInstanceIdParseFunc),
 			"google_bigtable_instance_iam_policy":  ResourceIamPolicy(IamBigtableInstanceSchema, NewBigtableInstanceUpdater, BigtableInstanceIdParseFunc),
 			"google_bigtable_table":                resourceBigtableTable(),
+			"google_bigquery_dataset_iam_binding":  ResourceIamBinding(IamBigqueryDatasetSchema, NewBigqueryDatasetIamUpdater, BigqueryDatasetIdParseFunc),
+			"google_bigquery_dataset_iam_member":   ResourceIamMember(IamBigqueryDatasetSchema, NewBigqueryDatasetIamUpdater, BigqueryDatasetIdParseFunc),
+			"google_bigquery_dataset_iam_policy":   ResourceIamPolicy(IamBigqueryDatasetSchema, NewBigqueryDatasetIamUpdater, BigqueryDatasetIdParseFunc),
 			"google_billing_account_iam_binding":   ResourceIamBinding(IamBillingAccountSchema, NewBillingAccountIamUpdater, BillingAccountIdParseFunc),
 			"google_billing_account_iam_member":    ResourceIamMember(IamBillingAccountSchema, NewBillingAccountIamUpdater, BillingAccountIdParseFunc),
 			"google_billing_account_iam_policy":    ResourceIamPolicy(IamBillingAccountSchema, NewBillingAccountIamUpdater, BillingAccountIdParseFunc),
diff --git a/google/resource_bigquery_dataset_iam_member_test.go b/google/resource_bigquery_dataset_iam_member_test.go
new file mode 100644
index 00000000000..5b4d6a4bdad
--- /dev/null
+++ b/google/resource_bigquery_dataset_iam_member_test.go
@@ -0,0 +1,62 @@
+package google
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+)
+
+func TestAccBigqueryDatasetIamMember_basic(t *testing.T) {
+    t.Parallel()
+
+    datasetID := fmt.Sprintf("tf_test_%s", randString(t, 10))
+    saID := fmt.Sprintf("tf-test-%s", randString(t, 10))
+
+    expected := map[string]interface{}{
+        "role":        "roles/viewer",
+        "userByEmail": fmt.Sprintf("%s@%s.iam.gserviceaccount.com", saID, getTestProjectFromEnv()),
+    }
+
+    vcrTest(t, resource.TestCase{
+        PreCheck:  func() { testAccPreCheck(t) },
+        Providers: testAccProviders,
+        Steps: []resource.TestStep{
+            {
+                Config: testAccBigqueryDatasetIamMember_basic(datasetID, saID),
+                Check:  testAccCheckBigQueryDatasetAccessPresent(t, "google_bigquery_dataset.dataset", expected),
+            },
+            {
+                // Destroy step instead of CheckDestroy so we can check the access is removed without deleting the dataset
+                Config: testAccBigqueryDatasetIamMember_destroy(datasetID, "dataset"),
+                Check:  testAccCheckBigQueryDatasetAccessAbsent(t, "google_bigquery_dataset.dataset", expected),
+            },
+        },
+    })
+}
+
+func testAccBigqueryDatasetIamMember_destroy(datasetID, rs string) string {
+    return fmt.Sprintf(`
+resource "google_bigquery_dataset" "%s" {
+  dataset_id = "%s"
+}
+`, rs, datasetID)
+}
+
+func testAccBigqueryDatasetIamMember_basic(datasetID, saID string) string {
+    return fmt.Sprintf(`
+resource "google_bigquery_dataset_iam_member" "access" {
+  dataset_id = google_bigquery_dataset.dataset.dataset_id
+  role       = "roles/viewer"
+  member     = "serviceAccount:${google_service_account.bqviewer.email}"
+}
+
+resource "google_bigquery_dataset" "dataset" {
+  dataset_id = "%s"
+}
+
+resource "google_service_account" "bqviewer" {
+  account_id = "%s"
+}
+`, datasetID, saID)
+}
diff --git a/google/resource_bigquery_dataset_iam_test.go b/google/resource_bigquery_dataset_iam_test.go
new file mode 100644
index 00000000000..3e41cbcbc72
--- /dev/null
+++ b/google/resource_bigquery_dataset_iam_test.go
@@ -0,0 +1,202 @@
+package google
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+)
+
+func TestAccBigqueryDatasetIamBinding(t *testing.T) {
+    t.Parallel()
+
+    dataset := "tf_test_dataset_iam_" + randString(t, 10)
+    account := "tf-test-bq-iam-" + randString(t, 10)
+    role := "roles/bigquery.dataViewer"
+
+    importId := fmt.Sprintf("projects/%s/datasets/%s %s",
+        getTestProjectFromEnv(), dataset, role)
+
+    vcrTest(t, resource.TestCase{
+        PreCheck:  func() { testAccPreCheck(t) },
+        Providers: testAccProviders,
+        Steps: []resource.TestStep{
+            {
+                // Test IAM Binding creation
+                Config: testAccBigqueryDatasetIamBinding_basic(dataset, account, role),
+                Check: resource.ComposeTestCheckFunc(
+                    resource.TestCheckResourceAttr(
+                        "google_bigquery_dataset_iam_binding.binding", "role", role),
+                ),
+            },
+            {
+                ResourceName:      "google_bigquery_dataset_iam_binding.binding",
+                ImportStateId:     importId,
+                ImportState:       true,
+                ImportStateVerify: true,
+            },
+            {
+                // Test IAM Binding update
+                Config: testAccBigqueryDatasetIamBinding_update(dataset, account, role),
+            },
+            {
+                ResourceName:      "google_bigquery_dataset_iam_binding.binding",
+                ImportStateId:     importId,
+                ImportState:       true,
+                ImportStateVerify: true,
+            },
+        },
+    })
+}
+
+func TestAccBigqueryDatasetIamMember(t *testing.T) {
+    t.Parallel()
+
+    dataset := "tf_test_dataset_iam_" + randString(t, 10)
+    account := "tf-test-bq-iam-" + randString(t, 10)
+    role := "roles/editor"
+
+    importId := fmt.Sprintf("projects/%s/datasets/%s %s serviceAccount:%s",
+        getTestProjectFromEnv(),
+        dataset,
+        role,
+        serviceAccountCanonicalEmail(account))
+
+    vcrTest(t, resource.TestCase{
+        PreCheck:  func() { testAccPreCheck(t) },
+        Providers: testAccProviders,
+        Steps: []resource.TestStep{
+            {
+                // Test IAM Member creation
+                Config: testAccBigqueryDatasetIamMember(dataset, account, role),
+                Check: resource.ComposeTestCheckFunc(
+                    resource.TestCheckResourceAttr(
+                        "google_bigquery_dataset_iam_member.member", "role", role),
+                    resource.TestCheckResourceAttr(
+                        "google_bigquery_dataset_iam_member.member", "member", "serviceAccount:"+serviceAccountCanonicalEmail(account)),
+                ),
+            },
+            {
+                ResourceName:      "google_bigquery_dataset_iam_member.member",
+                ImportStateId:     importId,
+                ImportState:       true,
+                ImportStateVerify: true,
+            },
+        },
+    })
+}
+
+func TestAccBigqueryDatasetIamPolicy(t *testing.T) {
+    t.Parallel()
+
+    dataset := "tf_test_dataset_iam_" + randString(t, 10)
"tf-test-bq-iam-" + randString(t, 10) + role := "roles/bigquery.dataOwner" + + importId := fmt.Sprintf("projects/%s/datasets/%s", + getTestProjectFromEnv(), dataset) + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + // Test IAM Binding creation + Config: testAccBigqueryDatasetIamPolicy(dataset, account, role), + }, + { + ResourceName: "google_bigquery_dataset_iam_policy.policy", + ImportStateId: importId, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccBigqueryDatasetIamBinding_basic(dataset, account, role string) string { + return fmt.Sprintf(testBigqueryDatasetIam+` +resource "google_service_account" "test-account1" { + account_id = "%s-1" + display_name = "Bigquery Dataset IAM Testing Account" +} + +resource "google_service_account" "test-account2" { + account_id = "%s-2" + display_name = "Bigquery Dataset Iam Testing Account" +} + +resource "google_bigquery_dataset_iam_binding" "binding" { + dataset_id = google_bigquery_dataset.dataset.dataset_id + role = "%s" + members = [ + "serviceAccount:${google_service_account.test-account1.email}", + ] +} +`, dataset, account, account, role) +} + +func testAccBigqueryDatasetIamBinding_update(dataset, account, role string) string { + return fmt.Sprintf(testBigqueryDatasetIam+` +resource "google_service_account" "test-account1" { + account_id = "%s-1" + display_name = "Bigquery Dataset IAM Testing Account" +} + +resource "google_service_account" "test-account2" { + account_id = "%s-2" + display_name = "Bigquery Dataset IAM Testing Account" +} + +resource "google_bigquery_dataset_iam_binding" "binding" { + dataset_id = google_bigquery_dataset.dataset.dataset_id + role = "%s" + members = [ + "serviceAccount:${google_service_account.test-account1.email}", + "serviceAccount:${google_service_account.test-account2.email}", + ] +} +`, dataset, account, account, role) +} + +func testAccBigqueryDatasetIamMember(dataset, account, role string) string { + return fmt.Sprintf(testBigqueryDatasetIam+` +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Bigquery Dataset IAM Testing Account" +} + +resource "google_bigquery_dataset_iam_member" "member" { + dataset_id = google_bigquery_dataset.dataset.dataset_id + role = "%s" + member = "serviceAccount:${google_service_account.test-account.email}" +} +`, dataset, account, role) +} + +func testAccBigqueryDatasetIamPolicy(dataset, account, role string) string { + return fmt.Sprintf(testBigqueryDatasetIam+` +resource "google_service_account" "test-account" { + account_id = "%s" + display_name = "Bigquery Dataset IAM Testing Account" +} + +data "google_iam_policy" "policy" { + binding { + role = "%s" + members = ["serviceAccount:${google_service_account.test-account.email}"] + } +} + +resource "google_bigquery_dataset_iam_policy" "policy" { + dataset_id = google_bigquery_dataset.dataset.dataset_id + policy_data = data.google_iam_policy.policy.policy_data +} +`, dataset, account, role) +} + +var testBigqueryDatasetIam = ` +resource "google_bigquery_dataset" "dataset" { + dataset_id = "%s" +} +` diff --git a/website/docs/r/bigquery_dataset_iam.html.markdown b/website/docs/r/bigquery_dataset_iam.html.markdown new file mode 100644 index 00000000000..27411a6fbb2 --- /dev/null +++ b/website/docs/r/bigquery_dataset_iam.html.markdown @@ -0,0 +1,120 @@ +--- +layout: "google" +subcategory: "BigQuery" +page_title: "Google: google_bigquery_dataset_iam" +sidebar_current: 
"docs-google-bigquery-dataset-iam" +description: |- + Collection of resources to manage IAM policy for a BigQuery dataset. +--- + +# IAM policy for BigQuery dataset + +Three different resources help you manage your IAM policy for BigQuery dataset. Each of these resources serves a different use case: + +* `google_bigquery_dataset_iam_policy`: Authoritative. Sets the IAM policy for the dataset and replaces any existing policy already attached. +* `google_bigquery_dataset_iam_binding`: Authoritative for a given role. Updates the IAM policy to grant a role to a list of members. Other roles within the IAM policy for the dataset are preserved. +* `google_bigquery_dataset_iam_member`: Non-authoritative. Updates the IAM policy to grant a role to a new member. Other members for the role for the dataset are preserved. + +~> **Note:** These resources **cannot** be used with `google_bigquery_dataset_access` resources or the `access` field on `google_bigquery_dataset` or they will fight over what the policy should be. + +~> **Note:** Using any of these resources will remove any authorized view permissions from the dataset. To assign and preserve authorized view permissions use the `google_bigquery_dataset_access` instead. + +~> **Note:** Legacy BigQuery roles `OWNER` `WRITER` and `READER` **cannot** be used with any of these IAM resources. Instead use the full role form of: `roles/bigquery.dataOwner` `roles/bigquery.dataEditor` and `roles/bigquery.dataViewer`. + +~> **Note:** `google_bigquery_dataset_iam_policy` **cannot** be used in conjunction with `google_bigquery_dataset_iam_binding` and `google_bigquery_dataset_iam_member` or they will fight over what your policy should be. + +~> **Note:** `google_bigquery_dataset_iam_binding` resources **can be** used in conjunction with `google_bigquery_dataset_iam_member` resources **only if** they do not grant privilege to the same role. + +## google\bigquery\_dataset\_iam\_policy + +```hcl +data "google_iam_policy" "owner" { + binding { + role = "roles/dataOwner" + + members = [ + "user:jane@example.com", + ] + } +} + +resource "google_bigquery_dataset_iam_policy" "dataset" { + dataset_id = "your-dataset-id" + policy_data = data.google_iam_policy.owner.policy_data +} +``` + +## google\_bigquery\_dataset\_iam\_binding + +```hcl +resource "google_bigquery_dataset_iam_binding" "reader" { + dataset_id = "your-dataset-id" + role = "roles/bigquery.dataViewer" + + members = [ + "user:jane@example.com", + ] +} +``` + +## google\_bigquery\_dataset\_iam\_member + +```hcl +resource "google_bigquery_dataset_iam_member" "editor" { + dataset_id = "your-dataset-id" + role = "roles/bigquery.dataEditor" + member = "user:jane@example.com" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `dataset_id` - (Required) The dataset ID, in the form `projects/{project}/datasets/{dataset_id}` + +* `member/members` - (Required) Identities that will be granted the privilege in `role`. + Each entry can have one of the following values: + * **allUsers**: A special identifier that represents anyone who is on the internet; with or without a Google account. + * **allAuthenticatedUsers**: A special identifier that represents anyone who is authenticated with a Google account or a service account. + * **user:{emailid}**: An email address that represents a specific Google account. For example, alice@gmail.com or joe@example.com. + * **serviceAccount:{emailid}**: An email address that represents a service account. For example, my-other-app@appspot.gserviceaccount.com. 
+  * **group:{emailid}**: An email address that represents a Google group. For example, admins@example.com.
+  * **domain:{domain}**: A G Suite domain (primary, instead of alias) name that represents all the users of that domain. For example, google.com or example.com.
+
+* `role` - (Required) The role that should be applied. Only one
+  `google_bigquery_dataset_iam_binding` can be used per role. Note that custom roles must be of the format
+  `[projects|organizations]/{parent-name}/roles/{role-name}`.
+
+* `policy_data` - (Required only by `google_bigquery_dataset_iam_policy`) The policy data generated by
+  a `google_iam_policy` data source.
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are
+exported:
+
+* `etag` - (Computed) The etag of the dataset's IAM policy.
+
+## Import
+
+IAM member imports use space-delimited identifiers: the resource in question, the role, and the account. This member resource can be imported using the `dataset_id`, role, and account, e.g.
+
+```
+$ terraform import google_bigquery_dataset_iam_member.dataset_iam "projects/your-project-id/datasets/dataset-id roles/viewer user:foo@example.com"
+```
+
+IAM binding imports use space-delimited identifiers: the resource in question and the role. This binding resource can be imported using the `dataset_id` and role, e.g.
+
+```
+$ terraform import google_bigquery_dataset_iam_binding.dataset_iam "projects/your-project-id/datasets/dataset-id roles/viewer"
+```
+
+IAM policy imports use the identifier of the resource in question. This policy resource can be imported using the `dataset_id`, e.g.
+
+```
+$ terraform import google_bigquery_dataset_iam_policy.dataset_iam projects/your-project-id/datasets/dataset-id
+```
+
+-> **Custom Roles**: If you're importing an IAM resource with a custom role, make sure to use the
+  full name of the custom role, e.g. `[projects/my-project|organizations/my-org]/roles/my-custom-role`.
diff --git a/website/google.erb b/website/google.erb
index 06432a0fe30..c746d79a08f 100644
--- a/website/google.erb
+++ b/website/google.erb
@@ -133,6 +133,10 @@
               <li>
                 <a href="/docs/providers/google/r/bigquery_dataset_access.html">google_bigquery_dataset_access</a>
               </li>
+              <li>
+                <a href="/docs/providers/google/r/bigquery_dataset_iam.html">google_bigquery_dataset_iam</a>
+              </li>
+
               <li>
                 <a href="/docs/providers/google/r/bigquery_job.html">google_bigquery_job</a>
               </li>