diff --git a/.changelog/12562.txt b/.changelog/12562.txt
new file mode 100644
index 0000000000..672bb10db1
--- /dev/null
+++ b/.changelog/12562.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+storagetransfer: added `replication_spec` field to `google_storage_transfer_job` resource
+```
\ No newline at end of file
diff --git a/google-beta/services/storagetransfer/resource_storage_transfer_job.go b/google-beta/services/storagetransfer/resource_storage_transfer_job.go
index 51b81a27a2..8631d7622c 100644
--- a/google-beta/services/storagetransfer/resource_storage_transfer_job.go
+++ b/google-beta/services/storagetransfer/resource_storage_transfer_job.go
@@ -21,7 +21,7 @@ import (
)
var (
- objectConditionsKeys = []string{
+ transferSpecObjectConditionsKeys = []string{
"transfer_spec.0.object_conditions.0.min_time_elapsed_since_last_modification",
"transfer_spec.0.object_conditions.0.max_time_elapsed_since_last_modification",
"transfer_spec.0.object_conditions.0.include_prefixes",
@@ -29,13 +29,27 @@ var (
"transfer_spec.0.object_conditions.0.last_modified_since",
"transfer_spec.0.object_conditions.0.last_modified_before",
}
+ replicationSpecObjectConditionsKeys = []string{
+ "replication_spec.0.object_conditions.0.min_time_elapsed_since_last_modification",
+ "replication_spec.0.object_conditions.0.max_time_elapsed_since_last_modification",
+ "replication_spec.0.object_conditions.0.include_prefixes",
+ "replication_spec.0.object_conditions.0.exclude_prefixes",
+ "replication_spec.0.object_conditions.0.last_modified_since",
+ "replication_spec.0.object_conditions.0.last_modified_before",
+ }
- transferOptionsKeys = []string{
+ transferSpecTransferOptionsKeys = []string{
"transfer_spec.0.transfer_options.0.overwrite_objects_already_existing_in_sink",
"transfer_spec.0.transfer_options.0.delete_objects_unique_in_sink",
"transfer_spec.0.transfer_options.0.delete_objects_from_source_after_transfer",
"transfer_spec.0.transfer_options.0.overwrite_when",
}
+ replicationSpecTransferOptionsKeys = []string{
+ "replication_spec.0.transfer_options.0.overwrite_objects_already_existing_in_sink",
+ "replication_spec.0.transfer_options.0.delete_objects_unique_in_sink",
+ "replication_spec.0.transfer_options.0.delete_objects_from_source_after_transfer",
+ "replication_spec.0.transfer_options.0.overwrite_when",
+ }
transferSpecDataSourceKeys = []string{
"transfer_spec.0.gcs_data_source",
@@ -49,6 +63,14 @@ var (
"transfer_spec.0.gcs_data_sink",
"transfer_spec.0.posix_data_sink",
}
+
+ replicationSpecDataSourceKeys = []string{
+ "replication_spec.0.gcs_data_source",
+ }
+ replicationSpecDataSinkKeys = []string{
+ "replication_spec.0.gcs_data_sink",
+ }
+
awsS3AuthKeys = []string{
"transfer_spec.0.aws_s3_data_source.0.aws_access_key",
"transfer_spec.0.aws_s3_data_source.0.role_arn",
@@ -94,10 +116,11 @@ func ResourceStorageTransferJob() *schema.Resource {
Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`,
},
"event_stream": {
- Type: schema.TypeList,
- Optional: true,
- MaxItems: 1,
- ConflictsWith: []string{"schedule"},
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ ConflictsWith: []string{"schedule"},
+ DiffSuppressFunc: diffSuppressEventStream,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
@@ -120,14 +143,46 @@ func ResourceStorageTransferJob() *schema.Resource {
},
},
},
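+ // Exactly one of replication_spec or transfer_spec must be configured on a
+ // job; the two specs are mutually exclusive (enforced via ExactlyOneOf below).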
+ "replication_spec": {
+ Type: schema.TypeList,
+ MaxItems: 1,
+ Optional: true,
+ ConflictsWith: []string{"transfer_spec", "schedule"},
+ ExactlyOneOf: []string{"transfer_spec", "replication_spec"},
+ Elem: &schema.Resource{
+ Schema: map[string]*schema.Schema{
+ "object_conditions": objectConditionsSchema(replicationSpecObjectConditionsKeys),
+ "transfer_options": transferOptionsSchema(replicationSpecTransferOptionsKeys),
+ "gcs_data_sink": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: gcsDataSchema(),
+ ExactlyOneOf: replicationSpecDataSinkKeys,
+ Description: `A Google Cloud Storage data sink.`,
+ },
+ "gcs_data_source": {
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ Elem: gcsDataSchema(),
+ ExactlyOneOf: replicationSpecDataSourceKeys,
+ Description: `A Google Cloud Storage data source.`,
+ },
+ },
+ },
+ Description: `Replication specification.`,
+ },
"transfer_spec": {
- Type: schema.TypeList,
- Required: true,
- MaxItems: 1,
+ Type: schema.TypeList,
+ Optional: true,
+ MaxItems: 1,
+ ConflictsWith: []string{"replication_spec"},
+ ExactlyOneOf: []string{"transfer_spec", "replication_spec"},
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
- "object_conditions": objectConditionsSchema(),
- "transfer_options": transferOptionsSchema(),
+ "object_conditions": objectConditionsSchema(transferSpecObjectConditionsKeys),
+ "transfer_options": transferOptionsSchema(transferSpecTransferOptionsKeys),
"source_agent_pool_name": {
Type: schema.TypeString,
Optional: true,
@@ -307,7 +362,7 @@ func ResourceStorageTransferJob() *schema.Resource {
}
}
-func objectConditionsSchema() *schema.Schema {
+func objectConditionsSchema(objectConditionsKeys []string) *schema.Schema {
return &schema.Schema{
Type: schema.TypeList,
Optional: true,
@@ -368,7 +423,7 @@ func objectConditionsSchema() *schema.Schema {
}
}
-func transferOptionsSchema() *schema.Schema {
+func transferOptionsSchema(transferOptionsKeys []string) *schema.Schema {
return &schema.Schema{
Type: schema.TypeList,
Optional: true,
@@ -636,6 +691,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{})
Schedule: expandTransferSchedules(d.Get("schedule").([]interface{})),
EventStream: expandEventStream(d.Get("event_stream").([]interface{})),
TransferSpec: expandTransferSpecs(d.Get("transfer_spec").([]interface{})),
+ ReplicationSpec: expandReplicationSpecs(d.Get("replication_spec").([]interface{})),
NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})),
}
@@ -720,6 +776,11 @@ func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) er
return err
}
+ err = d.Set("replication_spec", flattenReplicationSpec(res.ReplicationSpec))
+ if err != nil {
+ return err
+ }
+
err = d.Set("notification_config", flattenTransferJobNotificationConfig(res.NotificationConfig))
if err != nil {
return err
@@ -778,6 +839,13 @@ func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{})
}
}
+ if d.HasChange("replication_spec") {
+ fieldMask = append(fieldMask, "replication_spec")
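+ // When replication_spec is removed from the config, GetOk reports no value,
+ // so ReplicationSpec stays nil even though the field mask still lists it.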
+ if v, ok := d.GetOk("replication_spec"); ok {
+ transferJob.ReplicationSpec = expandReplicationSpecs(v.([]interface{}))
+ }
+ }
+
if d.HasChange("notification_config") {
fieldMask = append(fieldMask, "notification_config")
if v, ok := d.GetOk("notification_config"); ok {
@@ -1265,6 +1333,9 @@ func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferS
}
func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.ResourceData) []map[string]interface{} {
+ if transferSpec == nil || reflect.DeepEqual(transferSpec, &storagetransfer.TransferSpec{}) {
+ return nil
+ }
data := map[string]interface{}{}
@@ -1342,3 +1413,44 @@ func flattenTransferJobNotificationConfig(notificationConfig *storagetransfer.No
return []map[string]interface{}{data}
}
+
+func diffSuppressEventStream(k, old, new string, d *schema.ResourceData) bool {
+ // event_stream cannot be configured together with replication_spec, so
+ // suppress any event_stream diff when this is a replication job.
+ _, isReplication := d.GetOk("replication_spec")
+ return isReplication
+}
+
+func expandReplicationSpecs(replicationSpecs []interface{}) *storagetransfer.ReplicationSpec {
+ if len(replicationSpecs) == 0 || replicationSpecs[0] == nil {
+ return nil
+ }
+
+ replicationSpec := replicationSpecs[0].(map[string]interface{})
+ return &storagetransfer.ReplicationSpec{
+ GcsDataSink: expandGcsData(replicationSpec["gcs_data_sink"].([]interface{})),
+ ObjectConditions: expandObjectConditions(replicationSpec["object_conditions"].([]interface{})),
+ TransferOptions: expandTransferOptions(replicationSpec["transfer_options"].([]interface{})),
+ GcsDataSource: expandGcsData(replicationSpec["gcs_data_source"].([]interface{})),
+ }
+}
+
+func flattenReplicationSpec(replicationSpec *storagetransfer.ReplicationSpec) []map[string]interface{} {
+ if replicationSpec == nil || reflect.DeepEqual(replicationSpec, &storagetransfer.ReplicationSpec{}) {
+ return nil
+ }
+
+ data := map[string]interface{}{}
+ if replicationSpec.GcsDataSink != nil {
+ data["gcs_data_sink"] = flattenGcsData(replicationSpec.GcsDataSink)
+ }
+ if replicationSpec.GcsDataSource != nil {
+ data["gcs_data_source"] = flattenGcsData(replicationSpec.GcsDataSource)
+ }
+ if replicationSpec.ObjectConditions != nil {
+ data["object_conditions"] = flattenObjectCondition(replicationSpec.ObjectConditions)
+ }
+ if replicationSpec.TransferOptions != nil {
+ data["transfer_options"] = flattenTransferOption(replicationSpec.TransferOptions)
+ }
+ return []map[string]interface{}{data}
+}
diff --git a/google-beta/services/storagetransfer/resource_storage_transfer_job_test.go b/google-beta/services/storagetransfer/resource_storage_transfer_job_test.go
index 2996489808..f188a599ed 100644
--- a/google-beta/services/storagetransfer/resource_storage_transfer_job_test.go
+++ b/google-beta/services/storagetransfer/resource_storage_transfer_job_test.go
@@ -80,6 +80,63 @@ func TestAccStorageTransferJob_basic(t *testing.T) {
})
}
+func TestAccStorageTransferReplicationJob_basic(t *testing.T) {
+ t.Parallel()
+
+ testDataSourceBucketName := acctest.RandString(t, 10)
+ testDataSinkName := acctest.RandString(t, 10)
+ testTransferReplicationJobDescription := acctest.RandString(t, 10)
+ testUpdatedTransferReplicationJobDescription := acctest.RandString(t, 10)
+ testOverwriteWhen := []string{"ALWAYS", "NEVER", "DIFFERENT"}
+
+ acctest.VcrTest(t, resource.TestCase{
+ PreCheck: func() { acctest.AccTestPreCheck(t) },
+ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+ CheckDestroy: testAccStorageTransferJobDestroyProducer(t),
+ Steps: []resource.TestStep{
+ {
+ Config: testAccStorageTransferReplicationJob_basic(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferReplicationJobDescription),
+ },
+ {
+ ResourceName: "google_storage_transfer_job.transfer_job",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ {
+ Config: testAccStorageTransferReplicationJob_basic(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testUpdatedTransferReplicationJobDescription),
+ },
+ {
+ ResourceName: "google_storage_transfer_job.transfer_job",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ {
+ Config: testAccStorageTransferReplicationJob_with_transferOptions(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testUpdatedTransferReplicationJobDescription, true, false, testOverwriteWhen[0]),
+ },
+ {
+ ResourceName: "google_storage_transfer_job.transfer_job",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ {
+ Config: testAccStorageTransferReplicationJob_with_transferOptions(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testUpdatedTransferReplicationJobDescription, false, false, testOverwriteWhen[1]),
+ },
+ {
+ ResourceName: "google_storage_transfer_job.transfer_job",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ {
+ Config: testAccStorageTransferReplicationJob_with_transferOptions(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testUpdatedTransferReplicationJobDescription, false, false, testOverwriteWhen[2]),
+ },
+ {
+ ResourceName: "google_storage_transfer_job.transfer_job",
+ ImportState: true,
+ ImportStateVerify: true,
+ },
+ },
+ })
+}
+
func TestAccStorageTransferJob_transferJobName(t *testing.T) {
t.Parallel()
@@ -1627,3 +1684,177 @@ resource "google_storage_transfer_job" "transfer_job" {
}
`, project, dataSourceBucketName, project, dataSinkBucketName, project, pubsubTopicName, transferJobDescription, project)
}
+
+func testAccStorageTransferReplicationJob_basic(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string) string {
+ return fmt.Sprintf(`
+data "google_storage_transfer_project_service_account" "default" {
+ project = "%s"
+}
+
+data "google_project" "my_project" {
+ project_id = "%s"
+}
+
+resource "google_project_iam_binding" "pubsub_publisher" {
+ project = "%s"
+ role = "roles/pubsub.publisher"
+ members = [
+ "serviceAccount:service-${data.google_project.my_project.number}@gs-project-accounts.iam.gserviceaccount.com",
+ ]
+}
+
+resource "google_project_iam_binding" "service_agent_binding" {
+ project = "%s"
+ role = "roles/storagetransfer.serviceAgent"
+ members = [
+ "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}",
+ ]
+}
+
+resource "google_storage_bucket" "data_source" {
+ name = "%s"
+ project = "%s"
+ location = "US"
+ force_destroy = true
+ uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_iam_member" "data_source" {
+ bucket = google_storage_bucket.data_source.name
+ role = "roles/storage.admin"
+ member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_bucket" "data_sink" {
+ name = "%s"
+ project = "%s"
+ location = "US"
+ force_destroy = true
+ uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_iam_member" "data_sink" {
+ bucket = google_storage_bucket.data_sink.name
+ role = "roles/storage.admin"
+ member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_transfer_job" "transfer_job" {
+ description = "%s"
+ project = "%s"
+
+ replication_spec {
+ gcs_data_source {
+ bucket_name = google_storage_bucket.data_source.name
+ path = "foo/bar/"
+ }
+ gcs_data_sink {
+ bucket_name = google_storage_bucket.data_sink.name
+ path = "foo/bar/"
+ }
+ }
+
+ depends_on = [
+ google_storage_bucket_iam_member.data_source,
+ google_storage_bucket_iam_member.data_sink,
+ google_project_iam_binding.pubsub_publisher
+ ]
+}
+`, project, project, project, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project)
+}
+
+func testAccStorageTransferReplicationJob_with_transferOptions(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, overwriteObjectsAlreadyExistingInSink bool, deleteObjectsUniqueInSink bool, overwriteWhenVal string) string {
+ return fmt.Sprintf(`
+data "google_storage_transfer_project_service_account" "default" {
+ project = "%s"
+}
+
+data "google_project" "my_project" {
+ project_id = "%s"
+}
+
+resource "google_project_iam_binding" "pubsub_publisher" {
+ project = "%s"
+ role = "roles/pubsub.publisher"
+ members = [
+ "serviceAccount:service-${data.google_project.my_project.number}@gs-project-accounts.iam.gserviceaccount.com",
+ ]
+}
+
+resource "google_project_iam_binding" "service_agent_binding" {
+ project = "%s"
+ role = "roles/storagetransfer.serviceAgent"
+ members = [
+ "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}",
+ ]
+}
+
+resource "google_storage_bucket" "data_source" {
+ name = "%s"
+ project = "%s"
+ location = "US"
+ force_destroy = true
+ uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_iam_member" "data_source" {
+ bucket = google_storage_bucket.data_source.name
+ role = "roles/storage.admin"
+ member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_bucket" "data_sink" {
+ name = "%s"
+ project = "%s"
+ location = "US"
+ force_destroy = true
+ uniform_bucket_level_access = true
+}
+
+resource "google_storage_bucket_iam_member" "data_sink" {
+ bucket = google_storage_bucket.data_sink.name
+ role = "roles/storage.admin"
+ member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_transfer_job" "transfer_job" {
+ description = "%s"
+ project = "%s"
+
+ replication_spec {
+ gcs_data_source {
+ bucket_name = google_storage_bucket.data_source.name
+ path = "foo/bar/"
+ }
+ gcs_data_sink {
+ bucket_name = google_storage_bucket.data_sink.name
+ path = "foo/bar/"
+ }
+ transfer_options {
+ overwrite_objects_already_existing_in_sink = %t
+ delete_objects_unique_in_sink = %t
+ overwrite_when = "%s"
+ delete_objects_from_source_after_transfer = false
+ }
+ object_conditions {
+ last_modified_since = "2020-01-01T00:00:00Z"
+ last_modified_before = "2020-01-01T00:00:00Z"
+ exclude_prefixes = [
+ "a/b/c",
+ ]
+ include_prefixes = [
+ "a/b"
+ ]
+      max_time_elapsed_since_last_modification = "300s"
+      min_time_elapsed_since_last_modification = "3s"
+ }
+ }
+
+ depends_on = [
+ google_storage_bucket_iam_member.data_source,
+ google_storage_bucket_iam_member.data_sink,
+ google_project_iam_binding.pubsub_publisher
+ ]
+}
+`, project, project, project, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project, overwriteObjectsAlreadyExistingInSink, deleteObjectsUniqueInSink, overwriteWhenVal)
+}
diff --git a/website/docs/r/storage_transfer_job.html.markdown b/website/docs/r/storage_transfer_job.html.markdown
index 15daf0dd51..bcc1df058b 100644
--- a/website/docs/r/storage_transfer_job.html.markdown
+++ b/website/docs/r/storage_transfer_job.html.markdown
@@ -116,7 +116,9 @@ The following arguments are supported:
* `description` - (Required) Unique description to identify the Transfer Job.
-* `transfer_spec` - (Required) Transfer specification. Structure [documented below](#nested_transfer_spec).
+* `transfer_spec` - (Optional) Transfer specification. Structure [documented below](#nested_transfer_spec). Exactly one of `transfer_spec` or `replication_spec` must be specified.
+
+* `replication_spec` - (Optional) Replication specification. Structure [documented below](#nested_replication_spec). Do not configure `schedule` or `event_stream` alongside this argument. Exactly one of `transfer_spec` or `replication_spec` must be specified.
- - -
@@ -157,6 +159,16 @@ The following arguments are supported:
* `hdfs_data_source` - (Optional) An HDFS data source. Structure [documented below](#nested_hdfs_data_source).
+The `replication_spec` block supports:
+
+* `gcs_data_sink` - (Optional) A Google Cloud Storage data sink. Structure [documented below](#nested_gcs_data_sink).
+
+* `gcs_data_source` - (Optional) A Google Cloud Storage data source. Structure [documented below](#nested_gcs_data_source).
+
+* `object_conditions` - (Optional) Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' `last_modification_time` do not exclude objects in a data sink. Structure [documented below](#nested_object_conditions).
+
+* `transfer_options` - (Optional) Characteristics of how to treat files from the data source and data sink during the job. If the option `delete_objects_unique_in_sink` is true, object conditions based on objects' `last_modification_time` are ignored and do not exclude objects in a data source or a data sink. Structure [documented below](#nested_transfer_options).
+
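+As a minimal sketch (bucket names below are illustrative placeholders), a replication job can be configured as follows; since `replication_spec` must not be combined with `schedule` or `event_stream`, neither block is set:
+
+```hcl
+resource "google_storage_transfer_job" "replication" {
+  description = "Replicate objects between two buckets"
+
+  replication_spec {
+    gcs_data_source {
+      bucket_name = "my-source-bucket"
+    }
+    gcs_data_sink {
+      bucket_name = "my-sink-bucket"
+    }
+  }
+}
+```
+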
The `schedule` block supports:
* `schedule_start_date` - (Required) The first day the recurring transfer is scheduled to run. If `schedule_start_date` is in the past, the transfer will run for the first time on the following day. Structure [documented below](#nested_schedule_start_end_date).