diff --git a/.changelog/40944.txt b/.changelog/40944.txt new file mode 100644 index 000000000000..183e447c0391 --- /dev/null +++ b/.changelog/40944.txt @@ -0,0 +1,3 @@ +```release-note:enhancement +resource/aws_s3_directory_bucket: The default value for `data_redundancy` is `SingleLocalZone` if `location.type` is `LocalZone` +``` \ No newline at end of file diff --git a/internal/service/s3/bucket.go b/internal/service/s3/bucket.go index 53f53b2057e3..735474d98990 100644 --- a/internal/service/s3/bucket.go +++ b/internal/service/s3/bucket.go @@ -719,7 +719,7 @@ func resourceBucketCreate(ctx context.Context, d *schema.ResourceData, meta inte // Special case: us-east-1 does not return error if the bucket already exists and is owned by // current account. It also resets the Bucket ACLs. if region == endpoints.UsEast1RegionID { - if err := findBucket(ctx, conn, bucket); err == nil { + if _, err := findBucket(ctx, conn, bucket); err == nil { return sdkdiag.AppendErrorf(diags, "creating S3 Bucket (%s): %s", bucket, errors.New(errCodeBucketAlreadyExists)) } } @@ -766,7 +766,7 @@ func resourceBucketCreate(ctx context.Context, d *schema.ResourceData, meta inte d.SetId(bucket) _, err = tfresource.RetryWhenNotFound(ctx, d.Timeout(schema.TimeoutCreate), func() (interface{}, error) { - return nil, findBucket(ctx, conn, d.Id()) + return findBucket(ctx, conn, d.Id()) }) if err != nil { @@ -784,7 +784,7 @@ func resourceBucketRead(ctx context.Context, d *schema.ResourceData, meta interf var diags diag.Diagnostics conn := meta.(*conns.AWSClient).S3Client(ctx) - err := findBucket(ctx, conn, d.Id()) + _, err := findBucket(ctx, conn, d.Id()) if !d.IsNewResource() && tfresource.NotFound(err) { log.Printf("[WARN] S3 Bucket (%s) not found, removing from state", d.Id()) @@ -1586,7 +1586,7 @@ func resourceBucketDelete(ctx context.Context, d *schema.ResourceData, meta inte } _, err = tfresource.RetryUntilNotFound(ctx, d.Timeout(schema.TimeoutDelete), func() (interface{}, error) { - return 
nil, findBucket(ctx, conn, d.Id()) + return findBucket(ctx, conn, d.Id()) }) if err != nil { @@ -1596,23 +1596,27 @@ func resourceBucketDelete(ctx context.Context, d *schema.ResourceData, meta inte return diags } -func findBucket(ctx context.Context, conn *s3.Client, bucket string, optFns ...func(*s3.Options)) error { - input := &s3.HeadBucketInput{ +func findBucket(ctx context.Context, conn *s3.Client, bucket string, optFns ...func(*s3.Options)) (*s3.HeadBucketOutput, error) { + input := s3.HeadBucketInput{ Bucket: aws.String(bucket), } - _, err := conn.HeadBucket(ctx, input, optFns...) + output, err := conn.HeadBucket(ctx, &input, optFns...) // For directory buckets that no longer exist it's the CreateSession call invoked by HeadBucket that returns "NoSuchBucket", // and that error code is flattened into HeadBucket's error message -- hence the 'errs.Contains' call. if tfawserr.ErrHTTPStatusCodeEquals(err, http.StatusNotFound) || tfawserr.ErrCodeEquals(err, errCodeNoSuchBucket) || errs.Contains(err, errCodeNoSuchBucket) { - return &retry.NotFoundError{ + return nil, &retry.NotFoundError{ LastError: err, LastRequest: input, } } - return err + if output == nil { + return nil, tfresource.NewEmptyResultError(input) + } + + return output, nil } func findBucketRegion(ctx context.Context, awsClient *conns.AWSClient, bucket string, optFns ...func(*s3.Options)) (string, error) { diff --git a/internal/service/s3/bucket_accelerate_configuration_test.go b/internal/service/s3/bucket_accelerate_configuration_test.go index a9cab3c4faec..627c35095648 100644 --- a/internal/service/s3/bucket_accelerate_configuration_test.go +++ b/internal/service/s3/bucket_accelerate_configuration_test.go @@ -260,7 +260,7 @@ resource "aws_s3_bucket_accelerate_configuration" "test" { } func testAccBucketAccelerateConfigurationConfig_directoryBucket(bucketName, status string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucketName), fmt.Sprintf(` + return 
acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucketName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_acl_test.go b/internal/service/s3/bucket_acl_test.go index cf1a0ce09191..a021c76a9f79 100644 --- a/internal/service/s3/bucket_acl_test.go +++ b/internal/service/s3/bucket_acl_test.go @@ -856,7 +856,7 @@ resource "aws_s3_bucket_acl" "test" { } func testAccBucketACLConfig_directoryBucket(rName, acl string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_analytics_configuration_test.go b/internal/service/s3/bucket_analytics_configuration_test.go index 0f1e236824ef..efa57e755ffc 100644 --- a/internal/service/s3/bucket_analytics_configuration_test.go +++ b/internal/service/s3/bucket_analytics_configuration_test.go @@ -745,7 +745,7 @@ resource "aws_s3_bucket" "destination" { } func testAccBucketAnalyticsConfigurationConfig_directoryBucket(bucket, name string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucket), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucket), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_cors_configuration_test.go b/internal/service/s3/bucket_cors_configuration_test.go index fb5cf08ce44a..d7d1f156b709 100644 --- a/internal/service/s3/bucket_cors_configuration_test.go +++ b/internal/service/s3/bucket_cors_configuration_test.go @@ -506,7 +506,7 @@ resource "aws_s3_bucket_cors_configuration" "test" { } func testAccBucketCORSConfigurationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return 
acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_data_source.go b/internal/service/s3/bucket_data_source.go index 995986316c4c..0df715035051 100644 --- a/internal/service/s3/bucket_data_source.go +++ b/internal/service/s3/bucket_data_source.go @@ -73,7 +73,7 @@ func dataSourceBucketRead(ctx context.Context, d *schema.ResourceData, meta inte optFns = append(optFns, func(o *s3.Options) { o.UseARNRegion = true }) } - err := findBucket(ctx, conn, bucket, optFns...) + _, err := findBucket(ctx, conn, bucket, optFns...) if err != nil { return sdkdiag.AppendErrorf(diags, "reading S3 Bucket (%s): %s", bucket, err) diff --git a/internal/service/s3/bucket_intelligent_tiering_configuration_test.go b/internal/service/s3/bucket_intelligent_tiering_configuration_test.go index fec44fdb5201..d8b28a83c932 100644 --- a/internal/service/s3/bucket_intelligent_tiering_configuration_test.go +++ b/internal/service/s3/bucket_intelligent_tiering_configuration_test.go @@ -432,7 +432,7 @@ resource "aws_s3_bucket" "test" { } func testAccBucketIntelligentTieringConfigurationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_inventory_test.go b/internal/service/s3/bucket_inventory_test.go index 6cae18fa45cb..14d5c52e5c12 100644 --- a/internal/service/s3/bucket_inventory_test.go +++ b/internal/service/s3/bucket_inventory_test.go @@ -311,7 +311,7 @@ resource "aws_s3_bucket_inventory" "test" { } func testAccBucketInventoryConfig_directoryBucket(bucketName, inventoryName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucketName), fmt.Sprintf(` + return 
acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucketName), fmt.Sprintf(` data "aws_caller_identity" "current" {} resource "aws_s3_directory_bucket" "test" { diff --git a/internal/service/s3/bucket_lifecycle_configuration_test.go b/internal/service/s3/bucket_lifecycle_configuration_test.go index 7ce6d4d1d7b7..e7401b5655db 100644 --- a/internal/service/s3/bucket_lifecycle_configuration_test.go +++ b/internal/service/s3/bucket_lifecycle_configuration_test.go @@ -2869,7 +2869,7 @@ resource "aws_s3_bucket_lifecycle_configuration" "test" { } func testAccBucketLifecycleConfigurationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_logging_test.go b/internal/service/s3/bucket_logging_test.go index 4544fd4b1585..10c38958ed0b 100644 --- a/internal/service/s3/bucket_logging_test.go +++ b/internal/service/s3/bucket_logging_test.go @@ -705,7 +705,7 @@ resource "aws_s3_bucket_logging" "test" { } func testAccBucketLoggingConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccBucketLoggingConfig_base(rName), testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccBucketLoggingConfig_base(rName), testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket location { diff --git a/internal/service/s3/bucket_metric_test.go b/internal/service/s3/bucket_metric_test.go index 45c4937b1a3b..3861549aff3a 100644 --- a/internal/service/s3/bucket_metric_test.go +++ b/internal/service/s3/bucket_metric_test.go @@ -904,7 +904,7 @@ resource "aws_s3_bucket_metric" "test" { } func testAccBucketMetricConfig_directoryBucket(bucketName, metricName string) string { - return 
acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucketName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucketName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_notification_test.go b/internal/service/s3/bucket_notification_test.go index c031c031b4e2..9dcdf6061c8b 100644 --- a/internal/service/s3/bucket_notification_test.go +++ b/internal/service/s3/bucket_notification_test.go @@ -776,7 +776,7 @@ resource "aws_s3_bucket_notification" "test" { } func testAccBucketNotificationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_object_lock_configuration_test.go b/internal/service/s3/bucket_object_lock_configuration_test.go index 7ad52a6df9c4..07b471783f84 100644 --- a/internal/service/s3/bucket_object_lock_configuration_test.go +++ b/internal/service/s3/bucket_object_lock_configuration_test.go @@ -345,7 +345,7 @@ resource "aws_s3_bucket_object_lock_configuration" "test" { } func testAccBucketObjectLockConfigurationConfig_directoryBucket(bucketName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucketName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucketName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_ownership_controls_test.go b/internal/service/s3/bucket_ownership_controls_test.go index 368154feb4a7..50c1dc57097c 100644 --- a/internal/service/s3/bucket_ownership_controls_test.go +++ b/internal/service/s3/bucket_ownership_controls_test.go @@ -217,7 +217,7 @@ resource "aws_s3_bucket_ownership_controls" "test" { } func 
testAccBucketOwnershipControlsConfig_directoryBucket(rName, objectOwnership string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_policy_test.go b/internal/service/s3/bucket_policy_test.go index 5f7a175cb7f9..13f7a7957893 100644 --- a/internal/service/s3/bucket_policy_test.go +++ b/internal/service/s3/bucket_policy_test.go @@ -944,7 +944,7 @@ resource "aws_s3_bucket_policy" "test" { } func testAccBucketPolicyConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` data "aws_partition" "current" {} data "aws_caller_identity" "current" {} diff --git a/internal/service/s3/bucket_public_access_block_test.go b/internal/service/s3/bucket_public_access_block_test.go index 486ccdc3d593..364f4b9860d4 100644 --- a/internal/service/s3/bucket_public_access_block_test.go +++ b/internal/service/s3/bucket_public_access_block_test.go @@ -360,7 +360,7 @@ resource "aws_s3_bucket_public_access_block" "test" { } func testAccBucketPublicAccessBlockConfig_directoryBucket(bucketName, blockPublicAcls, blockPublicPolicy, ignorePublicAcls, restrictPublicBuckets string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(bucketName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(bucketName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket location { diff --git a/internal/service/s3/bucket_replication_configuration_test.go b/internal/service/s3/bucket_replication_configuration_test.go index 03130f55cf3a..25c4be5ce93b 100644 --- a/internal/service/s3/bucket_replication_configuration_test.go +++ 
b/internal/service/s3/bucket_replication_configuration_test.go @@ -2441,7 +2441,7 @@ resource "aws_s3_bucket_replication_configuration" "test" { } func testAccBucketReplicationConfigurationConfig_directoryBucket(rName, storageClass string) string { - return acctest.ConfigCompose(testAccBucketReplicationConfigurationConfig_base(rName), testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccBucketReplicationConfigurationConfig_base(rName), testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket location { diff --git a/internal/service/s3/bucket_request_payment_configuration_test.go b/internal/service/s3/bucket_request_payment_configuration_test.go index 388b837103bc..13942725b240 100644 --- a/internal/service/s3/bucket_request_payment_configuration_test.go +++ b/internal/service/s3/bucket_request_payment_configuration_test.go @@ -267,7 +267,7 @@ resource "aws_s3_bucket_request_payment_configuration" "test" { } func testAccBucketRequestPaymentConfigurationConfig_directoryBucket(rName, payer string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_server_side_encryption_configuration_test.go b/internal/service/s3/bucket_server_side_encryption_configuration_test.go index 7d008bb2e73a..23493e08d38f 100644 --- a/internal/service/s3/bucket_server_side_encryption_configuration_test.go +++ b/internal/service/s3/bucket_server_side_encryption_configuration_test.go @@ -650,7 +650,7 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "test" { } func testAccBucketServerSideEncryptionConfigurationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), 
fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_test.go b/internal/service/s3/bucket_test.go index 684e4e009e14..56aa6735dbf3 100644 --- a/internal/service/s3/bucket_test.go +++ b/internal/service/s3/bucket_test.go @@ -2505,7 +2505,7 @@ func testAccCheckBucketDestroyWithProvider(ctx context.Context) acctest.TestChec // S3 seems to be highly eventually consistent. Even if one connection reports that the queue is gone, // another connection may still report it as present. _, err := tfresource.RetryUntilNotFound(ctx, tfs3.BucketPropagationTimeout, func() (interface{}, error) { - return nil, tfs3.FindBucket(ctx, conn, rs.Primary.ID) + return tfs3.FindBucket(ctx, conn, rs.Primary.ID) }) if errors.Is(err, tfresource.ErrFoundResource) { @@ -2536,7 +2536,9 @@ func testAccCheckBucketExistsWithProvider(ctx context.Context, n string, provide conn := providerF().Meta().(*conns.AWSClient).S3Client(ctx) - return tfs3.FindBucket(ctx, conn, rs.Primary.ID) + _, err := tfs3.FindBucket(ctx, conn, rs.Primary.ID) + + return err } } diff --git a/internal/service/s3/bucket_versioning_test.go b/internal/service/s3/bucket_versioning_test.go index 3ee786dff415..455d6549a75e 100644 --- a/internal/service/s3/bucket_versioning_test.go +++ b/internal/service/s3/bucket_versioning_test.go @@ -622,7 +622,7 @@ resource "aws_s3_bucket_versioning" "test" { } func testAccBucketVersioningConfig_directoryBucket(rName, status string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/bucket_website_configuration_test.go b/internal/service/s3/bucket_website_configuration_test.go index 824d87397eee..46068b74eee8 100644 
--- a/internal/service/s3/bucket_website_configuration_test.go +++ b/internal/service/s3/bucket_website_configuration_test.go @@ -941,7 +941,7 @@ resource "aws_s3_bucket_website_configuration" "test" { } func testAccBucketWebsiteConfigurationConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/directory_bucket.go b/internal/service/s3/directory_bucket.go index fe995ed53626..968357c32ed4 100644 --- a/internal/service/s3/directory_bucket.go +++ b/internal/service/s3/directory_bucket.go @@ -23,7 +23,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-provider-aws/internal/errs/fwdiag" "github.com/hashicorp/terraform-provider-aws/internal/framework" - "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" + fwflex "github.com/hashicorp/terraform-provider-aws/internal/framework/flex" fwtypes "github.com/hashicorp/terraform-provider-aws/internal/framework/types" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" "github.com/hashicorp/terraform-provider-aws/names" @@ -72,9 +72,9 @@ func (r *directoryBucketResource) Schema(ctx context.Context, request resource.S CustomType: dataRedundancyType, Optional: true, Computed: true, - Default: dataRedundancyType.AttributeDefault(awstypes.DataRedundancySingleAvailabilityZone), PlanModifiers: []planmodifier.String{ stringplanmodifier.RequiresReplace(), + directoryBucketDataRedundancyPlanModifier{}, }, }, names.AttrForceDestroy: schema.BoolAttribute{ @@ -140,14 +140,14 @@ func (r *directoryBucketResource) Create(ctx context.Context, request resource.C conn := r.Meta().S3ExpressClient(ctx) input := &s3.CreateBucketInput{ - Bucket: flex.StringFromFramework(ctx, data.Bucket), + Bucket: fwflex.StringFromFramework(ctx, 
data.Bucket), CreateBucketConfiguration: &awstypes.CreateBucketConfiguration{ Bucket: &awstypes.BucketInfo{ DataRedundancy: data.DataRedundancy.ValueEnum(), Type: awstypes.BucketType(data.Type.ValueString()), }, Location: &awstypes.LocationInfo{ - Name: flex.StringFromFramework(ctx, locationInfoData.Name), + Name: fwflex.StringFromFramework(ctx, locationInfoData.Name), Type: locationInfoData.Type.ValueEnum(), }, }, @@ -163,7 +163,7 @@ func (r *directoryBucketResource) Create(ctx context.Context, request resource.C // Set values for unknowns. data.ARN = types.StringValue(r.arn(ctx, data.Bucket.ValueString())) - data.setID() + data.ID = data.Bucket response.Diagnostics.Append(response.State.Set(ctx, &data)...) } @@ -175,15 +175,10 @@ func (r *directoryBucketResource) Read(ctx context.Context, request resource.Rea return } - if err := data.InitFromID(); err != nil { - response.Diagnostics.AddError("parsing resource ID", err.Error()) - - return - } - conn := r.Meta().S3ExpressClient(ctx) - err := findBucket(ctx, conn, data.Bucket.ValueString()) + data.Bucket = data.ID + output, err := findBucket(ctx, conn, data.Bucket.ValueString()) if tfresource.NotFound(err) { response.Diagnostics.Append(fwdiag.NewResourceNotFoundWarningDiagnostic(err)) @@ -200,15 +195,11 @@ func (r *directoryBucketResource) Read(ctx context.Context, request resource.Rea // Set attributes for import. data.ARN = types.StringValue(r.arn(ctx, data.Bucket.ValueString())) - - // No API to return bucket type, location etc. 
- data.DataRedundancy = fwtypes.StringEnumValue(awstypes.DataRedundancySingleAvailabilityZone) - if matches := directoryBucketNameRegex.FindStringSubmatch(data.ID.ValueString()); len(matches) == 3 { - data.Location = fwtypes.NewListNestedObjectValueOfPtrMust(ctx, &locationInfoModel{ - Name: flex.StringValueToFramework(ctx, matches[2]), - Type: fwtypes.StringEnumValue(awstypes.LocationTypeAvailabilityZone), - }) - } + data.DataRedundancy = fwtypes.StringEnumValue(defaultDirectoryBucketDataRedundancy(output.BucketLocationType)) + data.Location = fwtypes.NewListNestedObjectValueOfPtrMust(ctx, &locationInfoModel{ + Name: fwflex.StringToFramework(ctx, output.BucketLocationName), + Type: fwtypes.StringEnumValue(output.BucketLocationType), + }) data.Type = fwtypes.StringEnumValue(awstypes.BucketTypeDirectory) response.Diagnostics.Append(response.State.Set(ctx, &data)...) @@ -224,7 +215,7 @@ func (r *directoryBucketResource) Delete(ctx context.Context, request resource.D conn := r.Meta().S3ExpressClient(ctx) _, err := conn.DeleteBucket(ctx, &s3.DeleteBucketInput{ - Bucket: flex.StringFromFramework(ctx, data.ID), + Bucket: fwflex.StringFromFramework(ctx, data.ID), }) if tfawserr.ErrCodeEquals(err, errCodeBucketNotEmpty) { @@ -239,7 +230,7 @@ func (r *directoryBucketResource) Delete(ctx context.Context, request resource.D } _, err = conn.DeleteBucket(ctx, &s3.DeleteBucketInput{ - Bucket: flex.StringFromFramework(ctx, data.ID), + Bucket: fwflex.StringFromFramework(ctx, data.ID), }) } } @@ -270,16 +261,48 @@ type directoryBucketResourceModel struct { Type fwtypes.StringEnum[awstypes.BucketType] `tfsdk:"type"` } -func (data *directoryBucketResourceModel) InitFromID() error { - data.Bucket = data.ID - return nil +type locationInfoModel struct { + Name types.String `tfsdk:"name"` + Type fwtypes.StringEnum[awstypes.LocationType] `tfsdk:"type"` } -func (data *directoryBucketResourceModel) setID() { - data.ID = data.Bucket +func defaultDirectoryBucketDataRedundancy(locationType 
awstypes.LocationType) awstypes.DataRedundancy { + switch locationType { + case awstypes.LocationTypeLocalZone: + return awstypes.DataRedundancySingleLocalZone + default: + return awstypes.DataRedundancySingleAvailabilityZone + } } -type locationInfoModel struct { - Name types.String `tfsdk:"name"` - Type fwtypes.StringEnum[awstypes.LocationType] `tfsdk:"type"` +type directoryBucketDataRedundancyPlanModifier struct{} + +func (d directoryBucketDataRedundancyPlanModifier) Description(ctx context.Context) string { + return "Sets default value for data_redundancy based on location type value" +} + +func (d directoryBucketDataRedundancyPlanModifier) MarkdownDescription(ctx context.Context) string { + return d.Description(ctx) +} + +func (d directoryBucketDataRedundancyPlanModifier) PlanModifyString(ctx context.Context, request planmodifier.StringRequest, response *planmodifier.StringResponse) { + // Do nothing if there is a known planned value. + if !request.PlanValue.IsUnknown() { + return + } + + var data directoryBucketResourceModel + response.Diagnostics.Append(request.Plan.Get(ctx, &data)...) + if response.Diagnostics.HasError() { + return + } + + locationInfo, diags := data.Location.ToPtr(ctx) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + // Set the default value for data_redundancy based on the location type. 
+ response.PlanValue = fwflex.StringValueToFramework(ctx, defaultDirectoryBucketDataRedundancy(locationInfo.Type.ValueEnum())) } diff --git a/internal/service/s3/directory_bucket_test.go b/internal/service/s3/directory_bucket_test.go index 0743ac21d067..14bedc921907 100644 --- a/internal/service/s3/directory_bucket_test.go +++ b/internal/service/s3/directory_bucket_test.go @@ -11,8 +11,13 @@ import ( "github.com/YakDriver/regexache" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/knownvalue" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/statecheck" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfjsonpath" "github.com/hashicorp/terraform-provider-aws/internal/acctest" + tfknownvalue "github.com/hashicorp/terraform-provider-aws/internal/acctest/knownvalue" "github.com/hashicorp/terraform-provider-aws/internal/conns" tfs3 "github.com/hashicorp/terraform-provider-aws/internal/service/s3" "github.com/hashicorp/terraform-provider-aws/internal/tfresource" @@ -34,13 +39,26 @@ func TestAccS3DirectoryBucket_basic(t *testing.T) { Config: testAccDirectoryBucketConfig_basic(rName), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckDirectoryBucketExists(ctx, resourceName), - acctest.MatchResourceAttrRegionalARN(ctx, resourceName, names.AttrARN, "s3express", regexache.MustCompile(fmt.Sprintf(`bucket/%s--.*-x-s3`, rName))), - resource.TestCheckResourceAttr(resourceName, "data_redundancy", "SingleAvailabilityZone"), - resource.TestCheckResourceAttr(resourceName, "location.#", "1"), - resource.TestCheckResourceAttrSet(resourceName, "location.0.name"), - resource.TestCheckResourceAttr(resourceName, "location.0.type", "AvailabilityZone"), - resource.TestCheckResourceAttr(resourceName, names.AttrType, "Directory"), ), 
+ ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionCreate), + plancheck.ExpectKnownValue(resourceName, tfjsonpath.New("data_redundancy"), knownvalue.StringExact("SingleAvailabilityZone")), + }, + }, + ConfigStateChecks: []statecheck.StateCheck{ + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New(names.AttrARN), tfknownvalue.RegionalARNRegexp("s3express", regexache.MustCompile(`bucket/.+--x-s3`))), + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New(names.AttrBucket), knownvalue.StringRegexp(tfs3.DirectoryBucketNameRegex)), + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New("data_redundancy"), knownvalue.StringExact("SingleAvailabilityZone")), + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New(names.AttrForceDestroy), knownvalue.Bool(false)), + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New(names.AttrLocation), knownvalue.ListExact([]knownvalue.Check{ + knownvalue.ObjectExact(map[string]knownvalue.Check{ + names.AttrName: knownvalue.NotNull(), + names.AttrType: knownvalue.StringExact("AvailabilityZone"), + }), + })), + statecheck.ExpectKnownValue(resourceName, tfjsonpath.New(names.AttrType), knownvalue.StringExact("Directory")), + }, }, { ResourceName: resourceName, @@ -119,6 +137,87 @@ func TestAccS3DirectoryBucket_forceDestroyWithUnusualKeyBytes(t *testing.T) { }) } +func TestAccS3DirectoryBucket_defaultDataRedundancy(t *testing.T) { + ctx := acctest.Context(t) + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_s3_directory_bucket.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.S3ServiceID), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + CheckDestroy: testAccCheckDirectoryBucketDestroy(ctx), + Steps: []resource.TestStep{ + { + Config: 
testAccDirectoryBucketConfig_defaultDataRedundancy(rName, "AvailabilityZone"), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckDirectoryBucketExists(ctx, resourceName), + ), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionCreate), + plancheck.ExpectKnownValue(resourceName, tfjsonpath.New("data_redundancy"), knownvalue.StringExact("SingleAvailabilityZone")), + }, + }, + }, + { + Config: testAccDirectoryBucketConfig_defaultDataRedundancy(rName, "LocalZone"), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionReplace), + plancheck.ExpectKnownValue(resourceName, tfjsonpath.New("data_redundancy"), knownvalue.StringExact("SingleLocalZone")), + }, + }, + ExpectError: regexache.MustCompile(`InvalidRequest: Invalid Data Redundancy value`), + }, + }, + }) +} + +func TestAccS3DirectoryBucket_upgradeDefaultDataRedundancy(t *testing.T) { + ctx := acctest.Context(t) + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + resourceName := "aws_s3_directory_bucket.test" + + resource.ParallelTest(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(ctx, t) }, + ErrorCheck: acctest.ErrorCheck(t, names.S3ServiceID), + CheckDestroy: testAccCheckDirectoryBucketDestroy(ctx), + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "aws": { + Source: "hashicorp/aws", + VersionConstraint: "5.88.0", + }, + }, + Config: testAccDirectoryBucketConfig_basic(rName), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckDirectoryBucketExists(ctx, resourceName), + ), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionCreate), + plancheck.ExpectKnownValue(resourceName, tfjsonpath.New("data_redundancy"), 
knownvalue.StringExact("SingleAvailabilityZone")), + }, + }, + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, + Config: testAccDirectoryBucketConfig_basic(rName), + Check: resource.ComposeAggregateTestCheckFunc( + testAccCheckDirectoryBucketExists(ctx, resourceName), + ), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(resourceName, plancheck.ResourceActionNoop), + }, + }, + }, + }, + }) +} + func testAccCheckDirectoryBucketDestroy(ctx context.Context) resource.TestCheckFunc { return func(s *terraform.State) error { conn := acctest.Provider.Meta().(*conns.AWSClient).S3ExpressClient(ctx) @@ -128,7 +227,7 @@ func testAccCheckDirectoryBucketDestroy(ctx context.Context) resource.TestCheckF continue } - err := tfs3.FindBucket(ctx, conn, rs.Primary.ID) + _, err := tfs3.FindBucket(ctx, conn, rs.Primary.ID) if tfresource.NotFound(err) { continue @@ -138,7 +237,7 @@ func testAccCheckDirectoryBucketDestroy(ctx context.Context) resource.TestCheckF return err } - return fmt.Errorf("S3 Bucket %s still exists", rs.Primary.ID) + return fmt.Errorf("S3 Directory Bucket %s still exists", rs.Primary.ID) } return nil @@ -154,17 +253,19 @@ func testAccCheckDirectoryBucketExists(ctx context.Context, n string) resource.T conn := acctest.Provider.Meta().(*conns.AWSClient).S3ExpressClient(ctx) - return tfs3.FindBucket(ctx, conn, rs.Primary.ID) + _, err := tfs3.FindBucket(ctx, conn, rs.Primary.ID) + + return err } } -func testAccConfigAvailableAZsDirectoryBucket() string { - // https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-express-networking.html#s3-express-endpoints. - return acctest.ConfigAvailableAZsNoOptInExclude("use1-az1", "use1-az2", "use1-az3", "usw2-az2", "apne1-az2") +func testAccConfigDirectoryBucket_availableAZs() string { + // https://docs.aws.amazon.com/AmazonS3/latest/userguide/directory-bucket-az-networking.html#s3-express-endpoints-az. 
+ return acctest.ConfigAvailableAZsNoOptInExclude("use1-az1", "use1-az2", "use1-az3", "use2-az2", "usw2-az2", "aps1-az3", "apne1-az2", "euw1-az2") } -func testAccDirectoryBucketConfig_base(rName string) string { - return acctest.ConfigCompose(testAccConfigAvailableAZsDirectoryBucket(), fmt.Sprintf(` +func testAccDirectoryBucketConfig_baseAZ(rName string) string { + return acctest.ConfigCompose(testAccConfigDirectoryBucket_availableAZs(), fmt.Sprintf(` locals { location_name = data.aws_availability_zones.available.zone_ids[0] bucket = "%[1]s--${local.location_name}--x-s3" @@ -173,7 +274,7 @@ locals { } func testAccDirectoryBucketConfig_basic(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket @@ -185,7 +286,7 @@ resource "aws_s3_directory_bucket" "test" { } func testAccDirectoryBucketConfig_forceDestroy(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket @@ -199,7 +300,7 @@ resource "aws_s3_directory_bucket" "test" { } func testAccDirectoryBucketConfig_forceDestroyUnusualKeyBytes(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket @@ -211,3 +312,16 @@ resource "aws_s3_directory_bucket" "test" { } `) } + +func testAccDirectoryBucketConfig_defaultDataRedundancy(rName, locationType string) string { + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` +resource "aws_s3_directory_bucket" "test" { + bucket = local.bucket + + location { + name = local.location_name + type = %[1]q + 
} +} +`, locationType)) +} diff --git a/internal/service/s3/directory_buckets_data_source_test.go b/internal/service/s3/directory_buckets_data_source_test.go index e41cc7e0035b..17661a6f216b 100644 --- a/internal/service/s3/directory_buckets_data_source_test.go +++ b/internal/service/s3/directory_buckets_data_source_test.go @@ -35,7 +35,7 @@ func TestAccS3DirectoryBucketsDataSource_basic(t *testing.T) { } func testAccDirectoryBucketsDataSourceConfig_basic(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/exports_test.go b/internal/service/s3/exports_test.go index 1876395cda6e..e895491e8ed0 100644 --- a/internal/service/s3/exports_test.go +++ b/internal/service/s3/exports_test.go @@ -74,6 +74,8 @@ var ( CreateResourceID = createResourceID ParseResourceID = parseResourceID + + DirectoryBucketNameRegex = directoryBucketNameRegex ) type ( diff --git a/internal/service/s3/object_copy_test.go b/internal/service/s3/object_copy_test.go index 47c663e34626..73e7f9e76664 100644 --- a/internal/service/s3/object_copy_test.go +++ b/internal/service/s3/object_copy_test.go @@ -1023,7 +1023,7 @@ resource "aws_s3_object_copy" "test" { } func testAccObjectCopyConfig_directoryBucket(sourceBucket, sourceKey, targetBucket, targetKey string) string { - return acctest.ConfigCompose(testAccConfigAvailableAZsDirectoryBucket(), fmt.Sprintf(` + return acctest.ConfigCompose(testAccConfigDirectoryBucket_availableAZs(), fmt.Sprintf(` locals { location_name = data.aws_availability_zones.available.zone_ids[0] source_bucket = "%[1]s--${local.location_name}--x-s3" diff --git a/internal/service/s3/object_data_source_test.go b/internal/service/s3/object_data_source_test.go index 315735a42e15..6af757d60e78 100644 --- a/internal/service/s3/object_data_source_test.go +++ 
b/internal/service/s3/object_data_source_test.go @@ -1030,7 +1030,7 @@ data "aws_s3_object" "test" { } func testAccObjectDataSourceConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/object_test.go b/internal/service/s3/object_test.go index 6512de29b67c..eec2699b5557 100644 --- a/internal/service/s3/object_test.go +++ b/internal/service/s3/object_test.go @@ -3106,7 +3106,7 @@ resource "aws_s3_object" "object" { } func testAccObjectConfig_directoryBucket(rName string) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), ` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), ` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/internal/service/s3/objects_data_source_test.go b/internal/service/s3/objects_data_source_test.go index 7e5e486d5a93..9d0700ff4b23 100644 --- a/internal/service/s3/objects_data_source_test.go +++ b/internal/service/s3/objects_data_source_test.go @@ -383,7 +383,7 @@ data "aws_s3_objects" "test" { } func testAccObjectsDataSourceConfig_directoryBucket(rName string, n int) string { - return acctest.ConfigCompose(testAccDirectoryBucketConfig_base(rName), fmt.Sprintf(` + return acctest.ConfigCompose(testAccDirectoryBucketConfig_baseAZ(rName), fmt.Sprintf(` resource "aws_s3_directory_bucket" "test" { bucket = local.bucket diff --git a/website/docs/r/s3_directory_bucket.html.markdown b/website/docs/r/s3_directory_bucket.html.markdown index aa9622b652b5..41e8816f6202 100644 --- a/website/docs/r/s3_directory_bucket.html.markdown +++ b/website/docs/r/s3_directory_bucket.html.markdown @@ -12,6 +12,8 @@ Provides an Amazon S3 Express directory bucket resource. 
## Example Usage +### Availability Zone + ```terraform resource "aws_s3_directory_bucket" "example" { bucket = "example--usw2-az1--x-s3" @@ -22,12 +24,25 @@ resource "aws_s3_directory_bucket" "example" { } ``` +### Dedicated Local Zone + +```terraform +resource "aws_s3_directory_bucket" "example_local_zone" { + bucket = "example--usw2-xxx-lz1--x-s3" + + location { + name = "usw2-xxx-lz1" # LocalZone ID + type = "LocalZone" + } +} +``` + ## Argument Reference This resource supports the following arguments: * `bucket` - (Required) Name of the bucket. The name must be in the format `[bucket_name]--[azid]--x-s3`. Use the [`aws_s3_bucket`](s3_bucket.html) resource to manage general purpose buckets. -* `data_redundancy` - (Optional, Default:`SingleAvailabilityZone`) Data redundancy. Valid values: `SingleAvailabilityZone`. +* `data_redundancy` - (Optional) Data redundancy. Valid values: `SingleAvailabilityZone`, `SingleLocalZone`. Defaults to `SingleAvailabilityZone` if `location.type` is `AvailabilityZone` and to `SingleLocalZone` if `location.type` is `LocalZone`. * `force_destroy` - (Optional, Default:`false`) Boolean that indicates all objects should be deleted from the bucket *when the bucket is destroyed* so that the bucket can be destroyed without error. These objects are *not* recoverable. This only deletes objects when the bucket is destroyed, *not* when setting this parameter to `true`. Once this parameter is set to `true`, there must be a successful `terraform apply` run before a destroy is required to update this value in the resource state. Without a successful `terraform apply` after this parameter is set, this flag will have no effect. If setting this field in the same operation that would require replacing the bucket or destroying the bucket, this flag will not work. Additionally when importing a bucket, a successful `terraform apply` is required to set this value in state before it will take effect on a destroy operation. * `location` - (Required) Bucket location. See [Location](#location) below for more details.
* `type` - (Optional, Default:`Directory`) Bucket type. Valid values: `Directory`.