diff --git a/.changelog/#####.txt b/.changelog/#####.txt new file mode 100644 index 00000000000..427c147d3d9 --- /dev/null +++ b/.changelog/#####.txt @@ -0,0 +1,11 @@ +```release-note:enhancement +data-source/aws_s3_objects: Add `request_payer` argument and `request_charged` attribute +``` + +```release-note:enhancement +data-source/aws_s3_objects: Add plan-time validation of `encoding_type` +``` + +```release-note:bug +data-source/aws_s3_objects: Respect configured `max_keys` value if it's greater than `1000` +``` \ No newline at end of file diff --git a/go.mod b/go.mod index 75cb53fc3d6..786fb465fdf 100644 --- a/go.mod +++ b/go.mod @@ -52,6 +52,7 @@ require ( github.com/aws/aws-sdk-go-v2/service/resourceexplorer2 v1.3.5 github.com/aws/aws-sdk-go-v2/service/rolesanywhere v1.3.5 github.com/aws/aws-sdk-go-v2/service/route53domains v1.17.3 + github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5 github.com/aws/aws-sdk-go-v2/service/s3control v1.33.0 github.com/aws/aws-sdk-go-v2/service/scheduler v1.2.5 github.com/aws/aws-sdk-go-v2/service/securitylake v1.7.0 @@ -117,7 +118,10 @@ require ( github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.3.39 // indirect + github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4 // indirect github.com/aws/aws-sdk-go-v2/service/iam v1.22.5 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14 // indirect + github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36 // indirect github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.7.35 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4 // indirect diff --git a/go.sum b/go.sum index 3e41821ba98..77cd548d5fe 100644 --- a/go.sum +++ b/go.sum @@ -44,6 +44,8 @@ github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 h1:SijA0mgjV8E+8G45lt github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35/go.mod h1:SJC1nEVVva1g3pHAIdCp7QsRIkMmLAgoDquQ9Rr8kYw= github.com/aws/aws-sdk-go-v2/internal/ini v1.3.39 h1:fc0ukRAiP1syoSGZYu+DaE+FulSYhTiJ8WpVu5jElU4= github.com/aws/aws-sdk-go-v2/internal/ini v1.3.39/go.mod h1:WLAW8PT7+JhjZfLSWe7WEJaJu0GNo0cKc2Zyo003RBs= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4 h1:6lJvvkQ9HmbHZ4h/IEwclwv2mrTW8Uq1SOB/kXy0mfw= +github.com/aws/aws-sdk-go-v2/internal/v4a v1.1.4/go.mod h1:1PrKYwxTM+zjpw9Y41KFtoJCQrJ34Z47Y4VgVbfndjo= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.20.5 h1:1w0ELQMC3AptxEFS4A+vJuhyIuC9IoNN2YxNKK5pSYQ= github.com/aws/aws-sdk-go-v2/service/accessanalyzer v1.20.5/go.mod h1:zwKhX2c7u7XDz2ToVE+qunfyoy9+3AO0rZynN5TwXCc= github.com/aws/aws-sdk-go-v2/service/account v1.11.5 h1:UX7HDdPZwTmrr1zu1j8e9QNINZS2YSJ+DoxhnnPyJY8= @@ -92,6 +94,10 @@ github.com/aws/aws-sdk-go-v2/service/identitystore v1.17.6 h1:1+CSnP3TCGEnv6D12I github.com/aws/aws-sdk-go-v2/service/identitystore v1.17.6/go.mod h1:uP4598oNnSTY5AClqIoK6QHQnwz7cuRS8CBkVMXuxOU= github.com/aws/aws-sdk-go-v2/service/inspector2 v1.16.6 h1:HhLDyWzcq1QAQM9/D6r49CA1NX7mSuE77XruZ/GM0tI= github.com/aws/aws-sdk-go-v2/service/inspector2 v1.16.6/go.mod h1:ZThso1NAB0Pt7ZHiE8QjGxZsdSq3yE3IHTO8DSsIj0Y= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14 h1:m0QTSI6pZYJTk5WSKx3fm5cNW/DCicVzULBgU/6IyD0= +github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.14/go.mod h1:dDilntgHy9WnHXsh7dDtUPgHKEfTJIBUTHM8OWm0f/0= 
+github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36 h1:eev2yZX7esGRjqRbnVk1UxMLw4CyVZDpZXRCcy75oQk= +github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36/go.mod h1:lGnOkH9NJATw0XEPcAknFBj3zzNTEGRHtSw+CwC1YTg= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.7.35 h1:UKjpIDLVF90RfV88XurdduMoTxPqtGHZMIDYZQM7RO4= github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.7.35/go.mod h1:B3dUg0V6eJesUTi+m27NUkj7n8hdDKYUpxj8f4+TqaQ= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.32/go.mod h1:4jwAWKEkCR0anWk5+1RbfSg1R5Gzld7NLiuaq5bTR/Y= @@ -141,6 +147,8 @@ github.com/aws/aws-sdk-go-v2/service/rolesanywhere v1.3.5 h1:tfmJZFDrma1cgraLRuE github.com/aws/aws-sdk-go-v2/service/rolesanywhere v1.3.5/go.mod h1:vXPkNV5GGPdMjRRNzO45nX3qsNTgB5lP19Tk4Go30xQ= github.com/aws/aws-sdk-go-v2/service/route53domains v1.17.3 h1:aaHlZb06fyEQ3uqEVJiN3hLt8syCzX+tWZiz40S4c0Y= github.com/aws/aws-sdk-go-v2/service/route53domains v1.17.3/go.mod h1:SK+5R1cYgVgSfBGi9T/gPGNIuLInF3eIRYNruia62rg= +github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5 h1:A42xdtStObqy7NGvzZKpnyNXvoOmm+FENobZ0/ssHWk= +github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5/go.mod h1:rDGMZA7f4pbmTtPOk5v5UM2lmX6UAbRnMDJeDvnH7AM= github.com/aws/aws-sdk-go-v2/service/s3control v1.33.0 h1:f4qHghGTcns4L4F7u8AHH6pcVLwgtTMNkNZeRJZ5xlA= github.com/aws/aws-sdk-go-v2/service/s3control v1.33.0/go.mod h1:YSdqo9knBVm5H3JVmWDhx9Wts9828nColUJzL3OKXDk= github.com/aws/aws-sdk-go-v2/service/scheduler v1.2.5 h1:AGRPn7Hef59Eb9zfXjf6MGn0xRPpO73dIV8u8pfo5Z8= diff --git a/internal/conns/awsclient_gen.go b/internal/conns/awsclient_gen.go index f37ed2fc46d..e22e23c5868 100644 --- a/internal/conns/awsclient_gen.go +++ b/internal/conns/awsclient_gen.go @@ -48,6 +48,7 @@ import ( resourceexplorer2_sdkv2 "github.com/aws/aws-sdk-go-v2/service/resourceexplorer2" rolesanywhere_sdkv2 "github.com/aws/aws-sdk-go-v2/service/rolesanywhere" route53domains_sdkv2 "github.com/aws/aws-sdk-go-v2/service/route53domains" + s3_sdkv2 "github.com/aws/aws-sdk-go-v2/service/s3" s3control_sdkv2 "github.com/aws/aws-sdk-go-v2/service/s3control" scheduler_sdkv2 "github.com/aws/aws-sdk-go-v2/service/scheduler" securitylake_sdkv2 "github.com/aws/aws-sdk-go-v2/service/securitylake" @@ -909,6 +910,10 @@ func (c *AWSClient) S3Conn(ctx context.Context) *s3_sdkv1.S3 { return errs.Must(conn[*s3_sdkv1.S3](ctx, c, names.S3)) } +func (c *AWSClient) S3Client(ctx context.Context) *s3_sdkv2.Client { + return errs.Must(client[*s3_sdkv2.Client](ctx, c, names.S3)) +} + func (c *AWSClient) S3ControlConn(ctx context.Context) *s3control_sdkv1.S3Control { return errs.Must(conn[*s3control_sdkv1.S3Control](ctx, c, names.S3Control)) } diff --git a/internal/service/s3/bucket_objects_data_source_test.go b/internal/service/s3/bucket_objects_data_source_test.go index 71a952321d8..3123c131b12 100644 --- a/internal/service/s3/bucket_objects_data_source_test.go +++ b/internal/service/s3/bucket_objects_data_source_test.go @@ -34,7 +34,6 @@ func TestAccS3BucketObjectsDataSource_basic(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_basic(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/navajo/north_window"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/sand_dune"), @@ -61,7 +60,6 @@ func 
TestAccS3BucketObjectsDataSource_basicViaAccessPoint(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_basicViaAccessPoint(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/navajo/north_window"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/sand_dune"), @@ -88,7 +86,6 @@ func TestAccS3BucketObjectsDataSource_all(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_all(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "7"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/courthouse_towers/landscape"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/north_window"), @@ -120,7 +117,6 @@ func TestAccS3BucketObjectsDataSource_prefixes(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_prefixes(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "1"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/rubicon"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.#", "4"), @@ -151,7 +147,6 @@ func TestAccS3BucketObjectsDataSource_encoded(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_encoded(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/ru+b+ic+on"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/rubicon"), @@ -178,7 +173,6 @@ func TestAccS3BucketObjectsDataSource_maxKeys(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_maxKeys(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/courthouse_towers/landscape"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/north_window"), @@ -205,7 +199,6 @@ func TestAccS3BucketObjectsDataSource_startAfter(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_startAfter(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "1"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/three_gossips/turret"), ), @@ -231,7 +224,6 @@ func TestAccS3BucketObjectsDataSource_fetchOwner(t *testing.T) { { Config: testAccBucketObjectsDataSourceConfig_owners(rInt), Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "owners.#", "2"), ), diff --git a/internal/service/s3/canonical_user_id_data_source.go b/internal/service/s3/canonical_user_id_data_source.go index 6d083a467ea..ea633c69b22 
100644 --- a/internal/service/s3/canonical_user_id_data_source.go +++ b/internal/service/s3/canonical_user_id_data_source.go @@ -5,10 +5,9 @@ package s3 import ( "context" - "log" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/s3" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-aws/internal/conns" @@ -16,7 +15,7 @@ import ( ) // @SDKDataSource("aws_canonical_user_id") -func DataSourceCanonicalUserID() *schema.Resource { +func dataSourceCanonicalUserID() *schema.Resource { return &schema.Resource{ ReadWithoutTimeout: dataSourceCanonicalUserIDRead, @@ -31,21 +30,20 @@ func DataSourceCanonicalUserID() *schema.Resource { func dataSourceCanonicalUserIDRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).S3Conn(ctx) + conn := meta.(*conns.AWSClient).S3Client(ctx) - log.Printf("[DEBUG] Reading S3 Buckets") + output, err := conn.ListBuckets(ctx, &s3.ListBucketsInput{}) - req := &s3.ListBucketsInput{} - resp, err := conn.ListBucketsWithContext(ctx, req) if err != nil { return sdkdiag.AppendErrorf(diags, "listing S3 Buckets: %s", err) } - if resp == nil || resp.Owner == nil { - return sdkdiag.AppendErrorf(diags, "no canonical user ID found") + + if output == nil || output.Owner == nil { + return sdkdiag.AppendErrorf(diags, "S3 Canonical User ID not found") } - d.SetId(aws.StringValue(resp.Owner.ID)) - d.Set("display_name", resp.Owner.DisplayName) + d.SetId(aws.ToString(output.Owner.ID)) + d.Set("display_name", output.Owner.DisplayName) return diags } diff --git a/internal/service/s3/canonical_user_id_data_source_test.go b/internal/service/s3/canonical_user_id_data_source_test.go index c0da297edb8..451583c90c9 100644 --- a/internal/service/s3/canonical_user_id_data_source_test.go +++ b/internal/service/s3/canonical_user_id_data_source_test.go @@ -4,50 +4,33 @@ package s3_test import ( - "fmt" "testing" - "github.com/aws/aws-sdk-go/service/s3" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccS3CanonicalUserIDDataSource_basic(t *testing.T) { ctx := acctest.Context(t) + dataSourceName := "data.aws_canonical_user_id.test" + resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, Steps: []resource.TestStep{ { Config: testAccCanonicalUserIDDataSourceConfig_basic, - Check: resource.ComposeTestCheckFunc( - testAccCanonicalUserIdCheckExistsDataSource("data.aws_canonical_user_id.current"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet(dataSourceName, "display_name"), + resource.TestCheckResourceAttrSet(dataSourceName, "id"), ), }, }, }) } -func testAccCanonicalUserIdCheckExistsDataSource(name string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[name] - if !ok { - return fmt.Errorf("Can't find Canonical User ID resource: %s", name) - } - - if rs.Primary.Attributes["id"] == "" { - return fmt.Errorf("Missing 
Canonical User ID") - } - if rs.Primary.Attributes["display_name"] == "" { - return fmt.Errorf("Missing Display Name") - } - - return nil - } -} - const testAccCanonicalUserIDDataSourceConfig_basic = ` -data "aws_canonical_user_id" "current" {} +data "aws_canonical_user_id" "test" {} ` diff --git a/internal/service/s3/objects_data_source.go b/internal/service/s3/objects_data_source.go index fb754992a07..192b4ea63b2 100644 --- a/internal/service/s3/objects_data_source.go +++ b/internal/service/s3/objects_data_source.go @@ -6,11 +6,13 @@ package s3 import ( "context" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/service/s3" + "github.com/aws/aws-sdk-go-v2/service/s3/types" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-aws/internal/conns" + "github.com/hashicorp/terraform-provider-aws/internal/enum" "github.com/hashicorp/terraform-provider-aws/internal/errs/sdkdiag" ) @@ -26,26 +28,19 @@ func DataSourceObjects() *schema.Resource { Type: schema.TypeString, Required: true, }, - "prefix": { - Type: schema.TypeString, - Optional: true, + "common_prefixes": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, }, "delimiter": { Type: schema.TypeString, Optional: true, }, "encoding_type": { - Type: schema.TypeString, - Optional: true, - }, - "max_keys": { - Type: schema.TypeInt, - Optional: true, - Default: 1000, - }, - "start_after": { - Type: schema.TypeString, - Optional: true, + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.EncodingType](), }, "fetch_owner": { Type: schema.TypeBool, @@ -56,102 +51,117 @@ func DataSourceObjects() *schema.Resource { Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, }, - "common_prefixes": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, + "max_keys": { + Type: schema.TypeInt, + Optional: true, + Default: 1000, }, "owners": { Type: schema.TypeList, Computed: true, Elem: &schema.Schema{Type: schema.TypeString}, }, + "prefix": { + Type: schema.TypeString, + Optional: true, + }, + "request_charged": { + Type: schema.TypeString, + Computed: true, + }, + "request_payer": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: enum.Validate[types.RequestPayer](), + }, + "start_after": { + Type: schema.TypeString, + Optional: true, + }, }, } } func dataSourceObjectsRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { var diags diag.Diagnostics - conn := meta.(*conns.AWSClient).S3Conn(ctx) + conn := meta.(*conns.AWSClient).S3Client(ctx) bucket := d.Get("bucket").(string) - prefix := d.Get("prefix").(string) - - listInput := s3.ListObjectsV2Input{ + input := &s3.ListObjectsV2Input{ Bucket: aws.String(bucket), } - if prefix != "" { - listInput.Prefix = aws.String(prefix) + if v, ok := d.GetOk("delimiter"); ok { + input.Delimiter = aws.String(v.(string)) } - if s, ok := d.GetOk("delimiter"); ok { - listInput.Delimiter = aws.String(s.(string)) + if v, ok := d.GetOk("encoding_type"); ok { + input.EncodingType = types.EncodingType(v.(string)) } - if s, ok := d.GetOk("encoding_type"); ok { - listInput.EncodingType = aws.String(s.(string)) + if v, ok := d.GetOk("fetch_owner"); ok { + input.FetchOwner = v.(bool) } - // "listInput.MaxKeys" refers to max keys returned in a single request - // (i.e., page 
size), not the total number of keys returned if you page - // through the results. "maxKeys" does refer to total keys returned. + // "input.MaxKeys" refers to max keys returned in a single request + // (i.e. page size), not the total number of keys returned if you page + // through the results. "max_keys" does refer to total keys returned. maxKeys := int64(d.Get("max_keys").(int)) if maxKeys <= keyRequestPageSize { - listInput.MaxKeys = aws.Int64(maxKeys) + input.MaxKeys = int32(maxKeys) } - if s, ok := d.GetOk("start_after"); ok { - listInput.StartAfter = aws.String(s.(string)) + if v, ok := d.GetOk("prefix"); ok { + input.Prefix = aws.String(v.(string)) } - if b, ok := d.GetOk("fetch_owner"); ok { - listInput.FetchOwner = aws.Bool(b.(bool)) + if v, ok := d.GetOk("request_payer"); ok { + input.RequestPayer = types.RequestPayer(v.(string)) } - var commonPrefixes []string - var keys []string - var owners []string + if v, ok := d.GetOk("start_after"); ok { + input.StartAfter = aws.String(v.(string)) + } - err := conn.ListObjectsV2PagesWithContext(ctx, &listInput, func(page *s3.ListObjectsV2Output, lastPage bool) bool { - for _, commonPrefix := range page.CommonPrefixes { - commonPrefixes = append(commonPrefixes, aws.StringValue(commonPrefix.Prefix)) - } + var nKeys int64 + var commonPrefixes, keys, owners []string + var requestCharged string - for _, object := range page.Contents { - keys = append(keys, aws.StringValue(object.Key)) + pages := s3.NewListObjectsV2Paginator(conn, input) +pageLoop: + for pages.HasMorePages() { + page, err := pages.NextPage(ctx) - if object.Owner != nil { - owners = append(owners, aws.StringValue(object.Owner.ID)) - } + if err != nil { + return sdkdiag.AppendErrorf(diags, "listing S3 Bucket (%s) Objects: %s", bucket, err) } - maxKeys = maxKeys - aws.Int64Value(page.KeyCount) + requestCharged = string(page.RequestCharged) - if maxKeys <= keyRequestPageSize { - listInput.MaxKeys = aws.Int64(maxKeys) + for _, v := range page.CommonPrefixes { + commonPrefixes = append(commonPrefixes, aws.ToString(v.Prefix)) } - return !lastPage - }) - - if err != nil { - return sdkdiag.AppendErrorf(diags, "listing S3 Bucket (%s) Objects: %s", bucket, err) - } + for _, v := range page.Contents { + if nKeys >= maxKeys { + break pageLoop + } - d.SetId(bucket) + keys = append(keys, aws.ToString(v.Key)) - if err := d.Set("common_prefixes", commonPrefixes); err != nil { - return sdkdiag.AppendErrorf(diags, "setting common_prefixes: %s", err) - } + if v := v.Owner; v != nil { + owners = append(owners, aws.ToString(v.ID)) + } - if err := d.Set("keys", keys); err != nil { - return sdkdiag.AppendErrorf(diags, "setting keys: %s", err) + nKeys++ + } } - if err := d.Set("owners", owners); err != nil { - return sdkdiag.AppendErrorf(diags, "setting owners: %s", err) - } + d.SetId(bucket) + d.Set("common_prefixes", commonPrefixes) + d.Set("keys", keys) + d.Set("owners", owners) + d.Set("request_charged", requestCharged) return diags } diff --git a/internal/service/s3/objects_data_source_test.go b/internal/service/s3/objects_data_source_test.go index ae9c46b493d..52ee0abfafc 100644 --- a/internal/service/s3/objects_data_source_test.go +++ b/internal/service/s3/objects_data_source_test.go @@ -7,34 +7,30 @@ import ( "fmt" "testing" - "github.com/aws/aws-sdk-go/service/s3" sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" 
"github.com/hashicorp/terraform-provider-aws/internal/acctest" + "github.com/hashicorp/terraform-provider-aws/names" ) func TestAccS3ObjectsDataSource_basic(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_basic(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/navajo/north_window"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/sand_dune"), + Config: testAccObjectsDataSourceConfig_basic(rName, 1), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "3"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "request_charged", ""), ), }, }, @@ -43,142 +39,135 @@ func TestAccS3ObjectsDataSource_basic(t *testing.T) { func TestAccS3ObjectsDataSource_basicViaAccessPoint(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resourcesPlusAccessPoint(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_basicViaAccessPoint(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/navajo/north_window"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/sand_dune"), + Config: testAccObjectsDataSourceConfig_basicViaAccessPoint(rName, 1), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "3"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, }) } -func TestAccS3ObjectsDataSource_all(t *testing.T) { +func TestAccS3ObjectsDataSource_prefixes(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) 
}, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_all(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "7"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/courthouse_towers/landscape"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/navajo/north_window"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.2", "arch/navajo/sand_dune"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.3", "arch/partition/park_avenue"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.4", "arch/rubicon"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.5", "arch/three_gossips/broken"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.6", "arch/three_gossips/turret"), + Config: testAccObjectsDataSourceConfig_prefixes(rName, 1), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "2"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, }) } -func TestAccS3ObjectsDataSource_prefixes(t *testing.T) { +func TestAccS3ObjectsDataSource_encoded(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_prefixes(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "1"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/rubicon"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.#", "4"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.0", "arch/courthouse_towers/"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.1", "arch/navajo/"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.2", "arch/partition/"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "common_prefixes.3", "arch/three_gossips/"), + Config: testAccObjectsDataSourceConfig_encoded(rName), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "1"), + resource.TestCheckResourceAttr(dataSourceName, "keys.0", "prefix/a+b"), + 
resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, }) } -func TestAccS3ObjectsDataSource_encoded(t *testing.T) { +func TestAccS3ObjectsDataSource_maxKeysSmall(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_extraResource(rInt), // NOTE: contains no data source - // Does not need Check + Config: testAccObjectsDataSourceConfig_maxKeysSmall(rName, 1, 5), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "3"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), + ), }, { - Config: testAccObjectsDataSourceConfig_encoded(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/ru+b+ic+on"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", "arch/rubicon"), + Config: testAccObjectsDataSourceConfig_maxKeysSmall(rName, 2, 5), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "5"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, }) } -func TestAccS3ObjectsDataSource_maxKeys(t *testing.T) { +func TestAccS3ObjectsDataSource_maxKeysLarge(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" + var keys []string + for i := 0; i < 1500; i++ { + keys = append(keys, fmt.Sprintf("data%d", i)) + } resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check + Config: testAccObjectsDataSourceConfig_maxKeysLarge(rName, 1002), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), + testAccCheckBucketAddObjects(ctx, "aws_s3_bucket.test", keys...), + ), }, { - Config: testAccObjectsDataSourceConfig_maxKeys(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/courthouse_towers/landscape"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.1", 
"arch/navajo/north_window"), + Config: testAccObjectsDataSourceConfig_maxKeysLarge(rName, 1002), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "1002"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, @@ -187,24 +176,21 @@ func TestAccS3ObjectsDataSource_maxKeys(t *testing.T) { func TestAccS3ObjectsDataSource_startAfter(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_startAfter(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "1"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.0", "arch/three_gossips/turret"), + Config: testAccObjectsDataSourceConfig_startAfter(rName, 1, "prefix1/sub2/0"), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "1"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "0"), ), }, }, @@ -213,202 +199,161 @@ func TestAccS3ObjectsDataSource_startAfter(t *testing.T) { func TestAccS3ObjectsDataSource_fetchOwner(t *testing.T) { ctx := acctest.Context(t) - rInt := sdkacctest.RandInt() + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) + dataSourceName := "data.aws_s3_objects.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(ctx, t) }, - ErrorCheck: acctest.ErrorCheck(t, s3.EndpointsID), + ErrorCheck: acctest.ErrorCheck(t, names.S3EndpointID), ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories, PreventPostDestroyRefresh: true, Steps: []resource.TestStep{ { - Config: testAccObjectsDataSourceConfig_resources(rInt), // NOTE: contains no data source - // Does not need Check - }, - { - Config: testAccObjectsDataSourceConfig_owners(rInt), - Check: resource.ComposeTestCheckFunc( - testAccCheckObjectsExistsDataSource("data.aws_s3_objects.yesh"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "keys.#", "2"), - resource.TestCheckResourceAttr("data.aws_s3_objects.yesh", "owners.#", "2"), + Config: testAccObjectsDataSourceConfig_owners(rName, 1), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr(dataSourceName, "common_prefixes.#", "0"), + resource.TestCheckResourceAttr(dataSourceName, "keys.#", "3"), + resource.TestCheckResourceAttr(dataSourceName, "owners.#", "3"), ), }, }, }) } -func testAccCheckObjectsExistsDataSource(addr string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[addr] - if !ok { - return fmt.Errorf("Can't find S3 objects data source: %s", addr) - } - - if rs.Primary.ID == "" { - return fmt.Errorf("S3 objects data source ID not set") - } - 
- return nil - } -} - -func testAccObjectsDataSourceConfig_resources(randInt int) string { +func testAccObjectsDataSourceConfig_base(rName string, n int) string { return fmt.Sprintf(` -resource "aws_s3_bucket" "objects_bucket" { - bucket = "tf-acc-objects-test-bucket-%d" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } -resource "aws_s3_object" "object1" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/three_gossips/turret" - content = "Delicate" -} +resource "aws_s3_object" "test1" { + count = %[2]d -resource "aws_s3_object" "object2" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/three_gossips/broken" - content = "Dark Angel" + bucket = aws_s3_bucket.test.id + key = "prefix1/sub1/${count.index}" + content = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" } -resource "aws_s3_object" "object3" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/navajo/north_window" - content = "Balanced Rock" -} +resource "aws_s3_object" "test2" { + count = %[2]d -resource "aws_s3_object" "object4" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/navajo/sand_dune" - content = "Queen Victoria Rock" + bucket = aws_s3_bucket.test.id + key = "prefix1/sub2/${count.index}" + content = "0123456789" } -resource "aws_s3_object" "object5" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/partition/park_avenue" - content = "Double-O" -} +resource "aws_s3_object" "test3" { + count = %[2]d -resource "aws_s3_object" "object6" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/courthouse_towers/landscape" - content = "Fiery Furnace" + bucket = aws_s3_bucket.test.id + key = "prefix2/${count.index}" + content = "abcdefghijklmnopqrstuvwxyz" } +`, rName, n) +} + +func testAccObjectsDataSourceConfig_basic(rName string, n int) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), ` +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id -resource "aws_s3_object" "object7" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/rubicon" - content = "Devils Garden" + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, randInt) +`) } -func testAccObjectsDataSourceConfig_resourcesPlusAccessPoint(randInt int) string { - return testAccObjectsDataSourceConfig_resources(randInt) + fmt.Sprintf(` +func testAccObjectsDataSourceConfig_basicViaAccessPoint(rName string, n int) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), fmt.Sprintf(` resource "aws_s3_access_point" "test" { - bucket = aws_s3_bucket.objects_bucket.bucket - name = "tf-objects-test-access-point-%[1]d" -} -`, randInt) + bucket = aws_s3_bucket.test.bucket + name = "%[1]s-access-point" } -func testAccObjectsDataSourceConfig_basic(randInt int) string { - return fmt.Sprintf(` -%s +data "aws_s3_objects" "test" { + bucket = aws_s3_access_point.test.arn -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - prefix = "arch/navajo/" - delimiter = "/" + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`, rName)) } -func testAccObjectsDataSourceConfig_basicViaAccessPoint(randInt int) string { - return testAccObjectsDataSourceConfig_resourcesPlusAccessPoint(randInt) + ` -data "aws_s3_objects" "yesh" { - bucket = aws_s3_access_point.test.arn - prefix = "arch/navajo/" +func testAccObjectsDataSourceConfig_prefixes(rName string, n int) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), ` +data 
"aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id + prefix = "prefix1/" delimiter = "/" -} -` -} - -func testAccObjectsDataSourceConfig_all(randInt int) string { - return fmt.Sprintf(` -%s -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`) } -func testAccObjectsDataSourceConfig_prefixes(randInt int) string { +func testAccObjectsDataSourceConfig_encoded(rName string) string { return fmt.Sprintf(` -%s - -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - prefix = "arch/" - delimiter = "/" +resource "aws_s3_bucket" "test" { + bucket = %[1]q } -`, testAccObjectsDataSourceConfig_resources(randInt)) + +resource "aws_s3_object" "test" { + bucket = aws_s3_bucket.test.id + key = "prefix/a b" + content = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" } -func testAccObjectsDataSourceConfig_extraResource(randInt int) string { - return fmt.Sprintf(` -%s +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id + encoding_type = "url" -resource "aws_s3_object" "object8" { - bucket = aws_s3_bucket.objects_bucket.id - key = "arch/ru b ic on" - content = "Goose Island" + depends_on = [aws_s3_object.test] } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`, rName) } -func testAccObjectsDataSourceConfig_encoded(randInt int) string { - return fmt.Sprintf(` -%s +func testAccObjectsDataSourceConfig_maxKeysSmall(rName string, n, maxKeys int) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), fmt.Sprintf(` +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id + max_keys = %[1]d -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - encoding_type = "url" - prefix = "arch/ru" + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, testAccObjectsDataSourceConfig_extraResource(randInt)) +`, maxKeys)) } -func testAccObjectsDataSourceConfig_maxKeys(randInt int) string { +// Objects are added to the bucket outside this configuration. 
+func testAccObjectsDataSourceConfig_maxKeysLarge(rName string, maxKeys int) string { return fmt.Sprintf(` -%s +resource "aws_s3_bucket" "test" { + bucket = %[1]q + force_destroy = true +} -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - max_keys = 2 +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id + max_keys = %[2]d } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`, rName, maxKeys) } -func testAccObjectsDataSourceConfig_startAfter(randInt int) string { - return fmt.Sprintf(` -%s +func testAccObjectsDataSourceConfig_startAfter(rName string, n int, startAfter string) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), fmt.Sprintf(` +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id + start_after = %[1]q -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - start_after = "arch/three_gossips/broken" + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`, startAfter)) } -func testAccObjectsDataSourceConfig_owners(randInt int) string { - return fmt.Sprintf(` -%s - -data "aws_s3_objects" "yesh" { - bucket = aws_s3_bucket.objects_bucket.id - prefix = "arch/three_gossips/" +func testAccObjectsDataSourceConfig_owners(rName string, n int) string { + return acctest.ConfigCompose(testAccObjectsDataSourceConfig_base(rName, n), ` +data "aws_s3_objects" "test" { + bucket = aws_s3_bucket.test.id fetch_owner = true + + depends_on = [aws_s3_object.test1, aws_s3_object.test2, aws_s3_object.test3] } -`, testAccObjectsDataSourceConfig_resources(randInt)) +`) } diff --git a/internal/service/s3/service_package.go b/internal/service/s3/service_package.go index 3ff205c2e17..f0cfc7d7b73 100644 --- a/internal/service/s3/service_package.go +++ b/internal/service/s3/service_package.go @@ -6,6 +6,8 @@ package s3 import ( "context" + aws_sdkv2 "github.com/aws/aws-sdk-go-v2/aws" + s3_sdkv2 "github.com/aws/aws-sdk-go-v2/service/s3" aws_sdkv1 "github.com/aws/aws-sdk-go/aws" endpoints_sdkv1 "github.com/aws/aws-sdk-go/aws/endpoints" request_sdkv1 "github.com/aws/aws-sdk-go/aws/request" @@ -39,3 +41,19 @@ func (p *servicePackage) CustomizeConn(ctx context.Context, conn *s3_sdkv1.S3) ( return conn, nil } + +// NewClient returns a new AWS SDK for Go v2 client for this service package's AWS API. +func (p *servicePackage) NewClient(ctx context.Context, config map[string]any) (*s3_sdkv2.Client, error) { + cfg := *(config["aws_sdkv2_config"].(*aws_sdkv2.Config)) + + return s3_sdkv2.NewFromConfig(cfg, func(o *s3_sdkv2.Options) { + if endpoint := config["endpoint"].(string); endpoint != "" { + o.BaseEndpoint = aws_sdkv2.String(endpoint) + } else if o.Region == endpoints_sdkv1.UsEast1RegionID && config["s3_us_east_1_regional_endpoint"].(endpoints_sdkv1.S3UsEast1RegionalEndpoint) != endpoints_sdkv1.RegionalS3UsEast1Endpoint { + // Maintain the AWS SDK for Go v1 default of using the global endpoint in us-east-1. + // See https://github.com/hashicorp/terraform-provider-aws/issues/33028. 
+ o.Region = "aws-global" + } + o.UsePathStyle = config["s3_use_path_style"].(bool) + }), nil +} diff --git a/internal/service/s3/service_package_gen.go b/internal/service/s3/service_package_gen.go index 2e89cb39789..f0ff0323eec 100644 --- a/internal/service/s3/service_package_gen.go +++ b/internal/service/s3/service_package_gen.go @@ -23,7 +23,7 @@ func (p *servicePackage) FrameworkResources(ctx context.Context) []*types.Servic func (p *servicePackage) SDKDataSources(ctx context.Context) []*types.ServicePackageSDKDataSource { return []*types.ServicePackageSDKDataSource{ { - Factory: DataSourceCanonicalUserID, + Factory: dataSourceCanonicalUserID, TypeName: "aws_canonical_user_id", }, { diff --git a/names/names.go b/names/names.go index c4b757bb330..1153edc0d93 100644 --- a/names/names.go +++ b/names/names.go @@ -56,6 +56,7 @@ const ( RolesAnywhereEndpointID = "rolesanywhere" Route53DomainsEndpointID = "route53domains" SchedulerEndpointID = "scheduler" + S3EndpointID = "s3" SESV2EndpointID = "sesv2" SSMEndpointID = "ssm" SSMContactsEndpointID = "ssm-contacts" diff --git a/names/names_data.csv b/names/names_data.csv index 7c00d2c0ea3..735b1638e96 100644 --- a/names/names_data.csv +++ b/names/names_data.csv @@ -302,7 +302,7 @@ route53-recovery-cluster,route53recoverycluster,route53recoverycluster,route53re route53-recovery-control-config,route53recoverycontrolconfig,route53recoverycontrolconfig,route53recoverycontrolconfig,,route53recoverycontrolconfig,,,Route53RecoveryControlConfig,Route53RecoveryControlConfig,x,1,,,aws_route53recoverycontrolconfig_,,route53recoverycontrolconfig_,Route 53 Recovery Control Config,Amazon,,,,,, route53-recovery-readiness,route53recoveryreadiness,route53recoveryreadiness,route53recoveryreadiness,,route53recoveryreadiness,,,Route53RecoveryReadiness,Route53RecoveryReadiness,x,1,,,aws_route53recoveryreadiness_,,route53recoveryreadiness_,Route 53 Recovery Readiness,Amazon,,,,,, route53resolver,route53resolver,route53resolver,route53resolver,,route53resolver,,,Route53Resolver,Route53Resolver,,1,,aws_route53_resolver_,aws_route53resolver_,,route53_resolver_,Route 53 Resolver,Amazon,,,,,, -s3api,s3api,s3,s3,,s3,,s3api,S3,S3,x,1,,aws_(canonical_user_id|s3_bucket|s3_object),aws_s3_,,s3_bucket;s3_object;canonical_user_id,S3 (Simple Storage),Amazon,,,,AWS_S3_ENDPOINT,TF_AWS_S3_ENDPOINT, +s3api,s3api,s3,s3,,s3,,s3api,S3,S3,x,1,2,aws_(canonical_user_id|s3_bucket|s3_object),aws_s3_,,s3_bucket;s3_object;canonical_user_id,S3 (Simple Storage),Amazon,,,,AWS_S3_ENDPOINT,TF_AWS_S3_ENDPOINT, s3control,s3control,s3control,s3control,,s3control,,,S3Control,S3Control,,1,2,aws_(s3_account_|s3control_|s3_access_),aws_s3control_,,s3control;s3_account_;s3_access_,S3 Control,Amazon,,,,,, glacier,glacier,glacier,glacier,,glacier,,,Glacier,Glacier,,,2,,aws_glacier_,,glacier_,S3 Glacier,Amazon,,,,,, s3outposts,s3outposts,s3outposts,s3outposts,,s3outposts,,,S3Outposts,S3Outposts,,1,,,aws_s3outposts_,,s3outposts_,S3 on Outposts,Amazon,,,,,, diff --git a/website/docs/d/s3_objects.html.markdown b/website/docs/d/s3_objects.html.markdown index 2a4747d776c..8a6fe73f5a2 100644 --- a/website/docs/d/s3_objects.html.markdown +++ b/website/docs/d/s3_objects.html.markdown @@ -39,6 +39,7 @@ This data source supports the following arguments: * `max_keys` - (Optional) Maximum object keys to return (Default: 1000) * `start_after` - (Optional) Returns key names lexicographically after a specific object key in your bucket (Default: none; S3 lists object keys in UTF-8 character encoding in lexicographical order) 
* `fetch_owner` - (Optional) Boolean specifying whether to populate the owner list (Default: false) +* `request_payer` - (Optional) Confirms that the requester knows that they will be charged for the request. Bucket owners need not specify this parameter in their requests. If included, the only valid value is `requester`. ## Attribute Reference @@ -48,3 +49,4 @@ This data source exports the following attributes in addition to the arguments a * `common_prefixes` - List of any keys between `prefix` and the next occurrence of `delimiter` (i.e., similar to subdirectories of the `prefix` "directory"); the list is only returned when you specify `delimiter` * `id` - S3 Bucket. * `owners` - List of strings representing object owner IDs (see `fetch_owner` above) +* `request_charged` - If present, indicates that the requester was successfully charged for the request.
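
For reviewers, a minimal configuration exercising the new behavior might look like the sketch below. The bucket name, prefix, and output wiring are illustrative only and are not part of this change; the argument/attribute names come straight from the schema added in `objects_data_source.go`.

```terraform
# Illustrative only: list objects in a hypothetical Requester Pays bucket.
data "aws_s3_objects" "example" {
  bucket        = "example-requester-pays-bucket" # hypothetical bucket name
  prefix        = "logs/"
  encoding_type = "url"       # now validated at plan time against the S3 EncodingType enum
  max_keys      = 2500        # values greater than 1000 are now honored by paging ListObjectsV2
  request_payer = "requester" # only valid value; the requester accepts the request charges
}

output "request_charged" {
  # Empty unless S3 reports that the requester was charged for the request.
  value = data.aws_s3_objects.example.request_charged
}
```

As implemented in the diff, `request_payer` is passed through as the `RequestPayer` field on `ListObjectsV2Input`, and `request_charged` simply surfaces the `RequestCharged` value from the last page of the response.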