Support CodeBuild BuildBatchConfig
shuheiktgw authored and ewbankkit committed Apr 29, 2021
1 parent 245c4bd commit 424d237
Showing 4 changed files with 230 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .changelog/14534.txt
@@ -0,0 +1,3 @@
```release-note:enhancement
resource/aws_codebuild_project: Add `build_batch_config` argument
```
134 changes: 134 additions & 0 deletions aws/resource_aws_codebuild_project.go
@@ -115,6 +115,50 @@ func resourceAwsCodeBuildProject() *schema.Resource {
          },
        },
      },
      "build_batch_config": {
        Type:     schema.TypeList,
        MaxItems: 1,
        Optional: true,
        Elem: &schema.Resource{
          Schema: map[string]*schema.Schema{
            "combine_artifacts": {
              Type:     schema.TypeBool,
              Optional: true,
            },
            "restrictions": {
              Type:     schema.TypeList,
              MaxItems: 1,
              Optional: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "compute_types_allowed": {
                    Type:     schema.TypeList,
                    Optional: true,
                    Elem: &schema.Schema{
                      Type:         schema.TypeString,
                      ValidateFunc: validation.StringInSlice(codebuild.ComputeType_Values(), false),
                    },
                  },
                  "maximum_builds_allowed": {
                    Type:         schema.TypeInt,
                    Optional:     true,
                    ValidateFunc: validation.IntBetween(1, 100),
                  },
                },
              },
            },
            "service_role": {
              Type:     schema.TypeString,
              Required: true,
            },
            "timeout_in_mins": {
              Type:         schema.TypeInt,
              Optional:     true,
              ValidateFunc: validation.IntBetween(5, 480),
            },
          },
        },
      },
      "cache": {
        Type:     schema.TypeList,
        Optional: true,
@@ -675,6 +719,7 @@ func resourceAwsCodeBuildProjectCreate(d *schema.ResourceData, meta interface{})
  projectSecondaryArtifacts := expandProjectSecondaryArtifacts(d)
  projectSecondarySources := expandProjectSecondarySources(d)
  projectLogsConfig := expandProjectLogsConfig(d)
  projectBatchConfig := expandCodeBuildBuildBatchConfig(d)

  if aws.StringValue(projectSource.Type) == codebuild.SourceTypeNoSource {
    if aws.StringValue(projectSource.Buildspec) == "" {
@@ -694,6 +739,7 @@ func resourceAwsCodeBuildProjectCreate(d *schema.ResourceData, meta interface{})
    SecondaryArtifacts: projectSecondaryArtifacts,
    SecondarySources:   projectSecondarySources,
    LogsConfig:         projectLogsConfig,
    BuildBatchConfig:   projectBatchConfig,
    Tags:               tags.IgnoreAws().CodebuildTags(),
  }

@@ -963,6 +1009,49 @@ func expandProjectLogsConfig(d *schema.ResourceData) *codebuild.LogsConfig {
  return logsConfig
}

func expandCodeBuildBuildBatchConfig(d *schema.ResourceData) *codebuild.ProjectBuildBatchConfig {
  configs, ok := d.Get("build_batch_config").([]interface{})
  if !ok || len(configs) == 0 || configs[0] == nil {
    return nil
  }

  data := configs[0].(map[string]interface{})

  projectBuildBatchConfig := &codebuild.ProjectBuildBatchConfig{
    Restrictions: expandCodeBuildBatchRestrictions(data),
    ServiceRole:  aws.String(data["service_role"].(string)),
  }

  if v, ok := data["combine_artifacts"]; ok {
    projectBuildBatchConfig.CombineArtifacts = aws.Bool(v.(bool))
  }

  if v, ok := data["timeout_in_mins"]; ok && v != 0 {
    projectBuildBatchConfig.TimeoutInMins = aws.Int64(int64(v.(int)))
  }

  return projectBuildBatchConfig
}

func expandCodeBuildBatchRestrictions(data map[string]interface{}) *codebuild.BatchRestrictions {
  if v, ok := data["restrictions"]; !ok || len(v.([]interface{})) == 0 || v.([]interface{})[0] == nil {
    return nil
  }

  restrictionsData := data["restrictions"].([]interface{})[0].(map[string]interface{})

  restrictions := &codebuild.BatchRestrictions{}
  if v, ok := restrictionsData["compute_types_allowed"]; ok && len(v.([]interface{})) != 0 {
    restrictions.ComputeTypesAllowed = expandStringList(v.([]interface{}))
  }

  if v, ok := restrictionsData["maximum_builds_allowed"]; ok && v != 0 {
    restrictions.MaximumBuildsAllowed = aws.Int64(int64(v.(int)))
  }

  return restrictions
}

func expandCodeBuildCloudWatchLogsConfig(configList []interface{}) *codebuild.CloudWatchLogsConfig {
  if len(configList) == 0 || configList[0] == nil {
    return nil
@@ -1183,6 +1272,10 @@ func resourceAwsCodeBuildProjectRead(d *schema.ResourceData, meta interface{}) e
    return fmt.Errorf("error setting vpc_config: %s", err)
  }

  if err := d.Set("build_batch_config", flattenAwsCodeBuildBuildBatchConfig(project.BuildBatchConfig)); err != nil {
    return fmt.Errorf("error setting build_batch_config: %s", err)
  }

  d.Set("arn", project.Arn)
  d.Set("description", project.Description)
  d.Set("encryption_key", project.EncryptionKey)
@@ -1256,6 +1349,10 @@ func resourceAwsCodeBuildProjectUpdate(d *schema.ResourceData, meta interface{})
    params.LogsConfig = logsConfig
  }

  if d.HasChange("build_batch_config") {
    params.BuildBatchConfig = expandCodeBuildBuildBatchConfig(d)
  }

  if d.HasChange("cache") {
    if v, ok := d.GetOk("cache"); ok {
      params.Cache = expandProjectCache(v.([]interface{}))
@@ -1563,6 +1660,43 @@ func flattenAwsCodeBuildVpcConfig(vpcConfig *codebuild.VpcConfig) []interface{}
  return nil
}

func flattenAwsCodeBuildBuildBatchConfig(buildBatchConfig *codebuild.ProjectBuildBatchConfig) []interface{} {
  if buildBatchConfig == nil {
    return nil
  }

  values := map[string]interface{}{}

  values["service_role"] = aws.StringValue(buildBatchConfig.ServiceRole)

  if buildBatchConfig.CombineArtifacts != nil {
    values["combine_artifacts"] = aws.BoolValue(buildBatchConfig.CombineArtifacts)
  }

  if buildBatchConfig.Restrictions != nil {
    values["restrictions"] = flattenAwsCodeBuildBuildBatchConfigRestrictions(buildBatchConfig.Restrictions)
  }

  if buildBatchConfig.TimeoutInMins != nil {
    values["timeout_in_mins"] = aws.Int64Value(buildBatchConfig.TimeoutInMins)
  }

  return []interface{}{values}
}

func flattenAwsCodeBuildBuildBatchConfigRestrictions(restrictions *codebuild.BatchRestrictions) []interface{} {
  if restrictions == nil {
    return []interface{}{}
  }

  values := map[string]interface{}{
    "compute_types_allowed":  aws.StringValueSlice(restrictions.ComputeTypesAllowed),
    "maximum_builds_allowed": aws.Int64Value(restrictions.MaximumBuildsAllowed),
  }

  return []interface{}{values}
}

func resourceAwsCodeBuildProjectArtifactsHash(v interface{}) int {
  var buf bytes.Buffer
  m := v.(map[string]interface{})
80 changes: 80 additions & 0 deletions aws/resource_aws_codebuild_project_test.go
@@ -618,6 +618,50 @@ func TestAccAWSCodeBuildProject_LogsConfig_S3Logs(t *testing.T) {
  })
}

func TestAccAWSCodeBuildProject_BuildBatchConfig(t *testing.T) {
  var project codebuild.Project
  rName := acctest.RandomWithPrefix("tf-acc-test")
  resourceName := "aws_codebuild_project.test"

  resource.ParallelTest(t, resource.TestCase{
    PreCheck:     func() { testAccPreCheck(t); testAccPreCheckAWSCodeBuild(t) },
    ErrorCheck:   testAccErrorCheck(t, codebuild.EndpointsID),
    Providers:    testAccProviders,
    CheckDestroy: testAccCheckAWSCodeBuildProjectDestroy,
    Steps: []resource.TestStep{
      {
        Config: testAccAWSCodeBuildProjectConfig_BuildBatchConfig(rName, true, "BUILD_GENERAL1_SMALL", 10, 5),
        Check: resource.ComposeTestCheckFunc(
          testAccCheckAWSCodeBuildProjectExists(resourceName, &project),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.combine_artifacts", "true"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.#", "1"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.compute_types_allowed.#", "1"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.compute_types_allowed.0", "BUILD_GENERAL1_SMALL"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.maximum_builds_allowed", "10"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.timeout_in_mins", "5"),
        ),
      },
      {
        ResourceName:      resourceName,
        ImportState:       true,
        ImportStateVerify: true,
      },
      {
        Config: testAccAWSCodeBuildProjectConfig_BuildBatchConfig(rName, false, "BUILD_GENERAL1_MEDIUM", 20, 10),
        Check: resource.ComposeTestCheckFunc(
          testAccCheckAWSCodeBuildProjectExists(resourceName, &project),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.combine_artifacts", "false"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.#", "1"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.compute_types_allowed.#", "1"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.compute_types_allowed.0", "BUILD_GENERAL1_MEDIUM"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.restrictions.0.maximum_builds_allowed", "20"),
          resource.TestCheckResourceAttr(resourceName, "build_batch_config.0.timeout_in_mins", "10"),
        ),
      },
    },
  })
}

func TestAccAWSCodeBuildProject_Source_GitCloneDepth(t *testing.T) {
  var project codebuild.Project
  rName := acctest.RandomWithPrefix("tf-acc-test")
@@ -2865,6 +2909,42 @@ resource "aws_codebuild_project" "test" {
`, rName, status, gName, sName))
}

func testAccAWSCodeBuildProjectConfig_BuildBatchConfig(rName string, combineArtifacts bool, computeTypesAllowed string, maximumBuildsAllowed, timeoutInMins int) string {
  return composeConfig(testAccAWSCodeBuildProjectConfig_Base_ServiceRole(rName), fmt.Sprintf(`
resource "aws_codebuild_project" "test" {
  name         = %[1]q
  service_role = aws_iam_role.test.arn
  artifacts {
    type = "NO_ARTIFACTS"
  }
  environment {
    compute_type = "BUILD_GENERAL1_SMALL"
    image        = "2"
    type         = "LINUX_CONTAINER"
  }
  source {
    location = "https://github.com/hashicorp/packer.git"
    type     = "GITHUB"
  }
  build_batch_config {
    combine_artifacts = %[2]t
    restrictions {
      compute_types_allowed  = [%[3]q]
      maximum_builds_allowed = %[4]d
    }
    service_role    = aws_iam_role.test.arn
    timeout_in_mins = %[5]d
  }
}
`, rName, combineArtifacts, computeTypesAllowed, maximumBuildsAllowed, timeoutInMins))
}

func testAccAWSCodeBuildProjectConfig_LogsConfig_S3Logs(rName, bName, status, location string, encryptionDisabled bool) string {
  return composeConfig(
    testAccAWSCodeBuildProjectConfig_Base_ServiceRole(rName),
13 changes: 13 additions & 0 deletions website/docs/r/codebuild_project.html.markdown
@@ -230,6 +230,7 @@ The following arguments are required:
The following arguments are optional:

* `badge_enabled` - (Optional) Generates a publicly accessible URL for the project's build badge. Available as `badge_url` attribute when enabled.
* `build_batch_config` - (Optional) Defines the batch build options for the project. Detailed below.
* `build_timeout` - (Optional) Number of minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait until timing out any related build that does not get marked as completed. The default is 60 minutes.
* `cache` - (Optional) Configuration block. Detailed below.
* `description` - (Optional) Short description of the project.
@@ -255,6 +256,18 @@ The following arguments are optional:
* `path` - (Optional) If `type` is set to `S3`, this is the path to the output artifact.
* `type` - (Required) Build output artifact's type. Valid values: `CODEPIPELINE`, `NO_ARTIFACTS`, `S3`.

### build_batch_config

* `combine_artifacts` - (Optional) Specifies if the build artifacts for the batch build should be combined into a single artifact location.
* `restrictions` - (Optional) Specifies the restrictions for the batch build.
* `service_role` - (Required) Specifies the service role ARN for the batch build project.
* `timeout_in_mins` - (Optional) Specifies the maximum amount of time, in minutes, within which the batch build must be completed. Valid values are 5 to 480.

#### restrictions

* `compute_types_allowed` - (Optional) An array of strings that specify the compute types that are allowed for the batch build. See [Build environment compute types](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html) in the AWS CodeBuild User Guide for these values.
* `maximum_builds_allowed` - (Optional) Specifies the maximum number of builds allowed. Valid values are 1 to 100.
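
As a rough illustration, the block added by this change might be used as follows; this is a sketch, and the IAM role reference, image, and source location below are placeholder values rather than part of the change:

```hcl
resource "aws_codebuild_project" "example" {
  name         = "example"
  service_role = aws_iam_role.example.arn

  artifacts {
    type = "NO_ARTIFACTS"
  }

  environment {
    compute_type = "BUILD_GENERAL1_SMALL"
    image        = "aws/codebuild/standard:4.0"
    type         = "LINUX_CONTAINER"
  }

  source {
    type     = "GITHUB"
    location = "https://github.com/example/example.git"
  }

  build_batch_config {
    combine_artifacts = true
    service_role      = aws_iam_role.example.arn
    timeout_in_mins   = 60

    restrictions {
      compute_types_allowed  = ["BUILD_GENERAL1_SMALL"]
      maximum_builds_allowed = 10
    }
  }
}
```

Per the schema in this commit, only `service_role` is required inside `build_batch_config`; `combine_artifacts`, `restrictions`, and `timeout_in_mins` are optional.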

### cache

* `location` - (Required when cache type is `S3`) Location where the AWS CodeBuild project stores cached resources. For type `S3`, the value must be a valid S3 bucket name/prefix.