From a4934be6cbee7d347cdb088b30d1963633b93175 Mon Sep 17 00:00:00 2001
From: Elad Ben-Israel
Date: Thu, 24 Dec 2020 14:40:27 +0200
Subject: [PATCH 1/2] chore(s3-deployment): restore lambda tests (#12220)

The change in #12129 accidentally deleted the unit tests for the
s3-deployment lambda function. This change restores them. Since tests are
written in python, run them inside a docker image derived from
public.ecr.aws/lambda/python.

----

*By submitting this pull request, I confirm that my contribution is made
under the terms of the Apache-2.0 license*
---
 .../aws-s3-deployment/test/lambda.test.ts    |   7 +
 .../aws-s3-deployment/test/lambda/Dockerfile |  13 +
 .../aws-s3-deployment/test/lambda/aws        |  27 +
 .../aws-s3-deployment/test/lambda/test.py    | 486 ++++++++++++++++++
 .../aws-s3-deployment/test/lambda/test.sh    |  19 +
 .../aws-s3-deployment/test/lambda/test.zip   | Bin 0 -> 179 bytes
 6 files changed, 552 insertions(+)
 create mode 100644 packages/@aws-cdk/aws-s3-deployment/test/lambda.test.ts
 create mode 100644 packages/@aws-cdk/aws-s3-deployment/test/lambda/Dockerfile
 create mode 100755 packages/@aws-cdk/aws-s3-deployment/test/lambda/aws
 create mode 100644 packages/@aws-cdk/aws-s3-deployment/test/lambda/test.py
 create mode 100755 packages/@aws-cdk/aws-s3-deployment/test/lambda/test.sh
 create mode 100644 packages/@aws-cdk/aws-s3-deployment/test/lambda/test.zip

diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda.test.ts b/packages/@aws-cdk/aws-s3-deployment/test/lambda.test.ts
new file mode 100644
index 0000000000000..830938b3811b3
--- /dev/null
+++ b/packages/@aws-cdk/aws-s3-deployment/test/lambda.test.ts
@@ -0,0 +1,7 @@
+import { spawnSync } from 'child_process';
+import * as path from 'path';
+
+test('lambda python pytest', () => {
+  const result = spawnSync(path.join(__dirname, 'lambda', 'test.sh'), { stdio: 'inherit' });
+  expect(result.status).toBe(0);
+});
\ No newline at end of file
diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda/Dockerfile b/packages/@aws-cdk/aws-s3-deployment/test/lambda/Dockerfile
new file mode 100644
index 0000000000000..8e3c05334efac
--- /dev/null
+++ b/packages/@aws-cdk/aws-s3-deployment/test/lambda/Dockerfile
@@ -0,0 +1,13 @@
+FROM public.ecr.aws/lambda/python:latest
+
+# add everything to /opt/awscli (this is where `aws` is executed from)
+ADD . /opt/awscli
+
+# install boto3, which is available on Lambda
+RUN pip3 install boto3
+
+# run tests
+WORKDIR /opt/awscli
+RUN ["python3", "./test.py"]
+
+ENTRYPOINT [ "/bin/bash" ]
\ No newline at end of file
diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda/aws b/packages/@aws-cdk/aws-s3-deployment/test/lambda/aws
new file mode 100755
index 0000000000000..969bb982cd08c
--- /dev/null
+++ b/packages/@aws-cdk/aws-s3-deployment/test/lambda/aws
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+#===================================================================================================
+# mock for the "aws" cli
+#
+# the mock behaves as follows:
+# - argvs are going to be written to "aws.out" (one command in each line)
+# - if "aws s3 cp" is invoked, the destination will be populated with a test zip file.
+# - for "cp" and "sync", "aws.out" argv[4] is replaced by "archive.zip" and "contents.zip" +# becuase the actual value is a full path of a temporary directory +# +import sys +import json +import os +import shutil + +scriptdir=os.path.dirname(os.path.realpath(__file__)) + +# if "cp" is called with a local destination, copy a test zip file to the destination or +if sys.argv[2] == "cp" and not sys.argv[4].startswith("s3://"): + shutil.copyfile(os.path.join(scriptdir, 'test.zip'), sys.argv[4]) + sys.argv[4] = "archive.zip" + +if sys.argv[2] == "sync": + sys.argv[4 if '--delete' in sys.argv else 3] = "contents.zip" + +with open("./aws.out", "a") as myfile: + myfile.write(json.dumps(sys.argv[1:]) + "\n") diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.py b/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.py new file mode 100644 index 0000000000000..cd88eaf6a5269 --- /dev/null +++ b/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.py @@ -0,0 +1,486 @@ +# unit tests for the s3 bucket deployment lambda handler +import index +import os +import unittest +import json +import sys +import traceback +import logging +import botocore +from botocore.vendored import requests +from botocore.exceptions import ClientError +from unittest.mock import MagicMock +from unittest.mock import patch + +# set TEST_AWSCLI_PATH to point to the "aws" stub we have here +scriptdir=os.path.dirname(os.path.realpath(__file__)) +os.environ['TEST_AWSCLI_PATH'] = os.path.join(scriptdir, 'aws') + +class TestHandler(unittest.TestCase): + def setUp(self): + logger = logging.getLogger() + + # clean up old aws.out file (from previous runs) + try: os.remove("aws.out") + except OSError: pass + + def test_invalid_request(self): + resp = invoke_handler("Create", {}, expected_status="FAILED") + self.assertEqual(resp["Reason"], "missing request resource property 'SourceBucketNames'. props: {}") + + def test_create_update(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "" + }) + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_create_no_delete(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "Prune": "false" + }) + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "contents.zip", "s3:///"] + ) + + def test_update_no_delete(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "Prune": "false" + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "contents.zip", "s3:///"] + ) + + def test_create_update_multiple_sources(self): + invoke_handler("Create", { + "SourceBucketNames": ["", ""], + "SourceObjectKeys": ["", ""], + "DestinationBucketName": "" + }) + + # Note: these are different files in real-life. 
For testing purposes, we hijack + # the command to output a static filename, archive.zip + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_create_with_backslash_prefix_same_as_no_prefix(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "/" + }) + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + + def test_create_update_with_dest_key(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "" + }) + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_create_update_with_metadata(self): + invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "", + "UserMetadata": { "best": "game" }, + "SystemMetadata": { "content-type": "text/html", "content-language": "en" } + }) + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///", "--content-type", "text/html", "--content-language", "en", "--metadata", "{\"x-amz-meta-best\":\"game\"}", "--metadata-directive", "REPLACE"] + ) + + def test_delete_no_retain(self): + invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "RetainOnDelete": "false" + }, physical_id="") + + self.assertAwsCommands(["s3", "rm", "s3:///", "--recursive"]) + + def test_delete_with_dest_key(self): + invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "", + "RetainOnDelete": "false" + }, physical_id="") + + self.assertAwsCommands(["s3", "rm", "s3:///", "--recursive"]) + + def test_delete_with_retain_explicit(self): + invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "RetainOnDelete": "true" + }, physical_id="") + + # no aws commands (retain) + self.assertAwsCommands() + + # RetainOnDelete=true is the default + def test_delete_with_retain_implicit_default(self): + invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "" + }, physical_id="") + + # no aws commands (retain) + self.assertAwsCommands() + + def test_delete_with_retain_explicitly_false(self): + invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "RetainOnDelete": "false" + }, physical_id="") + + self.assertAwsCommands( + ["s3", "rm", "s3:///", "--recursive"] + ) + + # + # update + # + + def test_update_same_dest(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_update_same_dest_cf_invalidate(self): + def mock_make_api_call(self, operation_name, kwarg): + if operation_name == 'CreateInvalidation': + assert kwarg['DistributionId'] == '' + assert 
kwarg['InvalidationBatch']['Paths']['Quantity'] == 1 + assert kwarg['InvalidationBatch']['Paths']['Items'][0] == '/*' + return {'Invalidation': {'Id': ''}} + if operation_name == 'GetInvalidation' and kwarg['Id'] == '': + return {'Invalidation': {'Id': '', 'Status': 'Completed'}} + raise ClientError({'Error': {'Code': '500', 'Message': 'Unsupported operation'}}, operation_name) + + with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DistributionId": "" + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + def test_update_same_dest_cf_invalidate_custom_prefix(self): + def mock_make_api_call(self, operation_name, kwarg): + if operation_name == 'CreateInvalidation': + assert kwarg['DistributionId'] == '' + assert kwarg['InvalidationBatch']['Paths']['Quantity'] == 1 + assert kwarg['InvalidationBatch']['Paths']['Items'][0] == '//*' + return {'Invalidation': {'Id': ''}} + if operation_name == 'GetInvalidation' and kwarg['Id'] == '': + return {'Invalidation': {'Id': '', 'Status': 'Completed'}} + raise ClientError({'Error': {'Code': '500', 'Message': 'Unsupported operation'}}, operation_name) + + with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "", + "DistributionId": "" + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + def test_update_same_dest_cf_invalidate_custom_paths(self): + def mock_make_api_call(self, operation_name, kwarg): + if operation_name == 'CreateInvalidation': + assert kwarg['DistributionId'] == '' + assert kwarg['InvalidationBatch']['Paths']['Quantity'] == 2 + assert kwarg['InvalidationBatch']['Paths']['Items'][0] == '/path1/*' + assert kwarg['InvalidationBatch']['Paths']['Items'][1] == '/path2/*' + return {'Invalidation': {'Id': ''}} + if operation_name == 'GetInvalidation' and kwarg['Id'] == '': + return {'Invalidation': {'Id': '', 'Status': 'Completed'}} + raise ClientError({'Error': {'Code': '500', 'Message': 'Unsupported operation'}}, operation_name) + + with patch('botocore.client.BaseClient._make_api_call', new=mock_make_api_call): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DistributionId": "", + "DistributionPaths": ["/path1/*", "/path2/*"] + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + def test_update_new_dest_retain(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + "RetainOnDelete": "true" + }, physical_id="") + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_update_new_dest_no_retain(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "RetainOnDelete": "false" + }, old_resource_props={ + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "", + "RetainOnDelete": "false" + }, physical_id="") + + self.assertAwsCommands( + ["s3", "rm", "s3:///", "--recursive"], + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def 
test_update_new_dest_retain_implicit(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "" + }, physical_id="") + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_update_new_dest_prefix_no_retain(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "", + "RetainOnDelete": "false" + }, old_resource_props={ + "DestinationBucketName": "", + "RetainOnDelete": "false" + }, physical_id="") + + self.assertAwsCommands( + ["s3", "rm", "s3:///", "--recursive"], + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + def test_update_new_dest_prefix_retain_implicit(self): + invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "DestinationBucketKeyPrefix": "" + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id="") + + self.assertAwsCommands( + ["s3", "cp", "s3:///", "archive.zip"], + ["s3", "sync", "--delete", "contents.zip", "s3:///"] + ) + + # + # physical id + # + + def test_physical_id_allocated_on_create_and_reused_afterwards(self): + create_resp = invoke_handler("Create", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }) + + phid = create_resp['PhysicalResourceId'] + self.assertTrue(phid.startswith('aws.cdk.s3deployment')) + + # now issue an update and pass in the physical id. expect the same + # one to be returned back + update_resp = invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + }, physical_id=phid) + self.assertEqual(update_resp['PhysicalResourceId'], phid) + + # now issue a delete, and make sure this also applies + delete_resp = invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + "RetainOnDelete": "false" + }, physical_id=phid) + self.assertEqual(delete_resp['PhysicalResourceId'], phid) + + def test_fails_when_physical_id_not_present_in_update(self): + update_resp = invoke_handler("Update", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + }, expected_status="FAILED") + + self.assertEqual(update_resp['Reason'], "invalid request: request type is 'Update' but 'PhysicalResourceId' is not defined") + + def test_fails_when_physical_id_not_present_in_delete(self): + update_resp = invoke_handler("Delete", { + "SourceBucketNames": [""], + "SourceObjectKeys": [""], + "DestinationBucketName": "", + }, old_resource_props={ + "DestinationBucketName": "", + }, expected_status="FAILED") + + self.assertEqual(update_resp['Reason'], "invalid request: request type is 'Delete' but 'PhysicalResourceId' is not defined") + + + # asserts that a given list of "aws xxx" commands have been invoked (in order) + def assertAwsCommands(self, *expected): + actual = read_aws_out() + self.assertEqual(actual, list(expected)) + +# ================================================================================================== +# helpers + +# +# reads "aws.out" and returns a list of "aws" commands (as strings) +def 
read_aws_out(): + if not os.path.exists("aws.out"): + return [] + + with open("aws.out") as f: + return [json.loads(l) for l in f.read().splitlines()] + +# +# invokes the handler under test +# requestType: CloudFormation request type ("Create", "Update", "Delete") +# resourceProps: map to pass to "ResourceProperties" +# expected_status: "SUCCESS" or "FAILED" +def invoke_handler(requestType, resourceProps, old_resource_props=None, physical_id=None, expected_status='SUCCESS'): + response_url = '' + + event={ + 'ResponseURL': response_url, + 'StackId': '', + 'RequestId': '', + 'LogicalResourceId': '', + 'RequestType': requestType, + 'ResourceProperties': resourceProps + } + + if old_resource_props: + event['OldResourceProperties'] = old_resource_props + + if physical_id: + event['PhysicalResourceId'] = physical_id + + class ContextMock: log_stream_name = 'log_stream' + class ResponseMock: reason = 'OK' + + context = ContextMock() + requests.put = MagicMock(return_value=ResponseMock()) + + #-------------------- + # invoke the handler + #-------------------- + index.handler(event, context) + + requests.put.assert_called_once() + (pos_args, kw_args) = requests.put.call_args + + actual_url = pos_args[0] + actual_data = kw_args['data'] + + if actual_url != response_url: + raise Exception("Invalid url used for sending CFN response. expected=%s actual=%s" % (response_url, actual_url)) + + resp = json.loads(actual_data) + + def assert_field(name, expect=None): + value=resp.get(name) + + if not expect: + if not resp.get(name): + raise Exception("Missing '%s' field from response: %s" % (name, resp)) + elif expect and value != expect: + raise Exception("Expecting response field '%s' to be '%s' but got '%s'.\n%s" % (name, expect, value, json.dumps(resp, indent=2))) + + assert_field('Status', expected_status) + assert_field('Reason') + assert_field('PhysicalResourceId') + assert_field('StackId', '') + assert_field('RequestId', '') + assert_field('LogicalResourceId', '') + + return resp + +if __name__ == '__main__': + unittest.main() diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.sh b/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.sh new file mode 100755 index 0000000000000..a094c8ae16cfa --- /dev/null +++ b/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.sh @@ -0,0 +1,19 @@ +#!/bin/bash +#--------------------------------------------------------------------------------------------------- +# exeuctes unit tests +# +# prepares a staging directory with the requirements +set -e +scriptdir=$(cd $(dirname $0) && pwd) + +# prepare staging directory +staging=$(mktemp -d) +mkdir -p ${staging} +cd ${staging} + +# copy src and overlay with test +cp -f ${scriptdir}/../../lib/lambda/* $PWD +cp -f ${scriptdir}/* $PWD + +# this will run our tests inside the right environment +docker build . 
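Note on how this harness hangs together: test.py points the TEST_AWSCLI_PATH environment variable at the "aws" stub above, and the stub records every invocation in aws.out, which assertAwsCommands() then reads back. The handler under test lives in lib/lambda/index.py and is not part of this diff, so the helper name and fallback path in the following minimal Python sketch are assumptions for illustration only; it shows the call pattern the stub relies on.

import os
import subprocess

def aws_command(*args):
    # Tests set TEST_AWSCLI_PATH to the "aws" stub, so commands are recorded in
    # aws.out instead of reaching the real CLI. The fallback path below is an
    # assumption; the Dockerfile above copies the files to /opt/awscli.
    cli = os.environ.get("TEST_AWSCLI_PATH", "/opt/awscli/aws")
    subprocess.check_call([cli, *args])

# Example (hypothetical bucket name): the stub would record
#   ["s3", "sync", "--delete", "contents.zip", "s3://dest-bucket/"]
# in aws.out, because it rewrites the local source path to "contents.zip".
# aws_command("s3", "sync", "--delete", "/tmp/contents", "s3://dest-bucket/")
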
diff --git a/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.zip b/packages/@aws-cdk/aws-s3-deployment/test/lambda/test.zip
new file mode 100644
index 0000000000000000000000000000000000000000..56829f65a2681a83665ce8539b84c0dd081e97bd
GIT binary patch
literal 179
zcmWIWW@h1H00AzGVBcl-QSQ7zHVAVt$S{d2?{eZMTh{FH?)6OLz

literal 0
HcmV?d00001

From 5279f37288283a37c952440a7f2082517c56af3a Mon Sep 17 00:00:00 2001
From: Tom Jenkinson
Date: Thu, 24 Dec 2020 18:44:08 +0000
Subject: [PATCH 2/2] fix(codepipeline-actions): use codebuild batch iam permissions when `executeBatchBuild: true` (#12181)

If the `executeBatchBuild` prop is added to trigger a batch build, we also
need to switch the IAM permissions to the ones which allow triggering a
batch build. This does that.

This should probably have been part of
https://github.com/aws/aws-cdk/pull/11741
---
 .../lib/codebuild/build-action.ts             |   4 +-
 ...eg.pipeline-code-build-batch.expected.json | 487 ++++++++++++++++++
 .../test/integ.pipeline-code-build-batch.ts   |  56 ++
 3 files changed, 545 insertions(+), 2 deletions(-)
 create mode 100644 packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.expected.json
 create mode 100644 packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.ts

diff --git a/packages/@aws-cdk/aws-codepipeline-actions/lib/codebuild/build-action.ts b/packages/@aws-cdk/aws-codepipeline-actions/lib/codebuild/build-action.ts
index e81095b746eee..ba1d1df38c453 100644
--- a/packages/@aws-cdk/aws-codepipeline-actions/lib/codebuild/build-action.ts
+++ b/packages/@aws-cdk/aws-codepipeline-actions/lib/codebuild/build-action.ts
@@ -139,8 +139,8 @@ export class CodeBuildAction extends Action {
       resources: [this.props.project.projectArn],
       actions: [
         'codebuild:BatchGetBuilds',
-        'codebuild:StartBuild',
-        'codebuild:StopBuild',
+        `codebuild:${this.props.executeBatchBuild ? 'StartBuildBatch' : 'StartBuild'}`,
+        `codebuild:${this.props.executeBatchBuild ?
'StopBuildBatch' : 'StopBuild'}`, ], })); diff --git a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.expected.json b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.expected.json new file mode 100644 index 0000000000000..d489a5712eeba --- /dev/null +++ b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.expected.json @@ -0,0 +1,487 @@ +{ + "Resources": { + "MyRepoF4F48043": { + "Type": "AWS::CodeCommit::Repository", + "Properties": { + "RepositoryName": "MyIntegTestTempRepo" + } + }, + "MyRepoawscdkcodepipelinecodebuildbatchPipeline674F06D4EventRuleD3DE52E7": { + "Type": "AWS::Events::Rule", + "Properties": { + "EventPattern": { + "source": [ + "aws.codecommit" + ], + "resources": [ + { + "Fn::GetAtt": [ + "MyRepoF4F48043", + "Arn" + ] + } + ], + "detail-type": [ + "CodeCommit Repository State Change" + ], + "detail": { + "event": [ + "referenceCreated", + "referenceUpdated" + ], + "referenceName": [ + "master" + ] + } + }, + "State": "ENABLED", + "Targets": [ + { + "Arn": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codepipeline:", + { + "Ref": "AWS::Region" + }, + ":", + { + "Ref": "AWS::AccountId" + }, + ":", + { + "Ref": "PipelineC660917D" + } + ] + ] + }, + "Id": "Target0", + "RoleArn": { + "Fn::GetAtt": [ + "PipelineEventsRole46BEEA7C", + "Arn" + ] + } + } + ] + } + }, + "MyBucketF68F3FF0": { + "Type": "AWS::S3::Bucket", + "Properties": { + "VersioningConfiguration": { + "Status": "Enabled" + } + }, + "UpdateReplacePolicy": "Delete", + "DeletionPolicy": "Delete" + }, + "PipelineRoleD68726F7": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codepipeline.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineRoleDefaultPolicyC7A05455": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*", + "s3:DeleteObject*", + "s3:PutObject*", + "s3:Abort*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "MyBucketF68F3FF0", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "MyBucketF68F3FF0", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + }, + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "PipelineRoleD68726F7", + "Arn" + ] + } + }, + { + "Action": [ + "codecommit:GetBranch", + "codecommit:GetCommit", + "codecommit:UploadArchive", + "codecommit:GetUploadArchiveStatus", + "codecommit:CancelUploadArchive" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "MyRepoF4F48043", + "Arn" + ] + } + }, + { + "Action": [ + "codebuild:BatchGetBuilds", + "codebuild:StartBuildBatch", + "codebuild:StopBuildBatch" + ], + "Effect": "Allow", + "Resource": { + "Fn::GetAtt": [ + "MyBuildProject30DB9D6E", + "Arn" + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineRoleDefaultPolicyC7A05455", + "Roles": [ + { + "Ref": "PipelineRoleD68726F7" + } + ] + } + }, + "PipelineC660917D": { + "Type": "AWS::CodePipeline::Pipeline", + "Properties": { + "RoleArn": { + "Fn::GetAtt": [ + "PipelineRoleD68726F7", + "Arn" + ] + }, + "Stages": [ + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Source", + "Owner": "AWS", + "Provider": "CodeCommit", + "Version": "1" + }, + "Configuration": { + "RepositoryName": { + "Fn::GetAtt": [ + 
"MyRepoF4F48043", + "Name" + ] + }, + "BranchName": "master", + "PollForSourceChanges": false + }, + "Name": "Source", + "OutputArtifacts": [ + { + "Name": "Artifact_Source_Source" + } + ], + "RoleArn": { + "Fn::GetAtt": [ + "PipelineRoleD68726F7", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "Source" + }, + { + "Actions": [ + { + "ActionTypeId": { + "Category": "Build", + "Owner": "AWS", + "Provider": "CodeBuild", + "Version": "1" + }, + "Configuration": { + "ProjectName": { + "Ref": "MyBuildProject30DB9D6E" + }, + "BatchEnabled": "true" + }, + "InputArtifacts": [ + { + "Name": "Artifact_Source_Source" + } + ], + "Name": "Build", + "RoleArn": { + "Fn::GetAtt": [ + "PipelineRoleD68726F7", + "Arn" + ] + }, + "RunOrder": 1 + } + ], + "Name": "Build" + } + ], + "ArtifactStore": { + "Location": { + "Ref": "MyBucketF68F3FF0" + }, + "Type": "S3" + } + }, + "DependsOn": [ + "PipelineRoleDefaultPolicyC7A05455", + "PipelineRoleD68726F7" + ] + }, + "PipelineEventsRole46BEEA7C": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "events.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "PipelineEventsRoleDefaultPolicyFF4FCCE0": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": "codepipeline:StartPipelineExecution", + "Effect": "Allow", + "Resource": { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":codepipeline:", + { + "Ref": "AWS::Region" + }, + ":", + { + "Ref": "AWS::AccountId" + }, + ":", + { + "Ref": "PipelineC660917D" + } + ] + ] + } + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "PipelineEventsRoleDefaultPolicyFF4FCCE0", + "Roles": [ + { + "Ref": "PipelineEventsRole46BEEA7C" + } + ] + } + }, + "MyBuildProjectRole6B7E2258": { + "Type": "AWS::IAM::Role", + "Properties": { + "AssumeRolePolicyDocument": { + "Statement": [ + { + "Action": "sts:AssumeRole", + "Effect": "Allow", + "Principal": { + "Service": "codebuild.amazonaws.com" + } + } + ], + "Version": "2012-10-17" + } + } + }, + "MyBuildProjectRoleDefaultPolicy5604AA87": { + "Type": "AWS::IAM::Policy", + "Properties": { + "PolicyDocument": { + "Statement": [ + { + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:", + { + "Ref": "AWS::Region" + }, + ":", + { + "Ref": "AWS::AccountId" + }, + ":log-group:/aws/codebuild/", + { + "Ref": "MyBuildProject30DB9D6E" + } + ] + ] + }, + { + "Fn::Join": [ + "", + [ + "arn:", + { + "Ref": "AWS::Partition" + }, + ":logs:", + { + "Ref": "AWS::Region" + }, + ":", + { + "Ref": "AWS::AccountId" + }, + ":log-group:/aws/codebuild/", + { + "Ref": "MyBuildProject30DB9D6E" + }, + ":*" + ] + ] + } + ] + }, + { + "Action": [ + "s3:GetObject*", + "s3:GetBucket*", + "s3:List*" + ], + "Effect": "Allow", + "Resource": [ + { + "Fn::GetAtt": [ + "MyBucketF68F3FF0", + "Arn" + ] + }, + { + "Fn::Join": [ + "", + [ + { + "Fn::GetAtt": [ + "MyBucketF68F3FF0", + "Arn" + ] + }, + "/*" + ] + ] + } + ] + } + ], + "Version": "2012-10-17" + }, + "PolicyName": "MyBuildProjectRoleDefaultPolicy5604AA87", + "Roles": [ + { + "Ref": "MyBuildProjectRole6B7E2258" + } + ] + } + }, + "MyBuildProject30DB9D6E": { + "Type": "AWS::CodeBuild::Project", + "Properties": { + "Artifacts": { + "Type": "CODEPIPELINE" + }, + "Environment": { + 
"ComputeType": "BUILD_GENERAL1_SMALL", + "Image": "aws/codebuild/standard:1.0", + "ImagePullCredentialsType": "CODEBUILD", + "PrivilegedMode": false, + "Type": "LINUX_CONTAINER" + }, + "ServiceRole": { + "Fn::GetAtt": [ + "MyBuildProjectRole6B7E2258", + "Arn" + ] + }, + "Source": { + "Type": "CODEPIPELINE" + }, + "EncryptionKey": "alias/aws/s3" + } + } + } +} diff --git a/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.ts b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.ts new file mode 100644 index 0000000000000..d5fe1b2fb9b2f --- /dev/null +++ b/packages/@aws-cdk/aws-codepipeline-actions/test/integ.pipeline-code-build-batch.ts @@ -0,0 +1,56 @@ +import * as codebuild from '@aws-cdk/aws-codebuild'; +import * as codecommit from '@aws-cdk/aws-codecommit'; +import * as codepipeline from '@aws-cdk/aws-codepipeline'; +import * as s3 from '@aws-cdk/aws-s3'; +import * as cdk from '@aws-cdk/core'; +import * as cpactions from '../lib'; + +const app = new cdk.App(); + +const stack = new cdk.Stack(app, 'aws-cdk-codepipeline-codebuild-batch'); + +const repository = new codecommit.Repository(stack, 'MyRepo', { + repositoryName: 'MyIntegTestTempRepo', +}); +const bucket = new s3.Bucket(stack, 'MyBucket', { + versioned: true, + removalPolicy: cdk.RemovalPolicy.DESTROY, +}); + +const pipeline = new codepipeline.Pipeline(stack, 'Pipeline', { + artifactBucket: bucket, +}); +const pipelineRole = pipeline.role; + +const sourceOutput = new codepipeline.Artifact(); +const sourceAction = new cpactions.CodeCommitSourceAction({ + actionName: 'Source', + repository, + output: sourceOutput, + role: pipelineRole, +}); +pipeline.addStage({ + stageName: 'Source', + actions: [ + sourceAction, + ], +}); + +const project = new codebuild.PipelineProject(stack, 'MyBuildProject', { + grantReportGroupPermissions: false, +}); +const buildAction = new cpactions.CodeBuildAction({ + actionName: 'Build', + project, + executeBatchBuild: true, + input: sourceOutput, + role: pipelineRole, +}); +pipeline.addStage({ + stageName: 'Build', + actions: [ + buildAction, + ], +}); + +app.synth();