Add support for CodeStar CodeBuild clone ref and update CodeStar setup #563

Merged · 1 commit · Dec 9, 2022
5 changes: 5 additions & 0 deletions docs/providers-guide.md
@@ -211,6 +211,11 @@ Provider type: `codestar`.
web hook as part of the pipeline. Read the CodeStar Connections
documentation for more
[information](https://docs.aws.amazon.com/dtconsole/latest/userguide/connections.html).
- *output_artifact_format* - *(String)* default: `CODE_ZIP`
  - The output artifact format. Valid values are `CODEBUILD_CLONE_REF` and
    `CODE_ZIP`. If unspecified, it defaults to `CODE_ZIP`.
  - Please note: `CODEBUILD_CLONE_REF` can only be consumed by downstream
    CodeBuild actions (see the sketch below).
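For illustration, a minimal sketch of how this property is expected to flow from the deployment map's source properties into the generated CodeStarSourceConnection action configuration. The owner, repository, branch, and connection ARN below are hypothetical, not taken from this PR:

```python
# Hedged sketch with hypothetical values: how `output_artifact_format`
# is expected to surface in the CodePipeline source action configuration.
source_properties = {
    "owner": "example-org",
    "repository": "example-repo",
    "branch": "main",
    "output_artifact_format": "CODEBUILD_CLONE_REF",
}

configuration = {
    # Resolved from the SSM parameter behind `codestar_connection_path`.
    "ConnectionArn": (
        "arn:aws:codestar-connections:eu-west-1:111111111111:"
        "connection/example"
    ),
    "FullRepositoryId": (
        f"{source_properties['owner']}/{source_properties['repository']}"
    ),
    "BranchName": source_properties["branch"],
}
if source_properties.get("output_artifact_format"):
    # Only set when requested; omitting it keeps the CODE_ZIP default.
    configuration["OutputArtifactFormat"] = (
        source_properties["output_artifact_format"]
    )
```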

## Build

@@ -21,7 +21,11 @@
ROOT_ACCOUNT_ID = os.environ["ROOT_ACCOUNT_ID"]


def store_regional_parameter_config(pipeline, parameter_store, deployment_map_source):
def store_regional_parameter_config(
pipeline,
parameter_store,
deployment_map_source,
):
"""
Responsible for storing the region information for specific
pipelines. These regions are defined in the deployment_map
@@ -41,21 +45,36 @@ def store_regional_parameter_config(pipeline, parameter_store, deployment_map_so
)


def fetch_required_ssm_params(regions):
def fetch_required_ssm_params(pipeline_input, regions):
output = {}
for region in regions:
parameter_store = ParameterStore(region, boto3)
output[region] = {
"s3": parameter_store.fetch_parameter(
f"/cross_region/s3_regional_bucket/{region}"
f"/cross_region/s3_regional_bucket/{region}",
),
"kms": parameter_store.fetch_parameter(
f"/cross_region/kms_arn/{region}",
),
"kms": parameter_store.fetch_parameter(f"/cross_region/kms_arn/{region}"),
}
if region == DEPLOYMENT_ACCOUNT_REGION:
output[region]["modules"] = parameter_store.fetch_parameter(
"deployment_account_bucket"
)
output['default_scm_branch'] = parameter_store.fetch_parameter('default_scm_branch')
output['default_scm_branch'] = parameter_store.fetch_parameter(
'default_scm_branch',
)
codestar_connection_path = (
pipeline_input
.get('default_providers', {})
.get('source')
.get('properties', {})
.get('codestar_connection_path', {})
)
if codestar_connection_path:
output['codestar_connection_arn'] = (
parameter_store.fetch_parameter(codestar_connection_path)
)
return output
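To show the shape of the data this function now produces, here is a sketch of what `fetch_required_ssm_params` is expected to return for a single-region pipeline whose source defines a `codestar_connection_path`. All values below are hypothetical:

```python
# Hypothetical values; shape only.
ssm_params = {
    "eu-west-1": {
        "s3": "example-adf-regional-bucket",
        "kms": "arn:aws:kms:eu-west-1:111111111111:key/example",
        # Only present for the deployment account region.
        "modules": "example-deployment-account-bucket",
    },
    "default_scm_branch": "main",
    # Only present when the source defines a codestar_connection_path;
    # resolved by fetching that SSM parameter.
    "codestar_connection_arn": (
        "arn:aws:codestar-connections:eu-west-1:111111111111:"
        "connection/example"
    ),
}
```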


@@ -84,9 +103,12 @@ def generate_pipeline_inputs(pipeline, organizations, parameter_store):
# Targets should be a list of lists.

# Note: This is a big shift away from how ADF handles targets natively.
# Previously this would be a list of [accountId(s)] it now returns a list of [[account_ids], [account_ids]]
# for the sake of consistency we should probably think of a target consisting of multiple "waves". So if you see
# any reference to a wave going forward it will be the individual batch of account ids
# Previously, this would be a list of [account_id(s)]; it now returns a
# list of [[account_ids], [account_ids]].
#
# For the sake of consistency we should probably think of a target
# consisting of multiple "waves". So if you see any reference to
# a wave going forward it will be the individual batch of account ids.
pipeline_object.template_dictionary["targets"].append(
list(target_structure.generate_waves()),
)
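As a rough sketch of the structure the comment above describes (account ids are placeholders): each appended target entry is a list of waves, and each wave is one batch of account ids:

```python
# Placeholder account ids; structure only.
one_target = [
    ["111111111111", "222222222222"],  # wave 1 (first batch of accounts)
    ["333333333333"],                  # wave 2 (next batch)
]
# Each appended entry in template_dictionary["targets"] follows this shape:
# template_dictionary["targets"] -> [one_target, another_target, ...]
```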
@@ -96,20 +118,37 @@ def generate_pipeline_inputs(pipeline, organizations, parameter_store):

pipeline_object.generate_input()
data["ssm_params"] = fetch_required_ssm_params(
pipeline_object.input,
pipeline_object.input["regions"] or [DEPLOYMENT_ACCOUNT_REGION]
)
data["input"] = pipeline_object.input
data['input']['default_scm_branch'] = data["ssm_params"].get('default_scm_branch')
store_regional_parameter_config(pipeline_object, parameter_store, pipeline.get("deployment_map_source"))
if 'codestar_connection_arn' in data["ssm_params"]:
data['input']['default_providers']['source']['properties'][
'codestar_connection_arn'
] = data["ssm_params"]['codestar_connection_arn']
data['input']['default_scm_branch'] = data["ssm_params"].get(
'default_scm_branch',
)
store_regional_parameter_config(
pipeline_object,
parameter_store,
pipeline.get("deployment_map_source"),
)
return data


def lambda_handler(pipeline, _):
"""Main Lambda Entry point"""
parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
sts = STS()
cross_account_role_name = parameter_store.fetch_parameter(
"cross_account_access_role",
)
role = sts.assume_cross_account_role(
f'arn:{get_partition(DEPLOYMENT_ACCOUNT_REGION)}:iam::{ROOT_ACCOUNT_ID}:role/{parameter_store.fetch_parameter("cross_account_access_role")}-readonly',
(
f'arn:{get_partition(DEPLOYMENT_ACCOUNT_REGION)}:iam::'
f'{ROOT_ACCOUNT_ID}:role/{cross_account_role_name}-readonly'
),
"pipeline",
)
organizations = Organizations(role)
@@ -73,9 +73,9 @@ def lambda_handler(event, _):
)
parameter_store = ParameterStore(DEPLOYMENT_ACCOUNT_REGION, boto3)
current_pipelines = {
parameter.get("Name").split("/")[-2]
for parameter in get_current_pipelines(parameter_store)
}
parameter.get("Name").split("/")[-2]
for parameter in get_current_pipelines(parameter_store)
}

pipeline_names = {
p.get("name") for p in deployment_map.map_contents["pipelines"]
@@ -818,6 +818,15 @@ Resources:
- "iam:PassRole"
Resource:
- !Sub arn:${AWS::Partition}:iam::*:role/*
- Effect: Allow
Sid: "AllowCodeStarConnection"
Action:
- "codestar-connections:PassConnection"
Resource:
- !Sub arn:${AWS::Partition}:codestar-connections:${AWS::Region}:${AWS::AccountId}:connection/*
Condition:
StringEquals:
'codestar-connections:PassedToService': 'codepipeline.amazonaws.com'
- Effect: Allow
Action:
- "events:PutRule"
@@ -6,7 +6,6 @@

import os
import json
import boto3

from aws_cdk import (
aws_codepipeline as _codepipeline,
@@ -61,7 +60,11 @@ def __init__(self, **kwargs):
self.action_name = kwargs.get('action_name')
self.action_mode = kwargs.get('action_mode', '').upper()
self.region = kwargs.get('region') or ADF_DEPLOYMENT_REGION
self.account_id = self.map_params["default_providers"]["source"].get('properties', {}).get("account_id")
self.account_id = (
self.map_params["default_providers"]["source"]
.get('properties', {})
.get("account_id")
)
self.role_arn = self._generate_role_arn()
self.notification_endpoint = self.map_params.get("topic_arn")
self.default_scm_branch = self.map_params.get(
@@ -122,25 +125,41 @@ def _generate_configuration(self): #pylint: disable=R0912, R0911, R0915
'object_key')
}
if self.provider == "CodeStarSourceConnection":
owner = self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {})
repo = self.map_params.get('default_providers', {}).get('source', {}).get('properties', {}).get('repository', {}) or self.map_params['name']
codestar_connection_path = self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('codestar_connection_path', {})
ssm_client = boto3.client('ssm')
try:
response = ssm_client.get_parameter(Name=codestar_connection_path)
except Exception as e:
LOGGER.error(f"No parameter found at {codestar_connection_path}. Check the path/value.")
raise e
connection_arn = response['Parameter']['Value']
return {
"ConnectionArn": connection_arn,
default_source_props = (
self.map_params
.get('default_providers', {})
.get('source', {})
.get('properties', {})
)
owner = default_source_props.get('owner')
repo = (
default_source_props.get('repository')
or self.map_params['name']
)
if not default_source_props.get('codestar_connection_arn'):
raise Exception(
"The CodeStar Connection Arn could not be resolved for "
f"the {self.map_params['name']} pipeline. Please check "
"whether the codestar_connection_path is setup correctly "
"and validate that the Parameter it points to is properly "
"configured in SSM Parameter Store."
)
props = {
"ConnectionArn": default_source_props.get(
'codestar_connection_arn',
),
"FullRepositoryId": f"{owner}/{repo}",
"BranchName": self.map_params.get('default_providers', {}).get(
'source', {}).get('properties', {}).get(
'branch',
self.default_scm_branch
)
"BranchName": default_source_props.get(
'branch',
self.default_scm_branch,
)
}
output_artifact_format = default_source_props.get(
'output_artifact_format',
)
if output_artifact_format:
props["OutputArtifactFormat"] = output_artifact_format
return props
if self.provider == "GitHub":
return {
"Owner": self.map_params.get('default_providers', {}).get('source').get('properties', {}).get('owner', {}),
@@ -273,7 +292,11 @@ def _generate_configuration(self): #pylint: disable=R0912, R0911, R0915
and self.map_params['default_providers']['source'].get('properties', {}).get('poll_for_changes', False)
)
}
output_artifact_format = self.map_params['default_providers']['source'].get('properties', {}).get('output_artifact_format', None)
output_artifact_format = (
self.map_params['default_providers']['source']
.get('properties', {})
.get('output_artifact_format')
)
if output_artifact_format:
props["OutputArtifactFormat"] = output_artifact_format
return props
@@ -12,7 +12,7 @@

NOTIFICATION_PROPS = {
Optional("target"): str,
Optional("type") : Or("lambda", "chat_bot")
Optional("type"): Or("lambda", "chat_bot")
}

# Pipeline Params
@@ -47,6 +47,12 @@
)
)

SOURCE_OUTPUT_ARTIFACT_FORMAT = Or(
"CODEBUILD_CLONE_REF",
"CODE_ZIP",
None,
)
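A quick sketch of how this shared definition is expected to behave under the `schema` package this module already uses; the standalone validation calls below are illustrative only, not part of the PR:

```python
from schema import Or, Schema

# Reproduces the shared definition added above.
SOURCE_OUTPUT_ARTIFACT_FORMAT = Or(
    "CODEBUILD_CLONE_REF",
    "CODE_ZIP",
    None,
)

Schema(SOURCE_OUTPUT_ARTIFACT_FORMAT).validate("CODEBUILD_CLONE_REF")  # ok
Schema(SOURCE_OUTPUT_ARTIFACT_FORMAT).validate(None)  # ok, matches the default
# Schema(SOURCE_OUTPUT_ARTIFACT_FORMAT).validate("ZIP")  # raises SchemaError
```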

# CodeCommit Source
CODECOMMIT_SOURCE_PROPS = {
"account_id": AWS_ACCOUNT_ID_SCHEMA,
@@ -56,7 +62,9 @@
Optional("owner"): str,
Optional("role"): str,
Optional("trigger_on_changes"): bool,
Optional("output_artifact_format", default=None): Or("CODEBUILD_CLONE_REF", "CODE_ZIP", None)
Optional("output_artifact_format", default=None): (
SOURCE_OUTPUT_ARTIFACT_FORMAT
),
}
CODECOMMIT_SOURCE = {
"provider": 'codecommit',
@@ -82,7 +90,10 @@
Optional("repository"): str,
Optional("branch"): str,
"owner": str,
"codestar_connection_path": str
"codestar_connection_path": str,
Optional("output_artifact_format", default=None): (
SOURCE_OUTPUT_ARTIFACT_FORMAT
),
}

CODESTAR_SOURCE = {
@@ -114,7 +125,9 @@
Optional("image"): Or(str, CODEBUILD_IMAGE_PROPS),
Optional("size"): Or('small', 'medium', 'large'),
Optional("spec_filename"): str,
Optional("environment_variables"): {Optional(str): Or(str, bool, int, object)},
Optional("environment_variables"): {
Optional(str): Or(str, bool, int, object)
},
Optional("role"): str,
Optional("timeout"): int,
Optional("privileged"): bool,
@@ -273,15 +286,19 @@
'provider': Or('codecommit', 'github', 's3', 'codestar'),
'properties': dict,
},
lambda x: PROVIDER_SOURCE_SCHEMAS[x['provider']].validate(x), #pylint: disable=W0108
# pylint: disable=W0108
lambda x: PROVIDER_SOURCE_SCHEMAS[x['provider']].validate(x),
),
Optional('build'): And(
{
Optional('provider'): Or('codebuild', 'jenkins'),
Optional('enabled'): bool,
Optional('properties'): dict,
},
lambda x: PROVIDER_BUILD_SCHEMAS[x.get('provider', 'codebuild')].validate(x), #pylint: disable=W0108
# pylint: disable=W0108
lambda x: PROVIDER_BUILD_SCHEMAS[
x.get('provider', 'codebuild')
].validate(x),
),
Optional('deploy'): And(
{
Expand All @@ -292,7 +309,8 @@
Optional('enabled'): bool,
Optional('properties'): dict,
},
lambda x: PROVIDER_DEPLOY_SCHEMAS[x['provider']].validate(x), #pylint: disable=W0108
# pylint: disable=W0108
lambda x: PROVIDER_DEPLOY_SCHEMAS[x['provider']].validate(x),
),
}
REGION_SCHEMA = Or(
@@ -313,11 +331,31 @@

TARGET_SCHEMA = {
Optional("path"): Or(str, int, TARGET_LIST_SCHEMA),
Optional("tags"): {And(str, Regex(r"\A.{1,128}\Z")): And(str, Regex(r"\A.{0,256}\Z"))},
Optional("tags"): {
And(str, Regex(r"\A.{1,128}\Z")): And(str, Regex(r"\A.{0,256}\Z"))
},
Optional("target"): Or(str, int, TARGET_LIST_SCHEMA),
Optional("name"): str,
Optional("provider"): Or('lambda', 's3', 'codedeploy', 'cloudformation', 'service_catalog', 'approval', 'codebuild', 'jenkins'),
Optional("properties"): Or(CODEBUILD_PROPS, JENKINS_PROPS, CLOUDFORMATION_PROPS, CODEDEPLOY_PROPS, S3_DEPLOY_PROPS, SERVICECATALOG_PROPS, LAMBDA_PROPS, APPROVAL_PROPS),
Optional("provider"): Or(
'lambda',
's3',
'codedeploy',
'cloudformation',
'service_catalog',
'approval',
'codebuild',
'jenkins',
),
Optional("properties"): Or(
CODEBUILD_PROPS,
JENKINS_PROPS,
CLOUDFORMATION_PROPS,
CODEDEPLOY_PROPS,
S3_DEPLOY_PROPS,
SERVICECATALOG_PROPS,
LAMBDA_PROPS,
APPROVAL_PROPS,
),
Optional("regions"): REGION_SCHEMA,
Optional("exclude", default=[]): [str],
Optional("wave", default={"size": 50}): TARGET_WAVE_SCHEME
@@ -348,8 +386,9 @@
}
TOP_LEVEL_SCHEMA = {
"pipelines": [PIPELINE_SCHEMA],
# Allow any toplevel key starting with "x-" or "x_".
# ADF will ignore these, but users can use them to define anchors in one place.
# Allow any top level key starting with "x-" or "x_".
# ADF will ignore these, but users can use them to define anchors
# in one place.
Optional(Regex('^[x][-_].*')): object
}
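For instance, a minimal sketch of a top-level document the schema above is expected to accept, expressed as the already-parsed dictionary (YAML anchors themselves are resolved before validation reaches this point). The `x_defaults` key and its contents are hypothetical:

```python
# Hypothetical deployment map contents; the "x_defaults" key matches the
# Optional(Regex('^[x][-_].*')) entry and is ignored by ADF itself.
example_map = {
    "x_defaults": {
        "common_regions": ["eu-west-1", "us-east-1"],
    },
    "pipelines": [],
}
```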
