chore(tests): refactor E2E test mechanics to ease maintenance, writing tests and parallelization (#1444)
1 parent 0e20ee0 · commit 3464ee9
Showing 11 changed files with 933 additions and 357 deletions.
@@ -0,0 +1,21 @@
import pytest

from tests.e2e.metrics.infrastructure import MetricsStack
from tests.e2e.utils.infrastructure import deploy_once


@pytest.fixture(autouse=True, scope="module")
def infrastructure(request: pytest.FixtureRequest, tmp_path_factory: pytest.TempPathFactory, worker_id: str):
    """Setup and teardown logic for E2E test infrastructure

    Parameters
    ----------
    request : fixtures.SubRequest
        test fixture containing metadata about test execution

    Yields
    ------
    Dict[str, str]
        CloudFormation Outputs from deployed infrastructure
    """
    yield from deploy_once(stack=MetricsStack, request=request, tmp_path_factory=tmp_path_factory, worker_id=worker_id)
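The module-scoped fixture above pushes all deployment mechanics into deploy_once, whose implementation is not part of this diff. As a rough sketch of how such a helper can deploy a stack once per module and still work under pytest-xdist, the usual pattern serializes deployment behind a file lock shared by all workers; the deploy()/delete() methods, handlers directory layout, and cache file name below are assumptions for illustration, not the project's actual code.

import json
from pathlib import Path

from filelock import FileLock


def deploy_once(stack, request, tmp_path_factory, worker_id):
    """Sketch only: deploy a stack once per module and share its outputs across workers."""
    handlers_dir = Path(request.fspath.dirname) / "handlers"  # hypothetical layout
    infra = stack(handlers_dir=handlers_dir)

    if worker_id == "master":
        # pytest-xdist is not in use: deploy, hand outputs to the tests, then tear down.
        yield infra.deploy()
        infra.delete()
        return

    # Under xdist, every worker shares the parent of its base temp directory, so the
    # first worker to acquire the lock deploys the stack and caches its outputs.
    cache = tmp_path_factory.getbasetemp().parent / f"{stack.__name__}_outputs.json"
    with FileLock(f"{cache}.lock"):
        if cache.is_file():
            outputs = json.loads(cache.read_text())
        else:
            outputs = infra.deploy()
            cache.write_text(json.dumps(outputs))
    yield outputs
    # Teardown coordination (e.g. last worker deletes the stack) is omitted in this sketch.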
@@ -1,14 +1,17 @@
-import os
-
 from aws_lambda_powertools import Metrics
-from aws_lambda_powertools.metrics import MetricUnit
 
-METRIC_NAME = os.environ["METRIC_NAME"]
-
-metrics = Metrics()
+my_metrics = Metrics()
 
 
-@metrics.log_metrics
+@my_metrics.log_metrics
 def lambda_handler(event, context):
-    metrics.add_metric(name=METRIC_NAME, unit=MetricUnit.Count, value=1)
+    metrics, namespace, service = event.get("metrics"), event.get("namespace"), event.get("service")
+
+    # Maintenance: create a public method to set these explicitly
+    my_metrics.namespace = namespace
+    my_metrics.service = service
+
+    for metric in metrics:
+        my_metrics.add_metric(**metric)
+
     return "success"
@@ -0,0 +1,12 @@
from aws_lambda_powertools import Metrics

my_metrics = Metrics()


@my_metrics.log_metrics(capture_cold_start_metric=True)
def lambda_handler(event, context):
    # Maintenance: create a public method to set these explicitly
    my_metrics.namespace = event.get("namespace")
    my_metrics.service = event.get("service")

    return "success"
@@ -0,0 +1,11 @@
from pathlib import Path

from tests.e2e.utils.infrastructure import BaseInfrastructureV2


class MetricsStack(BaseInfrastructureV2):
    def __init__(self, handlers_dir: Path, feature_name: str = "metrics") -> None:
        super().__init__(feature_name, handlers_dir)

    def create_resources(self):
        self.create_lambda_functions()
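MetricsStack illustrates the pattern each feature's E2E suite now follows: subclass BaseInfrastructureV2, point it at the feature's handlers directory, and declare resources in create_resources. As a hedged illustration of how another feature would plug in, the class and feature name below are hypothetical, and the comment about create_lambda_functions reflects what the new metrics fixtures (BasicHandler, BasicHandlerArn, ...) suggest it does rather than documented behaviour.

from pathlib import Path

from tests.e2e.utils.infrastructure import BaseInfrastructureV2


class LoggerStack(BaseInfrastructureV2):
    # Hypothetical stack for another feature, mirroring MetricsStack above.
    def __init__(self, handlers_dir: Path, feature_name: str = "logger") -> None:
        super().__init__(feature_name, handlers_dir)

    def create_resources(self):
        # Presumably packages every handler found under handlers_dir into its own
        # Lambda function and exports the function name/ARN as stack outputs.
        self.create_lambda_functions()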
@@ -1,40 +1,69 @@
-import datetime
-import uuid
+import json
 
-import boto3
 import pytest
-from e2e import conftest
-from e2e.utils import helpers
 
-
-@pytest.fixture(scope="module")
-def config() -> conftest.LambdaConfig:
-    return {
-        "parameters": {},
-        "environment_variables": {
-            "POWERTOOLS_METRICS_NAMESPACE": "powertools-e2e-metric",
-            "POWERTOOLS_SERVICE_NAME": "test-powertools-service",
-            "METRIC_NAME": f"business-metric-{str(uuid.uuid4()).replace('-','_')}",
-        },
-    }
+from tests.e2e.utils import helpers
 
 
-def test_basic_lambda_metric_visible(execute_lambda: conftest.InfrastructureOutput, config: conftest.LambdaConfig):
+@pytest.fixture
+def basic_handler_fn(infrastructure: dict) -> str:
+    return infrastructure.get("BasicHandler", "")
+
+
+@pytest.fixture
+def basic_handler_fn_arn(infrastructure: dict) -> str:
+    return infrastructure.get("BasicHandlerArn", "")
+
+
+@pytest.fixture
+def cold_start_fn(infrastructure: dict) -> str:
+    return infrastructure.get("ColdStart", "")
+
+
+@pytest.fixture
+def cold_start_fn_arn(infrastructure: dict) -> str:
+    return infrastructure.get("ColdStartArn", "")
+
+
+METRIC_NAMESPACE = "powertools-e2e-metric"
+
+
+def test_basic_lambda_metric_is_visible(basic_handler_fn: str, basic_handler_fn_arn: str):
     # GIVEN
-    start_date = execute_lambda.get_lambda_execution_time()
-    end_date = start_date + datetime.timedelta(minutes=5)
+    metric_name = helpers.build_metric_name()
+    service = helpers.build_service_name()
+    dimensions = helpers.build_add_dimensions_input(service=service)
+    metrics = helpers.build_multiple_add_metric_input(metric_name=metric_name, value=1, quantity=3)
 
     # WHEN
+    event = json.dumps({"metrics": metrics, "service": service, "namespace": METRIC_NAMESPACE})
+    _, execution_time = helpers.trigger_lambda(lambda_arn=basic_handler_fn_arn, payload=event)
+
     metrics = helpers.get_metrics(
+        namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
+    )
+
+    # THEN
+    metric_data = metrics.get("Values", [])
+    assert metric_data and metric_data[0] == 3.0
+
+
+def test_cold_start_metric(cold_start_fn_arn: str, cold_start_fn: str):
+    # GIVEN
+    metric_name = "ColdStart"
+    service = helpers.build_service_name()
+    dimensions = helpers.build_add_dimensions_input(function_name=cold_start_fn, service=service)
+
+    # WHEN we invoke twice
+    event = json.dumps({"service": service, "namespace": METRIC_NAMESPACE})
+
+    _, execution_time = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)
+    _, _ = helpers.trigger_lambda(lambda_arn=cold_start_fn_arn, payload=event)
+
+    metrics = helpers.get_metrics(
-        start_date=start_date,
-        end_date=end_date,
-        namespace=config["environment_variables"]["POWERTOOLS_METRICS_NAMESPACE"],
-        metric_name=config["environment_variables"]["METRIC_NAME"],
-        service_name=config["environment_variables"]["POWERTOOLS_SERVICE_NAME"],
-        cw_client=boto3.client(service_name="cloudwatch"),
+        namespace=METRIC_NAMESPACE, start_date=execution_time, metric_name=metric_name, dimensions=dimensions
     )
 
     # THEN
-    assert metrics.get("Timestamps") and len(metrics.get("Timestamps")) == 1
-    assert metrics.get("Values") and len(metrics.get("Values")) == 1
-    assert metrics.get("Values") and metrics.get("Values")[0] == 1
+    metric_data = metrics.get("Values", [])
+    assert metric_data and metric_data[0] == 1.0
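The helpers module (build_metric_name, build_add_dimensions_input, trigger_lambda, get_metrics, ...) is not shown in this diff. For orientation, get_metrics presumably wraps a CloudWatch GetMetricData query similar to the one the old test assembled inline with boto3; the sketch below uses standard boto3 calls, but the namespace, metric name, dimension, and Sum statistic are assumptions for illustration.

import datetime

import boto3

cw_client = boto3.client("cloudwatch")
now = datetime.datetime.utcnow()

# Query the Sum of one metric, filtered by the "service" dimension, over the last
# few minutes -- roughly what the old inline boto3 call in this test used to do.
response = cw_client.get_metric_data(
    MetricDataQueries=[
        {
            "Id": "m1",
            "MetricStat": {
                "Metric": {
                    "Namespace": "powertools-e2e-metric",
                    "MetricName": "business-metric",  # illustrative
                    "Dimensions": [{"Name": "service", "Value": "test-powertools-service"}],
                },
                "Period": 60,
                "Stat": "Sum",
            },
        }
    ],
    StartTime=now - datetime.timedelta(minutes=5),
    EndTime=now,
)
metric_data = response["MetricDataResults"][0]["Values"]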
@@ -0,0 +1,120 @@
import io
import json
import zipfile
from pathlib import Path
from typing import List, Optional

import boto3
import botocore.exceptions
from mypy_boto3_s3 import S3Client

from aws_lambda_powertools import Logger
from tests.e2e.utils.models import AssetTemplateConfig, TemplateAssembly

logger = Logger(service="e2e-utils")


class Asset:
    def __init__(
        self, config: AssetTemplateConfig, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Asset logic to verify existence and resolve deeply nested configuration

        Parameters
        ----------
        config : AssetTemplateConfig
            CDK Asset configuration found in synthesized template
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional["S3Client"], optional
            S3 client instance for asset operations, by default None
        """
        self.config = config
        self.s3 = boto3_client or boto3.client("s3")
        self.account_id = account_id
        self.region = region
        self.asset_path = config.source.path
        self.asset_packaging = config.source.packaging
        self.object_key = config.destinations.current_account_current_region.object_key
        self._bucket = config.destinations.current_account_current_region.bucket_name
        self.bucket_name = self._resolve_bucket_name()

    @property
    def is_zip(self):
        return self.asset_packaging == "zip"

    def exists_in_s3(self, key: str) -> bool:
        try:
            return self.s3.head_object(Bucket=self.bucket_name, Key=key) is not None
        except botocore.exceptions.ClientError:
            return False

    def _resolve_bucket_name(self) -> str:
        return self._bucket.replace("${AWS::AccountId}", self.account_id).replace("${AWS::Region}", self.region)


class Assets:
    def __init__(
        self, asset_manifest: Path, account_id: str, region: str, boto3_client: Optional[S3Client] = None
    ) -> None:
        """CDK Assets logic to find each asset, compress, and upload

        Parameters
        ----------
        asset_manifest : Path
            Asset manifest JSON file (self.__synthesize)
        account_id : str
            AWS Account ID
        region : str
            AWS Region
        boto3_client : Optional[S3Client], optional
            S3 client instance for asset operations, by default None
        """
        self.asset_manifest = asset_manifest
        self.account_id = account_id
        self.region = region
        self.s3 = boto3_client or boto3.client("s3")
        self.assets = self._find_assets_from_template()
        self.assets_location = str(self.asset_manifest.parent)

    def upload(self):
        """Drop-in replacement for the cdk-assets package S3 upload part.

        https://www.npmjs.com/package/cdk-assets

        We use a custom solution to avoid dependencies on the Node.js ecosystem.
        We follow the same design as cdk-assets:
        https://github.com/aws/aws-cdk-rfcs/blob/master/text/0092-asset-publishing.md
        """
        for asset in self.assets:
            if not asset.is_zip:
                logger.debug(f"Asset '{asset.object_key}' is not zip. Skipping upload.")
                continue

            if asset.exists_in_s3(key=asset.object_key):
                logger.debug(f"Asset '{asset.object_key}' already exists in S3. Skipping upload.")
                continue

            archive = self._compress_assets(asset)
            logger.debug("Uploading archive to S3")
            self.s3.upload_fileobj(Fileobj=archive, Bucket=asset.bucket_name, Key=asset.object_key)
            logger.debug("Successfully uploaded")

    def _find_assets_from_template(self) -> List[Asset]:
        data = json.loads(self.asset_manifest.read_text())
        template = TemplateAssembly(**data)
        return [
            Asset(config=asset_config, account_id=self.account_id, region=self.region)
            for asset_config in template.files.values()
        ]

    def _compress_assets(self, asset: Asset) -> io.BytesIO:
        buf = io.BytesIO()
        asset_dir = f"{self.assets_location}/{asset.asset_path}"
        asset_files = list(Path(asset_dir).iterdir())
        with zipfile.ZipFile(buf, "w", compression=zipfile.ZIP_DEFLATED) as archive:
            for asset_file in asset_files:
                logger.debug(f"Adding file '{asset_file}' to the archive.")
                archive.write(asset_file, arcname=asset_file.relative_to(asset_dir))
        buf.seek(0)
        return buf
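Taken together, Assets replaces the npm cdk-assets publishing step after a local CDK synth. Below is a hypothetical usage sketch; the import path, manifest file name, and the assumption that BaseInfrastructureV2 runs this before creating the CloudFormation stack are inferred from this commit, not confirmed by it.

from pathlib import Path

import boto3

from tests.e2e.utils.asset import Assets  # module path assumed from this diff

# Hypothetical wiring: after synthesizing the CDK app, publish its file assets to the
# bootstrap bucket so the CloudFormation template can reference them.
account_id = boto3.client("sts").get_caller_identity()["Account"]
region = boto3.Session().region_name

assets = Assets(
    asset_manifest=Path("cdk.out/MetricsStack.assets.json"),  # illustrative file name
    account_id=account_id,
    region=region,
)
assets.upload()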