From ffe7f901a1591a83db15a52db677b02b204e86d3 Mon Sep 17 00:00:00 2001
From: Victor Engmark
Date: Mon, 25 Nov 2024 10:48:42 +1300
Subject: [PATCH] refactor: Use boto3 clients rather than resources TDE-1034
 (#1172)

### Motivation

The resource API is deprecated.

### Modifications

- Use the lower-level client API instead of resources.
- Type annotate S3 clients to make IDE use easier.

### Verification

Standard tests.
---
 scripts/aws/aws_helper.py                     | 13 +++-
 scripts/aws/tests/aws_helper_test.py          |  6 +-
 scripts/collection_from_items.py              |  9 ++-
 scripts/files/fs.py                           |  4 +-
 scripts/files/fs_s3.py                        | 67 ++++++++--------
 scripts/files/tests/fs_s3_test.py             | 77 ++++++++-----------
 scripts/files/tests/fs_test.py                | 14 ++--
 scripts/gdal/gdal_helper.py                   |  2 +-
 scripts/stac/imagery/tests/collection_test.py | 16 ++--
 scripts/tests/collection_from_items_test.py   | 31 ++++----
 10 files changed, 120 insertions(+), 119 deletions(-)

diff --git a/scripts/aws/aws_helper.py b/scripts/aws/aws_helper.py
index fc1baadbf..981a802e9 100644
--- a/scripts/aws/aws_helper.py
+++ b/scripts/aws/aws_helper.py
@@ -1,7 +1,7 @@
 import json
 from os import environ
 from time import sleep
-from typing import Any, NamedTuple
+from typing import TYPE_CHECKING, Any, NamedTuple
 from urllib.parse import urlparse
 
 from boto3 import Session
@@ -11,6 +11,11 @@
 
 from scripts.aws.aws_credential_source import CredentialSource
 
+if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3Client
+else:
+    S3Client = dict
+
 S3Path = NamedTuple("S3Path", [("bucket", str), ("key", str)])
 
 aws_profile = environ.get("AWS_PROFILE")
@@ -26,9 +31,9 @@
 
 def _init_roles() -> None:
     """Load bucket to roleArn mapping for LINZ internal buckets from SSM"""
-    s3 = session.resource("s3")
-    config_path = parse_path(bucket_config_path)
-    content_object = s3.Object(config_path.bucket, config_path.key)
-    file_content = content_object.get()["Body"].read().decode("utf-8")
+    s3_client: S3Client = session.client("s3")
+    bucket, key = parse_path(bucket_config_path)
+    content_object = s3_client.get_object(Bucket=bucket, Key=key)
+    file_content = content_object["Body"].read().decode("utf-8")
 
     json_content = json.loads(file_content)
 
diff --git a/scripts/aws/tests/aws_helper_test.py b/scripts/aws/tests/aws_helper_test.py
index 3cc58598c..912ccf6ea 100644
--- a/scripts/aws/tests/aws_helper_test.py
+++ b/scripts/aws/tests/aws_helper_test.py
@@ -6,13 +6,13 @@
 
 def test_parse_path_s3(subtests: SubTests) -> None:
     s3_path = "s3://bucket-name/path/to/the/file.test"
-    path = parse_path(s3_path)
+    bucket, key = parse_path(s3_path)
 
     with subtests.test():
-        assert path.bucket == "bucket-name"
+        assert bucket == "bucket-name"
 
     with subtests.test():
-        assert path.key == "path/to/the/file.test"
+        assert key == "path/to/the/file.test"
 
 
 def test_parse_path_local() -> None:
diff --git a/scripts/collection_from_items.py b/scripts/collection_from_items.py
index aa6d111a8..3f810f586 100644
--- a/scripts/collection_from_items.py
+++ b/scripts/collection_from_items.py
@@ -2,7 +2,7 @@
 import json
 import os
 from argparse import Namespace
-from typing import List
+from typing import TYPE_CHECKING, List
 
 import shapely.geometry
 import shapely.ops
@@ -17,6 +17,11 @@
 from scripts.stac.imagery.create_stac import create_collection
 from scripts.stac.imagery.metadata_constants import DATA_CATEGORIES, HUMAN_READABLE_REGIONS, CollectionMetadata
 
+if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3Client
+else:
+    S3Client = GetObjectOutputTypeDef = dict
+
 
 class NoItemsError(Exception):
     pass
@@ -114,7 +119,7 @@
         msg = f"uri is not a s3 path: {uri}"
         raise argparse.ArgumentTypeError(msg)
 
-    s3_client = client("s3")
+    s3_client: S3Client = client("s3")
 
     files_to_read = list_files_in_uri(uri, [SUFFIX_JSON, SUFFIX_FOOTPRINT], s3_client)
 
diff --git a/scripts/files/fs.py b/scripts/files/fs.py
index 7981a6a90..292aabf7e 100644
--- a/scripts/files/fs.py
+++ b/scripts/files/fs.py
@@ -4,7 +4,7 @@
 from pathlib import Path
 from typing import TYPE_CHECKING
 
-from boto3 import resource
+from boto3 import client
 from linz_logger import get_log
 
 from scripts.aws.aws_helper import is_s3
@@ -47,7 +47,7 @@ def read(path: str) -> bytes:
         try:
             return fs_s3.read(path)
         # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html#parsing-error-responses-and-catching-exceptions-from-aws-services
-        except resource("s3").meta.client.exceptions.ClientError as ce:
+        except client("s3").exceptions.ClientError as ce:
            # Error Code can be found here:
            # https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#ErrorCodeList
            if ce.response["Error"]["Code"] == "NoSuchKey":
diff --git a/scripts/files/fs_s3.py b/scripts/files/fs_s3.py
index 968e36f63..89764eaec 100644
--- a/scripts/files/fs_s3.py
+++ b/scripts/files/fs_s3.py
@@ -4,8 +4,7 @@
 from datetime import datetime
 from typing import TYPE_CHECKING, Any
 
-from boto3 import client, resource
-from botocore.exceptions import ClientError
+from boto3 import client
 from linz_logger import get_log
 
 from scripts.aws.aws_helper import get_session, parse_path
@@ -31,19 +30,19 @@ def write(destination: str, source: bytes, content_type: str | None = None) -> N
     if source is None:
         get_log().error("write_s3_source_none", path=destination, error="The 'source' is None.")
         raise Exception("The 'source' is None.")
-    s3_path = parse_path(destination)
-    key = s3_path.key
-    s3 = resource("s3")
+    bucket, key = parse_path(destination)
+    s3_client: S3Client = client("s3")
     multihash = checksum.multihash_as_hex(source)
 
     try:
-        s3_object = s3.Object(s3_path.bucket, key)
         if content_type:
-            s3_object.put(Body=source, ContentType=content_type, Metadata={"multihash": multihash})
+            s3_client.put_object(
+                Bucket=bucket, Key=key, Body=source, ContentType=content_type, Metadata={"multihash": multihash}
+            )
         else:
-            s3_object.put(Body=source, Metadata={"multihash": multihash})
+            s3_client.put_object(Bucket=bucket, Key=key, Body=source, Metadata={"multihash": multihash})
         get_log().debug("write_s3_success", path=destination, duration=time_in_ms() - start_time)
-    except ClientError as ce:
+    except s3_client.exceptions.ClientError as ce:
         get_log().error("write_s3_error", path=destination, error=f"Unable to write the file: {ce}")
         raise ce
 
@@ -56,34 +55,33 @@ def read(path: str, needs_credentials: bool = False) -> bytes:
         needs_credentials: Tells if credentials are needed. Defaults to False.
 
     Raises:
-        ce: botocore ClientError
+        ClientError
 
     Returns:
         The file in bytes.
""" start_time = time_in_ms() - s3_path = parse_path(path) - key = s3_path.key - s3 = resource("s3") + bucket, key = parse_path(path) + s3_client: S3Client = client("s3") try: if needs_credentials: - s3 = get_session(path).resource("s3") + s3_client = get_session(path).client("s3") - s3_object = s3.Object(s3_path.bucket, key) - file: bytes = s3_object.get()["Body"].read() - except s3.meta.client.exceptions.NoSuchBucket as nsb: + s3_object: GetObjectOutputTypeDef = s3_client.get_object(Bucket=bucket, Key=key) + file: bytes = s3_object["Body"].read() + except s3_client.exceptions.NoSuchBucket as nsb: get_log().error("s3_bucket_not_found", path=path, error=f"The specified bucket does not seem to exist: {nsb}") - raise nsb - except s3.meta.client.exceptions.NoSuchKey as nsk: + raise + except s3_client.exceptions.NoSuchKey as nsk: get_log().error("s3_key_not_found", path=path, error=f"The specified file does not seem to exist: {nsk}") - raise nsk - except s3.meta.client.exceptions.ClientError as ce: + raise + except s3_client.exceptions.ClientError as ce: # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html#parsing-error-responses-and-catching-exceptions-from-aws-services if not needs_credentials and ce.response["Error"]["Code"] == "AccessDenied": get_log().debug("read_s3_needs_credentials", path=path) return read(path, True) - raise ce + raise get_log().debug("read_s3_success", path=path, duration=time_in_ms() - start_time) return file @@ -97,36 +95,33 @@ def exists(path: str, needs_credentials: bool = False) -> bool: needs_credentials: if acces to object needs credentials. Defaults to False. Raises: - ce: ClientError - nsb: NoSuchBucket + s3_client.exceptions.ClientError + NoSuchBucket Returns: True if the S3 Object exists """ - s3_path, key = parse_path(path) - s3 = resource("s3") + bucket, key = parse_path(path) + s3_client: S3Client = client("s3") try: if needs_credentials: - s3 = get_session(path).resource("s3") + s3_client = get_session(path).client("s3") if path.endswith("/"): - bucket_name = bucket_name_from_path(s3_path) - bucket = s3.Bucket(bucket_name) # MaxKeys limits to 1 object in the response - objects = bucket.objects.filter(Prefix=key, MaxKeys=1) - + objects = s3_client.list_objects_v2(Bucket=bucket, Prefix=key, MaxKeys=1) if len(list(objects)) > 0: return True return False - # load() fetch the metadata, not the data. Calls a `head` behind the scene. 
-        s3.Object(s3_path, key).load()
+        s3_client.head_object(Bucket=bucket, Key=key)
+
         return True
-    except s3.meta.client.exceptions.NoSuchBucket as nsb:
+    except s3_client.exceptions.NoSuchBucket as nsb:
         get_log().debug("s3_bucket_not_found", path=path, info=f"The specified bucket does not seem to exist: {nsb}")
         return False
-    except s3.meta.client.exceptions.ClientError as ce:
+    except s3_client.exceptions.ClientError as ce:
         if not needs_credentials and ce.response["Error"]["Code"] == "AccessDenied":
             get_log().debug("read_s3_needs_credentials", path=path)
             return exists(path, True)
@@ -136,7 +131,7 @@ def exists(path: str, needs_credentials: bool = False) -> bool:
             get_log().debug("s3_key_not_found", path=path, info=f"The specified key does not seem to exist: {ce}")
             return False
         get_log().error("s3_client_error", path=path, error=f"ClientError raised: {ce}")
-        raise ce
+        raise
 
 
 def bucket_name_from_path(path: str) -> str:
diff --git a/scripts/files/tests/fs_s3_test.py b/scripts/files/tests/fs_s3_test.py
index 98a193652..419af5ebc 100644
--- a/scripts/files/tests/fs_s3_test.py
+++ b/scripts/files/tests/fs_s3_test.py
@@ -1,6 +1,6 @@
 import json
 
-from boto3 import client, resource
+from boto3 import client
 from botocore.exceptions import ClientError
 from moto import mock_aws
 from moto.core.models import DEFAULT_ACCOUNT_ID
@@ -18,13 +18,12 @@
 
 @mock_aws
 def test_write(subtests: SubTests) -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
 
     write("s3://testbucket/test.file", b"test content")
 
-    resp = boto3_client.get_object(Bucket="testbucket", Key="test.file")
+    resp = s3_client.get_object(Bucket="testbucket", Key="test.file")
     with subtests.test():
         assert resp["Body"].read() == b"test content"
 
@@ -34,12 +33,11 @@
 
 @mock_aws
 def test_write_content_type(subtests: SubTests) -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
     write("s3://testbucket/test.tiff", b"test content", ContentType.GEOTIFF.value)
 
-    resp = boto3_client.get_object(Bucket="testbucket", Key="test.tiff")
+    resp = s3_client.get_object(Bucket="testbucket", Key="test.tiff")
     with subtests.test():
         assert resp["Body"].read() == b"test content"
 
@@ -49,12 +47,11 @@
 
 @mock_aws
 def test_write_multihash_as_metadata(subtests: SubTests) -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
     write("s3://testbucket/test.tiff", b"test content", ContentType.GEOTIFF.value)
 
-    resp = boto3_client.get_object(Bucket="testbucket", Key="test.tiff")
+    resp = s3_client.get_object(Bucket="testbucket", Key="test.tiff")
     with subtests.test():
         assert resp["Metadata"]["multihash"] == "12206ae8a75555209fd6c44157c0aed8016e763ff435a19cf186f76863140143ff72"
 
@@ -62,10 +59,9 @@
 
 @mock_aws
 def test_read() -> None:
resource("s3", region_name=DEFAULT_REGION_NAME) - boto3_client = client("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") - boto3_client.put_object(Bucket="testbucket", Key="test.file", Body=b"test content") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") + s3_client.put_object(Bucket="testbucket", Key="test.file", Body=b"test content") content = read("s3://testbucket/test.file") @@ -84,8 +80,8 @@ def test_read_bucket_not_found(capsys: CaptureFixture[str]) -> None: @mock_aws def test_read_key_not_found(capsys: CaptureFixture[str]) -> None: - s3 = resource("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") with raises(ClientError): read("s3://testbucket/test.file") @@ -96,10 +92,9 @@ def test_read_key_not_found(capsys: CaptureFixture[str]) -> None: @mock_aws def test_exists() -> None: - s3 = resource("s3", region_name=DEFAULT_REGION_NAME) - boto3_client = client("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") - boto3_client.put_object(Bucket="testbucket", Key="test.file", Body=b"test content") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") + s3_client.put_object(Bucket="testbucket", Key="test.file", Body=b"test content") file_exists = exists("s3://testbucket/test.file") @@ -108,10 +103,9 @@ def test_exists() -> None: @mock_aws def test_directory_exists() -> None: - s3 = resource("s3", region_name=DEFAULT_REGION_NAME) - boto3_client = client("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") - boto3_client.put_object(Bucket="testbucket", Key="hello/test.file", Body=b"test content") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") + s3_client.put_object(Bucket="testbucket", Key="hello/test.file", Body=b"test content") directory_exists = exists("s3://testbucket/hello/") @@ -132,10 +126,9 @@ def test_exists_bucket_not_exists(capsys: CaptureFixture[str], subtests: SubTest @mock_aws def test_exists_object_not_exists() -> None: - s3 = resource("s3", region_name=DEFAULT_REGION_NAME) - boto3_client = client("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") - boto3_client.put_object(Bucket="testbucket", Key="hello/another.file", Body=b"test content") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") + s3_client.put_object(Bucket="testbucket", Key="hello/another.file", Body=b"test content") file_exists = exists("s3://testbucket/test.file") @@ -144,10 +137,9 @@ def test_exists_object_not_exists() -> None: @mock_aws def test_exists_object_starting_with_not_exists() -> None: - s3 = resource("s3", region_name=DEFAULT_REGION_NAME) - boto3_client = client("s3", region_name=DEFAULT_REGION_NAME) - s3.create_bucket(Bucket="testbucket") - boto3_client.put_object(Bucket="testbucket", Key="hello/another.file", Body=b"test content") + s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME) + s3_client.create_bucket(Bucket="testbucket") + s3_client.put_object(Bucket="testbucket", Key="hello/another.file", Body=b"test content") file_exists = exists("s3://testbucket/hello/another.fi") @@ -157,14 +149,13 @@ def test_exists_object_starting_with_not_exists() -> None: @mock_aws def 
 def test_list_files_in_uri(subtests: SubTests) -> None:
     bucket_name = "testbucket"
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket=bucket_name)
-    boto3_client.put_object(Bucket=bucket_name, Key="data/collection.json", Body=b"")
-    boto3_client.put_object(Bucket=bucket_name, Key="data/image.tiff", Body=b"")
-    boto3_client.put_object(Bucket=bucket_name, Key="data/image_meta.xml", Body=b"")
-
-    files = list_files_in_uri(f"s3://{bucket_name}/data/", [".json", "_meta.xml"], boto3_client)
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket=bucket_name)
+    s3_client.put_object(Bucket=bucket_name, Key="data/collection.json", Body=b"")
+    s3_client.put_object(Bucket=bucket_name, Key="data/image.tiff", Body=b"")
+    s3_client.put_object(Bucket=bucket_name, Key="data/image_meta.xml", Body=b"")
+
+    files = list_files_in_uri(f"s3://{bucket_name}/data/", [".json", "_meta.xml"], s3_client)
 
     with subtests.test():
         assert len(files) == 2
diff --git a/scripts/files/tests/fs_test.py b/scripts/files/tests/fs_test.py
index 120d597e6..d9f902a4b 100644
--- a/scripts/files/tests/fs_test.py
+++ b/scripts/files/tests/fs_test.py
@@ -3,7 +3,7 @@
 from shutil import rmtree
 from tempfile import mkdtemp
 
-from boto3 import client, resource
+from boto3 import client
 from moto import mock_aws
 from moto.core.models import DEFAULT_ACCOUNT_ID
 from moto.s3.models import s3_backends
@@ -24,8 +24,8 @@ def test_read_key_not_found_local() -> None:
 
 @mock_aws
 def test_read_key_not_found_s3(capsys: CaptureFixture[str]) -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
 
     with raises(NoSuchFileError):
         read("s3://testbucket/test.file")
@@ -48,8 +48,8 @@ def test_write_sidecars_file_not_found_local(capsys: CaptureFixture[str]) -> Non
 
 @mock_aws
 def test_write_all_key_not_found_s3() -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
 
     # Raises an exception as all files are not written
     with raises(Exception) as e:
@@ -60,8 +60,8 @@
 
 @mock_aws
 def test_write_sidecars_key_not_found_s3(capsys: CaptureFixture[str]) -> None:
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="testbucket")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="testbucket")
 
     write_sidecars(["s3://testbucket/test.prj"], "/tmp")
 
diff --git a/scripts/gdal/gdal_helper.py b/scripts/gdal/gdal_helper.py
index baba1bf88..e6a9b165f 100644
--- a/scripts/gdal/gdal_helper.py
+++ b/scripts/gdal/gdal_helper.py
@@ -66,7 +66,7 @@ def run_gdal(
         output_file: the output file path
 
     Raises:
-        cpe: CalledProcessError is raised if something goes wrong during the execution of the command
+        CalledProcessError is raised if something goes wrong during the execution of the command
 
     Returns:
         subprocess.CompletedProcess: the output process.
diff --git a/scripts/stac/imagery/tests/collection_test.py b/scripts/stac/imagery/tests/collection_test.py
index f77388552..03d8e19c4 100644
--- a/scripts/stac/imagery/tests/collection_test.py
+++ b/scripts/stac/imagery/tests/collection_test.py
@@ -5,9 +5,10 @@
 from tempfile import mkdtemp
 
 import shapely.geometry
-from boto3 import resource
+from boto3 import client
 from moto import mock_aws
 from moto.s3.responses import DEFAULT_REGION_NAME
+from mypy_boto3_s3 import S3Client
 from pytest_subtests import SubTests
 from shapely.predicates import is_valid
 
@@ -35,9 +36,9 @@ def test_title_description_id_created_on_init(
         assert collection.stac["title"] == "Hawke's Bay Forest Assessment 0.3m Rural Aerial Photos (2023)"
 
     with subtests.test():
-        assert (
-            collection.stac["description"]
-            == "Orthophotography within the Hawke's Bay region captured in the 2023 flying season, published as a record of the Forest Assessment event."  # pylint: disable=line-too-long
+        assert collection.stac["description"] == (
+            "Orthophotography within the Hawke's Bay region captured in the 2023 flying season, "
+            "published as a record of the Forest Assessment event."
         )
 
     with subtests.test():
@@ -333,7 +334,8 @@ def test_capture_area_added(fake_collection_metadata: CollectionMetadata, fake_l
 
     with subtests.test():
         assert collection.stac["assets"]["capture_area"]["file:checksum"] in (
-            "1220ba57cd77defc7fa72e140f4faa0846e8905ae443de04aef99bf381d4650c17a0",  # geos 3.11 - geos 3.12 as yet untested
+            "1220ba57cd77defc7fa72e140f4faa0846e8905ae443de04aef99bf381d4650c17a0",
+            # geos 3.11 - geos 3.12 as yet untested
         )
 
     with subtests.test():
@@ -384,8 +386,8 @@ def test_linz_slug_is_present(fake_collection_metadata: CollectionMetadata, fake
 @mock_aws
 def test_capture_dates_added(fake_collection_metadata: CollectionMetadata, fake_linz_slug: str) -> None:
     collection = ImageryCollection(fake_collection_metadata, any_epoch_datetime, fake_linz_slug)
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="flat")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="flat")
     write("s3://flat/capture-dates.geojson", b"")
     collection.add_capture_dates("s3://flat")
     assert collection.stac["assets"]["capture_dates"] == {
diff --git a/scripts/tests/collection_from_items_test.py b/scripts/tests/collection_from_items_test.py
index 03e022474..40ded74a2 100644
--- a/scripts/tests/collection_from_items_test.py
+++ b/scripts/tests/collection_from_items_test.py
@@ -1,11 +1,11 @@
 from datetime import datetime
 from decimal import Decimal
 from os import environ
-from typing import Iterator
+from typing import TYPE_CHECKING, Iterator
 from unittest.mock import patch
 
 import pytest
-from boto3 import client, resource
+from boto3 import client
 from moto import mock_aws
 from moto.s3.responses import DEFAULT_REGION_NAME
 from pytest import CaptureFixture, raises
@@ -21,6 +21,11 @@
 from scripts.stac.imagery.tests.generators import any_stac_asset, any_stac_processing
 from scripts.tests.datetimes_test import any_epoch_datetime_string
 
+if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3Client
+else:
+    S3Client = GetObjectOutputTypeDef = dict
+
 
 @pytest.fixture(name="item", autouse=True)
 def setup() -> Iterator[ImageryItem]:
@@ -42,9 +47,8 @@ def setup() -> Iterator[ImageryItem]:
 
 @mock_aws
 def test_should_create_collection_file(item: ImageryItem, fake_linz_slug: str) -> None:
     # Mock AWS S3
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="stacfiles")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="stacfiles")
     item.add_collection("abc")
     write("s3://stacfiles/item.json", dict_to_json_bytes(item.stac))
     # CLI arguments
@@ -74,7 +78,7 @@ def test_should_create_collection_file(item: ImageryItem, fake_linz_slug: str) -
     main(args)
 
     # Verify collection.json has been created
-    resp = boto3_client.get_object(Bucket="stacfiles", Key="collection.json")
+    resp = s3_client.get_object(Bucket="stacfiles", Key="collection.json")
     assert '"type": "Collection"' in resp["Body"].read().decode("utf-8")
 
 
@@ -83,8 +87,8 @@ def test_should_fail_if_collection_has_no_matching_items(
     item: ImageryItem, fake_linz_slug: str, capsys: CaptureFixture[str], subtests: SubTests
 ) -> None:
     # Mock AWS S3
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="stacfiles")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="stacfiles")
     item_collection_id = "abc"
     item.add_collection(item_collection_id)
     write("s3://stacfiles/item.json", dict_to_json_bytes(item.stac))
@@ -155,8 +159,8 @@ def test_should_fail_to_create_collection_file_without_linz_slug(capsys: Capture
 
 @mock_aws
 def test_should_not_add_if_not_item(fake_linz_slug: str, capsys: CaptureFixture[str]) -> None:
     # Mock AWS S3
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="stacfiles")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="stacfiles")
     collection_id = "abc"
     # Create mocked "existing" Collection
     metadata: CollectionMetadata = {
@@ -205,9 +209,8 @@ def test_should_not_add_if_not_item(fake_linz_slug: str, capsys: CaptureFixture[
 
 @mock_aws
 def test_should_determine_dates_from_items(item: ImageryItem, fake_linz_slug: str) -> None:
     # Mock AWS S3
-    s3 = resource("s3", region_name=DEFAULT_REGION_NAME)
-    boto3_client = client("s3", region_name=DEFAULT_REGION_NAME)
-    s3.create_bucket(Bucket="stacfiles")
+    s3_client: S3Client = client("s3", region_name=DEFAULT_REGION_NAME)
+    s3_client.create_bucket(Bucket="stacfiles")
     item.add_collection("abc")
     write("s3://stacfiles/item_a.json", dict_to_json_bytes(item.stac))
     item.stac["properties"]["start_datetime"] = "2022-04-12T00:00:00Z"
@@ -241,5 +244,5 @@ def test_should_determine_dates_from_items(item: ImageryItem, fake_linz_slug: st
     main(args)
 
     # Verify collection.json has been created
-    resp = boto3_client.get_object(Bucket="stacfiles", Key="collection.json")
+    resp = s3_client.get_object(Bucket="stacfiles", Key="collection.json")
     assert "(2021-2022)" in resp["Body"].read().decode("utf-8")
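
### Appendix: resource-to-client mapping

A minimal standalone sketch of the API mapping this refactor applies throughout (not part of the patch; the bucket and key names are hypothetical, and running it requires real AWS credentials):

```python
from typing import TYPE_CHECKING

from boto3 import client

if TYPE_CHECKING:
    from mypy_boto3_s3 import S3Client
else:
    S3Client = dict  # the stub type exists only for type checkers

# Deprecated resource API (what the patch removes):
#     s3 = resource("s3")
#     body = s3.Object("example-bucket", "example.txt").get()["Body"].read()

# Client API equivalent used throughout the patch:
s3_client: S3Client = client("s3")
body = s3_client.get_object(Bucket="example-bucket", Key="example.txt")["Body"].read()

# head_object replaces Object(...).load() for existence checks, and
# list_objects_v2 replaces Bucket(...).objects.filter(...) for prefix listings;
# its response is a dict whose KeyCount field gives the number of keys returned.
s3_client.head_object(Bucket="example-bucket", Key="example.txt")
listing = s3_client.list_objects_v2(Bucket="example-bucket", Prefix="data/", MaxKeys=1)
assert listing["KeyCount"] <= 1
```

The `if TYPE_CHECKING` guard keeps `mypy-boto3-s3` a development-only dependency: annotations resolve against the stub under mypy, while at runtime the alias is a plain `dict` and the package is never imported.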