diff --git a/.github/workflows/integration-test-local.yml b/.github/workflows/integration-test-local.yml
index de25eea96..2948847d0 100644
--- a/.github/workflows/integration-test-local.yml
+++ b/.github/workflows/integration-test-local.yml
@@ -100,7 +100,7 @@ jobs:
         run: poetry run skyplane deprovision
       - name: Delete matching S3 buckets
         run: |
-          for pattern in "test-skyplane-" "skyplane-integration-us-east-1-" "integrationus-east-1-"; do
+          for pattern in "test-skyplane-" "skyplane-integration-" "integrationus-east-1-"; do
             aws s3api list-buckets --query "Buckets[?starts_with(Name, \`${pattern}\`) == \`true\`].Name" --output text | tr '\t' '\n' | while read bucket; do aws s3 rb "s3://$bucket" --force; done
           done
       - name: Cleanup GCP service account
diff --git a/.github/workflows/integration-test-multiple-sizes.yml b/.github/workflows/integration-test-multiple-sizes.yml
index 0fbc7bd65..4b9a72b18 100644
--- a/.github/workflows/integration-test-multiple-sizes.yml
+++ b/.github/workflows/integration-test-multiple-sizes.yml
@@ -104,7 +104,7 @@ jobs:
         run: poetry run skyplane deprovision
       - name: Delete matching S3 buckets
         run: |
-          for pattern in "test-skyplane-" "skyplane-integration-us-east-1-" "integrationus-east-1-"; do
+          for pattern in "test-skyplane-" "skyplane-integration-" "integrationus-east-1-"; do
             aws s3api list-buckets --query "Buckets[?starts_with(Name, \`${pattern}\`) == \`true\`].Name" --output text | tr '\t' '\n' | while read bucket; do aws s3 rb "s3://$bucket" --force; done
           done
       - name: Cleanup GCP service account
diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index eb7b4822c..cfafe1f35 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -97,7 +97,7 @@ jobs:
         run: poetry run skyplane deprovision
       - name: Delete matching S3 buckets
         run: |
-          for pattern in "test-skyplane-" "skyplane-integration-us-east-1-" "integrationus-east-1-"; do
+          for pattern in "test-skyplane-" "skyplane-integration-" "integrationus-east-1-"; do
             aws s3api list-buckets --query "Buckets[?starts_with(Name, \`${pattern}\`) == \`true\`].Name" --output text | tr '\t' '\n' | while read bucket; do aws s3 rb "s3://$bucket" --force; done
           done
       - name: Cleanup GCP service account
diff --git a/skyplane/api/transfer_job.py b/skyplane/api/transfer_job.py
index 9d8fdeceb..02b92d439 100644
--- a/skyplane/api/transfer_job.py
+++ b/skyplane/api/transfer_job.py
@@ -25,12 +25,8 @@
 from skyplane import exceptions
 from skyplane.api.config import TransferConfig
 from skyplane.chunk import Chunk, ChunkRequest
-from skyplane.obj_store.azure_blob_interface import AzureBlobInterface, AzureBlobObject
-from skyplane.obj_store.gcs_interface import GCSObject
-from skyplane.obj_store.r2_interface import R2Object
 from skyplane.obj_store.storage_interface import StorageInterface
 from skyplane.obj_store.object_store_interface import ObjectStoreObject, ObjectStoreInterface
-from skyplane.obj_store.s3_interface import S3Object
 from skyplane.utils import logger
 from skyplane.utils.definitions import MB
 from skyplane.utils.fn import do_parallel
@@ -161,7 +157,9 @@ def _run_multipart_chunk_thread(
                 metadata = None
                 # Convert parts to base64 and store mime_type if destination interface is AzureBlobInterface
-                if isinstance(dest_iface, AzureBlobInterface):
+                if dest_iface.provider == "azure":
+                    from skyplane.obj_store.azure_blob_interface import AzureBlobInterface
+
                     block_ids = list(map(lambda part_num: AzureBlobInterface.id_to_base64_encoding(part_num, dest_object.key), parts))
                     metadata = (block_ids, mime_type)
@@ -294,12 +292,20 @@ def transfer_pair_generator(
                 raise e from None
 
             if dest_provider == "aws":
+                from skyplane.obj_store.s3_interface import S3Object
+
                 dest_obj = S3Object(provider=dest_provider, bucket=dst_iface.bucket(), key=dest_key)
             elif dest_provider == "azure":
+                from skyplane.obj_store.azure_blob_interface import AzureBlobObject
+
                 dest_obj = AzureBlobObject(provider=dest_provider, bucket=dst_iface.bucket(), key=dest_key)
             elif dest_provider == "gcp":
+                from skyplane.obj_store.gcs_interface import GCSObject
+
                 dest_obj = GCSObject(provider=dest_provider, bucket=dst_iface.bucket(), key=dest_key)
             elif dest_provider == "cloudflare":
+                from skyplane.obj_store.r2_interface import R2Object
+
                 dest_obj = R2Object(provider=dest_provider, bucket=dst_iface.bucket(), key=dest_key)
             else:
                 raise ValueError(f"Invalid dest_region {dest_region}, unknown provider")
diff --git a/skyplane/obj_store/r2_interface.py b/skyplane/obj_store/r2_interface.py
index b29faea4a..bdb1c679d 100644
--- a/skyplane/obj_store/r2_interface.py
+++ b/skyplane/obj_store/r2_interface.py
@@ -33,7 +33,7 @@ def __init__(self, account_id: str, bucket_name: str):
                 endpoint_url=self.endpoint_url,
                 aws_access_key_id=self.config.cloudflare_access_key_id,
                 aws_secret_access_key=self.config.cloudflare_secret_access_key,
-                region_name="auto", # explicity set region, otherwise may be read from AWS boto3 env
+                region_name="auto",  # explicitly set region, otherwise may be read from AWS boto3 env
             )
         except Exception as e:
             raise ValueError("Error with connecting to {self.endpoint_url}: {e}")
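A minimal, self-contained sketch of the lazy-import pattern the `transfer_job.py` hunks adopt (stand-in stdlib modules, not skyplane code): checking `dest_iface.provider == "azure"` instead of `isinstance(dest_iface, AzureBlobInterface)` means each provider class is imported only inside the branch that uses it, so importing the module no longer pulls in every cloud SDK at load time.

```python
import sys


def make_dest_object(provider: str, bucket: str, key: str):
    # Deferred imports: each provider module is loaded only when its branch
    # actually runs. In the diff these are skyplane.obj_store.* imports;
    # stdlib modules stand in here so the sketch runs anywhere.
    if provider == "aws":
        import decimal as s3_sdk  # stand-in for s3_interface / S3Object

        return ("aws", bucket, key, s3_sdk.__name__)
    elif provider == "gcp":
        import fractions as gcs_sdk  # stand-in for gcs_interface / GCSObject

        return ("gcp", bucket, key, gcs_sdk.__name__)
    else:
        raise ValueError(f"Invalid provider {provider}")


if __name__ == "__main__":
    # Nothing provider-specific is imported until the matching branch runs.
    assert "fractions" not in sys.modules
    make_dest_object("gcp", "demo-bucket", "path/to/key")
    assert "fractions" in sys.modules
```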