diff --git a/rootfs/bin/create-bucket b/rootfs/bin/create-bucket
index 9a3b891..6e5d2a9 100755
--- a/rootfs/bin/create-bucket
+++ b/rootfs/bin/create-bucket
@@ -25,7 +25,11 @@ if os.getenv('REGISTRY_STORAGE') == "s3" and os.getenv('REGISTRY_STORAGE_S3_BACK
     conn = boto.s3.connect_to_region(region)
     if not bucket_exists(conn, bucket_name):
-        conn.create_bucket(bucket_name, location=region)
+        if region == "us-east-1":
+            # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
+            conn.create_bucket(bucket_name)
+        else:
+            conn.create_bucket(bucket_name, location=region)
 
 elif os.getenv('REGISTRY_STORAGE') == "gcs":
     scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
diff --git a/rootfs/bin/create_bucket b/rootfs/bin/create_bucket
new file mode 100755
index 0000000..2d23f63
--- /dev/null
+++ b/rootfs/bin/create_bucket
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+import os
+
+import boto.s3
+import json
+import swiftclient
+from boto import config as botoconfig
+from boto.exception import S3CreateError
+from boto.s3.connection import S3Connection, OrdinaryCallingFormat
+from oauth2client.service_account import ServiceAccountCredentials
+from gcloud.storage.client import Client
+from gcloud import exceptions
+from azure.storage.blob import BlobService
+
+def bucket_exists(conn, name):
+    bucket = conn.lookup(name)
+    if not bucket:
+        return False
+    return True
+
+bucket_name = os.getenv('BUCKET_NAME')
+region = os.getenv('AWS_REGION')
+
+if os.getenv('DATABASE_STORAGE') == "s3":
+    conn = boto.s3.connect_to_region(region)
+    if not bucket_exists(conn, bucket_name):
+        try:
+            if region == "us-east-1":
+                # use "US Standard" region. workaround for https://github.com/boto/boto3/issues/125
+                conn.create_bucket(bucket_name)
+            else:
+                conn.create_bucket(bucket_name, location=region)
+        # NOTE(bacongobbler): for versions prior to v2.9.0, the bucket is created in the default region.
+        # if we got here, we need to propagate "us-east-1" into WALE_S3_ENDPOINT because the bucket
+        # exists in a different region and we cannot find it.
+        # TODO(bacongobbler): deprecate this once we drop support for v2.8.0 and lower
+        except S3CreateError as err:
+            if region != 'us-east-1':
+                print('Failed to create bucket in {}. We are now assuming that the bucket was created in us-east-1.'.format(region))
+                with open(os.path.join(os.environ['WALE_ENVDIR'], "WALE_S3_ENDPOINT"), "w+") as file:
+                    file.write('https+path://s3.amazonaws.com:443')
+            else:
+                raise
+
+elif os.getenv('DATABASE_STORAGE') == "gcs":
+    scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
+    credentials = ServiceAccountCredentials.from_json_keyfile_name(os.getenv('GOOGLE_APPLICATION_CREDENTIALS'), scopes=scopes)
+    with open(os.getenv('GOOGLE_APPLICATION_CREDENTIALS')) as data_file:
+        data = json.load(data_file)
+    conn = Client(credentials=credentials, project=data['project_id'])
+    exists = True
+    try:
+        conn.get_bucket(bucket_name)
+    except exceptions.NotFound:
+        exists = False
+    except:
+        raise
+    if not exists:
+        conn.create_bucket(bucket_name)
+
+elif os.getenv('DATABASE_STORAGE') == "azure":
+    conn = BlobService(account_name=os.getenv('WABS_ACCOUNT_NAME'), account_key=os.getenv('WABS_ACCESS_KEY'))
+    # create_container does not throw an exception by default if the container already exists (https://github.com/Azure/azure-storage-python/blob/master/azure/storage/blob/baseblobservice.py#L504).
+    conn.create_container(bucket_name)
+
+elif os.getenv('DATABASE_STORAGE') == "swift":
+    conn = swiftclient.Connection(
+        user=os.getenv('SWIFT_USER'),
+        key=os.getenv('SWIFT_PASSWORD'),
+        authurl=os.getenv('SWIFT_AUTHURL'),
+        auth_version=os.getenv('SWIFT_AUTH_VERSION'),
+        tenant_name=os.getenv('SWIFT_TENANT')
+    )
+    # swift also does not throw an exception if the container already exists.
+    conn.put_container(os.getenv('BUCKET_NAME'))
+
+else:
+    botoconfig.add_section('s3')
+    botoconfig.set('s3', 'use-sigv4', 'True')
+    botoconfig.add_section('Boto')
+    botoconfig.set('Boto', 'is_secure', 'False')
+    conn = S3Connection(
+        host=os.getenv('S3_HOST'),
+        port=int(os.getenv('S3_PORT')),
+        calling_format=OrdinaryCallingFormat())
+    # HACK(bacongobbler): allow boto to connect to minio by changing the region name for s3v4 auth
+    conn.auth_region_name = os.getenv('AWS_REGION')
+    if not bucket_exists(conn, bucket_name):
+        conn.create_bucket(bucket_name)
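
For reviewers, a minimal sketch of what the `except S3CreateError` branch in the new `create_bucket` script decides, pulled out into a standalone helper so the logic can be exercised without AWS access. The endpoint string and the one-file-per-variable WALE_ENVDIR layout come from the patch above; the helper names (`fallback_endpoint`, `write_fallback`) are illustrative assumptions, not part of the change.

```python
# Sketch only: mirrors the S3CreateError fallback in rootfs/bin/create_bucket.
# Assumes WAL-E reads one value per file from the directory named by WALE_ENVDIR.
import os


def fallback_endpoint(region):
    """Endpoint to write when bucket creation fails outside us-east-1, else None."""
    if region != 'us-east-1':
        # Older releases created the bucket in the default ("US Standard") region,
        # so point WAL-E at the us-east-1 endpoint instead of the regional one.
        return 'https+path://s3.amazonaws.com:443'
    return None


def write_fallback(region, envdir):
    endpoint = fallback_endpoint(region)
    if endpoint is not None:
        with open(os.path.join(envdir, 'WALE_S3_ENDPOINT'), 'w') as f:
            f.write(endpoint)
    return endpoint
```

Split out this way, the interesting case (a bucket created by an older release in us-east-1 while AWS_REGION says otherwise) can be checked by asserting that `write_fallback('eu-west-1', tmpdir)` writes the us-east-1 endpoint, while `write_fallback('us-east-1', tmpdir)` writes nothing, matching the script's behavior of re-raising the original error in that case.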