forked from deis/registry
fix(create_bucket): avoid S3 InvalidLocationConstraint error
Calling create_bucket(bucket_name, region="us-east-1") yields the following error:

    boto.exception.S3ResponseError: S3ResponseError: 400 Bad Request
    <?xml version="1.0" encoding="UTF-8"?>
    <Error><Code>InvalidLocationConstraint</Code>
    <Message>The specified location-constraint is not valid</Message>
    <LocationConstraint>us-east-1</LocationConstraint>...</Error>

Based on the comments in boto/boto3#125, this commit omits the region kwarg from the create_bucket() call when `s3.region` is set to "us-east-1".
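A minimal sketch of the quirk against boto 2.x (the bucket name is illustrative, and AWS credentials are assumed to be present in the environment):

    import boto.s3

    conn = boto.s3.connect_to_region('us-east-1')

    # Rejected: S3 does not accept "us-east-1" as a LocationConstraint, so this
    # raises S3ResponseError with code InvalidLocationConstraint.
    # conn.create_bucket('example-bucket', location='us-east-1')

    # Accepted: omitting location uses boto's default (the classic "US Standard"
    # region), which is us-east-1.
    conn.create_bucket('example-bucket')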
Showing 2 changed files with 95 additions and 1 deletion.
@@ -0,0 +1,90 @@
#!/usr/bin/env python

import json
import os

import boto.s3
import swiftclient
from azure.storage.blob import BlobService
from boto import config as botoconfig
from boto.exception import S3CreateError
from boto.s3.connection import S3Connection, OrdinaryCallingFormat
from gcloud import exceptions
from gcloud.storage.client import Client
from oauth2client.service_account import ServiceAccountCredentials
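
# Return True when the connection can already see a bucket with the given name.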
def bucket_exists(conn, name):
    return conn.lookup(name) is not None


bucket_name = os.getenv('BUCKET_NAME')
region = os.getenv('AWS_REGION')
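
# Create the bucket (or container) for the configured backend, keyed off DATABASE_STORAGE.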
if os.getenv('DATABASE_STORAGE') == "s3":
    conn = boto.s3.connect_to_region(region)
    if not bucket_exists(conn, bucket_name):
        try:
            if region == "us-east-1":
                # use the "US Standard" region; workaround for https://github.com/boto/boto3/issues/125
                conn.create_bucket(bucket_name)
            else:
                conn.create_bucket(bucket_name, location=region)
        # NOTE(bacongobbler): for versions prior to v2.9.0, the bucket is created in the default region.
        # if we got here, we need to propagate "us-east-1" into WALE_S3_ENDPOINT because the bucket
        # exists in a different region and we cannot find it.
        # TODO(bacongobbler): deprecate this once we drop support for v2.8.0 and lower
        except S3CreateError:
            if region != 'us-east-1':
                print('Failed to create bucket in {}. We are now assuming that the bucket was created in us-east-1.'.format(region))
                with open(os.path.join(os.environ['WALE_ENVDIR'], "WALE_S3_ENDPOINT"), "w+") as envfile:
                    envfile.write('https+path://s3.amazonaws.com:443')
            else:
                raise
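
# Google Cloud Storage: authenticate with the service account keyfile and only
# create the bucket when it does not already exist.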
elif os.getenv('DATABASE_STORAGE') == "gcs":
    scopes = ['https://www.googleapis.com/auth/devstorage.full_control']
    credentials = ServiceAccountCredentials.from_json_keyfile_name(os.getenv('GOOGLE_APPLICATION_CREDENTIALS'), scopes=scopes)
    with open(os.getenv('GOOGLE_APPLICATION_CREDENTIALS')) as data_file:
        data = json.load(data_file)
    conn = Client(credentials=credentials, project=data['project_id'])
    try:
        conn.get_bucket(bucket_name)
    except exceptions.NotFound:
        conn.create_bucket(bucket_name)
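
# Azure Blob Storage: credentials come from WABS_ACCOUNT_NAME and WABS_ACCESS_KEY.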
elif os.getenv('DATABASE_STORAGE') == "azure":
    conn = BlobService(account_name=os.getenv('WABS_ACCOUNT_NAME'), account_key=os.getenv('WABS_ACCESS_KEY'))
    # create_container does not raise if the container already exists
    # (https://github.com/Azure/azure-storage-python/blob/master/azure/storage/blob/baseblobservice.py#L504).
    conn.create_container(bucket_name)
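
# OpenStack Swift: authenticate against SWIFT_AUTHURL with the configured tenant.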
elif os.getenv('DATABASE_STORAGE') == "swift":
    conn = swiftclient.Connection(
        user=os.getenv('SWIFT_USER'),
        key=os.getenv('SWIFT_PASSWORD'),
        authurl=os.getenv('SWIFT_AUTHURL'),
        auth_version=os.getenv('SWIFT_AUTH_VERSION'),
        tenant_name=os.getenv('SWIFT_TENANT')
    )
    # put_container likewise does not raise if the container already exists.
    conn.put_container(bucket_name)
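
# Default: an S3 API-compatible store (e.g. minio) addressed via S3_HOST/S3_PORT,
# using path-style requests and SigV4 auth.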
else:
    botoconfig.add_section('s3')
    botoconfig.set('s3', 'use-sigv4', 'True')
    botoconfig.add_section('Boto')
    botoconfig.set('Boto', 'is_secure', 'False')
    conn = S3Connection(
        host=os.getenv('S3_HOST'),
        port=int(os.getenv('S3_PORT')),
        calling_format=OrdinaryCallingFormat())
    # HACK(bacongobbler): allow boto to connect to minio by changing the region name for s3v4 auth
    conn.auth_region_name = os.getenv('AWS_REGION')
    if not bucket_exists(conn, bucket_name):
        conn.create_bucket(bucket_name)
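
For reference, a hypothetical harness showing the environment the script expects (the filename and values are illustrative; only the variable names appear in the diff):

    import os
    import subprocess

    env = dict(os.environ,
               DATABASE_STORAGE='s3',           # or 'gcs', 'azure', 'swift'
               AWS_REGION='us-east-1',
               BUCKET_NAME='my-wal-bucket',     # hypothetical bucket name
               WALE_ENVDIR='/etc/wal-e.d/env')  # hypothetical envdir path
    subprocess.check_call(['python', 'create_bucket.py'], env=env)  # hypothetical filename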