ivs3
#!/usr/bin/env python

import argparse
import os

from boto.s3.connection import S3Connection
from boto.s3.key import Key
import eventlet

# Patch the standard library so boto's blocking network I/O yields to eventlet greenlets.
eventlet.monkey_patch()

import invar


class IVS3(invar.InvarUtility):
    description = 'Upload a directory of images to S3 concurrently.'

    def _init_common_parser(self):
        """
        Override default parser behavior.
        """
        self.argparser = argparse.ArgumentParser(description=self.description, epilog=self.epilog)

    def add_arguments(self):
        self.argparser.add_argument('upload_dir', help='Directory to be uploaded.')
        self.argparser.add_argument('bucket', help='Bucket in which to put files. You may also specify a path component (e.g. a subdirectory to put the files in).')
        self.argparser.add_argument('-c', '--concurrency', help='Number of concurrent uploads.', type=int, default=32)
        self.argparser.add_argument('-P', '--acl-public', help='Set uploaded files as public.', dest='public', action='store_true')
        self.argparser.add_argument('--acl-private', help='Set uploaded files as private (default).', dest='private', action='store_true')
        self.argparser.add_argument('--add-header', help='Add a given HTTP header to each file. Use NAME:VALUE format. May be specified multiple times.', dest='headers', action='append')
        self.argparser.add_argument('-v', '--verbose', action='store_true', help='Display detailed error messages.')

    def main(self):
        # Split an optional key prefix off the bucket argument, e.g. "bucket/subdir".
        if '/' in self.args.bucket:
            self.bucket, self.upload_path = self.args.bucket.split('/', 1)

            if self.upload_path[-1] != '/':
                self.upload_path += '/'
        else:
            self.bucket = self.args.bucket
            self.upload_path = None

        # Convert repeated NAME:VALUE --add-header options into a headers dict.
        self.headers = dict([h.split(':') for h in self.args.headers]) if self.args.headers else None

        pile = eventlet.GreenPile(self.args.concurrency)

        for key_name, absolute_path in self.find_file_paths():
            pile.spawn(self.upload, key_name, absolute_path)

        # Wait for all greenlets to finish
        list(pile)

    def find_file_paths(self):
        """
        A generator function that recursively finds all files in the upload directory.
        """
        for root, dirs, files in os.walk(self.args.upload_dir):
            rel_path = os.path.relpath(root, self.args.upload_dir)

            for f in files:
                if rel_path == '.':
                    yield (f, os.path.join(root, f))
                else:
                    yield (os.path.join(rel_path, f), os.path.join(root, f))
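
    # For illustration (hypothetical paths): with upload_dir "tiles" containing
    # "tiles/0.png" and "tiles/z/1.png", find_file_paths() yields
    # ("0.png", "tiles/0.png") and ("z/1.png", "tiles/z/1.png"), so S3 key names
    # mirror the directory layout relative to upload_dir.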

    def upload(self, key_name, absolute_path):
        conn = S3Connection()
        bucket = conn.get_bucket(self.bucket)

        k = Key(bucket)
        k.key = key_name

        if self.upload_path:
            k.key = self.upload_path + k.key

        # Uploads are private unless -P/--acl-public was given (and --acl-private was not).
        if self.args.public and not self.args.private:
            policy = 'public-read'
        else:
            policy = 'private'

        k.set_contents_from_filename(absolute_path, headers=self.headers, policy=policy)

        print 'Uploaded %s' % key_name

if __name__ == "__main__":
    ivs3 = IVS3()
    ivs3.main()
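
A minimal invocation sketch, assuming the script is on PATH and AWS credentials are available to boto in its usual ways (environment variables or boto config); the bucket name, prefix, and header value below are hypothetical:

    ivs3 -c 16 -P --add-header Cache-Control:max-age=300 ./tiles my-bucket/tiles

This uploads everything under ./tiles with 16 concurrent greenlets, marks the objects public-read, adds the given header to each object, and prefixes every key with "tiles/" inside my-bucket.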