Skip to content

Commit

Permalink
Merge branch 'release-1.7.4'
Browse files Browse the repository at this point in the history
* release-1.7.4:
  Bumping version to 1.7.4
  Update changelog with new service updates
  Update CHANGELOG with kinesis fix
  Update changelog with new feature
  Update doc example
  Switch to base-2 units
  Rename --humanize to --human-readable
  Add --humanize and --summarize options for s3 ls
  • Loading branch information
AWS committed Jan 27, 2015
2 parents 4fa4590 + 2754f24 commit 77f9941
Show file tree
Hide file tree
Showing 10 changed files with 193 additions and 16 deletions.
13 changes: 13 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,19 @@
CHANGELOG
=========

1.7.4
=====

* feature:``aws dynamodb``: Add support for online indexing.
* feature:``aws importexport get-shipping-label``: Add support for
``get-shipping-label``.
* feature:``aws s3 ls``: Add ``--human-readable`` and ``--summarize`` options
(`issue 1103 <https://github.com/aws/aws-cli/pull/1103>`__)
* bugfix:``aws kinesis put-records``: Fix issue with base64 encoding for
blob types
(`botocore issue 413 <https://github.com/boto/botocore/pull/413>`__)


1.7.3
=====

Expand Down
2 changes: 1 addition & 1 deletion awscli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
"""
import os

__version__ = '1.7.3'
__version__ = '1.7.4'

#
# Get our data path to be added to botocore's search path
Expand Down
39 changes: 30 additions & 9 deletions awscli/customizations/s3/subcommands.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
from awscli.customizations.s3.filters import create_filter
from awscli.customizations.s3.s3handler import S3Handler, S3StreamHandler
from awscli.customizations.s3.utils import find_bucket_key, uni_print, \
AppendFilter, find_dest_path_comp_key
AppendFilter, find_dest_path_comp_key, human_readable_size
from awscli.customizations.s3.syncstrategy.base import MissingFileSync, \
SizeAndLastModifiedSync, NeverSync

Expand All @@ -38,6 +38,12 @@
"Command is performed on all files or objects "
"under the specified directory or prefix.")}

HUMAN_READABLE = {'name': 'human-readable', 'action': 'store_true',
'help_text': "Displays file sizes in human readable format."}

SUMMARIZE = {'name': 'summarize', 'action': 'store_true', 'help_text': (
"Displays summary information (number of objects, total size).")}

DRYRUN = {'name': 'dryrun', 'action': 'store_true',
'help_text': (
"Displays the operations that would be performed using the "
Expand Down Expand Up @@ -242,13 +248,16 @@ class ListCommand(S3Command):
USAGE = "<S3Path> or NONE"
ARG_TABLE = [{'name': 'paths', 'nargs': '?', 'default': 's3://',
'positional_arg': True, 'synopsis': USAGE}, RECURSIVE,
PAGE_SIZE]
PAGE_SIZE, HUMAN_READABLE, SUMMARIZE]
EXAMPLES = BasicCommand.FROM_FILE('s3/ls.rst')

def _run_main(self, parsed_args, parsed_globals):
super(ListCommand, self)._run_main(parsed_args, parsed_globals)
self._empty_result = False
self._at_first_page = True
self._size_accumulator = 0
self._total_objects = 0
self._human_readable = parsed_args.human_readable
path = parsed_args.paths
if path.startswith('s3://'):
path = path[5:]
Expand All @@ -261,8 +270,10 @@ def _run_main(self, parsed_args, parsed_globals):
parsed_args.page_size)
else:
self._list_all_objects(bucket, key, parsed_args.page_size)
if parsed_args.summarize:
self._print_summary()
if key:
# User specified a key to look for. We should return an rc of one
# User specified a key to look for. We should return an rc of one
# if there are no matching keys and/or prefixes or return an rc
# of zero if there are matching keys or prefixes.
return self._check_no_objects()
Expand All @@ -276,7 +287,6 @@ def _run_main(self, parsed_args, parsed_globals):
return 0

def _list_all_objects(self, bucket, key, page_size=None):

operation = self.service.get_operation('ListObjects')
iterator = operation.paginate(self.endpoint, bucket=bucket,
prefix=key, delimiter='/',
Expand All @@ -298,6 +308,8 @@ def _display_page(self, response_data, use_basename=True):
uni_print(print_str)
for content in contents:
last_mod_str = self._make_last_mod_str(content['LastModified'])
self._size_accumulator += int(content['Size'])
self._total_objects += 1
size_str = self._make_size_str(content['Size'])
if use_basename:
filename_components = content['Key'].split('/')
Expand Down Expand Up @@ -343,17 +355,26 @@ def _make_last_mod_str(self, last_mod):
str(last_mod.day).zfill(2),
str(last_mod.hour).zfill(2),
str(last_mod.minute).zfill(2),
str(last_mod.second).zfill(2))
str(last_mod.second).zfill(2))
last_mod_str = "%s-%s-%s %s:%s:%s" % last_mod_tup
return last_mod_str.ljust(19, ' ')

def _make_size_str(self, size):
    """Build the fixed-width size column for one listed object.

    Renders ``size`` with :func:`human_readable_size` when the user
    passed ``--human-readable``; otherwise as a plain byte count.
    """
    if self._human_readable:
        size_str = human_readable_size(size)
    else:
        size_str = str(size)
    return size_str.rjust(10, ' ')

def _print_summary(self):
    """Print the trailing ``--summarize`` lines: total object count
    and total size accumulated while paging through the listing."""
    total_count = str(self._total_objects)
    uni_print("\nTotal Objects: ".rjust(15, ' ') + total_count + "\n")
    if self._human_readable:
        total_size = human_readable_size(self._size_accumulator)
    else:
        total_size = str(self._size_accumulator)
    uni_print("Total Size: ".rjust(15, ' ') + total_size + "\n")


class WebsiteCommand(S3Command):
NAME = 'website'
Expand Down Expand Up @@ -553,7 +574,7 @@ def needs_filegenerator(self):
return False
else:
return True

def choose_sync_strategies(self):
"""Determines the sync strategy for the command.
Expand Down Expand Up @@ -648,7 +669,7 @@ def run(self):
endpoint=self._endpoint,
is_stream=True)]
file_info_builder = FileInfoBuilder(self._service, self._endpoint,
self._source_endpoint, self.parameters)
self._source_endpoint, self.parameters)
s3handler = S3Handler(self.session, self.parameters,
result_queue=result_queue)
s3_stream_handler = S3StreamHandler(self.session, self.parameters,
Expand Down Expand Up @@ -712,7 +733,7 @@ def run(self):
# tasks failed and the number of tasks warned.
# This means that files[0] now contains a namedtuple with
# the number of failed tasks and the number of warned tasks.
# In terms of the RC, we're keeping it simple and saying
# In terms of the RC, we're keeping it simple and saying
# that > 0 failed tasks will give a 1 RC and > 0 warned
# tasks will give a 2 RC. Otherwise a RC of zero is returned.
rc = 0
Expand Down
36 changes: 36 additions & 0 deletions awscli/customizations/s3/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,42 @@
from awscli.compat import queue


humanize_suffixes = ('KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB')


def human_readable_size(value):
    """Convert a size in bytes into a human readable format.

    For example::

        >>> human_readable_size(1)
        '1 Byte'
        >>> human_readable_size(10)
        '10 Bytes'
        >>> human_readable_size(1024)
        '1.0 KiB'
        >>> human_readable_size(1024 * 1024)
        '1.0 MiB'

    :param value: The size in bytes.
    :return: The size in a human readable format based on base-2 units.
    """
    base = 1024
    bytes_int = float(value)

    if bytes_int == 1:
        return '1 Byte'
    elif bytes_int < base:
        return '%d Bytes' % bytes_int

    for i, suffix in enumerate(humanize_suffixes):
        unit = base ** (i + 2)
        # Round to the nearest whole unit so that, e.g., 1024**2 - 1
        # renders as '1.0 MiB' rather than '1024.0 KiB'.
        if round(bytes_int / unit * base) < base:
            return '%.1f %s' % (base * bytes_int / unit, suffix)
    # Sizes beyond the largest suffix are expressed in that suffix
    # (the previous implementation fell off the loop and silently
    # returned None for such values).
    largest_unit = base ** (len(humanize_suffixes) + 1)
    return '%.1f %s' % (base * bytes_int / largest_unit,
                        humanize_suffixes[-1])


class AppendFilter(argparse.Action):
"""
This class is used as an action when parsing the parameters.
Expand Down
22 changes: 22 additions & 0 deletions awscli/examples/s3/ls.rst
Original file line number Diff line number Diff line change
Expand Up @@ -48,3 +48,25 @@ Output::
2013-09-02 21:32:57 189 foo/bar/.baz/hooks/foo
2013-09-02 21:32:57 398 z.txt

The following ``ls`` command demonstrates the same command using the
--human-readable and --summarize options. --human-readable displays file size
in Bytes/KiB/MiB/GiB/TiB/PiB/EiB. --summarize displays the total number of
objects and total size at the end of the result listing::

    aws s3 ls s3://mybucket --recursive --human-readable --summarize

Output::

2013-09-02 21:37:53 10 Bytes a.txt
2013-09-02 21:37:53 2.9 MiB foo.zip
2013-09-02 21:32:57 23 Bytes foo/bar/.baz/a
2013-09-02 21:32:58 41 Bytes foo/bar/.baz/b
2013-09-02 21:32:57 281 Bytes foo/bar/.baz/c
2013-09-02 21:32:57 73 Bytes foo/bar/.baz/d
2013-09-02 21:32:57 452 Bytes foo/bar/.baz/e
2013-09-02 21:32:57 896 Bytes foo/bar/.baz/hooks/bar
2013-09-02 21:32:57 189 Bytes foo/bar/.baz/hooks/foo
2013-09-02 21:32:57 398 Bytes z.txt

Total Objects: 10
Total Size: 2.9 MiB
2 changes: 1 addition & 1 deletion doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
# The short X.Y version.
version = '1.7'
# The full version, including alpha/beta/rc tags.
release = '1.7.3'
release = '1.7.4'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import awscli


requires = ['botocore>=0.84.0,<0.85.0',
requires = ['botocore>=0.85.0,<0.86.0',
'bcdoc>=0.12.0,<0.13.0',
'colorama==0.2.5',
'docutils>=0.10',
Expand Down
59 changes: 59 additions & 0 deletions tests/unit/customizations/s3/test_ls_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,65 @@ def test_fail_rc_no_objects_nor_prefixes(self):
self.parsed_responses = [{}]
self.run_cmd('s3 ls s3://bucket/foo', expected_rc=1)

def test_human_readable_file_size(self):
    """``s3 ls --human-readable`` renders each object size in base-2 units."""
    time_utc = "2014-01-09T20:45:49.000Z"
    self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
        {"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
        {"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
        {"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
        {"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
        {"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
        {"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc}]}]
    stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --human-readable',
                                expected_rc=0)
    # Time is stored in UTC, but the actual time displayed is specific
    # to your tzinfo, so shift the timezone to the local one before
    # building the expected timestamp.  (The unused ``call_args``
    # local from the original version has been dropped.)
    time_local = parser.parse(time_utc).astimezone(tz.tzlocal())
    time_fmt = time_local.strftime('%Y-%m-%d %H:%M:%S')
    self.assertIn('%s 1 Byte onebyte.txt\n' % time_fmt, stdout)
    self.assertIn('%s 1.0 KiB onekilobyte.txt\n' % time_fmt, stdout)
    self.assertIn('%s 1.0 MiB onemegabyte.txt\n' % time_fmt, stdout)
    self.assertIn('%s 1.0 GiB onegigabyte.txt\n' % time_fmt, stdout)
    self.assertIn('%s 1.0 TiB oneterabyte.txt\n' % time_fmt, stdout)
    self.assertIn('%s 1.0 PiB onepetabyte.txt\n' % time_fmt, stdout)

def test_summarize(self):
    """``s3 ls --summarize`` appends object-count and byte-total lines."""
    time_utc = "2014-01-09T20:45:49.000Z"
    self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
        {"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
        {"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
        {"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
        {"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
        {"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
        {"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc}]}]
    stdout, _, _ = self.run_cmd('s3 ls s3://bucket/ --summarize',
                                expected_rc=0)
    # The summary only checks totals, so the timestamp locals computed
    # by the original version were unused and have been removed.
    # 1127000493261825 is the exact byte sum of the six objects above.
    self.assertIn('Total Objects: 6\n', stdout)
    self.assertIn('Total Size: 1127000493261825\n', stdout)

def test_summarize_with_human_readable(self):
    """``--summarize`` with ``--human-readable`` formats the total size too."""
    time_utc = "2014-01-09T20:45:49.000Z"
    self.parsed_responses = [{"CommonPrefixes": [], "Contents": [
        {"Key": "onebyte.txt", "Size": 1, "LastModified": time_utc},
        {"Key": "onekilobyte.txt", "Size": 1024, "LastModified": time_utc},
        {"Key": "onemegabyte.txt", "Size": 1024 ** 2, "LastModified": time_utc},
        {"Key": "onegigabyte.txt", "Size": 1024 ** 3, "LastModified": time_utc},
        {"Key": "oneterabyte.txt", "Size": 1024 ** 4, "LastModified": time_utc},
        {"Key": "onepetabyte.txt", "Size": 1024 ** 5, "LastModified": time_utc}]}]
    stdout, _, _ = self.run_cmd(
        's3 ls s3://bucket/ --human-readable --summarize', expected_rc=0)
    # The summary only checks totals, so the timestamp locals computed
    # by the original version were unused and have been removed.
    self.assertIn('Total Objects: 6\n', stdout)
    self.assertIn('Total Size: 1.0 PiB\n', stdout)


if __name__ == "__main__":
unittest.main()
9 changes: 6 additions & 3 deletions tests/unit/customizations/s3/test_subcommands.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ def setUp(self):

def test_ls_command_for_bucket(self):
ls_command = ListCommand(self.session)
parsed_args = FakeArgs(paths='s3://mybucket/', dir_op=False, page_size='5')
parsed_args = FakeArgs(paths='s3://mybucket/', dir_op=False, page_size='5',
human_readable=False, summarize=False)
parsed_globals = mock.Mock()
ls_command._run_main(parsed_args, parsed_globals)
call = self.session.get_service.return_value.get_operation\
Expand All @@ -78,7 +79,8 @@ def test_ls_command_for_bucket(self):
def test_ls_command_with_no_args(self):
ls_command = ListCommand(self.session)
parsed_global = FakeArgs(region=None, endpoint_url=None, verify_ssl=None)
parsed_args = FakeArgs(dir_op=False, paths='s3://')
parsed_args = FakeArgs(dir_op=False, paths='s3://',
human_readable=False, summarize=False)
ls_command._run_main(parsed_args, parsed_global)
# We should only be a single call.
self.session.get_service.return_value.get_operation.assert_called_with(
Expand All @@ -98,7 +100,8 @@ def test_ls_with_verify_argument(self):
ls_command = ListCommand(self.session)
parsed_global = FakeArgs(region='us-west-2', endpoint_url=None,
verify_ssl=False)
parsed_args = FakeArgs(paths='s3://', dir_op=False)
parsed_args = FakeArgs(paths='s3://', dir_op=False,
human_readable=False, summarize=False)
ls_command._run_main(parsed_args, parsed_global)
# Verify get_endpoint
get_endpoint = self.session.get_service.return_value.get_endpoint
Expand Down
25 changes: 24 additions & 1 deletion tests/unit/customizations/s3/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

import mock
from dateutil.tz import tzlocal
from nose.tools import assert_equal

from botocore.hooks import HierarchicalEmitter
from awscli.customizations.s3.utils import find_bucket_key, find_chunksize
Expand All @@ -19,10 +20,32 @@
from awscli.customizations.s3.utils import ScopedEventHandler
from awscli.customizations.s3.utils import get_file_stat
from awscli.customizations.s3.utils import AppendFilter
from awscli.customizations.s3.utils import create_warning
from awscli.customizations.s3.utils import create_warning
from awscli.customizations.s3.utils import human_readable_size
from awscli.customizations.s3.constants import MAX_SINGLE_UPLOAD_SIZE


def test_human_readable_size():
    """Nose generator test covering every base-2 suffix produced by
    ``human_readable_size`` (the original yielded the 1.0 MiB case
    twice; the duplicate has been removed)."""
    yield _test_human_size_matches, 1, '1 Byte'
    yield _test_human_size_matches, 10, '10 Bytes'
    yield _test_human_size_matches, 1000, '1000 Bytes'
    yield _test_human_size_matches, 1024, '1.0 KiB'
    yield _test_human_size_matches, 1024 ** 2, '1.0 MiB'
    yield _test_human_size_matches, 1024 ** 3, '1.0 GiB'
    yield _test_human_size_matches, 1024 ** 4, '1.0 TiB'
    yield _test_human_size_matches, 1024 ** 5, '1.0 PiB'
    yield _test_human_size_matches, 1024 ** 6, '1.0 EiB'

    # Sizes just under a unit boundary round up to the larger unit.
    yield _test_human_size_matches, 1024 ** 2 - 1, '1.0 MiB'
    yield _test_human_size_matches, 1024 ** 3 - 1, '1.0 GiB'


def _test_human_size_matches(bytes_int, expected):
    """Assertion helper driven by the ``test_human_readable_size`` generator."""
    actual = human_readable_size(bytes_int)
    assert_equal(actual, expected)


class AppendFilterTest(unittest.TestCase):
def test_call(self):
parser = argparse.ArgumentParser()
Expand Down

0 comments on commit 77f9941

Please sign in to comment.