Skip to content

Commit

Permalink
Move glacier check logic to FileInfo
Browse files Browse the repository at this point in the history
  • Loading branch information
kyleknap committed Oct 28, 2015
1 parent 99c3c46 commit dfd269c
Show file tree
Hide file tree
Showing 3 changed files with 78 additions and 3 deletions.
30 changes: 30 additions & 0 deletions awscli/customizations/s3/fileinfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,11 @@ def remove_bucket(self):
bucket, key = find_bucket_key(self.src)
self.client.delete_bucket(Bucket=bucket)

def is_glacier_compatible(self):
    """Return True: TaskInfo operations are always glacier compatible.

    These operations do not involve transferring glacier objects,
    so they are always glacier compatible.
    """
    return True


class FileInfo(TaskInfo):
"""
Expand Down Expand Up @@ -253,6 +258,31 @@ def _handle_metadata_directive(self, params):
params['MetadataDirective'] = \
self.parameters['metadata_directive'][0]

def is_glacier_compatible(self):
    """Determine whether this operation can run against the S3 object.

    Operations fail when the S3 object is stored in the GLACIER
    storage class and the operation must read the object out of S3:
    an S3-to-S3 copy, a download, or a move whose source is S3.
    (For a move, the delete half would actually succeed, but we do
    not want to fail the transfer and then delete the source anyway.)

    :returns: True if the FileInfo's operation will not fail because
        the operation is on a glacier object.  False if it will fail.
    """
    if not self._is_glacier_object(self.associated_response_data):
        # Not a glacier-stored object: every operation is safe.
        return True
    if self.operation_name in ('copy', 'download'):
        return False
    if self.operation_name == 'move' and self.src_type == 's3':
        return False
    return True

def _is_glacier_object(self, response_data):
if response_data:
if response_data.get('StorageClass') == 'GLACIER':
return True
return False

def upload(self, payload=None):
"""
Redirects the file to the multipart upload function if the file is
Expand Down
5 changes: 2 additions & 3 deletions awscli/customizations/s3/s3handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,7 @@
import sys

from awscli.customizations.s3.utils import find_chunksize, \
find_bucket_key, relative_path, PrintTask, create_warning, \
is_glacier_compatible_operation
find_bucket_key, relative_path, PrintTask, create_warning
from awscli.customizations.s3.executor import Executor
from awscli.customizations.s3 import tasks
from awscli.customizations.s3.transferconfig import RuntimeConfig
Expand Down Expand Up @@ -186,7 +185,7 @@ def _enqueue_tasks(self, files):
message=warning_message)
self.result_queue.put(warning)
# Warn and skip over glacier incompatible tasks.
elif not is_glacier_compatible_operation(filename):
elif not filename.is_glacier_compatible():
LOGGER.debug(
'Encountered glacier object s3://%s. Not performing '
'%s on object.' % (filename.src, filename.operation_name))
Expand Down
46 changes: 46 additions & 0 deletions tests/unit/customizations/s3/test_fileinfo.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from awscli.customizations.s3 import fileinfo
from awscli.customizations.s3.utils import MD5Error
from awscli.customizations.s3.fileinfo import FileInfo
from awscli.customizations.s3.fileinfo import TaskInfo


class TestSaveFile(unittest.TestCase):
Expand Down Expand Up @@ -102,3 +103,48 @@ def test_set_size_from_s3(self):
file_info = FileInfo(src="bucket/key", client=client)
file_info.set_size_from_s3()
self.assertEqual(file_info.size, 5)


class TestIsGlacierCompatible(unittest.TestCase):
    """Unit tests for glacier-compatibility checks on FileInfo/TaskInfo."""

    def setUp(self):
        # Every test starts from a glacier-stored object; individual
        # tests override the response data, operation, or source type.
        self.file_info = FileInfo('bucket/key')
        self.file_info.associated_response_data = {'StorageClass': 'GLACIER'}

    def test_operation_is_glacier_compatible(self):
        # Deleting never reads the object, so glacier storage is fine.
        fi = self.file_info
        fi.operation_name = 'delete'
        self.assertTrue(fi.is_glacier_compatible())

    def test_download_operation_is_not_glacier_compatible(self):
        fi = self.file_info
        fi.operation_name = 'download'
        self.assertFalse(fi.is_glacier_compatible())

    def test_copy_operation_is_not_glacier_compatible(self):
        fi = self.file_info
        fi.operation_name = 'copy'
        self.assertFalse(fi.is_glacier_compatible())

    def test_operation_is_glacier_compatible_for_non_glacier(self):
        fi = self.file_info
        fi.operation_name = 'download'
        fi.associated_response_data = {'StorageClass': 'STANDARD'}
        self.assertTrue(fi.is_glacier_compatible())

    def test_move_operation_is_not_glacier_compatible_for_s3_source(self):
        fi = self.file_info
        fi.operation_name = 'move'
        fi.src_type = 's3'
        self.assertFalse(fi.is_glacier_compatible())

    def test_move_operation_is_glacier_compatible_for_local_source(self):
        fi = self.file_info
        fi.operation_name = 'move'
        fi.src_type = 'local'
        self.assertTrue(fi.is_glacier_compatible())

    def test_response_is_not_glacier(self):
        fi = self.file_info
        fi.associated_response_data = {'StorageClass': 'STANDARD'}
        self.assertTrue(fi.is_glacier_compatible())

    def test_response_missing_storage_class(self):
        fi = self.file_info
        fi.associated_response_data = {'Key': 'Foo'}
        self.assertTrue(fi.is_glacier_compatible())

    def test_task_info_glacier_compatibility(self):
        # TaskInfo operations never transfer objects, so always compatible.
        task_info = TaskInfo('bucket/key', 's3', 'remove_bucket', None)
        self.assertTrue(task_info.is_glacier_compatible())

0 comments on commit dfd269c

Please sign in to comment.