Skip to content

Commit

Permalink
Merge pull request #1575 from kyleknap/exist-local-check
Browse files Browse the repository at this point in the history
Add some path checks around user provided paths
  • Loading branch information
kyleknap committed Oct 27, 2015
2 parents b70ea64 + 1401714 commit af3a079
Show file tree
Hide file tree
Showing 5 changed files with 73 additions and 65 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@
CHANGELOG
=========

Next Release (TBD)
==================
* bugfix:``aws s3``: Fix some local path validation issues
(`issue 1575 <https://github.com/aws/aws-cli/pull/1575>`__)


1.9.1
=====
* feature:``aws ssm``: Add support for Amazon EC2 Run Command
Expand Down
43 changes: 14 additions & 29 deletions awscli/customizations/s3/subcommands.py
Original file line number Diff line number Diff line change
Expand Up @@ -914,6 +914,20 @@ def _validate_path_args(self):
raise ValueError("Cannot mv a file onto itself: '%s' - '%s'" % (
params['src'], params['dest']))

# If the user provided local path does not exist, hard fail because
# we know that we will not be able to upload the file.
if 'locals3' == params['paths_type'] and not params['is_stream']:
if not os.path.exists(params['src']):
raise RuntimeError(
'The user-provided path %s does not exist.' %
params['src'])
# If the operation is downloading to a directory that does not exist,
# create the directories so no warnings are thrown during the syncing
# process.
elif 's3local' == params['paths_type'] and params['dir_op']:
if not os.path.exists(params['dest']):
os.makedirs(params['dest'])

def _same_path(self, src, dest):
if not self.parameters['paths_type'] == 's3s3':
return False
Expand Down Expand Up @@ -957,35 +971,6 @@ def check_path_type(self, paths):
else:
raise TypeError("%s\nError: Invalid argument type" % usage)

def check_src_path(self, paths):
    """Validate the source path supplied on the command line.

    S3 paths (``s3://...``) are not validated here; any problems with
    them surface later when the objects are actually listed. For local
    paths this verifies that the path exists and that its kind matches
    the operation: a directory operation requires a directory, a
    single-file operation requires a file.

    :param paths: list of command-line paths; element 0 is the source.
    :raises Exception: if the local path is missing, or is a directory
        for a file operation / a file for a directory operation.
    """
    source = paths[0]
    if source.startswith('s3://'):
        # Nothing to check locally for an S3 source.
        return
    source = os.path.abspath(source)
    is_dir_op = self.parameters['dir_op']
    if not os.path.exists(source):
        raise Exception("Error: Local path does not exist")
    if os.path.isdir(source) and not is_dir_op:
        raise Exception("Error: Requires a local file")
    if os.path.isfile(source) and is_dir_op:
        raise Exception("Error: Requires a local directory")

def add_region(self, parsed_globals):
self.parameters['region'] = parsed_globals.region

Expand Down
28 changes: 26 additions & 2 deletions tests/functional/s3/test_sync_command.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,8 @@
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from awscli.testutils import BaseAWSCommandParamsTest, FileCreator
import re
import os

import mock
from awscli.compat import six


Expand Down Expand Up @@ -54,3 +53,28 @@ def test_no_recursive_option(self):
cmdline = '. s3://mybucket --recursive'
# Return code will be 2 for invalid parameter ``--recursive``
self.run_cmd(cmdline, expected_rc=2)

def test_sync_from_non_existant_directory(self):
    """Syncing up from a missing local directory must fail fast (rc 255)."""
    missing_dir = os.path.join(self.files.rootdir, 'fakedir')
    cmdline = '%s %s s3://bucket/' % (self.prefix, missing_dir)
    self.parsed_responses = [{"CommonPrefixes": [], "Contents": []}]
    _, stderr, _ = self.run_cmd(cmdline, expected_rc=255)
    self.assertIn('does not exist', stderr)

def test_sync_to_non_existant_directory(self):
    """Syncing down into a missing local directory must create it."""
    key = 'foo.txt'
    missing_dir = os.path.join(self.files.rootdir, 'fakedir')
    cmdline = '%s s3://bucket/ %s' % (self.prefix, missing_dir)
    self.parsed_responses = [
        {"CommonPrefixes": [],
         "Contents": [{"Key": key, "Size": 3,
                       "LastModified": "2014-01-09T20:45:49.000Z"}]},
        {'ETag': '"c8afdb36c52cf4727836669019e69222-"',
         'Body': six.BytesIO(b'foo')},
    ]
    self.run_cmd(cmdline, expected_rc=0)
    # The downloaded object should now exist under the created directory.
    self.assertTrue(os.path.exists(os.path.join(missing_dir, key)))
6 changes: 4 additions & 2 deletions tests/integration/customizations/s3/test_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -778,8 +778,10 @@ def extra_setup(self):
def test_no_exist(self):
    """Uploading a nonexistent local file errors out with rc 255."""
    missing_file = os.path.join(self.files.rootdir, "no-exists-file")
    p = aws('s3 cp %s s3://%s/' % (missing_file, self.bucket_name))
    self.assertEqual(p.rc, 255, p.stderr)
    self.assertIn('The user-provided path %s does not exist.' % missing_file,
                  p.stderr)

@unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],
Expand Down
55 changes: 23 additions & 32 deletions tests/unit/customizations/s3/test_subcommands.py
Original file line number Diff line number Diff line change
Expand Up @@ -596,49 +596,26 @@ def test_check_path_type_fail(self):
with self.assertRaises(TypeError):
cmd_param.check_path_type(combos[path_args])

def test_check_src_path_pass(self):
    """Valid source paths of every kind pass ``check_src_path``.

    Covers an S3 object, a local file, an S3 prefix, and a local
    directory; none of these should raise an exception.
    """
    # Each case is (single-element path list, dir_op flag).
    cases = [
        (['s3://' + self.bucket + '/' + 'text1.txt'], False),  # s3 object
        ([self.loc_files[0]], False),                          # local file
        (['s3://' + self.bucket], True),                       # s3 prefix
        ([self.loc_files[3]], True),                           # local dir
    ]
    params = {}
    for path_list, is_dir_op in cases:
        params['dir_op'] = is_dir_op
        cmd_param = CommandParameters('put', params, '')
        cmd_param.add_region(mock.Mock())
        cmd_param.check_src_path(path_list)

def test_validate_streaming_paths_upload(self):
parameters = {'src': '-', 'dest': 's3://bucket'}
cmd_params = CommandParameters('cp', parameters, '')
cmd_params._validate_streaming_paths()
paths = ['-', 's3://bucket']
cmd_params = CommandParameters('cp', {}, '')
cmd_params.add_paths(paths)
self.assertTrue(cmd_params.parameters['is_stream'])
self.assertTrue(cmd_params.parameters['only_show_errors'])
self.assertFalse(cmd_params.parameters['dir_op'])

def test_validate_streaming_paths_download(self):
parameters = {'src': 'localfile', 'dest': '-'}
cmd_params = CommandParameters('cp', parameters, '')
cmd_params._validate_streaming_paths()
paths = ['s3://bucket/key', '-']
cmd_params = CommandParameters('cp', {}, '')
cmd_params.add_paths(paths)
self.assertTrue(cmd_params.parameters['is_stream'])
self.assertTrue(cmd_params.parameters['only_show_errors'])
self.assertFalse(cmd_params.parameters['dir_op'])

def test_validate_no_streaming_paths(self):
parameters = {'src': 'localfile', 'dest': 's3://bucket'}
cmd_params = CommandParameters('cp', parameters, '')
cmd_params._validate_streaming_paths()
paths = [self.file_creator.rootdir, 's3://bucket']
cmd_params = CommandParameters('cp', {}, '')
cmd_params.add_paths(paths)
self.assertFalse(cmd_params.parameters['is_stream'])

def test_validate_streaming_paths_error(self):
Expand All @@ -647,6 +624,20 @@ def test_validate_streaming_paths_error(self):
with self.assertRaises(ValueError):
cmd_params._validate_streaming_paths()

def test_validate_non_existent_local_path_upload(self):
    """Uploading from a missing local path raises RuntimeError."""
    missing = os.path.join(self.file_creator.rootdir, 'foo')
    cmd_param = CommandParameters('cp', {}, '')
    with self.assertRaises(RuntimeError):
        cmd_param.add_paths([missing, 's3://bucket/'])

def test_add_path_for_non_existsent_local_path_download(self):
    """A directory-op download to a missing local dir creates the dir."""
    missing = os.path.join(self.file_creator.rootdir, 'foo')
    cmd_param = CommandParameters('cp', {'dir_op': True}, '')
    cmd_param.add_paths(['s3://bucket', missing])
    self.assertTrue(os.path.exists(missing))


class HelpDocTest(BaseAWSHelpOutputTest):
def setUp(self):
Expand Down

0 comments on commit af3a079

Please sign in to comment.