use returncode of iterable_subprocess generator
This commit modifies SshUrlOperations to make use
of the `returncode` attribute of the generator that
is yielded by the `iterable_subprocess` context manager.

This makes it possible to fetch the return code of
the subprocess even if no `IterableSubprocessError`
was raised. That situation can occur if an exception
is raised inside an `iterable_subprocess` context.
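
The pattern the commit adopts, as a minimal sketch (not code from the commit;
the import paths and the `read_first_chunk` helper are assumptions made for
illustration): the exit status is read from the generator itself, so an
exception raised while consuming the stream, such as `StopIteration`, no
longer hides a failed subprocess.

    # Minimal sketch of the pattern adopted by this commit. The import paths
    # and the helper below are assumptions; only the use of `iter_subproc`,
    # `IterableSubprocessError`, and `stream.returncode` mirrors the
    # committed code.
    from datalad_next.runners import iter_subproc
    from datalad_next.iterable_subprocess.iterable_subprocess import (
        IterableSubprocessError,
    )


    def read_first_chunk(cmd: list) -> bytes:
        try:
            with iter_subproc(cmd) as stream:
                # Consuming the stream can raise StopIteration (e.g. when
                # the command produced no output at all). An exception
                # escaping this context prevents `IterableSubprocessError`
                # from being raised, so it is handled outside the `with`.
                chunk = next(stream)
        except (IterableSubprocessError, StopIteration):
            # The generator still carries the exit status of the
            # subprocess, even when no `IterableSubprocessError` was raised.
            if stream.returncode != 0:
                raise RuntimeError(
                    f'{cmd!r} failed with return code {stream.returncode}')
            chunk = b''
        return chunk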
christian-monch committed Dec 11, 2023
1 parent 23cf483 commit cae9210
Showing 1 changed file with 33 additions and 49 deletions.
82 changes: 33 additions & 49 deletions datalad_next/url_operations/ssh.py
@@ -94,17 +94,9 @@ def stat(self,
         cmd = ssh_cat.get_cmd(SshUrlOperations._stat_cmd)
         try:
             with iter_subproc(cmd) as stream:
-                # any exception that is raised in this context and not caught
-                # will prevent the creation of `IterableSubprocessError`. But
-                # we rely on the return code of the ssh-process to signal
-                # specific errors. Therefore, we catch the expected
-                # `StopIteration` here.
-                try:
-                    props = self._get_props(url, stream)
-                except StopIteration:
-                    pass
-        except IterableSubprocessError as e:
-            self._check_return_code(e.returncode, url)
+                props = self._get_props(url, stream)
+        except (IterableSubprocessError, StopIteration):
+            self._check_return_code(stream.returncode, url)
         return {k: v for k, v in props.items() if not k.startswith('_')}
 
     def _get_props(self, url, stream: Generator) -> dict:
@@ -177,44 +169,36 @@ def download(self,
         cmd = ssh_cat.get_cmd(f'{SshUrlOperations._stat_cmd}; {SshUrlOperations._cat_cmd}')
         try:
             with iter_subproc(cmd) as stream:
-                # any exception that is raised in this context and not caught
-                # will prevent the creation of `IterableSubprocessError`. But
-                # we rely on the return code of the ssh-process to signal
-                # specific errors. Therefore, we catch the expected
-                # `StopIteration` here.
-                try:
-                    props = self._get_props(from_url, stream)
-                    expected_size = props['content-length']
-                    # The stream might have changed due to not yet processed, but
-                    # fetched data, that is now chained in front of it. Therefore we
-                    # get the updated stream from the props
-                    download_stream = props.pop('_stream')
-
-                    dst_fp = sys.stdout.buffer \
-                        if to_path is None \
-                        else open(to_path, 'wb')
-
-                    # Localize variable access to minimize overhead
-                    dst_fp_write = dst_fp.write
-
-                    # download can start
-                    for chunk in self._with_progress(
-                            download_stream,
-                            progress_id=progress_id,
-                            label='downloading',
-                            expected_size=expected_size,
-                            start_log_msg=('Download %s to %s', from_url, to_path),
-                            end_log_msg=('Finished download',),
-                            update_log_msg=('Downloaded chunk',)
-                    ):
-                        # write data
-                        dst_fp_write(chunk)
-                        # compute hash simultaneously
-                        hasher.update(chunk)
-                except StopIteration:
-                    pass
-        except IterableSubprocessError as e:
-            self._check_return_code(e.returncode, from_url)
+                props = self._get_props(from_url, stream)
+                expected_size = props['content-length']
+                # The stream might have changed due to not yet processed, but
+                # fetched data, that is now chained in front of it. Therefore we
+                # get the updated stream from the props
+                download_stream = props.pop('_stream')
+
+                dst_fp = sys.stdout.buffer \
+                    if to_path is None \
+                    else open(to_path, 'wb')
+
+                # Localize variable access to minimize overhead
+                dst_fp_write = dst_fp.write
+
+                # download can start
+                for chunk in self._with_progress(
+                        download_stream,
+                        progress_id=progress_id,
+                        label='downloading',
+                        expected_size=expected_size,
+                        start_log_msg=('Download %s to %s', from_url, to_path),
+                        end_log_msg=('Finished download',),
+                        update_log_msg=('Downloaded chunk',)
+                ):
+                    # write data
+                    dst_fp_write(chunk)
+                    # compute hash simultaneously
+                    hasher.update(chunk)
+        except (IterableSubprocessError, StopIteration):
+            self._check_return_code(stream.returncode, from_url)
         finally:
             if dst_fp and to_path is not None:
                 dst_fp.close()
