diff --git a/datalad_next/url_operations/ssh.py b/datalad_next/url_operations/ssh.py
index d52477cb8..6b4cb7a95 100644
--- a/datalad_next/url_operations/ssh.py
+++ b/datalad_next/url_operations/ssh.py
@@ -94,17 +94,9 @@ def stat(self,
         cmd = ssh_cat.get_cmd(SshUrlOperations._stat_cmd)
         try:
             with iter_subproc(cmd) as stream:
-                # any exception that is raised in this context and not caught
-                # will prevent the creation of `IterableSubprocessError`. But
-                # we rely on the return code of the ssh-process to signal
-                # specific errors. Therefore, we catch the expected
-                # `StopIteration` here.
-                try:
-                    props = self._get_props(url, stream)
-                except StopIteration:
-                    pass
-        except IterableSubprocessError as e:
-            self._check_return_code(e.returncode, url)
+                props = self._get_props(url, stream)
+        except (IterableSubprocessError, StopIteration):
+            self._check_return_code(stream.returncode, url)
         return {k: v for k, v in props.items() if not k.startswith('_')}
 
     def _get_props(self, url, stream: Generator) -> dict:
@@ -177,44 +169,36 @@ def download(self,
         cmd = ssh_cat.get_cmd(f'{SshUrlOperations._stat_cmd}; {SshUrlOperations._cat_cmd}')
         try:
             with iter_subproc(cmd) as stream:
-                # any exception that is raised in this context and not caught
-                # will prevent the creation of `IterableSubprocessError`. But
-                # we rely on the return code of the ssh-process to signal
-                # specific errors. Therefore, we catch the expected
-                # `StopIteration` here.
-                try:
-                    props = self._get_props(from_url, stream)
-                    expected_size = props['content-length']
-                    # The stream might have changed due to not yet processed, but
-                    # fetched data, that is now chained in front of it. Therefore we
-                    # get the updated stream from the props
-                    download_stream = props.pop('_stream')
-
-                    dst_fp = sys.stdout.buffer \
-                        if to_path is None \
-                        else open(to_path, 'wb')
-
-                    # Localize variable access to minimize overhead
-                    dst_fp_write = dst_fp.write
-
-                    # download can start
-                    for chunk in self._with_progress(
-                            download_stream,
-                            progress_id=progress_id,
-                            label='downloading',
-                            expected_size=expected_size,
-                            start_log_msg=('Download %s to %s', from_url, to_path),
-                            end_log_msg=('Finished download',),
-                            update_log_msg=('Downloaded chunk',)
-                    ):
-                        # write data
-                        dst_fp_write(chunk)
-                        # compute hash simultaneously
-                        hasher.update(chunk)
-                except StopIteration:
-                    pass
-        except IterableSubprocessError as e:
-            self._check_return_code(e.returncode, from_url)
+                props = self._get_props(from_url, stream)
+                expected_size = props['content-length']
+                # The stream might have changed due to not yet processed, but
+                # fetched data, that is now chained in front of it. Therefore we
+                # get the updated stream from the props
+                download_stream = props.pop('_stream')
+
+                dst_fp = sys.stdout.buffer \
+                    if to_path is None \
+                    else open(to_path, 'wb')
+
+                # Localize variable access to minimize overhead
+                dst_fp_write = dst_fp.write
+
+                # download can start
+                for chunk in self._with_progress(
+                        download_stream,
+                        progress_id=progress_id,
+                        label='downloading',
+                        expected_size=expected_size,
+                        start_log_msg=('Download %s to %s', from_url, to_path),
+                        end_log_msg=('Finished download',),
+                        update_log_msg=('Downloaded chunk',)
+                ):
+                    # write data
+                    dst_fp_write(chunk)
+                    # compute hash simultaneously
+                    hasher.update(chunk)
+        except (IterableSubprocessError, StopIteration):
+            self._check_return_code(stream.returncode, from_url)
         finally:
             if dst_fp and to_path is not None:
                 dst_fp.close()
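
For illustration only, here is a minimal, self-contained sketch of the mechanism the simplified error handling relies on. The helper names (`chunked_subproc`, `_ChunkStream`, `consume_two_chunks`) are hypothetical stand-ins, not the datalad-next `iter_subproc` API; the point is that the name bound by `with ... as stream` stays accessible in an `except` clause after the block has exited, so a return code recorded on the stream object can be checked there even when the consuming code lets a `StopIteration` escape.

    # Hypothetical, simplified stand-in for the real iter_subproc helper; it only
    # demonstrates the mechanism the patch relies on: the `stream` name bound by
    # the with-statement remains accessible in an except-clause outside the
    # block, where the recorded return code can be checked.
    import subprocess
    import sys
    from contextlib import contextmanager


    class _ChunkStream:
        """Iterate over stdout chunks; the exit code is filled in on context exit."""
        def __init__(self, proc, chunk_size=65536):
            self._proc = proc
            self._chunk_size = chunk_size
            self.returncode = None

        def __iter__(self):
            return self

        def __next__(self):
            chunk = self._proc.stdout.read(self._chunk_size)
            if not chunk:
                raise StopIteration
            return chunk


    @contextmanager
    def chunked_subproc(cmd):
        """Yield a chunk iterator for `cmd` and record its exit code on exit."""
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        stream = _ChunkStream(proc)
        try:
            yield stream
        finally:
            proc.stdout.close()
            stream.returncode = proc.wait()


    def consume_two_chunks(stream):
        # a consumer that calls next() directly, so an exhausted stream lets
        # StopIteration escape -- analogous to the role of _get_props() above
        return next(stream) + next(stream)


    # the child prints one short chunk and exits non-zero, so the second next()
    # raises StopIteration inside the with-block
    cmd = [sys.executable, '-c', "import sys; sys.stdout.write('hi'); sys.exit(24)"]
    try:
        with chunked_subproc(cmd) as stream:
            data = consume_two_chunks(stream)
    except StopIteration:
        # the context has already exited; `stream` is still bound and now
        # carries the child's exit status (24 here), which is what a check
        # like _check_return_code() would inspect
        print(f'short read, command exited with {stream.returncode}')

Catching `(IterableSubprocessError, StopIteration)` outside the `with` block replaces the nested try/except of the old code and routes both the short-read case and the non-zero-exit case through a single return-code check.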