Merge pull request #1810 from jku/fetcher-error-cleanup
ngclient: Make DownloadErrors more consistent
Jussi Kukkonen authored Feb 4, 2022
2 parents 67e2b24 + e6f3632 commit a8a7337
Showing 7 changed files with 56 additions and 28 deletions.
6 changes: 3 additions & 3 deletions examples/client_example/client_example.py
@@ -10,7 +10,7 @@
import shutil
from pathlib import Path

-from tuf.api.exceptions import RepositoryError
+from tuf.api.exceptions import DownloadError, RepositoryError
from tuf.ngclient import Updater

# constants
@@ -73,8 +73,8 @@ def download(target: str) -> bool:
path = updater.download_target(info)
print(f"Target downloaded and available in {path}")

-except (OSError, RepositoryError) as e:
-print(str(e))
+except (OSError, RepositoryError, DownloadError) as e:
+print(f"Failed to download target {target}: {e}")
return False

return True
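
For illustration, a minimal sketch of how client code might use the consolidated error handling after this change. The repository URLs, directories and target name are placeholders, and the Updater constructor and method names (get_targetinfo, download_target) are assumed from the ngclient API around this release:

    from tuf.api.exceptions import DownloadError, RepositoryError
    from tuf.ngclient import Updater

    def download_example_target() -> bool:
        # Placeholder repository layout; adjust URLs and directories as needed.
        updater = Updater(
            metadata_dir="./metadata",
            metadata_base_url="https://example.com/metadata/",
            target_dir="./targets",
            target_base_url="https://example.com/targets/",
        )
        try:
            updater.refresh()
            info = updater.get_targetinfo("file1.txt")
            if info is None:
                print("Target not found in trusted metadata")
                return False
            path = updater.download_target(info)
            print(f"Target downloaded and available in {path}")
        except (OSError, RepositoryError, DownloadError) as e:
            # DownloadError now covers network failures as well as HTTP errors.
            print(f"Failed to download target file1.txt: {e}")
            return False
        return True
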
14 changes: 7 additions & 7 deletions tests/repository_simulator.py
@@ -56,7 +56,7 @@
from securesystemslib.keys import generate_ed25519_key
from securesystemslib.signer import SSlibSigner

-from tuf.api.exceptions import FetcherHTTPError
+from tuf.api.exceptions import DownloadHTTPError
from tuf.api.metadata import (
SPECIFICATION_VERSION,
TOP_LEVEL_ROLE_NAMES,
@@ -205,7 +205,7 @@ def publish_root(self) -> None:
self.signed_roots.append(self.md_root.to_bytes(JSONSerializer()))
logger.debug("Published root v%d", self.root.version)

-def fetch(self, url: str) -> Iterator[bytes]:
+def _fetch(self, url: str) -> Iterator[bytes]:
"""Fetches data from the given url and returns an Iterator (or yields
bytes).
"""
@@ -238,7 +238,7 @@ def fetch(self, url: str) -> Iterator[bytes]:

yield self.fetch_target(target_path, prefix)
else:
raise FetcherHTTPError(f"Unknown path '{path}'", 404)
raise DownloadHTTPError(f"Unknown path '{path}'", 404)

def fetch_target(
self, target_path: str, target_hash: Optional[str]
@@ -251,12 +251,12 @@ def fetch_target(

repo_target = self.target_files.get(target_path)
if repo_target is None:
raise FetcherHTTPError(f"No target {target_path}", 404)
raise DownloadHTTPError(f"No target {target_path}", 404)
if (
target_hash
and target_hash not in repo_target.target_file.hashes.values()
):
raise FetcherHTTPError(f"hash mismatch for {target_path}", 404)
raise DownloadHTTPError(f"hash mismatch for {target_path}", 404)

logger.debug("fetched target %s", target_path)
return repo_target.data
@@ -273,7 +273,7 @@ def fetch_metadata(self, role: str, version: Optional[int] = None) -> bytes:
if role == Root.type:
# return a version previously serialized in publish_root()
if version is None or version > len(self.signed_roots):
raise FetcherHTTPError(f"Unknown root version {version}", 404)
raise DownloadHTTPError(f"Unknown root version {version}", 404)
logger.debug("fetched root version %d", version)
return self.signed_roots[version - 1]

@@ -289,7 +289,7 @@ def fetch_metadata(self, role: str, version: Optional[int] = None) -> bytes:
md = self.md_delegates.get(role)

if md is None:
raise FetcherHTTPError(f"Unknown role {role}", 404)
raise DownloadHTTPError(f"Unknown role {role}", 404)

md.signatures.clear()
for signer in self.signers[role].values():
2 changes: 1 addition & 1 deletion tests/test_fetcher_ng.py
@@ -101,7 +101,7 @@ def test_url_parsing(self) -> None:

# File not found error
def test_http_error(self) -> None:
-with self.assertRaises(exceptions.FetcherHTTPError) as cm:
+with self.assertRaises(exceptions.DownloadHTTPError) as cm:
self.url = f"{self.url_prefix}/non-existing-path"
self.fetcher.fetch(self.url)
self.assertEqual(cm.exception.status_code, 404)
2 changes: 1 addition & 1 deletion tuf/api/exceptions.py
@@ -52,7 +52,7 @@ class SlowRetrievalError(DownloadError):
"""Indicate that downloading a file took an unreasonably long time."""


-class FetcherHTTPError(DownloadError):
+class DownloadHTTPError(DownloadError):
"""
Returned by FetcherInterface implementations for HTTP errors.
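
Since DownloadHTTPError is a subclass of DownloadError, callers can handle every download failure with a single except clause and still inspect the HTTP status code when one is available. A small illustrative sketch (the path in the message is made up):

    from tuf.api.exceptions import DownloadError, DownloadHTTPError

    try:
        # DownloadHTTPError carries the HTTP status code alongside the message.
        raise DownloadHTTPError("Unknown path '/99.root.json'", 404)
    except DownloadError as e:
        # The single DownloadError handler also catches DownloadHTTPError.
        if isinstance(e, DownloadHTTPError) and e.status_code == 404:
            print("Not found on the repository")
        else:
            print(f"Download failed: {e}")
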
9 changes: 4 additions & 5 deletions tuf/ngclient/_internal/requests_fetcher.py
@@ -51,7 +51,7 @@ def __init__(self) -> None:
self.socket_timeout: int = 4 # seconds
self.chunk_size: int = 400000 # bytes

-def fetch(self, url: str) -> Iterator[bytes]:
+def _fetch(self, url: str) -> Iterator[bytes]:
"""Fetches the contents of HTTP/HTTPS url from a remote server
Arguments:
@@ -60,8 +60,7 @@ def fetch(self, url: str) -> Iterator[bytes]:
Raises:
exceptions.SlowRetrievalError: A timeout occurs while receiving
data.
-exceptions.FetcherHTTPError: An HTTP error code is received.
-exceptions.DownloadError: When there is a problem parsing the url.
+exceptions.DownloadHTTPError: An HTTP error code is received.
Returns:
A bytes iterator
@@ -89,7 +88,7 @@
except requests.HTTPError as e:
response.close()
status = e.response.status_code
-raise exceptions.FetcherHTTPError(str(e), status)
+raise exceptions.DownloadHTTPError(str(e), status)

return self._chunks(response)

@@ -122,7 +121,7 @@ def _get_session(self, url: str) -> requests.Session:
parsed_url = parse.urlparse(url)

if not parsed_url.scheme or not parsed_url.hostname:
raise exceptions.DownloadError("Failed to parse URL {url}")
raise exceptions.DownloadError(f"Failed to parse URL {url}")

session_index = f"{parsed_url.scheme}+{parsed_url.hostname}"
session = self._sessions.get(session_index)
49 changes: 39 additions & 10 deletions tuf/ngclient/fetcher.py
@@ -28,38 +28,65 @@ class FetcherInterface:
__metaclass__ = abc.ABCMeta

@abc.abstractmethod
-def fetch(self, url: str) -> Iterator[bytes]:
+def _fetch(self, url: str) -> Iterator[bytes]:
"""Fetches the contents of HTTP/HTTPS url from a remote server.
+Implementations must raise DownloadHTTPError if they receive an
+HTTP error code.
+Implementations may raise any errors but the ones that are not
+DownloadErrors will be wrapped in a DownloadError by fetch().
Arguments:
url: A URL string that represents a file location.
Raises:
exceptions.SlowRetrievalError: A timeout occurs while receiving
data.
-exceptions.FetcherHTTPError: An HTTP error code is received.
+exceptions.DownloadHTTPError: An HTTP error code was received.
Returns:
A bytes iterator
"""
raise NotImplementedError # pragma: no cover

+def fetch(self, url: str) -> Iterator[bytes]:
+"""Fetches the contents of HTTP/HTTPS url from a remote server.
+Arguments:
+url: A URL string that represents a file location.
+Raises:
+exceptions.DownloadError: An error occurred during download.
+exceptions.DownloadHTTPError: An HTTP error code was received.
+Returns:
+A bytes iterator
+"""
+# Ensure that fetch() only raises DownloadErrors, regardless of the
+# fetcher implementation
+try:
+return self._fetch(url)
+except exceptions.DownloadError as e:
+raise e
+except Exception as e:
+raise exceptions.DownloadError(f"Failed to download {url}") from e

@contextmanager
def download_file(self, url: str, max_length: int) -> Iterator[IO]:
"""Opens a connection to 'url' and downloads the content
up to 'max_length'.
Args:
-url: a URL string that represents the location of the file.
-max_length: an integer value representing the length of
-the file or an upper bound.
+url: a URL string that represents the location of the file.
+max_length: upper bound of file size in bytes.
Raises:
-exceptions.DownloadLengthMismatchError: downloaded bytes exceed
-'max_length'.
+exceptions.DownloadError: An error occurred during download.
+exceptions.DownloadLengthMismatchError: downloaded bytes exceed
+'max_length'.
+exceptions.DownloadHTTPError: An HTTP error code was received.
Yields:
A TemporaryFile object that points to the contents of 'url'.
"""
logger.debug("Downloading: %s", url)

@@ -96,8 +123,10 @@ def download_bytes(self, url: str, max_length: int) -> bytes:
max_length: upper bound of data size in bytes.
Raises:
+exceptions.DownloadError: An error occurred during download.
exceptions.DownloadLengthMismatchError: downloaded bytes exceed
'max_length'.
+exceptions.DownloadHTTPError: An HTTP error code was received.
Returns:
The content of the file in bytes.
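
To illustrate the new division of labor, here is a rough sketch of a custom fetcher: subclasses now implement _fetch(), while the public fetch() on FetcherInterface re-raises DownloadErrors as-is and wraps any other exception in DownloadError. The StaticFetcher class and its in-memory file mapping are invented for this example:

    from typing import Dict, Iterator

    from tuf.api.exceptions import DownloadHTTPError
    from tuf.ngclient.fetcher import FetcherInterface

    class StaticFetcher(FetcherInterface):
        """Serves bytes from an in-memory mapping instead of the network."""

        def __init__(self, files: Dict[str, bytes]) -> None:
            self._files = files

        def _fetch(self, url: str) -> Iterator[bytes]:
            if url not in self._files:
                # HTTP-style failures are reported as DownloadHTTPError.
                raise DownloadHTTPError(f"Unknown URL {url}", 404)
            yield self._files[url]

    fetcher = StaticFetcher({"https://example.com/1.root.json": b"{}"})
    # fetch() only ever raises DownloadErrors, regardless of the implementation.
    data = b"".join(fetcher.fetch("https://example.com/1.root.json"))
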
2 changes: 1 addition & 1 deletion tuf/ngclient/updater.py
@@ -315,7 +315,7 @@ def _load_root(self) -> None:
self._trusted_set.update_root(data)
self._persist_metadata(Root.type, data)

-except exceptions.FetcherHTTPError as exception:
+except exceptions.DownloadHTTPError as exception:
if exception.status_code not in {403, 404}:
raise
# 404/403 means current root is newest available
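
The updater change above relies on the status_code attribute to treat 403/404 as "no newer root available" while re-raising any other HTTP error. A standalone sketch of the same pattern; the try_fetch_root helper, url_prefix, version argument, and the 512000-byte length bound are placeholders, not part of this commit:

    from typing import Optional

    from tuf.api.exceptions import DownloadHTTPError
    from tuf.ngclient.fetcher import FetcherInterface

    def try_fetch_root(
        fetcher: FetcherInterface, url_prefix: str, version: int
    ) -> Optional[bytes]:
        try:
            return fetcher.download_bytes(f"{url_prefix}/{version}.root.json", 512000)
        except DownloadHTTPError as e:
            if e.status_code not in {403, 404}:
                raise
            # 404/403 means the current root is the newest available.
            return None
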
