Add type annotations
Add missing type annotations in download.py, fetcher.py
and requests_fetcher.py
Update docstrings to match the new style.

Signed-off-by: Teodora Sechkova <[email protected]>
sechkova committed Jun 15, 2021
1 parent af6fbd6 commit 7e288a0
Showing 3 changed files with 13 additions and 27 deletions.
30 changes: 6 additions & 24 deletions tuf/ngclient/_internal/download.py
@@ -1,30 +1,13 @@
#!/usr/bin/env python

# Copyright 2012 - 2017, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

"""Provides a class handling the download of URL contents to a file"
"""
<Program Name>
  download.py

<Started>
  February 21, 2012. Based on previous version by Geremy Condra.

<Author>
  Konstantin Andrianov
  Vladimir Diaz <[email protected]>

<Copyright>
  See LICENSE-MIT OR LICENSE for licensing information.

<Purpose>
  Download metadata and target files and check their validity. The hash and
  length of a downloaded file has to match the hash and length supplied by the
  metadata of that file.
"""
import logging
import tempfile
from contextlib import contextmanager
from typing import IO, Iterator
from urllib import parse

from tuf import exceptions
@@ -41,14 +24,14 @@ class FileDownloader:
the network IO library.
"""

def __init__(self, fetcher):
def __init__(self, fetcher: "FetcherInterface"):
if fetcher is None:
fetcher = RequestsFetcher()

self._fetcher = fetcher

@contextmanager
def download_file(self, url, required_length):
def download_file(self, url: str, required_length: int) -> Iterator[IO]:
"""Opens a connection to 'url' and downloads the content
up to 'required_length'.
@@ -74,8 +57,7 @@ def download_file(self, url, required_length):
logger.debug("Downloading: %s", url)

number_of_bytes_received = 0
# This is the temporary file that we will return to contain the
# contents of the downloaded file.

with tempfile.TemporaryFile() as temp_file:
chunks = self._fetcher.fetch(url, required_length)
for chunk in chunks:
@@ -88,7 +70,7 @@ def download_file(self, url, required_length):
temp_file.seek(0)
yield temp_file

def download_bytes(self, url, required_length):
def download_bytes(self, url: str, required_length: int) -> bytes:
"""Download bytes from given url
Returns the downloaded bytes, otherwise like download_file()
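For illustration only (not part of this commit): a minimal usage sketch of the newly annotated FileDownloader API, assuming the module path shown above; the URL and length are placeholders and error handling is omitted.

from tuf.ngclient._internal.download import FileDownloader

downloader = FileDownloader(None)  # None falls back to RequestsFetcher(), per __init__ above

# download_file is a context manager yielding a temporary file object (Iterator[IO])
with downloader.download_file("https://example.com/root.json", 512) as temp_file:
    data = temp_file.read()

# download_bytes returns the downloaded content directly as bytes
raw: bytes = downloader.download_bytes("https://example.com/root.json", 512)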
7 changes: 5 additions & 2 deletions tuf/ngclient/_internal/requests_fetcher.py
@@ -7,6 +7,7 @@

import logging
import time
from typing import Iterator
from urllib import parse

# Imports
@@ -47,7 +48,7 @@ def __init__(self):
# Some cookies may not be HTTP-safe.
self._sessions = {}

def fetch(self, url, required_length):
def fetch(self, url: str, required_length: int) -> Iterator[bytes]:
"""Fetches the contents of HTTP/HTTPS url from a remote server.
Ensures the length of the downloaded data is up to 'required_length'.
@@ -89,7 +90,9 @@ def fetch(self, url, required_length):
return self._chunks(response, required_length)

@staticmethod
def _chunks(response, required_length):
def _chunks(
response: "requests.Response", required_length: int
) -> Iterator[bytes]:
"""A generator function to be returned by fetch. This way the
caller of fetch can differentiate between connection and actual data
download."""
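For illustration only (not part of this commit): a sketch of consuming the annotated fetch() iterator from RequestsFetcher, assuming the module path shown above; the URL and required length are placeholders and exception handling is omitted.

from tuf.ngclient._internal.requests_fetcher import RequestsFetcher

fetcher = RequestsFetcher()
received = 0
# fetch() returns Iterator[bytes]; iterating the chunks drives the actual download
for chunk in fetcher.fetch("https://example.com/timestamp.json", 2048):
    received += len(chunk)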
3 changes: 2 additions & 1 deletion tuf/ngclient/fetcher.py
@@ -6,6 +6,7 @@

# Imports
import abc
from typing import Iterator


# Classes
@@ -20,7 +21,7 @@ class FetcherInterface:
__metaclass__ = abc.ABCMeta

@abc.abstractmethod
def fetch(self, url, required_length):
def fetch(self, url: str, required_length: int) -> Iterator[bytes]:
"""Fetches the contents of HTTP/HTTPS url from a remote server.
Ensures the length of the downloaded data is up to 'required_length'.
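For illustration only (not part of this commit): a hypothetical FetcherInterface subclass showing how the annotated abstract method would be implemented; the class name and canned payload are invented for the example, and the import path assumes tuf/ngclient/fetcher.py as shown above.

from typing import Iterator

from tuf.ngclient.fetcher import FetcherInterface


class InMemoryFetcher(FetcherInterface):
    """Serves pre-loaded bytes instead of performing network IO."""

    def fetch(self, url: str, required_length: int) -> Iterator[bytes]:
        # Yield at most required_length bytes of a canned payload
        yield b"{}"[:required_length]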
