Skip to content

Commit

Permalink
ngclient: Fix logging to remove pylint disable
Browse files Browse the repository at this point in the history
Remove pylint disable logging-not-lazy, fix remaining non-lazy logging
(ngclient/updater.py still contains some but pylint does not notice
them: These will be fixed in issue theupdateframework#1400)

Signed-off-by: Jussi Kukkonen <[email protected]>
  • Loading branch information
Jussi Kukkonen committed May 21, 2021
1 parent 5f37eb3 commit 534021b
Show file tree
Hide file tree
Showing 3 changed files with 18 additions and 42 deletions.
1 change: 0 additions & 1 deletion tuf/api/pylintrc
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
disable=fixme,
too-few-public-methods,
too-many-arguments,
logging-not-lazy,

[BASIC]
good-names=i,j,k,v,e,f,fn,fp,_type
Expand Down
43 changes: 11 additions & 32 deletions tuf/ngclient/_internal/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def download_file(url, required_length, fetcher, strict_required_length=True):
# encoded as %5c in the url, which should also be replaced with a forward
# slash.
url = parse.unquote(url).replace("\\", "/")
logger.info("Downloading: " + repr(url))
logger.info("Downloading: %s", url)

# This is the temporary file that we will return to contain the contents of
# the downloaded file.
Expand Down Expand Up @@ -134,7 +134,7 @@ def download_file(url, required_length, fetcher, strict_required_length=True):
except Exception:
# Close 'temp_file'. Any written data is lost.
temp_file.close()
logger.debug("Could not download URL: " + repr(url))
logger.debug("Could not download URL: %s", url)
raise

else:
Expand Down Expand Up @@ -202,44 +202,23 @@ def _check_downloaded_length(
"""

if total_downloaded == required_length:
logger.info(
"Downloaded " + str(total_downloaded) + " bytes out of the"
" expected " + str(required_length) + " bytes."
)
logger.info("Downloaded %d bytes as expected.", total_downloaded)

else:
difference_in_bytes = abs(total_downloaded - required_length)

# What we downloaded is not equal to the required length, but did we ask
# for strict checking of required length?
if strict_required_length:
logger.info(
"Downloaded " + str(total_downloaded) + " bytes, but"
" expected "
+ str(required_length)
+ " bytes. There is a difference"
" of " + str(difference_in_bytes) + " bytes."
"Downloaded %d bytes, but expected %d bytes",
total_downloaded,
required_length,
)

# If the average download speed is below a certain threshold, we
# flag this as a possible slow-retrieval attack.
logger.debug(
"Average download speed: " + repr(average_download_speed)
)
logger.debug(
"Minimum average download speed: "
+ repr(tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED)
)

if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
raise exceptions.SlowRetrievalError(average_download_speed)

logger.debug(
"Good average download speed: "
+ repr(average_download_speed)
+ " bytes per second"
)

raise exceptions.DownloadLengthMismatchError(
required_length, total_downloaded
)
Expand All @@ -253,12 +232,12 @@ def _check_downloaded_length(
raise exceptions.SlowRetrievalError(average_download_speed)

logger.debug(
"Good average download speed: "
+ repr(average_download_speed)
+ " bytes per second"
"Good average download speed: %f bytes per second",
average_download_speed,
)

logger.info(
"Downloaded " + str(total_downloaded) + " bytes out of an"
" upper limit of " + str(required_length) + " bytes."
"Downloaded %d bytes out of upper limit of %d bytes.",
total_downloaded,
required_length,
)
16 changes: 7 additions & 9 deletions tuf/ngclient/_internal/requests_fetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,11 +118,9 @@ def chunks():
# downloaded.
if not data:
logger.debug(
"Downloaded "
+ repr(bytes_received)
+ "/"
+ repr(required_length)
+ " bytes."
"Downloaded %d out of %d bytes",
bytes_received,
required_length,
)

# Finally, we signal that the download is complete.
Expand Down Expand Up @@ -156,8 +154,8 @@ def _get_session(self, url):

session_index = parsed_url.scheme + "+" + parsed_url.hostname

logger.debug("url: " + url)
logger.debug("session index: " + session_index)
logger.debug("url: %s", url)
logger.debug("session index: %s", session_index)

session = self._sessions.get(session_index)

Expand All @@ -181,9 +179,9 @@ def _get_session(self, url):
}
)

logger.debug("Made new session for " + session_index)
logger.debug("Made new session %s", session_index)

else:
logger.debug("Reusing session for " + session_index)
logger.debug("Reusing session %s", session_index)

return session

0 comments on commit 534021b

Please sign in to comment.