Skip to content

Commit

Permalink
imports: Make 'settings' imports vendoring-compatible
Browse files Browse the repository at this point in the history
Use "from tuf import <module>" instead of "import tuf.<module>": this
makes it possible for a vendoring tool to vendor tuf. Fix all references
to <module> in the code.

Signed-off-by: Jussi Kukkonen <[email protected]>
  • Loading branch information
Jussi Kukkonen committed Mar 4, 2021
1 parent 88a6ae0 commit 7841008
Show file tree
Hide file tree
Showing 5 changed files with 30 additions and 30 deletions.
22 changes: 11 additions & 11 deletions tuf/client/updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,8 @@
from tuf import log
from tuf import mirrors
from tuf import roledb
from tuf import settings
import tuf.requests_fetcher
import tuf.settings
import tuf.keydb
import tuf.sig

Expand All @@ -151,14 +151,14 @@
# The Timestamp role does not have signed metadata about it; otherwise we
# would need an infinite regress of metadata. Therefore, we use some
# default, but sane, upper file length for its metadata.
DEFAULT_TIMESTAMP_UPPERLENGTH = tuf.settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH
DEFAULT_TIMESTAMP_UPPERLENGTH = settings.DEFAULT_TIMESTAMP_REQUIRED_LENGTH

# The Root role may be updated without knowing its version number if
# top-level metadata cannot be safely downloaded (e.g., keys may have been
# revoked, thus requiring a new Root file that includes the updated keys)
# and 'unsafely_update_root_if_necessary' is True.
# We use some default, but sane, upper file length for its metadata.
DEFAULT_ROOT_UPPERLENGTH = tuf.settings.DEFAULT_ROOT_REQUIRED_LENGTH
DEFAULT_ROOT_UPPERLENGTH = settings.DEFAULT_ROOT_REQUIRED_LENGTH

# See 'log.py' to learn how logging is handled in TUF.
logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -270,7 +270,7 @@ def get_valid_targetinfo(self, target_filename, match_custom_field=True):
formats.MAPPING_SCHEMA.check_match(self.map_file['mapping'])

# Set the top-level directory containing the metadata for each repository.
repositories_directory = tuf.settings.repositories_directory
repositories_directory = settings.repositories_directory

# Verify that the required local directories exist for each repository.
self._verify_metadata_directories(repositories_directory)
Expand Down Expand Up @@ -731,13 +731,13 @@ def __init__(self, repository_name, repository_mirrors, fetcher=None):
self.consistent_snapshot = False

# Ensure the repository metadata directory has been set.
if tuf.settings.repositories_directory is None:
if settings.repositories_directory is None:
raise exceptions.RepositoryError('The TUF update client'
' module must specify the directory containing the local repository'
' files. "tuf.settings.repositories_directory" MUST be set.')

# Set the path for the current set of metadata files.
repositories_directory = tuf.settings.repositories_directory
repositories_directory = settings.repositories_directory
repository_directory = os.path.join(repositories_directory, self.repository_name)

# raise MissingLocalRepository if the repo does not exist at all.
Expand Down Expand Up @@ -1138,7 +1138,7 @@ def neither_403_nor_404(mirror_error):
# Following the spec, try downloading the N+1th root for a certain maximum
# number of times.
lower_bound = current_root_metadata['version'] + 1
upper_bound = lower_bound + tuf.settings.MAX_NUMBER_ROOT_ROTATIONS
upper_bound = lower_bound + settings.MAX_NUMBER_ROOT_ROTATIONS

# Try downloading the next root.
for next_version in range(lower_bound, upper_bound):
Expand Down Expand Up @@ -1852,11 +1852,11 @@ def _update_metadata_if_changed(self, metadata_role,
# expected role. Note: The Timestamp role is not updated via this
# function.
if metadata_role == 'snapshot':
upperbound_filelength = tuf.settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH
upperbound_filelength = settings.DEFAULT_SNAPSHOT_REQUIRED_LENGTH

# The metadata is considered Targets (or delegated Targets metadata).
else:
upperbound_filelength = tuf.settings.DEFAULT_TARGETS_REQUIRED_LENGTH
upperbound_filelength = settings.DEFAULT_TARGETS_REQUIRED_LENGTH

try:
self._update_metadata(metadata_role, upperbound_filelength,
Expand Down Expand Up @@ -2681,7 +2681,7 @@ def _preorder_depth_first_walk(self, target_filepath):
current_metadata = self.metadata['current']
role_names = ['targets']
visited_role_names = set()
number_of_delegations = tuf.settings.MAX_NUMBER_OF_DELEGATIONS
number_of_delegations = settings.MAX_NUMBER_OF_DELEGATIONS

# Ensure the client has the most up-to-date version of 'targets.json'.
# Raise 'tuf.exceptions.NoWorkingMirrorError' if the changed metadata
Expand Down Expand Up @@ -2755,7 +2755,7 @@ def _preorder_depth_first_walk(self, target_filepath):
if target is None and number_of_delegations == 0 and len(role_names) > 0:
logger.debug(repr(len(role_names)) + ' roles left to visit, ' +
'but allowed to visit at most ' +
repr(tuf.settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.')
repr(settings.MAX_NUMBER_OF_DELEGATIONS) + ' delegations.')

return target

Expand Down
16 changes: 8 additions & 8 deletions tuf/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
import tuf
from tuf import exceptions
from tuf import formats
from tuf import settings

# See 'log.py' to learn how logging is handled in TUF.
logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -208,15 +209,14 @@ def _download_file(url, required_length, fetcher, STRICT_REQUIRED_LENGTH=True):
seconds_spent_receiving = stop_time - start_time
average_download_speed = number_of_bytes_received / seconds_spent_receiving

if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED:
logger.debug('The average download speed dropped below the minimum'
' average download speed set in tuf.settings.py. Stopping the'
' download!')
' average download speed set in settings. Stopping the download!')
break

else:
logger.debug('The average download speed has not dipped below the'
' minimum average download speed set in tuf.settings.py.')
' minimum average download speed set in settings.')

# Does the total number of downloaded bytes match the required length?
_check_downloaded_length(number_of_bytes_received, required_length,
Expand Down Expand Up @@ -273,7 +273,7 @@ def _check_downloaded_length(total_downloaded, required_length,
tuf.exceptions.SlowRetrievalError, if the total downloaded was
done in less than the acceptable download speed (as set in
tuf.settings.py).
tuf.settings).
<Returns>
None.
Expand All @@ -296,9 +296,9 @@ def _check_downloaded_length(total_downloaded, required_length,
# If the average download speed is below a certain threshold, we flag
# this as a possible slow-retrieval attack.
logger.debug('Average download speed: ' + repr(average_download_speed))
logger.debug('Minimum average download speed: ' + repr(tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED))
logger.debug('Minimum average download speed: ' + repr(settings.MIN_AVERAGE_DOWNLOAD_SPEED))

if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED:
raise exceptions.SlowRetrievalError(average_download_speed)

else:
Expand All @@ -312,7 +312,7 @@ def _check_downloaded_length(total_downloaded, required_length,
# will log a warning anyway. This is useful when we wish to download the
# Timestamp or Root metadata, for which we have no signed metadata; so,
# we must guess a reasonable required_length for it.
if average_download_speed < tuf.settings.MIN_AVERAGE_DOWNLOAD_SPEED:
if average_download_speed < settings.MIN_AVERAGE_DOWNLOAD_SPEED:
raise exceptions.SlowRetrievalError(average_download_speed)

else:
Expand Down
8 changes: 4 additions & 4 deletions tuf/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,8 @@
import time

import tuf
import tuf.settings
from tuf import exceptions
from tuf import settings

import securesystemslib.formats

Expand Down Expand Up @@ -125,8 +125,8 @@
# '_DEFAULT_LOG_LEVEL'. The log level of messages handled by 'file_handler'
# may be modified with 'set_filehandler_log_level()'. 'settings.LOG_FILENAME'
# will be opened in append mode.
if tuf.settings.ENABLE_FILE_LOGGING:
file_handler = logging.FileHandler(tuf.settings.LOG_FILENAME)
if settings.ENABLE_FILE_LOGGING:
file_handler = logging.FileHandler(settings.LOG_FILENAME)
file_handler.setLevel(_DEFAULT_FILE_LOG_LEVEL)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
Expand Down Expand Up @@ -381,7 +381,7 @@ def remove_console_handler():



def enable_file_logging(log_filename=tuf.settings.LOG_FILENAME):
def enable_file_logging(log_filename=settings.LOG_FILENAME):
"""
<Purpose>
Log messages to a file (i.e., 'log_filename'). The log level for the file
Expand Down
10 changes: 5 additions & 5 deletions tuf/repository_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,9 @@
from tuf import formats
from tuf import log
from tuf import roledb
from tuf import settings
import tuf.keydb
import tuf.sig
import tuf.settings

import securesystemslib
import securesystemslib.hash
Expand Down Expand Up @@ -87,7 +87,7 @@

# The algorithm used by the repository to generate the path hash prefixes
# of hashed bin delegations. Please see delegate_hashed_bins()
HASH_FUNCTION = tuf.settings.DEFAULT_HASH_ALGORITHM
HASH_FUNCTION = settings.DEFAULT_HASH_ALGORITHM



Expand Down Expand Up @@ -803,7 +803,7 @@ def import_ed25519_privatekey_from_file(filepath, password=None):
securesystemslib.exceptions.UnsupportedLibraryError, if 'filepath' cannot be
decrypted due to an invalid configuration setting (i.e., invalid
'tuf.settings.py' setting).
'tuf.settings' setting).
<Side Effects>
'password' is used to decrypt the 'filepath' key file.
Expand Down Expand Up @@ -993,7 +993,7 @@ def get_targets_metadata_fileinfo(filename, storage_backend, custom=None):
# file information, such as the file's author, version/revision
# numbers, etc.
filesize, filehashes = securesystemslib.util.get_file_details(filename,
tuf.settings.FILE_HASH_ALGORITHMS, storage_backend)
settings.FILE_HASH_ALGORITHMS, storage_backend)

return formats.make_targets_fileinfo(filesize, filehashes, custom=custom)

Expand Down Expand Up @@ -1543,7 +1543,7 @@ def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path,

if use_hashes:
hashes = securesystemslib.util.get_file_hashes(full_file_path,
tuf.settings.FILE_HASH_ALGORITHMS, storage_backend)
settings.FILE_HASH_ALGORITHMS, storage_backend)

return length, hashes

Expand Down
4 changes: 2 additions & 2 deletions tuf/scripts/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,8 @@
import tuf
from tuf import exceptions
from tuf import log
from tuf import settings
import tuf.client.updater
import tuf.settings

# See 'log.py' to learn how logging is handled in TUF.
logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -113,7 +113,7 @@ def update_client(parsed_arguments):
logger.debug('We have a valid argparse Namespace object.')

# Set the local repositories directory containing all of the metadata files.
tuf.settings.repositories_directory = '.'
settings.repositories_directory = '.'

# Set the repository mirrors. This dictionary is needed by the Updater
# class of updater.py.
Expand Down

0 comments on commit 7841008

Please sign in to comment.