-
Notifications
You must be signed in to change notification settings - Fork 272
/
updater.py
490 lines (401 loc) · 19.4 KB
/
updater.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
# Copyright 2020, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0
"""Client update workflow implementation
The Updater class provides an implementation of the
`TUF client workflow
<https://theupdateframework.github.io/specification/latest/#detailed-client-workflow>`_.
Updater provides an API to query available targets and to download them in a
secure manner: All downloaded files are verified by signed metadata.
High-level description of Updater functionality:
* Initializing an ``Updater`` loads and validates the trusted local root
metadata: This root metadata is used as the source of trust for all other
metadata.
* ``refresh()`` can optionally be called to update and load all top-level
metadata as described in the specification, using both locally cached
metadata and metadata downloaded from the remote repository. If refresh is
not done explicitly, it will happen automatically during the first target
info lookup.
* Updater can be used to download targets. For each target:
* ``Updater.get_targetinfo()`` is first used to find information about a
specific target. This will load new targets metadata as needed (from
local cache or remote repository).
* ``Updater.find_cached_target()`` can optionally be used to check if a
target file is already locally cached.
* ``Updater.download_target()`` downloads a target file and ensures it is
verified correct by the metadata.
Below is a simple example of using the Updater to download and verify
"file.txt" from a remote repository. The required environment for this example
is:
* A webserver running on http://localhost:8000, serving TUF repository
metadata at "/tuf-repo/" and targets at "/targets/"
* Local metadata directory "~/tufclient/metadata/" is writable and contains
a root metadata version for the remote repository
* Download directory "~/tufclient/downloads/" is writable
Example::
from tuf.ngclient import Updater
# Load trusted local root metadata from client metadata cache. Define
# where metadata and targets will be downloaded from.
updater = Updater(
repository_dir="~/tufclient/metadata/",
metadata_base_url="http://localhost:8000/tuf-repo/",
target_dir="~/tufclient/downloads/",
target_base_url="http://localhost:8000/targets/",
)
# Update metadata, then download target if needed
info = updater.get_targetinfo("file.txt")
path = updater.find_cached_target(info)
if path is None:
path = updater.download_target(info)
print(f"Local file {path} contains target {info.path}")
"""
import logging
import os
import tempfile
from typing import Optional, Set
from urllib import parse
from securesystemslib import util as sslib_util
from tuf import exceptions
from tuf.api.metadata import (
Metadata,
Root,
Snapshot,
TargetFile,
Targets,
Timestamp,
)
from tuf.ngclient._internal import requests_fetcher, trusted_metadata_set
from tuf.ngclient.config import UpdaterConfig
from tuf.ngclient.fetcher import FetcherInterface
logger = logging.getLogger(__name__)
class Updater:
    """Creates a new Updater instance and loads trusted root metadata.

    Args:
        repository_dir: Local metadata directory. Directory must be
            writable and it must contain a trusted root.json file.
        metadata_base_url: Base URL for all remote metadata downloads
        target_dir: Local targets directory. Directory must be writable. It
            will be used as the default target download directory by
            ``find_cached_target()`` and ``download_target()``
        target_base_url: Optional; Default base URL for all remote target
            downloads. Can be individually set in ``download_target()``
        fetcher: Optional; FetcherInterface implementation used to download
            both metadata and targets. Default is RequestsFetcher
        config: Optional; UpdaterConfig with advanced settings such as
            ``max_root_rotations`` and the maximum metadata lengths

    Raises:
        OSError: Local root.json cannot be read
        RepositoryError: Local root.json is invalid
    """

    def __init__(
        self,
        repository_dir: str,
        metadata_base_url: str,
        target_dir: Optional[str] = None,
        target_base_url: Optional[str] = None,
        fetcher: Optional[FetcherInterface] = None,
        config: Optional[UpdaterConfig] = None,
    ):
        self._dir = repository_dir
        self._metadata_base_url = _ensure_trailing_slash(metadata_base_url)
        self.target_dir = target_dir
        if target_base_url is None:
            self._target_base_url = None
        else:
            self._target_base_url = _ensure_trailing_slash(target_base_url)

        # Read trusted local root metadata: this is the source of trust for
        # all other metadata.
        data = self._load_local_metadata(Root.type)
        self._trusted_set = trusted_metadata_set.TrustedMetadataSet(data)
        self._fetcher = fetcher or requests_fetcher.RequestsFetcher()
        self.config = config or UpdaterConfig()

    def refresh(self) -> None:
        """Refreshes top-level metadata.

        Downloads, verifies, and loads metadata for the top-level roles in the
        specified order (root -> timestamp -> snapshot -> targets) implementing
        all the checks required in the TUF client workflow.

        A ``refresh()`` can be done only once during the lifetime of an Updater.
        If ``refresh()`` has not been explicitly called before the first
        ``get_targetinfo()`` call, it will be done implicitly at that time.

        The metadata for delegated roles is not updated by ``refresh()``:
        that happens on demand during ``get_targetinfo()``. However, if the
        repository uses `consistent_snapshot
        <https://theupdateframework.github.io/specification/latest/#consistent-snapshots>`_,
        then all metadata downloaded by the Updater will use the same
        consistent repository state.

        Raises:
            OSError: New metadata could not be written to disk
            RepositoryError: Metadata failed to verify in some way
            TODO: download-related errors
        """
        self._load_root()
        self._load_timestamp()
        self._load_snapshot()
        self._load_targets(Targets.type, Root.type)

    def _generate_target_file_path(self, targetinfo: TargetFile) -> str:
        """Return the local download path for ``targetinfo`` in target_dir."""
        if self.target_dir is None:
            raise ValueError("target_dir must be set if filepath is not given")

        # Use URL encoded target path as filename: this avoids path
        # separators and other problematic characters in the target path.
        filename = parse.quote(targetinfo.path, "")
        return os.path.join(self.target_dir, filename)

    def get_targetinfo(self, target_path: str) -> Optional[TargetFile]:
        """Returns TargetFile instance with information for 'target_path'.

        The return value can be used as an argument to
        ``download_target()`` and ``find_cached_target()``.

        If ``refresh()`` has not been called before calling
        ``get_targetinfo()``, the refresh will be done implicitly.

        As a side-effect this method downloads all the additional (delegated
        targets) metadata it needs to return the target information.

        Args:
            target_path: A `path-relative-URL string
                <https://url.spec.whatwg.org/#path-relative-url-string>`_
                that uniquely identifies the target within the repository.

        Raises:
            OSError: New metadata could not be written to disk
            RepositoryError: Metadata failed to verify in some way
            TODO: download-related errors

        Returns:
            A TargetFile instance or None.
        """
        if self._trusted_set.targets is None:
            self.refresh()
        return self._preorder_depth_first_walk(target_path)

    def find_cached_target(
        self,
        targetinfo: TargetFile,
        filepath: Optional[str] = None,
    ) -> Optional[str]:
        """Checks whether a local file is an up to date target

        Args:
            targetinfo: TargetFile from ``get_targetinfo()``.
            filepath: Local path to file. If None, a file path is generated
                based on ``target_dir`` constructor argument.

        Raises:
            ValueError: Incorrect arguments

        Returns:
            Local file path if the file is an up to date target file.
            None if file is not found or it is not up to date.
        """
        if filepath is None:
            filepath = self._generate_target_file_path(targetinfo)

        try:
            with open(filepath, "rb") as target_file:
                targetinfo.verify_length_and_hashes(target_file)
            return filepath
        except (OSError, exceptions.LengthOrHashMismatchError):
            # File missing, unreadable, or stale: not a usable cached target
            return None

    def download_target(
        self,
        targetinfo: TargetFile,
        filepath: Optional[str] = None,
        target_base_url: Optional[str] = None,
    ) -> str:
        """Downloads the target file specified by ``targetinfo``.

        Args:
            targetinfo: TargetFile from ``get_targetinfo()``.
            filepath: Local path to download into. If None, the file is
                downloaded into directory defined by ``target_dir`` constructor
                argument using a generated filename. If file already exists,
                it is overwritten.
            target_base_url: Base URL used to form the final target
                download URL. Default is the value provided in Updater()

        Raises:
            ValueError: Invalid arguments
            TODO: download-related errors
            TODO: file write errors

        Returns:
            Local path to downloaded file
        """
        if filepath is None:
            filepath = self._generate_target_file_path(targetinfo)

        if target_base_url is None:
            if self._target_base_url is None:
                raise ValueError(
                    "target_base_url must be set in either "
                    "download_target() or constructor"
                )
            target_base_url = self._target_base_url
        else:
            target_base_url = _ensure_trailing_slash(target_base_url)

        target_filepath = targetinfo.path
        consistent_snapshot = self._trusted_set.root.signed.consistent_snapshot
        if consistent_snapshot and self.config.prefix_targets_with_hash:
            # Consistent snapshot repositories serve targets as
            # "<hash>.<basename>": prefix the filename with a known hash.
            hashes = list(targetinfo.hashes.values())
            dirname, sep, basename = target_filepath.rpartition("/")
            target_filepath = f"{dirname}{sep}{hashes[0]}.{basename}"
        full_url = f"{target_base_url}{target_filepath}"

        with self._fetcher.download_file(
            full_url, targetinfo.length
        ) as target_file:
            try:
                targetinfo.verify_length_and_hashes(target_file)
            except exceptions.LengthOrHashMismatchError as e:
                raise exceptions.RepositoryError(
                    f"{target_filepath} length or hashes do not match"
                ) from e

            # Verified: persist the temporary download to its final path
            sslib_util.persist_temp_file(target_file, filepath)

        logger.info("Downloaded target %s", targetinfo.path)
        return filepath

    def _download_metadata(
        self, rolename: str, length: int, version: Optional[int] = None
    ) -> bytes:
        """Download a metadata file and return it as bytes"""
        if version is None:
            url = f"{self._metadata_base_url}{rolename}.json"
        else:
            # Consistent snapshot: versioned metadata filenames
            url = f"{self._metadata_base_url}{version}.{rolename}.json"
        return self._fetcher.download_bytes(url, length)

    def _load_local_metadata(self, rolename: str) -> bytes:
        """Read metadata for ``rolename`` from the local metadata directory."""
        encoded_name = parse.quote(rolename, "")
        with open(os.path.join(self._dir, f"{encoded_name}.json"), "rb") as f:
            return f.read()

    def _persist_metadata(self, rolename: str, data: bytes) -> None:
        """Write metadata to disk atomically to avoid data loss."""
        # encode the rolename to avoid issues with e.g. path separators
        encoded_name = parse.quote(rolename, "")
        filename = os.path.join(self._dir, f"{encoded_name}.json")
        try:
            with tempfile.NamedTemporaryFile(
                dir=self._dir, delete=False
            ) as temp_file:
                temp_file.write(data)
            os.replace(temp_file.name, filename)
        except OSError as e:
            # Remove the temporary file on failure so we don't leak files
            # into the metadata directory. NameError is possible if
            # NamedTemporaryFile() itself raised before binding temp_file.
            try:
                os.remove(temp_file.name)
            except (NameError, OSError):
                pass
            raise e

    def _load_root(self) -> None:
        """Load remote root metadata.

        Sequentially load and persist on local disk every newer root metadata
        version available on the remote.
        """
        # Update the root role
        lower_bound = self._trusted_set.root.signed.version + 1
        upper_bound = lower_bound + self.config.max_root_rotations

        for next_version in range(lower_bound, upper_bound):
            try:
                data = self._download_metadata(
                    Root.type,
                    self.config.root_max_length,
                    next_version,
                )
                self._trusted_set.update_root(data)
                self._persist_metadata(Root.type, data)
            except exceptions.FetcherHTTPError as exception:
                if exception.status_code not in {403, 404}:
                    raise
                # 404/403 means current root is newest available
                break

    def _load_timestamp(self) -> None:
        """Load local and remote timestamp metadata"""
        try:
            data = self._load_local_metadata(Timestamp.type)
            self._trusted_set.update_timestamp(data)
        except (OSError, exceptions.RepositoryError) as e:
            # Local timestamp does not exist or is invalid
            logger.debug("Local timestamp not valid as final: %s", e)

        # Load from remote (whether local load succeeded or not)
        data = self._download_metadata(
            Timestamp.type, self.config.timestamp_max_length
        )
        self._trusted_set.update_timestamp(data)
        self._persist_metadata(Timestamp.type, data)

    def _load_snapshot(self) -> None:
        """Load local (and if needed remote) snapshot metadata"""
        try:
            data = self._load_local_metadata(Snapshot.type)
            self._trusted_set.update_snapshot(data, trusted=True)
            logger.debug("Local snapshot is valid: not downloading new one")
        except (OSError, exceptions.RepositoryError) as e:
            # Local snapshot does not exist or is invalid: update from remote
            logger.debug("Local snapshot not valid as final: %s", e)

            assert self._trusted_set.timestamp is not None  # nosec
            snapshot_meta = self._trusted_set.timestamp.signed.snapshot_meta
            length = snapshot_meta.length or self.config.snapshot_max_length
            version = None
            if self._trusted_set.root.signed.consistent_snapshot:
                version = snapshot_meta.version

            data = self._download_metadata(Snapshot.type, length, version)
            self._trusted_set.update_snapshot(data)
            self._persist_metadata(Snapshot.type, data)

    def _load_targets(self, role: str, parent_role: str) -> Metadata[Targets]:
        """Load local (and if needed remote) metadata for 'role'."""
        # Avoid loading 'role' more than once during "get_targetinfo"
        if role in self._trusted_set:
            return self._trusted_set[role]

        try:
            data = self._load_local_metadata(role)
            delegated_targets = self._trusted_set.update_delegated_targets(
                data, role, parent_role
            )
            logger.debug("Local %s is valid: not downloading new one", role)
            return delegated_targets
        except (OSError, exceptions.RepositoryError) as e:
            # Local 'role' does not exist or is invalid: update from remote
            logger.debug("Failed to load local %s: %s", role, e)

            assert self._trusted_set.snapshot is not None  # nosec
            metainfo = self._trusted_set.snapshot.signed.meta[f"{role}.json"]
            length = metainfo.length or self.config.targets_max_length
            version = None
            if self._trusted_set.root.signed.consistent_snapshot:
                version = metainfo.version

            data = self._download_metadata(role, length, version)
            delegated_targets = self._trusted_set.update_delegated_targets(
                data, role, parent_role
            )
            self._persist_metadata(role, data)

            return delegated_targets

    def _preorder_depth_first_walk(
        self, target_filepath: str
    ) -> Optional[TargetFile]:
        """
        Interrogates the tree of target delegations in order of appearance
        (which implicitly order trustworthiness), and returns the matching
        target found in the most trusted role.
        """
        # List of delegations to be interrogated. A (role, parent role) pair
        # is needed to load and verify the delegated targets metadata.
        delegations_to_visit = [(Targets.type, Root.type)]
        visited_role_names: Set[str] = set()
        number_of_delegations = self.config.max_delegations

        # Preorder depth-first traversal of the graph of target delegations.
        while number_of_delegations > 0 and len(delegations_to_visit) > 0:
            # Pop the role name from the top of the stack.
            role_name, parent_role = delegations_to_visit.pop(-1)

            # Skip any visited current role to prevent cycles.
            if role_name in visited_role_names:
                logger.debug("Skipping visited current role %s", role_name)
                continue

            # The metadata for 'role_name' must be downloaded/updated before
            # its targets, delegations, and child roles can be inspected.
            targets = self._load_targets(role_name, parent_role).signed

            target = targets.targets.get(target_filepath)
            if target is not None:
                logger.debug("Found target in current role %s", role_name)
                return target

            # After preorder check, add current role to set of visited roles.
            visited_role_names.add(role_name)

            # And also decrement number of visited roles.
            number_of_delegations -= 1

            if targets.delegations is not None:
                child_roles_to_visit = []
                # NOTE: This may be a slow operation if there are many
                # delegated roles.
                for child_role in targets.delegations.roles.values():
                    if child_role.is_delegated_path(target_filepath):
                        logger.debug("Adding child role %s", child_role.name)
                        child_roles_to_visit.append(
                            (child_role.name, role_name)
                        )
                        if child_role.terminating:
                            logger.debug("Not backtracking to other roles.")
                            delegations_to_visit = []
                            break
                # Push 'child_roles_to_visit' in reverse order of appearance
                # onto 'delegations_to_visit'. Roles are popped from the end of
                # the list.
                child_roles_to_visit.reverse()
                delegations_to_visit.extend(child_roles_to_visit)

        if number_of_delegations == 0 and len(delegations_to_visit) > 0:
            logger.debug(
                "%d roles left to visit, but allowed to "
                "visit at most %d delegations.",
                len(delegations_to_visit),
                self.config.max_delegations,
            )

        # If this point is reached then target is not found, return None
        return None
def _ensure_trailing_slash(url: str) -> str:
"""Return url guaranteed to end in a slash"""
return url if url.endswith("/") else f"{url}/"