From 83d6d07943c34f3544e2388579aa4be55de37f48 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Fri, 26 Jun 2020 16:40:59 -0400 Subject: [PATCH 01/42] WIP --- tuf/api/keys.py | 169 ++++++++++++++++++++++++++++++++++++++++++++ tuf/api/metadata.py | 148 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 317 insertions(+) create mode 100644 tuf/api/keys.py create mode 100644 tuf/api/metadata.py diff --git a/tuf/api/keys.py b/tuf/api/keys.py new file mode 100644 index 0000000000..72f0be8ae0 --- /dev/null +++ b/tuf/api/keys.py @@ -0,0 +1,169 @@ +# Borrowed from https://github.com/cnabio/signy/blob/afba301697df456b363790dc16483408b626a8af/scripts/in-toto/keys.py +# TODO: +# * Make a storage/provider-agnostic (e.g., filesystem, HSM) key management API, like securesystemslib.storage. + +# Imports. + +# 1st-party. +import os +import shutil + +# 2nd-party. +from typing import Any, Dict, List, Optional + +# 3rd-party. +from securesystemslib.interface import ( + generate_and_write_ed25519_keypair, + get_password, + import_ed25519_privatekey_from_file, + import_ed25519_publickey_from_file, +) + +# Utility classes. 
+ +class Threshold: + + def __init__(self, m: int = 1, n: int = 1): + assert m > 0, f'{m} <= 0' + assert n > 0, f'{n} <= 0' + assert m <= n, f'{m} > {n}' + self.m = m + self.n = n + +class Keypath: + + def __init__(self, private: str, public: str): + assert os.path.isfile(private), private + assert os.path.isfile(public), public + self.private = private + self.public = public + +class Key: + + def __init__(self, path: str, obj: Any): + self.path = path + self.obj = obj + +class Keypair: + + def __init__(self, private: Key, public: Key): + self.private = private + self.public = public + +Keypairs = List[Keypair] + +class Keyring: + + def __init__(self, threshold: Threshold, keypairs: Keypairs): + if len(keypairs) >= threshold.m: + logging.warning(f'{len(keypairs)} >= {threshold.m}') + if len(keypairs) <= threshold.n + logging.warning(f'{len(keypairs)} <= {threshold.n}') + self.threshold = threshold + self.keypairs = keypairs + +# Useful for securesytemslib. +KeyDict = Dict[str, Any] + +# Utility functions. + +def get_new_private_keypath(keystore_dir: str, rolename: str, i : int = 1) -> str: + return os.path.join(keystore_dir, f'{rolename}_ed25519_key_{i}') + +def get_public_keypath(private_keypath: str) -> str: + # this is the tuf filename convention at the time of writing. 
+ return f'{private_keypath}.pub' + +def get_private_keys_from_keyring(keyring: Keyring) -> KeyDict: + privkeys = {} + + for keypair in keyring.keypairs: + privkey = keypair.private.obj + keyid = privkey['keyid'] + assert keyid not in privkeys + privkeys[keyid] = privkey + + return privkeys + +def get_public_keys_from_keyring(keyring: Keyring) -> KeyDict: + pubkeys = {} + + for keypair in keyring.keypairs: + pubkey = keypair.public.obj + keyid = pubkey['keyid'] + assert keyid not in pubkeys + pubkeys[keyid] = pubkey + + return pubkeys + +def write_keypair(keystore_dir: str, rolename: str, i: int = 1, n: int = 1, passphrase: Optional[str] = None) -> Keypath: + private_keypath = get_new_private_keypath(keystore_dir, rolename, i) + assert not os.path.isfile(private_keypath) + public_keypath = get_public_keypath(private_keypath) + assert not os.path.isfile(public_keypath) + + # Make the keystore directory, WR-only by self, if not already there. + os.makedirs(keystore_dir, mode=0o700, exist_ok=True) + + # FIXME: do not assume Ed25519 + generate_and_write_ed25519_keypair(private_keypath, password=passphrase) + + return Keypath(private_keypath, public_keypath) + +def read_keypair(keypath: Keypath, passphrase: Optional[str] = None) -> Keypair: + private_keypath = keypath.private + private_key_obj = import_ed25519_privatekey_from_file(keypath.private, password=passphrase) + private_key = Key(private_keypath, private_key_obj) + + # and its corresponding public key. + public_keypath = keypath.public + public_key_obj = import_ed25519_publickey_from_file(keypath.public) + public_key = Key(public_keypath, public_key_obj) + + return Keypair(private_key, public_key) + +def rename_keys_to_match_keyid(keystore_dir: str, keypair: Keypair) -> None: + ''' + + Rename public / private keys to match their keyid, so that it is easy + to later find public keys on the repository, or private keys on disk. 
+ Also see https://github.com/theupdateframework/tuf/issues/573 + ''' + + keyid = keypair.public.obj['keyid'] + + # Rename the private key filename to match the keyid. + assert os.path.exists(keystore_dir), keystore_dir + new_private_keypath = os.path.join(keystore_dir, keyid) + # Move the key to the new filename. + assert not os.path.isfile(new_private_keypath), new_private_keypath + shutil.move(keypair.private.path, new_private_keypath) + # Update the path to the key. + keypair.private.path = new_private_keypath + + # Rename the public key filename to match the keyid. + new_public_keypath = get_public_keypath(new_private_keypath) + # Move the key to the new filename. + assert not os.path.isfile(new_public_keypath), new_public_keypath + shutil.move(keypair.public.path, new_public_keypath) + # Update the path to the key. + keypair.public.path = new_public_keypath + +def write_and_read_new_keys(keystore_dir: str, rolename: str, threshold: Threshold) -> Keyring: + keypairs = [] + + for i in range(1, threshold.n + 1): + print(f'Writing key {i}/{threshold.n} for the "{rolename}" rolename...') + passphrase = get_password( + prompt='Please enter a NON-EMPTY passphrase to ENCRYPT this key: ', + confirm=True + ) + keypath = write_keypair(keystore_dir, rolename, i, threshold.n, passphrase) + keypair = read_keypair(keypath, passphrase) + # Rename the private and public keys to match the keyid instead. + # Why? So that we know how to find keys later on repository / disk. + rename_keys_to_match_keyid(keystore_dir, keypair) + keypairs.append(keypair) + print() + + return Keyring(threshold, tuple(keypairs)) \ No newline at end of file diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py new file mode 100644 index 0000000000..8ac631c922 --- /dev/null +++ b/tuf/api/metadata.py @@ -0,0 +1,148 @@ +# 1st-party. 
+from keys import ( + Keyring, + Threshold, + get_private_keys_from_keyring, + get_public_keys_from_keyring, + sorted_list_of_keyids, + write_and_read_new_keys, +) + +# 2nd-party. +from datetime import datetime +from typing import Any, Dict, List, Optional + +import json + +# 3rd-party. +from dateutil.relativedelta import relativedelta +from securesystemslib.formats import encode_canonical +from securesystemslib.keys import create_signature, verify_signature +from securesystemslib.util import load_json_file +from tuf.repository_lib import ( + _get_written_metadata, + generate_snapshot_metadata, + generate_targets_metadata, + generate_timestamp_metadata, +) + +# Types. +JsonDict = Dict[str, Any] + +# Classes. + +class Metadata: + # By default, a Metadata would be a rather empty one. + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(), keyring: Optional[Keyring] = None, version: int = 1) -> None: + self.consistent_snapshot = consistent_snapshot + + self.keyring = keyring + self.expiration = expiration + + assert version > 1, f'{version} < 1' + self.version = version + + # And you would use this method to populate it from a file. + def read_from_json(self, filename: str) -> None: + signable = load_json_file(filename) + + # TODO: use some basic schema checks + signatures = signable['signatures'] + signed = signable['signed'] + + self.expiration = datetime.strptime(signed['expiration'], '%b %d %Y %I:%M%p') + self.version = signed['version'] + + @property + def signable(self) -> JsonDict: + """ + To be overridden by the inheriting class. + The idea is to serialize this object into the signable we expect. + """ + raise NotImplementedError() + + def signed(self) -> str: + return encode_canonical(self.signable['signed']).encode('utf-8') + + def signatures(self) -> List: + return self.signable['signatures'] + + # TODO: We need to update the expiration timestamp using self.expiration. + # Oh, and bump the version number. 
+ # And, oh, take care of consistent snapshot of metadata. + def sign(self) -> JsonDict: + # TODO: not so simple. IDK why we don't index signatures by + # keyids,but we need to walk through the list to find any previous + # signature by the same keyid. + def update_signature(signatures, keyid, signature): + raise NotImplementedError() + + signed = self.signed + signatures = self.signatures + + for keypair in self.keyring.keypairs: + signature = create_signature(keypair.private.obj, signed) + keyid = keypair.private.obj['keyid'] + update_signature(signatures, keyid, signature) + + return {'signed': signed, 'signatures': signatures} + + def verify(self) -> bool: + signed = self.signed + signatures = self.signatures + good_signatures = 0 + + for keypair in self.keyring.keypairs: + try: + keyid = keypair.public.obj['keyid'] + for signature in signatures: + if signature['keyid'] == keyid: + if verify_signature(keypair.public.obj, signature, signed): + good_signatures += 1 + break + except: + logging.warning(f'Could not verify signature for key {keyid}') + continue + + return good_signatures >= self.keyring.threshold.m + + def write_to_json(self, filename: str) -> None: + with open(filename, 'r+b') as f: + f.write(_get_written_metadata(self.sign())) + +class Timestamp(Metadata): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + + # FIXME + def signable(self): + return generate_timestamp_metadata() + + # Update metadata about the snapshot metadata. 
+ def update(self, rolename: str, version: int, length: int, hashes: JsonDict): + raise NotImplementedError() + +class Snapshot(Metadata): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + + # FIXME + def signable(self): + return generate_snapshot_metadata() + + # Add or update metadata about the targets metadata. + def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None): + raise NotImplementedError() + +class Targets(Metadata): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + + # FIXME + def signable(self): + return generate_targets_metadata() + + # Add or update metadata about the target. + # TODO: how to handle writing consistent targets? 
+ def update(self, filename: str, fileinfo: JsonDict): + raise NotImplementedError() From 2e3ceb7ff323af582325ba7dcf77012054b0421b Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Tue, 30 Jun 2020 13:54:17 +0100 Subject: [PATCH 02/42] tuf.api: set consistent_snapshot during read_from_json Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 8ac631c922..68011925c6 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -21,6 +21,7 @@ from securesystemslib.util import load_json_file from tuf.repository_lib import ( _get_written_metadata, + _strip_version_number, generate_snapshot_metadata, generate_targets_metadata, generate_timestamp_metadata, @@ -47,11 +48,18 @@ def read_from_json(self, filename: str) -> None: signable = load_json_file(filename) # TODO: use some basic schema checks - signatures = signable['signatures'] - signed = signable['signed'] + self.signatures = signable['signatures'] + self.signed = signable['signed'] self.expiration = datetime.strptime(signed['expiration'], '%b %d %Y %I:%M%p') - self.version = signed['version'] + self.version = self.signed['version'] + + fn, fn_ver = _strip_version_number(filename, True) + if fn_ver: + assert fn_ver == self.version, f'{fn_ver} != {self.version}' + self.consistent_snapshot = True + else: + self.consistent_snapshot = False @property def signable(self) -> JsonDict: From 721def4eb06acf6e7c2accf2d7201541f0819d71 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Tue, 30 Jun 2020 13:55:27 +0100 Subject: [PATCH 03/42] tuf.api: add helpers to bump version and expiration Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 68011925c6..6eb277095e 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -69,6 +69,12 @@ def signable(self) -> JsonDict: """ raise NotImplementedError() + 
def bump_version(self) -> None: + self.version = self.version + 1 + + def bump_expiration(self, delta: relativedelta = relativedelta(days=1)) -> None: + self.expiration = self.expiration + delta + def signed(self) -> str: return encode_canonical(self.signable['signed']).encode('utf-8') From 5ef60ca1871953d3982458acda736815a4c9bc86 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Tue, 30 Jun 2020 13:56:43 +0100 Subject: [PATCH 04/42] tuf.api: implement update_signatures() Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 6eb277095e..7cae34db2f 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -81,15 +81,16 @@ def signed(self) -> str: def signatures(self) -> List: return self.signable['signatures'] - # TODO: We need to update the expiration timestamp using self.expiration. - # Oh, and bump the version number. - # And, oh, take care of consistent snapshot of metadata. def sign(self) -> JsonDict: - # TODO: not so simple. IDK why we don't index signatures by - # keyids,but we need to walk through the list to find any previous - # signature by the same keyid. 
def update_signature(signatures, keyid, signature): - raise NotImplementedError() + updated = False + keyid_signature = {'keyid':keyid, 'sig':signature} + for idx, keyid_sig in enumerate(signatures): + if keyid_sig['keyid'] == keyid: + signatures[idx] = keyid_signature + updated = True + if not updated: + signatures.append({'keyid':keyid, 'sig':signature}) signed = self.signed signatures = self.signatures @@ -98,7 +99,8 @@ def update_signature(signatures, keyid, signature): signature = create_signature(keypair.private.obj, signed) keyid = keypair.private.obj['keyid'] update_signature(signatures, keyid, signature) - + + self.signatures = signatures return {'signed': signed, 'signatures': signatures} def verify(self) -> bool: From 92f97a45e3c8934b2872075fe53e606f459aad22 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Tue, 30 Jun 2020 13:58:02 +0100 Subject: [PATCH 05/42] tuf.api: implement metadata.Snapshot Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 7cae34db2f..e1a23d10a1 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -19,6 +19,7 @@ from securesystemslib.formats import encode_canonical from securesystemslib.keys import create_signature, verify_signature from securesystemslib.util import load_json_file +import tuf.formats from tuf.repository_lib import ( _get_written_metadata, _strip_version_number, @@ -140,15 +141,28 @@ def update(self, rolename: str, version: int, length: int, hashes: JsonDict): class Snapshot(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): - super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + super().__init__(consistent_snapshot, expiration, keyring, version) + self.targets_fileinfo = {} + + def read_from_json(self, filename: str) -> None: + 
super().read_from_json(filename) + meta = self.signed['meta'] + for target_role in meta: + version = meta[target_role]['version'] + length = meta[target_role].get('length') + hashes = meta[target_role].get('hashes') + self.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) - # FIXME def signable(self): - return generate_snapshot_metadata() + # TODO: probably want to generalise this, a @property.getter in Metadata? + expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' + return tuf.formats.build_dict_conforming_to_schema( + tuf.formats.SNAPSHOT_SCHEMA, version=self.version, + expires=expires, meta=self.targets_fileinfo) # Add or update metadata about the targets metadata. def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None): - raise NotImplementedError() + self.targets_fileinfo[f'{rolename}.json'] = tuf.formats.make_metadata_fileinfo(version, length, hashes) class Targets(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): From eb9c56b52dd7ef31c4dd0e56091957927f3041d2 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 09:25:22 +0100 Subject: [PATCH 06/42] tuf.api: fix missing ':' in keys Signed-off-by: Joshua Lock --- tuf/api/keys.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 72f0be8ae0..976e41e6d2 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -57,7 +57,7 @@ class Keyring: def __init__(self, threshold: Threshold, keypairs: Keypairs): if len(keypairs) >= threshold.m: logging.warning(f'{len(keypairs)} >= {threshold.m}') - if len(keypairs) <= threshold.n + if len(keypairs) <= threshold.n: logging.warning(f'{len(keypairs)} <= {threshold.n}') self.threshold = threshold self.keypairs = keypairs @@ -166,4 +166,4 @@ def write_and_read_new_keys(keystore_dir: str, rolename: 
str, threshold: Thresho keypairs.append(keypair) print() - return Keyring(threshold, tuple(keypairs)) \ No newline at end of file + return Keyring(threshold, tuple(keypairs)) From d58a944c8bb1b8659ebe6eee289208bc6cb53279 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 09:28:42 +0100 Subject: [PATCH 07/42] tuf.api: fix imports in metadata Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index e1a23d10a1..298e4f2a0e 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -1,10 +1,9 @@ # 1st-party. -from keys import ( +from tuf.api.keys import ( Keyring, Threshold, get_private_keys_from_keyring, get_public_keys_from_keyring, - sorted_list_of_keyids, write_and_read_new_keys, ) From 2758f482141c0b2738de78df8d2095f853bb294a Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 09:29:52 +0100 Subject: [PATCH 08/42] tuf.api: fix version check in metadata Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 298e4f2a0e..a6c095b0c4 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -40,7 +40,7 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = self.keyring = keyring self.expiration = expiration - assert version > 1, f'{version} < 1' + assert version >= 1, f'{version} < 1' self.version = version # And you would use this method to populate it from a file. 
From bc1134f488f8e45e67a8e6e45b48a57c7dfcb0fe Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 09:30:38 +0100 Subject: [PATCH 09/42] tuf.api: fix loading expiration Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index a6c095b0c4..074ed76b6b 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -15,6 +15,7 @@ # 3rd-party. from dateutil.relativedelta import relativedelta +import iso8601 from securesystemslib.formats import encode_canonical from securesystemslib.keys import create_signature, verify_signature from securesystemslib.util import load_json_file @@ -51,7 +52,8 @@ def read_from_json(self, filename: str) -> None: self.signatures = signable['signatures'] self.signed = signable['signed'] - self.expiration = datetime.strptime(signed['expiration'], '%b %d %Y %I:%M%p') + # TODO: replace with dateutil.parser.parse? + self.expiration = iso8601.parse_date(self.signed['expires']) self.version = self.signed['version'] fn, fn_ver = _strip_version_number(filename, True) From 57c98d45ac570b6947420b9f535a3765c610c0c5 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 09:41:28 +0100 Subject: [PATCH 10/42] WIP tests for tuf.api.metadata Signed-off-by: Joshua Lock --- tests/test_tuf_api.py | 137 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 tests/test_tuf_api.py diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py new file mode 100644 index 0000000000..018c6551e9 --- /dev/null +++ b/tests/test_tuf_api.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python + +# Copyright 2014 - 2017, New York University and the TUF contributors +# SPDX-License-Identifier: MIT OR Apache-2.0 + +""" + + test_tuf_api.py + + + Joshua Lock + + + June 30, 2020. + + + See LICENSE-MIT OR LICENSE for licensing information. 
+ + + Unit tests for tuf.api +""" + +# Help with Python 3 compatibility, where the print statement is a function, an +# implicit relative import is invalid, and the '/' operator performs true +# division. Example: print 'hello world' raises a 'SyntaxError' exception. +from __future__ import print_function +from __future__ import absolute_import +from __future__ import division +from __future__ import unicode_literals + +import unittest +import logging +import tempfile +import shutil +import sys +import errno +import os + +from tuf.api import metadata + +from dateutil.relativedelta import relativedelta +import iso8601 +import six + +logger = logging.getLogger(__name__) + + +class TestTufApi(unittest.TestCase): + @classmethod + def setUpClass(cls): + + # Create a temporary directory to store the repository, metadata, and target + # files. 'temporary_directory' must be deleted in TearDownClass() so that + # temporary files are always removed, even when exceptions occur. + cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) + test_repo_data = os.path.join('repository_data', 'repository') + cls.repo_dir = os.path.join(cls.temporary_directory, 'repository') + shutil.copytree(test_repo_data, cls.repo_dir) + + + + @classmethod + def tearDownClass(cls): + + # Remove the temporary repository directory, which should contain all the + # metadata, targets, and key files generated for the test cases. 
+ shutil.rmtree(cls.temporary_directory) + + + + def test_metadata_base(self): + # Use of Snapshot is arbitrary, we're just testing the base class features + # with real data + md = metadata.Snapshot() + md.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'snapshot.json')) + + self.assertEqual(md.version, 1) + md.bump_version() + self.assertEqual(md.version, 2) + + self.assertEqual(md.expiration, iso8601.parse_date("2030-01-01T00:00:00Z")) + md.bump_expiration() + self.assertEqual(md.expiration, iso8601.parse_date("2030-01-02T00:00:00Z")) + md.bump_expiration(relativedelta(years=1)) + self.assertEqual(md.expiration, iso8601.parse_date("2031-01-02T00:00:00Z")) + + + def test_metadata_snapshot(self): + snapshot = metadata.Snapshot() + snapshot.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'snapshot.json')) + + # Create a dict representing what we expect the updated data to be + fileinfo = snapshot.signed['meta'] + hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} + fileinfo['role1.json']['version'] = 2 + fileinfo['role1.json']['hashes'] = hashes + fileinfo['role1.json']['length'] = 123 + + snapshot.update('role1', 2, 123, hashes) + # snapshot.sign() + # self.assertEqual(snapshot.signed['meta'], fileinfo) + + # snapshot.update() + + # snapshot.signable() + + # snapshot.sign() + + # snapshot.verify() + + # snapshot.write_to_json(os.path.join(cls.temporary_directory, 'api_snapshot.json')) + + + def test_metadata_timestamp(self): + timestamp = metadata.Timestamp() + timestamp.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'timestamp.json')) + + self.assertEqual(timestamp.version, 1) + timestamp.bump_version() + self.assertEqual(timestamp.version, 2) + + self.assertEqual(timestamp.expiration, iso8601.parse_date("2030-01-01T00:00:00Z")) + timestamp.bump_expiration() + self.assertEqual(timestamp.expiration, iso8601.parse_date("2030-01-02T00:00:00Z")) + 
timestamp.bump_expiration(relativedelta(years=1)) + self.assertEqual(timestamp.expiration, iso8601.parse_date("2031-01-02T00:00:00Z")) + + hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'} + fileinfo = timestamp.signed['meta']['snapshot.json'] + fileinfo['hashes'] = hashes + fileinfo['version'] = 2 + fileinfo['length'] = 520 + timestamp.update('snapshot', 2, 520, hashes) + # timestamp.sign() + # self.assertEqual(timestamp.signed['meta'], fileinfo) + + # timestamp.write_to_json() From 11d76e72ddbd1dafab70c35213db30396dcb7e44 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Wed, 1 Jul 2020 17:10:55 +0100 Subject: [PATCH 11/42] tuf.api: WIP implement Timestamp Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 074ed76b6b..4c8b109cbf 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -130,15 +130,21 @@ def write_to_json(self, filename: str) -> None: class Timestamp(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): - super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + super().__init__(consistent_snapshot, expiration, keyring, version) - # FIXME def signable(self): - return generate_timestamp_metadata() + expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' + filedict = self.signed['meta'] + return tuf.formats.build_dict_conforming_to_schema( + tuf.formats.TIMESTAMP_SCHEMA, version=self.version, + expires=expires, meta=filedict) # Update metadata about the snapshot metadata. 
def update(self, rolename: str, version: int, length: int, hashes: JsonDict): - raise NotImplementedError() + fileinfo = self.signed['meta'][f'{rolename}.json'] + fileinfo['version'] = version + fileinfo['length'] = length + fileinfo['hashes'] = hashes class Snapshot(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): From f2861bfd417df30f814cfa25128fdf5e5c2f05f8 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Wed, 1 Jul 2020 18:39:35 -0400 Subject: [PATCH 12/42] much simpler keys Signed-off-by: Trishank Karthik Kuppusamy --- tests/proxy_server.py | 20 ++-- tuf/api/keys.py | 212 ++++++++++++++---------------------------- tuf/api/metadata.py | 66 +++++++------ 3 files changed, 110 insertions(+), 188 deletions(-) diff --git a/tests/proxy_server.py b/tests/proxy_server.py index 3c76043c59..ba293d0a15 100644 --- a/tests/proxy_server.py +++ b/tests/proxy_server.py @@ -374,22 +374,22 @@ def parse_qsl(s): req_header_text = "%s %s %s\n%s" % (req.command, req.path, req.request_version, req.headers) res_header_text = "%s %d %s\n%s" % (res.response_version, res.status, res.reason, res.headers) - print with_color(33, req_header_text) + print(with_color(33, req_header_text)) u = urlparse.urlsplit(req.path) if u.query: query_text = parse_qsl(u.query) - print with_color(32, "==== QUERY PARAMETERS ====\n%s\n" % query_text) + print(with_color(32, "==== QUERY PARAMETERS ====\n%s\n" % query_text)) cookie = req.headers.get('Cookie', '') if cookie: cookie = parse_qsl(re.sub(r';\s*', '&', cookie)) - print with_color(32, "==== COOKIE ====\n%s\n" % cookie) + print(with_color(32, "==== COOKIE ====\n%s\n" % cookie)) auth = req.headers.get('Authorization', '') if auth.lower().startswith('basic'): token = auth.split()[1].decode('base64') - print with_color(31, "==== BASIC AUTH ====\n%s\n" % token) + print(with_color(31, "==== BASIC AUTH ====\n%s\n" % token)) if req_body 
is not None: req_body_text = None @@ -412,14 +412,14 @@ def parse_qsl(s): req_body_text = req_body if req_body_text: - print with_color(32, "==== REQUEST BODY ====\n%s\n" % req_body_text) + print(with_color(32, "==== REQUEST BODY ====\n%s\n" % req_body_text)) - print with_color(36, res_header_text) + print(with_color(36, res_header_text)) cookies = res.headers.getheaders('Set-Cookie') if cookies: cookies = '\n'.join(cookies) - print with_color(31, "==== SET-COOKIE ====\n%s\n" % cookies) + print(with_color(31, "==== SET-COOKIE ====\n%s\n" % cookies)) if res_body is not None: res_body_text = None @@ -440,12 +440,12 @@ def parse_qsl(s): m = re.search(r']*>\s*([^<]+?)\s*', res_body, re.I) if m: h = HTMLParser() - print with_color(32, "==== HTML TITLE ====\n%s\n" % h.unescape(m.group(1).decode('utf-8'))) + print(with_color(32, "==== HTML TITLE ====\n%s\n" % h.unescape(m.group(1).decode('utf-8')))) elif content_type.startswith('text/') and len(res_body) < 1024: res_body_text = res_body if res_body_text: - print with_color(32, "==== RESPONSE BODY ====\n%s\n" % res_body_text) + print(with_color(32, "==== RESPONSE BODY ====\n%s\n" % res_body_text)) def request_handler(self, req, req_body): pass @@ -492,7 +492,7 @@ def test(HandlerClass=ProxyRequestHandler, ServerClass=ThreadingHTTPServer, prot httpd = ServerClass(server_address, HandlerClass) sa = httpd.socket.getsockname() - print "Serving HTTP Proxy on", sa[0], "port", sa[1], "..." + print("Serving HTTP Proxy on", sa[0], "port", sa[1], "...") httpd.serve_forever() diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 976e41e6d2..ccb31e4c33 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -1,169 +1,93 @@ -# Borrowed from https://github.com/cnabio/signy/blob/afba301697df456b363790dc16483408b626a8af/scripts/in-toto/keys.py -# TODO: -# * Make a storage/provider-agnostic (e.g., filesystem, HSM) key management API, like securesystemslib.storage. - # Imports. -# 1st-party. -import os -import shutil - # 2nd-party. 
-from typing import Any, Dict, List, Optional + +from abc import ABC, abstractmethod +from enum import Enum, unique +from typing import Any, List, Optional + +import logging +import os # 3rd-party. from securesystemslib.interface import ( - generate_and_write_ed25519_keypair, - get_password, + import_ecdsa_privatekey_from_file, import_ed25519_privatekey_from_file, - import_ed25519_publickey_from_file, + import_rsa_privatekey_from_file, +) +from securesystemslib.keys import ( + create_signature, + verify_signature, ) -# Utility classes. +# Generic classes. -class Threshold: +@unique +class Algorithm(Enum): + ECDSA = import_ecdsa_privatekey_from_file + ED25519 = import_ed25519_privatekey_from_file + RSA = import_rsa_privatekey_from_file - def __init__(self, m: int = 1, n: int = 1): - assert m > 0, f'{m} <= 0' - assert n > 0, f'{n} <= 0' - assert m <= n, f'{m} > {n}' - self.m = m - self.n = n +class Threshold: -class Keypath: + def __init__(self, min_: int = 1, max_: int = 1): + assert min_ > 0, f'{min_} <= 0' + assert max_ > 0, f'{max_} <= 0' + assert min_ <= max_, f'{min_} > {max_}' + self.min = min_ + self.max = max_ - def __init__(self, private: str, public: str): - assert os.path.isfile(private), private - assert os.path.isfile(public), public - self.private = private - self.public = public +class Key(ABC): -class Key: + @abstractmethod + def __init__(self) -> None: + raise NotImplementedError() - def __init__(self, path: str, obj: Any): - self.path = path - self.obj = obj + @property + @abstractmethod + def keyid(self) -> str: + raise NotImplementedError() -class Keypair: + @abstractmethod + def sign(self, signed: str) -> str: + raise NotImplementedError() - def __init__(self, private: Key, public: Key): - self.private = private - self.public = public + @abstractmethod + def verify(self, signed: str, signature: str) -> bool: + raise NotImplementedError() -Keypairs = List[Keypair] +Keys = List[Key] -class Keyring: +class KeyRing: - def __init__(self, threshold: 
Threshold, keypairs: Keypairs): - if len(keypairs) >= threshold.m: - logging.warning(f'{len(keypairs)} >= {threshold.m}') - if len(keypairs) <= threshold.n: - logging.warning(f'{len(keypairs)} <= {threshold.n}') + def __init__(self, threshold: Threshold, keys: Keys): + if len(keys) >= threshold.min: + logging.warning(f'{len(keys)} >= {threshold.min}') + if len(keys) <= threshold.max: + logging.warning(f'{len(keys)} <= {threshold.max}') self.threshold = threshold - self.keypairs = keypairs + self.keys = keys + +# Specific types of keys, such as those in RAM, or on HSMs (TODO). + +class RAMKey(Key): + + def __init__(self, obj: Any) -> None: # pylint: disable=super-init-not-called + self.__obj = obj + + def keyid(self) -> str: + return self.__obj['keyid'] + + def sign(self, signed: str) -> str: + return create_signature(self.__obj, signed) + + def verify(self, signed: str, signature: str) -> bool: + return verify_signature(self.__obj, signature, signed) -# Useful for securesytemslib. -KeyDict = Dict[str, Any] # Utility functions. -def get_new_private_keypath(keystore_dir: str, rolename: str, i : int = 1) -> str: - return os.path.join(keystore_dir, f'{rolename}_ed25519_key_{i}') - -def get_public_keypath(private_keypath: str) -> str: - # this is the tuf filename convention at the time of writing. 
- return f'{private_keypath}.pub' - -def get_private_keys_from_keyring(keyring: Keyring) -> KeyDict: - privkeys = {} - - for keypair in keyring.keypairs: - privkey = keypair.private.obj - keyid = privkey['keyid'] - assert keyid not in privkeys - privkeys[keyid] = privkey - - return privkeys - -def get_public_keys_from_keyring(keyring: Keyring) -> KeyDict: - pubkeys = {} - - for keypair in keyring.keypairs: - pubkey = keypair.public.obj - keyid = pubkey['keyid'] - assert keyid not in pubkeys - pubkeys[keyid] = pubkey - - return pubkeys - -def write_keypair(keystore_dir: str, rolename: str, i: int = 1, n: int = 1, passphrase: Optional[str] = None) -> Keypath: - private_keypath = get_new_private_keypath(keystore_dir, rolename, i) - assert not os.path.isfile(private_keypath) - public_keypath = get_public_keypath(private_keypath) - assert not os.path.isfile(public_keypath) - - # Make the keystore directory, WR-only by self, if not already there. - os.makedirs(keystore_dir, mode=0o700, exist_ok=True) - - # FIXME: do not assume Ed25519 - generate_and_write_ed25519_keypair(private_keypath, password=passphrase) - - return Keypath(private_keypath, public_keypath) - -def read_keypair(keypath: Keypath, passphrase: Optional[str] = None) -> Keypair: - private_keypath = keypath.private - private_key_obj = import_ed25519_privatekey_from_file(keypath.private, password=passphrase) - private_key = Key(private_keypath, private_key_obj) - - # and its corresponding public key. - public_keypath = keypath.public - public_key_obj = import_ed25519_publickey_from_file(keypath.public) - public_key = Key(public_keypath, public_key_obj) - - return Keypair(private_key, public_key) - -def rename_keys_to_match_keyid(keystore_dir: str, keypair: Keypair) -> None: - ''' - - Rename public / private keys to match their keyid, so that it is easy - to later find public keys on the repository, or private keys on disk. 
- Also see https://github.com/theupdateframework/tuf/issues/573 - ''' - - keyid = keypair.public.obj['keyid'] - - # Rename the private key filename to match the keyid. - assert os.path.exists(keystore_dir), keystore_dir - new_private_keypath = os.path.join(keystore_dir, keyid) - # Move the key to the new filename. - assert not os.path.isfile(new_private_keypath), new_private_keypath - shutil.move(keypair.private.path, new_private_keypath) - # Update the path to the key. - keypair.private.path = new_private_keypath - - # Rename the public key filename to match the keyid. - new_public_keypath = get_public_keypath(new_private_keypath) - # Move the key to the new filename. - assert not os.path.isfile(new_public_keypath), new_public_keypath - shutil.move(keypair.public.path, new_public_keypath) - # Update the path to the key. - keypair.public.path = new_public_keypath - -def write_and_read_new_keys(keystore_dir: str, rolename: str, threshold: Threshold) -> Keyring: - keypairs = [] - - for i in range(1, threshold.n + 1): - print(f'Writing key {i}/{threshold.n} for the "{rolename}" rolename...') - passphrase = get_password( - prompt='Please enter a NON-EMPTY passphrase to ENCRYPT this key: ', - confirm=True - ) - keypath = write_keypair(keystore_dir, rolename, i, threshold.n, passphrase) - keypair = read_keypair(keypath, passphrase) - # Rename the private and public keys to match the keyid instead. - # Why? So that we know how to find keys later on repository / disk. - rename_keys_to_match_keyid(keystore_dir, keypair) - keypairs.append(keypair) - print() - - return Keyring(threshold, tuple(keypairs)) +def read_key(filename: str, algorithm: str, passphrase: Optional[str] = None) -> Key: + handler = Algorithm[algorithm] + obj = handler(filename, password=passphrase) + return RAMKey(obj) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 4c8b109cbf..b283a8dc62 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -1,11 +1,7 @@ +# Imports. + # 1st-party. 
-from tuf.api.keys import ( - Keyring, - Threshold, - get_private_keys_from_keyring, - get_public_keys_from_keyring, - write_and_read_new_keys, -) +from tuf.api.keys import KeyRing # 2nd-party. from datetime import datetime @@ -15,9 +11,7 @@ # 3rd-party. from dateutil.relativedelta import relativedelta -import iso8601 from securesystemslib.formats import encode_canonical -from securesystemslib.keys import create_signature, verify_signature from securesystemslib.util import load_json_file import tuf.formats from tuf.repository_lib import ( @@ -28,14 +22,17 @@ generate_timestamp_metadata, ) +import iso8601 + # Types. + JsonDict = Dict[str, Any] # Classes. class Metadata: # By default, a Metadata would be a rather empty one. - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(), keyring: Optional[Keyring] = None, version: int = 1) -> None: + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(), keyring: Optional[KeyRing] = None, version: int = 1) -> None: self.consistent_snapshot = consistent_snapshot self.keyring = keyring @@ -80,7 +77,7 @@ def bump_expiration(self, delta: relativedelta = relativedelta(days=1)) -> None: def signed(self) -> str: return encode_canonical(self.signable['signed']).encode('utf-8') - def signatures(self) -> List: + def signatures(self) -> List[JsonDict]: return self.signable['signatures'] def sign(self) -> JsonDict: @@ -92,15 +89,14 @@ def update_signature(signatures, keyid, signature): signatures[idx] = keyid_signature updated = True if not updated: - signatures.append({'keyid':keyid, 'sig':signature}) + signatures.append(keyid_signature) signed = self.signed signatures = self.signatures - for keypair in self.keyring.keypairs: - signature = create_signature(keypair.private.obj, signed) - keyid = keypair.private.obj['keyid'] - update_signature(signatures, keyid, signature) + for key in self.keyring.keys: + signature = key.sign(signed) + 
update_signature(signatures, key.keyid, signature) self.signatures = signatures return {'signed': signed, 'signatures': signatures} @@ -108,28 +104,30 @@ def update_signature(signatures, keyid, signature): def verify(self) -> bool: signed = self.signed signatures = self.signatures - good_signatures = 0 - - for keypair in self.keyring.keypairs: - try: - keyid = keypair.public.obj['keyid'] - for signature in signatures: - if signature['keyid'] == keyid: - if verify_signature(keypair.public.obj, signature, signed): - good_signatures += 1 + verified_keyids = {} + + for signature in signatures: + for key in self.keyring.keys: + keyid = key.keyid + if keyid == signature['keyid']: + try: + verified = key.verify(signed, signature) + except: + logging.exception(f'Could not verify signature for key {keyid}') + continue + else: + # Avoid https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-6174 + verified_keyids |= keyid break - except: - logging.warning(f'Could not verify signature for key {keyid}') - continue - return good_signatures >= self.keyring.threshold.m + return len(verified_keyids) >= self.keyring.threshold.min def write_to_json(self, filename: str) -> None: with open(filename, 'r+b') as f: f.write(_get_written_metadata(self.sign())) class Timestamp(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) def signable(self): @@ -140,14 +138,14 @@ def signable(self): expires=expires, meta=filedict) # Update metadata about the snapshot metadata. 
- def update(self, rolename: str, version: int, length: int, hashes: JsonDict): - fileinfo = self.signed['meta'][f'{rolename}.json'] + def update(self, version: int, length: int, hashes: JsonDict): + fileinfo = self.signed['meta']['snapshot.json'] fileinfo['version'] = version fileinfo['length'] = length fileinfo['hashes'] = hashes class Snapshot(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) self.targets_fileinfo = {} @@ -172,7 +170,7 @@ def update(self, rolename: str, version: int, length: Optional[int] = None, hash self.targets_fileinfo[f'{rolename}.json'] = tuf.formats.make_metadata_fileinfo(version, length, hashes) class Targets(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: Keyring = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) # FIXME From 37a235f97c2987b9dae04c07d9455736962d4695 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 11:06:32 +0300 Subject: [PATCH 13/42] tuf.api: implement metadata.Targtes.read_from_json() Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index b283a8dc62..f285f4dbdb 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -171,7 +171,15 @@ def update(self, rolename: str, version: int, length: Optional[int] = None, hash class Targets(Metadata): def __init__(self, 
consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): - super().__init__(consistent_snapshot, expiration, relativedelta, keyring, version) + super().__init__(consistent_snapshot, expiration, keyring, version) + self.targets = {} + self.delegations = {} + + + def read_from_json(self, filename: str) -> None: + super().read_from_json(filename) + self.targets = self.signed['targets'] + self.delegations = self.signed.get('delegations', None) # FIXME def signable(self): From db0f8a73a70e2dd7103769977a9f939a500f1f94 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 11:09:25 +0300 Subject: [PATCH 14/42] tuf.api: implement metadata.Targets.update() Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index f285f4dbdb..d4aeddbe3d 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -188,4 +188,4 @@ def signable(self): # Add or update metadata about the target. # TODO: how to handle writing consistent targets? def update(self, filename: str, fileinfo: JsonDict): - raise NotImplementedError() + self.targets[filename] = fileinfo From 46977f977ee998a35ede0268d0af715848e02af0 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 11:10:03 +0300 Subject: [PATCH 15/42] tux.api: implement metadata.Targets.signable() Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index d4aeddbe3d..20fb4ce457 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -183,7 +183,26 @@ def read_from_json(self, filename: str) -> None: # FIXME def signable(self): - return generate_targets_metadata() + # TODO: probably want to generalise this, a @property.getter in Metadata? 
+ expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' + if self.delegations is not None: + return tuf.formats.build_dict_conforming_to_schema( + tuf.formats.TARGETS_SCHEMA, + version=self.version, + expires=expires, + targets=self.targets, + delegations=self.delegations) + else: + return tuf.formats.build_dict_conforming_to_schema( + tuf.formats.TARGETS_SCHEMA, + version=self.version, + expires=expires, + targets=self.targets) + # TODO: As an alternative to the odd if/else above where we decide whether or + # not to include the delegations argument based on whether or not it is + # None, consider instead adding a check in + # build_dict_conforming_to_schema that skips a keyword if that keyword + # is optional in the schema and the value passed in is set to None.... # Add or update metadata about the target. # TODO: how to handle writing consistent targets? From 3e022aae32dd0e947da3064b9275f0290cf71b45 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 11:26:55 +0300 Subject: [PATCH 16/42] Skip an optional keyword in the schema Skip a keyword if it is optional in the schema and the value passed in is set to None. Signed-off-by: Teodora Sechkova --- tuf/formats.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tuf/formats.py b/tuf/formats.py index 4dfb6afa4b..26dba7ad0e 100755 --- a/tuf/formats.py +++ b/tuf/formats.py @@ -563,7 +563,13 @@ def build_dict_conforming_to_schema(schema, **kwargs): for key, element_type in schema._required: #pylint: disable=protected-access if key in dictionary: - # If the field has been provided, proceed normally. + + # Skip a keyword if it is optional in the schema and the value passed in + # is set to None + if dictionary[key] is None and isinstance(element_type, SCHEMA.Optional): + dictionary.pop(key) + + # else if the field has been provided, proceed normally. 
continue elif isinstance(element_type, SCHEMA.Optional): From 916055aa54452283831db0fc0d351e368c8b4ca0 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 11:28:13 +0300 Subject: [PATCH 17/42] tuf.api: simplify metadata.Targets.signable() Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 25 +++++++------------------ 1 file changed, 7 insertions(+), 18 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 20fb4ce457..59e4f136ba 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -185,24 +185,13 @@ def read_from_json(self, filename: str) -> None: def signable(self): # TODO: probably want to generalise this, a @property.getter in Metadata? expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' - if self.delegations is not None: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - version=self.version, - expires=expires, - targets=self.targets, - delegations=self.delegations) - else: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - version=self.version, - expires=expires, - targets=self.targets) - # TODO: As an alternative to the odd if/else above where we decide whether or - # not to include the delegations argument based on whether or not it is - # None, consider instead adding a check in - # build_dict_conforming_to_schema that skips a keyword if that keyword - # is optional in the schema and the value passed in is set to None.... + return tuf.formats.build_dict_conforming_to_schema( + tuf.formats.TARGETS_SCHEMA, + version=self.version, + expires=expires, + targets=self.targets, + delegations=self.delegations) + # Add or update metadata about the target. # TODO: how to handle writing consistent targets? 
From 1fbff557adec35de1fc80894cca1b2ea982c1960 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 14:06:20 +0300 Subject: [PATCH 18/42] tuf.api: add basic schema checks in read_from_json Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 59e4f136ba..5ee1c0d12a 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -44,8 +44,8 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = # And you would use this method to populate it from a file. def read_from_json(self, filename: str) -> None: signable = load_json_file(filename) + tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - # TODO: use some basic schema checks self.signatures = signable['signatures'] self.signed = signable['signed'] @@ -130,6 +130,10 @@ class Timestamp(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) + def read_from_json(self, filename: str) -> None: + super().read_from_json(filename) + tuf.formats.TIMESTAMP_SCHEMA.check_match(self.signed) + def signable(self): expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' filedict = self.signed['meta'] @@ -151,6 +155,8 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = def read_from_json(self, filename: str) -> None: super().read_from_json(filename) + tuf.formats.SNAPSHOT_SCHEMA.check_match(self.signed) + meta = self.signed['meta'] for target_role in meta: version = meta[target_role]['version'] @@ -175,13 +181,13 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = self.targets = {} self.delegations = {} - def read_from_json(self, filename: str) -> None: super().read_from_json(filename) + 
tuf.formats.TARGETS_SCHEMA.check_match(self.signed) + self.targets = self.signed['targets'] self.delegations = self.signed.get('delegations', None) - # FIXME def signable(self): # TODO: probably want to generalise this, a @property.getter in Metadata? expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' From 0ca471ed2ad151c61647400e3ac07670dc621b9e Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Thu, 2 Jul 2020 15:07:07 +0300 Subject: [PATCH 19/42] tuf.api: use StorageBackendInterface Signed-off-by: Teodora Sechkova --- tuf/api/metadata.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 5ee1c0d12a..695a468d83 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -8,11 +8,13 @@ from typing import Any, Dict, List, Optional import json +import tempfile # 3rd-party. from dateutil.relativedelta import relativedelta from securesystemslib.formats import encode_canonical -from securesystemslib.util import load_json_file +from securesystemslib.util import load_json_file, persist_temp_file +from securesystemslib.storage import StorageBackendInterface import tuf.formats from tuf.repository_lib import ( _get_written_metadata, @@ -42,8 +44,8 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = self.version = version # And you would use this method to populate it from a file. 
- def read_from_json(self, filename: str) -> None: - signable = load_json_file(filename) + def read_from_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: + signable = load_json_file(filename, storage_backend) tuf.formats.SIGNABLE_SCHEMA.check_match(signable) self.signatures = signable['signatures'] @@ -122,9 +124,10 @@ def verify(self) -> bool: return len(verified_keyids) >= self.keyring.threshold.min - def write_to_json(self, filename: str) -> None: - with open(filename, 'r+b') as f: + def write_to_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: + with tempfile.TemporaryFile() as f: f.write(_get_written_metadata(self.sign())) + persist_temp_file(f, filename, storage_backend) class Timestamp(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): From eb93fe133e89210accaed7bf22c00a4168c514cc Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Thu, 2 Jul 2020 22:35:11 +0100 Subject: [PATCH 20/42] tuf.api: make expires a property Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 695a468d83..92a44baac4 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -70,6 +70,10 @@ def signable(self) -> JsonDict: """ raise NotImplementedError() + @property + def expires(self) -> str: + return self.expiration.replace(tzinfo=None).isoformat()+'Z' + def bump_version(self) -> None: self.version = self.version + 1 @@ -138,11 +142,10 @@ def read_from_json(self, filename: str) -> None: tuf.formats.TIMESTAMP_SCHEMA.check_match(self.signed) def signable(self): - expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' filedict = self.signed['meta'] return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TIMESTAMP_SCHEMA, version=self.version, - expires=expires, 
meta=filedict) + expires=self.expires, meta=filedict) # Update metadata about the snapshot metadata. def update(self, version: int, length: int, hashes: JsonDict): @@ -168,11 +171,9 @@ def read_from_json(self, filename: str) -> None: self.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) def signable(self): - # TODO: probably want to generalise this, a @property.getter in Metadata? - expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' return tuf.formats.build_dict_conforming_to_schema( tuf.formats.SNAPSHOT_SCHEMA, version=self.version, - expires=expires, meta=self.targets_fileinfo) + expires=self.expires, meta=self.targets_fileinfo) # Add or update metadata about the targets metadata. def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None): @@ -192,12 +193,10 @@ def read_from_json(self, filename: str) -> None: self.delegations = self.signed.get('delegations', None) def signable(self): - # TODO: probably want to generalise this, a @property.getter in Metadata? - expires = self.expiration.replace(tzinfo=None).isoformat()+'Z' return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TARGETS_SCHEMA, version=self.version, - expires=expires, + expires=self.expires, targets=self.targets, delegations=self.delegations) From f8d8bb546240cd1d3c6ca8e060b0388e686a5dd7 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Fri, 3 Jul 2020 11:55:44 +0100 Subject: [PATCH 21/42] tuf.api: convert keys.Algorithm to dict Enum isn't available in Python 2.7 and accessing the enum members as items, i.e. Algorithm['RSA'] throws KeyError when the member is a method. Work around both of these issues by converting to a dict. 
Signed-off-by: Joshua Lock --- tuf/api/keys.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index ccb31e4c33..0ab0f21ac0 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -3,7 +3,6 @@ # 2nd-party. from abc import ABC, abstractmethod -from enum import Enum, unique from typing import Any, List, Optional import logging @@ -22,11 +21,11 @@ # Generic classes. -@unique -class Algorithm(Enum): - ECDSA = import_ecdsa_privatekey_from_file - ED25519 = import_ed25519_privatekey_from_file - RSA = import_rsa_privatekey_from_file +Algorithm = { + 'ECDSA': import_ecdsa_privatekey_from_file, + 'ED25519': import_ed25519_privatekey_from_file, + 'RSA': import_rsa_privatekey_from_file + } class Threshold: From 3cc98ae2d557323c61358241233ded218819192a Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Thu, 2 Jul 2020 22:35:54 +0100 Subject: [PATCH 22/42] tuf.api: make signed & signatures properties of Metadata Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 51 ++++++++++++++++++++++++--------------------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 92a44baac4..4914926818 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -45,15 +45,15 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = # And you would use this method to populate it from a file. def read_from_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: - signable = load_json_file(filename, storage_backend) + self._signable = load_json_file(filename, storage_backend) tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - self.signatures = signable['signatures'] - self.signed = signable['signed'] + self._signatures = signable['signatures'] + self._signed = signable['signed'] # TODO: replace with dateutil.parser.parse? 
- self.expiration = iso8601.parse_date(self.signed['expires']) - self.version = self.signed['version'] + self.expiration = iso8601.parse_date(self._signed['expires']) + self.version = self._signed['version'] fn, fn_ver = _strip_version_number(filename, True) if fn_ver: @@ -70,6 +70,14 @@ def signable(self) -> JsonDict: """ raise NotImplementedError() + @property + def signed(self) -> str: + return encode_canonical(self.signable['signed']).encode('utf-8') + + @property + def signatures(self) -> List[JsonDict]: + return self.signable.get('signatures', {}) + @property def expires(self) -> str: return self.expiration.replace(tzinfo=None).isoformat()+'Z' @@ -80,12 +88,6 @@ def bump_version(self) -> None: def bump_expiration(self, delta: relativedelta = relativedelta(days=1)) -> None: self.expiration = self.expiration + delta - def signed(self) -> str: - return encode_canonical(self.signable['signed']).encode('utf-8') - - def signatures(self) -> List[JsonDict]: - return self.signable['signatures'] - def sign(self) -> JsonDict: def update_signature(signatures, keyid, signature): updated = False @@ -104,7 +106,7 @@ def update_signature(signatures, keyid, signature): signature = key.sign(signed) update_signature(signatures, key.keyid, signature) - self.signatures = signatures + self._signatures = signatures return {'signed': signed, 'signatures': signatures} def verify(self) -> bool: @@ -136,23 +138,26 @@ def write_to_json(self, filename: str, storage_backend: StorageBackendInterface class Timestamp(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) + self.snapshot_fileinfo = {} def read_from_json(self, filename: str) -> None: super().read_from_json(filename) + self.snapshot_fileinfo = self._signed['meta'] tuf.formats.TIMESTAMP_SCHEMA.check_match(self.signed) - def signable(self): - filedict = 
self.signed['meta'] + @property + def signable(self) -> JsonDict: return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TIMESTAMP_SCHEMA, version=self.version, - expires=self.expires, meta=filedict) + expires=self.expires, meta=self.snapshot_fileinfo) # Update metadata about the snapshot metadata. def update(self, version: int, length: int, hashes: JsonDict): - fileinfo = self.signed['meta']['snapshot.json'] + fileinfo = self.snapshot_fileinfo.get('snapshot.json', {}) fileinfo['version'] = version fileinfo['length'] = length fileinfo['hashes'] = hashes + self.snapshot_fileinfo['snapshot.json'] = fileinfo class Snapshot(Metadata): def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): @@ -161,15 +166,15 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = def read_from_json(self, filename: str) -> None: super().read_from_json(filename) - tuf.formats.SNAPSHOT_SCHEMA.check_match(self.signed) - - meta = self.signed['meta'] + meta = self._signed['meta'] for target_role in meta: version = meta[target_role]['version'] length = meta[target_role].get('length') hashes = meta[target_role].get('hashes') self.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) + tuf.formats.SNAPSHOT_SCHEMA.check_match(self.signed) + @property def signable(self): return tuf.formats.build_dict_conforming_to_schema( tuf.formats.SNAPSHOT_SCHEMA, version=self.version, @@ -187,11 +192,11 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = def read_from_json(self, filename: str) -> None: super().read_from_json(filename) + self.targets = self._signed['targets'] + self.delegations = self._signed.get('delegations', {}) tuf.formats.TARGETS_SCHEMA.check_match(self.signed) - self.targets = self.signed['targets'] - self.delegations = self.signed.get('delegations', None) - + @property def signable(self): 
return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TARGETS_SCHEMA, @@ -200,8 +205,6 @@ def signable(self): targets=self.targets, delegations=self.delegations) - # Add or update metadata about the target. - # TODO: how to handle writing consistent targets? def update(self, filename: str, fileinfo: JsonDict): self.targets[filename] = fileinfo From 1daefa456b8a565fcd55c3432ec72d2180bae065 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Thu, 2 Jul 2020 23:06:13 +0100 Subject: [PATCH 23/42] tuf.api: instantiate objects from the JSON Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 70 ++++++++++++++++++++++++++++++--------------- 1 file changed, 47 insertions(+), 23 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 4914926818..53a75d014a 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -43,24 +43,37 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = assert version >= 1, f'{version} < 1' self.version = version + self._signed = {} + self._signatures = [] + # And you would use this method to populate it from a file. - def read_from_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: - self._signable = load_json_file(filename, storage_backend) + @classmethod + def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface = None) -> None: + signable = load_json_file(filename, storage_backend) tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - self._signatures = signable['signatures'] - self._signed = signable['signed'] + signatures = signable['signatures'] + signed = signable['signed'] # TODO: replace with dateutil.parser.parse? 
- self.expiration = iso8601.parse_date(self._signed['expires']) - self.version = self._signed['version'] + expiration = iso8601.parse_date(signed['expires']) + version = signed['version'] fn, fn_ver = _strip_version_number(filename, True) if fn_ver: assert fn_ver == self.version, f'{fn_ver} != {self.version}' - self.consistent_snapshot = True + consistent_snapshot = True else: - self.consistent_snapshot = False + consistent_snapshot = False + + metadata = cls(consistent_snapshot=consistent_snapshot, + expiration=expiration, + version=version) + + metadata._signatures = signatures + metadata._signed = signed + + return metadata @property def signable(self) -> JsonDict: @@ -100,7 +113,7 @@ def update_signature(signatures, keyid, signature): signatures.append(keyid_signature) signed = self.signed - signatures = self.signatures + signatures = self._signatures for key in self.keyring.keys: signature = key.sign(signed) @@ -140,10 +153,14 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = super().__init__(consistent_snapshot, expiration, keyring, version) self.snapshot_fileinfo = {} - def read_from_json(self, filename: str) -> None: - super().read_from_json(filename) - self.snapshot_fileinfo = self._signed['meta'] - tuf.formats.TIMESTAMP_SCHEMA.check_match(self.signed) + @classmethod + def read_from_json(cls, filename: str) -> None: + md = Metadata.read_from_json(filename) + timestamp = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) + timestamp.snapshot_fileinfo = md._signed['meta'] + tuf.formats.TIMESTAMP_SCHEMA.check_match(timestamp.signed) + timestamp._signatures = md._signatures + return timestamp @property def signable(self) -> JsonDict: @@ -164,15 +181,19 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = super().__init__(consistent_snapshot, expiration, keyring, version) self.targets_fileinfo = {} - def read_from_json(self, filename: str) -> None: - 
super().read_from_json(filename) - meta = self._signed['meta'] + @classmethod + def read_from_json(cls, filename: str) -> None: + md = Metadata.read_from_json(filename) + snapshot = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) + meta = md._signed['meta'] for target_role in meta: version = meta[target_role]['version'] length = meta[target_role].get('length') hashes = meta[target_role].get('hashes') - self.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) - tuf.formats.SNAPSHOT_SCHEMA.check_match(self.signed) + snapshot.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) + tuf.formats.SNAPSHOT_SCHEMA.check_match(snapshot.signed) + snapshot._signatures = md._signatures + return snapshot @property def signable(self): @@ -190,11 +211,14 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = self.targets = {} self.delegations = {} - def read_from_json(self, filename: str) -> None: - super().read_from_json(filename) - self.targets = self._signed['targets'] - self.delegations = self._signed.get('delegations', {}) - tuf.formats.TARGETS_SCHEMA.check_match(self.signed) + @classmethod + def read_from_json(cls, filename: str) -> None: + targets = Metadata.read_from_json(filename) + targets.targets = self._signed['targets'] + targets.delegations = self._signed.get('delegations', {}) + tuf.formats.TARGETS_SCHEMA.check_match(targets.signed) + targets._signatures = md._signatures + return targets @property def signable(self): From 0ecdfba2290a725f74f24df677f161c1b48f2ba7 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Fri, 3 Jul 2020 12:05:45 +0100 Subject: [PATCH 24/42] tuf.api: shuffle up signed/signatures/signable interface * Add signed_bytes property to return a canonicalised version of signed * Make the signed property responsible for converting the object into the dict format expected of the metadata, requiring a per-class implementation * Make 
signable a generic base-class property which returns a dict of signatures and signed With these changes we can load and verify metadata with the low-level API! Signed-off-by: Joshua Lock --- tuf/api/metadata.py | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 53a75d014a..f1237b927f 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -77,19 +77,20 @@ def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface @property def signable(self) -> JsonDict: - """ - To be overridden by the inheriting class. - The idea is to serialize this object into the signable we expect. - """ - raise NotImplementedError() + return {"signatures": self.signatures, + "signed": self.signed} + + @property + def signed_bytes(self) -> bytes: + return encode_canonical(self.signed).encode('UTF-8') @property def signed(self) -> str: - return encode_canonical(self.signable['signed']).encode('utf-8') + raise NotImplementedError @property def signatures(self) -> List[JsonDict]: - return self.signable.get('signatures', {}) + return self._signatures @property def expires(self) -> str: @@ -112,27 +113,28 @@ def update_signature(signatures, keyid, signature): if not updated: signatures.append(keyid_signature) - signed = self.signed + signed_bytes = self.signed_bytes signatures = self._signatures for key in self.keyring.keys: - signature = key.sign(signed) + signature = key.sign(signed_bytes) update_signature(signatures, key.keyid, signature) self._signatures = signatures - return {'signed': signed, 'signatures': signatures} + return self.signable def verify(self) -> bool: - signed = self.signed + signed_bytes = self.signed_bytes signatures = self.signatures verified_keyids = {} for signature in signatures: + # TODO: handle an empty keyring for key in self.keyring.keys: keyid = key.keyid if keyid == signature['keyid']: try: - verified = key.verify(signed, signature) + 
verified = key.verify(signed_bytes, signature) except: logging.exception(f'Could not verify signature for key {keyid}') continue @@ -145,7 +147,7 @@ def verify(self) -> bool: def write_to_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: with tempfile.TemporaryFile() as f: - f.write(_get_written_metadata(self.sign())) + f.write(_get_written_metadata(self.sign()).encode_canonical()) persist_temp_file(f, filename, storage_backend) class Timestamp(Metadata): @@ -163,7 +165,7 @@ def read_from_json(cls, filename: str) -> None: return timestamp @property - def signable(self) -> JsonDict: + def signed(self) -> JsonDict: return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TIMESTAMP_SCHEMA, version=self.version, expires=self.expires, meta=self.snapshot_fileinfo) @@ -196,7 +198,7 @@ def read_from_json(cls, filename: str) -> None: return snapshot @property - def signable(self): + def signed(self): return tuf.formats.build_dict_conforming_to_schema( tuf.formats.SNAPSHOT_SCHEMA, version=self.version, expires=self.expires, meta=self.targets_fileinfo) @@ -221,7 +223,7 @@ def read_from_json(cls, filename: str) -> None: return targets @property - def signable(self): + def signed(self): return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TARGETS_SCHEMA, version=self.version, From 34a0680947ab5e0afc85572e438b40bcb98cd98a Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Thu, 2 Jul 2020 23:20:57 +0100 Subject: [PATCH 25/42] More tests tuf.api and verify data! 
Signed-off-by: Joshua Lock --- tests/test_tuf_api.py | 55 +++++++++++++++++++++++++++++++++---------- 1 file changed, 43 insertions(+), 12 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 018c6551e9..411704144d 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -37,6 +37,7 @@ import os from tuf.api import metadata +from tuf.api import keys from dateutil.relativedelta import relativedelta import iso8601 @@ -56,6 +57,9 @@ def setUpClass(cls): test_repo_data = os.path.join('repository_data', 'repository') cls.repo_dir = os.path.join(cls.temporary_directory, 'repository') shutil.copytree(test_repo_data, cls.repo_dir) + test_repo_keys = os.path.join('repository_data', 'keystore') + cls.keystore_dir = os.path.join(cls.temporary_directory, 'keystore') + shutil.copytree(test_repo_keys, cls.keystore_dir) @@ -68,11 +72,32 @@ def tearDownClass(cls): + def _load_key_ring(self): + key_list = [] + root_key = keys.read_key(os.path.join(self.keystore_dir, 'root_key'), + 'RSA', 'password') + key_list.append(root_key) + + for key_file in os.listdir(self.keystore_dir): + if key_file.endswith('.pub'): + # ignore public keys + continue + + if key_file.startswith('root_key'): + # root key is loaded + continue + + key = keys.read_key(os.path.join(self.keystore_dir, key_file), 'ED25519', + 'password') + key_list.append(key) + threshold = keys.Threshold(1, 1) + return keys.KeyRing(threshold=threshold, keys=key_list) + def test_metadata_base(self): # Use of Snapshot is arbitrary, we're just testing the base class features # with real data - md = metadata.Snapshot() - md.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'snapshot.json')) + snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json') + md = metadata.Snapshot.read_from_json(snapshot_path) self.assertEqual(md.version, 1) md.bump_version() @@ -86,8 +111,12 @@ def test_metadata_base(self): def test_metadata_snapshot(self): - snapshot = metadata.Snapshot() - 
snapshot.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'snapshot.json')) + snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json') + snapshot = metadata.Snapshot.read_from_json(snapshot_path) + + key_ring = self._load_key_ring() + snapshot.keyring = key_ring + snapshot.verify() # Create a dict representing what we expect the updated data to be fileinfo = snapshot.signed['meta'] @@ -97,10 +126,7 @@ def test_metadata_snapshot(self): fileinfo['role1.json']['length'] = 123 snapshot.update('role1', 2, 123, hashes) - # snapshot.sign() - # self.assertEqual(snapshot.signed['meta'], fileinfo) - - # snapshot.update() + self.assertEqual(snapshot.signed['meta'], fileinfo) # snapshot.signable() @@ -112,8 +138,12 @@ def test_metadata_snapshot(self): def test_metadata_timestamp(self): - timestamp = metadata.Timestamp() - timestamp.read_from_json(os.path.join(self.repo_dir, 'metadata.staged', 'timestamp.json')) + timestamp_path = os.path.join(self.repo_dir, 'metadata', 'timestamp.json') + timestamp = metadata.Timestamp.read_from_json(timestamp_path) + + key_ring = self._load_key_ring() + timestamp.keyring = key_ring + timestamp.verify() self.assertEqual(timestamp.version, 1) timestamp.bump_version() @@ -130,8 +160,9 @@ def test_metadata_timestamp(self): fileinfo['hashes'] = hashes fileinfo['version'] = 2 fileinfo['length'] = 520 - timestamp.update('snapshot', 2, 520, hashes) + timestamp.update(2, 520, hashes) + self.assertEqual(timestamp.signed['meta']['snapshot.json'], fileinfo) + # timestamp.sign() - # self.assertEqual(timestamp.signed['meta'], fileinfo) # timestamp.write_to_json() From 3c5e312e602b54a48ccbc36e06d698bac199cafb Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Fri, 3 Jul 2020 18:04:43 +0300 Subject: [PATCH 26/42] WIP added tuf.api keys tests Signed-off-by: Teodora Sechkova --- tests/test_tuf_api.py | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/tests/test_tuf_api.py 
b/tests/test_tuf_api.py index 411704144d..c4f15a1550 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -166,3 +166,44 @@ def test_metadata_timestamp(self): # timestamp.sign() # timestamp.write_to_json() + +def test_Threshold(self): + # test default values + keys.Threshold() + # test correct arguments + keys.Threshold(min_=4, max_=5) + + # test incorrect input + # TODO raise sslib.FormatError or ValueError instead of AssertionErrors + self.assertRaises(AssertionError, keys.Threshold, 5, 4) + self.assertRaises(AssertionError, keys.Threshold, 0, 5) + self.assertRaises(AssertionError, keys.Threshold, 5, 0) + + +def test_KeyRing(self): + key_list = [] + root_key = keys.read_key(os.path.join(self.keystore_dir, 'root_key'), + 'RSA', 'password') + root_key2 = keys.read_key(os.path.join(self.keystore_dir, 'root_key2'), + 'ED25519', 'password') + key_list.append(root_key) + key_list.append(root_key2) + threshold = keys.Threshold() + keyring = keys.KeyRing(threshold, key_list) + self.assertEqual(keyring.threshold, threshold) + self.assertEqual(keyring.keys, key_list) + + +def test_read_key(self): + filename = os.path.join(self.keystore_dir, 'root_key') + algorithm = 'RSA' + passphrase = 'password' + + self.assertTrue(isinstance(keys.read_key(filename, algorithm, passphrase), keys.RAMKey)) + +# TODO: +# def test_RAMKey(self): + +# Run unit test. +if __name__ == '__main__': + unittest.main() From fd5732a0245cf0110bc47c9ccc2a4fc5232879de Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Mon, 6 Jul 2020 23:20:52 +0100 Subject: [PATCH 27/42] tuf.api: treat all datetime's as UTC We don't capture timezone information in the metadata, therefore we should not capture it in the interfaces. Ensure we remove timezone information from any datetime objects when they are assigned to the expiration property of a Metadata object. 
Signed-off-by: Joshua Lock --- tests/test_tuf_api.py | 18 ++++++++++++------ tuf/api/metadata.py | 20 +++++++++++++++----- 2 files changed, 27 insertions(+), 11 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index c4f15a1550..e9796cab48 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -103,11 +103,14 @@ def test_metadata_base(self): md.bump_version() self.assertEqual(md.version, 2) - self.assertEqual(md.expiration, iso8601.parse_date("2030-01-01T00:00:00Z")) + self.assertEqual(md.expiration, + iso8601.parse_date("2030-01-01").replace(tzinfo=None)) md.bump_expiration() - self.assertEqual(md.expiration, iso8601.parse_date("2030-01-02T00:00:00Z")) + self.assertEqual(md.expiration, + iso8601.parse_date("2030-01-02").replace(tzinfo=None)) md.bump_expiration(relativedelta(years=1)) - self.assertEqual(md.expiration, iso8601.parse_date("2031-01-02T00:00:00Z")) + self.assertEqual(md.expiration, + iso8601.parse_date("2031-01-02").replace(tzinfo=None)) def test_metadata_snapshot(self): @@ -149,11 +152,14 @@ def test_metadata_timestamp(self): timestamp.bump_version() self.assertEqual(timestamp.version, 2) - self.assertEqual(timestamp.expiration, iso8601.parse_date("2030-01-01T00:00:00Z")) + self.assertEqual(timestamp.expiration, + iso8601.parse_date("2030-01-01").replace(tzinfo=None)) timestamp.bump_expiration() - self.assertEqual(timestamp.expiration, iso8601.parse_date("2030-01-02T00:00:00Z")) + self.assertEqual(timestamp.expiration, + iso8601.parse_date("2030-01-02").replace(tzinfo=None)) timestamp.bump_expiration(relativedelta(years=1)) - self.assertEqual(timestamp.expiration, iso8601.parse_date("2031-01-02T00:00:00Z")) + self.assertEqual(timestamp.expiration, + iso8601.parse_date("2031-01-02").replace(tzinfo=None)) hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'} fileinfo = timestamp.signed['meta']['snapshot.json'] diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 
f1237b927f..3f93ddae48 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -38,7 +38,7 @@ def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = self.consistent_snapshot = consistent_snapshot self.keyring = keyring - self.expiration = expiration + self._expiration = expiration assert version >= 1, f'{version} < 1' self.version = version @@ -55,8 +55,8 @@ def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface signatures = signable['signatures'] signed = signable['signed'] - # TODO: replace with dateutil.parser.parse? - expiration = iso8601.parse_date(signed['expires']) + # We always intend times to be UTC + expiration = iso8601.parse_date(signed['expires']).replace(tzinfo=None) version = signed['version'] fn, fn_ver = _strip_version_number(filename, True) @@ -94,13 +94,23 @@ def signatures(self) -> List[JsonDict]: @property def expires(self) -> str: - return self.expiration.replace(tzinfo=None).isoformat()+'Z' + """The expiration property as a string""" + return self._expiration.isoformat()+'Z' + + @property + def expiration(self) -> datetime: + return self._expiration + + @expiration.setter + def expiration(self, datetime) -> None: + # We always treat dates as UTC + self._expiration = datetime.replace(tzinfo=None) def bump_version(self) -> None: self.version = self.version + 1 def bump_expiration(self, delta: relativedelta = relativedelta(days=1)) -> None: - self.expiration = self.expiration + delta + self._expiration = self._expiration + delta def sign(self) -> JsonDict: def update_signature(signatures, keyid, signature): From 54e1f9c03b09bdf05fd0a3cd3e4db35c6480c5a7 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Mon, 6 Jul 2020 23:21:43 +0100 Subject: [PATCH 28/42] tuf.api: drop use of dateutil All of the functionality we need is available from the standard library which reduces our dependency footprint. 
Having minimal dependencies is especially important for update clients which often have to vendor their dependencies. However, dateutil.relativedelta is richer than timedelta and helps to provide a clearer API. For example, with relativedelta it's possible to specify a delta in years *and* dateutil will do the right thing for leap years. Signed-off-by: Joshua Lock --- tests/test_tuf_api.py | 6 +++--- tuf/api/metadata.py | 18 +++++++++--------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index e9796cab48..14aa0d007b 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -35,11 +35,11 @@ import sys import errno import os +from datetime import timedelta from tuf.api import metadata from tuf.api import keys -from dateutil.relativedelta import relativedelta import iso8601 import six @@ -108,7 +108,7 @@ def test_metadata_base(self): md.bump_expiration() self.assertEqual(md.expiration, iso8601.parse_date("2030-01-02").replace(tzinfo=None)) - md.bump_expiration(relativedelta(years=1)) + md.bump_expiration(timedelta(days=365)) self.assertEqual(md.expiration, iso8601.parse_date("2031-01-02").replace(tzinfo=None)) @@ -157,7 +157,7 @@ def test_metadata_timestamp(self): timestamp.bump_expiration() self.assertEqual(timestamp.expiration, iso8601.parse_date("2030-01-02").replace(tzinfo=None)) - timestamp.bump_expiration(relativedelta(years=1)) + timestamp.bump_expiration(timedelta(days=365)) self.assertEqual(timestamp.expiration, iso8601.parse_date("2031-01-02").replace(tzinfo=None)) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 3f93ddae48..a1dc2c185a 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -4,14 +4,14 @@ from tuf.api.keys import KeyRing # 2nd-party. -from datetime import datetime +from datetime import datetime, timedelta from typing import Any, Dict, List, Optional import json import tempfile # 3rd-party. 
-from dateutil.relativedelta import relativedelta +import iso8601 from securesystemslib.formats import encode_canonical from securesystemslib.util import load_json_file, persist_temp_file from securesystemslib.storage import StorageBackendInterface @@ -24,8 +24,6 @@ generate_timestamp_metadata, ) -import iso8601 - # Types. JsonDict = Dict[str, Any] @@ -34,7 +32,7 @@ class Metadata: # By default, a Metadata would be a rather empty one. - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(), keyring: Optional[KeyRing] = None, version: int = 1) -> None: + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: Optional[KeyRing] = None, version: int = 1) -> None: self.consistent_snapshot = consistent_snapshot self.keyring = keyring @@ -56,6 +54,8 @@ def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface signed = signable['signed'] # We always intend times to be UTC + # NOTE: we could do this with datetime.fromisoformat() but that is not + # available in Python 2.7's datetime expiration = iso8601.parse_date(signed['expires']).replace(tzinfo=None) version = signed['version'] @@ -109,7 +109,7 @@ def expiration(self, datetime) -> None: def bump_version(self) -> None: self.version = self.version + 1 - def bump_expiration(self, delta: relativedelta = relativedelta(days=1)) -> None: + def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None: self._expiration = self._expiration + delta def sign(self) -> JsonDict: @@ -161,7 +161,7 @@ def write_to_json(self, filename: str, storage_backend: StorageBackendInterface persist_temp_file(f, filename, storage_backend) class Timestamp(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = 
None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) self.snapshot_fileinfo = {} @@ -189,7 +189,7 @@ def update(self, version: int, length: int, hashes: JsonDict): self.snapshot_fileinfo['snapshot.json'] = fileinfo class Snapshot(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) self.targets_fileinfo = {} @@ -218,7 +218,7 @@ def update(self, rolename: str, version: int, length: Optional[int] = None, hash self.targets_fileinfo[f'{rolename}.json'] = tuf.formats.make_metadata_fileinfo(version, length, hashes) class Targets(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: relativedelta = relativedelta(days=1), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1): super().__init__(consistent_snapshot, expiration, keyring, version) self.targets = {} self.delegations = {} From 42372872d2a7f83bc4abad34d3f52a1d606fd786 Mon Sep 17 00:00:00 2001 From: Joshua Lock Date: Tue, 7 Jul 2020 10:16:42 +0100 Subject: [PATCH 29/42] Test Metadata.bump_expiration() with relativedelta dateutil provides an interface which is much easier to reason about for users, i.e. it provides an interface for year deltas which automatically handles leap years. Add some tests to try and ensure that, even though it uses standard library functionality, the metadata API can accept dateutil.relativedelta and do the right thing. 
Signed-off-by: Joshua Lock --- tests/test_tuf_api.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 14aa0d007b..ef1b185543 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2014 - 2017, New York University and the TUF contributors +# Copyright 2020, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """ @@ -36,6 +36,7 @@ import errno import os from datetime import timedelta +from dateutil.relativedelta import relativedelta from tuf.api import metadata from tuf.api import keys @@ -161,6 +162,16 @@ def test_metadata_timestamp(self): self.assertEqual(timestamp.expiration, iso8601.parse_date("2031-01-02").replace(tzinfo=None)) + # Test whether dateutil.relativedelta works, this provides a much easier to + # use interface for callers + saved_expiration = timestamp.expiration + delta = relativedelta(days=1) + timestamp.bump_expiration(delta) + self.assertEqual(timestamp.expires, "2031-01-03T00:00:00Z") + delta = relativedelta(years=5) + timestamp.bump_expiration(delta) + self.assertEqual(timestamp.expires, "2036-01-03T00:00:00Z") + hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'} fileinfo = timestamp.signed['meta']['snapshot.json'] fileinfo['hashes'] = hashes From 76cb560a4601dc264390d10143a68cc05d16787f Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Tue, 7 Jul 2020 23:55:22 -0400 Subject: [PATCH 30/42] minor edits * classmethod for init RAMKey from file * private class variables * more typing for methods * better names for arguments Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 29 ++++++------ tuf/api/keys.py | 44 ++++++++++-------- tuf/api/metadata.py | 104 ++++++++++++++++++++++-------------------- 3 files changed, 93 insertions(+), 84 deletions(-) diff --git a/tests/test_tuf_api.py 
b/tests/test_tuf_api.py index ef1b185543..c7b15671de 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -75,8 +75,8 @@ def tearDownClass(cls): def _load_key_ring(self): key_list = [] - root_key = keys.read_key(os.path.join(self.keystore_dir, 'root_key'), - 'RSA', 'password') + root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), + 'RSA', 'password') key_list.append(root_key) for key_file in os.listdir(self.keystore_dir): @@ -88,8 +88,8 @@ def _load_key_ring(self): # root key is loaded continue - key = keys.read_key(os.path.join(self.keystore_dir, key_file), 'ED25519', - 'password') + key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, key_file), + 'ED25519', 'password') key_list.append(key) threshold = keys.Threshold(1, 1) return keys.KeyRing(threshold=threshold, keys=key_list) @@ -188,21 +188,20 @@ def test_Threshold(self): # test default values keys.Threshold() # test correct arguments - keys.Threshold(min_=4, max_=5) + keys.Threshold(least=4, most=5) # test incorrect input - # TODO raise sslib.FormatError or ValueError instead of AssertionErrors - self.assertRaises(AssertionError, keys.Threshold, 5, 4) - self.assertRaises(AssertionError, keys.Threshold, 0, 5) - self.assertRaises(AssertionError, keys.Threshold, 5, 0) + self.assertRaises(ValueError, keys.Threshold, 5, 4) + self.assertRaises(ValueError, keys.Threshold, 0, 5) + self.assertRaises(ValueError, keys.Threshold, 5, 0) def test_KeyRing(self): key_list = [] - root_key = keys.read_key(os.path.join(self.keystore_dir, 'root_key'), - 'RSA', 'password') - root_key2 = keys.read_key(os.path.join(self.keystore_dir, 'root_key2'), - 'ED25519', 'password') + root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), + 'RSA', 'password') + root_key2 = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key2'), + 'ED25519', 'password') key_list.append(root_key) key_list.append(root_key2) threshold = keys.Threshold() @@ -211,12 
+210,12 @@ def test_KeyRing(self): self.assertEqual(keyring.keys, key_list) -def test_read_key(self): +def test_RAMKey_read_from_file(self): filename = os.path.join(self.keystore_dir, 'root_key') algorithm = 'RSA' passphrase = 'password' - self.assertTrue(isinstance(keys.read_key(filename, algorithm, passphrase), keys.RAMKey)) + self.assertTrue(isinstance(keys.RAMKey.read_from_file(filename, algorithm, passphrase), keys.RAMKey)) # TODO: # def test_RAMKey(self): diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 0ab0f21ac0..874acff22c 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -6,7 +6,6 @@ from typing import Any, List, Optional import logging -import os # 3rd-party. from securesystemslib.interface import ( @@ -18,6 +17,7 @@ create_signature, verify_signature, ) +from securesystemslib.storage import StorageBackendInterface # Generic classes. @@ -29,31 +29,38 @@ class Threshold: - def __init__(self, min_: int = 1, max_: int = 1): - assert min_ > 0, f'{min_} <= 0' - assert max_ > 0, f'{max_} <= 0' - assert min_ <= max_, f'{min_} > {max_}' - self.min = min_ - self.max = max_ + def __init__(self, least: int = 1, most: int = 1): + if least < 1: + raise ValueError(f'{least} <= 0') + if most < 1: + raise ValueError(f'{most} <= 0') + if least > most: + raise ValueError(f'{least} > {most}') + self.least = least + self.most = most class Key(ABC): @abstractmethod def __init__(self) -> None: - raise NotImplementedError() + raise NotImplementedError + + @classmethod + def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> Key: + raise NotImplementedError @property @abstractmethod def keyid(self) -> str: - raise NotImplementedError + @abstractmethod def sign(self, signed: str) -> str: - raise NotImplementedError + @abstractmethod def verify(self, signed: str, signature: str) -> bool: - 
raise NotImplementedError Keys = List[Key] @@ -74,6 +81,13 @@ class RAMKey(Key): def __init__(self, obj: Any) -> None: # pylint: disable=super-init-not-called self.__obj = obj + @classmethod + def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> Key: + handler = Algorithm[algorithm] + obj = handler(filename, password=passphrase) + return cls(obj) + + @property def keyid(self) -> str: return self.__obj['keyid'] @@ -82,11 +96,3 @@ def sign(self, signed: str) -> str: def verify(self, signed: str, signature: str) -> bool: return verify_signature(self.__obj, signature, signed) - - -# Utility functions. - -def read_key(filename: str, algorithm: str, passphrase: Optional[str] = None) -> Key: - handler = Algorithm[algorithm] - obj = handler(filename, password=passphrase) - return RAMKey(obj) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index a1dc2c185a..d9a4a688cf 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -4,6 +4,7 @@ from tuf.api.keys import KeyRing # 2nd-party. +from abc import ABC, abstractmethod from datetime import datetime, timedelta from typing import Any, Dict, List, Optional @@ -11,11 +12,10 @@ import tempfile # 3rd-party. -import iso8601 + from securesystemslib.formats import encode_canonical from securesystemslib.util import load_json_file, persist_temp_file from securesystemslib.storage import StorageBackendInterface -import tuf.formats from tuf.repository_lib import ( _get_written_metadata, _strip_version_number, @@ -24,29 +24,32 @@ generate_timestamp_metadata, ) +import iso8601 +import tuf.formats + # Types. JsonDict = Dict[str, Any] # Classes. -class Metadata: +class Metadata(ABC): # By default, a Metadata would be a rather empty one. 
def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: Optional[KeyRing] = None, version: int = 1) -> None: - self.consistent_snapshot = consistent_snapshot + self.__consistent_snapshot = consistent_snapshot - self.keyring = keyring - self._expiration = expiration + self.__keyring = keyring + self.__expiration = expiration assert version >= 1, f'{version} < 1' - self.version = version + self.__version = version - self._signed = {} - self._signatures = [] + self.__signed = {} + self.__signatures = [] # And you would use this method to populate it from a file. @classmethod - def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface = None) -> None: + def read_from_json(cls, filename: str, storage_backend: Optional[StorageBackendInterface] = None) -> Metadata: signable = load_json_file(filename, storage_backend) tuf.formats.SIGNABLE_SCHEMA.check_match(signable) @@ -61,7 +64,7 @@ def read_from_json(cls, filename: str, storage_backend: StorageBackendInterface fn, fn_ver = _strip_version_number(filename, True) if fn_ver: - assert fn_ver == self.version, f'{fn_ver} != {self.version}' + assert fn_ver == self.__version, f'{fn_ver} != {self.__version}' consistent_snapshot = True else: consistent_snapshot = False @@ -85,52 +88,53 @@ def signed_bytes(self) -> bytes: return encode_canonical(self.signed).encode('UTF-8') @property - def signed(self) -> str: + @abstractmethod + def signed(self) -> JsonDict: raise NotImplementedError @property def signatures(self) -> List[JsonDict]: - return self._signatures + return self.__signatures @property def expires(self) -> str: """The expiration property as a string""" - return self._expiration.isoformat()+'Z' + return self.__expiration.isoformat()+'Z' @property def expiration(self) -> datetime: - return self._expiration + return self.__expiration @expiration.setter def expiration(self, datetime) -> None: # We always treat dates as UTC - self._expiration = 
datetime.replace(tzinfo=None) + self.__expiration = datetime.replace(tzinfo=None) def bump_version(self) -> None: - self.version = self.version + 1 + self.__version = self.__version + 1 def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None: - self._expiration = self._expiration + delta + self.__expiration = self.__expiration + delta + + def __update_signature(self, signatures, keyid, signature): + updated = False + keyid_signature = {'keyid':keyid, 'sig':signature} + for idx, keyid_sig in enumerate(signatures): + if keyid_sig['keyid'] == keyid: + signatures[idx] = keyid_signature + updated = True + if not updated: + signatures.append(keyid_signature) def sign(self) -> JsonDict: - def update_signature(signatures, keyid, signature): - updated = False - keyid_signature = {'keyid':keyid, 'sig':signature} - for idx, keyid_sig in enumerate(signatures): - if keyid_sig['keyid'] == keyid: - signatures[idx] = keyid_signature - updated = True - if not updated: - signatures.append(keyid_signature) - signed_bytes = self.signed_bytes - signatures = self._signatures + signatures = self.__signatures - for key in self.keyring.keys: + for key in self.__keyring.keys: signature = key.sign(signed_bytes) - update_signature(signatures, key.keyid, signature) + self.__update_signature(signatures, key.keyid, signature) - self._signatures = signatures + self.__signatures = signatures return self.signable def verify(self) -> bool: @@ -140,7 +144,7 @@ def verify(self) -> bool: for signature in signatures: # TODO: handle an empty keyring - for key in self.keyring.keys: + for key in self.__keyring.keys: keyid = key.keyid if keyid == signature['keyid']: try: @@ -153,7 +157,7 @@ def verify(self) -> bool: verified_keyids |= keyid break - return len(verified_keyids) >= self.keyring.threshold.min + return len(verified_keyids) >= self.__keyring.threshold.least def write_to_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: with 
tempfile.TemporaryFile() as f: @@ -161,12 +165,12 @@ def write_to_json(self, filename: str, storage_backend: StorageBackendInterface persist_temp_file(f, filename, storage_backend) class Timestamp(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: super().__init__(consistent_snapshot, expiration, keyring, version) self.snapshot_fileinfo = {} @classmethod - def read_from_json(cls, filename: str) -> None: + def read_from_json(cls, filename: str) -> Metadata: md = Metadata.read_from_json(filename) timestamp = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) timestamp.snapshot_fileinfo = md._signed['meta'] @@ -177,11 +181,11 @@ def read_from_json(cls, filename: str) -> None: @property def signed(self) -> JsonDict: return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, version=self.version, + tuf.formats.TIMESTAMP_SCHEMA, version=self.__version, expires=self.expires, meta=self.snapshot_fileinfo) # Update metadata about the snapshot metadata. 
- def update(self, version: int, length: int, hashes: JsonDict): + def update(self, version: int, length: int, hashes: JsonDict) -> None: fileinfo = self.snapshot_fileinfo.get('snapshot.json', {}) fileinfo['version'] = version fileinfo['length'] = length @@ -189,12 +193,12 @@ def update(self, version: int, length: int, hashes: JsonDict): self.snapshot_fileinfo['snapshot.json'] = fileinfo class Snapshot(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: super().__init__(consistent_snapshot, expiration, keyring, version) self.targets_fileinfo = {} @classmethod - def read_from_json(cls, filename: str) -> None: + def read_from_json(cls, filename: str) -> Metadata: md = Metadata.read_from_json(filename) snapshot = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) meta = md._signed['meta'] @@ -208,39 +212,39 @@ def read_from_json(cls, filename: str) -> None: return snapshot @property - def signed(self): + def signed(self) -> JsonDict: return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, version=self.version, + tuf.formats.SNAPSHOT_SCHEMA, version=self.__version, expires=self.expires, meta=self.targets_fileinfo) # Add or update metadata about the targets metadata. 
- def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None): + def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None) -> None: self.targets_fileinfo[f'{rolename}.json'] = tuf.formats.make_metadata_fileinfo(version, length, hashes) class Targets(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1): + def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: super().__init__(consistent_snapshot, expiration, keyring, version) self.targets = {} self.delegations = {} @classmethod - def read_from_json(cls, filename: str) -> None: + def read_from_json(cls, filename: str) -> Metadata: targets = Metadata.read_from_json(filename) - targets.targets = self._signed['targets'] - targets.delegations = self._signed.get('delegations', {}) + targets.targets = self.__signed['targets'] + targets.delegations = self.__signed.get('delegations', {}) tuf.formats.TARGETS_SCHEMA.check_match(targets.signed) targets._signatures = md._signatures return targets @property - def signed(self): + def signed(self) -> JsonDict: return tuf.formats.build_dict_conforming_to_schema( tuf.formats.TARGETS_SCHEMA, - version=self.version, + version=self.__version, expires=self.expires, targets=self.targets, delegations=self.delegations) # Add or update metadata about the target. 
- def update(self, filename: str, fileinfo: JsonDict): + def update(self, filename: str, fileinfo: JsonDict) -> None: self.targets[filename] = fileinfo From 565768efd92f692bf8076d8723270ec0af299cf7 Mon Sep 17 00:00:00 2001 From: Lukas Puehringer Date: Fri, 10 Jul 2020 15:24:29 +0200 Subject: [PATCH 31/42] Quick-fix programming errors in api.keys module - Finalize min/max -> least/most refactor - Comment out unclear input validation - Use string literal for foward referencing type hint (see https://www.python.org/dev/peps/pep-0484/#forward-references) Signed-off-by: Lukas Puehringer --- tuf/api/keys.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 874acff22c..0e43714565 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -30,12 +30,12 @@ class Threshold: def __init__(self, least: int = 1, most: int = 1): - if least > 0: - raise ValueError(f'{least} <= 0') - if most > 0: - raise ValueError(f'{most} <= 0') - if least <= most: - raise ValueError(f'{least} > {most}') + # if least > 0: + # raise ValueError(f'{least} <= 0') + # if most > 0: + # raise ValueError(f'{most} <= 0') + # if least <= most: + # raise ValueError(f'{least} > {most}') self.least = least self.most = most @@ -46,7 +46,7 @@ def __init__(self) -> None: raise NotImplementedError @classmethod - def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> Key: + def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> 'Key': raise NotImplementedError @property @@ -67,10 +67,10 @@ def verify(self, signed: str, signature: str) -> bool: class KeyRing: def __init__(self, threshold: Threshold, keys: Keys): - if len(keys) >= threshold.min: - logging.warning(f'{len(keys)} >= {threshold.min}') - if len(keys) <= threshold.max: - 
logging.warning(f'{len(keys)} <= {threshold.max}') + if len(keys) >= threshold.least: + logging.warning(f'{len(keys)} >= {threshold.least}') + if len(keys) <= threshold.most: + logging.warning(f'{len(keys)} <= {threshold.most}') self.threshold = threshold self.keys = keys From 3e249f5bdd9e600475a05eab3587c5388eeedc3a Mon Sep 17 00:00:00 2001 From: Lukas Puehringer Date: Fri, 10 Jul 2020 16:04:50 +0200 Subject: [PATCH 32/42] Make Metadata a container class (WIP) This commit performs restructuring on the recently added metadata class model architecture, which shall be part of a new simple TUF API. The key change is that the Metadata class is now used as container for inner TUF metadata (Root, Timestamp, Snapshot, Targets) instead of serving as base class for these, that means we use 'composition' instead of 'inheritance'. Still, in order to aggregate common attributes of the inner Metadata (expires, version, spec_version), we use a new baseclass 'Signed', which also corresponds to the signed field of the outer metadata container. Based on prior observations in TUF's sister project in-toto, this architecture seems to more closely represent the metadata model as it is defined in the specification (see in-toto/in-toto#98 and in-toto/in-toto#142 for related discussions). Note that the proposed changes require us to now access some attributes/methods via the signed attribute of a Metadata object and not directly on the Metadata object, but it would be possible to add short-cuts. (see todo notes in doc header). Further changes include: - Add minimal doc header with TODO notes - Make attributes that correspond to fields in TUF JSON metadata public again. There doesn't seem to be a good reason to protect them with leading underscores and use setters/getters instead, it just adds more code. - Generally try to reduce code. - Remove keyring and consistent_snapshot attributes from metadata class. 
As discussed in #1060 they are a better fit for extra management code (also see #660) - Remove sslib schema checks (see TODO notes about validation in doc header) - Drop usage of build_dict_conforming_to_schema, it seems a lot simpler and more explicit to just code this here. - ... same goes for make_metadata_fileinfo - Adapt tests accordingly TODO: Document!!! Signed-off-by: Lukas Puehringer --- tests/test_tuf_api.py | 81 +++++----- tuf/api/metadata.py | 352 +++++++++++++++++++++++------------------- 2 files changed, 232 insertions(+), 201 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index c7b15671de..c9a4d9d7a8 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -55,12 +55,15 @@ def setUpClass(cls): # files. 'temporary_directory' must be deleted in TearDownClass() so that # temporary files are always removed, even when exceptions occur. cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) - test_repo_data = os.path.join('repository_data', 'repository') + + test_repo_data = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'repository_data') + cls.repo_dir = os.path.join(cls.temporary_directory, 'repository') - shutil.copytree(test_repo_data, cls.repo_dir) - test_repo_keys = os.path.join('repository_data', 'keystore') + shutil.copytree(os.path.join(test_repo_data, 'repository'), cls.repo_dir) + cls.keystore_dir = os.path.join(cls.temporary_directory, 'keystore') - shutil.copytree(test_repo_keys, cls.keystore_dir) + shutil.copytree(os.path.join(test_repo_data, 'keystore'), cls.keystore_dir) @@ -100,18 +103,14 @@ def test_metadata_base(self): snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json') md = metadata.Snapshot.read_from_json(snapshot_path) - self.assertEqual(md.version, 1) - md.bump_version() - self.assertEqual(md.version, 2) - - self.assertEqual(md.expiration, - iso8601.parse_date("2030-01-01").replace(tzinfo=None)) - md.bump_expiration() - self.assertEqual(md.expiration, - 
iso8601.parse_date("2030-01-02").replace(tzinfo=None)) - md.bump_expiration(timedelta(days=365)) - self.assertEqual(md.expiration, - iso8601.parse_date("2031-01-02").replace(tzinfo=None)) + self.assertEqual(md.signed.version, 1) + md.signed.bump_version() + self.assertEqual(md.signed.version, 2) + self.assertEqual(md.signed.expires, '2030-01-01T00:00:00Z') + md.signed.bump_expiration() + self.assertEqual(md.signed.expires, '2030-01-02T00:00:00Z') + md.signed.bump_expiration(timedelta(days=365)) + self.assertEqual(md.signed.expires, '2031-01-02T00:00:00Z') def test_metadata_snapshot(self): @@ -119,18 +118,17 @@ def test_metadata_snapshot(self): snapshot = metadata.Snapshot.read_from_json(snapshot_path) key_ring = self._load_key_ring() - snapshot.keyring = key_ring - snapshot.verify() + snapshot.verify(key_ring) # Create a dict representing what we expect the updated data to be - fileinfo = snapshot.signed['meta'] + fileinfo = snapshot.signed.meta hashes = {'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'} fileinfo['role1.json']['version'] = 2 fileinfo['role1.json']['hashes'] = hashes fileinfo['role1.json']['length'] = 123 - snapshot.update('role1', 2, 123, hashes) - self.assertEqual(snapshot.signed['meta'], fileinfo) + snapshot.signed.update('role1', 2, 123, hashes) + self.assertEqual(snapshot.signed.meta, fileinfo) # snapshot.signable() @@ -146,39 +144,34 @@ def test_metadata_timestamp(self): timestamp = metadata.Timestamp.read_from_json(timestamp_path) key_ring = self._load_key_ring() - timestamp.keyring = key_ring - timestamp.verify() - - self.assertEqual(timestamp.version, 1) - timestamp.bump_version() - self.assertEqual(timestamp.version, 2) - - self.assertEqual(timestamp.expiration, - iso8601.parse_date("2030-01-01").replace(tzinfo=None)) - timestamp.bump_expiration() - self.assertEqual(timestamp.expiration, - iso8601.parse_date("2030-01-02").replace(tzinfo=None)) - timestamp.bump_expiration(timedelta(days=365)) - 
self.assertEqual(timestamp.expiration, - iso8601.parse_date("2031-01-02").replace(tzinfo=None)) + timestamp.verify(key_ring) + + self.assertEqual(timestamp.signed.version, 1) + timestamp.signed.bump_version() + self.assertEqual(timestamp.signed.version, 2) + + self.assertEqual(timestamp.signed.expires, '2030-01-01T00:00:00Z') + timestamp.signed.bump_expiration() + self.assertEqual(timestamp.signed.expires, '2030-01-02T00:00:00Z') + timestamp.signed.bump_expiration(timedelta(days=365)) + self.assertEqual(timestamp.signed.expires, '2031-01-02T00:00:00Z') # Test whether dateutil.relativedelta works, this provides a much easier to # use interface for callers - saved_expiration = timestamp.expiration delta = relativedelta(days=1) - timestamp.bump_expiration(delta) - self.assertEqual(timestamp.expires, "2031-01-03T00:00:00Z") + timestamp.signed.bump_expiration(delta) + self.assertEqual(timestamp.signed.expires, '2031-01-03T00:00:00Z') delta = relativedelta(years=5) - timestamp.bump_expiration(delta) - self.assertEqual(timestamp.expires, "2036-01-03T00:00:00Z") + timestamp.signed.bump_expiration(delta) + self.assertEqual(timestamp.signed.expires, '2036-01-03T00:00:00Z') hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'} - fileinfo = timestamp.signed['meta']['snapshot.json'] + fileinfo = timestamp.signed.meta['snapshot.json'] fileinfo['hashes'] = hashes fileinfo['version'] = 2 fileinfo['length'] = 520 - timestamp.update(2, 520, hashes) - self.assertEqual(timestamp.signed['meta']['snapshot.json'], fileinfo) + timestamp.signed.update(2, 520, hashes) + self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo) # timestamp.sign() diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index d9a4a688cf..4d8f43f666 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -1,3 +1,38 @@ +"""TUF role metadata model. 
+ +This module provides container classes for TUF role metadata, including methods +to read/serialize/write from and to JSON, perform TUF-compliant metadata +updates, and create and verify signatures. + +TODO: + + * Add docstrings + + * Finalize/Document Verify/Sign functions (I am not fully sure about expected + behavior) + + * Validation (some thoughts ...) + - Avoid schema, see secure-systems-lab/securesystemslib#183 + - Provide methods to validate JSON representation (at user boundary) + - Fail on bad json metadata in read_from_json method + - Be lenient on bad/invalid metadata objects in memory, they might be + work in progress. E.g. it might be convenient to create empty metadata + and assign attributes later on. + - Fail on bad json metadata in write_to_json method, but with option to + disable check as there might be a justified reason to write WIP + metadata to json. + + * It might be nice to have short-cuts on the Metadata class to methods and + attributes of the contained Signed object. If we do this, we should only do + it on common methods/attributes (e.g. version, bump_version, expires, + bump_expiration) + + * Similarly, it might be nice to have a generic Metadata.read_from_json that + can load any TUF role metadata and instantiate the appropriate object based + on the json '_type' field. + + +""" # Imports. # 1st-party. @@ -33,88 +68,19 @@ # Classes. -class Metadata(ABC): - # By default, a Metadata would be a rather empty one. - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: Optional[KeyRing] = None, version: int = 1) -> None: - self.__consistent_snapshot = consistent_snapshot - - self.__keyring = keyring - self.__expiration = expiration - - assert version >= 1, f'{version} < 1' - self.__version = version - - self.__signed = {} - self.__signatures = [] - - # And you would use this method to populate it from a file. 
- @classmethod - def read_from_json(cls, filename: str, storage_backend: Optional[StorageBackendInterface] = None) -> Metadata: - signable = load_json_file(filename, storage_backend) - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - - signatures = signable['signatures'] - signed = signable['signed'] - - # We always intend times to be UTC - # NOTE: we could do this with datetime.fromisoformat() but that is not - # available in Python 2.7's datetime - expiration = iso8601.parse_date(signed['expires']).replace(tzinfo=None) - version = signed['version'] - - fn, fn_ver = _strip_version_number(filename, True) - if fn_ver: - assert fn_ver == self.__version, f'{fn_ver} != {self.__version}' - consistent_snapshot = True - else: - consistent_snapshot = False - - metadata = cls(consistent_snapshot=consistent_snapshot, - expiration=expiration, - version=version) - - metadata._signatures = signatures - metadata._signed = signed - return metadata - - @property - def signable(self) -> JsonDict: - return {"signatures": self.signatures, - "signed": self.signed} - - @property - def signed_bytes(self) -> bytes: - return encode_canonical(self.signed).encode('UTF-8') - - @property - @abstractmethod - def signed(self) -> JsonDict: - raise NotImplementedError - - @property - def signatures(self) -> List[JsonDict]: - return self.__signatures - - @property - def expires(self) -> str: - """The expiration property as a string""" - return self.__expiration.isoformat()+'Z' - - @property - def expiration(self) -> datetime: - return self.__expiration - - @expiration.setter - def expiration(self, datetime) -> None: - # We always treat dates as UTC - self.__expiration = datetime.replace(tzinfo=None) - - def bump_version(self) -> None: - self.__version = self.__version + 1 - - def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None: - self.__expiration = self.__expiration + delta +class Metadata(ABC): + def __init__( + self, signed: 'Signed' = None, signatures: list = None) -> 
None: + # TODO: How much init magic do we want? + self.signed = signed + self.signatures = signatures + + def as_dict(self) -> JsonDict: + return { + 'signatures': self.signatures, + 'signed': self.signed.as_dict() + } def __update_signature(self, signatures, keyid, signature): updated = False @@ -126,25 +92,27 @@ def __update_signature(self, signatures, keyid, signature): if not updated: signatures.append(keyid_signature) - def sign(self) -> JsonDict: + def sign(self, key_ring: KeyRing) -> JsonDict: + # FIXME: Needs documentation of expected behavior signed_bytes = self.signed_bytes signatures = self.__signatures - for key in self.__keyring.keys: - signature = key.sign(signed_bytes) + for key in key_ring.keys: + signature = key.sign(self.signed_bytes) self.__update_signature(signatures, key.keyid, signature) self.__signatures = signatures return self.signable - def verify(self) -> bool: - signed_bytes = self.signed_bytes + def verify(self, key_ring: KeyRing) -> bool: + # FIXME: Needs documentation of expected behavior + signed_bytes = self.signed.signed_bytes signatures = self.signatures - verified_keyids = {} + verified_keyids = set() for signature in signatures: # TODO: handle an empty keyring - for key in self.__keyring.keys: + for key in key_ring.keys: keyid = key.keyid if keyid == signature['keyid']: try: @@ -154,96 +122,166 @@ def verify(self) -> bool: continue else: # Avoid https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-6174 - verified_keyids |= keyid + verified_keyids.add(keyid) + break - return len(verified_keyids) >= self.__keyring.threshold.least + return len(verified_keyids) >= key_ring.threshold.least - def write_to_json(self, filename: str, storage_backend: StorageBackendInterface = None) -> None: + def write_to_json( + self, filename: str, + storage_backend: StorageBackendInterface = None) -> None: with tempfile.TemporaryFile() as f: f.write(_get_written_metadata(self.sign()).encode_canonical()) persist_temp_file(f, filename, 
storage_backend) -class Timestamp(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: - super().__init__(consistent_snapshot, expiration, keyring, version) - self.snapshot_fileinfo = {} - @classmethod - def read_from_json(cls, filename: str) -> Metadata: - md = Metadata.read_from_json(filename) - timestamp = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) - timestamp.snapshot_fileinfo = md._signed['meta'] - tuf.formats.TIMESTAMP_SCHEMA.check_match(timestamp.signed) - timestamp._signatures = md._signatures - return timestamp +class Signed: + # NOTE: Signed is a stupid name, because this might not be signed yet, but + # we keep it to match spec terminology (I often refer to this as "payload", + # or "inner metadata") + + # TODO: Re-think default values. It might be better to pass some things + # as args and not es kwargs. Then we'd need to pop those from + # signable["signed"] in read_from_json and pass them explicitly, which + # some say is better than implicit. :) + def __init__( + self, _type: str = None, version: int = 0, + spec_version: str = None, expires: datetime = None + ) -> None: + # TODO: How much init magic do we want? 
+ + self._type = _type + self.spec_version = spec_version + + # We always intend times to be UTC + # NOTE: we could do this with datetime.fromisoformat() but that is not + # available in Python 2.7's datetime + # NOTE: Store as datetime object for convenient handling, use 'expires' + # property to get the TUF metadata format representation + self.__expiration = iso8601.parse_date(expires).replace(tzinfo=None) + + if version < 0: + raise ValueError(f'version must be < 0, got {version}') + self.version = version @property - def signed(self) -> JsonDict: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, version=self.__version, - expires=self.expires, meta=self.snapshot_fileinfo) + def signed_bytes(self) -> bytes: + return encode_canonical(self.as_dict()).encode('UTF-8') + + @property + def expires(self) -> str: + """The expiration property in TUF metadata format.""" + return self.__expiration.isoformat() + 'Z' + + def bump_expiration(self, delta: timedelta = timedelta(days=1)) -> None: + self.__expiration = self.__expiration + delta + + def bump_version(self) -> None: + self.version += 1 + + def as_dict(self) -> JsonDict: + # NOTE: The classes should be the single source of truth about metadata + # let's define the dict representation here and not in some dubious + # build_dict_conforming_to_schema + return { + '_type': self._type, + 'version': self.version, + 'spec_version': self.spec_version, + 'expires': self.expires + } + + @classmethod + def read_from_json( + cls, filename: str, + storage_backend: Optional[StorageBackendInterface] = None + ) -> Metadata: + signable = load_json_file(filename, storage_backend) + + # FIXME: It feels dirty to access signable["signed"]["version"] here in + # order to do this check, and also a bit random (there are likely other + # things to check), but later we don't have the filename anymore. If we + # want to stick to the check, which seems reasonable, we should maybe + # think of a better place. 
+ _, fn_prefix = _strip_version_number(filename, True) + if fn_prefix and fn_prefix != signable['signed']['version']: + raise ValueError( + f'version filename prefix ({fn_prefix}) must align with ' + f'version in metadata ({signable["signed"]["version"]}).') + + return Metadata( + signed=cls(**signable['signed']), + signatures=signable['signatures']) + + +class Timestamp(Signed): + def __init__(self, meta: JsonDict = None, **kwargs) -> None: + super().__init__(**kwargs) + # TODO: How much init magic do we want? + # TODO: Is there merit in creating classes for dict fields? + self.meta = meta + + def as_dict(self) -> JsonDict: + json_dict = super().as_dict() + json_dict.update({ + 'meta': self.meta + }) + return json_dict # Update metadata about the snapshot metadata. def update(self, version: int, length: int, hashes: JsonDict) -> None: - fileinfo = self.snapshot_fileinfo.get('snapshot.json', {}) + fileinfo = self.meta.get('snapshot.json', {}) fileinfo['version'] = version fileinfo['length'] = length fileinfo['hashes'] = hashes - self.snapshot_fileinfo['snapshot.json'] = fileinfo + self.meta['snapshot.json'] = fileinfo -class Snapshot(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: - super().__init__(consistent_snapshot, expiration, keyring, version) - self.targets_fileinfo = {} - @classmethod - def read_from_json(cls, filename: str) -> Metadata: - md = Metadata.read_from_json(filename) - snapshot = cls(md.consistent_snapshot, md.expiration, md.keyring, md.version) - meta = md._signed['meta'] - for target_role in meta: - version = meta[target_role]['version'] - length = meta[target_role].get('length') - hashes = meta[target_role].get('hashes') - snapshot.targets_fileinfo[target_role] = tuf.formats.make_metadata_fileinfo(version, length, hashes) - tuf.formats.SNAPSHOT_SCHEMA.check_match(snapshot.signed) - snapshot._signatures = md._signatures - return 
snapshot +class Snapshot(Signed): + def __init__(self, meta: JsonDict = None, **kwargs) -> None: + # TODO: How much init magic do we want? + # TODO: Is there merit in creating classes for dict fields? + super().__init__(**kwargs) + self.meta = meta - @property - def signed(self) -> JsonDict: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.SNAPSHOT_SCHEMA, version=self.__version, - expires=self.expires, meta=self.targets_fileinfo) + def as_dict(self) -> JsonDict: + json_dict = super().as_dict() + json_dict.update({ + 'meta': self.meta + }) + return json_dict # Add or update metadata about the targets metadata. - def update(self, rolename: str, version: int, length: Optional[int] = None, hashes: Optional[JsonDict] = None) -> None: - self.targets_fileinfo[f'{rolename}.json'] = tuf.formats.make_metadata_fileinfo(version, length, hashes) - -class Targets(Metadata): - def __init__(self, consistent_snapshot: bool = True, expiration: datetime = datetime.today(), keyring: KeyRing = None, version: int = 1) -> None: - super().__init__(consistent_snapshot, expiration, keyring, version) - self.targets = {} - self.delegations = {} - - @classmethod - def read_from_json(cls, filename: str) -> Metadata: - targets = Metadata.read_from_json(filename) - targets.targets = self.__signed['targets'] - targets.delegations = self.__signed.get('delegations', {}) - tuf.formats.TARGETS_SCHEMA.check_match(targets.signed) - targets._signatures = md._signatures - return targets - - @property - def signed(self) -> JsonDict: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - version=self.__version, - expires=self.expires, - targets=self.targets, - delegations=self.delegations) + def update( + self, rolename: str, version: int, length: Optional[int] = None, + hashes: Optional[JsonDict] = None) -> None: + metadata_fn = f'{rolename}.json' + + self.meta[metadata_fn] = {'version': version} + if length is not None: + self.meta[metadata_fn]['length'] 
= length + + if hashes is not None: + self.meta[metadata_fn]['hashes'] = hashes + + +class Targets(Signed): + def __init__( + self, targets: JsonDict = None, delegations: JsonDict = None, + **kwargs) -> None: + # TODO: How much init magic do we want? + # TODO: Is there merit in creating classes for dict fields? + super().__init__(**kwargs) + self.targets = targets + self.delegations = delegations + + def as_dict(self) -> JsonDict: + json_dict = super().as_dict() + json_dict.update({ + 'targets': self.targets, + 'delegations': self.delegations, + }) + return json_dict # Add or update metadata about the target. def update(self, filename: str, fileinfo: JsonDict) -> None: From 1a8a0e760c50a9188532c433fab5f59c9366210b Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Thu, 16 Jul 2020 21:27:43 -0400 Subject: [PATCH 33/42] fix badly translated assertions Signed-off-by: Trishank Karthik Kuppusamy --- tuf/api/keys.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 0e43714565..76af7bc27f 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -30,12 +30,12 @@ class Threshold: def __init__(self, least: int = 1, most: int = 1): - # if least > 0: - # raise ValueError(f'{least} <= 0') - # if most > 0: - # raise ValueError(f'{most} <= 0') - # if least <= most: - # raise ValueError(f'{least} > {most}') + if least <= 0: + raise ValueError(f'{least} <= 0') + if most <= 0: + raise ValueError(f'{most} <= 0') + if least > most: + raise ValueError(f'{least} > {most}') self.least = least self.most = most From 41c0241b9783e45acd13e5036000ffeb92a39400 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Thu, 16 Jul 2020 21:45:53 -0400 Subject: [PATCH 34/42] Not all Keys read from files Signed-off-by: Trishank Karthik Kuppusamy --- tuf/api/keys.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 76af7bc27f..33652e4d60 100644 --- 
a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -17,7 +17,6 @@ create_signature, verify_signature, ) -from securesystemslib.storage import StorageBackendInterface # Generic classes. @@ -45,10 +44,6 @@ class Key(ABC): def __init__(self) -> None: raise NotImplementedError - @classmethod - def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> 'Key': - raise NotImplementedError - @property @abstractmethod def keyid(self) -> str: @@ -82,7 +77,7 @@ def __init__(self, obj: Any) -> None: # pylint: disable=super-init-not-called self.__obj = obj @classmethod - def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None, storage_backend: Optional[StorageBackendInterface] = None) -> Key: + def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None) -> Key: handler = Algorithm[algorithm] obj = handler(filename, password=passphrase) return cls(obj) From 9285727ec50d1bb746c980525efc03d647f9925f Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Fri, 17 Jul 2020 19:09:04 -0400 Subject: [PATCH 35/42] sign/verify w/ Vault Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 135 +++++++++++++++++++------- tuf/api/keys.py | 217 ++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 307 insertions(+), 45 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index c9a4d9d7a8..5a882482b4 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -40,6 +40,7 @@ from tuf.api import metadata from tuf.api import keys +from tuf.api.keys import VaultKey import iso8601 import six @@ -48,6 +49,8 @@ class TestTufApi(unittest.TestCase): + # TODO: Start Vault in a dev mode, and export VAULT_ADDR as well as VAULT_TOKEN. + # TODO: Enable the Vault Transit secrets engine. 
@classmethod def setUpClass(cls): @@ -66,7 +69,7 @@ def setUpClass(cls): shutil.copytree(os.path.join(test_repo_data, 'keystore'), cls.keystore_dir) - + # TODO: Shut down Vault. @classmethod def tearDownClass(cls): @@ -177,41 +180,101 @@ def test_metadata_timestamp(self): # timestamp.write_to_json() -def test_Threshold(self): - # test default values - keys.Threshold() - # test correct arguments - keys.Threshold(least=4, most=5) - - # test incorrect input - self.assertRaises(ValueError, keys.Threshold, 5, 4) - self.assertRaises(ValueError, keys.Threshold, 0, 5) - self.assertRaises(ValueError, keys.Threshold, 5, 0) - - -def test_KeyRing(self): - key_list = [] - root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), - 'RSA', 'password') - root_key2 = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key2'), - 'ED25519', 'password') - key_list.append(root_key) - key_list.append(root_key2) - threshold = keys.Threshold() - keyring = keys.KeyRing(threshold, key_list) - self.assertEqual(keyring.threshold, threshold) - self.assertEqual(keyring.keys, key_list) - - -def test_RAMKey_read_from_file(self): - filename = os.path.join(self.keystore_dir, 'root_key') - algorithm = 'RSA' - passphrase = 'password' - - self.assertTrue(isinstance(keys.RAMKey.read_from_file(filename, algorithm, passphrase), keys.RAMKey)) - -# TODO: -# def test_RAMKey(self): + def test_Threshold(self): + # test default values + keys.Threshold() + # test correct arguments + keys.Threshold(least=4, most=5) + + # test incorrect input + self.assertRaises(ValueError, keys.Threshold, 5, 4) + self.assertRaises(ValueError, keys.Threshold, 0, 5) + self.assertRaises(ValueError, keys.Threshold, 5, 0) + + + def test_KeyRing(self): + key_list = [] + root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), + 'RSA', 'password') + root_key2 = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key2'), + 'ED25519', 'password') + 
key_list.append(root_key) + key_list.append(root_key2) + threshold = keys.Threshold() + keyring = keys.KeyRing(threshold, key_list) + self.assertEqual(keyring.threshold, threshold) + self.assertEqual(keyring.keys, key_list) + + + def test_RAMKey_read_from_file(self): + filename = os.path.join(self.keystore_dir, 'root_key') + algorithm = 'RSA' + passphrase = 'password' + + self.assertTrue(isinstance(keys.RAMKey.read_from_file(filename, algorithm, passphrase), keys.RAMKey)) + + + def test_VaultKey_Ed25519(self): + VAULT_ADDR = os.environ['VAULT_ADDR'] + VAULT_TOKEN = os.environ['VAULT_TOKEN'] + KEY_TYPE = VaultKey.KeyTypes.ED25519.value + NAME = f'test-{KEY_TYPE}-key' + + for hash_algorithm in {h.value for h in VaultKey.HashAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=hash_algorithm) + + for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, marshaling_algorithm=marshaling_algorithm) + + for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, signature_algorithm=signature_algorithm) + + key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE) + signed = f'Hello, {KEY_TYPE}!' 
+ signature = key.sign(signed) + self.assertTrue(key.verify(signed, signature)) + + + def test_VaultKey_ECDSA(self): + VAULT_ADDR = os.environ['VAULT_ADDR'] + VAULT_TOKEN = os.environ['VAULT_TOKEN'] + KEY_TYPE = VaultKey.KeyTypes.P_256.value + NAME = f'test-{KEY_TYPE}-key' + + for hash_algorithm in { + VaultKey.HashAlgorithms.SHA2_224.value, + VaultKey.HashAlgorithms.SHA2_384.value, + VaultKey.HashAlgorithms.SHA2_512.value + }: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=hash_algorithm) + + for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, signature_algorithm=signature_algorithm) + + for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: + key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=VaultKey.HashAlgorithms.SHA2_256.value, marshaling_algorithm=marshaling_algorithm,) + signed = f'Hello, {KEY_TYPE}!' + signature = key.sign(signed) + self.assertTrue(key.verify(signed, signature)) + + + def test_VaultKey_RSA(self): + VAULT_ADDR = os.environ['VAULT_ADDR'] + VAULT_TOKEN = os.environ['VAULT_TOKEN'] + + for key_type in {VaultKey.KeyTypes.RSA_2048.value, VaultKey.KeyTypes.RSA_4096.value}: + NAME = f'test-{key_type}-key' + + for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: + for hash_algorithm in {VaultKey.HashAlgorithms.SHA2_224.value, VaultKey.HashAlgorithms.SHA2_384.value, VaultKey.HashAlgorithms.SHA2_512.value,}: + for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, marshaling_algorithm=marshaling_algorithm,) + + key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm, signature_algorithm=signature_algorithm,) + signed = f'Hello, {key_type}!' 
+ signature = key.sign(signed) + self.assertTrue(key.verify(signed, signature)) + # Run unit test. if __name__ == '__main__': diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 33652e4d60..0df7f53672 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -3,11 +3,16 @@ # 2nd-party. from abc import ABC, abstractmethod -from typing import Any, List, Optional +from enum import Enum, unique +from typing import Any, List, Optional, Union +import base64 import logging +import sys # 3rd-party. +from securesystemslib.hash import digest +from securesystemslib.formats import encode_canonical from securesystemslib.interface import ( import_ecdsa_privatekey_from_file, import_ed25519_privatekey_from_file, @@ -15,16 +20,13 @@ ) from securesystemslib.keys import ( create_signature, + format_keyval_to_metadata, verify_signature, ) -# Generic classes. +import hvac -Algorithm = { - 'ECDSA': import_ecdsa_privatekey_from_file, - 'ED25519': import_ed25519_privatekey_from_file, - 'RSA': import_rsa_privatekey_from_file - } +# Generic classes. class Threshold: @@ -49,6 +51,11 @@ def __init__(self) -> None: def keyid(self) -> str: raise NotImplementedError + @property + @abstractmethod + def public_key(self) -> str: + raise NotImplementedError + @abstractmethod def sign(self, signed: str) -> str: raise NotImplementedError @@ -73,12 +80,21 @@ def __init__(self, threshold: Threshold, keys: Keys): class RAMKey(Key): + # FIXME: Need a way to load *either* private or public keys. 
+ KEY_TYPES = { + 'ECDSA': import_ecdsa_privatekey_from_file, + 'ED25519': import_ed25519_privatekey_from_file, + 'RSA': import_rsa_privatekey_from_file + } + def __init__(self, obj: Any) -> None: # pylint: disable=super-init-not-called self.__obj = obj @classmethod - def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str] = None) -> Key: - handler = Algorithm[algorithm] + def read_from_file(cls, filename: str, key_type: str, passphrase: Optional[str] = None) -> 'RAMKey': + handler = cls.KEY_TYPES.get(key_type) + if not handler: + return ValueError(key_type) obj = handler(filename, password=passphrase) return cls(obj) @@ -86,8 +102,191 @@ def read_from_file(cls, filename: str, algorithm: str, passphrase: Optional[str def keyid(self) -> str: return self.__obj['keyid'] + @property + def public_key(self) -> str: + return self.__obj['keyval']['public'] + def sign(self, signed: str) -> str: return create_signature(self.__obj, signed) def verify(self, signed: str, signature: str) -> bool: return verify_signature(self.__obj, signature, signed) + +class VaultKey(Key): + + class AuthenticationError(Exception): pass + + @unique + class KeyTypes(Enum): + ED25519 = 'ed25519' + P_256 = 'ecdsa-p256' + RSA_2048 = 'rsa-2048' + RSA_4096 = 'rsa-4096' + + @unique + class HashAlgorithms(Enum): + SHA2_224 = 'sha2-224' + SHA2_256 = 'sha2-256' + SHA2_384 = 'sha2-384' + SHA2_512 = 'sha2-512' + + @unique + class SignatureAlgorithms(Enum): + PSS = 'pss' + PKCS1 = 'pkcs1v15' + + @unique + class MarshalingAlgorithms(Enum): + ANS1 = 'asn1' + JWS = 'jws' + + def __init__( + self, + url: str, + token: str, + name: str, + hash_algorithm: Optional[str] = None, + marshaling_algorithm: Optional[str] = None, + signature_algorithm: Optional[str] = None, + ) -> None: # pylint: disable=super-init-not-called + """Reads the key using the Transit Secrets Engine as a side effect.""" + + self.__client = hvac.Client(url=url, token=token) + if not self.__client.is_authenticated(): + 
raise self.AuthenticationError + + # Guess why this isn't a requests.Response? + # https://github.com/hvac/hvac/pull/537#issuecomment-660304707 + response = self.__client.secrets.transit.read_key(name=name) + self.__name = name + + # Get public key. + data = response['data'] + + key_type = data['type'] + if key_type not in {k.value for k in self.KeyTypes}: + return ValueError(key_type) + self.__key_type = data['type'] + + # NOTE: The documentation is not clear, but presumably the returned + # keys are different versions of keys under the same name. Therefore, + # we shall select the one with the latest version number. + # NOTE: We are also taking it for granted that Vault will generate + # public keys in formats TUF will recognize out of the box. + keys = data['keys'] + latest_version = data['latest_version'] + key = keys.get(str(latest_version)) + self.__public_key = key['public_key'] + + # A valid hash algorithm is only good for ECDSA or RSA. + if hash_algorithm is not None: + if hash_algorithm not in {h.value for h in self.HashAlgorithms}: + raise ValueError(hash_algorithm) + if key_type == self.KeyTypes.ED25519.value: + raise ValueError(hash_algorithm) + # P-256 only takes SHA2-256. + # https://tools.ietf.org/html/rfc5656#section-6.2.1 + if key_type == self.KeyTypes.P_256.value and hash_algorithm != self.HashAlgorithms.SHA2_256.value: + raise ValueError(hash_algorithm) + self.__hash_algorithm = hash_algorithm + + # A valid marshaling algorithm is only good for P-256. + if marshaling_algorithm is not None: + if marshaling_algorithm not in {m.value for m in self.MarshalingAlgorithms}: + raise ValueError(marshaling_algorithm) + if key_type != self.KeyTypes.P_256.value: + raise ValueError(marshaling_algorithm) + self.__marshaling_algorithm = marshaling_algorithm + + # A signature algorithm is good only for RSA. 
+ if signature_algorithm is not None: + if signature_algorithm not in {s.value for s in self.SignatureAlgorithms}: + raise ValueError(signature_algorithm) + if key_type not in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_4096.value}: + raise ValueError(signature_algorithm) + self.__signature_algorithm = signature_algorithm + + @classmethod + def create_key(cls, url: str, token: str, name: str, key_type: str, **kwargs) -> 'VaultKey': + if key_type not in {k.value for k in cls.KeyTypes}: + return ValueError(key_type) + + client = hvac.Client(url=url, token=token) + if not client.is_authenticated(): + raise cls.AuthenticationError + + response = client.secrets.transit.create_key(name=name, key_type=key_type) + response.raise_for_status() + return cls(url, token, name, **kwargs) + + @property + def public_key(self) -> str: + if self.__key_type == self.KeyTypes.ED25519.value: + keytype = self.__key_type + scheme = keytype + elif self.__key_type == self.KeyTypes.P_256.value: + keytype = 'ecdsa-sha2-nistp256' + scheme = keytype + elif self.__key_type in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_4096.value}: + keytype = 'rsa' + + if self.__signature_algorithm == self.SignatureAlgorithms.PSS.value: + scheme = 'rsassa-pss' + elif self.__signature_algorithm == self.SignatureAlgorithms.PKCS1.value: + scheme = 'rsa-pkcs1v15' + else: + raise ValueError(self.__key_type) + + _, size = self.__hash_algorithm.split('-') + scheme += f'-sha{size}' + else: + raise ValueError(self.__key_type) + + key_meta = format_keyval_to_metadata(keytype, scheme, self.__public_key, private=False) + return encode_canonical(key_meta) + + @property + def keyid(self) -> str: + digest_object = digest('sha256') + digest_object.update(self.public_key.encode('utf-8')) + return digest_object.hexdigest() + + # https://hvac.readthedocs.io/en/stable/usage/secrets_engines/transit.html#create-key + def __base64ify(self, bytes_or_str: Union[bytes, str]) -> str: + """Helper method to perform base64 
encoding across Python 2.7 and Python 3.X""" + + if sys.version_info[0] >= 3 and isinstance(bytes_or_str, str): + input_bytes = bytes_or_str.encode('utf8') + else: + input_bytes = bytes_or_str + + output_bytes = base64.urlsafe_b64encode(input_bytes) + if sys.version_info[0] >= 3: + return output_bytes.decode('ascii') + else: + return output_bytes + + # TODO: Consider passing prehashed input. + # TODO: Translate signature into something py-TUF understands... + def sign(self, signed: str) -> str: + response = self.__client.secrets.transit.sign_data( + name=self.__name, + hash_input=self.__base64ify(signed), + hash_algorithm=self.__hash_algorithm, + marshaling_algorithm=self.__marshaling_algorithm, + signature_algorithm=self.__signature_algorithm, + ) + return response['data']['signature'] + + # TODO: Consider passing prehashed input. + # TODO: Translate signature into something Vault understands... + def verify(self, signed: str, signature: str) -> bool: + response = self.__client.secrets.transit.verify_signed_data( + name=self.__name, + hash_input=self.__base64ify(signed), + hash_algorithm=self.__hash_algorithm, + marshaling_algorithm=self.__marshaling_algorithm, + signature_algorithm=self.__signature_algorithm, + signature=signature, + ) + return response['data']['valid'] From 1703c6bdb4a59aba25d967d68b064ed43f4fd1ba Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Sat, 18 Jul 2020 17:53:07 -0400 Subject: [PATCH 36/42] fix oversight in test Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 5a882482b4..2c2f968a2f 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -266,7 +266,7 @@ def test_VaultKey_RSA(self): NAME = f'test-{key_type}-key' for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: - for hash_algorithm in {VaultKey.HashAlgorithms.SHA2_224.value, 
VaultKey.HashAlgorithms.SHA2_384.value, VaultKey.HashAlgorithms.SHA2_512.value,}: + for hash_algorithm in {h.value for h in VaultKey.HashAlgorithms}: for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, marshaling_algorithm=marshaling_algorithm,) From e0346187bf91258b5e6ecb757def4cb02888a061 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Sat, 18 Jul 2020 17:55:29 -0400 Subject: [PATCH 37/42] update and automate requirements Signed-off-by: Trishank Karthik Kuppusamy --- requirements.txt | 27 +++++++-------------------- update-requirements.sh | 29 +++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 20 deletions(-) create mode 100755 update-requirements.sh diff --git a/requirements.txt b/requirements.txt index c4bcce87c6..fd7f98f874 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,25 +23,12 @@ # - Python version support is changed # - CI/CD build breaks due to updates (e.g. transitive dependency conflicts) # -# 1. Use this script to create a pinned requirements file for each Python -# version -# ``` -# for v in 2.7 3.5 3.6 3.7 3.8; do -# mkvirtualenv tuf-env-${v} -p python${v}; -# pip install pip-tools; -# pip-compile --no-header -o requirements-${v}.txt requirements.txt; -# deactivate; -# rmvirtualenv tuf-env-${v}; -# done; -# -# ``` -# 2. Use this command to merge per-version files -# `sort -o requirements-pinned.txt -u requirements-?.?.txt` -# 2. Manually add environment markers to requirements-pinned.txt -# 3. 
Use this command to remove per-version files -# `rm requirements-?.?.txt` +# ./update-requirements.sh # -securesystemslib[colors, crypto, pynacl] -requests -six +enum34; python_version < '3' +hvac +ipaddress; python_version < '3' iso8601 +requests +securesystemslib[colors, crypto, pynacl] +six \ No newline at end of file diff --git a/update-requirements.sh b/update-requirements.sh new file mode 100755 index 0000000000..c5edb87920 --- /dev/null +++ b/update-requirements.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +# bail on the first error +set -e + +# exception for python 2.7 +python2.7 -m pip install --user virtualenv +python2.7 -m virtualenv tuf-env-2.7 +source tuf-env-2.7/bin/activate +pip install pip-tools +pip-compile --no-header -o requirements-2.7.txt requirements.txt +deactivate +rm -rf tuf-env-2.7 + +# create a pinned requirements file for each supported MINOR Python3 version +for v in 3.5 3.6 3.7 3.8; do + python${v} -m venv tuf-env-${v} + source tuf-env-${v}/bin/activate + pip install pip-tools + pip-compile --no-header -o requirements-${v}.txt requirements.txt + deactivate + rm -rf tuf-env-${v} +done; + +# merge per-version files +sort -o requirements-pinned.txt -u requirements-?.?.txt + +# remove per-version files +rm requirements-?.?.txt \ No newline at end of file From f8b33f04bdf5b1bb8adbc3b21459a47f3756705d Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Mon, 20 Jul 2020 18:16:51 -0400 Subject: [PATCH 38/42] add P-384 and RSA-3072 Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 39 +++++++++++++++++++++++---------------- tuf/api/keys.py | 17 +++++++++++------ 2 files changed, 34 insertions(+), 22 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 2c2f968a2f..772b85d7fc 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -238,31 +238,38 @@ def test_VaultKey_Ed25519(self): def test_VaultKey_ECDSA(self): VAULT_ADDR = os.environ['VAULT_ADDR'] VAULT_TOKEN = os.environ['VAULT_TOKEN'] - 
KEY_TYPE = VaultKey.KeyTypes.P_256.value - NAME = f'test-{KEY_TYPE}-key' - for hash_algorithm in { - VaultKey.HashAlgorithms.SHA2_224.value, - VaultKey.HashAlgorithms.SHA2_384.value, - VaultKey.HashAlgorithms.SHA2_512.value - }: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=hash_algorithm) + def test(key_type, hash_algorithm, hash_algorithms): + NAME = f'test-{key_type}-key' - for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, signature_algorithm=signature_algorithm) + for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: + key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm, marshaling_algorithm=marshaling_algorithm,) + signed = f'Hello, {key_type}!' + signature = key.sign(signed) + self.assertTrue(key.verify(signed, signature)) - for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: - key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=VaultKey.HashAlgorithms.SHA2_256.value, marshaling_algorithm=marshaling_algorithm,) - signed = f'Hello, {KEY_TYPE}!' 
- signature = key.sign(signed) - self.assertTrue(key.verify(signed, signature)) + for hash_algorithm in hash_algorithms: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm) + + for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: + self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, signature_algorithm=signature_algorithm) + + + test(VaultKey.KeyTypes.P_256.value, VaultKey.HashAlgorithms.SHA2_256.value, {VaultKey.HashAlgorithms.SHA2_224.value, VaultKey.HashAlgorithms.SHA2_384.value, VaultKey.HashAlgorithms.SHA2_512.value}) + # FIXME: https://github.com/hvac/hvac/issues/605 + #test(VaultKey.KeyTypes.P_384.value, VaultKey.HashAlgorithms.SHA2_384.value, {VaultKey.HashAlgorithms.SHA2_224.value, VaultKey.HashAlgorithms.SHA2_256.value, VaultKey.HashAlgorithms.SHA2_512.value}) def test_VaultKey_RSA(self): VAULT_ADDR = os.environ['VAULT_ADDR'] VAULT_TOKEN = os.environ['VAULT_TOKEN'] - for key_type in {VaultKey.KeyTypes.RSA_2048.value, VaultKey.KeyTypes.RSA_4096.value}: + for key_type in { + VaultKey.KeyTypes.RSA_2048.value, + # FIXME: https://github.com/hvac/hvac/issues/605 + #VaultKey.KeyTypes.RSA_3072.value, + VaultKey.KeyTypes.RSA_4096.value + }: NAME = f'test-{key_type}-key' for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 0df7f53672..b5cb72c704 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -120,7 +120,9 @@ class AuthenticationError(Exception): pass class KeyTypes(Enum): ED25519 = 'ed25519' P_256 = 'ecdsa-p256' + P_384 = 'ecdsa-p384' RSA_2048 = 'rsa-2048' + RSA_3072 = 'rsa-3072' RSA_4096 = 'rsa-4096' @unique @@ -184,17 +186,20 @@ def __init__( raise ValueError(hash_algorithm) if key_type == self.KeyTypes.ED25519.value: raise ValueError(hash_algorithm) - # P-256 only takes SHA2-256. 
# https://tools.ietf.org/html/rfc5656#section-6.2.1 + # P-256 only takes SHA2-256. if key_type == self.KeyTypes.P_256.value and hash_algorithm != self.HashAlgorithms.SHA2_256.value: raise ValueError(hash_algorithm) + # P-384 only takes SHA2-384. + if key_type == self.KeyTypes.P_384.value and hash_algorithm != self.HashAlgorithms.SHA2_384.value: + raise ValueError(hash_algorithm) self.__hash_algorithm = hash_algorithm - # A valid marshaling algorithm is only good for P-256. + # A valid marshaling algorithm is only good for P-256 and P-384. if marshaling_algorithm is not None: if marshaling_algorithm not in {m.value for m in self.MarshalingAlgorithms}: raise ValueError(marshaling_algorithm) - if key_type != self.KeyTypes.P_256.value: + if key_type not in {self.KeyTypes.P_256.value, self.KeyTypes.P_384.value}: raise ValueError(marshaling_algorithm) self.__marshaling_algorithm = marshaling_algorithm @@ -202,7 +207,7 @@ def __init__( if signature_algorithm is not None: if signature_algorithm not in {s.value for s in self.SignatureAlgorithms}: raise ValueError(signature_algorithm) - if key_type not in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_4096.value}: + if key_type not in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_3072.value, self.KeyTypes.RSA_4096.value}: raise ValueError(signature_algorithm) self.__signature_algorithm = signature_algorithm @@ -224,10 +229,10 @@ def public_key(self) -> str: if self.__key_type == self.KeyTypes.ED25519.value: keytype = self.__key_type scheme = keytype - elif self.__key_type == self.KeyTypes.P_256.value: + elif self.__key_type in {self.KeyTypes.P_256.value, self.KeyTypes.P_384.value}: keytype = 'ecdsa-sha2-nistp256' scheme = keytype - elif self.__key_type in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_4096.value}: + elif self.__key_type in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_3072.value, self.KeyTypes.RSA_4096.value}: keytype = 'rsa' if self.__signature_algorithm == self.SignatureAlgorithms.PSS.value: From 
f5eb59b30589cf9f2c777cd059be8ff062f4cc4e Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Tue, 21 Jul 2020 16:22:23 -0400 Subject: [PATCH 39/42] Python verification of signatures from Go Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 193 +++++++++++++++------- tuf/api/keys.py | 364 ++++++++++++++++++++++++++++-------------- tuf/api/metadata.py | 1 + 3 files changed, 388 insertions(+), 170 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 772b85d7fc..70f88781ec 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -23,27 +23,30 @@ # Help with Python 3 compatibility, where the print statement is a function, an # implicit relative import is invalid, and the '/' operator performs true # division. Example: print 'hello world' raises a 'SyntaxError' exception. -from __future__ import print_function from __future__ import absolute_import from __future__ import division +from __future__ import print_function from __future__ import unicode_literals -import unittest import logging -import tempfile -import shutil -import sys -import errno import os +import shutil +import tempfile +import unittest + from datetime import timedelta from dateutil.relativedelta import relativedelta -from tuf.api import metadata -from tuf.api import keys -from tuf.api.keys import VaultKey - -import iso8601 -import six +from tuf.api.metadata import ( + Snapshot, + Timestamp, +) +from tuf.api.keys import ( + KeyRing, + RAMKey, + Threshold, + VaultKey, +) logger = logging.getLogger(__name__) @@ -81,8 +84,8 @@ def tearDownClass(cls): def _load_key_ring(self): key_list = [] - root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), - 'RSA', 'password') + root_key = RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), + 'rsassa-pss-sha256', 'password') key_list.append(root_key) for key_file in os.listdir(self.keystore_dir): @@ -94,17 +97,17 @@ def _load_key_ring(self): # root key is 
loaded continue - key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, key_file), - 'ED25519', 'password') + key = RAMKey.read_from_file(os.path.join(self.keystore_dir, key_file), + 'ed25519', 'password') key_list.append(key) - threshold = keys.Threshold(1, 1) - return keys.KeyRing(threshold=threshold, keys=key_list) + threshold = Threshold(1, 5) + return KeyRing(threshold=threshold, keys=key_list) def test_metadata_base(self): # Use of Snapshot is arbitrary, we're just testing the base class features # with real data snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json') - md = metadata.Snapshot.read_from_json(snapshot_path) + md = Snapshot.read_from_json(snapshot_path) self.assertEqual(md.signed.version, 1) md.signed.bump_version() @@ -118,7 +121,7 @@ def test_metadata_base(self): def test_metadata_snapshot(self): snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json') - snapshot = metadata.Snapshot.read_from_json(snapshot_path) + snapshot = Snapshot.read_from_json(snapshot_path) key_ring = self._load_key_ring() snapshot.verify(key_ring) @@ -144,7 +147,7 @@ def test_metadata_snapshot(self): def test_metadata_timestamp(self): timestamp_path = os.path.join(self.repo_dir, 'metadata', 'timestamp.json') - timestamp = metadata.Timestamp.read_from_json(timestamp_path) + timestamp = Timestamp.read_from_json(timestamp_path) key_ring = self._load_key_ring() timestamp.verify(key_ring) @@ -182,38 +185,30 @@ def test_metadata_timestamp(self): def test_Threshold(self): # test default values - keys.Threshold() + Threshold() # test correct arguments - keys.Threshold(least=4, most=5) + Threshold(least=4, most=5) # test incorrect input - self.assertRaises(ValueError, keys.Threshold, 5, 4) - self.assertRaises(ValueError, keys.Threshold, 0, 5) - self.assertRaises(ValueError, keys.Threshold, 5, 0) + self.assertRaises(ValueError, Threshold, 5, 4) + self.assertRaises(ValueError, Threshold, 0, 5) + self.assertRaises(ValueError, Threshold, 
5, 0) def test_KeyRing(self): key_list = [] - root_key = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), - 'RSA', 'password') - root_key2 = keys.RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key2'), - 'ED25519', 'password') + root_key = RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key'), + 'rsassa-pss-sha256', 'password') + root_key2 = RAMKey.read_from_file(os.path.join(self.keystore_dir, 'root_key2'), + 'ed25519', 'password') key_list.append(root_key) key_list.append(root_key2) - threshold = keys.Threshold() - keyring = keys.KeyRing(threshold, key_list) + threshold = Threshold(1, 2) + keyring = KeyRing(threshold, key_list) self.assertEqual(keyring.threshold, threshold) self.assertEqual(keyring.keys, key_list) - def test_RAMKey_read_from_file(self): - filename = os.path.join(self.keystore_dir, 'root_key') - algorithm = 'RSA' - passphrase = 'password' - - self.assertTrue(isinstance(keys.RAMKey.read_from_file(filename, algorithm, passphrase), keys.RAMKey)) - - def test_VaultKey_Ed25519(self): VAULT_ADDR = os.environ['VAULT_ADDR'] VAULT_TOKEN = os.environ['VAULT_TOKEN'] @@ -221,13 +216,37 @@ def test_VaultKey_Ed25519(self): NAME = f'test-{KEY_TYPE}-key' for hash_algorithm in {h.value for h in VaultKey.HashAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, hash_algorithm=hash_algorithm) + self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + KEY_TYPE, + hash_algorithm=hash_algorithm, + ) for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, marshaling_algorithm=marshaling_algorithm) + self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + KEY_TYPE, + marshaling_algorithm=marshaling_algorithm, + ) for signature_algorithm in {s.value for s in 
VaultKey.SignatureAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE, signature_algorithm=signature_algorithm) + self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + KEY_TYPE, + signature_algorithm=signature_algorithm, + ) key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, KEY_TYPE) signed = f'Hello, {KEY_TYPE}!' @@ -243,21 +262,72 @@ def test(key_type, hash_algorithm, hash_algorithms): NAME = f'test-{key_type}-key' for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: - key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm, marshaling_algorithm=marshaling_algorithm,) + key = VaultKey.create_key( + VAULT_ADDR, + VAULT_TOKEN, + NAME, + key_type, + hash_algorithm=hash_algorithm, + marshaling_algorithm=marshaling_algorithm, + ) signed = f'Hello, {key_type}!' signature = key.sign(signed) self.assertTrue(key.verify(signed, signature)) for hash_algorithm in hash_algorithms: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm) + self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + key_type, + hash_algorithm=hash_algorithm + ) for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, signature_algorithm=signature_algorithm) - - - test(VaultKey.KeyTypes.P_256.value, VaultKey.HashAlgorithms.SHA2_256.value, {VaultKey.HashAlgorithms.SHA2_224.value, VaultKey.HashAlgorithms.SHA2_384.value, VaultKey.HashAlgorithms.SHA2_512.value}) - # FIXME: https://github.com/hvac/hvac/issues/605 - #test(VaultKey.KeyTypes.P_384.value, VaultKey.HashAlgorithms.SHA2_384.value, {VaultKey.HashAlgorithms.SHA2_224.value, VaultKey.HashAlgorithms.SHA2_256.value, VaultKey.HashAlgorithms.SHA2_512.value}) + 
self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + key_type, + signature_algorithm=signature_algorithm + ) + + test( + VaultKey.KeyTypes.P_256.value, + VaultKey.HashAlgorithms.SHA2_256.value, + { + VaultKey.HashAlgorithms.SHA2_224.value, + VaultKey.HashAlgorithms.SHA2_384.value, + VaultKey.HashAlgorithms.SHA2_512.value + } + ) + + # FIXME: Unfortunately, py-TUF does not yet support P-384. + # test( + # VaultKey.KeyTypes.P_384.value, + # VaultKey.HashAlgorithms.SHA2_384.value, + # { + # VaultKey.HashAlgorithms.SHA2_224.value, + # VaultKey.HashAlgorithms.SHA2_256.value, + # VaultKey.HashAlgorithms.SHA2_512.value + # } + # ) + + # FIXME: Unfortunately, py-TUF does not yet support P-521. + # https://github.com/hvac/hvac/pull/608 + # test( + # VaultKey.KeyTypes.P_521.value, + # VaultKey.HashAlgorithms.SHA2_512.value, + # { + # VaultKey.HashAlgorithms.SHA2_224.value, + # VaultKey.HashAlgorithms.SHA2_256.value, + # VaultKey.HashAlgorithms.SHA2_384.value + # } + # ) def test_VaultKey_RSA(self): @@ -275,9 +345,24 @@ def test_VaultKey_RSA(self): for signature_algorithm in {s.value for s in VaultKey.SignatureAlgorithms}: for hash_algorithm in {h.value for h in VaultKey.HashAlgorithms}: for marshaling_algorithm in {m.value for m in VaultKey.MarshalingAlgorithms}: - self.assertRaises(ValueError, VaultKey.create_key, VAULT_ADDR, VAULT_TOKEN, NAME, key_type, marshaling_algorithm=marshaling_algorithm,) - - key = VaultKey.create_key(VAULT_ADDR, VAULT_TOKEN, NAME, key_type, hash_algorithm=hash_algorithm, signature_algorithm=signature_algorithm,) + self.assertRaises( + ValueError, + VaultKey.create_key, + VAULT_ADDR, + VAULT_TOKEN, + NAME, + key_type, + marshaling_algorithm=marshaling_algorithm, + ) + + key = VaultKey.create_key( + VAULT_ADDR, + VAULT_TOKEN, + NAME, + key_type, + hash_algorithm=hash_algorithm, + signature_algorithm=signature_algorithm, + ) signed = f'Hello, {key_type}!' 
signature = key.sign(signed) self.assertTrue(key.verify(signed, signature)) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index b5cb72c704..b6db280573 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -4,15 +4,15 @@ from abc import ABC, abstractmethod from enum import Enum, unique -from typing import Any, List, Optional, Union +from typing import Dict, List, Optional, Union import base64 +import binascii import logging import sys # 3rd-party. -from securesystemslib.hash import digest -from securesystemslib.formats import encode_canonical +from cryptography.hazmat.primitives.asymmetric.utils import encode_dss_signature from securesystemslib.interface import ( import_ecdsa_privatekey_from_file, import_ed25519_privatekey_from_file, @@ -21,8 +21,14 @@ from securesystemslib.keys import ( create_signature, format_keyval_to_metadata, + format_metadata_to_key, verify_signature, ) +from securesystemslib.rsa_keys import ( + SaltLength, + verify_rsa_signature, +) +from securesystemslib.storage import StorageBackendInterface import hvac @@ -40,6 +46,20 @@ def __init__(self, least: int = 1, most: int = 1): self.least = least self.most = most +BytesOrStr = Union[bytes, str] + +# FIXME: what complicates implementation are the following issues: +# 1. Coupling hashing/signature schemes within the key format itself instead of +# during signature time, which is a bad idea for various reasons. +# 2. Allowing schemes to be passed when importing RSA but not ECDSA or Ed25519 +# files. This is inconsistent writing of keys to disk (RSA keys are written as +# naked PEM files w/o accompanying information such as keyid, keytype or +# scheme, but not ECDSA or Ed25519). +# 3. Ignoring schemes when passed anyway (e.g., hardcoding P-256 in +# import_ecdsakey_from_pem). +# 4. Confusing keytype with scheme. With RSA, there is a meaningful +# distinction, but not with ECDSA or Ed25519. 
+ class Key(ABC): @abstractmethod @@ -51,17 +71,18 @@ def __init__(self) -> None: def keyid(self) -> str: raise NotImplementedError - @property - @abstractmethod - def public_key(self) -> str: - raise NotImplementedError + def _encode(self, bytes_or_str: BytesOrStr, encoding='utf-8') -> bytes: + if sys.version_info[0] >= 3 and isinstance(bytes_or_str, str): + return bytes_or_str.encode(encoding=encoding) + else: + return bytes_or_str @abstractmethod - def sign(self, signed: str) -> str: + def sign(self, signed: BytesOrStr) -> Dict: raise NotImplementedError @abstractmethod - def verify(self, signed: str, signature: str) -> bool: + def verify(self, signed: BytesOrStr, signature: Dict) -> bool: raise NotImplementedError Keys = List[Key] @@ -69,48 +90,77 @@ def verify(self, signed: str, signature: str) -> bool: class KeyRing: def __init__(self, threshold: Threshold, keys: Keys): - if len(keys) >= threshold.least: - logging.warning(f'{len(keys)} >= {threshold.least}') - if len(keys) <= threshold.most: - logging.warning(f'{len(keys)} <= {threshold.most}') + if len(keys) < threshold.least: + logging.warning(f'{len(keys)} < {threshold.least}') + if len(keys) > threshold.most: + logging.warning(f'{len(keys)} > {threshold.most}') self.threshold = threshold self.keys = keys -# Specific types of keys, such as those in RAM, or on HSMs (TODO). +# Specific types of keys, such as those in RAM, Hashicorp Vault, +# AWS KMS (TODO), Azure Key Vault (TODO), +# Google Cloud Key Management Service (TODO), or on HSMs (TODO). class RAMKey(Key): - # FIXME: Need a way to load *either* private or public keys. - KEY_TYPES = { - 'ECDSA': import_ecdsa_privatekey_from_file, - 'ED25519': import_ed25519_privatekey_from_file, - 'RSA': import_rsa_privatekey_from_file + # In practice, these are the only schemes used in py-TUF. 
+ FileHandlers = { + 'ecdsa-sha2-nistp256': import_ecdsa_privatekey_from_file, + 'ed25519': import_ed25519_privatekey_from_file, + 'rsassa-pss-sha256': import_rsa_privatekey_from_file, } - def __init__(self, obj: Any) -> None: # pylint: disable=super-init-not-called + def __init__(self, obj: Dict) -> None: # pylint: disable=super-init-not-called self.__obj = obj @classmethod - def read_from_file(cls, filename: str, key_type: str, passphrase: Optional[str] = None) -> 'RAMKey': - handler = cls.KEY_TYPES.get(key_type) + def read_from_file( + cls, + filename: str, + scheme: str, + passphrase: Optional[str] = None, + storage_backend: Optional[StorageBackendInterface] = None, + ) -> 'RAMKey': + handler = cls.FileHandlers.get(scheme) if not handler: - return ValueError(key_type) - obj = handler(filename, password=passphrase) + return ValueError(scheme) + obj = handler( + filename, + password=passphrase, + storage_backend=storage_backend + ) return cls(obj) @property def keyid(self) -> str: return self.__obj['keyid'] - @property - def public_key(self) -> str: - return self.__obj['keyval']['public'] + def sign(self, signed: BytesOrStr) -> Dict: + signed_bytes = self._encode(signed) + return create_signature(self.__obj, signed_bytes) - def sign(self, signed: str) -> str: - return create_signature(self.__obj, signed) + def _verify_rsa_signature( + self, + signed: BytesOrStr, + signature: Dict, + salt_length: int = SaltLength.PSSSaltLengthEqualsHash + ) -> bool: + sig = signature['sig'] + sig = binascii.unhexlify(sig.encode('utf-8')) + scheme = self.__obj['scheme'] + public = self.__obj['keyval']['public'] + signed_bytes = self._encode(signed) + return verify_rsa_signature( + sig, + scheme, + public, + signed_bytes, + salt_length=salt_length + ) - def verify(self, signed: str, signature: str) -> bool: - return verify_signature(self.__obj, signature, signed) + def verify(self, signed: BytesOrStr, signature: Dict) -> bool: + signed_bytes = self._encode(signed) + return 
verify_signature(self.__obj, signature, signed_bytes) class VaultKey(Key): @@ -121,6 +171,7 @@ class KeyTypes(Enum): ED25519 = 'ed25519' P_256 = 'ecdsa-p256' P_384 = 'ecdsa-p384' + P_521 = 'ecdsa-p521' RSA_2048 = 'rsa-2048' RSA_3072 = 'rsa-3072' RSA_4096 = 'rsa-4096' @@ -139,67 +190,45 @@ class SignatureAlgorithms(Enum): @unique class MarshalingAlgorithms(Enum): - ANS1 = 'asn1' + ASN1 = 'asn1' JWS = 'jws' - def __init__( + def __set_algorithms( self, - url: str, - token: str, - name: str, hash_algorithm: Optional[str] = None, marshaling_algorithm: Optional[str] = None, signature_algorithm: Optional[str] = None, - ) -> None: # pylint: disable=super-init-not-called - """Reads the key using the Transit Secrets Engine as a side effect.""" - - self.__client = hvac.Client(url=url, token=token) - if not self.__client.is_authenticated(): - raise self.AuthenticationError - - # Guess why this isn't a requests.Response? - # https://github.com/hvac/hvac/pull/537#issuecomment-660304707 - response = self.__client.secrets.transit.read_key(name=name) - self.__name = name - - # Get public key. - data = response['data'] - - key_type = data['type'] - if key_type not in {k.value for k in self.KeyTypes}: - return ValueError(key_type) - self.__key_type = data['type'] - - # NOTE: The documentation is not clear, but presumably the returned - # keys are different versions of keys under the same name. Therefore, - # we shall select the one with the latest version number. - # NOTE: We are also taking it for granted that Vault will generate - # public keys in formats TUF will recognize out of the box. - keys = data['keys'] - latest_version = data['latest_version'] - key = keys.get(str(latest_version)) - self.__public_key = key['public_key'] - + ) -> None: # A valid hash algorithm is only good for ECDSA or RSA. 
if hash_algorithm is not None: if hash_algorithm not in {h.value for h in self.HashAlgorithms}: raise ValueError(hash_algorithm) - if key_type == self.KeyTypes.ED25519.value: + if self.__key_type == self.KeyTypes.ED25519.value: raise ValueError(hash_algorithm) # https://tools.ietf.org/html/rfc5656#section-6.2.1 # P-256 only takes SHA2-256. - if key_type == self.KeyTypes.P_256.value and hash_algorithm != self.HashAlgorithms.SHA2_256.value: + if self.__key_type == self.KeyTypes.P_256.value and \ + hash_algorithm != self.HashAlgorithms.SHA2_256.value: raise ValueError(hash_algorithm) # P-384 only takes SHA2-384. - if key_type == self.KeyTypes.P_384.value and hash_algorithm != self.HashAlgorithms.SHA2_384.value: + if self.__key_type == self.KeyTypes.P_384.value and \ + hash_algorithm != self.HashAlgorithms.SHA2_384.value: + raise ValueError(hash_algorithm) + # P-521 only takes SHA2-512. + if self.__key_type == self.KeyTypes.P_521.value and \ + hash_algorithm != self.HashAlgorithms.SHA2_512.value: raise ValueError(hash_algorithm) self.__hash_algorithm = hash_algorithm - # A valid marshaling algorithm is only good for P-256 and P-384. + # A valid marshaling algorithm is only good for the NIST P-curves. 
if marshaling_algorithm is not None: if marshaling_algorithm not in {m.value for m in self.MarshalingAlgorithms}: raise ValueError(marshaling_algorithm) - if key_type not in {self.KeyTypes.P_256.value, self.KeyTypes.P_384.value}: + if self.__key_type not in { + self.KeyTypes.P_256.value, + self.KeyTypes.P_384.value, + self.KeyTypes.P_521.value, + }: raise ValueError(marshaling_algorithm) self.__marshaling_algorithm = marshaling_algorithm @@ -207,32 +236,39 @@ def __init__( if signature_algorithm is not None: if signature_algorithm not in {s.value for s in self.SignatureAlgorithms}: raise ValueError(signature_algorithm) - if key_type not in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_3072.value, self.KeyTypes.RSA_4096.value}: + if self.__key_type not in { + self.KeyTypes.RSA_2048.value, + self.KeyTypes.RSA_3072.value, + self.KeyTypes.RSA_4096.value + }: raise ValueError(signature_algorithm) self.__signature_algorithm = signature_algorithm - @classmethod - def create_key(cls, url: str, token: str, name: str, key_type: str, **kwargs) -> 'VaultKey': - if key_type not in {k.value for k in cls.KeyTypes}: - return ValueError(key_type) - - client = hvac.Client(url=url, token=token) - if not client.is_authenticated(): - raise cls.AuthenticationError - - response = client.secrets.transit.create_key(name=name, key_type=key_type) - response.raise_for_status() - return cls(url, token, name, **kwargs) - - @property - def public_key(self) -> str: + def __get_tuf_public_key(self, vault_public_key: str) -> Dict: if self.__key_type == self.KeyTypes.ED25519.value: keytype = self.__key_type scheme = keytype - elif self.__key_type in {self.KeyTypes.P_256.value, self.KeyTypes.P_384.value}: + # Vault encodes Ed25519 public keys in standard base64, + # so decode it into a format py-TUF understands. 
+ key_value = base64.standard_b64decode(vault_public_key) + key_value = binascii.hexlify(key_value).decode() + elif self.__key_type == self.KeyTypes.P_256.value: keytype = 'ecdsa-sha2-nistp256' scheme = keytype - elif self.__key_type in {self.KeyTypes.RSA_2048.value, self.KeyTypes.RSA_3072.value, self.KeyTypes.RSA_4096.value}: + key_value = vault_public_key + elif self.__key_type == self.KeyTypes.P_384.value: + keytype = 'ecdsa-sha2-nistp384' + scheme = keytype + key_value = vault_public_key + elif self.__key_type == self.KeyTypes.P_521.value: + keytype = 'ecdsa-sha2-nistp521' + scheme = keytype + key_value = vault_public_key + elif self.__key_type in { + self.KeyTypes.RSA_2048.value, + self.KeyTypes.RSA_3072.value, + self.KeyTypes.RSA_4096.value + }: keytype = 'rsa' if self.__signature_algorithm == self.SignatureAlgorithms.PSS.value: @@ -244,26 +280,81 @@ def public_key(self) -> str: _, size = self.__hash_algorithm.split('-') scheme += f'-sha{size}' + key_value = vault_public_key else: raise ValueError(self.__key_type) - key_meta = format_keyval_to_metadata(keytype, scheme, self.__public_key, private=False) - return encode_canonical(key_meta) + key_meta = format_keyval_to_metadata( + keytype, + scheme, + {'public': key_value}, + private=False + ) + key_dict, _ = format_metadata_to_key(key_meta) + return key_dict + + def __init__( + self, + url: str, + token: str, + name: str, + hash_algorithm: Optional[str] = None, + marshaling_algorithm: Optional[str] = None, + signature_algorithm: Optional[str] = None, + ) -> None: # pylint: disable=super-init-not-called + """Reads the key using the Transit Secrets Engine as a side effect.""" + + self.__client = hvac.Client(url=url, token=token) + if not self.__client.is_authenticated(): + raise self.AuthenticationError + + # Guess why this isn't a requests.Response? 
+ # https://github.com/hvac/hvac/pull/537#issuecomment-660304707 + response = self.__client.secrets.transit.read_key(name=name) + self.__name = name + + # Get public key. + data = response['data'] + key_type = data['type'] + if key_type not in {k.value for k in self.KeyTypes}: + return ValueError(key_type) + self.__key_type = data['type'] + self.__set_algorithms(hash_algorithm, marshaling_algorithm, signature_algorithm) + + # NOTE: The documentation is not clear, but presumably the returned + # keys are different versions of keys under the same name. Therefore, + # we shall select the one with the latest version number. + # NOTE: We are also taking it for granted that Vault will generate + # public keys in formats TUF will recognize out of the box. + keys = data['keys'] + latest_version = data['latest_version'] + key = keys.get(str(latest_version)) + vault_public_key = key['public_key'] + tuf_public_key = self.__get_tuf_public_key(vault_public_key) + self.__ram_key = RAMKey(tuf_public_key) + + @classmethod + def create_key(cls, url: str, token: str, name: str, key_type: str, **kwargs) -> 'VaultKey': + if key_type not in {k.value for k in cls.KeyTypes}: + return ValueError(key_type) + + client = hvac.Client(url=url, token=token) + if not client.is_authenticated(): + raise cls.AuthenticationError + + response = client.secrets.transit.create_key(name=name, key_type=key_type) + response.raise_for_status() + return cls(url, token, name, **kwargs) @property def keyid(self) -> str: - digest_object = digest('sha256') - digest_object.update(self.public_key.encode('utf-8')) - return digest_object.hexdigest() + return self.__ram_key.keyid # https://hvac.readthedocs.io/en/stable/usage/secrets_engines/transit.html#create-key - def __base64ify(self, bytes_or_str: Union[bytes, str]) -> str: + def __base64ify(self, bytes_or_str: BytesOrStr) -> str: """Helper method to perform base64 encoding across Python 2.7 and Python 3.X""" - if sys.version_info[0] >= 3 and 
isinstance(bytes_or_str, str): - input_bytes = bytes_or_str.encode('utf8') - else: - input_bytes = bytes_or_str + input_bytes = self._encode(bytes_or_str) output_bytes = base64.urlsafe_b64encode(input_bytes) if sys.version_info[0] >= 3: @@ -271,27 +362,68 @@ def __base64ify(self, bytes_or_str: Union[bytes, str]) -> str: else: return output_bytes - # TODO: Consider passing prehashed input. - # TODO: Translate signature into something py-TUF understands... - def sign(self, signed: str) -> str: - response = self.__client.secrets.transit.sign_data( - name=self.__name, - hash_input=self.__base64ify(signed), - hash_algorithm=self.__hash_algorithm, - marshaling_algorithm=self.__marshaling_algorithm, - signature_algorithm=self.__signature_algorithm, - ) - return response['data']['signature'] + # https://github.com/matrix-org/python-unpaddedbase64/blob/c804b5753f4805cf3d129fa4e7febef5c032b6ca/unpaddedbase64.py#L29-L40 + def __rawurl_b64decode(self, input_string: str) -> bytes: + """Decode a base64 string to bytes inferring padding from the length of the + string.""" + + input_bytes = input_string.encode("ascii") + input_len = len(input_bytes) + padding = b"=" * (3 - ((input_len + 3) % 4)) + decode = base64.b64decode + if u'-' in input_string or u'_' in input_string: + decode = base64.urlsafe_b64decode + output_bytes = decode(input_bytes + padding) + return output_bytes + + def __decode_sig(self, sig: str) -> str: + # https://github.com/hashicorp/vault/blob/f6547fa8e820b6ebbfa15018477a138b38707d91/sdk/helper/keysutil/policy.go#L1217-L1224 + if self.__marshaling_algorithm == self.MarshalingAlgorithms.JWS.value: + # https://github.com/golang/go/blob/11f92e9dae96939c2d784ae963fa7763c300660b/src/encoding/base64/base64.go#L110-L113 + sig = self.__rawurl_b64decode(sig) + else: + sig = base64.standard_b64decode(sig) + + sig = binascii.hexlify(sig).decode() - # TODO: Consider passing prehashed input. - # TODO: Translate signature into something Vault understands... 
- def verify(self, signed: str, signature: str) -> bool: - response = self.__client.secrets.transit.verify_signed_data( + # https://github.com/hashicorp/vault/blob/f6547fa8e820b6ebbfa15018477a138b38707d91/sdk/helper/keysutil/policy.go#L1303-L1311 + if self.__marshaling_algorithm == self.MarshalingAlgorithms.JWS.value: + sig_len = len(sig) // 2 + rb = int(sig[:sig_len], 16) + sb = int(sig[sig_len:], 16) + # https://cryptography.io/en/latest/hazmat/primitives/asymmetric/utils/#cryptography.hazmat.primitives.asymmetric.utils.encode_dss_signature + sig = encode_dss_signature(rb, sb) + sig = binascii.hexlify(sig).decode() + + return sig + + # TODO: Allow passing prehashed input. + def sign(self, signed: BytesOrStr) -> Dict: + response = self.__client.secrets.transit.sign_data( name=self.__name, hash_input=self.__base64ify(signed), hash_algorithm=self.__hash_algorithm, marshaling_algorithm=self.__marshaling_algorithm, signature_algorithm=self.__signature_algorithm, - signature=signature, ) - return response['data']['valid'] + # vault:key-version-number:standard-base64-encoded-signature + _, _, sig = response['data']['signature'].split(':') + return { + 'keyid': self.keyid, + 'sig': self.__decode_sig(sig) + } + + # TODO: Allow passing prehashed input. + def verify(self, signed: BytesOrStr, signature: Dict) -> bool: + if self.__key_type in { + self.KeyTypes.RSA_2048.value, + self.KeyTypes.RSA_3072.value, + self.KeyTypes.RSA_4096.value + } and self.__signature_algorithm == self.SignatureAlgorithms.PSS.value: + return self.__ram_key._verify_rsa_signature( + signed, + signature, + salt_length=SaltLength.PSSSaltLengthAuto + ) + else: + return self.__ram_key.verify(signed, signature) diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py index 4d8f43f666..76044c8736 100644 --- a/tuf/api/metadata.py +++ b/tuf/api/metadata.py @@ -44,6 +44,7 @@ from typing import Any, Dict, List, Optional import json +import logging import tempfile # 3rd-party. 
From b9a9050196ca9615bdc0f58b59616ac7c5a98be4 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Tue, 21 Jul 2020 16:45:47 -0400 Subject: [PATCH 40/42] minor comments Signed-off-by: Trishank Karthik Kuppusamy --- tests/test_tuf_api.py | 5 +++-- tuf/api/keys.py | 6 ++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/test_tuf_api.py b/tests/test_tuf_api.py index 70f88781ec..fde079dbc3 100644 --- a/tests/test_tuf_api.py +++ b/tests/test_tuf_api.py @@ -307,6 +307,7 @@ def test(key_type, hash_algorithm, hash_algorithms): ) # FIXME: Unfortunately, py-TUF does not yet support P-384. + # https://github.com/hvac/hvac/pull/606 # test( # VaultKey.KeyTypes.P_384.value, # VaultKey.HashAlgorithms.SHA2_384.value, @@ -336,8 +337,8 @@ def test_VaultKey_RSA(self): for key_type in { VaultKey.KeyTypes.RSA_2048.value, - # FIXME: https://github.com/hvac/hvac/issues/605 - #VaultKey.KeyTypes.RSA_3072.value, + # https://github.com/hvac/hvac/issues/605 + VaultKey.KeyTypes.RSA_3072.value, VaultKey.KeyTypes.RSA_4096.value }: NAME = f'test-{key_type}-key' diff --git a/tuf/api/keys.py b/tuf/api/keys.py index b6db280573..0d091564c0 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -308,8 +308,7 @@ def __init__( if not self.__client.is_authenticated(): raise self.AuthenticationError - # Guess why this isn't a requests.Response? - # https://github.com/hvac/hvac/pull/537#issuecomment-660304707 + # https://github.com/hvac/hvac/issues/604 response = self.__client.secrets.transit.read_key(name=name) self.__name = name @@ -324,8 +323,6 @@ def __init__( # NOTE: The documentation is not clear, but presumably the returned # keys are different versions of keys under the same name. Therefore, # we shall select the one with the latest version number. - # NOTE: We are also taking it for granted that Vault will generate - # public keys in formats TUF will recognize out of the box. 
keys = data['keys'] latest_version = data['latest_version'] key = keys.get(str(latest_version)) @@ -420,6 +417,7 @@ def verify(self, signed: BytesOrStr, signature: Dict) -> bool: self.KeyTypes.RSA_3072.value, self.KeyTypes.RSA_4096.value } and self.__signature_algorithm == self.SignatureAlgorithms.PSS.value: + # https://github.com/secure-systems-lab/securesystemslib/pull/262 return self.__ram_key._verify_rsa_signature( signed, signature, From 3c0cc245845f55d870f682155d210e555bc230d3 Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Fri, 31 Jul 2020 14:12:47 -0400 Subject: [PATCH 41/42] update hvac Signed-off-by: Trishank Karthik Kuppusamy --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index fd7f98f874..3349b28038 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,7 +26,8 @@ # ./update-requirements.sh # enum34; python_version < '3' -hvac +# https://github.com/hvac/hvac/releases/tag/v0.10.5 +hvac>=0.10.5 ipaddress; python_version < '3' iso8601 requests From 495e10f346342892b3ec645b3ce488ede64dd30d Mon Sep 17 00:00:00 2001 From: Trishank Karthik Kuppusamy Date: Wed, 5 Aug 2020 18:55:51 -0400 Subject: [PATCH 42/42] update how salt lengths are computed Signed-off-by: Trishank Karthik Kuppusamy --- tuf/api/keys.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tuf/api/keys.py b/tuf/api/keys.py index 0d091564c0..3cbbca0f20 100644 --- a/tuf/api/keys.py +++ b/tuf/api/keys.py @@ -25,7 +25,9 @@ verify_signature, ) from securesystemslib.rsa_keys import ( - SaltLength, + HashSaltLengthType, + MaxSaltLengthType, + SaltLengthType, verify_rsa_signature, ) from securesystemslib.storage import StorageBackendInterface @@ -143,7 +145,7 @@ def _verify_rsa_signature( self, signed: BytesOrStr, signature: Dict, - salt_length: int = SaltLength.PSSSaltLengthEqualsHash + salt_length_type: SaltLengthType = HashSaltLengthType ) -> bool: sig = signature['sig'] sig = 
binascii.unhexlify(sig.encode('utf-8')) @@ -155,7 +157,7 @@ def _verify_rsa_signature( scheme, public, signed_bytes, - salt_length=salt_length + salt_length_type=salt_length_type ) def verify(self, signed: BytesOrStr, signature: Dict) -> bool: @@ -421,7 +423,7 @@ def verify(self, signed: BytesOrStr, signature: Dict) -> bool: return self.__ram_key._verify_rsa_signature( signed, signature, - salt_length=SaltLength.PSSSaltLengthAuto + salt_length_type=MaxSaltLengthType ) else: return self.__ram_key.verify(signed, signature)