diff --git a/tests/test_api.py b/tests/test_api.py
index 8a3045d44e..1d55b302a6 100755
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -183,7 +183,7 @@ def test_to_from_bytes(self):
 
     def test_sign_verify(self):
         root_path = os.path.join(self.repo_dir, 'metadata', 'root.json')
-        root:Root = Metadata.from_file(root_path).signed
+        root = Metadata[Root].from_file(root_path).signed
 
         # Locate the public keys we need from root
         targets_keyid = next(iter(root.roles["targets"].keyids))
@@ -302,7 +302,7 @@ def test_metadata_base(self):
     def test_metadata_snapshot(self):
         snapshot_path = os.path.join(
             self.repo_dir, 'metadata', 'snapshot.json')
-        snapshot = Metadata.from_file(snapshot_path)
+        snapshot = Metadata[Snapshot].from_file(snapshot_path)
 
         # Create a MetaFile instance representing what we expect
         # the updated data to be.
@@ -321,7 +321,7 @@ def test_metadata_snapshot(self):
     def test_metadata_timestamp(self):
         timestamp_path = os.path.join(
             self.repo_dir, 'metadata', 'timestamp.json')
-        timestamp = Metadata.from_file(timestamp_path)
+        timestamp = Metadata[Timestamp].from_file(timestamp_path)
 
         self.assertEqual(timestamp.signed.version, 1)
         timestamp.signed.bump_version()
@@ -358,19 +358,19 @@ def test_metadata_timestamp(self):
 
     def test_metadata_verify_delegate(self):
         root_path = os.path.join(self.repo_dir, 'metadata', 'root.json')
-        root = Metadata.from_file(root_path)
+        root = Metadata[Root].from_file(root_path)
         snapshot_path = os.path.join(
             self.repo_dir, 'metadata', 'snapshot.json')
-        snapshot = Metadata.from_file(snapshot_path)
+        snapshot = Metadata[Snapshot].from_file(snapshot_path)
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
         role1_path = os.path.join(
             self.repo_dir, 'metadata', 'role1.json')
-        role1 = Metadata.from_file(role1_path)
+        role1 = Metadata[Targets].from_file(role1_path)
         role2_path = os.path.join(
             self.repo_dir, 'metadata', 'role2.json')
-        role2 = Metadata.from_file(role2_path)
+        role2 = Metadata[Targets].from_file(role2_path)
 
         # test the expected delegation tree
         root.verify_delegate('root', root)
@@ -468,7 +468,7 @@ def test_role_class(self):
     def test_metadata_root(self):
         root_path = os.path.join(
             self.repo_dir, 'metadata', 'root.json')
-        root = Metadata.from_file(root_path)
+        root = Metadata[Root].from_file(root_path)
 
         # Add a second key to root role
         root_key2 = import_ed25519_publickey_from_file(
@@ -530,7 +530,7 @@ def test_delegation_class(self):
     def test_metadata_targets(self):
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
 
         # Create a fileinfo dict representing what we expect the updated data to be
         filename = 'file2.txt'
@@ -560,7 +560,7 @@ def test_length_and_hash_validation(self):
         # for untrusted metadata file to verify.
         timestamp_path = os.path.join(
             self.repo_dir, 'metadata', 'timestamp.json')
-        timestamp = Metadata.from_file(timestamp_path)
+        timestamp = Metadata[Timestamp].from_file(timestamp_path)
         snapshot_metafile = timestamp.signed.meta["snapshot.json"]
 
         snapshot_path = os.path.join(
@@ -603,7 +603,7 @@ def test_length_and_hash_validation(self):
         # Test target files' hash and length verification
         targets_path = os.path.join(
             self.repo_dir, 'metadata', 'targets.json')
-        targets = Metadata.from_file(targets_path)
+        targets = Metadata[Targets].from_file(targets_path)
         file1_targetfile = targets.signed.targets['file1.txt']
         filepath = os.path.join(
             self.repo_dir, 'targets', 'file1.txt')
diff --git a/tuf/api/metadata.py b/tuf/api/metadata.py
index a7d967d8d9..4f1705b7c8 100644
--- a/tuf/api/metadata.py
+++ b/tuf/api/metadata.py
@@ -26,12 +26,15 @@
     BinaryIO,
     ClassVar,
     Dict,
+    Generic,
     List,
     Mapping,
     Optional,
     Tuple,
     Type,
+    TypeVar,
     Union,
+    cast,
 )
 
 from securesystemslib import exceptions as sslib_exceptions
@@ -56,13 +59,31 @@
 # files to have the same major version (the first number) as ours.
 SPECIFICATION_VERSION = ["1", "0", "19"]
 
+# T is a Generic type constraint for Metadata.signed
+T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets")
 
-class Metadata:
+
+class Metadata(Generic[T]):
     """A container for signed TUF metadata.
 
     Provides methods to convert to and from dictionary, read and write to and
     from file and to create and verify metadata signatures.
 
+    Metadata[T] is a generic container type where T can be any one type of
+    [Root, Timestamp, Snapshot, Targets]. The purpose of this is to allow
+    static type checking of the signed attribute in code using Metadata::
+
+        root_md = Metadata[Root].from_file("root.json")
+        # root_md type is now Metadata[Root]. This means signed and its
+        # attributes like consistent_snapshot are now statically typed and
+        # the types can be verified by static type checkers and shown by IDEs.
+        print(root_md.signed.consistent_snapshot)
+
+    Using a type constraint is not required, but without one T is not a
+    specific type, so static type checking is not possible. Note that the
+    type constraint "[Root]" is not validated at runtime (pure annotations
+    are not available then).
+
     Attributes:
         signed: A subclass of Signed, which has the actual metadata payload,
             i.e. one of Targets, Snapshot, Timestamp or Root.
@@ -70,10 +91,8 @@ class Metadata:
             signing the canonical serialized representation of 'signed'.
     """
 
-    def __init__(
-        self, signed: "Signed", signatures: "OrderedDict[str, Signature]"
-    ):
-        self.signed = signed
+    def __init__(self, signed: T, signatures: "OrderedDict[str, Signature]"):
+        self.signed: T = signed
         self.signatures = signatures
 
     @classmethod
@@ -119,7 +138,8 @@ def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata":
             signatures[sig.keyid] = sig
 
         return cls(
-            signed=inner_cls.from_dict(metadata.pop("signed")),
+            # Specific type T is not known at static type check time: use cast
+            signed=cast(T, inner_cls.from_dict(metadata.pop("signed"))),
             signatures=signatures,
         )
 
@@ -129,7 +149,7 @@ def from_file(
         filename: str,
         deserializer: Optional[MetadataDeserializer] = None,
         storage_backend: Optional[StorageBackendInterface] = None,
-    ) -> "Metadata":
+    ) -> "Metadata[T]":
         """Loads TUF metadata from file storage.
 
         Arguments:
@@ -156,11 +176,12 @@ def from_file(
         with storage_backend.get(filename) as file_obj:
             return cls.from_bytes(file_obj.read(), deserializer)
 
-    @staticmethod
+    @classmethod
     def from_bytes(
+        cls,
         data: bytes,
         deserializer: Optional[MetadataDeserializer] = None,
-    ) -> "Metadata":
+    ) -> "Metadata[T]":
         """Loads TUF metadata from raw data.
 
         Arguments:
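
As a usage sketch (assuming mypy or a comparable static type checker, and hypothetical local metadata file paths), the generic annotations introduced above let calling code be checked like this::

    from tuf.api.metadata import Metadata, Root, Targets

    # With an explicit constraint the checker knows signed is a Root,
    # so Root-only attributes such as consistent_snapshot type-check.
    root_md = Metadata[Root].from_file("root.json")
    print(root_md.signed.consistent_snapshot)

    # The same pattern works for the other roles.
    targets_md = Metadata[Targets].from_file("targets.json")
    print(len(targets_md.signed.targets))

    # Without a constraint the result is plain Metadata, so the specific
    # attributes of signed are not visible to the type checker.
    unknown_md = Metadata.from_file("root.json")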