diff --git a/config/grype-db-manager/include.d/validate.yaml b/config/grype-db-manager/include.d/validate.yaml
index 90ddb665..6ea468e3 100644
--- a/config/grype-db-manager/include.d/validate.yaml
+++ b/config/grype-db-manager/include.d/validate.yaml
@@ -1,9 +1,23 @@
 # validate:
 listing:
-  image: "centos:8.2.2004"
-  minimum-packages: 85
-  minimum-vulnerabilities: 400
+  image: "alpine:3.9.2"
+  minimum-packages: 10
+  minimum-vulnerabilities: 90
+
+expected-providers:
+  - alpine
+  - amazon
+  - chainguard
+  - debian
+  - github
+  - mariner
+  - nvd
+  - oracle
+  - rhel
+  - sles
+  - ubuntu
+  - wolfi
 
 default-max-year: 2021
 
 gates:
diff --git a/manager/src/grype_db_manager/cli/config.py b/manager/src/grype_db_manager/cli/config.py
index 227b195f..ec7a579f 100644
--- a/manager/src/grype_db_manager/cli/config.py
+++ b/manager/src/grype_db_manager/cli/config.py
@@ -81,7 +81,7 @@ class Validate:
     default_max_year: int = 2021
     gates: list[ValidateDB] = field(default_factory=list)
     listing: ValidateListing = field(default_factory=ValidateListing)
-
+    expected_providers: list[str] = field(default_factory=list)
 
 @dataclass()
 class ListingReplica:
diff --git a/manager/src/grype_db_manager/cli/db.py b/manager/src/grype_db_manager/cli/db.py
index 55c565b4..a5e10e9f 100644
--- a/manager/src/grype_db_manager/cli/db.py
+++ b/manager/src/grype_db_manager/cli/db.py
@@ -8,7 +8,7 @@
 from yardstick.cli import config as ycfg
 from yardstick.cli.validate import validate as yardstick_validate
 
-from grype_db_manager import db, s3utils
+from grype_db_manager import db, grypedb, s3utils
 from grype_db_manager.cli import config, error
 from grype_db_manager.db.format import Format
 from grype_db_manager.grypedb import DB_DIR, DBManager, GrypeDB
@@ -54,6 +54,14 @@ def clear_dbs(cfg: config.Application) -> None:
         click.echo("no databases to clear")
 
 
+def remove_db(cfg: config.Application, db_uuid: str) -> None:
+    db_manager = DBManager(root_dir=cfg.data.root)
+    if db_manager.remove_db(db_uuid=db_uuid):
+        click.echo(f"database {db_uuid!r} removed")
+        return
+    click.echo(f"no database found with session id {db_uuid}")
+
+
 @group.command(name="build", help="build and validate a grype database")
 @click.option("--schema-version", "-s", required=True, help="the DB schema version to build")
 @click.pass_obj
@@ -96,7 +103,7 @@ def show_db(cfg: config.Application, db_uuid: str) -> None:
     "--skip-namespace-check",
     "skip_namespace_check",
     is_flag=True,
-    help="do not ensure the minimum expected namespaces are present",
+    help="do not ensure the minimum expected namespaces are present (for v6+ this is a providers-based check)",
 )
 @click.argument("db-uuid")
 @click.pass_obj
@@ -120,13 +127,41 @@ def validate_db(
         return
 
     if not skip_namespace_check:
-        # ensure the minimum number of namespaces are present
-        db_manager.validate_namespaces(db_uuid=db_uuid)
+        if db_info.schema_version < 6:
+            # ensure the minimum number of namespaces are present
+            db_manager.validate_namespaces(db_uuid=db_uuid)
+        else:
+            # ensure the minimum expected providers are present
+            db_manager.validate_providers(db_uuid=db_uuid, expected=cfg.validate.expected_providers)
+
+    _validate_db(ctx, cfg, db_info, images, db_uuid, verbosity, recapture)
+
+    if db_info.schema_version >= 6:
+        logging.info(f"validating latest.json {db_uuid}")
+        _validate_latest(cfg, db_info.latest_path, db_info.archive_path)
+
+    click.echo(f"{Format.BOLD}{Format.OKGREEN}Validation passed{Format.RESET}")
+
+
+def _validate_db(
+    ctx: click.Context,
+    cfg: config.Application,
+    db_info: grypedb.DBInfo,
+    images: list[str],
+    db_uuid: str,
+    verbosity: int,
+    recapture: bool,
+) -> None:
     # resolve tool versions and install them
     yardstick.store.config.set_values(store_root=cfg.data.yardstick_root)
 
     grype_version = db.schema.grype_version(db_info.schema_version)
+    basis_grype_version = grype_version
+
+    if db_info.schema_version >= 6:
+        # TODO: we don't have any published v6 grype databases yet
+        basis_grype_version = db.schema.grype_version(5)
 
     result_sets = {}
     for idx, rs in enumerate(cfg.validate.gates):
@@ -153,10 +188,11 @@
                         label="custom-db",
                         name="grype",
                         version=grype_version + f"+import-db={db_info.archive_path}",
+                        profile="v6",
                     ),
                     ycfg.Tool(
                         name="grype",
-                        version=grype_version,
+                        version=basis_grype_version,
                     ),
                 ],
             ),
@@ -164,7 +200,13 @@
 
     yardstick_cfg = ycfg.Application(
         profiles=ycfg.Profiles(
-            data={},
+            data={
+                "grype[custom-db]": {
+                    "v6": {
+                        "config_path": "./.grype-db-v6.yaml"
+                    },
+                },
+            },
         ),
         store_root=cfg.data.yardstick_root,
         default_max_year=cfg.validate.default_max_year,
@@ -193,6 +235,31 @@
     )
 
 
+def _validate_latest(cfg: config.Application, latest_file: str, archive_path: str) -> None:
+    with open(latest_file) as f:
+        latest_obj = db.Latest.from_json(f.read())
+
+    if not cfg.validate.listing.image:
+        msg = "no image specified to validate against"
+        raise ValueError(msg)
+
+    if not cfg.validate.listing.minimum_packages:
+        msg = "minimum packages must be specified"
+        raise ValueError(msg)
+
+    if not cfg.validate.listing.minimum_vulnerabilities:
+        msg = "minimum vulnerabilities must be specified"
+        raise ValueError(msg)
+
+    db.latest.smoke_test(
+        latest_obj,
+        archive_path,
+        image=cfg.validate.listing.image,
+        minimum_packages=cfg.validate.listing.minimum_packages,
+        minimum_vulnerabilities=cfg.validate.listing.minimum_vulnerabilities,
+    )
+
+
 @group.command(name="upload", help="upload a grype database")
 @click.option("--ttl-seconds", "-t", default=DEFAULT_TTL_SECONDS, help="the TTL for the uploaded DB (should be relatively high)")
 @click.argument("db-uuid")
@@ -208,22 +275,35 @@ def upload_db(cfg: config.Application, db_uuid: str, ttl_seconds: int) -> None:
     db_manager = DBManager(root_dir=cfg.data.root)
     db_info = db_manager.get_db_info(db_uuid=db_uuid)
 
-    key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
+    if db_info.schema_version >= 6:
+        if not db_info.latest_path or not os.path.exists(db_info.latest_path):
+            msg = f"latest.json file not found for DB {db_uuid!r}"
+            raise ValueError(msg)
+
+        # /databases -> /databases/v6 (dynamic based on the schema version)
+        s3_path = f"{s3_path}/v{db_info.schema_version}"
 
-    # TODO: we have folks that require legacy behavior, where the content type was application/x-tar
-    kwargs = {}
-    if db_info.archive_path.endswith(".tar.gz"):
-        kwargs["ContentType"] = "application/x-tar"
+    db_key = f"{s3_path}/{os.path.basename(db_info.archive_path)}"
+    latest_key = f"{s3_path}/latest.json"
 
     s3utils.upload_file(
         bucket=s3_bucket,
-        key=key,
+        key=db_key,
         path=db_info.archive_path,
         CacheControl=f"public,max-age={ttl_seconds}",
-        **kwargs,
     )
 
-    click.echo(f"DB {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
+    click.echo(f"DB archive {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
+
+    if db_info.schema_version >= 6:
+        s3utils.upload_file(
+            bucket=s3_bucket,
+            key=latest_key,
+            path=db_info.latest_path,
+            CacheControl="public,max-age=300",  # 5 minutes
+        )
+
+        click.echo(f"DB latest.json {db_uuid!r} uploaded to s3://{s3_bucket}/{s3_path}")
 
 
 @group.command(name="build-and-upload", help="upload a grype database")
@@ -234,7 +314,7 @@ def upload_db(cfg: config.Application, db_uuid: str, ttl_seconds: int) -> None:
     "--skip-namespace-check",
     "skip_namespace_check",
     is_flag=True,
-    help="do not ensure the minimum expected namespaces are present",
+    help="do not ensure the minimum expected namespaces are present (for v6+ this is a providers-based check)",
 )
 @click.option("--verbose", "-v", "verbosity", count=True, help="show details of all comparisons")
 @click.pass_obj
diff --git a/manager/src/grype_db_manager/cli/listing.py b/manager/src/grype_db_manager/cli/listing.py
index 41730310..189cda77 100644
--- a/manager/src/grype_db_manager/cli/listing.py
+++ b/manager/src/grype_db_manager/cli/listing.py
@@ -9,7 +9,7 @@
 from grype_db_manager.db.format import Format
 
 
-@click.group(name="listing", help="manage the grype-db listing file")
+@click.group(name="listing", help="manage the grype-db listing file (only schemas v1-v5)")
 @click.pass_obj
 def group(_: config.Application) -> None:
     pass
@@ -110,7 +110,7 @@ def validate_listing(cfg: config.Application, listing_file: str) -> None:
         raise ValueError(msg)
 
     if cfg.validate.listing.override_grype_version and not cfg.validate.listing.override_db_schema_version:
-        msg = "ovrerride db schema version must be specified if override grype version is specified"
+        msg = "override db schema version must be specified if override grype version is specified"
         raise ValueError(msg)
 
     override_schema_release = None
diff --git a/manager/src/grype_db_manager/data/schema-info.json b/manager/src/grype_db_manager/data/schema-info.json
index c68f65e1..94bd2fa2 100644
--- a/manager/src/grype_db_manager/data/schema-info.json
+++ b/manager/src/grype_db_manager/data/schema-info.json
@@ -24,6 +24,11 @@
       "schema": "5",
       "grype-version": "main",
       "supported": true
+    },
+    {
+      "schema": "6",
+      "grype-version": "feat/v6-query-api",
+      "supported": false
     }
   ]
 }
diff --git a/manager/src/grype_db_manager/db/__init__.py b/manager/src/grype_db_manager/db/__init__.py
index b7e04da3..a6a6c0d7 100644
--- a/manager/src/grype_db_manager/db/__init__.py
+++ b/manager/src/grype_db_manager/db/__init__.py
@@ -1,11 +1,14 @@
 from . import listing, metadata, schema
+from .latest import Latest
 from .listing import Listing
 from .metadata import Metadata
 from .validation import capture_results
 
 __all__ = [
+    "Latest",
     "Listing",
     "Metadata",
+    "latest",
     "listing",
     "metadata",
     "schema",
diff --git a/manager/src/grype_db_manager/db/latest.py b/manager/src/grype_db_manager/db/latest.py
new file mode 100644
index 00000000..0219d3ce
--- /dev/null
+++ b/manager/src/grype_db_manager/db/latest.py
@@ -0,0 +1,168 @@
+from __future__ import annotations
+
+import contextlib
+import functools
+import json
+import logging
+import os
+import tempfile
+import threading
+from datetime import datetime
+from dataclasses import dataclass
+from http.server import HTTPServer, SimpleHTTPRequestHandler
+from typing import TYPE_CHECKING
+
+from dataclass_wizard import asdict, fromdict
+
+from grype_db_manager import grype
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+LATEST_FILENAME = "latest.json"
+
+
+# Latest is a dataclass that represents the latest.json document for schema v6+.
+@dataclass
+class Latest:
+    # status indicates if the database is actively being maintained and distributed
+    status: str | None = None
+
+    # schema version of the DB schema
+    schema_version: str | None = None
+
+    # timestamp the database was built
+    built: datetime | None = None
+
+    # path to a DB archive relative to the listing file hosted location (NOT the absolute URL)
+    path: str = ""
+
+    # self-describing digest of the database archive referenced in path
+    checksum: str = ""
+
+    @classmethod
+    def from_json(cls, contents: str) -> Latest:
+        return cls.from_dict(json.loads(contents))
+
+    @classmethod
+    def from_dict(cls, contents: dict) -> Latest:
+        return fromdict(cls, contents)
+
+    def to_json(self, indent: int | None = None) -> str:
+        return json.dumps(self.to_dict(), indent=indent, sort_keys=True)
+
+    def to_dict(self) -> dict:
+        return asdict(self)
+
+
+@contextlib.contextmanager
+def _http_server(directory: str, schema_version: str) -> Iterator[str]:
+    major_version = schema_version.split(".")[0]
+    server_address = ("127.0.0.1", 5555)
+    url = f"http://{server_address[0]}:{server_address[1]}"
+    latest_url = f"{url}/v{major_version}/{LATEST_FILENAME}"
+
+    def serve() -> None:
+        httpd = HTTPServer(
+            server_address,
+            functools.partial(SimpleHTTPRequestHandler, directory=directory),
+        )
+        logging.info(f"starting test server at {url!r}")
+        # show tree output of the given directory in the log
+        _log_dir(directory)
+
+        httpd.serve_forever()
+
+    thread = threading.Thread(target=serve)
+    thread.daemon = True
+    thread.start()
+    try:
+        yield latest_url
+    finally:
+        pass
+
+
+def _log_dir(path: str, prefix: str = "") -> None:
+    items = sorted(os.listdir(path))
+    for i, item in enumerate(items):
+        is_last = i == len(items) - 1
+        connector = "└── " if is_last else "├── "
+        logging.info(f"{prefix}{connector}{item}")
+        new_prefix = prefix + ("    " if is_last else "│   ")
+        item_path = os.path.join(path, item)
+        if os.path.isdir(item_path):
+            _log_dir(item_path, new_prefix)
+
+
+def _smoke_test(
+    schema_version: str,
+    listing_url: str,
+    image: str,
+    minimum_packages: int,
+    minimum_vulnerabilities: int,
+    store_root: str,
+) -> None:
+    logging.info(f"testing grype schema-version={schema_version!r}")
+    tool_obj = grype.Grype(
+        schema_version=schema_version,
+        store_root=store_root,
+        update_url=listing_url,
+    )
+
+    output = tool_obj.run(user_input=image)
+    packages, vulnerabilities = grype.Report(report_contents=output).parse()
+    logging.info(f"scan result with downloaded DB: packages={len(packages)} vulnerabilities={len(vulnerabilities)}")
+    if not packages or not vulnerabilities:
+        msg = "validation failed: missing packages and/or vulnerabilities"
+        raise ValueError(msg)
+
+    if len(packages) < minimum_packages:
+        msg = f"validation failed: expected at least {minimum_packages} packages, got {len(packages)}"
+        raise ValueError(msg)
+
+    if len(vulnerabilities) < minimum_vulnerabilities:
+        msg = f"validation failed: expected at least {minimum_vulnerabilities} vulnerabilities, got {len(vulnerabilities)}"
+        raise ValueError(msg)
+
+
+def smoke_test(
+    test_latest: Latest,
+    archive_path: str,
+    image: str,
+    minimum_packages: int,
+    minimum_vulnerabilities: int,
+) -> None:
+    # write the latest.json to a temp dir that is served up locally on an HTTP server. Grype uses this server to
+    # download the latest.json file and then fetch the DB archive referenced within it (the archive is made
+    # available relative to the served latest.json, mirroring the production layout).
+    with tempfile.TemporaryDirectory(prefix="grype-db-smoke-test") as tempdir:
+        listing_contents = test_latest.to_json()
+
+        installation_path = os.path.join(tempdir, "grype-install")
+
+        major_version = test_latest.schema_version.split(".")[0]
+
+        sub_path = os.path.join(tempdir, "v" + major_version)
+        os.makedirs(sub_path, exist_ok=True)
+
+        logging.info(listing_contents)
+        with open(os.path.join(sub_path, LATEST_FILENAME), "w") as f:
+            f.write(listing_contents)
+
+        # make the archive available at the expected location via a hard link
+        archive_dest = os.path.join(sub_path, test_latest.path)
+        os.link(archive_path, archive_dest)
+
+        # ensure grype can perform a db update for all supported schema versions. Note: we are only testing that the
+        # latest.json for the DB is usable (the download succeeds, and grype's update process, which performs
+        # checksum verification, passes). This test does NOT check the integrity of the DB since that has already
+        # been tested in the build steps.
+        with _http_server(directory=tempdir, schema_version=test_latest.schema_version) as listing_url:
+            _smoke_test(
+                schema_version=test_latest.schema_version,
+                listing_url=listing_url,
+                image=image,
+                minimum_packages=minimum_packages,
+                minimum_vulnerabilities=minimum_vulnerabilities,
+                store_root=installation_path,
+            )
diff --git a/manager/src/grype_db_manager/db/listing.py b/manager/src/grype_db_manager/db/listing.py
index ddd06fef..3bf97597 100644
--- a/manager/src/grype_db_manager/db/listing.py
+++ b/manager/src/grype_db_manager/db/listing.py
@@ -25,6 +25,7 @@
 
 LISTING_FILENAME = "listing.json"
 
+# Entry is a dataclass that represents a single entry from a listing.json for schemas v1-v5.
 @dataclass
 class Entry:
     built: str
@@ -46,6 +47,7 @@ def age_in_days(self, now: datetime.datetime | None = None) -> int:
         return (now - iso8601.parse_date(self.built)).days
 
 
+# Listing is a dataclass that represents the listing.json for schemas v1-v5.
 @dataclass
 class Listing:
     available: dict[int, list[Entry]]
diff --git a/manager/src/grype_db_manager/db/metadata.py b/manager/src/grype_db_manager/db/metadata.py
index 633b9306..3d3c235f 100644
--- a/manager/src/grype_db_manager/db/metadata.py
+++ b/manager/src/grype_db_manager/db/metadata.py
@@ -10,6 +10,7 @@
 
 FILE = "metadata.json"
 
+# Metadata is a dataclass that represents the metadata.json for schemas v1-v5.
 @dataclass
 class Metadata:
     built: str
diff --git a/manager/src/grype_db_manager/db/schema.py b/manager/src/grype_db_manager/db/schema.py
index 71fd671b..d4f03f44 100644
--- a/manager/src/grype_db_manager/db/schema.py
+++ b/manager/src/grype_db_manager/db/schema.py
@@ -23,7 +23,7 @@ class SchemaEntry:
 class SchemaMapping:
     Available: list[SchemaEntry] = field(default_factory=list)
 
-    def grype_version(self, schema_version: int) -> str | None:
+    def grype_version(self, schema_version: int | str) -> str | None:
         schema_version = str(schema_version)
         for entry in self.Available:
             if entry.schema == schema_version:
@@ -77,7 +77,7 @@ def _load() -> SchemaMapping:
     return cfg
 
 
-def grype_version(schema_version: int) -> str:
+def grype_version(schema_version: int | str) -> str:
     return _load().grype_version(schema_version)
diff --git a/manager/src/grype_db_manager/grype.py b/manager/src/grype_db_manager/grype.py
index d9db9806..daf301e2 100644
--- a/manager/src/grype_db_manager/grype.py
+++ b/manager/src/grype_db_manager/grype.py
@@ -22,7 +22,9 @@ class Grype:
     BIN = "grype"
 
-    def __init__(self, schema_version: int, store_root: str, update_url: str = "", release: str | None = None):
+    def __init__(self, schema_version: int | str, store_root: str, update_url: str = "", release: str | None = None):
+        if isinstance(schema_version, str):
+            schema_version = int(schema_version.split(".")[0])
         self.schema_version = schema_version
         if release:
             logging.warning(f"overriding grype release for schema={schema_version!r} with release={release!r}")
@@ -31,7 +33,7 @@ def __init__(self, schema_version: int, store_root: str, update_url: str = "", release: str | None = None):
             self.release = schema.grype_version(schema_version)
         logging.debug(f"using grype release={self.release!r} for schema={schema_version!r}")
 
-        env = {}
+        env = self._env()
         if update_url:
             env["GRYPE_DB_UPDATE_URL"] = update_url
         self.tool = grype.Grype.install(version=self.release, path=os.path.join(store_root, self.release), env=env)
@@ -43,20 +45,31 @@ def supported_schema_versions() -> list[str]:
             obj = json.load(fh)
         return obj.keys()
 
+    def _env(self, env: dict[str, str] | None = None) -> dict[str, str]:
+        if not env:
+            env = os.environ.copy()
+        if self.schema_version >= 6:
+            env.update(
+                {
+                    "GRYPE_EXP_DBV6": "true",
+                },
+            )
+        return env
+
     def update_db(self) -> None:
-        self.tool.run("db", "update", "-vv")
+        self.tool.run("db", "update", "-vv", env=self._env())
 
         # ensure the db cache is not empty for the current schema
         check_db_cache_dir(self.schema_version, os.path.join(self.tool.path, "db"))
 
     def import_db(self, db_path: str) -> None:
-        self.tool.run("db", "import", db_path)
+        self.tool.run("db", "import", db_path, env=self._env())
 
         # ensure the db cache is not empty for the current schema
         check_db_cache_dir(self.schema_version, os.path.join(self.tool.path, "db"))
 
     def run(self, user_input: str) -> str:
-        return self.tool.run("-o", "json", "-v", user_input)
+        return self.tool.run("-o", "json", "-v", user_input, env=self._env())
 
 
 class Report:
@@ -97,17 +110,17 @@ def parse(self) -> tuple[set[Package], set[Vulnerability]]:
         return packages, vulnerabilities
 
 
-def check_db_cache_dir(schema_version: str, db_runtime_dir: str) -> None:
+def check_db_cache_dir(schema_version: int, db_runtime_dir: str) -> None:
     """
     Ensure that there is a `metadata.json` file for the cache directory, which signals that there are
     files related to a database pull
     """
     # ensure the db cache is not empty for the current schema
-    if schema_version == "1":
+    if schema_version == 1:
         # older grype versions do not support schema-based cache directories
         db_metadata_file = os.path.join(db_runtime_dir, "metadata.json")
     else:
-        db_metadata_file = os.path.join(db_runtime_dir, schema_version, "metadata.json")
+        db_metadata_file = os.path.join(db_runtime_dir, str(schema_version), "metadata.json")
 
     if os.path.exists(db_metadata_file):
         # the metadata.json file exists and grype will be able to work with it
diff --git a/manager/src/grype_db_manager/grypedb.py b/manager/src/grype_db_manager/grypedb.py
index d5aecab2..a6a7701b 100644
--- a/manager/src/grype_db_manager/grypedb.py
+++ b/manager/src/grype_db_manager/grypedb.py
@@ -232,7 +232,6 @@
     "wolfi:rolling",
 ]
 
-
 def expected_namespaces(schema_version: int) -> list[str]:
     if schema_version <= 3:
         return v3_expected_namespaces
@@ -249,6 +248,7 @@ class DBInfo:
     db_created: datetime.datetime
     data_created: datetime.datetime
     archive_path: str
+    latest_path: str | None = None
 
 
 class DBInvalidException(Exception):
@@ -258,6 +258,9 @@ class DBNamespaceException(Exception):
     pass
 
+class DBProviderException(Exception):
+    pass
+
 
 class DBManager:
     def __init__(self, root_dir: str):
@@ -284,11 +287,31 @@ def new_session(self) -> str:
 
         return db_uuid
 
+    def list_providers(self, db_uuid: str) -> list[str]:
+        _, build_dir = self.db_paths(db_uuid=db_uuid)
+        # a sqlite3 db
+        db_path = os.path.join(build_dir, "vulnerability.db")
+
+        # select the distinct provider ids from the "providers" table
+        con = sqlite3.connect(db_path)
+        crsr = con.cursor()
+        crsr.execute("SELECT DISTINCT id FROM providers")
+        result = crsr.fetchall()
+        con.close()
+
+        return sorted([r[0] for r in result])
+
     def list_namespaces(self, db_uuid: str) -> list[str]:
         _, build_dir = self.db_paths(db_uuid=db_uuid)
         # a sqlite3 db
         db_path = os.path.join(build_dir, "vulnerability.db")
 
+        # check if there is a metadata.json file in the build directory
+        metadata_path = os.path.join(build_dir, "metadata.json")
+        if not os.path.exists(metadata_path):
+            msg = f"missing metadata.json for DB {db_uuid!r}"
+            raise DBInvalidException(msg)
+
         # select distinct values in the "namespace" column of the "vulnerability" table
         con = sqlite3.connect(db_path)
         crsr = con.cursor()
@@ -298,6 +321,15 @@
 
         return sorted([r[0] for r in result])
 
+    def validate_providers(self, db_uuid: str, expected: list[str]) -> None:
+        missing_providers = set(expected) - set(self.list_providers(db_uuid=db_uuid))
+
+        if missing_providers:
+            msg = f"missing providers in DB {db_uuid!r}: {sorted(missing_providers)!r}"
+            raise DBProviderException(msg)
+
+        logging.info(f"minimum expected providers present in {db_uuid!r}")
+
     def validate_namespaces(self, db_uuid: str) -> None:
         db_info = self.get_db_info(db_uuid)
         expected = expected_namespaces(db_info.schema_version)
@@ -322,14 +354,8 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:
         with open(timestamp_path) as f:
             db_created_timestamp = datetime.datetime.fromisoformat(f.read())
 
-        # read info from the metadata file in build/metadata.json
-        metadata_path = os.path.join(session_dir, "build", "metadata.json")
-        if not os.path.exists(metadata_path):
-            msg = f"missing metadata.json for DB {db_uuid!r}"
-            raise DBInvalidException(msg)
-
-        with open(metadata_path) as f:
-            metadata = json.load(f)
+        # read info from the metadata file in build/metadata.json (v1 - v5) or build/latest.json (v6+)
+        metadata = db_metadata(build_dir=os.path.join(session_dir, "build"))
 
         stage_dir, _ = self.db_paths(db_uuid=db_uuid)
         db_pattern = os.path.join(
@@ -347,13 +373,18 @@ def get_db_info(self, db_uuid: str) -> DBInfo | None:
 
         abs_archive_path = os.path.abspath(matches[0])
 
+        db_created = db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ")
+        if "db_created" in metadata:
+            db_created = metadata["db_created"]
+
         return DBInfo(
             uuid=db_uuid,
             schema_version=metadata["version"],
-            db_checksum=metadata["checksum"],
-            db_created=db_created_timestamp.strftime("%Y-%m-%dT%H:%M:%SZ"),
-            data_created=metadata["built"],
+            db_checksum=metadata["db_checksum"],
+            db_created=db_created,
+            data_created=metadata["data_created"],
             archive_path=abs_archive_path,
+            latest_path=metadata.get("latest_path", None),
         )
 
     def list_dbs(self) -> list[DBInfo]:
@@ -372,6 +403,56 @@ def list_dbs(self) -> list[DBInfo]:
 
         return sorted(sessions, key=lambda x: x.db_created)
 
+    def remove_db(self, db_uuid: str) -> bool:
+        session_dir = os.path.join(self.db_dir, db_uuid)
+        if os.path.exists(session_dir):
+            shutil.rmtree(session_dir)
+            return True
+        return False
+
+
+def db_metadata(build_dir: str) -> dict:
+    metadata_path = os.path.join(build_dir, "metadata.json")
+
+    if os.path.exists(metadata_path):
+        # supports v1 - v5
+        with open(metadata_path) as f:
+            metadata = json.load(f)
+        return {
+            "version": int(metadata["version"]),
+            "db_checksum": metadata["checksum"],
+            "data_created": metadata["built"],
+        }
+
+    latest_path = os.path.join(build_dir, "latest.json")
+    if os.path.exists(latest_path):
+        # supports v6+
+        with open(latest_path) as f:
+            metadata = json.load(f)
+
+        # example data:
+        # {
+        #   "status": "active",
+        #   "schemaVersion": "6.0.0",
+        #   "built": "2024-11-26T20:24:24Z",
+        #   "path": "vulnerability-db_v6.0.0_2024-11-25T01:31:56Z_1732652663.tar.zst",
+        #   "checksum": "sha256:1a0ec0ba815083d0ef50790c8c94307c822fd7d09632dee9c3edb6bf5a58e6ff"
+        # }
+        return {
+            "version": int(metadata["schemaVersion"].split(".")[0]),
+            "db_checksum": None,  # we don't have this information
+            "db_created": metadata["built"],
+            "data_created": parse_datetime(metadata["path"].split("_")[2]),
+            "latest_path": os.path.abspath(latest_path),
+        }
+
+    msg = "missing metadata.json and latest.json for DB"
+    raise DBInvalidException(msg)
+
+
+def parse_datetime(s: str) -> datetime.datetime:
+    return datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=datetime.timezone.utc)
+
 
 class GrypeDB:
     def __init__(self, bin_path: str, config_path: str = ""):
@@ -424,7 +505,7 @@ def build_and_package(self, schema_version: int, provider_root_dir: str, root_di
 
         db_pattern = os.path.join(
             build_dir,
-            f"*_v{schema_version}_*.tar.*",
+            f"*_v{schema_version}[._]*.tar.*",
         )
 
         matches = glob.glob(db_pattern)
diff --git a/manager/src/grype_db_manager/s3utils.py b/manager/src/grype_db_manager/s3utils.py
index 91d5141c..a4b7965c 100644
--- a/manager/src/grype_db_manager/s3utils.py
+++ b/manager/src/grype_db_manager/s3utils.py
@@ -74,13 +74,13 @@ def upload(bucket: str, key: str, contents: str, client_factory: type[ClientFactory] = ClientFactory, **kwargs) -> None:
 
 def upload_file(bucket: str, key: str, path: str, client_factory: type[ClientFactory] = ClientFactory, **kwargs) -> None:
-    logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key}")
-
     if "ContentType" not in kwargs:
         content_type = mime.from_file(path)
         if content_type:
             kwargs["ContentType"] = content_type
 
+    logging.debug(f"uploading file={path} to s3 bucket={bucket} key={key} content-type={kwargs.get('ContentType', '')}")
+
     # boto is a little too verbose... let's tone that down just for a bit
     with LoggingContext(level=logging.WARNING):
         s3 = client_factory.new()
diff --git a/manager/tests/cli/.grype-db-manager.yaml b/manager/tests/cli/.grype-db-manager.yaml
index a3998433..dd80753f 100644
--- a/manager/tests/cli/.grype-db-manager.yaml
+++ b/manager/tests/cli/.grype-db-manager.yaml
@@ -24,6 +24,8 @@ distribution:
     download-url-prefix: http://localhost:4566/testbucket
 
 validate:
+  expected-providers:
+    - oracle
   listing:
     image: "docker.io/oraclelinux:6@sha256:a06327c0f1d18d753f2a60bb17864c84a850bb6dcbcf5946dd1a8123f6e75495"
     minimum-packages: 10 # 14 as of 2023-08-14, leaving some room for possible out-of-band changes to the data
diff --git a/manager/tests/cli/.grype-db-v6.yaml b/manager/tests/cli/.grype-db-v6.yaml
new file mode 100644
index 00000000..de804d28
--- /dev/null
+++ b/manager/tests/cli/.grype-db-v6.yaml
@@ -0,0 +1,2 @@
+exp:
+  dbv6: true
\ No newline at end of file
diff --git a/manager/tests/cli/.grype-db.yaml b/manager/tests/cli/.grype-db.yaml
index e7860813..88b97a12 100644
--- a/manager/tests/cli/.grype-db.yaml
+++ b/manager/tests/cli/.grype-db.yaml
@@ -4,7 +4,7 @@ provider:
   root: cli-test-data/vunnel
 
   configs:
-    # let's use a single provider that we can show in isolation the setup is generally working. We don't
+    # let's use a limited set of providers to show, in isolation, that the setup is generally working. We don't
     # need all providers / an entire database to test the workflow.
     - name: oracle
      kind: vunnel
diff --git a/manager/tests/cli/Makefile b/manager/tests/cli/Makefile
index 57c53715..f7508973 100644
--- a/manager/tests/cli/Makefile
+++ b/manager/tests/cli/Makefile
@@ -6,12 +6,19 @@ CYAN := $(shell tput -T linux setaf 6)
 RESET := $(shell tput -T linux sgr0)
 
 test: virtual-env-check ## Run CLI tests
-	./run.sh
+	pytest . -vv -o log_cli=true
 
-cli-test-data/vunnel/oracle: ## Prepare data for CLI tests
+.PHONY: vunnel-oracle-data
+vunnel-oracle-data: cli-test-data/vunnel/oracle
+
+cli-test-data/vunnel/oracle: ## Prepare oracle data for CLI tests
	mkdir -p cli-test-data/vunnel
	oras pull ghcr.io/anchore/grype-db/data/oracle:latest && go run ../../../cmd/grype-db cache restore --path ./grype-db-cache.tar.gz
 
+.PHONY: install-oracle-labels
+install-oracle-labels:
+	cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/
+
 virtual-env-check:
	@ if [ "${VIRTUAL_ENV}" = "" ]; then \
		echo "$(ERROR)Not in a virtual environment. Try running with 'poetry run' or enter a 'poetry shell' session.$(RESET)"; \
diff --git a/manager/tests/cli/README.md b/manager/tests/cli/README.md
index a511ec5f..b6f7363b 100644
--- a/manager/tests/cli/README.md
+++ b/manager/tests/cli/README.md
@@ -17,9 +17,8 @@ If you'd like to run a single test:
 
 ```shell
 # from the manager/tests/cli directory
-./run.sh
+pytest . -vv -o log_cli=true -k <test-name>
 
 # e.g.
-# ./run.sh workflow-3-update-listing.sh
-# ./run.sh workflow-*db.sh
+# pytest . -vv -o log_cli=true -k test_workflow_4
 ```
diff --git a/manager/tests/cli/conftest.py b/manager/tests/cli/conftest.py
new file mode 100644
index 00000000..95cf99e2
--- /dev/null
+++ b/manager/tests/cli/conftest.py
@@ -0,0 +1,205 @@
+import os
+import shlex
+import subprocess
+import pytest
+import logging
+from enum import Enum
+from pathlib import Path
+from contextlib import contextmanager
+from tempfile import TemporaryDirectory
+
+
+class Format(Enum):
+    RESET = "\033[0m"
+    GREEN = "\033[1;32m"
+    RED = "\033[1;31m"
+    GREY = "\033[0;37m"
+    PURPLE = "\033[1;35m"
+    ORANGE_BOLD = "\033[1;33m"
+    ITALIC = "\033[3m"
+    BOLD = "\033[1m"
+
+    def render(self, text: str) -> str:
+        return f"{self.value}{text}{Format.RESET.value}"
+
+
+class CustomLogger(logging.Logger):
+
+    def __init__(self, name, level=logging.NOTSET):
+        super().__init__(name, level)
+        self.test_function = None  # Placeholder for test-specific context
+
+    def step(self, message: str):
+        if self.test_function:
+            message = f"[{self.test_function}] {message}"
+        self.info(Format.GREEN.render(message))
+
+
+@pytest.fixture(scope="function")
+def logger(request):
+    logging.setLoggerClass(CustomLogger)
+    logger = logging.getLogger(f"test_logger_{id(object())}")
+    logger.setLevel(logging.DEBUG)
+
+    # set the test function name dynamically
+    logger.test_function = request.node.name
+
+    return logger
+
+
+@pytest.fixture(scope="function", autouse=True)
+def change_to_cli_dir(request):
+    """
+    Automatically change the working directory to the directory containing the test file
+    if it's not already set, and revert back after the test.
+    """
+    # the directory of the current test file (which is in manage/tests/cli)
+    cli_dir = request.fspath.dirname
+    original_dir = os.getcwd()
+
+    # bail if already in the target directory
+    if os.path.samefile(original_dir, cli_dir):
+        yield  # run the test
+        return
+
+    # change to the target directory
+    if not os.path.isdir(cli_dir):
+        raise FileNotFoundError(f"Expected directory '{cli_dir}' does not exist.")
+
+    os.chdir(cli_dir)
+    try:
+        yield  # run the test
+    finally:
+        os.chdir(original_dir)  # revert to the original directory
+
+
+@pytest.fixture(scope="session")
+def temporary_dir() -> str:
+    with TemporaryDirectory() as tmp_dir:
+        yield tmp_dir
+
+
+@pytest.fixture(scope="session")
+def cli_env() -> dict[str, str]:
+    env = os.environ.copy()
+    env["PATH"] = f"{os.path.abspath('bin')}:{env['PATH']}"  # add `bin` to PATH
+    return env
+
+
+class CommandHelper:
+
+    def __init__(self, logger: logging.Logger):
+        self.logger = logger
+
+    def run(self, command: str, env=None, expect_fail=False, use_shell=True, **kwargs) -> tuple[str, str]:
+        self.logger.info(Format.ITALIC.render(f"{command}"))
+
+        process = subprocess.run(
+            command if use_shell else shlex.split(command),
+            shell=use_shell,  # use shell expansion if requested
+            capture_output=True,
+            text=True,
+            env=env,
+            **kwargs,
+        )
+
+        # log stdout and stderr when an error occurs
+        if process.returncode != 0 and not expect_fail:
+            self.logger.error(Format.RED.render("└── command failed unexpectedly"))
+            log_lines(process.stdout, "    ", self.logger.error, Format.RED.render)
+            log_lines(process.stderr, "    ", self.logger.error, Format.RED.render)
+            raise AssertionError("command failed unexpectedly")
+        elif process.returncode == 0 and expect_fail:
+            self.logger.error(Format.RED.render("└── expected failure, but command succeeded"))
+            log_lines(process.stdout, "    ", self.logger.error, Format.RED.render)
+            log_lines(process.stderr, "    ", self.logger.error, Format.RED.render)
+            raise AssertionError("command succeeded but was expected to fail")
+
+        # log success
+        self.logger.debug(Format.GREY.render("└── command succeeded"))
+        return process.stdout.strip(), process.stderr.strip()
+
+    @contextmanager
+    def pushd(self, path, logger):
+        """Temporarily change directory."""
+        prev_dir = os.getcwd()
+        logger.info(f"pushd {path}")
+        os.chdir(path)
+        try:
+            yield
+        finally:
+            logger.info(f"popd  # {prev_dir}")
+            os.chdir(prev_dir)
+
+
+def log_lines(text: str, prefix: str, lgr, renderer=None):
+    for line in text.splitlines():
+        msg = f"{prefix}{line}"
+        if renderer:
+            msg = renderer(msg)
+        lgr(msg)
+
+
+@pytest.fixture
+def command(logger) -> CommandHelper:
+    return CommandHelper(logger)
+
+
+class GrypeHelper:
+    def __init__(self, bin_dir: str | Path | None = None):
+        if bin_dir:
+            self.bin_dir = Path(bin_dir)
+        else:
+            self.bin_dir = None
+        self.command = CommandHelper(logging.getLogger("grype"))
+
+    def run(self, cmd: str, env: dict[str, str] | None = None, **kwargs) -> tuple[str, str]:
+        return self.command.run(f"{self.bin_dir}/grype {cmd}", env=env, **kwargs)
+
+    def install(self, branch_or_version: str, bin_dir: str | None = None, env: dict[str, str] | None = None) -> "GrypeHelper":
+        """
+        Install Grype either by building from a feature branch or downloading a prebuilt binary.
+        """
+        if not bin_dir and not self.bin_dir:
+            raise ValueError("bin_dir is required for Grype installation")
+
+        if bin_dir:
+            bin_dir = Path(bin_dir)
+        else:
+            bin_dir = self.bin_dir
+
+        grype_binary = Path(bin_dir) / "grype"
+
+        if branch_or_version.startswith("v"):
+            self.command.run(
+                f"curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b {bin_dir} {branch_or_version}",
+                use_shell=True,
+                env=env,
+            )
+
+            if not grype_binary.exists():
+                raise RuntimeError("Grype binary installation failed via install.sh")
+
+        else:
+            with TemporaryDirectory() as temp_dir:
+                self.command.run(
+                    f"git clone --branch {branch_or_version} https://github.com/anchore/grype.git {temp_dir}",
+                    check=True,
+                    env=env,
+                )
+                self.command.run(
+                    f"go build -o {grype_binary} -ldflags '-X github.com/anchore/grype-db/pkg/grypedb.Version={branch_or_version}' ./cmd/grype",
+                    cwd=temp_dir,
+                    check=True,
+                    env=env,
+                )
+
+                if not grype_binary.exists():
+                    raise RuntimeError("Grype binary build failed from feature branch")
+
+        return GrypeHelper(bin_dir)
+
+
+@pytest.fixture(scope="session")
+def grype():
+    return GrypeHelper()
diff --git a/manager/tests/cli/run.sh b/manager/tests/cli/run.sh
deleted file mode 100755
index 9a615f40..00000000
--- a/manager/tests/cli/run.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-# if no arguments are given then use case-*.sh, otherwise use the files given
-if [ $# -eq 0 ]; then
-    files=$(find . -maxdepth 1 -type f -name "workflow-*.sh" | sort)
-else
-    files=$@
-fi
-
-if [ -z "$files" ]; then
-    echo "No test files found"
-    exit 1
-fi
-
-title "Test scripts to run:"
-for script in $files; do
-    echo "   $script"
-done
-echo
-
-# run all scripts in the current directory named workflow-*.sh and exit on first failure
-status=0
-for script in $files; do
-    bash -c "./$script" || { status=1; break; }
-done
-
-if [ $status -eq 0 ]; then
-    echo -e "${SUCCESS}All tests passed${RESET}"
-else
-    echo -e "${ERROR}Some tests failed${RESET}"
-fi
-
-exit $status
\ No newline at end of file
diff --git a/manager/tests/cli/s3-mock/setup-workflow-3.py b/manager/tests/cli/s3-mock/setup-legacy-workflow-3.py
similarity index 100%
rename from manager/tests/cli/s3-mock/setup-workflow-3.py
rename to manager/tests/cli/s3-mock/setup-legacy-workflow-3.py
diff --git a/manager/tests/cli/s3-mock/setup-workflow-4.py b/manager/tests/cli/s3-mock/setup-legacy-workflow-4.py
similarity index 100%
rename from manager/tests/cli/s3-mock/setup-workflow-4.py
rename to manager/tests/cli/s3-mock/setup-legacy-workflow-4.py
diff --git a/manager/tests/cli/s3-mock/setup-workflow-1.py b/manager/tests/cli/s3-mock/setup-workflow-1.py
new file mode 100644
index 00000000..dec6fdcc
--- /dev/null
+++ b/manager/tests/cli/s3-mock/setup-workflow-1.py
@@ -0,0 +1,37 @@
+import os
+import requests
+import shutil
+
+# the credentials are not required for localstack, but the boto3 client will complain if they are not set
+os.environ["AWS_ACCESS_KEY_ID"] = "test"
+os.environ["AWS_SECRET_ACCESS_KEY"] = "test"
+
+from grype_db_manager import s3utils
+from grype_db_manager.cli import config
+
+
+def main():
+    cfg = config.load()
+
+    s3_bucket = cfg.distribution.s3_bucket
+    region = cfg.distribution.aws_region
+
+    if not bucket_exists(s3_bucket):
+        print(f"creating bucket {s3_bucket!r}")
+        s3 = s3utils.ClientFactory.new()
+        s3.create_bucket(Bucket=s3_bucket, CreateBucketConfiguration={"LocationConstraint": region})
+
+    print("done!")
+
+
+def bucket_exists(bucket: str):
+    try:
+        list(s3utils.get_matching_s3_objects(bucket=bucket, prefix=""))
+        return True
+    except Exception:
+        pass
+    return False
+
+
+if __name__ == "__main__":
+    main()
diff --git a/manager/tests/cli/test_legacy_workflows.py b/manager/tests/cli/test_legacy_workflows.py
new file mode 100644
index 00000000..0157edf9
--- /dev/null
+++ b/manager/tests/cli/test_legacy_workflows.py
@@ -0,0 +1,231 @@
+import pytest
+
+from grype_db_manager.db import schema
+
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_1(cli_env, command, logger):
+    """
+    workflow 1: create and delete a DB
+    """
+
+    logger.step("setup: clear previous data")
+    command.run("make clean-manager", env=cli_env)
+    command.run("make vunnel-oracle-data", env=cli_env)
+
+    logger.step("case 1: create the DB")
+    stdout, _ = command.run("grype-db-manager -v db build -s 5", env=cli_env)
+    assert stdout.strip(), "Expected non-empty output"
+    db_id = stdout.splitlines()[-1]  # assume DB ID is the last line of output
+
+    stdout, _ = command.run("grype-db-manager db list", env=cli_env)
+    assert db_id in stdout, f"Expected DB ID {db_id} in output"
+
+    logger.step("case 2: delete the DB")
+    command.run("grype-db-manager db clear", env=cli_env)
+    stdout, _ = command.run("grype-db-manager db list", env=cli_env)
+    assert db_id not in stdout, f"Did not expect DB ID {db_id} in output"
+
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_2(cli_env, command, logger):
+    """
+    workflow 2: validate DB
+    This test creates a database from raw vunnel data and performs validations under different conditions.
+    """
+
+    logger.step("setup: create the DB")
+    command.run("make clean-manager", env=cli_env)
+    command.run("make vunnel-oracle-data", env=cli_env)
+
+    # create the database
+    stdout, _ = command.run("grype-db-manager -v db build -s 5", env=cli_env)
+    assert stdout.strip(), "Expected non-empty output"
+    db_id = stdout.splitlines()[-1]  # Get the last line as the DB ID
+
+    ### case 1: fail DB validation (too many unknowns) ###
+    logger.step("case 1: fail DB validation (too many unknowns)")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    # workaround for Go 1.23+ parent directory module lookup
+    cli_env["GOWORK"] = "off"
+
+    stdout, _ = command.run(
+        f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check --recapture",
+        env=cli_env,
+        expect_fail=True,
+    )
+    assert "current indeterminate matches % is greater than 10%" in stdout
+
+    ### case 2: fail DB validation (missing namespaces) ###
+    logger.step("case 2: fail DB validation (missing namespaces)")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    logger.info("installing labels")
+    command.run("make install-oracle-labels", env=cli_env)
+
+    _, stderr = command.run(
+        f"grype-db-manager db validate {db_id} -vvv",
+        env=cli_env,
+        expect_fail=True,
+    )
+    assert "missing namespaces in DB" in stderr
+
+    ### case 3: pass DB validation ###
+    logger.step("case 3: pass DB validation")
+    command.run("make clean-yardstick-labels", env=cli_env)
+
+    logger.info("installing labels")
+    command.run("make install-oracle-labels", env=cli_env)
+
+    stdout, _ = command.run(
+        f"grype-db-manager db validate {db_id} -vvv --skip-namespace-check",
+        env=cli_env,
+    )
+    assert "Quality gate passed!" in stdout
+
+
+@pytest.mark.usefixtures("cli_env")
+def test_workflow_3(cli_env, command, logger, tmp_path, grype):
+    """
+    workflow 3: update an existing listing file
+    This test uses a mock S3 setup to upload databases, generate a new listing file, and validate that the updated
+    listing file works with grype for scanning.
+ """ + + logger.step("setup: prepare environment variables and directories") + + # set environment variables for aws and grype + bin_dir = tmp_path / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + + cli_env.update( + { + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_REGION": "us-west-2", + "PATH": f"{bin_dir}:{cli_env['PATH']}", # ensure `bin` directory is in PATH + } + ) + + grype = grype.install("v0.65.0", bin_dir) + + logger.step("setup: start mock S3 and upload databases") + with command.pushd("s3-mock", logger): + command.run("docker compose up -d", env=cli_env) + command.run("python setup-legacy-workflow-3.py", env=cli_env) + + ### start of testing ### + logger.step("case 1: update a listing file based on S3 state") + + # generate a new listing file + stdout, _ = command.run("grype-db-manager listing update", env=cli_env) + assert "Validation passed" in stdout + assert "listing.json uploaded to s3://testbucket/grype/databases" in stdout + + # setup grype for DB updates and scans + cli_env.update( + { + "GRYPE_DB_UPDATE_URL": "http://localhost:4566/testbucket/grype/databases/listing.json", + "GRYPE_DB_CACHE_DIR": str(bin_dir), + } + ) + + # validate grype DB listing and scanning + stdout, _ = grype.run(f"db list", env=cli_env) + assert "http://localhost:4566" in stdout + + stdout, _ = grype.run(f"db update", env=cli_env) + + stdout, _ = grype.run(f"--platform linux/amd64 --by-cve alpine:3.2", env=cli_env) + assert "CVE-2016-2148" in stdout + + ### end of testing ### + + logger.step("teardown: stop mock S3 and clean up") + with command.pushd("s3-mock", logger): + command.run("docker compose down -t 1 -v", env=cli_env) + + +@pytest.mark.usefixtures("cli_env") +def test_workflow_4(cli_env, command, logger, tmp_path, grype): + """ + workflow 4: full publish workflow + This test builds and validates a new DB from raw vunnel data, uploads the DB to a mock S3, updates the listing file, + and uses the updated listing file in a grype scan. + """ + + logger.step("setup: prepare environment variables and directories") + + # set environment variables for aws, grype, and schema versions + bin_dir = tmp_path / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + + schema_version = "5" + cli_env.update( + { + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_REGION": "us-west-2", + "SCHEMA_VERSION": schema_version, + "GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_GRYPE_VERSION": "v0.65.0", + "GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_DB_SCHEMA_VERSION": "5", + "PATH": f"{bin_dir}:{cli_env['PATH']}", # ensure `bin` directory is in PATH + } + ) + + grype = grype.install(schema.grype_version(schema_version), bin_dir) + + logger.step("setup: clean manager and prepare data") + command.run("make clean-manager", env=cli_env) + command.run("make vunnel-oracle-data", env=cli_env) + command.run("make install-oracle-labels", env=cli_env) + + logger.step("setup: start mock S3 and upload initial data") + with command.pushd("s3-mock", logger): + command.run("docker compose up -d", env=cli_env) + command.run("python setup-legacy-workflow-4.py", env=cli_env) + + ### start of testing ### + logger.step("case 1: create and publish a DB") + + # build, validate, and upload the database + stdout, _ = command.run( + "grype-db-manager db build-and-upload --schema-version 5 --skip-namespace-check", + env=cli_env, + ) + assert "Quality gate passed!" 
in stdout + assert "' uploaded to s3://testbucket/grype/databases" in stdout + + logger.step("case 2: update the listing file based on the DB uploaded") + + # update the listing file and validate + stdout, _ = command.run("grype-db-manager listing update", env=cli_env) + assert "Validation passed" in stdout + assert "listing.json uploaded to s3://testbucket/grype/databases" in stdout + + # set grype environment variables + cli_env.update( + { + "GRYPE_DB_UPDATE_URL": "http://localhost:4566/testbucket/grype/databases/listing.json", + "GRYPE_DB_CACHE_DIR": str(bin_dir), + } + ) + + # validate grype DB listing and scanning + stdout, _ = grype.run("db list", env=cli_env) + assert "http://localhost:4566" in stdout + + stdout, _ = grype.run("db update", env=cli_env) + assert "Vulnerability database updated" in stdout + + stdout, _ = grype.run( + "docker.io/oraclelinux:6@sha256:a06327c0f1d18d753f2a60bb17864c84a850bb6dcbcf5946dd1a8123f6e75495 --by-cve", + env=cli_env, + ) + assert "ELSA-2021-9591" in stdout + + ### end of testing ### + + logger.step("teardown: stop mock S3 and clean up") + with command.pushd("s3-mock", logger): + command.run("docker compose down -t 1 -v", env=cli_env) diff --git a/manager/tests/cli/test_workflows.py b/manager/tests/cli/test_workflows.py new file mode 100644 index 00000000..42a2495f --- /dev/null +++ b/manager/tests/cli/test_workflows.py @@ -0,0 +1,134 @@ +import pytest + +from grype_db_manager.db import schema +from grype_db_manager.cli import config + + +@pytest.mark.usefixtures("cli_env") +def test_workflow_1(cli_env, command, logger, tmp_path, grype): + """ + workflow 1: create, upload, and delete a DB + """ + logger.step("setup: prepare environment variables and directories") + + # set environment variables for aws and grype + bin_dir = tmp_path / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + schema_version = "6" + cli_env.update( + { + "AWS_ACCESS_KEY_ID": "test", + "AWS_SECRET_ACCESS_KEY": "test", + "AWS_REGION": "us-west-2", + "GRYPE_EXP_DBV6": "true", # while we are in development, we need to enable the experimental dbv6 feature flag + "GRYPE_DB_AUTO_UPDATE": "false", # disable auto-updating the database to avoid unexpected behavior + "GOWORK": "off", # workaround for Go 1.23+ parent directory module lookup + "PATH": f"{bin_dir}:{cli_env['PATH']}", # ensure `bin` directory is in PATH + "GOBIN": bin_dir, + "GRYPE_DB_UPDATE_URL": f"http://localhost:4566/testbucket/grype/databases/v{schema_version}/latest.json", + "GRYPE_DB_CACHE_DIR": str(bin_dir), + } + ) + + cfg = config.load() + image = cfg.validate.gates[0].images[0] + + grype = grype.install(schema.grype_version(schema_version), bin_dir) + + logger.step("setup: clear previous data") + command.run("make clean-manager", env=cli_env) + command.run("make vunnel-oracle-data", env=cli_env) + + logger.step("setup: start mock S3") + with command.pushd("s3-mock", logger): + command.run("docker compose up -d", env=cli_env) + command.run("python setup-workflow-1.py", env=cli_env) + + logger.step("case 1: create the DB") + stdout, _ = command.run(f"grype-db-manager -v db build -s {schema_version}", env=cli_env) + assert stdout.strip(), "Expected non-empty output" + db_id = stdout.splitlines()[-1] # assume DB ID is the last line of output + + stdout, _ = command.run("grype-db-manager db list", env=cli_env) + assert db_id in stdout, f"Expected DB ID {db_id} in output" + + logger.step("case 2: upload the DB") + stdout, _ = command.run(f"grype-db-manager db upload {db_id}", env=cli_env) + assert f"DB archive 
'{db_id}' uploaded to s3://testbucket/grype/databases/v{schema_version}" in stdout + assert f"latest.json '{db_id}' uploaded to s3://testbucket/grype/databases/v{schema_version}" in stdout + + logger.step("case 3: use the DB with grype") + stdout, _ = grype.run("db update -v", env=cli_env) + assert "Vulnerability database updated" in stdout + + stdout, _ = grype.run(f"--platform linux/amd64 {image}", env=cli_env) + assert "ELSA-2021-9314" in stdout + + logger.step("case 4: delete the DB") + command.run("grype-db-manager db clear", env=cli_env) + stdout, _ = command.run("grype-db-manager db list", env=cli_env) + assert db_id not in stdout, f"Did not expect DB ID {db_id} in output" + + ### end of testing ### + + logger.step("teardown: stop mock S3 and clean up") + with command.pushd("s3-mock", logger): + command.run("docker compose down -t 1 -v", env=cli_env) + + +@pytest.mark.usefixtures("cli_env") +def test_workflow_2(cli_env, command, logger): + """ + workflow 2: validate DB + This test creates a database from raw vunnel data and performs validations via the quality gate. + """ + + logger.step("setup: create the DB") + command.run("make clean-manager", env=cli_env) + command.run("make vunnel-oracle-data", env=cli_env) + + # create the database + stdout, _ = command.run("grype-db-manager -v db build -s 6", env=cli_env) + assert stdout.strip(), "Expected non-empty output" + db_id = stdout.splitlines()[-1] # Get the last line as the DB ID + + ### case 1: fail DB validation (too many unknowns) ### + logger.step("case 1: fail DB validation (too many unknowns)") + command.run("make clean-yardstick-labels", env=cli_env) + + # workaround for Go 1.23+ parent directory module lookup + cli_env["GOWORK"] = "off" + + stdout, _ = command.run( + f"grype-db-manager db validate {db_id} -vvv --recapture", + env=cli_env, + expect_fail=True, + ) + assert "current indeterminate matches % is greater than 10%" in stdout + + ### case 2: fail DB validation (missing providers) ### + logger.step("case 2: fail DB validation (missing providers)") + command.run("make clean-yardstick-labels", env=cli_env) + + logger.info("installing labels") + command.run("make install-oracle-labels", env=cli_env) + + _, stderr = command.run( + f"grype-db-manager db validate {db_id} -vvv", + env=cli_env, + expect_fail=True, + ) + assert "missing providers in DB" in stderr + + ### case 3: pass DB validation ### + logger.step("case 3: pass DB validation") + command.run("make clean-yardstick-labels", env=cli_env) + + logger.info("installing labels") + command.run("make install-oracle-labels", env=cli_env) + + stdout, _ = command.run( + f"grype-db-manager db validate {db_id} -vvv", + env=cli_env, + ) + assert "Quality gate passed!" in stdout diff --git a/manager/tests/cli/utils.sh b/manager/tests/cli/utils.sh deleted file mode 100755 index 312496a9..00000000 --- a/manager/tests/cli/utils.sh +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env bash -set -u - -ERROR="\033[1;31m" -SUCCESS="\033[1;32m" -STEP="\033[1;33m" -HEADER="\033[1;34m" -TITLE="\033[1;35m" -RESET="\033[0m" - -i=0 - -stdout_files=() -stderr_files=() - - - -function _run_and_capture() { - stdout_tmp_file=$(mktemp /tmp/grype-db-manager-test-stdout.XXXXXX) - stderr_tmp_file=$(mktemp /tmp/grype-db-manager-test-stderr.XXXXXX) - stdout_files+=( $stdout_tmp_file ) - stderr_files+=( $stderr_tmp_file ) - - echo -e "${STEP}$i| Running $@${RESET}" - - # we want to capture stdout and stderr to files but also print them to the screen in realtime. 
Using tee is the - # best resource for this, but there is an added challenge of needing the return code of the original command - # (which is now in a subshell). The "exit PIPESTATUS[0]" solves this by promoting the first command's return - # code as the subshell's return code. - ($@ | tee $stdout_tmp_file ; exit ${PIPESTATUS[0]}) 3>&1 1>&2 2>&3 | tee $stderr_tmp_file - rc=${PIPESTATUS[0]} - return $rc -} - -function run() { - _run_and_capture $@ - rc=$? - if [ $rc -eq 0 ]; then - echo -e "${SUCCESS}Success${RESET}" - else - echo -e "${ERROR}Failed: expected zero return code but got $rc${RESET}" - exit 1 - fi - ((i++)) -} - -function run_expect_fail() { - _run_and_capture $@ - rc=$? - if [ $rc -eq 0 ]; then - echo -e "${ERROR}Failed: expected non-zero return code but got $rc${RESET}" - exit 1 - else - echo -e "${SUCCESS}Success: exited with non-zero return code: $rc${RESET}" - fi - ((i++)) -} - -function last_stdout_file() { - echo ${stdout_files[${#stdout_files[@]} - 1]} -} - -function last_stderr_file() { - echo ${stderr_files[${#stderr_files[@]} - 1]} -} - -function last_stdout() { - cat $(last_stdout_file) -} - -function last_stderr() { - cat $(last_stderr_file) -} - -function assert_not_empty() { - output_file=$1 - len=$(cat $output_file | wc -l | tr -d ' ') - if [[ "$len" -gt 0 ]]; then - return - fi - echo -e "${ERROR}Unexpected length $len${RESET}" - exit 1 -} - -function assert_contains() { - output_file=$1 - target=$2 - is_in_file=$(cat $output_file | grep -c "$target") - if [ $is_in_file -eq 0 ]; then - echo -e "${ERROR}Target not found in contents '$target'${RESET}" - echo -e "${ERROR}...contents:\n$(cat $output_file)${RESET}" - exit 1 - fi -} - -function assert_does_not_contain() { - output_file=$1 - target=$1 - is_in_file=$(cat $output_file | grep -c "$target") - if [ $is_in_file -ne 0 ]; then - echo -e "${ERROR}Target found in contents '$target'${RESET}" - echo -e "${ERROR}...contents:\n$(cat output_file)${RESET}" - exit 1 - fi -} - -function header() { - echo -e "${HEADER}$@${RESET}" -} - -function title() { - echo -e "${TITLE}$@${RESET}" -} - -function end_testing() { - echo "cleaning up temp files created:" - for i in ${!stdout_files[@]}; do - echo " " ${stdout_files[$i]} - rm ${stdout_files[$i]} - done - - for i in ${!stderr_files[@]}; do - echo " " ${stderr_files[$i]} - rm ${stderr_files[$i]} - done - - echo -e "\n${SUCCESS}PASS${RESET}" -} diff --git a/manager/tests/cli/workflow-1-create-and-delete-db.sh b/manager/tests/cli/workflow-1-create-and-delete-db.sh deleted file mode 100755 index 83170857..00000000 --- a/manager/tests/cli/workflow-1-create-and-delete-db.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -. 
utils.sh - -title "Starting workflow 1: create and delete DB" - -header "Setup: clear previous data" - -make clean-manager -make cli-test-data/vunnel/oracle - - -### Start of testing ######################## -header "Case 1: create the DB" - -run grype-db-manager -v db build -s 5 -assert_not_empty $(last_stdout_file) -DB_ID="$(last_stdout)" -run grype-db-manager db list - -assert_contains "$(last_stdout_file)" $DB_ID - - -############################################# -header "Case 2: delete the DB" - -run grype-db-manager db clear -run grype-db-manager db list -assert_does_not_contain "$(last_stdout_file)" $DB_ID - - -### End of testing ######################## -end_testing diff --git a/manager/tests/cli/workflow-2-validate-db.sh b/manager/tests/cli/workflow-2-validate-db.sh deleted file mode 100755 index 4fe8cbc0..00000000 --- a/manager/tests/cli/workflow-2-validate-db.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env bash - -. utils.sh - -title "Starting workflow 2: validate DB" -# this test uses raw vunnel data to create a DB from scratch and see if it passes validation. There are different sets -# of labels to trigger a failing validation as well as passing validations under other non-ideal conditions. -# ultimately it is up to unit tests to fully exercise the validation logic, but this test is a good sanity check -# that the data needed for validations is wired up correctly. - -header "Setup: create the DB" -make clean-manager -make cli-test-data/vunnel/oracle -run grype-db-manager -v db build -s 5 -assert_not_empty $(last_stdout_file) -DB_ID="$(last_stdout)" - -### Start of testing ######################## -header "Case 1: fail DB validation (too many unknowns)" - -make clean-yardstick-labels - -# workaround for go1.23+ looking into parent dirs when building go modules in subdirs -export GOWORK=off - -run_expect_fail grype-db-manager db validate $DB_ID -vvv --skip-namespace-check --recapture -assert_contains $(last_stdout_file) "current indeterminate matches % is greater than 10%" - -############################################# -header "Case 2: fail DB validation (missing namespaces)" - -make clean-yardstick-labels -echo "installing labels" -# use the real labels -cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/ -tree ./cli-test-data/yardstick/labels/ - -run_expect_fail grype-db-manager db validate $DB_ID -vvv -assert_contains $(last_stderr_file) "missing namespaces in DB" - - -############################################# -header "Case 3: pass DB validation" - -make clean-yardstick-labels -echo "installing labels" -# use the real labels -cp -a ../../../data/vulnerability-match-labels/labels/docker.io+oraclelinux* ./cli-test-data/yardstick/labels/ -tree ./cli-test-data/yardstick/labels/ - -run grype-db-manager db validate $DB_ID -vvv --skip-namespace-check -assert_contains $(last_stdout_file) "Quality gate passed!" - - -### End of testing ######################## -end_testing diff --git a/manager/tests/cli/workflow-3-update-listing.sh b/manager/tests/cli/workflow-3-update-listing.sh deleted file mode 100755 index 31c86665..00000000 --- a/manager/tests/cli/workflow-3-update-listing.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env bash - -. utils.sh - -title "Starting workflow 3: update the listing file" -# this uses real, already-built DBs (from the production workflow) to exercise the listing file update logic. -# an S3 mock is used to upload a set of DBs and to generate a new listing file from. 
diff --git a/manager/tests/cli/workflow-3-update-listing.sh b/manager/tests/cli/workflow-3-update-listing.sh
deleted file mode 100755
index 31c86665..00000000
--- a/manager/tests/cli/workflow-3-update-listing.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 3: update the listing file"
-# this uses real, already-built DBs (from the production workflow) to exercise the listing file update logic.
-# an S3 mock is used to upload a set of DBs and to generate a new listing file from them. The uploaded listing file
-# is then used by grype to download the correct DB and run a scan.
-
-# note: these credentials / configurations must match the ones used in s3-mock/setup.py and .grype-db-manager.yaml
-export AWS_ACCESS_KEY_ID="test"
-export AWS_SECRET_ACCESS_KEY="test"
-export AWS_REGION="us-west-2"
-
-GRYPE_VERSION="v0.65.0"
-
-set -e
-
-BIN_DIR="./bin"
-
-rm -rf $BIN_DIR
-
-curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $BIN_DIR $GRYPE_VERSION
-
-pushd s3-mock
-docker compose up -d
-python setup-workflow-3.py
-popd
-
-set +e
-
-### Start of testing ########################
-header "Case 1: update a listing file based on S3 state"
-
-# note: this test is exercising the following commands:
-#   grype-db-manager listing create
-#   grype-db-manager listing validate
-
-run grype-db-manager listing update
-assert_contains $(last_stdout_file) "Validation passed"
-assert_contains $(last_stdout_file) "listing.json uploaded to s3://testbucket/grype/databases"
-
-# check if grype works with this updated listing file
-export GRYPE_DB_UPDATE_URL="http://localhost:4566/testbucket/grype/databases/listing.json"
-export GRYPE_DB_CACHE_DIR=$BIN_DIR
-
-run bin/grype db list
-
-assert_contains $(last_stdout_file) "http://localhost:4566"
-
-run bin/grype db update
-
-run bin/grype alpine:3.2
-
-assert_contains $(last_stdout_file) "CVE-2016-2148"
-
-
-### End of testing ########################
-
-pushd s3-mock
-docker compose down -t 1 -v
-popd
-
-end_testing
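Workflow 3 ends by proving grype can consume the regenerated listing. For reference, a small sketch of what that consumption looks like from the client side (the listing.json shape is abbreviated and approximate; the URL matches the S3 mock above, and the helper is illustrative rather than part of the test suite):

# sketch: how a client like grype picks a DB out of listing.json
import requests

LISTING_URL = "http://localhost:4566/testbucket/grype/databases/listing.json"

listing = requests.get(LISTING_URL, timeout=10).json()
# entries are grouped by schema version; each entry roughly carries
# built/version/url/checksum fields
candidates = listing["available"]["5"]
# built timestamps are compared lexicographically here for simplicity
newest = max(candidates, key=lambda entry: entry["built"])
print(newest["url"], newest["checksum"])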
diff --git a/manager/tests/cli/workflow-4-full-publish.sh b/manager/tests/cli/workflow-4-full-publish.sh
deleted file mode 100755
index b5fe2a5a..00000000
--- a/manager/tests/cli/workflow-4-full-publish.sh
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env bash
-
-. utils.sh
-
-title "Starting workflow 4: full publish workflow"
-# this test exercises the full publish workflow: building and validating a new DB from raw vunnel data,
-# uploading the DB to an S3 mock, updating and uploading the listing file, and then using the updated listing file
-# in a grype scan.
-
-# note: these credentials / configurations must match the ones used in s3-mock/setup.py and .grype-db-manager.yaml
-export AWS_ACCESS_KEY_ID="test"
-export AWS_SECRET_ACCESS_KEY="test"
-export AWS_REGION="us-west-2"
-
-GRYPE_VERSION="v0.65.0"
-SCHEMA_VERSION="5"
-
-# these are what are used in the staging pipeline for a single DB build
-export GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_GRYPE_VERSION=$GRYPE_VERSION
-export GRYPE_DB_MANAGER_VALIDATE_LISTING_OVERRIDE_DB_SCHEMA_VERSION=$SCHEMA_VERSION
-
-set -e
-
-BIN_DIR="./bin"
-
-rm -rf $BIN_DIR
-
-curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sh -s -- -b $BIN_DIR $GRYPE_VERSION
-
-make clean-manager
-make cli-test-data/vunnel/oracle
-
-pushd s3-mock
-docker compose up -d
-python setup-workflow-4.py
-popd
-
-set +e
-
-### Start of testing ########################
-header "Case 1: create and publish a DB"
-
-# note: this test is exercising the following commands:
-#   grype-db-manager db build
-#   grype-db-manager db validate --skip-namespace-check
-#   grype-db-manager db upload
-
-run grype-db-manager db build-and-upload --schema-version $SCHEMA_VERSION --skip-namespace-check
-assert_contains $(last_stdout_file) "Quality gate passed!"
-assert_contains $(last_stdout_file) "' uploaded to s3://testbucket/grype/databases"
-
-
-header "Case 2: update the listing file based on the DB uploaded"
-
-# note: this test is exercising the following commands:
-#   grype-db-manager listing create
-#   grype-db-manager listing validate
-
-run grype-db-manager listing update
-assert_contains $(last_stdout_file) "Validation passed"
-assert_contains $(last_stdout_file) "listing.json uploaded to s3://testbucket/grype/databases"
-
-# check if grype works with this updated listing file
-export GRYPE_DB_UPDATE_URL="http://localhost:4566/testbucket/grype/databases/listing.json"
-export GRYPE_DB_CACHE_DIR="./bin"
-
-run bin/grype db list
-
-assert_contains $(last_stdout_file) "http://localhost:4566"
-
-run bin/grype db update
-
-run bin/grype docker.io/oraclelinux:6@sha256:a06327c0f1d18d753f2a60bb17864c84a850bb6dcbcf5946dd1a8123f6e75495
-
-assert_contains $(last_stdout_file) "ELSA-2021-9591"
-
-
-### End of testing ########################
-
-pushd s3-mock
-docker compose down -t 1 -v
-popd
-
-end_testing
diff --git a/manager/tests/unit/cli/test_db.py b/manager/tests/unit/cli/test_db.py
index 0f1d7f6d..1fc9b375 100644
--- a/manager/tests/unit/cli/test_db.py
+++ b/manager/tests/unit/cli/test_db.py
@@ -34,5 +34,4 @@ def test_upload_db(mocker, test_dir_path, redact_aws_credentials):
         bucket="testbucket",
         key="grype/databases/archive.tar.gz",
         CacheControl="public,max-age=31536000",
-        ContentType="application/x-tar",  # this is legacy behavior, remove me
     )
diff --git a/manager/tests/unit/test_grypedb.py b/manager/tests/unit/test_grypedb.py
index ab424626..39eec7bb 100644
--- a/manager/tests/unit/test_grypedb.py
+++ b/manager/tests/unit/test_grypedb.py
@@ -108,6 +108,46 @@ def test_validate_namespaces(self, tmp_path: pathlib.Path, mocker, schema_versio
         dbm.list_namespaces.assert_called_once()
         dbm.get_db_info.assert_called_once()
 
+    @pytest.mark.parametrize(
+        "listed_providers, schema_version, expect_error",
+        [
+            pytest.param([], 6, True, id="empty"),
+            pytest.param(["nvd"], 6, True, id="too few providers"),
+            pytest.param(grypedb.expected_providers(6), 6, False, id="v6 matches"),
+            pytest.param(grypedb.expected_providers(6) + ["extra_items"], 6, False, id="v6 with extra items"),
+            pytest.param(list(grypedb.expected_providers(6))[:-5], 6, True, id="v6 missing items"),
+        ],
+    )
+    def test_validate_providers(self, tmp_path: pathlib.Path, mocker, schema_version, listed_providers, expect_error):
+        assert len(grypedb.expected_providers(schema_version)) > 0
+
+        dbm = grypedb.DBManager(root_dir=tmp_path.as_posix())
+        session_id = dbm.new_session()
+
+        # patch list_providers to return a mock
+        dbm.list_providers = mocker.MagicMock()
+        dbm.list_providers.return_value = listed_providers
+
+        # patch get_db_info to return a mock
+        dbm.get_db_info = mocker.MagicMock()
+        dbm.get_db_info.return_value = grypedb.DBInfo(
+            uuid="",
+            schema_version=schema_version,
+            db_checksum="",
+            db_created="",
+            data_created="",
+            archive_path="",
+        )
+
+        if expect_error:
+            with pytest.raises(grypedb.DBProviderException):
+                dbm.validate_providers(session_id)
+        else:
+            dbm.validate_providers(session_id)
+
+        dbm.list_providers.assert_called_once()
+        dbm.get_db_info.assert_called_once()
+
 
 class TestGrypeDB:
     def test_list_installed(self, top_level_fixture):
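For context on what the new test drives: validate_providers itself is not part of this diff. A sketch of the behavior the parametrized cases imply (method, helper, and exception names come from the test; the body is an inference, not the shipped implementation in grype_db_manager/grypedb.py):

# inferred sketch: what DBManager.validate_providers presumably does, written
# as a free function over a DBManager-like object for readability
def validate_providers(dbm, db_uuid: str, expected: list[str] | None = None) -> None:
    db_info = dbm.get_db_info(db_uuid=db_uuid)
    if not expected:
        # fall back to the static per-schema expectation the test relies on
        expected = grypedb.expected_providers(db_info.schema_version)
    # extra providers are tolerated; only missing ones fail the check
    missing = set(expected) - set(dbm.list_providers(db_uuid=db_uuid))
    if missing:
        raise grypedb.DBProviderException(f"missing providers in DB: {sorted(missing)}")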
diff --git a/pkg/process/v6/transformers/os/transform.go b/pkg/process/v6/transformers/os/transform.go
index aba7a131..aad10645 100644
--- a/pkg/process/v6/transformers/os/transform.go
+++ b/pkg/process/v6/transformers/os/transform.go
@@ -199,7 +199,7 @@ func groupFixedIns(vuln unmarshal.OSVulnerability) map[groupIndex][]unmarshal.OS
 
 func getPackageType(osName string) string {
 	switch osName {
-	case "redhat", "amazon", "oracle", "sles", "mariner", "azurelinux":
+	case "redhat", "amazonlinux", "oraclelinux", "sles", "mariner", "azurelinux":
 		return string(pkg.RpmPkg)
 	case "ubuntu", "debian":
 		return string(pkg.DebPkg)
@@ -247,15 +247,7 @@ func normalizeOsName(id string) string {
 		return id
 	}
 
-	distroName := d.String()
-
-	switch d {
-	case distro.OracleLinux:
-		distroName = "oracle"
-	case distro.AmazonLinux:
-		distroName = "amazon"
-	}
-	return distroName
+	return d.String()
 }
 
 func getOperatingSystem(osName, osID, osVersion string) *grypeDB.OperatingSystem {
diff --git a/pkg/process/v6/transformers/os/transform_test.go b/pkg/process/v6/transformers/os/transform_test.go
index a6625dab..1b1b6575 100644
--- a/pkg/process/v6/transformers/os/transform_test.go
+++ b/pkg/process/v6/transformers/os/transform_test.go
@@ -52,7 +52,7 @@ func TestTransform(t *testing.T) {
 	}
 
 	amazonOS := &grypeDB.OperatingSystem{
-		Name:         "amazon",
+		Name:         "amazonlinux",
 		ReleaseID:    "amzn",
 		MajorVersion: "2",
 	}
@@ -76,7 +76,7 @@ func TestTransform(t *testing.T) {
 		MinorVersion: "0", // TODO: is this right?
 	}
 	ol8OS := &grypeDB.OperatingSystem{
-		Name:         "oracle",
+		Name:         "oraclelinux",
 		ReleaseID:    "ol",
 		MajorVersion: "8",
 	}
@@ -1221,8 +1221,8 @@ func TestGetOSInfo(t *testing.T) {
 		{
 			name:       "oracle linux",
 			group:      "ol:8",
-			expectedOS: "oracle", // normalize name
-			expectedID: "ol",     // keep original ID
+			expectedOS: "oraclelinux", // normalize name
+			expectedID: "ol",          // keep original ID
 			expectedV:  "8",
 		},
 	}
diff --git a/poetry.lock b/poetry.lock
index 4a1f45f4..dc892827 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -428,19 +428,20 @@ test-randomorder = ["pytest-randomly"]
 
 [[package]]
 name = "dataclass-wizard"
-version = "0.22.3"
-description = "Marshal dataclasses to/from JSON. Use field properties with initial values. Construct a dataclass schema with JSON input."
+version = "0.30.1"
+description = "Effortlessly marshal dataclasses to/from JSON. Leverage field properties with default values. Generate dataclass schemas from JSON input."
optional = false python-versions = "*" files = [ - {file = "dataclass-wizard-0.22.3.tar.gz", hash = "sha256:4c46591782265058f1148cfd1f54a3a91221e63986fdd04c9d59f4ced61f4424"}, - {file = "dataclass_wizard-0.22.3-py2.py3-none-any.whl", hash = "sha256:63751203e54b9b9349212cc185331da73c1adc99c51312575eb73bb5c00c1962"}, + {file = "dataclass-wizard-0.30.1.tar.gz", hash = "sha256:f382daab1c9aca258fe47fed089a495b69da736bde2dc7ff61c5440de2233f38"}, + {file = "dataclass_wizard-0.30.1-py2.py3-none-any.whl", hash = "sha256:bf4af012d4fc04511efcc2be52024589faec150795bec5517b56d4ab131e2d1d"}, ] [package.extras] -dev = ["Sphinx (==5.3.0)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.12)", "dataclasses-json (==0.5.6)", "flake8 (>=3)", "jsons (==1.6.1)", "pip (>=21.3.1)", "pytest (==7.0.1)", "pytest-cov (==3.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==3.0.1)", "sphinx-issues (==4.0.0)", "tox (==3.24.5)", "twine (==3.8.0)", "watchdog[watchmedo] (==2.1.6)", "wheel (==0.37.1)", "wheel (==0.42.0)"] +dev = ["Sphinx (==7.4.7)", "Sphinx (==8.1.3)", "bump2version (==1.0.1)", "coverage (>=6.2)", "dataclass-factory (==2.16)", "dataclass-wizard[toml]", "dataclasses-json (==0.6.7)", "flake8 (>=3)", "jsons (==1.6.3)", "pip (>=21.3.1)", "pytest (==8.3.3)", "pytest-cov (==6.0.0)", "pytest-mock (>=3.6.1)", "pytimeparse (==1.1.8)", "sphinx-issues (==5.0.0)", "tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)", "tox (==4.23.2)", "twine (==5.1.1)", "watchdog[watchmedo] (==6.0.0)", "wheel (==0.45.1)"] timedelta = ["pytimeparse (>=1.1.7)"] -yaml = ["PyYAML (>=5.3)"] +toml = ["tomli (>=2,<3)", "tomli (>=2,<3)", "tomli-w (>=1,<2)"] +yaml = ["PyYAML (>=6,<7)"] [[package]] name = "dataclasses-json" @@ -849,13 +850,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] @@ -1383,6 +1384,138 @@ files = [ {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, ] +[[package]] +name = "xxhash" +version = "3.5.0" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"}, + {file = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"}, + {file = 
"xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"}, + {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"}, + {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"}, + {file = "xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"}, + {file = "xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"}, + {file = "xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"}, + {file = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"}, + {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"}, + {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"}, + {file = 
"xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"}, + {file = "xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"}, + {file = "xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"}, + {file = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"}, + {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"}, + {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"}, + {file = "xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"}, + {file = "xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"}, + {file = "xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"}, + {file = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"}, + {file = 
"xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"}, + {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"}, + {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"}, + {file = "xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"}, + {file = "xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"}, + {file = "xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"}, + {file = "xxhash-3.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6e5f70f6dca1d3b09bccb7daf4e087075ff776e3da9ac870f86ca316736bb4aa"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e76e83efc7b443052dd1e585a76201e40b3411fe3da7af4fe434ec51b2f163b"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33eac61d0796ca0591f94548dcfe37bb193671e0c9bcf065789b5792f2eda644"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ec70a89be933ea49222fafc3999987d7899fc676f688dd12252509434636622"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86b8e7f703ec6ff4f351cfdb9f428955859537125904aa8c963604f2e9d3e7"}, + {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0adfbd36003d9f86c8c97110039f7539b379f28656a04097e7434d3eaf9aa131"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:63107013578c8a730419adc05608756c3fa640bdc6abe806c3123a49fb829f43"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:683b94dbd1ca67557850b86423318a2e323511648f9f3f7b1840408a02b9a48c"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5d2a01dcce81789cf4b12d478b5464632204f4c834dc2d064902ee27d2d1f0ee"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:a9d360a792cbcce2fe7b66b8d51274ec297c53cbc423401480e53b26161a290d"}, + {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f0b48edbebea1b7421a9c687c304f7b44d0677c46498a046079d445454504737"}, + {file = "xxhash-3.5.0-cp37-cp37m-win32.whl", hash = "sha256:7ccb800c9418e438b44b060a32adeb8393764da7441eb52aa2aa195448935306"}, + {file = "xxhash-3.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c3bc7bf8cb8806f8d1c9bf149c18708cb1c406520097d6b0a73977460ea03602"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:74752ecaa544657d88b1d1c94ae68031e364a4d47005a90288f3bab3da3c970f"}, + {file = "xxhash-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dee1316133c9b463aa81aca676bc506d3f80d8f65aeb0bba2b78d0b30c51d7bd"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602d339548d35a8579c6b013339fb34aee2df9b4e105f985443d2860e4d7ffaa"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:695735deeddfb35da1677dbc16a083445360e37ff46d8ac5c6fcd64917ff9ade"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1030a39ba01b0c519b1a82f80e8802630d16ab95dc3f2b2386a0b5c8ed5cbb10"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bc08f33c4966f4eb6590d6ff3ceae76151ad744576b5fc6c4ba8edd459fdec"}, + {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160e0c19ee500482ddfb5d5570a0415f565d8ae2b3fd69c5dcfce8a58107b1c3"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1abffa122452481a61c3551ab3c89d72238e279e517705b8b03847b1d93d738"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5e9db7ef3ecbfc0b4733579cea45713a76852b002cf605420b12ef3ef1ec148"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:23241ff6423378a731d84864bf923a41649dc67b144debd1077f02e6249a0d54"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:82b833d5563fefd6fceafb1aed2f3f3ebe19f84760fdd289f8b926731c2e6e91"}, + {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a80ad0ffd78bef9509eee27b4a29e56f5414b87fb01a888353e3d5bda7038bd"}, + {file = "xxhash-3.5.0-cp38-cp38-win32.whl", hash = "sha256:50ac2184ffb1b999e11e27c7e3e70cc1139047e7ebc1aa95ed12f4269abe98d4"}, + {file = "xxhash-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:392f52ebbb932db566973693de48f15ce787cabd15cf6334e855ed22ea0be5b3"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"}, + {file = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"}, + {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"}, + {file = 
"xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"}, + {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"}, + {file = "xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"}, + {file = "xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"}, + {file = "xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"}, + {file = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b4154c00eb22e4d543f472cfca430e7962a0f1d0f3778334f2e08a7ba59363c"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d30bbc1644f726b825b3278764240f449d75f1a8bdda892e641d4a688b1494ae"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0b72f2423e2aa53077e54a61c28e181d23effeaafd73fcb9c494e60930c8e"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13de2b76c1835399b2e419a296d5b38dc4855385d9e96916299170085ef72f57"}, + {file = "xxhash-3.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0691bfcc4f9c656bcb96cc5db94b4d75980b9d5589f2e59de790091028580837"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:297595fe6138d4da2c8ce9e72a04d73e58725bb60f3a19048bc96ab2ff31c692"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1276d369452040cbb943300dc8abeedab14245ea44056a2943183822513a18"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2061188a1ba352fc699c82bff722f4baacb4b4b8b2f0c745d2001e56d0dfb514"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c384c434021e4f62b8d9ba0bc9467e14d394893077e2c66d826243025e1f81"}, + {file = "xxhash-3.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e6a4dd644d72ab316b580a1c120b375890e4c52ec392d4aef3c63361ec4d77d1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"}, + {file = 
"xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"}, + {file = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"}, + {file = "xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"}, +] + [[package]] name = "yardstick" version = "0.0.0" @@ -1395,24 +1528,26 @@ develop = false [package.dependencies] click = "^8" Colr = "^0.9.1" -dataclass-wizard = "^0.22.3" +dataclass-wizard = "^0.30.1" dataclasses-json = "^0.6.7" GitPython = "^3.1.43" importlib-metadata = ">=7.0.1,<9.0.0" mergedeep = "^1.3.4" omitempty = "^0.1.1" -prompt-toolkit = "^3.0.47" +prompt-toolkit = "^3.0.48" Pygments = "^2.18.0" PyYAML = ">= 6.0.0, < 7.0" requests = "^2.32.3" rfc3339 = "^6.2" tabulate = "^0.9.0" +xxhash = "^3.5.0" +zstandard = "^0.23.0" [package.source] type = "git" url = "https://github.com/anchore/yardstick" -reference = "v0.10.0" -resolved_reference = "fe6ae0f3a4399aeae08abc60e98670f6764614c9" +reference = "v0.11.0" +resolved_reference = "a992d49d38919fdda5d7080ce49792f39a4aac74" [[package]] name = "zipp" @@ -1431,57 +1566,108 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [[package]] name = "zstandard" -version = "0.22.0" +version = "0.23.0" description = "Zstandard bindings for Python" optional = false python-versions = ">=3.8" files = [ - {file = "zstandard-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:275df437ab03f8c033b8a2c181e51716c32d831082d93ce48002a5227ec93019"}, - {file = "zstandard-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ac9957bc6d2403c4772c890916bf181b2653640da98f32e04b96e4d6fb3252a"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe3390c538f12437b859d815040763abc728955a52ca6ff9c5d4ac707c4ad98e"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1958100b8a1cc3f27fa21071a55cb2ed32e9e5df4c3c6e661c193437f171cba2"}, - {file = "zstandard-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93e1856c8313bc688d5df069e106a4bc962eef3d13372020cc6e3ebf5e045202"}, - {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1a90ba9a4c9c884bb876a14be2b1d216609385efb180393df40e5172e7ecf356"}, - {file = "zstandard-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3db41c5e49ef73641d5111554e1d1d3af106410a6c1fb52cf68912ba7a343a0d"}, - {file = "zstandard-0.22.0-cp310-cp310-win32.whl", hash = "sha256:d8593f8464fb64d58e8cb0b905b272d40184eac9a18d83cf8c10749c3eafcd7e"}, - {file = "zstandard-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a4b358947a65b94e2501ce3e078bbc929b039ede4679ddb0460829b12f7375"}, - {file = "zstandard-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:589402548251056878d2e7c8859286eb91bd841af117dbe4ab000e6450987e08"}, - {file = "zstandard-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a97079b955b00b732c6f280d5023e0eefe359045e8b83b08cf0333af9ec78f26"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:445b47bc32de69d990ad0f34da0e20f535914623d1e506e74d6bc5c9dc40bb09"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33591d59f4956c9812f8063eff2e2c0065bc02050837f152574069f5f9f17775"}, - {file = "zstandard-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:888196c9c8893a1e8ff5e89b8f894e7f4f0e64a5af4d8f3c410f0319128bb2f8"}, - {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:53866a9d8ab363271c9e80c7c2e9441814961d47f88c9bc3b248142c32141d94"}, - {file = "zstandard-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ac59d5d6910b220141c1737b79d4a5aa9e57466e7469a012ed42ce2d3995e88"}, - {file = "zstandard-0.22.0-cp311-cp311-win32.whl", hash = "sha256:2b11ea433db22e720758cba584c9d661077121fcf60ab43351950ded20283440"}, - {file = "zstandard-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:11f0d1aab9516a497137b41e3d3ed4bbf7b2ee2abc79e5c8b010ad286d7464bd"}, - {file = "zstandard-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6c25b8eb733d4e741246151d895dd0308137532737f337411160ff69ca24f93a"}, - {file = "zstandard-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f9b2cde1cd1b2a10246dbc143ba49d942d14fb3d2b4bccf4618d475c65464912"}, - {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a88b7df61a292603e7cd662d92565d915796b094ffb3d206579aaebac6b85d5f"}, - {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466e6ad8caefb589ed281c076deb6f0cd330e8bc13c5035854ffb9c2014b118c"}, - {file = "zstandard-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1d67d0d53d2a138f9e29d8acdabe11310c185e36f0a848efa104d4e40b808e4"}, - {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:39b2853efc9403927f9065cc48c9980649462acbdf81cd4f0cb773af2fd734bc"}, - {file = "zstandard-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8a1b2effa96a5f019e72874969394edd393e2fbd6414a8208fea363a22803b45"}, - {file = "zstandard-0.22.0-cp312-cp312-win32.whl", hash = "sha256:88c5b4b47a8a138338a07fc94e2ba3b1535f69247670abfe422de4e0b344aae2"}, - {file = "zstandard-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:de20a212ef3d00d609d0b22eb7cc798d5a69035e81839f549b538eff4105d01c"}, - {file = "zstandard-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d75f693bb4e92c335e0645e8845e553cd09dc91616412d1d4650da835b5449df"}, - {file = "zstandard-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:36a47636c3de227cd765e25a21dc5dace00539b82ddd99ee36abae38178eff9e"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68953dc84b244b053c0d5f137a21ae8287ecf51b20872eccf8eaac0302d3e3b0"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2612e9bb4977381184bb2463150336d0f7e014d6bb5d4a370f9a372d21916f69"}, - {file = "zstandard-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23d2b3c2b8e7e5a6cb7922f7c27d73a9a615f0a5ab5d0e03dd533c477de23004"}, - {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:1d43501f5f31e22baf822720d82b5547f8a08f5386a883b32584a185675c8fbf"}, - {file = "zstandard-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a493d470183ee620a3df1e6e55b3e4de8143c0ba1b16f3ded83208ea8ddfd91d"}, - {file = "zstandard-0.22.0-cp38-cp38-win32.whl", hash = "sha256:7034d381789f45576ec3f1fa0e15d741828146439228dc3f7c59856c5bcd3292"}, - {file = "zstandard-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:d8fff0f0c1d8bc5d866762ae95bd99d53282337af1be9dc0d88506b340e74b73"}, - {file = "zstandard-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fdd53b806786bd6112d97c1f1e7841e5e4daa06810ab4b284026a1a0e484c0b"}, - {file = "zstandard-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:73a1d6bd01961e9fd447162e137ed949c01bdb830dfca487c4a14e9742dccc93"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9501f36fac6b875c124243a379267d879262480bf85b1dbda61f5ad4d01b75a3"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f260e4c7294ef275744210a4010f116048e0c95857befb7462e033f09442fe"}, - {file = "zstandard-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959665072bd60f45c5b6b5d711f15bdefc9849dd5da9fb6c873e35f5d34d8cfb"}, - {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d22fdef58976457c65e2796e6730a3ea4a254f3ba83777ecfc8592ff8d77d303"}, - {file = "zstandard-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a7ccf5825fd71d4542c8ab28d4d482aace885f5ebe4b40faaa290eed8e095a4c"}, - {file = "zstandard-0.22.0-cp39-cp39-win32.whl", hash = "sha256:f058a77ef0ece4e210bb0450e68408d4223f728b109764676e1a13537d056bb0"}, - {file = "zstandard-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:e9e9d4e2e336c529d4c435baad846a181e39a982f823f7e4495ec0b0ec8538d2"}, - {file = "zstandard-0.22.0.tar.gz", hash = "sha256:8226a33c542bcb54cd6bd0a366067b610b41713b64c9abec1bc4533d69f51e70"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = 
"zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = 
"zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = 
"zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = 
"sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, ] [package.dependencies] @@ -1493,4 +1679,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.11,<=3.13" -content-hash = "618b58c3adf4fe5edba3e40b5e77ca15a472d53e82f46d059bc6aa4b4c0d06f7" +content-hash = "13480a00995cb97f65ab331df451136f8e7738e498685330e0f87c0169842e2e" diff --git a/pyproject.toml b/pyproject.toml index 1090c98d..ebd6b313 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,14 +29,14 @@ boto3 = 
">=1.28.16, <2" click = ">=8.1.6, <9" dataclass-wizard = ">=0.22.2, <1" iso8601 = ">=2.0.0, <3" -requests = ">=2.31.0, <3" +requests = "^2.32.3" semver = ">=3.0.1, <4" tabulate = ">=0.9.0, <1" zstandard = ">=0.21.0, <1" colorlog = "^6.7.0" mergedeep = "^1.3.4" pyyaml = ">=5.0.1, <7" -yardstick = {git = "https://github.com/anchore/yardstick", rev = "v0.10.0"} +yardstick = {git = "https://github.com/anchore/yardstick", rev = "v0.11.0"} # yardstick = {path = "../yardstick", develop = true} # vunnel = {path = "../vunnel", develop = true} colr = "^0.9.1" @@ -67,6 +67,8 @@ testpaths = ["manager/tests"] cache_dir = ".cache/pytest" pythonpath = ["manager/src"] norecursedirs = ["data"] +log_format = "%(levelname)-6s %(message)s" +log_cli_level = "INFO" [tool.black] line-length = 130