From 2796082096f6dcaddb668eaf89b17ae78a135f5a Mon Sep 17 00:00:00 2001 From: Alex Goodman Date: Fri, 19 Jan 2024 17:55:12 -0500 Subject: [PATCH] [wip] migrate DB definitions from grype Signed-off-by: Alex Goodman --- cmd/grype-db/application/build_info.go | 2 +- go.mod | 18 +- go.sum | 33 +- internal/file/copy.go | 68 + internal/file/exists.go | 9 +- internal/file/getter.go | 130 +- internal/file/getter_test.go | 261 ++++ internal/file/hasher.go | 38 + internal/file/hasher_test.go | 85 + internal/file/tar.go | 106 ++ internal/file/tar_test.go | 138 ++ internal/stringutil/helpers.go | 36 + internal/stringutil/helpers_test.go | 156 ++ pkg/db/curation/curator.go | 408 +++++ pkg/db/curation/curator_test.go | 366 +++++ pkg/db/curation/listing.go | 90 ++ pkg/db/curation/listing_entry.go | 97 ++ pkg/db/curation/listing_test.go | 153 ++ pkg/db/curation/metadata.go | 137 ++ pkg/db/curation/metadata_test.go | 110 ++ pkg/db/curation/status.go | 11 + .../bad-checksum/vulnerability.db | 1 + .../good-checksum/vulnerability.db | 1 + .../test-fixtures/listing-sorted.json | 18 + .../test-fixtures/listing-unsorted.json | 18 + pkg/db/curation/test-fixtures/tls/.gitignore | 5 + pkg/db/curation/test-fixtures/tls/Makefile | 45 + pkg/db/curation/test-fixtures/tls/README.md | 24 + .../tls/generate-x509-cert-pair.sh | 16 + pkg/db/curation/test-fixtures/tls/listing.py | 27 + pkg/db/curation/test-fixtures/tls/serve.py | 25 + pkg/db/internal/gormadapter/logger.go | 37 + pkg/db/internal/gormadapter/open.go | 66 + pkg/db/internal/sqlite/nullable_types.go | 73 + pkg/db/internal/sqlite/nullable_types_test.go | 109 ++ pkg/db/v1/id.go | 28 + pkg/db/v1/namespace.go | 30 + pkg/db/v1/namespace_test.go | 49 + pkg/db/v1/schema_version.go | 3 + pkg/db/v1/store.go | 19 + pkg/db/v1/store/model/id.go | 40 + pkg/db/v1/store/model/vulnerability.go | 86 + .../v1/store/model/vulnerability_metadata.go | 104 ++ pkg/db/v1/store/store.go | 211 +++ pkg/db/v1/store/store_test.go | 494 ++++++ pkg/db/v1/vulnerability.go | 31 + pkg/db/v1/vulnerability_metadata.go | 33 + pkg/db/v2/id.go | 28 + pkg/db/v2/namespace.go | 30 + pkg/db/v2/namespace_test.go | 49 + pkg/db/v2/schema_version.go | 3 + pkg/db/v2/store.go | 19 + pkg/db/v2/store/model/id.go | 40 + pkg/db/v2/store/model/vulnerability.go | 86 + .../v2/store/model/vulnerability_metadata.go | 104 ++ pkg/db/v2/store/store.go | 210 +++ pkg/db/v2/store/store_test.go | 494 ++++++ pkg/db/v2/vulnerability.go | 14 + pkg/db/v2/vulnerability_metadata.go | 20 + pkg/db/v2/vulnerability_metadata_store.go | 14 + pkg/db/v2/vulnerability_store.go | 18 + pkg/db/v3/advisory.go | 7 + pkg/db/v3/diff.go | 16 + pkg/db/v3/fix.go | 16 + pkg/db/v3/id.go | 28 + pkg/db/v3/namespace.go | 138 ++ pkg/db/v3/namespace_test.go | 523 +++++++ pkg/db/v3/schema_version.go | 3 + pkg/db/v3/store.go | 24 + pkg/db/v3/store/diff.go | 305 ++++ pkg/db/v3/store/diff_test.go | 236 +++ pkg/db/v3/store/model/id.go | 40 + pkg/db/v3/store/model/vulnerability.go | 115 ++ .../v3/store/model/vulnerability_metadata.go | 87 ++ pkg/db/v3/store/store.go | 308 ++++ pkg/db/v3/store/store_test.go | 1174 ++++++++++++++ pkg/db/v3/vulnerability.go | 96 ++ pkg/db/v3/vulnerability_metadata.go | 76 + pkg/db/v3/vulnerability_metadata_store.go | 17 + pkg/db/v3/vulnerability_store.go | 21 + pkg/db/v4/advisory.go | 7 + pkg/db/v4/diff.go | 16 + pkg/db/v4/fix.go | 16 + pkg/db/v4/id.go | 28 + pkg/db/v4/namespace/cpe/namespace.go | 54 + pkg/db/v4/namespace/cpe/namespace_test.go | 51 + pkg/db/v4/namespace/distro/namespace.go | 67 + 
pkg/db/v4/namespace/distro/namespace_test.go | 85 + pkg/db/v4/namespace/from_string.go | 34 + pkg/db/v4/namespace/from_string_test.go | 50 + pkg/db/v4/namespace/index.go | 133 ++ pkg/db/v4/namespace/index_test.go | 283 ++++ pkg/db/v4/namespace/language/namespace.go | 78 + .../v4/namespace/language/namespace_test.go | 73 + pkg/db/v4/namespace/namespace.go | 11 + pkg/db/v4/pkg/resolver/from_language.go | 23 + pkg/db/v4/pkg/resolver/from_language_test.go | 70 + pkg/db/v4/pkg/resolver/java/resolver.go | 47 + pkg/db/v4/pkg/resolver/java/resolver_test.go | 175 +++ pkg/db/v4/pkg/resolver/python/resolver.go | 29 + .../v4/pkg/resolver/python/resolver_test.go | 42 + pkg/db/v4/pkg/resolver/resolver.go | 10 + pkg/db/v4/pkg/resolver/stock/resolver.go | 18 + pkg/db/v4/pkg/resolver/stock/resolver_test.go | 42 + pkg/db/v4/schema_version.go | 3 + pkg/db/v4/store.go | 30 + pkg/db/v4/store/diff.go | 305 ++++ pkg/db/v4/store/diff_test.go | 231 +++ pkg/db/v4/store/model/id.go | 40 + pkg/db/v4/store/model/vulnerability.go | 93 ++ .../model/vulnerability_match_exclusion.go | 72 + .../vulnerability_match_exclusion_test.go | 201 +++ .../v4/store/model/vulnerability_metadata.go | 74 + pkg/db/v4/store/store.go | 362 +++++ pkg/db/v4/store/store_test.go | 1377 +++++++++++++++++ pkg/db/v4/vulnerability.go | 96 ++ pkg/db/v4/vulnerability_match_exclusion.go | 130 ++ .../v4/vulnerability_match_exclusion_store.go | 14 + pkg/db/v4/vulnerability_metadata.go | 76 + pkg/db/v4/vulnerability_metadata_store.go | 15 + pkg/db/v4/vulnerability_store.go | 21 + pkg/db/v5/advisory.go | 7 + pkg/db/v5/fix.go | 16 + pkg/db/v5/id.go | 28 + pkg/db/v5/namespace/cpe/namespace.go | 54 + pkg/db/v5/namespace/cpe/namespace_test.go | 51 + pkg/db/v5/namespace/distro/namespace.go | 67 + pkg/db/v5/namespace/distro/namespace_test.go | 85 + pkg/db/v5/namespace/from_string.go | 34 + pkg/db/v5/namespace/from_string_test.go | 50 + pkg/db/v5/namespace/index.go | 183 +++ pkg/db/v5/namespace/index_test.go | 369 +++++ pkg/db/v5/namespace/language/namespace.go | 78 + .../v5/namespace/language/namespace_test.go | 77 + pkg/db/v5/namespace/namespace.go | 11 + pkg/db/v5/pkg/qualifier/from_json.go | 52 + .../v5/pkg/qualifier/platformcpe/qualifier.go | 21 + pkg/db/v5/pkg/qualifier/qualifier.go | 12 + .../pkg/qualifier/rpmmodularity/qualifier.go | 21 + pkg/db/v5/pkg/resolver/from_language.go | 23 + pkg/db/v5/pkg/resolver/from_language_test.go | 70 + pkg/db/v5/pkg/resolver/java/resolver.go | 47 + pkg/db/v5/pkg/resolver/java/resolver_test.go | 175 +++ pkg/db/v5/pkg/resolver/python/resolver.go | 29 + .../v5/pkg/resolver/python/resolver_test.go | 42 + pkg/db/v5/pkg/resolver/resolver.go | 10 + pkg/db/v5/pkg/resolver/stock/resolver.go | 18 + pkg/db/v5/pkg/resolver/stock/resolver_test.go | 42 + pkg/db/v5/schema_version.go | 3 + pkg/db/v5/store.go | 25 + pkg/db/v5/store/model/id.go | 40 + pkg/db/v5/store/model/vulnerability.go | 102 ++ .../model/vulnerability_match_exclusion.go | 72 + .../vulnerability_match_exclusion_test.go | 201 +++ .../v5/store/model/vulnerability_metadata.go | 74 + pkg/db/v5/store/model/vulnerability_test.go | 167 ++ pkg/db/v5/store/store.go | 324 ++++ pkg/db/v5/store/store_test.go | 1375 ++++++++++++++++ pkg/db/v5/vulnerability.go | 114 ++ pkg/db/v5/vulnerability_match_exclusion.go | 130 ++ .../v5/vulnerability_match_exclusion_store.go | 14 + pkg/db/v5/vulnerability_metadata.go | 78 + pkg/db/v5/vulnerability_metadata_store.go | 15 + pkg/db/v5/vulnerability_store.go | 23 + pkg/process/build.go | 10 +- pkg/process/default_schema_version.go | 2 +- 
pkg/process/package.go | 10 +- pkg/process/v1/transformers/entry.go | 2 +- .../v1/transformers/github/transform.go | 2 +- .../v1/transformers/github/transform_test.go | 2 +- pkg/process/v1/transformers/nvd/transform.go | 9 +- .../v1/transformers/nvd/transform_test.go | 2 +- pkg/process/v1/transformers/os/transform.go | 2 +- .../v1/transformers/os/transform_test.go | 2 +- pkg/process/v1/writer.go | 12 +- pkg/process/v1/writer_test.go | 2 +- pkg/process/v2/transformers/entry.go | 2 +- .../v2/transformers/github/transform.go | 2 +- .../v2/transformers/github/transform_test.go | 2 +- pkg/process/v2/transformers/nvd/transform.go | 9 +- .../v2/transformers/nvd/transform_test.go | 2 +- pkg/process/v2/transformers/os/transform.go | 2 +- .../v2/transformers/os/transform_test.go | 2 +- pkg/process/v2/writer.go | 12 +- pkg/process/v2/writer_test.go | 2 +- pkg/process/v3/transformers/entry.go | 2 +- .../v3/transformers/github/transform.go | 2 +- .../v3/transformers/github/transform_test.go | 2 +- pkg/process/v3/transformers/msrc/transform.go | 2 +- .../v3/transformers/msrc/transform_test.go | 2 +- pkg/process/v3/transformers/nvd/transform.go | 9 +- .../v3/transformers/nvd/transform_test.go | 2 +- pkg/process/v3/transformers/os/transform.go | 2 +- .../v3/transformers/os/transform_test.go | 2 +- pkg/process/v3/writer.go | 12 +- pkg/process/v3/writer_test.go | 2 +- pkg/process/v4/transformers/entry.go | 2 +- .../v4/transformers/github/transform.go | 4 +- .../v4/transformers/github/transform_test.go | 6 +- .../transformers/matchexclusions/transform.go | 2 +- pkg/process/v4/transformers/msrc/transform.go | 4 +- .../v4/transformers/msrc/transform_test.go | 2 +- pkg/process/v4/transformers/nvd/transform.go | 11 +- .../v4/transformers/nvd/transform_test.go | 2 +- pkg/process/v4/transformers/os/transform.go | 4 +- .../v4/transformers/os/transform_test.go | 2 +- pkg/process/v4/writer.go | 12 +- pkg/process/v4/writer_test.go | 2 +- pkg/process/v5/transformers/entry.go | 2 +- .../v5/transformers/github/transform.go | 4 +- .../v5/transformers/github/transform_test.go | 8 +- .../transformers/matchexclusions/transform.go | 2 +- pkg/process/v5/transformers/msrc/transform.go | 4 +- .../v5/transformers/msrc/transform_test.go | 2 +- pkg/process/v5/transformers/nvd/transform.go | 15 +- .../v5/transformers/nvd/transform_test.go | 6 +- pkg/process/v5/transformers/os/transform.go | 8 +- .../v5/transformers/os/transform_test.go | 6 +- pkg/process/v5/writer.go | 12 +- pkg/process/v5/writer_test.go | 2 +- 220 files changed, 17402 insertions(+), 268 deletions(-) create mode 100644 internal/file/copy.go create mode 100644 internal/file/getter_test.go create mode 100644 internal/file/hasher_test.go create mode 100644 internal/file/tar.go create mode 100644 internal/file/tar_test.go create mode 100644 internal/stringutil/helpers.go create mode 100644 internal/stringutil/helpers_test.go create mode 100644 pkg/db/curation/curator.go create mode 100644 pkg/db/curation/curator_test.go create mode 100644 pkg/db/curation/listing.go create mode 100644 pkg/db/curation/listing_entry.go create mode 100644 pkg/db/curation/listing_test.go create mode 100644 pkg/db/curation/metadata.go create mode 100644 pkg/db/curation/metadata_test.go create mode 100644 pkg/db/curation/status.go create mode 100644 pkg/db/curation/test-fixtures/curator-validate/bad-checksum/vulnerability.db create mode 100644 pkg/db/curation/test-fixtures/curator-validate/good-checksum/vulnerability.db create mode 100644 pkg/db/curation/test-fixtures/listing-sorted.json 
create mode 100644 pkg/db/curation/test-fixtures/listing-unsorted.json create mode 100755 pkg/db/curation/test-fixtures/tls/.gitignore create mode 100644 pkg/db/curation/test-fixtures/tls/Makefile create mode 100644 pkg/db/curation/test-fixtures/tls/README.md create mode 100755 pkg/db/curation/test-fixtures/tls/generate-x509-cert-pair.sh create mode 100644 pkg/db/curation/test-fixtures/tls/listing.py create mode 100644 pkg/db/curation/test-fixtures/tls/serve.py create mode 100644 pkg/db/internal/gormadapter/logger.go create mode 100644 pkg/db/internal/gormadapter/open.go create mode 100644 pkg/db/internal/sqlite/nullable_types.go create mode 100644 pkg/db/internal/sqlite/nullable_types_test.go create mode 100644 pkg/db/v1/id.go create mode 100644 pkg/db/v1/namespace.go create mode 100644 pkg/db/v1/namespace_test.go create mode 100644 pkg/db/v1/schema_version.go create mode 100644 pkg/db/v1/store.go create mode 100644 pkg/db/v1/store/model/id.go create mode 100644 pkg/db/v1/store/model/vulnerability.go create mode 100644 pkg/db/v1/store/model/vulnerability_metadata.go create mode 100644 pkg/db/v1/store/store.go create mode 100644 pkg/db/v1/store/store_test.go create mode 100644 pkg/db/v1/vulnerability.go create mode 100644 pkg/db/v1/vulnerability_metadata.go create mode 100644 pkg/db/v2/id.go create mode 100644 pkg/db/v2/namespace.go create mode 100644 pkg/db/v2/namespace_test.go create mode 100644 pkg/db/v2/schema_version.go create mode 100644 pkg/db/v2/store.go create mode 100644 pkg/db/v2/store/model/id.go create mode 100644 pkg/db/v2/store/model/vulnerability.go create mode 100644 pkg/db/v2/store/model/vulnerability_metadata.go create mode 100644 pkg/db/v2/store/store.go create mode 100644 pkg/db/v2/store/store_test.go create mode 100644 pkg/db/v2/vulnerability.go create mode 100644 pkg/db/v2/vulnerability_metadata.go create mode 100644 pkg/db/v2/vulnerability_metadata_store.go create mode 100644 pkg/db/v2/vulnerability_store.go create mode 100644 pkg/db/v3/advisory.go create mode 100644 pkg/db/v3/diff.go create mode 100644 pkg/db/v3/fix.go create mode 100644 pkg/db/v3/id.go create mode 100644 pkg/db/v3/namespace.go create mode 100644 pkg/db/v3/namespace_test.go create mode 100644 pkg/db/v3/schema_version.go create mode 100644 pkg/db/v3/store.go create mode 100644 pkg/db/v3/store/diff.go create mode 100644 pkg/db/v3/store/diff_test.go create mode 100644 pkg/db/v3/store/model/id.go create mode 100644 pkg/db/v3/store/model/vulnerability.go create mode 100644 pkg/db/v3/store/model/vulnerability_metadata.go create mode 100644 pkg/db/v3/store/store.go create mode 100644 pkg/db/v3/store/store_test.go create mode 100644 pkg/db/v3/vulnerability.go create mode 100644 pkg/db/v3/vulnerability_metadata.go create mode 100644 pkg/db/v3/vulnerability_metadata_store.go create mode 100644 pkg/db/v3/vulnerability_store.go create mode 100644 pkg/db/v4/advisory.go create mode 100644 pkg/db/v4/diff.go create mode 100644 pkg/db/v4/fix.go create mode 100644 pkg/db/v4/id.go create mode 100644 pkg/db/v4/namespace/cpe/namespace.go create mode 100644 pkg/db/v4/namespace/cpe/namespace_test.go create mode 100644 pkg/db/v4/namespace/distro/namespace.go create mode 100644 pkg/db/v4/namespace/distro/namespace_test.go create mode 100644 pkg/db/v4/namespace/from_string.go create mode 100644 pkg/db/v4/namespace/from_string_test.go create mode 100644 pkg/db/v4/namespace/index.go create mode 100644 pkg/db/v4/namespace/index_test.go create mode 100644 pkg/db/v4/namespace/language/namespace.go create mode 100644 
pkg/db/v4/namespace/language/namespace_test.go create mode 100644 pkg/db/v4/namespace/namespace.go create mode 100644 pkg/db/v4/pkg/resolver/from_language.go create mode 100644 pkg/db/v4/pkg/resolver/from_language_test.go create mode 100644 pkg/db/v4/pkg/resolver/java/resolver.go create mode 100644 pkg/db/v4/pkg/resolver/java/resolver_test.go create mode 100644 pkg/db/v4/pkg/resolver/python/resolver.go create mode 100644 pkg/db/v4/pkg/resolver/python/resolver_test.go create mode 100644 pkg/db/v4/pkg/resolver/resolver.go create mode 100644 pkg/db/v4/pkg/resolver/stock/resolver.go create mode 100644 pkg/db/v4/pkg/resolver/stock/resolver_test.go create mode 100644 pkg/db/v4/schema_version.go create mode 100644 pkg/db/v4/store.go create mode 100644 pkg/db/v4/store/diff.go create mode 100644 pkg/db/v4/store/diff_test.go create mode 100644 pkg/db/v4/store/model/id.go create mode 100644 pkg/db/v4/store/model/vulnerability.go create mode 100644 pkg/db/v4/store/model/vulnerability_match_exclusion.go create mode 100644 pkg/db/v4/store/model/vulnerability_match_exclusion_test.go create mode 100644 pkg/db/v4/store/model/vulnerability_metadata.go create mode 100644 pkg/db/v4/store/store.go create mode 100644 pkg/db/v4/store/store_test.go create mode 100644 pkg/db/v4/vulnerability.go create mode 100644 pkg/db/v4/vulnerability_match_exclusion.go create mode 100644 pkg/db/v4/vulnerability_match_exclusion_store.go create mode 100644 pkg/db/v4/vulnerability_metadata.go create mode 100644 pkg/db/v4/vulnerability_metadata_store.go create mode 100644 pkg/db/v4/vulnerability_store.go create mode 100644 pkg/db/v5/advisory.go create mode 100644 pkg/db/v5/fix.go create mode 100644 pkg/db/v5/id.go create mode 100644 pkg/db/v5/namespace/cpe/namespace.go create mode 100644 pkg/db/v5/namespace/cpe/namespace_test.go create mode 100644 pkg/db/v5/namespace/distro/namespace.go create mode 100644 pkg/db/v5/namespace/distro/namespace_test.go create mode 100644 pkg/db/v5/namespace/from_string.go create mode 100644 pkg/db/v5/namespace/from_string_test.go create mode 100644 pkg/db/v5/namespace/index.go create mode 100644 pkg/db/v5/namespace/index_test.go create mode 100644 pkg/db/v5/namespace/language/namespace.go create mode 100644 pkg/db/v5/namespace/language/namespace_test.go create mode 100644 pkg/db/v5/namespace/namespace.go create mode 100644 pkg/db/v5/pkg/qualifier/from_json.go create mode 100644 pkg/db/v5/pkg/qualifier/platformcpe/qualifier.go create mode 100644 pkg/db/v5/pkg/qualifier/qualifier.go create mode 100644 pkg/db/v5/pkg/qualifier/rpmmodularity/qualifier.go create mode 100644 pkg/db/v5/pkg/resolver/from_language.go create mode 100644 pkg/db/v5/pkg/resolver/from_language_test.go create mode 100644 pkg/db/v5/pkg/resolver/java/resolver.go create mode 100644 pkg/db/v5/pkg/resolver/java/resolver_test.go create mode 100644 pkg/db/v5/pkg/resolver/python/resolver.go create mode 100644 pkg/db/v5/pkg/resolver/python/resolver_test.go create mode 100644 pkg/db/v5/pkg/resolver/resolver.go create mode 100644 pkg/db/v5/pkg/resolver/stock/resolver.go create mode 100644 pkg/db/v5/pkg/resolver/stock/resolver_test.go create mode 100644 pkg/db/v5/schema_version.go create mode 100644 pkg/db/v5/store.go create mode 100644 pkg/db/v5/store/model/id.go create mode 100644 pkg/db/v5/store/model/vulnerability.go create mode 100644 pkg/db/v5/store/model/vulnerability_match_exclusion.go create mode 100644 pkg/db/v5/store/model/vulnerability_match_exclusion_test.go create mode 100644 pkg/db/v5/store/model/vulnerability_metadata.go create 
mode 100644 pkg/db/v5/store/model/vulnerability_test.go create mode 100644 pkg/db/v5/store/store.go create mode 100644 pkg/db/v5/store/store_test.go create mode 100644 pkg/db/v5/vulnerability.go create mode 100644 pkg/db/v5/vulnerability_match_exclusion.go create mode 100644 pkg/db/v5/vulnerability_match_exclusion_store.go create mode 100644 pkg/db/v5/vulnerability_metadata.go create mode 100644 pkg/db/v5/vulnerability_metadata_store.go create mode 100644 pkg/db/v5/vulnerability_store.go diff --git a/cmd/grype-db/application/build_info.go b/cmd/grype-db/application/build_info.go index af500493..691fe875 100644 --- a/cmd/grype-db/application/build_info.go +++ b/cmd/grype-db/application/build_info.go @@ -5,7 +5,7 @@ import ( "runtime" "runtime/debug" - grypeDB "github.com/anchore/grype/grype/db/v3" + grypeDB "github.com/anchore/grype-db/pkg/db/v3" ) const valueNotProvided = "[not provided]" diff --git a/go.mod b/go.mod index ec24e6f0..98e3d77a 100644 --- a/go.mod +++ b/go.mod @@ -8,20 +8,24 @@ require ( github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d github.com/adrg/xdg v0.4.0 github.com/anchore/go-logger v0.0.0-20230725134548-c21dafa1ec5a - github.com/anchore/grype v0.74.1 - github.com/anchore/syft v0.101.0 + github.com/anchore/grype v0.74.2 + github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501 + github.com/anchore/syft v0.101.1 github.com/dustin/go-humanize v1.0.1 github.com/glebarez/sqlite v1.10.0 github.com/go-test/deep v1.1.0 github.com/google/go-cmp v0.6.0 github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 + github.com/google/uuid v1.5.0 github.com/gookit/color v1.5.4 + github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b github.com/hashicorp/go-cleanhttp v0.5.2 github.com/hashicorp/go-getter v1.7.3 github.com/hashicorp/go-multierror v1.1.1 github.com/iancoleman/strcase v0.3.0 github.com/jinzhu/copier v0.4.0 github.com/klauspost/compress v1.17.4 + github.com/mholt/archiver/v3 v3.5.1 github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/mapstructure v1.5.0 github.com/pkg/profile v1.7.0 @@ -62,8 +66,7 @@ require ( github.com/anchore/go-macholibre v0.0.0-20220308212642-53e6d0aaf6fb // indirect github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 // indirect github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4 // indirect - github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501 // indirect - github.com/anchore/stereoscope v0.0.0-20231220161148-590920dabc54 // indirect + github.com/anchore/stereoscope v0.0.0-20240118133533-eb656fc71793 // indirect github.com/andybalholm/brotli v1.0.4 // indirect github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46 // indirect github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 // indirect @@ -87,7 +90,7 @@ require ( github.com/distribution/reference v0.5.0 // indirect github.com/docker/cli v24.0.0+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker v24.0.7+incompatible // indirect + github.com/docker/docker v25.0.0+incompatible // indirect github.com/docker/docker-credential-helpers v0.7.0 // indirect github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c // indirect @@ -112,14 +115,12 @@ require ( github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/golang/snappy v0.0.4 // indirect - 
github.com/google/go-containerregistry v0.17.0 // indirect + github.com/google/go-containerregistry v0.18.0 // indirect github.com/google/licensecheck v0.3.1 // indirect github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26 // indirect github.com/google/s2a-go v0.1.7 // indirect - github.com/google/uuid v1.5.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/gax-go/v2 v2.12.0 // indirect - github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-version v1.6.0 // indirect @@ -140,7 +141,6 @@ require ( github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.18 // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect - github.com/mholt/archiver/v3 v3.5.1 // indirect github.com/microsoft/go-rustaudit v0.0.0-20220730194248-4b17361d90a5 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect github.com/mitchellh/hashstructure/v2 v2.0.2 // indirect diff --git a/go.sum b/go.sum index 8e312a2e..645b301f 100644 --- a/go.sum +++ b/go.sum @@ -242,14 +242,14 @@ github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04 h1:VzprUTpc0v github.com/anchore/go-testutils v0.0.0-20200925183923-d5f45b0d3c04/go.mod h1:6dK64g27Qi1qGQZ67gFmBFvEHScy0/C8qhQhNe5B5pQ= github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4 h1:rmZG77uXgE+o2gozGEBoUMpX27lsku+xrMwlmBZJtbg= github.com/anchore/go-version v1.2.2-0.20210903204242-51efa5b487c4/go.mod h1:Bkc+JYWjMCF8OyZ340IMSIi2Ebf3uwByOk6ho4wne1E= -github.com/anchore/grype v0.74.1 h1:dVJ1pqZ15i/UwsvxgSGvczDABA/fnPtzwhKwxOqFeoY= -github.com/anchore/grype v0.74.1/go.mod h1:p1xBrBFJ5YAQ32pZ0MSsntVEg0GzXjBgQmlVRrEGl2g= +github.com/anchore/grype v0.74.2 h1:njw0O7kWyiZ6OB7357D09XTS1yB8pETyr7zyG7M8898= +github.com/anchore/grype v0.74.2/go.mod h1:LY/xJs0E5hF7xoEH6AZUFf0a0Lb8RMMwHlcvHYB/VMQ= github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501 h1:AV7qjwMcM4r8wFhJq3jLRztew3ywIyPTRapl2T1s9o8= github.com/anchore/packageurl-go v0.1.1-0.20230104203445-02e0a6721501/go.mod h1:Blo6OgJNiYF41ufcgHKkbCKF2MDOMlrqhXv/ij6ocR4= -github.com/anchore/stereoscope v0.0.0-20231220161148-590920dabc54 h1:i2YK5QEs9H2YB3B2zv+AGR44ves0nmAGOD07lMphH14= -github.com/anchore/stereoscope v0.0.0-20231220161148-590920dabc54/go.mod h1:IylG7ofLoUKHwS1XDF6rPhOmaE3GgpAgsMdvvYfooTU= -github.com/anchore/syft v0.101.0 h1:1CI6dHS8Kd1F4/LzJFRvaMTswZnJELBdWd9sIlHw5s4= -github.com/anchore/syft v0.101.0/go.mod h1:oRQuHODu6IYHhb9SDw9kmNhAmdZngVbTU1kR2jVq48E= +github.com/anchore/stereoscope v0.0.0-20240118133533-eb656fc71793 h1:wji+qdjsV7ooolBwb3faVZnEK3WtY/kcT5473kxVZS4= +github.com/anchore/stereoscope v0.0.0-20240118133533-eb656fc71793/go.mod h1:IylG7ofLoUKHwS1XDF6rPhOmaE3GgpAgsMdvvYfooTU= +github.com/anchore/syft v0.101.1 h1:PTh7XBdtXq3BYhuPz67rrC6AFPZxC1Rt8jgqv7Z75rA= +github.com/anchore/syft v0.101.1/go.mod h1:6rbrRWQN16TFENxXG1uFQOh9RCIp/UHJqPAJnHSKhjQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY= @@ -290,6 +290,8 @@ github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTS github.com/bradleyjkemp/cupaloy/v2 v2.8.0 h1:any4BmKE+jGIaMpnU8YgH/I2LPiLBufr6oMMlVBbn9M= 
github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= +github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM= +github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= @@ -364,8 +366,8 @@ github.com/docker/cli v24.0.0+incompatible h1:0+1VshNwBQzQAx9lOl+OYCTCEAD8fKs/qe github.com/docker/cli v24.0.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM= -github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v25.0.0+incompatible h1:g9b6wZTblhMgzOT2tspESstfw6ySZ9kdm94BLDKaZac= +github.com/docker/docker v25.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.7.0 h1:xtCHsjxogADNZcdv1pKUHXryefjlVRqWqIhk/uXJp0A= github.com/docker/docker-credential-helpers v0.7.0/go.mod h1:rETQfLdHNT3foU5kuNkFR1R1V12OJRRO5lzt2D1b5X0= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= @@ -515,8 +517,8 @@ github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeN github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-containerregistry v0.17.0 h1:5p+zYs/R4VGHkhyvgWurWrpJ2hW4Vv9fQI+GzdcwXLk= -github.com/google/go-containerregistry v0.17.0/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= +github.com/google/go-containerregistry v0.18.0 h1:ShE7erKNPqRh5ue6Z9DUOlk04WsnFWPO6YGr3OxnfoQ= +github.com/google/go-containerregistry v0.18.0/go.mod h1:u0qB2l7mvtWVR5kNcbFIhFY1hLbf8eeGapA+vbFDCtQ= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/licensecheck v0.3.1 h1:QoxgoDkaeC4nFrtGN1jV7IPmDCHFNIVh54e5hSt6sPs= github.com/google/licensecheck v0.3.1/go.mod h1:ORkR35t/JjW+emNKtfJDII0zlciG9JgbT7SmsohlHmY= @@ -573,7 +575,10 @@ github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+ github.com/gookit/color v1.2.5/go.mod h1:AhIE+pS6D4Ql0SQWbBeXPHw7gY0/sjHoA4s/n1KB7xg= github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0= github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w= +github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 h1:YBftPWNWd4WwGqtY2yeZL2ef8rHAxPBD8KFhJpmcqms= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg= 
github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b h1:wDUNC2eKiL35DbLvsDhiblTUXHxcOPwQSCzi7xpQUN4= github.com/hako/durafmt v0.0.0-20210608085754-5c1018a4e16b/go.mod h1:VzxiSdG6j1pi7rwGm/xYI5RbtpBgM8sARDXlvEvxlu0= github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= @@ -974,11 +979,19 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0 h1:x8Z78aZ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.45.0/go.mod h1:62CPTSry9QZtOaSsE3tOzhx6LzDhHnXJ6xHeMNNiM6Q= go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0 h1:IeMeyr1aBvBiPVYihXIaeIZba6b8E1bYp7lbdxK8CQg= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.19.0/go.mod h1:oVdCUtjq9MK9BlS7TtucsQwUcXcymNiEDjgDD2jMtZU= go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= +go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= +go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= +go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE= go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= diff --git a/internal/file/copy.go b/internal/file/copy.go new file mode 100644 index 00000000..45080444 --- /dev/null +++ b/internal/file/copy.go @@ -0,0 +1,68 @@ +package file + +import ( + "fmt" + "io" + "os" + "path" + + "github.com/spf13/afero" +) + +func CopyDir(fs afero.Fs, src string, dst string) error { + var err error + var fds []os.DirEntry + var srcinfo os.FileInfo + + if srcinfo, err = fs.Stat(src); err != nil { + return err + } + + if err = fs.MkdirAll(dst, srcinfo.Mode()); err != nil { + return err + } + + if fds, err = os.ReadDir(src); err != nil { + return err + } + for _, fd := range fds { + srcPath := path.Join(src, fd.Name()) + dstPath := path.Join(dst, fd.Name()) + + if fd.IsDir() { + if err = CopyDir(fs, srcPath, dstPath); err != nil { + return fmt.Errorf("could not copy dir (%s -> %s): %w", srcPath, dstPath, err) + } + } else { + if err = CopyFile(fs, srcPath, dstPath); err != nil { + return fmt.Errorf("could not copy file (%s -> %s): %w", srcPath, dstPath, err) + } + } + } + return nil +} + +func CopyFile(fs afero.Fs, src, dst string) error { + var err error + var srcFd afero.File + var dstFd afero.File + var srcinfo os.FileInfo + + if srcFd, err = fs.Open(src); err != nil { + return err + } + defer srcFd.Close() + + if dstFd, err = fs.Create(dst); err != nil { + return err 
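For illustration, a minimal sketch of how these new copy helpers are meant to be driven: CopyDir walks the source tree recursively and hands regular files to CopyFile, which preserves the source file mode via Chmod after the byte copy. The paths below are placeholders, and because internal/file is an internal package the call site must live inside the grype-db module:

	fs := afero.NewOsFs()
	// recursively copy a staged DB directory, preserving file modes along the way
	if err := file.CopyDir(fs, "/tmp/db-staging", "/tmp/db-build"); err != nil {
		return fmt.Errorf("unable to copy staged DB: %w", err)
	}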
+ } + defer dstFd.Close() + + if _, err = io.Copy(dstFd, srcFd); err != nil { + return err + } + if srcinfo, err = fs.Stat(src); err != nil { + return err + } + return fs.Chmod(dst, srcinfo.Mode()) +} diff --git a/internal/file/exists.go b/internal/file/exists.go index 702a6828..9e7221e1 100644 --- a/internal/file/exists.go +++ b/internal/file/exists.go @@ -6,10 +6,13 @@ import ( "github.com/spf13/afero" ) -func Exists(fs afero.Fs, path string) bool { +func Exists(fs afero.Fs, path string) (bool, error) { info, err := fs.Stat(path) if os.IsNotExist(err) { - return false + return false, nil + } else if err != nil { + return false, err } - return !info.IsDir() + + return !info.IsDir(), nil } diff --git a/internal/file/getter.go b/internal/file/getter.go index 588a9cf7..ed261390 100644 --- a/internal/file/getter.go +++ b/internal/file/getter.go @@ -1,23 +1,15 @@ package file import ( - "crypto/tls" - "crypto/x509" "fmt" "io" - "io/fs" - "math" "net/http" - "strings" - "time" - "github.com/hashicorp/go-cleanhttp" "github.com/hashicorp/go-getter" "github.com/hashicorp/go-getter/helper/url" "github.com/wagoodman/go-progress" - "github.com/anchore/go-logger" - "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/internal/stringutil" ) var ( @@ -34,52 +26,29 @@ type Getter interface { GetToDir(dst, src string, monitor ...*progress.Manual) error } -type hashiGoGetter struct { +type HashiGoGetter struct { httpGetter getter.HttpGetter } // NewGetter creates and returns a new Getter. Providing an http.Client is optional. If one is provided, // it will be used for all HTTP(S) getting; otherwise, go-getter's default getters will be used. -func NewGetter(httpClient *http.Client) Getter { - return &hashiGoGetter{ +func NewGetter(httpClient *http.Client) *HashiGoGetter { + return &HashiGoGetter{ httpGetter: getter.HttpGetter{ Client: httpClient, }, } } -func NewDefaultGetter() Getter { - return NewGetter(cleanhttp.DefaultClient()) -} - -func HTTPClientWithCerts(fileSystem fs.FS, caCertPath string) (*http.Client, error) { - httpClient := cleanhttp.DefaultClient() - if caCertPath != "" { - rootCAs := x509.NewCertPool() - - pemBytes, err := fs.ReadFile(fileSystem, caCertPath) - if err != nil { - return nil, fmt.Errorf("unable to configure root CAs for curator: %w", err) - } - rootCAs.AppendCertsFromPEM(pemBytes) - - httpClient.Transport.(*http.Transport).TLSClientConfig = &tls.Config{ - MinVersion: tls.VersionTLS12, - RootCAs: rootCAs, - } - } - return httpClient, nil -} - -func (g hashiGoGetter) GetFile(dst, src string, monitors ...*progress.Manual) error { +func (g HashiGoGetter) GetFile(dst, src string, monitors ...*progress.Manual) error { if len(monitors) > 1 { return fmt.Errorf("multiple monitors provided, which is not allowed") } - return getWithRetry(getterClient(dst, src, false, g.httpGetter, monitors)) + return getterClient(dst, src, false, g.httpGetter, monitors).Get() } -func (g hashiGoGetter) GetToDir(dst, src string, monitors ...*progress.Manual) error { +func (g HashiGoGetter) GetToDir(dst, src string, monitors ...*progress.Manual) error { // though there are multiple getters, only the http/https getter requires extra validation if err := validateHTTPSource(src); err != nil { return err @@ -88,69 +57,12 @@ func (g hashiGoGetter) GetToDir(dst, src string, monitors ...*progress.Manual) e return fmt.Errorf("multiple monitors provided, which is not allowed") } - return getWithRetry(getterClient(dst, src, true, g.httpGetter, monitors)) -} - -func getWithRetry(client 
*getter.Client) error { - var err error - attempt := 1 - for interval := range retryIntervals() { - fields := logger.Fields{ - "url": client.Src, - "to": client.Dst, - } - - if attempt > 1 { - fields["attempt"] = attempt - } - - log.WithFields(fields).Info("downloading file") - - err = client.Get() - if err == nil { - break - } - - time.Sleep(interval) - attempt++ - } - return err -} - -func retryIntervals() <-chan time.Duration { - return exponentialBackoffDurations(250*time.Millisecond, 5*time.Second, 2) -} - -func exponentialBackoffDurations(minDuration, maxDuration time.Duration, step float64) <-chan time.Duration { - sleepDurations := make(chan time.Duration) - go func() { - defer close(sleepDurations) - for attempt := 0; ; attempt++ { - duration := exponentialBackoffDuration(minDuration, maxDuration, step, attempt) - - sleepDurations <- duration - - if duration == maxDuration { - break - } - } - }() - return sleepDurations -} - -func exponentialBackoffDuration(minDuration, maxDuration time.Duration, step float64, attempt int) time.Duration { - duration := time.Duration(float64(minDuration) * math.Pow(step, float64(attempt))) - if duration < minDuration { - return minDuration - } else if duration > maxDuration { - return maxDuration - } - return duration + return getterClient(dst, src, true, g.httpGetter, monitors).Get() } func validateHTTPSource(src string) error { // we are ignoring any sources that are not destined to use the http getter object - if !hasAnyOfPrefixes(src, "http://", "https://") { + if !stringutil.HasAnyOfPrefixes(src, "http://", "https://") { return nil } @@ -159,7 +71,7 @@ func validateHTTPSource(src string) error { return fmt.Errorf("bad URL provided %q: %w", src, err) } // only allow for sources with archive extensions - if !hasAnyOfSuffixes(u.Path, archiveExtensions...) { + if !stringutil.HasAnyOfSuffixes(u.Path, archiveExtensions...) { return ErrNonArchiveSource } return nil @@ -230,25 +142,3 @@ func getterDecompressorNames() (names []string) { } return names } - -// hasAnyOfSuffixes returns an indication if the given string has any of the given suffixes. -func hasAnyOfSuffixes(input string, suffixes ...string) bool { - for _, suffix := range suffixes { - if strings.HasSuffix(input, suffix) { - return true - } - } - - return false -} - -// hasAnyOfPrefixes returns an indication if the given string has any of the given prefixes. 
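With HTTPClientWithCerts and the retry wrapper dropped from this package, a caller that needs a custom root CA now builds the http.Client itself and hands it to NewGetter. A minimal sketch that follows the shape of the removed helper (the function name, certificate path, and URL here are illustrative placeholders, not part of this change):

	package main

	import (
		"crypto/tls"
		"crypto/x509"
		"fmt"
		"net/http"
		"os"

		"github.com/hashicorp/go-cleanhttp"

		"github.com/anchore/grype-db/internal/file"
	)

	// newGetterWithCA reproduces the removed HTTPClientWithCerts behavior at the call site:
	// a cleanhttp default client whose transport trusts only the provided CA bundle.
	func newGetterWithCA(caCertPath string) (*file.HashiGoGetter, error) {
		pemBytes, err := os.ReadFile(caCertPath)
		if err != nil {
			return nil, fmt.Errorf("unable to read CA cert: %w", err)
		}
		rootCAs := x509.NewCertPool()
		rootCAs.AppendCertsFromPEM(pemBytes)

		httpClient := cleanhttp.DefaultClient()
		httpClient.Transport.(*http.Transport).TLSClientConfig = &tls.Config{
			MinVersion: tls.VersionTLS12,
			RootCAs:    rootCAs,
		}
		return file.NewGetter(httpClient), nil
	}

	func main() {
		g, err := newGetterWithCA("/etc/ssl/certs/private-ca.pem") // placeholder path
		if err != nil {
			panic(err)
		}
		_ = g.GetFile("/tmp/listing.json", "https://example.com/databases/listing.json")
	}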
-func hasAnyOfPrefixes(input string, prefixes ...string) bool { - for _, prefix := range prefixes { - if strings.HasPrefix(input, prefix) { - return true - } - } - - return false -} diff --git a/internal/file/getter_test.go b/internal/file/getter_test.go new file mode 100644 index 00000000..f0bdaf90 --- /dev/null +++ b/internal/file/getter_test.go @@ -0,0 +1,261 @@ +package file + +import ( + "archive/tar" + "bytes" + "context" + "crypto/x509" + "fmt" + "net" + "net/http" + "net/http/httptest" + "net/url" + "path" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetter_GetFile(t *testing.T) { + testCases := []struct { + name string + prepareClient func(*http.Client) + assert assert.ErrorAssertionFunc + }{ + { + name: "client trusts server's CA", + assert: assert.NoError, + }, + { + name: "client doesn't trust server's CA", + prepareClient: removeTrustedCAs, + assert: assertUnknownAuthorityError, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + requestPath := "/foo" + + server := newTestServer(t, withResponseForPath(t, requestPath, testFileContent)) + t.Cleanup(server.Close) + + httpClient := getClient(t, server) + if tc.prepareClient != nil { + tc.prepareClient(httpClient) + } + + getter := NewGetter(httpClient) + requestURL := createRequestURL(t, server, requestPath) + + tempDir := t.TempDir() + tempFile := path.Join(tempDir, "some-destination-file") + + err := getter.GetFile(tempFile, requestURL) + tc.assert(t, err) + }) + } +} + +func TestGetter_GetToDir_FilterNonArchivesWired(t *testing.T) { + testCases := []struct { + name string + source string + assert assert.ErrorAssertionFunc + }{ + { + name: "error out on non-archive sources", + source: "http://localhost/something.txt", + assert: assertErrNonArchiveSource, + }, + } + + for _, test := range testCases { + t.Run(test.name, func(t *testing.T) { + test.assert(t, NewGetter(nil).GetToDir(t.TempDir(), test.source)) + }) + } +} + +func TestGetter_validateHttpSource(t *testing.T) { + testCases := []struct { + name string + source string + assert assert.ErrorAssertionFunc + }{ + { + name: "error out on non-archive sources", + source: "http://localhost/something.txt", + assert: assertErrNonArchiveSource, + }, + { + name: "filter out non-archive sources with get param", + source: "https://localhost/vulnerability-db_v3_2021-11-21T08:15:44Z.txt?checksum=sha256%3Ac402d01fa909a3fa85a5c6733ef27a3a51a9105b6c62b9152adbd24c08358911", + assert: assertErrNonArchiveSource, + }, + { + name: "ignore non http-https input", + source: "s3://bucket/something.txt", + assert: assert.NoError, + }, + } + + for _, test := range testCases { + t.Run(test.name, func(t *testing.T) { + test.assert(t, validateHTTPSource(test.source)) + }) + } +} + +func TestGetter_GetToDir_CertConcerns(t *testing.T) { + testCases := []struct { + name string + prepareClient func(*http.Client) + assert assert.ErrorAssertionFunc + }{ + + { + name: "client trusts server's CA", + assert: assert.NoError, + }, + { + name: "client doesn't trust server's CA", + prepareClient: removeTrustedCAs, + assert: assertUnknownAuthorityError, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + requestPath := "/foo.tar" + tarball := createTarball("foo", testFileContent) + + server := newTestServer(t, withResponseForPath(t, requestPath, tarball)) + t.Cleanup(server.Close) + + httpClient := getClient(t, server) + if tc.prepareClient != nil { + tc.prepareClient(httpClient) + } + + getter := NewGetter(httpClient) + requestURL 
:= createRequestURL(t, server, requestPath) + + tempDir := t.TempDir() + + err := getter.GetToDir(tempDir, requestURL) + tc.assert(t, err) + }) + } +} + +func assertUnknownAuthorityError(t assert.TestingT, err error, _ ...interface{}) bool { + return assert.ErrorAs(t, err, &x509.UnknownAuthorityError{}) +} + +func assertErrNonArchiveSource(t assert.TestingT, err error, _ ...interface{}) bool { + return assert.ErrorIs(t, err, ErrNonArchiveSource) +} + +func removeTrustedCAs(client *http.Client) { + client.Transport.(*http.Transport).TLSClientConfig.RootCAs = x509.NewCertPool() +} + +// createTarball makes a single-file tarball and returns it as a byte slice. +func createTarball(filename string, content []byte) []byte { + tarBuffer := new(bytes.Buffer) + tarWriter := tar.NewWriter(tarBuffer) + tarWriter.WriteHeader(&tar.Header{ + Name: filename, + Size: int64(len(content)), + Mode: 0600, + }) + tarWriter.Write(content) + tarWriter.Close() + + return tarBuffer.Bytes() +} + +type muxOption func(mux *http.ServeMux) + +func withResponseForPath(t *testing.T, path string, response []byte) muxOption { + t.Helper() + + return func(mux *http.ServeMux) { + mux.HandleFunc(path, func(w http.ResponseWriter, req *http.Request) { + t.Logf("server handling request: %s %s", req.Method, req.URL) + + _, err := w.Write(response) + if err != nil { + t.Fatal(err) + } + }) + } +} + +func newTestServer(t *testing.T, muxOptions ...muxOption) *httptest.Server { + t.Helper() + + mux := http.NewServeMux() + for _, option := range muxOptions { + option(mux) + } + + server := httptest.NewTLSServer(mux) + t.Logf("new TLS server listening at %s", getHost(t, server)) + + return server +} + +func createRequestURL(t *testing.T, server *httptest.Server, path string) string { + t.Helper() + + // TODO: Figure out how to get this value from the server without hardcoding it here + const testServerCertificateName = "example.com" + + serverURL, err := url.Parse(server.URL) + if err != nil { + t.Fatal(err) + } + + // Set URL hostname to value from TLS certificate + serverURL.Host = fmt.Sprintf("%s:%s", testServerCertificateName, serverURL.Port()) + + serverURL.Path = path + + return serverURL.String() +} + +// getClient returns an http.Client that can be used to contact the test TLS server. +func getClient(t *testing.T, server *httptest.Server) *http.Client { + t.Helper() + + httpClient := server.Client() + transport := httpClient.Transport.(*http.Transport) + + serverHost := getHost(t, server) + + transport.DialContext = func(_ context.Context, _, addr string) (net.Conn, error) { + t.Logf("client dialing %q for host %q", serverHost, addr) + + // Ensure the client dials our test server + return net.Dial("tcp", serverHost) + } + + return httpClient +} + +// getHost extracts the host value from a server URL string. +// e.g. 
given a server with URL "http://1.2.3.4:5000/foo", getHost returns "1.2.3.4:5000" +func getHost(t *testing.T, server *httptest.Server) string { + t.Helper() + + u, err := url.Parse(server.URL) + if err != nil { + t.Fatal(err) + } + + return u.Hostname() + ":" + u.Port() +} + +var testFileContent = []byte("This is the content of a test file!\n") diff --git a/internal/file/hasher.go b/internal/file/hasher.go index 240f0449..afdaa04a 100644 --- a/internal/file/hasher.go +++ b/internal/file/hasher.go @@ -1,6 +1,7 @@ package file import ( + "crypto/sha256" "encoding/hex" "fmt" "hash" @@ -10,6 +11,8 @@ import ( "github.com/spf13/afero" ) +// TODO: these are duplicate functions that need to be refactored + func ContentDigest(fs afero.Fs, path string, hasher hash.Hash) (string, error) { f, err := fs.Open(path) if err != nil { @@ -34,3 +37,38 @@ func ValidateDigest(path, expectedDigest string, hasher hash.Hash) error { } return nil } + +func ValidateByHash(fs afero.Fs, path, hashStr string) (bool, string, error) { + var hasher hash.Hash + var hashFn string + switch { + case strings.HasPrefix(hashStr, "sha256:"): + hashFn = "sha256" + hasher = sha256.New() + default: + return false, "", fmt.Errorf("hasher not supported or specified (given: %s)", hashStr) + } + + hashNoPrefix := strings.Split(hashStr, ":")[1] + + actualHash, err := HashFile(fs, path, hasher) + if err != nil { + return false, "", err + } + + return actualHash == hashNoPrefix, hashFn + ":" + actualHash, nil +} + +func HashFile(fs afero.Fs, path string, hasher hash.Hash) (string, error) { + f, err := fs.Open(path) + if err != nil { + return "", fmt.Errorf("failed to open file '%s': %w", path, err) + } + defer f.Close() + + if _, err := io.Copy(hasher, f); err != nil { + return "", fmt.Errorf("failed to hash file '%s': %w", path, err) + } + + return hex.EncodeToString(hasher.Sum(nil)), nil +} diff --git a/internal/file/hasher_test.go b/internal/file/hasher_test.go new file mode 100644 index 00000000..94170791 --- /dev/null +++ b/internal/file/hasher_test.go @@ -0,0 +1,85 @@ +package file + +import ( + "fmt" + "testing" + + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" +) + +func TestValidateByHash(t *testing.T) { + testsCases := []struct { + name, path, hashStr, actualHash string + setup func(fs afero.Fs) + valid bool + err bool + errMsg error + }{ + { + name: "Valid SHA256 hash", + path: "test.txt", + hashStr: "sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", + setup: func(fs afero.Fs) { + afero.WriteFile(fs, "test.txt", []byte("test"), 0644) + }, + actualHash: "sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", + valid: true, + err: false, + }, + { + name: "Invalid SHA256 hash", + path: "test.txt", + hashStr: "sha256:deadbeef", + setup: func(fs afero.Fs) { + afero.WriteFile(fs, "test.txt", []byte("test"), 0644) + }, + actualHash: "sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", + valid: false, + err: false, + }, + { + name: "Unsupported hash function", + path: "test.txt", + hashStr: "md5:deadbeef", + setup: func(fs afero.Fs) { + afero.WriteFile(fs, "test.txt", []byte("test"), 0644) + }, + actualHash: "", + valid: false, + err: true, + errMsg: fmt.Errorf("hasher not supported or specified (given: md5:deadbeef)"), + }, + { + name: "File does not exist", + path: "nonexistent.txt", + hashStr: "sha256:deadbeef", + setup: func(fs afero.Fs) {}, + valid: false, + actualHash: "", + err: true, + }, + } + + for _, tc := range testsCases { + t.Run(tc.name, 
func(t *testing.T) { + fs := afero.NewMemMapFs() + tc.setup(fs) + + valid, actualHash, err := ValidateByHash(fs, tc.path, tc.hashStr) + + assert.Equal(t, tc.valid, valid) + assert.Equal(t, tc.actualHash, actualHash) + + if tc.err { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + + if tc.errMsg != nil { + assert.Equal(t, tc.errMsg, err) + } + }) + } +} diff --git a/internal/file/tar.go b/internal/file/tar.go new file mode 100644 index 00000000..c05e3eca --- /dev/null +++ b/internal/file/tar.go @@ -0,0 +1,106 @@ +package file + +import ( + "archive/tar" + "compress/gzip" + "fmt" + "io" + "os" + "path/filepath" + "strings" +) + +const ( + _ = iota + KB = 1 << (10 * iota) + MB + GB + // limit the tar reader to 5GB per file to prevent decompression bomb attacks. Why 5GB? This is somewhat of an + // arbitrary threshold, however, we need to keep this at at minimum 2GB to accommodate possible grype DB sizes. + decompressionByteReadLimit = 5 * GB +) + +type errZipSlipDetected struct { + Prefix string + JoinArgs []string +} + +func (e *errZipSlipDetected) Error() string { + return fmt.Sprintf("paths are not allowed to resolve outside of the root prefix (%q). Destination: %q", e.Prefix, e.JoinArgs) +} + +// safeJoin ensures that any destinations do not resolve to a path above the prefix path. +func safeJoin(prefix string, dest ...string) (string, error) { + joinResult := filepath.Join(append([]string{prefix}, dest...)...) + cleanJoinResult := filepath.Clean(joinResult) + if !strings.HasPrefix(cleanJoinResult, filepath.Clean(prefix)) { + return "", &errZipSlipDetected{ + Prefix: prefix, + JoinArgs: dest, + } + } + // why not return the clean path? the called may not be expected it from what should only be a join operation. + return joinResult, nil +} + +func UnTarGz(dst string, r io.Reader) error { + gzr, err := gzip.NewReader(r) + if err != nil { + return err + } + defer gzr.Close() + + tr := tar.NewReader(gzr) + + for { + header, err := tr.Next() + + switch { + case err == io.EOF: + return nil + + case err != nil: + return err + + case header == nil: + continue + } + + target, err := safeJoin(dst, header.Name) + if err != nil { + return err + } + + switch header.Typeflag { + case tar.TypeDir: + if _, err := os.Stat(target); err != nil { + if err := os.MkdirAll(target, 0755); err != nil { + return fmt.Errorf("failed to mkdir (%s): %w", target, err) + } + } + + case tar.TypeReg: + f, err := os.OpenFile(target, os.O_CREATE|os.O_RDWR, os.FileMode(header.Mode)) + if err != nil { + return fmt.Errorf("failed to open file (%s): %w", target, err) + } + + if err := copyWithLimits(f, tr, decompressionByteReadLimit, target); err != nil { + return err + } + + if err = f.Close(); err != nil { + return fmt.Errorf("failed to close file (%s): %w", target, err) + } + } + } +} + +func copyWithLimits(writer io.Writer, reader io.Reader, byteReadLimit int64, pathInArchive string) error { + if numBytes, err := io.Copy(writer, io.LimitReader(reader, byteReadLimit)); err != nil { + return fmt.Errorf("failed to copy file (%s): %w", pathInArchive, err) + } else if numBytes >= byteReadLimit { + return fmt.Errorf("failed to copy file (%s): read limit (%d bytes) reached ", pathInArchive, byteReadLimit) + } + return nil +} diff --git a/internal/file/tar_test.go b/internal/file/tar_test.go new file mode 100644 index 00000000..f8841e8c --- /dev/null +++ b/internal/file/tar_test.go @@ -0,0 +1,138 @@ +package file + +import ( + "bytes" + "errors" + "fmt" + "strings" + "testing" + + 
"github.com/stretchr/testify/assert" +) + +func assertErrorAs(expectedErr interface{}) assert.ErrorAssertionFunc { + return func(t assert.TestingT, actualErr error, i ...interface{}) bool { + return errors.As(actualErr, &expectedErr) + } +} + +func TestSafeJoin(t *testing.T) { + tests := []struct { + prefix string + args []string + expected string + errAssertion assert.ErrorAssertionFunc + }{ + // go cases... + { + prefix: "/a/place", + args: []string{ + "somewhere/else", + }, + expected: "/a/place/somewhere/else", + errAssertion: assert.NoError, + }, + { + prefix: "/a/place", + args: []string{ + "somewhere/../else", + }, + expected: "/a/place/else", + errAssertion: assert.NoError, + }, + { + prefix: "/a/../place", + args: []string{ + "somewhere/else", + }, + expected: "/place/somewhere/else", + errAssertion: assert.NoError, + }, + // zip slip examples.... + { + prefix: "/a/place", + args: []string{ + "../../../etc/passwd", + }, + expected: "", + errAssertion: assertErrorAs(&errZipSlipDetected{}), + }, + { + prefix: "/a/place", + args: []string{ + "../", + "../", + }, + expected: "", + errAssertion: assertErrorAs(&errZipSlipDetected{}), + }, + { + prefix: "/a/place", + args: []string{ + "../", + }, + expected: "", + errAssertion: assertErrorAs(&errZipSlipDetected{}), + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("%+v:%+v", test.prefix, test.args), func(t *testing.T) { + actual, err := safeJoin(test.prefix, test.args...) + test.errAssertion(t, err) + assert.Equal(t, test.expected, actual) + }) + } +} + +func Test_copyWithLimits(t *testing.T) { + tests := []struct { + name string + input string + byteReadLimit int64 + pathInArchive string + expectWritten string + expectErr bool + }{ + { + name: "write bytes", + input: "something here", + byteReadLimit: 1000, + pathInArchive: "dont care", + expectWritten: "something here", + expectErr: false, + }, + { + name: "surpass upper limit", + input: "something here", + byteReadLimit: 11, + pathInArchive: "dont care", + expectWritten: "something h", + expectErr: true, + }, + // since we want the threshold being reached to be easily detectable, simply reaching the threshold is + // enough to cause an error. Otherwise surpassing the threshold would be undetectable. + { + name: "reach limit exactly", + input: "something here", + byteReadLimit: 14, + pathInArchive: "dont care", + expectWritten: "something here", + expectErr: true, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + writer := &bytes.Buffer{} + err := copyWithLimits(writer, strings.NewReader(test.input), test.byteReadLimit, test.pathInArchive) + if (err != nil) != test.expectErr { + t.Errorf("copyWithLimits() error = %v, want %v", err, test.expectErr) + return + } else if err != nil { + assert.Contains(t, err.Error(), test.pathInArchive) + } + assert.Equal(t, test.expectWritten, writer.String()) + + }) + } +} diff --git a/internal/stringutil/helpers.go b/internal/stringutil/helpers.go new file mode 100644 index 00000000..25d21f02 --- /dev/null +++ b/internal/stringutil/helpers.go @@ -0,0 +1,36 @@ +package stringutil + +import "strings" + +// HasAnyOfSuffixes returns an indication if the given string has any of the given suffixes. +func HasAnyOfSuffixes(input string, suffixes ...string) bool { + for _, suffix := range suffixes { + if strings.HasSuffix(input, suffix) { + return true + } + } + + return false +} + +// HasAnyOfPrefixes returns an indication if the given string has any of the given prefixes. 
+func HasAnyOfPrefixes(input string, prefixes ...string) bool { + for _, prefix := range prefixes { + if strings.HasPrefix(input, prefix) { + return true + } + } + + return false +} + +// SplitCommaSeparatedString returns a slice of strings separated from the input string by commas +func SplitCommaSeparatedString(input string) []string { + output := make([]string, 0) + for _, inputItem := range strings.Split(input, ",") { + if len(inputItem) > 0 { + output = append(output, inputItem) + } + } + return output +} diff --git a/internal/stringutil/helpers_test.go b/internal/stringutil/helpers_test.go new file mode 100644 index 00000000..89baa28f --- /dev/null +++ b/internal/stringutil/helpers_test.go @@ -0,0 +1,156 @@ +package stringutil + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestHasAnyOfSuffixes(t *testing.T) { + tests := []struct { + name string + input string + suffixes []string + expected bool + }{ + { + name: "go case", + input: "this has something", + suffixes: []string{ + "has something", + "has NOT something", + }, + expected: true, + }, + { + name: "no match", + input: "this has something", + suffixes: []string{ + "has NOT something", + }, + expected: false, + }, + { + name: "empty", + input: "this has something", + suffixes: []string{}, + expected: false, + }, + { + name: "positive match last", + input: "this has something", + suffixes: []string{ + "that does not have", + "something", + }, + expected: true, + }, + { + name: "empty input", + input: "", + suffixes: []string{ + "that does not have", + "this has", + }, + expected: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert.Equal(t, test.expected, HasAnyOfSuffixes(test.input, test.suffixes...)) + }) + } +} + +func TestHasAnyOfPrefixes(t *testing.T) { + tests := []struct { + name string + input string + prefixes []string + expected bool + }{ + { + name: "go case", + input: "this has something", + prefixes: []string{ + "this has", + "that does not have", + }, + expected: true, + }, + { + name: "no match", + input: "this has something", + prefixes: []string{ + "this DOES NOT has", + "that does not have", + }, + expected: false, + }, + { + name: "empty", + input: "this has something", + prefixes: []string{}, + expected: false, + }, + { + name: "positive match last", + input: "this has something", + prefixes: []string{ + "that does not have", + "this has", + }, + expected: true, + }, + { + name: "empty input", + input: "", + prefixes: []string{ + "that does not have", + "this has", + }, + expected: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert.Equal(t, test.expected, HasAnyOfPrefixes(test.input, test.prefixes...)) + }) + } +} + +func TestSplitCommaSeparatedString(t *testing.T) { + tests := []struct { + input string + expected []string + }{ + { + input: "testing", + expected: []string{"testing"}, + }, + { + input: "", + expected: []string{}, + }, + { + input: "testing1,testing2", + expected: []string{"testing1", "testing2"}, + }, + { + input: "testing1,,testing2,testing3", + expected: []string{"testing1", "testing2", "testing3"}, + }, + { + input: "testing1,testing2,,", + expected: []string{"testing1", "testing2"}, + }, + } + + for _, test := range tests { + t.Run(test.input, func(t *testing.T) { + assert.Equal(t, test.expected, SplitCommaSeparatedString(test.input)) + }) + } +} diff --git a/pkg/db/curation/curator.go b/pkg/db/curation/curator.go new file mode 100644 index 00000000..766b6689 --- 
/dev/null +++ b/pkg/db/curation/curator.go @@ -0,0 +1,408 @@ +package curation + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + "net/http" + "os" + "path" + "strconv" + "time" + + "github.com/hako/durafmt" + "github.com/hashicorp/go-cleanhttp" + "github.com/mholt/archiver/v3" + "github.com/spf13/afero" + "github.com/wagoodman/go-partybus" + "github.com/wagoodman/go-progress" + + "github.com/anchore/grype-db/internal/bus" + "github.com/anchore/grype-db/internal/file" + "github.com/anchore/grype-db/internal/log" + grypeDB "github.com/anchore/grype-db/pkg/db/v5" + "github.com/anchore/grype-db/pkg/db/v5/store" + "github.com/anchore/grype/grype/event" + "github.com/anchore/grype/grype/vulnerability" +) + +const ( + FileName = grypeDB.VulnerabilityStoreFileName +) + +type Config struct { + DBRootDir string + ListingURL string + CACert string + ValidateByHashOnGet bool + ValidateAge bool + MaxAllowedBuiltAge time.Duration +} + +type Curator struct { + fs afero.Fs + downloader file.Getter + targetSchema int + dbDir string + dbPath string + listingURL string + validateByHashOnGet bool + validateAge bool + maxAllowedBuiltAge time.Duration +} + +func NewCurator(cfg Config) (Curator, error) { + dbDir := path.Join(cfg.DBRootDir, strconv.Itoa(vulnerability.SchemaVersion)) + + fs := afero.NewOsFs() + httpClient, err := defaultHTTPClient(fs, cfg.CACert) + if err != nil { + return Curator{}, err + } + + return Curator{ + fs: fs, + targetSchema: vulnerability.SchemaVersion, + downloader: file.NewGetter(httpClient), + dbDir: dbDir, + dbPath: path.Join(dbDir, FileName), + listingURL: cfg.ListingURL, + validateByHashOnGet: cfg.ValidateByHashOnGet, + validateAge: cfg.ValidateAge, + maxAllowedBuiltAge: cfg.MaxAllowedBuiltAge, + }, nil +} + +func (c Curator) SupportedSchema() int { + return c.targetSchema +} + +func (c *Curator) GetStore() (grypeDB.StoreReader, grypeDB.DBCloser, error) { + // ensure the DB is ok + _, err := c.validateIntegrity(c.dbDir) + if err != nil { + return nil, nil, fmt.Errorf("vulnerability database is invalid (run db update to correct): %+v", err) + } + + s, err := store.New(c.dbPath, false) + return s, s, err +} + +func (c *Curator) Status() Status { + metadata, err := NewMetadataFromDir(c.fs, c.dbDir) + if err != nil { + return Status{ + Err: fmt.Errorf("failed to parse database metadata (%s): %w", c.dbDir, err), + } + } + if metadata == nil { + return Status{ + Err: fmt.Errorf("database metadata not found at %q", c.dbDir), + } + } + + return Status{ + Built: metadata.Built, + SchemaVersion: metadata.Version, + Location: c.dbDir, + Checksum: metadata.Checksum, + Err: c.Validate(), + } +} + +// Delete removes the DB and metadata file for this specific schema. +func (c *Curator) Delete() error { + return c.fs.RemoveAll(c.dbDir) +} + +// Update the existing DB, returning an indication if any action was taken. 
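+// A minimal usage sketch (assuming c was created via NewCurator):
+//
+//	updated, err := c.Update()
+//	if err != nil {
+//		// the DB could not be updated
+//	}
+//	if updated {
+//		// a newer DB was downloaded and activated
+//	}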
+func (c *Curator) Update() (bool, error) { + // let consumers know of a monitorable event (download + import stages) + importProgress := progress.NewManual(1) + stage := progress.NewAtomicStage("checking for update") + downloadProgress := progress.NewManual(1) + aggregateProgress := progress.NewAggregator(progress.DefaultStrategy, downloadProgress, importProgress) + + bus.Publish(partybus.Event{ + Type: event.UpdateVulnerabilityDatabase, + Value: progress.StagedProgressable(&struct { + progress.Stager + progress.Progressable + }{ + Stager: progress.Stager(stage), + Progressable: progress.Progressable(aggregateProgress), + }), + }) + + defer downloadProgress.SetCompleted() + defer importProgress.SetCompleted() + + updateAvailable, metadata, updateEntry, err := c.IsUpdateAvailable() + if err != nil { + // we want to continue if possible even if we can't check for an update + log.Warnf("unable to check for vulnerability database update") + log.Debugf("check for vulnerability update failed: %+v", err) + } + if updateAvailable { + log.Infof("downloading new vulnerability DB") + err = c.UpdateTo(updateEntry, downloadProgress, importProgress, stage) + if err != nil { + return false, fmt.Errorf("unable to update vulnerability database: %w", err) + } + + if metadata != nil { + log.Infof( + "updated vulnerability DB from version=%d built=%q to version=%d built=%q", + metadata.Version, + metadata.Built.String(), + updateEntry.Version, + updateEntry.Built.String(), + ) + return true, nil + } + + log.Infof( + "downloaded new vulnerability DB version=%d built=%q", + updateEntry.Version, + updateEntry.Built.String(), + ) + return true, nil + } + + stage.Set("no update available") + return false, nil +} + +// IsUpdateAvailable indicates if there is a new update available as a boolean, and returns the latest listing information +// available for this schema. +func (c *Curator) IsUpdateAvailable() (bool, *Metadata, *ListingEntry, error) { + log.Debugf("checking for available database updates") + + listing, err := c.ListingFromURL() + if err != nil { + return false, nil, nil, err + } + + updateEntry := listing.BestUpdate(c.targetSchema) + if updateEntry == nil { + return false, nil, nil, fmt.Errorf("no db candidates with correct version available (maybe there is an application update available?)") + } + log.Debugf("found database update candidate: %s", updateEntry) + + // compare created data to current db date + current, err := NewMetadataFromDir(c.fs, c.dbDir) + if err != nil { + return false, nil, nil, fmt.Errorf("current metadata corrupt: %w", err) + } + + if current.IsSupersededBy(updateEntry) { + log.Debugf("database update available: %s", updateEntry) + return true, current, updateEntry, nil + } + log.Debugf("no database update available") + + return false, nil, nil, nil +} + +// UpdateTo updates the existing DB with the specific other version provided from a listing entry. 
+func (c *Curator) UpdateTo(listing *ListingEntry, downloadProgress, importProgress *progress.Manual, stage *progress.AtomicStage) error { + stage.Set("downloading") + // note: the temp directory is persisted upon download/validation/activation failure to allow for investigation + tempDir, err := c.download(listing, downloadProgress) + if err != nil { + return err + } + + stage.Set("validating integrity") + _, err = c.validateIntegrity(tempDir) + if err != nil { + return err + } + + stage.Set("importing") + err = c.activate(tempDir) + if err != nil { + return err + } + stage.Set("updated") + importProgress.Set(importProgress.Size()) + importProgress.SetCompleted() + + return c.fs.RemoveAll(tempDir) +} + +// Validate checks the current database to ensure file integrity and if it can be used by this version of the application. +func (c *Curator) Validate() error { + metadata, err := c.validateIntegrity(c.dbDir) + if err != nil { + return err + } + + return c.validateStaleness(metadata) +} + +// ImportFrom takes a DB archive file and imports it into the final DB location. +func (c *Curator) ImportFrom(dbArchivePath string) error { + // note: the temp directory is persisted upon download/validation/activation failure to allow for investigation + tempDir, err := os.MkdirTemp("", "grype-import") + if err != nil { + return fmt.Errorf("unable to create db temp dir: %w", err) + } + + err = archiver.Unarchive(dbArchivePath, tempDir) + if err != nil { + return err + } + + _, err = c.validateIntegrity(tempDir) + if err != nil { + return err + } + + err = c.activate(tempDir) + if err != nil { + return err + } + + return c.fs.RemoveAll(tempDir) +} + +func (c *Curator) download(listing *ListingEntry, downloadProgress *progress.Manual) (string, error) { + tempDir, err := os.MkdirTemp("", "grype-scratch") + if err != nil { + return "", fmt.Errorf("unable to create db temp dir: %w", err) + } + + // download the db to the temp dir + url := listing.URL + + // from go-getter, adding a checksum as a query string will validate the payload after download + // note: the checksum query parameter is not sent to the server + query := url.Query() + query.Add("checksum", listing.Checksum) + url.RawQuery = query.Encode() + + // go-getter will automatically extract all files within the archive to the temp dir + err = c.downloader.GetToDir(tempDir, listing.URL.String(), downloadProgress) + if err != nil { + return "", fmt.Errorf("unable to download db: %w", err) + } + + return tempDir, nil +} + +// validateStaleness ensures the vulnerability database has not passed +// the max allowed age, calculated from the time it was built until now. 
+func (c *Curator) validateStaleness(m Metadata) error { + if !c.validateAge { + return nil + } + + // built time is defined in UTC, + // we should compare it against UTC + now := time.Now().UTC() + + age := now.Sub(m.Built) + if age > c.maxAllowedBuiltAge { + return fmt.Errorf("the vulnerability database was built %s ago (max allowed age is %s)", durafmt.ParseShort(age), durafmt.ParseShort(c.maxAllowedBuiltAge)) + } + + return nil +} + +func (c *Curator) validateIntegrity(dbDirPath string) (Metadata, error) { + // check that the disk checksum still matches the db payload + metadata, err := NewMetadataFromDir(c.fs, dbDirPath) + if err != nil { + return Metadata{}, fmt.Errorf("failed to parse database metadata (%s): %w", dbDirPath, err) + } + if metadata == nil { + return Metadata{}, fmt.Errorf("database metadata not found: %s", dbDirPath) + } + + if c.validateByHashOnGet { + dbPath := path.Join(dbDirPath, FileName) + valid, actualHash, err := file.ValidateByHash(c.fs, dbPath, metadata.Checksum) + if err != nil { + return Metadata{}, err + } + if !valid { + return Metadata{}, fmt.Errorf("bad db checksum (%s): %q vs %q", dbPath, metadata.Checksum, actualHash) + } + } + + if c.targetSchema != metadata.Version { + return Metadata{}, fmt.Errorf("unsupported database version: have=%d want=%d", metadata.Version, c.targetSchema) + } + + // TODO: add version checks here to ensure this version of the application can use this database version (relative to what the DB says, not JUST the metadata!) + + return *metadata, nil +} + +// activate swaps over the downloaded db to the application directory +func (c *Curator) activate(dbDirPath string) error { + _, err := c.fs.Stat(c.dbDir) + if !os.IsNotExist(err) { + // remove any previous databases + err = c.Delete() + if err != nil { + return fmt.Errorf("failed to purge existing database: %w", err) + } + } + + // ensure there is an application db directory + err = c.fs.MkdirAll(c.dbDir, 0755) + if err != nil { + return fmt.Errorf("failed to create db directory: %w", err) + } + + // activate the new db cache + return file.CopyDir(c.fs, dbDirPath, c.dbDir) +} + +// ListingFromURL loads a Listing from a URL. 
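+// The URL is taken from the curator's configured listing URL (Config.ListingURL).
+// Illustrative usage:
+//
+//	listing, err := c.ListingFromURL()
+//	if err == nil {
+//		entry := listing.BestUpdate(c.targetSchema)
+//		_ = entry // may be nil if no candidate exists for the schema
+//	}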
+func (c Curator) ListingFromURL() (Listing, error) { + tempFile, err := afero.TempFile(c.fs, "", "grype-db-listing") + if err != nil { + return Listing{}, fmt.Errorf("unable to create listing temp file: %w", err) + } + defer func() { + err := c.fs.RemoveAll(tempFile.Name()) + if err != nil { + log.Errorf("failed to remove file (%s): %w", tempFile.Name(), err) + } + }() + + // download the listing file + err = c.downloader.GetFile(tempFile.Name(), c.listingURL) + if err != nil { + return Listing{}, fmt.Errorf("unable to download listing: %w", err) + } + + // parse the listing file + listing, err := NewListingFromFile(c.fs, tempFile.Name()) + if err != nil { + return Listing{}, err + } + return listing, nil +} + +func defaultHTTPClient(fs afero.Fs, caCertPath string) (*http.Client, error) { + httpClient := cleanhttp.DefaultClient() + if caCertPath != "" { + rootCAs := x509.NewCertPool() + + pemBytes, err := afero.ReadFile(fs, caCertPath) + if err != nil { + return nil, fmt.Errorf("unable to configure root CAs for curator: %w", err) + } + rootCAs.AppendCertsFromPEM(pemBytes) + + httpClient.Transport.(*http.Transport).TLSClientConfig = &tls.Config{ + MinVersion: tls.VersionTLS12, + RootCAs: rootCAs, + } + } + return httpClient, nil +} diff --git a/pkg/db/curation/curator_test.go b/pkg/db/curation/curator_test.go new file mode 100644 index 00000000..4b6f313f --- /dev/null +++ b/pkg/db/curation/curator_test.go @@ -0,0 +1,366 @@ +package curation + +import ( + "bufio" + "fmt" + "io" + "net/http" + "net/url" + "os" + "os/exec" + "path" + "path/filepath" + "strconv" + "syscall" + "testing" + "time" + + "github.com/gookit/color" + "github.com/scylladb/go-set/strset" + "github.com/spf13/afero" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/wagoodman/go-progress" + + "github.com/anchore/grype-db/internal/file" +) + +type testGetter struct { + file map[string]string + dir map[string]string + calls *strset.Set + fs afero.Fs +} + +func newTestGetter(fs afero.Fs, f, d map[string]string) *testGetter { + return &testGetter{ + file: f, + dir: d, + calls: strset.New(), + fs: fs, + } +} + +// GetFile downloads the give URL into the given path. The URL must reference a single file. +func (g *testGetter) GetFile(dst, src string, _ ...*progress.Manual) error { + g.calls.Add(src) + if _, ok := g.file[src]; !ok { + return fmt.Errorf("blerg, no file!") + } + return afero.WriteFile(g.fs, dst, []byte(g.file[src]), 0755) +} + +// Get downloads the given URL into the given directory. The directory must already exist. 
+func (g *testGetter) GetToDir(dst, src string, _ ...*progress.Manual) error { + g.calls.Add(src) + if _, ok := g.dir[src]; !ok { + return fmt.Errorf("blerg, no file!") + } + return afero.WriteFile(g.fs, dst, []byte(g.dir[src]), 0755) +} + +func newTestCurator(tb testing.TB, fs afero.Fs, getter file.Getter, dbDir, metadataUrl string, validateDbHash bool) Curator { + c, err := NewCurator(Config{ + DBRootDir: dbDir, + ListingURL: metadataUrl, + ValidateByHashOnGet: validateDbHash, + }) + + require.NoError(tb, err) + + c.downloader = getter + c.fs = fs + + return c +} + +func Test_defaultHTTPClient(t *testing.T) { + tests := []struct { + name string + hasCert bool + }{ + { + name: "no custom cert should use default system root certs", + hasCert: false, + }, + { + name: "should use single custom cert", + hasCert: true, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + var certPath string + if test.hasCert { + certPath = generateCertFixture(t) + } + + httpClient, err := defaultHTTPClient(afero.NewOsFs(), certPath) + require.NoError(t, err) + + if test.hasCert { + require.NotNil(t, httpClient.Transport.(*http.Transport).TLSClientConfig) + assert.Len(t, httpClient.Transport.(*http.Transport).TLSClientConfig.RootCAs.Subjects(), 1) + } else { + assert.Nil(t, httpClient.Transport.(*http.Transport).TLSClientConfig) + } + + }) + } +} + +func generateCertFixture(t *testing.T) string { + path := "test-fixtures/tls/server.crt" + if _, err := os.Stat(path); !os.IsNotExist(err) { + // fixture already exists... + return path + } + + t.Logf(color.Bold.Sprint("Generating Key/Cert Fixture")) + + cwd, err := os.Getwd() + if err != nil { + t.Errorf("unable to get cwd: %+v", err) + } + + cmd := exec.Command("make", "server.crt") + cmd.Dir = filepath.Join(cwd, "test-fixtures/tls") + + stderr, err := cmd.StderrPipe() + if err != nil { + t.Fatalf("could not get stderr: %+v", err) + } + stdout, err := cmd.StdoutPipe() + if err != nil { + t.Fatalf("could not get stdout: %+v", err) + } + + err = cmd.Start() + if err != nil { + t.Fatalf("failed to start cmd: %+v", err) + } + + show := func(label string, reader io.ReadCloser) { + scanner := bufio.NewScanner(reader) + scanner.Split(bufio.ScanLines) + for scanner.Scan() { + t.Logf("%s: %s", label, scanner.Text()) + } + } + go show("out", stdout) + go show("err", stderr) + + if err := cmd.Wait(); err != nil { + if exiterr, ok := err.(*exec.ExitError); ok { + // The program has exited with an exit code != 0 + + // This works on both Unix and Windows. Although package + // syscall is generally platform dependent, WaitStatus is + // defined for both Unix and Windows and in both cases has + // an ExitStatus() method with the same signature. 
+ if status, ok := exiterr.Sys().(syscall.WaitStatus); ok { + if status.ExitStatus() != 0 { + t.Fatalf("failed to generate fixture: rc=%d", status.ExitStatus()) + } + } + } else { + t.Fatalf("unable to get generate fixture result: %+v", err) + } + } + return path +} + +func TestCuratorDownload(t *testing.T) { + tests := []struct { + name string + entry *ListingEntry + expectedURL string + err bool + }{ + { + name: "download populates returned tempdir", + entry: &ListingEntry{ + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + URL: mustUrl(url.Parse("http://a-url/payload.tar.gz")), + Checksum: "sha256:deadbeefcafe", + }, + expectedURL: "http://a-url/payload.tar.gz?checksum=sha256%3Adeadbeefcafe", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + metadataUrl := "http://metadata.io" + contents := "CONTENTS!!!" + files := map[string]string{} + dirs := map[string]string{ + test.expectedURL: contents, + } + fs := afero.NewMemMapFs() + getter := newTestGetter(fs, files, dirs) + cur := newTestCurator(t, fs, getter, "/tmp/dbdir", metadataUrl, false) + + path, err := cur.download(test.entry, &progress.Manual{}) + if err != nil { + t.Fatalf("could not download entry: %+v", err) + } + + if !getter.calls.Has(test.expectedURL) { + t.Fatalf("never made the appropriate fetch call: %+v", getter.calls) + } + + f, err := fs.Open(path) + if err != nil { + t.Fatalf("no db file: %+v", err) + } + + actual, err := afero.ReadAll(f) + if err != nil { + t.Fatalf("bad db file read: %+v", err) + } + + if string(actual) != contents { + t.Fatalf("bad contents: %+v", string(actual)) + } + }) + } +} + +func TestCuratorValidate(t *testing.T) { + tests := []struct { + name string + fixture string + constraint int + cfgValidateDbHash bool + err bool + }{ + { + name: "good checksum & good constraint", + fixture: "test-fixtures/curator-validate/good-checksum", + cfgValidateDbHash: true, + constraint: 1, + err: false, + }, + { + name: "good checksum & bad constraint", + fixture: "test-fixtures/curator-validate/good-checksum", + cfgValidateDbHash: true, + constraint: 2, + err: true, + }, + { + name: "bad checksum & good constraint", + fixture: "test-fixtures/curator-validate/bad-checksum", + cfgValidateDbHash: true, + constraint: 1, + err: true, + }, + { + name: "bad checksum & bad constraint", + fixture: "test-fixtures/curator-validate/bad-checksum", + cfgValidateDbHash: true, + constraint: 2, + err: true, + }, + { + name: "bad checksum ignored on config exception", + fixture: "test-fixtures/curator-validate/bad-checksum", + cfgValidateDbHash: false, + constraint: 1, + err: false, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + metadataUrl := "http://metadata.io" + + fs := afero.NewOsFs() + getter := newTestGetter(fs, nil, nil) + cur := newTestCurator(t, fs, getter, "/tmp/dbdir", metadataUrl, test.cfgValidateDbHash) + + cur.targetSchema = test.constraint + + md, err := cur.validateIntegrity(test.fixture) + + if err == nil && test.err { + t.Errorf("expected an error but got none") + } else if err != nil && !test.err { + assert.NotZero(t, md) + t.Errorf("expected no error, got: %+v", err) + } + }) + } +} + +func TestCuratorDBPathHasSchemaVersion(t *testing.T) { + fs := afero.NewMemMapFs() + dbRootPath := "/tmp/dbdir" + cur := newTestCurator(t, fs, nil, dbRootPath, "http://metadata.io", false) + + assert.Equal(t, path.Join(dbRootPath, strconv.Itoa(cur.targetSchema)), cur.dbDir, "unexpected dir") + assert.Contains(t, cur.dbPath, path.Join(dbRootPath, 
strconv.Itoa(cur.targetSchema)), "unexpected path") +} + +func TestCurator_validateStaleness(t *testing.T) { + type fields struct { + validateAge bool + maxAllowedDBAge time.Duration + md Metadata + } + + now := time.Now().UTC() + tests := []struct { + name string + cur *Curator + fields fields + wantErr assert.ErrorAssertionFunc + }{ + { + name: "no-validation", + fields: fields{ + md: Metadata{Built: now}, + }, + wantErr: assert.NoError, + }, + { + name: "up-to-date", + fields: fields{ + maxAllowedDBAge: 2 * time.Hour, + validateAge: true, + md: Metadata{Built: now}, + }, + wantErr: assert.NoError, + }, + { + name: "stale-data", + fields: fields{ + maxAllowedDBAge: time.Hour, + validateAge: true, + md: Metadata{Built: now.UTC().Add(-4 * time.Hour)}, + }, + wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { + return assert.ErrorContains(t, err, "the vulnerability database was built") + }, + }, + { + name: "stale-data-no-validation", + fields: fields{ + maxAllowedDBAge: time.Hour, + validateAge: false, + md: Metadata{Built: now.Add(-4 * time.Hour)}, + }, + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + c := &Curator{ + validateAge: tt.fields.validateAge, + maxAllowedBuiltAge: tt.fields.maxAllowedDBAge, + } + tt.wantErr(t, c.validateStaleness(tt.fields.md), fmt.Sprintf("validateStaleness(%v)", tt.fields.md)) + }) + } +} diff --git a/pkg/db/curation/listing.go b/pkg/db/curation/listing.go new file mode 100644 index 00000000..8946117b --- /dev/null +++ b/pkg/db/curation/listing.go @@ -0,0 +1,90 @@ +package curation + +import ( + "encoding/json" + "fmt" + "os" + "sort" + + "github.com/spf13/afero" +) + +const ListingFileName = "listing.json" + +// Listing represents the json file which is served up and made available for applications to download and +// consume one or more vulnerability db flat files. +type Listing struct { + Available map[int][]ListingEntry `json:"available"` +} + +// NewListing creates a listing from one or more given ListingEntries. +func NewListing(entries ...ListingEntry) Listing { + listing := Listing{ + Available: make(map[int][]ListingEntry), + } + for _, entry := range entries { + if _, ok := listing.Available[entry.Version]; !ok { + listing.Available[entry.Version] = make([]ListingEntry, 0) + } + listing.Available[entry.Version] = append(listing.Available[entry.Version], entry) + } + + // sort each entry descending by date + for idx := range listing.Available { + listingEntries := listing.Available[idx] + sort.SliceStable(listingEntries, func(i, j int) bool { + return listingEntries[i].Built.After(listingEntries[j].Built) + }) + } + + return listing +} + +// NewListingFromFile loads a Listing from a given filepath. +func NewListingFromFile(fs afero.Fs, path string) (Listing, error) { + f, err := fs.Open(path) + if err != nil { + return Listing{}, fmt.Errorf("unable to open DB listing path: %w", err) + } + defer f.Close() + + var l Listing + err = json.NewDecoder(f).Decode(&l) + if err != nil { + return Listing{}, fmt.Errorf("unable to parse DB listing: %w", err) + } + + // sort each entry descending by date + for idx := range l.Available { + listingEntries := l.Available[idx] + sort.SliceStable(listingEntries, func(i, j int) bool { + return listingEntries[i].Built.After(listingEntries[j].Built) + }) + } + + return l, nil +} + +// BestUpdate returns the ListingEntry from a Listing that meets the given version constraints. 
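+// Entries are stored sorted descending by build date, so the newest entry for the
+// requested schema is returned; nil is returned when no entry exists. For example:
+//
+//	if entry := listing.BestUpdate(1); entry != nil {
+//		// entry is the most recently built DB for schema 1
+//	}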
+func (l *Listing) BestUpdate(targetSchema int) *ListingEntry { + if listingEntries, ok := l.Available[targetSchema]; ok { + if len(listingEntries) > 0 { + return &listingEntries[0] + } + } + return nil +} + +// Write the current listing to the given filepath. +func (l Listing) Write(toPath string) error { + contents, err := json.MarshalIndent(&l, "", " ") + if err != nil { + return fmt.Errorf("failed to encode listing file: %w", err) + } + + err = os.WriteFile(toPath, contents, 0600) + if err != nil { + return fmt.Errorf("failed to write listing file: %w", err) + } + return nil +} diff --git a/pkg/db/curation/listing_entry.go b/pkg/db/curation/listing_entry.go new file mode 100644 index 00000000..6d8c7df8 --- /dev/null +++ b/pkg/db/curation/listing_entry.go @@ -0,0 +1,97 @@ +package curation + +import ( + "crypto/sha256" + "encoding/json" + "fmt" + "net/url" + "path" + "path/filepath" + "time" + + "github.com/spf13/afero" + + "github.com/anchore/grype-db/internal/file" +) + +// ListingEntry represents basic metadata about a database archive such as what is in the archive (built/version) +// as well as how to obtain and verify the archive (URL/checksum). +type ListingEntry struct { + Built time.Time // RFC 3339 + Version int + URL *url.URL + Checksum string +} + +// ListingEntryJSON is a helper struct for converting a ListingEntry into JSON (or parsing from JSON) +type ListingEntryJSON struct { + Built string `json:"built"` + Version int `json:"version"` + URL string `json:"url"` + Checksum string `json:"checksum"` +} + +// NewListingEntryFromArchive creates a new ListingEntry based on the metadata from a database flat file. +func NewListingEntryFromArchive(fs afero.Fs, metadata Metadata, dbArchivePath string, baseURL *url.URL) (ListingEntry, error) { + checksum, err := file.HashFile(fs, dbArchivePath, sha256.New()) + if err != nil { + return ListingEntry{}, fmt.Errorf("unable to find db archive checksum: %w", err) + } + + dbArchiveName := filepath.Base(dbArchivePath) + fileURL, _ := url.Parse(baseURL.String()) + fileURL.Path = path.Join(fileURL.Path, dbArchiveName) + + return ListingEntry{ + Built: metadata.Built, + Version: metadata.Version, + URL: fileURL, + Checksum: "sha256:" + checksum, + }, nil +} + +// ToListingEntry converts a ListingEntryJSON to a ListingEntry. 
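+// The built timestamp must be RFC 3339 and the URL must be parseable. Illustrative
+// round trip (field values are placeholders):
+//
+//	j := ListingEntryJSON{Built: "2020-06-13T17:13:13Z", Version: 1, URL: "http://localhost:5000/db.tar.gz", Checksum: "sha256:..."}
+//	entry, err := j.ToListingEntry()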
+func (l ListingEntryJSON) ToListingEntry() (ListingEntry, error) { + build, err := time.Parse(time.RFC3339, l.Built) + if err != nil { + return ListingEntry{}, fmt.Errorf("cannot convert built time (%s): %+v", l.Built, err) + } + + u, err := url.Parse(l.URL) + if err != nil { + return ListingEntry{}, fmt.Errorf("cannot parse url (%s): %+v", l.URL, err) + } + + return ListingEntry{ + Built: build.UTC(), + Version: l.Version, + URL: u, + Checksum: l.Checksum, + }, nil +} + +func (l *ListingEntry) UnmarshalJSON(data []byte) error { + var lej ListingEntryJSON + if err := json.Unmarshal(data, &lej); err != nil { + return err + } + le, err := lej.ToListingEntry() + if err != nil { + return err + } + *l = le + return nil +} + +func (l *ListingEntry) MarshalJSON() ([]byte, error) { + return json.Marshal(&ListingEntryJSON{ + Built: l.Built.Format(time.RFC3339), + Version: l.Version, + Checksum: l.Checksum, + URL: l.URL.String(), + }) +} + +func (l ListingEntry) String() string { + return fmt.Sprintf("Listing(url=%s)", l.URL) +} diff --git a/pkg/db/curation/listing_test.go b/pkg/db/curation/listing_test.go new file mode 100644 index 00000000..6f133d88 --- /dev/null +++ b/pkg/db/curation/listing_test.go @@ -0,0 +1,153 @@ +package curation + +import ( + "net/url" + "testing" + "time" + + "github.com/go-test/deep" + "github.com/spf13/afero" +) + +func mustUrl(u *url.URL, err error) *url.URL { + if err != nil { + panic(err) + } + return u +} + +func TestNewListingFromPath(t *testing.T) { + tests := []struct { + fixture string + expected Listing + err bool + }{ + { + fixture: "test-fixtures/listing.json", + expected: Listing{ + Available: map[int][]ListingEntry{ + 1: { + { + Built: time.Date(2020, 06, 12, 16, 12, 12, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db-v0.2.0+2020-6-12.tar.gz")), + Version: 1, + Checksum: "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e", + }, + }, + 2: { + { + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db-v1.1.0+2020-6-13.tar.gz")), + Version: 2, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + }, + }, + }, + }, + { + fixture: "test-fixtures/listing-sorted.json", + expected: Listing{ + Available: map[int][]ListingEntry{ + 1: { + { + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db_v1_2020-6-13.tar.gz")), + Version: 1, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + { + Built: time.Date(2020, 06, 12, 16, 12, 12, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db_v1_2020-6-12.tar.gz")), + Version: 1, + Checksum: "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e", + }, + }, + }, + }, + }, + { + fixture: "test-fixtures/listing-unsorted.json", + expected: Listing{ + Available: map[int][]ListingEntry{ + 1: { + { + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db_v1_2020-6-13.tar.gz")), + Version: 1, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + { + Built: time.Date(2020, 06, 12, 16, 12, 12, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db_v1_2020-6-12.tar.gz")), + Version: 1, + Checksum: "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e", + }, + }, + }, + }, + }, + } + 
+ for _, test := range tests { + t.Run(test.fixture, func(t *testing.T) { + listing, err := NewListingFromFile(afero.NewOsFs(), test.fixture) + if err != nil && !test.err { + t.Fatalf("failed to get metadata: %+v", err) + } else if err == nil && test.err { + t.Fatalf("expected errer but got none") + } + + for _, diff := range deep.Equal(listing, test.expected) { + t.Errorf("listing difference: %s", diff) + } + }) + } +} + +func TestListingBestUpdate(t *testing.T) { + tests := []struct { + fixture string + constraint int + expected *ListingEntry + }{ + { + fixture: "test-fixtures/listing.json", + constraint: 2, + expected: &ListingEntry{ + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db-v1.1.0+2020-6-13.tar.gz")), + Version: 2, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + }, + { + fixture: "test-fixtures/listing.json", + constraint: 1, + expected: &ListingEntry{ + Built: time.Date(2020, 06, 12, 16, 12, 12, 0, time.UTC), + URL: mustUrl(url.Parse("http://localhost:5000/vulnerability-db-v0.2.0+2020-6-12.tar.gz")), + Version: 1, + Checksum: "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e", + }, + }, + } + + for _, test := range tests { + t.Run(test.fixture, func(t *testing.T) { + listing, err := NewListingFromFile(afero.NewOsFs(), test.fixture) + if err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } + + actual := listing.BestUpdate(test.constraint) + if actual == nil && test.expected != nil || actual != nil && test.expected == nil { + t.Fatalf("mismatched best candidate expectations") + } + + for _, diff := range deep.Equal(actual, test.expected) { + t.Errorf("listing entry difference: %s", diff) + } + }) + } +} diff --git a/pkg/db/curation/metadata.go b/pkg/db/curation/metadata.go new file mode 100644 index 00000000..3c5824a2 --- /dev/null +++ b/pkg/db/curation/metadata.go @@ -0,0 +1,137 @@ +package curation + +import ( + "encoding/json" + "fmt" + "os" + "path" + "time" + + "github.com/spf13/afero" + + "github.com/anchore/grype-db/internal/file" + "github.com/anchore/grype-db/internal/log" +) + +const MetadataFileName = "metadata.json" + +// Metadata represents the basic identifying information of a database flat file (built/version) and a way to +// verify the contents (checksum). +type Metadata struct { + Built time.Time + Version int + Checksum string +} + +// MetadataJSON is a helper struct for parsing and assembling Metadata objects to and from JSON. +type MetadataJSON struct { + Built string `json:"built"` // RFC 3339 + Version int `json:"version"` + Checksum string `json:"checksum"` +} + +// ToMetadata converts a MetadataJSON object to a Metadata object. +func (m MetadataJSON) ToMetadata() (Metadata, error) { + build, err := time.Parse(time.RFC3339, m.Built) + if err != nil { + return Metadata{}, fmt.Errorf("cannot convert built time (%s): %+v", m.Built, err) + } + + metadata := Metadata{ + Built: build.UTC(), + Version: m.Version, + Checksum: m.Checksum, + } + + return metadata, nil +} + +func metadataPath(dir string) string { + return path.Join(dir, MetadataFileName) +} + +// NewMetadataFromDir generates a Metadata object from a directory containing a vulnerability.db flat file. 
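+// Note: a missing metadata.json results in (nil, nil) rather than an error, so callers
+// should check both return values. Illustrative usage (the directory is a placeholder):
+//
+//	m, err := NewMetadataFromDir(afero.NewOsFs(), "/path/to/db/dir")
+//	if err != nil || m == nil {
+//		// no usable metadata
+//	}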
+func NewMetadataFromDir(fs afero.Fs, dir string) (*Metadata, error) { + metadataFilePath := metadataPath(dir) + exists, err := file.Exists(fs, metadataFilePath) + if err != nil { + return nil, fmt.Errorf("unable to check if DB metadata path exists (%s): %w", metadataFilePath, err) + } + if !exists { + return nil, nil + } + f, err := fs.Open(metadataFilePath) + if err != nil { + return nil, fmt.Errorf("unable to open DB metadata path (%s): %w", metadataFilePath, err) + } + defer f.Close() + + var m Metadata + err = json.NewDecoder(f).Decode(&m) + if err != nil { + return nil, fmt.Errorf("unable to parse DB metadata (%s): %w", metadataFilePath, err) + } + return &m, nil +} + +func (m *Metadata) UnmarshalJSON(data []byte) error { + var mj MetadataJSON + if err := json.Unmarshal(data, &mj); err != nil { + return err + } + me, err := mj.ToMetadata() + if err != nil { + return err + } + *m = me + return nil +} + +// IsSupersededBy takes a ListingEntry and determines if the entry candidate is newer than what is hinted at +// in the current Metadata object. +func (m *Metadata) IsSupersededBy(entry *ListingEntry) bool { + if m == nil { + log.Debugf("cannot find existing metadata, using update...") + // any valid update beats no database, use it! + return true + } + + if entry.Version > m.Version { + log.Debugf("update is a newer version than the current database, using update...") + // the listing is newer than the existing db, use it! + return true + } + + if entry.Built.After(m.Built) { + log.Debugf("existing database (%s) is older than candidate update (%s), using update...", m.Built.String(), entry.Built.String()) + // the listing is newer than the existing db, use it! + return true + } + + log.Debugf("existing database is already up to date") + return false +} + +func (m Metadata) String() string { + return fmt.Sprintf("Metadata(built=%s version=%d checksum=%s)", m.Built, m.Version, m.Checksum) +} + +// Write out a Metadata object to the given path. 
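+// The built time is serialized as RFC 3339 in UTC. Illustrative usage (values are
+// placeholders):
+//
+//	m := Metadata{Built: time.Now().UTC(), Version: 1, Checksum: "sha256:..."}
+//	if err := m.Write("/tmp/metadata.json"); err != nil {
+//		// handle write failure
+//	}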
+func (m Metadata) Write(toPath string) error { + metadata := MetadataJSON{ + Built: m.Built.UTC().Format(time.RFC3339), + Version: m.Version, + Checksum: m.Checksum, + } + + contents, err := json.MarshalIndent(&metadata, "", " ") + if err != nil { + return fmt.Errorf("failed to encode metadata file: %w", err) + } + + err = os.WriteFile(toPath, contents, 0600) + if err != nil { + return fmt.Errorf("failed to write metadata file: %w", err) + } + return nil +} diff --git a/pkg/db/curation/metadata_test.go b/pkg/db/curation/metadata_test.go new file mode 100644 index 00000000..ba68a462 --- /dev/null +++ b/pkg/db/curation/metadata_test.go @@ -0,0 +1,110 @@ +package curation + +import ( + "testing" + "time" + + "github.com/go-test/deep" + "github.com/spf13/afero" +) + +func TestMetadataParse(t *testing.T) { + tests := []struct { + fixture string + expected *Metadata + err bool + }{ + { + fixture: "test-fixtures/metadata-gocase", + expected: &Metadata{ + Built: time.Date(2020, 06, 15, 14, 02, 36, 0, time.UTC), + Version: 2, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + }, + { + fixture: "test-fixtures/metadata-edt-timezone", + expected: &Metadata{ + Built: time.Date(2020, 06, 15, 18, 02, 36, 0, time.UTC), + Version: 2, + Checksum: "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8", + }, + }, + { + fixture: "/dev/null/impossible", + err: true, + }, + } + + for _, test := range tests { + t.Run(test.fixture, func(t *testing.T) { + metadata, err := NewMetadataFromDir(afero.NewOsFs(), test.fixture) + if err != nil && !test.err { + t.Fatalf("failed to get metadata: %+v", err) + } else if err == nil && test.err { + t.Fatalf("expected error but got none") + } else if metadata == nil && test.expected != nil { + t.Fatalf("metadata not found: %+v", test.fixture) + } + + if metadata != nil && test.expected != nil { + for _, diff := range deep.Equal(*metadata, *test.expected) { + t.Errorf("metadata difference: %s", diff) + } + } + }) + } +} + +func TestMetadataIsSupercededBy(t *testing.T) { + tests := []struct { + name string + current *Metadata + update *ListingEntry + expectedToSupercede bool + }{ + { + name: "prefer updated versions over later dates", + expectedToSupercede: true, + current: &Metadata{ + Built: time.Date(2020, 06, 15, 14, 02, 36, 0, time.UTC), + Version: 2, + }, + update: &ListingEntry{ + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + Version: 3, + }, + }, + { + name: "prefer later dates when version is the same", + expectedToSupercede: false, + current: &Metadata{ + Built: time.Date(2020, 06, 15, 14, 02, 36, 0, time.UTC), + Version: 1, + }, + update: &ListingEntry{ + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + Version: 1, + }, + }, + { + name: "prefer something over nothing", + expectedToSupercede: true, + current: nil, + update: &ListingEntry{ + Built: time.Date(2020, 06, 13, 17, 13, 13, 0, time.UTC), + Version: 1, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + actual := test.current.IsSupersededBy(test.update) + + if test.expectedToSupercede != actual { + t.Errorf("failed supercede assertion: got %+v", actual) + } + }) + } +} diff --git a/pkg/db/curation/status.go b/pkg/db/curation/status.go new file mode 100644 index 00000000..eb8c3df9 --- /dev/null +++ b/pkg/db/curation/status.go @@ -0,0 +1,11 @@ +package curation + +import "time" + +type Status struct { + Built time.Time `json:"built"` + SchemaVersion int `json:"schemaVersion"` + Location string 
`json:"location"` + Checksum string `json:"checksum"` + Err error `json:"error"` +} diff --git a/pkg/db/curation/test-fixtures/curator-validate/bad-checksum/vulnerability.db b/pkg/db/curation/test-fixtures/curator-validate/bad-checksum/vulnerability.db new file mode 100644 index 00000000..66641fa4 --- /dev/null +++ b/pkg/db/curation/test-fixtures/curator-validate/bad-checksum/vulnerability.db @@ -0,0 +1 @@ +I can haz cve? \ No newline at end of file diff --git a/pkg/db/curation/test-fixtures/curator-validate/good-checksum/vulnerability.db b/pkg/db/curation/test-fixtures/curator-validate/good-checksum/vulnerability.db new file mode 100644 index 00000000..66641fa4 --- /dev/null +++ b/pkg/db/curation/test-fixtures/curator-validate/good-checksum/vulnerability.db @@ -0,0 +1 @@ +I can haz cve? \ No newline at end of file diff --git a/pkg/db/curation/test-fixtures/listing-sorted.json b/pkg/db/curation/test-fixtures/listing-sorted.json new file mode 100644 index 00000000..46203e31 --- /dev/null +++ b/pkg/db/curation/test-fixtures/listing-sorted.json @@ -0,0 +1,18 @@ +{ + "available": { + "1": [ + { + "built": "2020-06-13T13:13:13-04:00", + "version": 1, + "url": "http://localhost:5000/vulnerability-db_v1_2020-6-13.tar.gz", + "checksum": "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8" + }, + { + "built": "2020-06-12T12:12:12-04:00", + "version": 1, + "url": "http://localhost:5000/vulnerability-db_v1_2020-6-12.tar.gz", + "checksum": "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e" + } + ] + } +} diff --git a/pkg/db/curation/test-fixtures/listing-unsorted.json b/pkg/db/curation/test-fixtures/listing-unsorted.json new file mode 100644 index 00000000..b144a80f --- /dev/null +++ b/pkg/db/curation/test-fixtures/listing-unsorted.json @@ -0,0 +1,18 @@ +{ + "available": { + "1": [ + { + "built": "2020-06-12T12:12:12-04:00", + "version": 1, + "url": "http://localhost:5000/vulnerability-db_v1_2020-6-12.tar.gz", + "checksum": "sha256:e20c251202948df7f853ddc812f64826bdcd6a285c839a7c65939e68609dfc6e" + }, + { + "built": "2020-06-13T13:13:13-04:00", + "version": 1, + "url": "http://localhost:5000/vulnerability-db_v1_2020-6-13.tar.gz", + "checksum": "sha256:dcd6a285c839a7c65939e20c251202912f64826be68609dfc6e48df7f853ddc8" + } + ] + } +} diff --git a/pkg/db/curation/test-fixtures/tls/.gitignore b/pkg/db/curation/test-fixtures/tls/.gitignore new file mode 100755 index 00000000..4638aa2e --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/.gitignore @@ -0,0 +1,5 @@ +server.key +server.crt +www/ +listing.json +dbdir/ \ No newline at end of file diff --git a/pkg/db/curation/test-fixtures/tls/Makefile b/pkg/db/curation/test-fixtures/tls/Makefile new file mode 100644 index 00000000..be149c85 --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/Makefile @@ -0,0 +1,45 @@ +all: clean serve + +.PHONY: serve +serve: www/listing.json www/db.tar.gz server.crt + python3 serve.py + + +.PHONY: grype-test-fail +grype-test-fail: clean-dbdir dbdir + GRYPE_DB_CACHE_DIR=$(shell pwd)/dbdir \ + GRYPE_DB_UPDATE_URL=https://$(shell hostname).local/listing.json \ + go run ../../../../cmd/grype -vv alpine:latest + +.PHONY: grype-test-pass +grype-test-pass: clean-dbdir dbdir + GRYPE_DB_CA_CERT=$(shell pwd)/server.crt \ + GRYPE_DB_CACHE_DIR=$(shell pwd)/dbdir \ + GRYPE_DB_UPDATE_URL=https://$(shell hostname).local/listing.json \ + go run ../../../../cmd/grype -vv alpine:latest + +dbdir: + mkdir -p dbdir + +server.crt server.key: + ./generate-x509-cert-pair.sh + +www: + mkdir -p www + 
+listing.json: + curl -L -O https://toolbox-data.anchore.io/grype/databases/listing.json + +www/listing.json www/db.tar.gz: www listing.json + $(eval location=$(shell python3 listing.py)) + curl -L -o www/db.tar.gz $(location) + +.PHONY: clean +clean: clean-dbdir + rm -rf www + rm -f server.crt + rm -f server.key + +.PHONY: clean-dbdir +clean-dbdir: + rm -rf dbdir/ \ No newline at end of file diff --git a/pkg/db/curation/test-fixtures/tls/README.md b/pkg/db/curation/test-fixtures/tls/README.md new file mode 100644 index 00000000..38bff5e6 --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/README.md @@ -0,0 +1,24 @@ +# TLS test utils + +Note: Makefile, server.crt, and server.key are used in automated testing, the remaining files are for convenience in manual verification. + +You will require Python 3 to run these utils. + +To standup a test server: +``` +make serve +``` + +To test grype against this server: +``` +# without the custom cert configured (thus will fail) +make grype-test-fail + +# with the custom cert configured +make grype-test-pass +``` + +To remove all temp files: +``` +make clean +``` \ No newline at end of file diff --git a/pkg/db/curation/test-fixtures/tls/generate-x509-cert-pair.sh b/pkg/db/curation/test-fixtures/tls/generate-x509-cert-pair.sh new file mode 100755 index 00000000..1c94b2a9 --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/generate-x509-cert-pair.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -eux + +# create private key +openssl genrsa -out server.key 2048 + +# generate self-signed public key (cert) based on the private key +openssl req -new -x509 -sha256 \ + -key server.key \ + -out server.crt \ + -days 3650 \ + -reqexts SAN \ + -extensions SAN \ + -config <(cat /etc/ssl/openssl.cnf <(printf "[SAN]\nsubjectAltName=DNS:$(hostname).local")) \ + -subj "/C=US/ST=Test/L=Test/O=Test/CN=$(hostname).local" + diff --git a/pkg/db/curation/test-fixtures/tls/listing.py b/pkg/db/curation/test-fixtures/tls/listing.py new file mode 100644 index 00000000..a903498b --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/listing.py @@ -0,0 +1,27 @@ +import urllib.request +import json +import os + +with open('listing.json', 'r') as fh: + data = json.loads(fh.read()) + +entry = data["available"]["3"][-1] + +hostname = os.popen('hostname').read().strip() + +with open('www/listing.json', 'w') as fh: + json.dump( + { + "available": { + entry["version"]: [ + { + "built": entry["built"], + "version": entry["version"], + "url": f"https://{hostname}.local/db.tar.gz", + "checksum": entry["checksum"] + } + ] + } + }, fh) + +print(entry["url"]) diff --git a/pkg/db/curation/test-fixtures/tls/serve.py b/pkg/db/curation/test-fixtures/tls/serve.py new file mode 100644 index 00000000..ebb9f361 --- /dev/null +++ b/pkg/db/curation/test-fixtures/tls/serve.py @@ -0,0 +1,25 @@ +from http.server import HTTPServer, SimpleHTTPRequestHandler +import ssl +import logging + +port = 443 +directory = "www" + + +class Handler(SimpleHTTPRequestHandler): + def __init__(self, *args, **kwargs): + super().__init__(*args, directory=directory, **kwargs) + + def do_GET(self): + logging.error(self.headers) + SimpleHTTPRequestHandler.do_GET(self) + + +httpd = HTTPServer(('0.0.0.0', port), Handler) +sslctx = ssl.SSLContext() +sslctx.options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 +sslctx.load_cert_chain(certfile='server.crt', keyfile="server.key") +httpd.socket = sslctx.wrap_socket(httpd.socket, server_side=True) + +print(f"Server running on https://0.0.0.0:{port}") +httpd.serve_forever() \ No newline at end of 
file diff --git a/pkg/db/internal/gormadapter/logger.go b/pkg/db/internal/gormadapter/logger.go new file mode 100644 index 00000000..4fdc6f44 --- /dev/null +++ b/pkg/db/internal/gormadapter/logger.go @@ -0,0 +1,37 @@ +package gormadapter + +import ( + "context" + "time" + + "gorm.io/gorm/logger" + + "github.com/anchore/grype-db/internal/log" +) + +type logAdapter struct { +} + +func newLogger() logger.Interface { + return logAdapter{} +} + +func (l logAdapter) LogMode(logger.LogLevel) logger.Interface { + return l +} + +func (l logAdapter) Info(_ context.Context, _ string, _ ...interface{}) { + // unimplemented +} + +func (l logAdapter) Warn(_ context.Context, fmt string, v ...interface{}) { + log.Warnf("gorm: "+fmt, v...) +} + +func (l logAdapter) Error(_ context.Context, fmt string, v ...interface{}) { + log.Errorf("gorm: "+fmt, v...) +} + +func (l logAdapter) Trace(_ context.Context, _ time.Time, _ func() (sql string, rowsAffected int64), _ error) { + // unimplemented +} diff --git a/pkg/db/internal/gormadapter/open.go b/pkg/db/internal/gormadapter/open.go new file mode 100644 index 00000000..6448dada --- /dev/null +++ b/pkg/db/internal/gormadapter/open.go @@ -0,0 +1,66 @@ +package gormadapter + +import ( + "fmt" + "os" + + "github.com/glebarez/sqlite" + "gorm.io/gorm" +) + +var writerStatements = []string{ + // performance improvements (note: will result in lost data on write interruptions). + // on my box it reduces the time to write from 10 minutes to 10 seconds (with ~1GB memory utilization spikes) + `PRAGMA synchronous = OFF`, + `PRAGMA journal_mode = MEMORY`, +} + +var readOptions = []string{ + "immutable=1", + "cache=shared", + "mode=ro", +} + +// Open a new connection to a sqlite3 database file +func Open(path string, write bool) (*gorm.DB, error) { + if write { + // the file may or may not exist, so we ignore the error explicitly + _ = os.Remove(path) + } + + connStr, err := connectionString(path) + if err != nil { + return nil, err + } + + if !write { + // &immutable=1&cache=shared&mode=ro + for _, o := range readOptions { + connStr += fmt.Sprintf("&%s", o) + } + } + + dbObj, err := gorm.Open(sqlite.Open(connStr), &gorm.Config{Logger: newLogger()}) + if err != nil { + return nil, fmt.Errorf("unable to connect to DB: %w", err) + } + + if write { + for _, sqlStmt := range writerStatements { + dbObj.Exec(sqlStmt) + if dbObj.Error != nil { + return nil, fmt.Errorf("unable to execute (%s): %w", sqlStmt, dbObj.Error) + } + } + } + + return dbObj, nil +} + +// ConnectionString creates a connection string for sqlite3 +func connectionString(path string) (string, error) { + if path == "" { + return "", fmt.Errorf("no db filepath given") + } + return fmt.Sprintf("file:%s?cache=shared", path), nil +} diff --git a/pkg/db/internal/sqlite/nullable_types.go b/pkg/db/internal/sqlite/nullable_types.go new file mode 100644 index 00000000..bb6899fd --- /dev/null +++ b/pkg/db/internal/sqlite/nullable_types.go @@ -0,0 +1,73 @@ +package sqlite + +import ( + "database/sql" + "encoding/json" +) + +type NullString struct { + sql.NullString +} + +func NewNullString(s string, valid bool) NullString { + return NullString{ + sql.NullString{ + String: s, + Valid: valid, + }, + } +} + +func ToNullString(v any) NullString { + nullString := NullString{} + nullString.Valid = false + + if v != nil { + var stringValue string + + if s, ok := v.(string); ok { + stringValue = s + } else { + vBytes, err := json.Marshal(v) + if err != nil { + // TODO: just no + panic(err) + } + + stringValue = string(vBytes) + } + + 
if stringValue != "null" { + nullString.String = stringValue + nullString.Valid = true + } + } + + return nullString +} + +func (v NullString) ToByteSlice() []byte { + if v.Valid { + return []byte(v.String) + } + + return []byte("null") +} + +func (v NullString) MarshalJSON() ([]byte, error) { + if v.Valid { + return json.Marshal(v.String) + } + + return json.Marshal(nil) +} + +func (v *NullString) UnmarshalJSON(data []byte) error { + if data != nil && string(data) != "null" { + v.Valid = true + v.String = string(data) + } else { + v.Valid = false + } + return nil +} diff --git a/pkg/db/internal/sqlite/nullable_types_test.go b/pkg/db/internal/sqlite/nullable_types_test.go new file mode 100644 index 00000000..b392d1e2 --- /dev/null +++ b/pkg/db/internal/sqlite/nullable_types_test.go @@ -0,0 +1,109 @@ +package sqlite + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestToNullString(t *testing.T) { + tests := []struct { + name string + input any + expected NullString + }{ + { + name: "Nil input", + input: nil, + expected: NullString{}, + }, + { + name: "String null", + input: "null", + expected: NullString{}, + }, + { + name: "Other string", + input: "Hello there {}", + expected: NewNullString("Hello there {}", true), + }, + { + name: "Single struct with all fields populated", + input: struct { + Boolean bool `json:"boolean"` + String string `json:"string"` + Integer int `json:"integer"` + InnerStruct struct { + StringList []string `json:"string_list"` + } `json:"inner_struct"` + }{ + Boolean: true, + String: "{}", + Integer: 1034, + InnerStruct: struct { + StringList []string `json:"string_list"` + }{ + StringList: []string{"a", "b", "c"}, + }, + }, + expected: NewNullString(`{"boolean":true,"string":"{}","integer":1034,"inner_struct":{"string_list":["a","b","c"]}}`, true), + }, + { + name: "Single struct with one field populated", + input: struct { + Boolean bool `json:"boolean"` + String string `json:"string"` + Integer int `json:"integer"` + InnerStruct struct { + StringList []string `json:"string_list"` + } `json:"inner_struct"` + }{ + Boolean: true, + }, + expected: NewNullString(`{"boolean":true,"string":"","integer":0,"inner_struct":{"string_list":null}}`, true), + }, + { + name: "Single struct with one field populated omit empty", + input: struct { + Boolean bool `json:"boolean,omitempty"` + String string `json:"string,omitempty"` + Integer int `json:"integer,omitempty"` + InnerStruct struct { + StringList []string `json:"string_list,omitempty"` + } `json:"inner_struct,omitempty"` + }{ + Boolean: true, + }, + expected: NewNullString(`{"boolean":true,"inner_struct":{}}`, true), + }, + { + name: "Array of structs", + input: []struct { + Boolean bool `json:"boolean,omitempty"` + String string `json:"string,omitempty"` + Integer int `json:"integer,omitempty"` + }{ + { + Boolean: true, + String: "{}", + Integer: 1034, + }, + { + String: "[{}]", + }, + { + Integer: -5000, + Boolean: false, + }, + }, + expected: NewNullString(`[{"boolean":true,"string":"{}","integer":1034},{"string":"[{}]"},{"integer":-5000}]`, true), + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result := ToNullString(test.input) + assert.Equal(t, test.expected, result) + }) + } +} diff --git a/pkg/db/v1/id.go b/pkg/db/v1/id.go new file mode 100644 index 00000000..297d73c1 --- /dev/null +++ b/pkg/db/v1/id.go @@ -0,0 +1,28 @@ +package v1 + +import ( + "time" +) + +// ID represents identifying information for a DB and the data it contains. 
+type ID struct { + // BuildTimestamp is the timestamp used to define the age of the DB, ideally including the age of the data + // contained in the DB, not just when the DB file was created. + BuildTimestamp time.Time + SchemaVersion int +} + +type IDReader interface { + GetID() (*ID, error) +} + +type IDWriter interface { + SetID(ID) error +} + +func NewID(age time.Time) ID { + return ID{ + BuildTimestamp: age.UTC(), + SchemaVersion: SchemaVersion, + } +} diff --git a/pkg/db/v1/namespace.go b/pkg/db/v1/namespace.go new file mode 100644 index 00000000..780cbcfd --- /dev/null +++ b/pkg/db/v1/namespace.go @@ -0,0 +1,30 @@ +package v1 + +import ( + "fmt" +) + +const ( + NVDNamespace = "nvd" +) + +func RecordSource(feed, group string) string { + switch feed { + case "github", "nvdv2": + return group + default: + return fmt.Sprintf("%s:%s", feed, group) + } +} + +func NamespaceForFeedGroup(feed, group string) (string, error) { + switch { + case feed == "vulnerabilities": + return group, nil + case feed == "github": + return group, nil + case feed == "nvdv2" && group == "nvdv2:cves": + return NVDNamespace, nil + } + return "", fmt.Errorf("feed=%q group=%q has no namespace mappings", feed, group) +} diff --git a/pkg/db/v1/namespace_test.go b/pkg/db/v1/namespace_test.go new file mode 100644 index 00000000..10dc9845 --- /dev/null +++ b/pkg/db/v1/namespace_test.go @@ -0,0 +1,49 @@ +package v1 + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNamespaceFromRecordSource(t *testing.T) { + tests := []struct { + Feed, Group string + Namespace string + }{ + { + Feed: "vulnerabilities", + Group: "ubuntu:20.04", + Namespace: "ubuntu:20.04", + }, + { + Feed: "vulnerabilities", + Group: "alpine:3.9", + Namespace: "alpine:3.9", + }, + { + Feed: "vulnerabilities", + Group: "sles:12.5", + Namespace: "sles:12.5", + }, + { + Feed: "nvdv2", + Group: "nvdv2:cves", + Namespace: "nvd", + }, + { + Feed: "github", + Group: "github:python", + Namespace: "github:python", + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("feed=%q group=%q namespace=%q", test.Feed, test.Group, test.Namespace), func(t *testing.T) { + actual, err := NamespaceForFeedGroup(test.Feed, test.Group) + assert.NoError(t, err) + assert.Equal(t, test.Namespace, actual) + }) + } +} diff --git a/pkg/db/v1/schema_version.go b/pkg/db/v1/schema_version.go new file mode 100644 index 00000000..f72f10ce --- /dev/null +++ b/pkg/db/v1/schema_version.go @@ -0,0 +1,3 @@ +package v1 + +const SchemaVersion = 1 diff --git a/pkg/db/v1/store.go b/pkg/db/v1/store.go new file mode 100644 index 00000000..97b6f8bf --- /dev/null +++ b/pkg/db/v1/store.go @@ -0,0 +1,19 @@ +package v1 + +type Store interface { + StoreReader + StoreWriter +} + +type StoreReader interface { + IDReader + VulnerabilityStoreReader + VulnerabilityMetadataStoreReader +} + +type StoreWriter interface { + IDWriter + VulnerabilityStoreWriter + VulnerabilityMetadataStoreWriter + Close() +} diff --git a/pkg/db/v1/store/model/id.go b/pkg/db/v1/store/model/id.go new file mode 100644 index 00000000..668250da --- /dev/null +++ b/pkg/db/v1/store/model/id.go @@ -0,0 +1,40 @@ +package model + +import ( + "fmt" + "time" + + v1 "github.com/anchore/grype-db/pkg/db/v1" +) + +const ( + IDTableName = "id" +) + +type IDModel struct { + BuildTimestamp string `gorm:"column:build_timestamp"` + SchemaVersion int `gorm:"column:schema_version"` +} + +func NewIDModel(id v1.ID) IDModel { + return IDModel{ + BuildTimestamp: id.BuildTimestamp.Format(time.RFC3339Nano), + 
SchemaVersion: id.SchemaVersion, + } +} + +func (IDModel) TableName() string { + return IDTableName +} + +func (m *IDModel) Inflate() (v1.ID, error) { + buildTime, err := time.Parse(time.RFC3339Nano, m.BuildTimestamp) + if err != nil { + return v1.ID{}, fmt.Errorf("unable to parse build timestamp (%+v): %w", m.BuildTimestamp, err) + } + + return v1.ID{ + BuildTimestamp: buildTime, + SchemaVersion: m.SchemaVersion, + }, nil +} diff --git a/pkg/db/v1/store/model/vulnerability.go b/pkg/db/v1/store/model/vulnerability.go new file mode 100644 index 00000000..a16fa1b4 --- /dev/null +++ b/pkg/db/v1/store/model/vulnerability.go @@ -0,0 +1,86 @@ +package model + +import ( + "encoding/json" + "fmt" + + v1 "github.com/anchore/grype-db/pkg/db/v1" +) + +const ( + VulnerabilityTableName = "vulnerability" + GetVulnerabilityIndexName = "get_vulnerability_index" +) + +// VulnerabilityModel is a struct used to serialize db.Vulnerability information into a sqlite3 DB. +type VulnerabilityModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id"` + RecordSource string `gorm:"column:record_source"` + PackageName string `gorm:"column:package_name; index:get_vulnerability_index"` + Namespace string `gorm:"column:namespace; index:get_vulnerability_index"` + VersionConstraint string `gorm:"column:version_constraint"` + VersionFormat string `gorm:"column:version_format"` + CPEs string `gorm:"column:cpes"` + ProxyVulnerabilities string `gorm:"column:proxy_vulnerabilities"` + FixedInVersion string `gorm:"column:fixed_in_version"` +} + +// NewVulnerabilityModel generates a new model from a db.Vulnerability struct. +func NewVulnerabilityModel(vulnerability v1.Vulnerability) VulnerabilityModel { + cpes, err := json.Marshal(vulnerability.CPEs) + if err != nil { + // TODO: just no + panic(err) + } + + proxy, err := json.Marshal(vulnerability.ProxyVulnerabilities) + if err != nil { + // TODO: just no + panic(err) + } + + return VulnerabilityModel{ + ID: vulnerability.ID, + PackageName: vulnerability.PackageName, + RecordSource: vulnerability.RecordSource, + Namespace: vulnerability.Namespace, + VersionConstraint: vulnerability.VersionConstraint, + VersionFormat: vulnerability.VersionFormat, + FixedInVersion: vulnerability.FixedInVersion, + CPEs: string(cpes), + ProxyVulnerabilities: string(proxy), + } +} + +// TableName returns the table which all db.Vulnerability model instances are stored into. +func (VulnerabilityModel) TableName() string { + return VulnerabilityTableName +} + +// Inflate generates a db.Vulnerability object from the serialized model instance. 
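+// The CPEs and ProxyVulnerabilities columns are stored as JSON strings and are
+// unmarshalled back into slices here. Illustrative usage:
+//
+//	vuln, err := m.Inflate()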
+func (m *VulnerabilityModel) Inflate() (v1.Vulnerability, error) { + var cpes []string + err := json.Unmarshal([]byte(m.CPEs), &cpes) + if err != nil { + return v1.Vulnerability{}, fmt.Errorf("unable to unmarshal CPEs (%+v): %w", m.CPEs, err) + } + + var proxy []string + err = json.Unmarshal([]byte(m.ProxyVulnerabilities), &proxy) + if err != nil { + return v1.Vulnerability{}, fmt.Errorf("unable to unmarshal proxy vulnerabilities (%+v): %w", m.ProxyVulnerabilities, err) + } + + return v1.Vulnerability{ + ID: m.ID, + RecordSource: m.RecordSource, + PackageName: m.PackageName, + Namespace: m.Namespace, + VersionConstraint: m.VersionConstraint, + VersionFormat: m.VersionFormat, + CPEs: cpes, + ProxyVulnerabilities: proxy, + FixedInVersion: m.FixedInVersion, + }, nil +} diff --git a/pkg/db/v1/store/model/vulnerability_metadata.go b/pkg/db/v1/store/model/vulnerability_metadata.go new file mode 100644 index 00000000..1910e7f3 --- /dev/null +++ b/pkg/db/v1/store/model/vulnerability_metadata.go @@ -0,0 +1,104 @@ +package model + +import ( + "database/sql" + "encoding/json" + "fmt" + + v1 "github.com/anchore/grype-db/pkg/db/v1" +) + +const ( + VulnerabilityMetadataTableName = "vulnerability_metadata" +) + +// VulnerabilityMetadataModel is a struct used to serialize db.VulnerabilityMetadata information into a sqlite3 DB. +type VulnerabilityMetadataModel struct { + ID string `gorm:"primary_key; column:id;"` + RecordSource string `gorm:"primary_key; column:record_source;"` + Severity string `gorm:"column:severity"` + Links string `gorm:"column:links"` + Description string `gorm:"column:description"` + CvssV2 sql.NullString `gorm:"column:cvss_v2"` + CvssV3 sql.NullString `gorm:"column:cvss_v3"` +} + +// NewVulnerabilityMetadataModel generates a new model from a db.VulnerabilityMetadata struct. +func NewVulnerabilityMetadataModel(metadata v1.VulnerabilityMetadata) VulnerabilityMetadataModel { + links, err := json.Marshal(metadata.Links) + if err != nil { + // TODO: just no + panic(err) + } + + var cvssV2Str sql.NullString + if metadata.CvssV2 != nil { + cvssV2, err := json.Marshal(*metadata.CvssV2) + if err != nil { + // TODO: just no + panic(err) + } + cvssV2Str.String = string(cvssV2) + cvssV2Str.Valid = true + } + + var cvssV3Str sql.NullString + if metadata.CvssV3 != nil { + cvssV3, err := json.Marshal(*metadata.CvssV3) + if err != nil { + // TODO: just no + panic(err) + } + cvssV3Str.String = string(cvssV3) + cvssV3Str.Valid = true + } + + return VulnerabilityMetadataModel{ + ID: metadata.ID, + RecordSource: metadata.RecordSource, + Severity: metadata.Severity, + Links: string(links), + Description: metadata.Description, + CvssV2: cvssV2Str, + CvssV3: cvssV3Str, + } +} + +// TableName returns the table which all db.VulnerabilityMetadata model instances are stored into. +func (VulnerabilityMetadataModel) TableName() string { + return VulnerabilityMetadataTableName +} + +// Inflate generates a db.VulnerabilityMetadataModel object from the serialized model instance. 
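+// CvssV2 and CvssV3 are nullable JSON columns; each is decoded only when the sql.NullString reports a valid value,
+// otherwise the corresponding pointer on the returned metadata remains nil.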
+func (m *VulnerabilityMetadataModel) Inflate() (v1.VulnerabilityMetadata, error) { + var links []string + var cvssV2, cvssV3 *v1.Cvss + + if err := json.Unmarshal([]byte(m.Links), &links); err != nil { + return v1.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal links (%+v): %w", m.Links, err) + } + + if m.CvssV2.Valid { + err := json.Unmarshal([]byte(m.CvssV2.String), &cvssV2) + if err != nil { + return v1.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvssV2 data (%+v): %w", m.CvssV2, err) + } + } + + if m.CvssV3.Valid { + err := json.Unmarshal([]byte(m.CvssV3.String), &cvssV3) + if err != nil { + return v1.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvssV3 data (%+v): %w", m.CvssV3, err) + } + } + + return v1.VulnerabilityMetadata{ + ID: m.ID, + RecordSource: m.RecordSource, + Severity: m.Severity, + Links: links, + Description: m.Description, + CvssV2: cvssV2, + CvssV3: cvssV3, + }, nil +} diff --git a/pkg/db/v1/store/store.go b/pkg/db/v1/store/store.go new file mode 100644 index 00000000..76ca537e --- /dev/null +++ b/pkg/db/v1/store/store.go @@ -0,0 +1,211 @@ +package store + +import ( + "fmt" + "sort" + + _ "github.com/glebarez/sqlite" // provide the sqlite dialect to gorm via import + "github.com/go-test/deep" + "github.com/scylladb/go-set/strset" + "gorm.io/gorm" + + "github.com/anchore/grype-db/pkg/db/internal/gormadapter" + v1 "github.com/anchore/grype-db/pkg/db/v1" + "github.com/anchore/grype-db/pkg/db/v1/store/model" +) + +// store holds an instance of the database connection +type store struct { + db *gorm.DB +} + +// New creates a new instance of the store. +func New(dbFilePath string, overwrite bool) (v1.Store, error) { + db, err := gormadapter.Open(dbFilePath, overwrite) + if err != nil { + return nil, err + } + + if overwrite { + // TODO: automigrate could write to the database, + // we should be validating the database is the correct database based on the version in the ID table before + // automigrating + if err := db.AutoMigrate(&model.IDModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate ID model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMetadataModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Metadata model: %w", err) + } + } + + return &store{ + db: db, + }, nil +} + +// GetID fetches the metadata about the databases schema version and build time. +func (s *store) GetID() (*v1.ID, error) { + var models []model.IDModel + result := s.db.Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple DB IDs") + case len(models) == 1: + id, err := models[0].Inflate() + if err != nil { + return nil, err + } + return &id, nil + } + + return nil, nil +} + +// SetID stores the databases schema version and build time. +func (s *store) SetID(id v1.ID) error { + var ids []model.IDModel + + // replace the existing ID with the given one + s.db.Find(&ids).Delete(&ids) + + m := model.NewIDModel(id) + result := s.db.Create(&m) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add id (%d rows affected)", result.RowsAffected) + } + + return result.Error +} + +// GetVulnerability retrieves one or more vulnerabilities given a namespace and package name. 
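+// The query filters on the composite (namespace, package_name) index defined on the vulnerability table
+// (get_vulnerability_index). Illustrative usage (hypothetical values):
+//
+//	vulns, err := s.GetVulnerability("ubuntu:20.04", "openssl")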
+func (s *store) GetVulnerability(namespace, packageName string) ([]v1.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? AND package_name = ?", namespace, packageName).Find(&models) + + var vulnerabilities = make([]v1.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// AddVulnerability saves one or more vulnerabilities into the sqlite3 store. +func (s *store) AddVulnerability(vulnerabilities ...v1.Vulnerability) error { + for _, vulnerability := range vulnerabilities { + m := model.NewVulnerabilityModel(vulnerability) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability (%d rows affected)", result.RowsAffected) + } + } + return nil +} + +// GetVulnerabilityMetadata retrieves metadata for the given vulnerability ID relative to a specific record source. +func (s *store) GetVulnerabilityMetadata(id, recordSource string) (*v1.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + + result := s.db.Where(&model.VulnerabilityMetadataModel{ID: id, RecordSource: recordSource}).Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple metadatas for single ID=%q RecordSource=%q", id, recordSource) + case len(models) == 1: + metadata, err := models[0].Inflate() + if err != nil { + return nil, err + } + + return &metadata, nil + } + + return nil, nil +} + +// AddVulnerabilityMetadata stores one or more vulnerability metadata models into the sqlite DB. +func (s *store) AddVulnerabilityMetadata(metadata ...v1.VulnerabilityMetadata) error { + for _, m := range metadata { + existing, err := s.GetVulnerabilityMetadata(m.ID, m.RecordSource) + if err != nil { + return fmt.Errorf("failed to verify existing entry: %w", err) + } + + if existing != nil { + // merge with the existing entry + + cvssV3Diffs := deep.Equal(existing.CvssV3, m.CvssV3) + cvssV2Diffs := deep.Equal(existing.CvssV2, m.CvssV2) + + switch { + case existing.Severity != m.Severity: + return fmt.Errorf("existing metadata has mismatched severity (%q!=%q)", existing.Severity, m.Severity) + case existing.Description != m.Description: + return fmt.Errorf("existing metadata has mismatched description (%q!=%q)", existing.Description, m.Description) + case existing.CvssV2 != nil && len(cvssV2Diffs) > 0: + return fmt.Errorf("existing metadata has mismatched cvss-v2: %+v", cvssV2Diffs) + case existing.CvssV3 != nil && len(cvssV3Diffs) > 0: + return fmt.Errorf("existing metadata has mismatched cvss-v3: %+v", cvssV3Diffs) + default: + existing.CvssV2 = m.CvssV2 + existing.CvssV3 = m.CvssV3 + } + + links := strset.New(existing.Links...) 
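+			// union the incoming links with the existing set, then keep the merged list sorted for deterministic storage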
+ for _, l := range m.Links { + links.Add(l) + } + + existing.Links = links.List() + sort.Strings(existing.Links) + + newModel := model.NewVulnerabilityMetadataModel(*existing) + result := s.db.Save(&newModel) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to merge vulnerability metadata (%d rows affected)", result.RowsAffected) + } + + if result.Error != nil { + return result.Error + } + } else { + // this is a new entry + newModel := model.NewVulnerabilityMetadataModel(m) + result := s.db.Create(&newModel) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability metadata (%d rows affected)", result.RowsAffected) + } + } + } + return nil +} + +func (s *store) Close() { + s.db.Exec("VACUUM;") +} diff --git a/pkg/db/v1/store/store_test.go b/pkg/db/v1/store/store_test.go new file mode 100644 index 00000000..b9800830 --- /dev/null +++ b/pkg/db/v1/store/store_test.go @@ -0,0 +1,494 @@ +package store + +import ( + "testing" + "time" + + "github.com/go-test/deep" + + v1 "github.com/anchore/grype-db/pkg/db/v1" + "github.com/anchore/grype-db/pkg/db/v1/store/model" +) + +func assertIDReader(t *testing.T, reader v1.IDReader, expected v1.ID) { + t.Helper() + if actual, err := reader.GetID(); err != nil { + t.Fatalf("failed to get ID: %+v", err) + } else { + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } +} + +func TestStore_GetID_SetID(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + expected := v1.ID{ + BuildTimestamp: time.Now().UTC(), + SchemaVersion: 2, + } + + if err = s.SetID(expected); err != nil { + t.Fatalf("failed to set ID: %+v", err) + } + + assertIDReader(t, s, expected) + +} + +func assertVulnerabilityReader(t *testing.T, reader v1.VulnerabilityStoreReader, namespace, name string, expected []v1.Vulnerability) { + if actual, err := reader.GetVulnerability(namespace, name); err != nil { + t.Fatalf("failed to get Vulnerability: %+v", err) + } else { + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulns: %d", len(actual)) + } + + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerability_SetVulnerability(t *testing.T) { + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := []v1.Vulnerability{ + { + ID: "my-cve-33333", + RecordSource: "record-source", + PackageName: "package-name-2", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + FixedInVersion: "2.0.1", + }, + { + ID: "my-other-cve-33333", + RecordSource: "record-source", + PackageName: "package-name-3", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + }, + } + + expected := []v1.Vulnerability{ + { + ID: "my-cve", + RecordSource: "record-source", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", 
"an-other-cve"}, + FixedInVersion: "1.0.1", + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + FixedInVersion: "4.0.5", + }, + } + + total := append(expected, extra...) + + if err = s.AddVulnerability(total...); err != nil { + t.Fatalf("failed to set Vulnerability: %+v", err) + } + + var allEntries []model.VulnerabilityModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityReader(t, s, expected[0].Namespace, expected[0].PackageName, expected) + +} + +func assertVulnerabilityMetadataReader(t *testing.T, reader v1.VulnerabilityMetadataStoreReader, id, recordSource string, expected v1.VulnerabilityMetadata) { + if actual, err := reader.GetVulnerabilityMetadata(id, recordSource); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + +} + +func TestStore_GetVulnerabilityMetadata_SetVulnerabilityMetadata(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + total := []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 1.1, + ExploitabilityScore: 2.2, + ImpactScore: 3.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NOT", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.3, + ExploitabilityScore: 2.1, + ImpactScore: 3.2, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NICE", + }, + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + } + + if err = s.AddVulnerabilityMetadata(total...); err != nil { + t.Fatalf("failed to set metadata: %+v", err) + } + + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + +} + +func TestStore_MergeVulnerabilityMetadata(t *testing.T) { + tests := []struct { + name string + add []v1.VulnerabilityMetadata + expected v1.VulnerabilityMetadata + err bool + }{ + { + name: "go-case", + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + expected: v1.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst 
description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + name: "merge-links", + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://google.com"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://yahoo.com"}, + }, + }, + expected: v1.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re", "https://google.com", "https://yahoo.com"}, + }, + }, + { + name: "bad-severity", + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "meh, push that for next tuesday...", + Links: []string{"https://redhat.com"}, + }, + }, + err: true, + }, + { + name: "mismatch-description", + err: true, + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "mismatch-cvss2", + err: true, + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "mismatch-cvss3", + err: true, + add: []v1.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v1.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v1.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 0, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + // add each metadata in order + var theErr error + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + theErr = err + break + } + } + + if test.err && theErr == nil { + t.Fatalf("expected error but did not get one") + } else if !test.err && theErr != nil { + t.Fatalf("expected no error but got one: %+v", theErr) + } else if test.err && theErr != nil { + // test pass... + return + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + // get the resulting metadata object + if actual, err := s.GetVulnerabilityMetadata(test.expected.ID, test.expected.RecordSource); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + diffs := deep.Equal(&test.expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + }) + } +} diff --git a/pkg/db/v1/vulnerability.go b/pkg/db/v1/vulnerability.go new file mode 100644 index 00000000..c0fa5912 --- /dev/null +++ b/pkg/db/v1/vulnerability.go @@ -0,0 +1,31 @@ +package v1 + +const VulnerabilityStoreFileName = "vulnerability.db" + +// Vulnerability represents the minimum data fields necessary to perform package-to-vulnerability matching. This can represent a CVE, 3rd party advisory, or any source that relates back to a CVE. 
+type Vulnerability struct { + ID string // The identifier of the vulnerability or advisory + RecordSource string // The source of the vulnerability information + PackageName string // The name of the package that is vulnerable + Namespace string // The ecosystem where the package resides + VersionConstraint string // The version range which the given package is vulnerable + VersionFormat string // The format which all version fields should be interpreted as + CPEs []string // The CPEs which are considered vulnerable + ProxyVulnerabilities []string // IDs of other Vulnerabilities that are related to this one (this is how advisories relate to CVEs) + FixedInVersion string // The version which this particular vulnerability was fixed in +} + +type VulnerabilityStore interface { + VulnerabilityStoreReader + VulnerabilityStoreWriter +} + +type VulnerabilityStoreReader interface { + // GetVulnerability retrieves vulnerabilities associated with a namespace and a package name + GetVulnerability(namespace, name string) ([]Vulnerability, error) +} + +type VulnerabilityStoreWriter interface { + // AddVulnerability inserts a new record of a vulnerability into the store + AddVulnerability(vulnerabilities ...Vulnerability) error +} diff --git a/pkg/db/v1/vulnerability_metadata.go b/pkg/db/v1/vulnerability_metadata.go new file mode 100644 index 00000000..218c0ff5 --- /dev/null +++ b/pkg/db/v1/vulnerability_metadata.go @@ -0,0 +1,33 @@ +package v1 + +// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching. +type VulnerabilityMetadata struct { + ID string // The identifier of the vulnerability or advisory + RecordSource string // The source of the vulnerability information + Severity string // How severe the vulnerability is (valid values are defined by upstream sources currently) + Links []string // URLs to get more information about the vulnerability or advisory + Description string // Description of the vulnerability + CvssV2 *Cvss // Common Vulnerability Scoring System V2 values + CvssV3 *Cvss // Common Vulnerability Scoring System V3 values +} + +// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability. +type Cvss struct { + BaseScore float64 // Ranges from 0 - 10 and defines for qualities intrinsic to a vulnerability + ExploitabilityScore float64 // Indicator of how easy it may be for an attacker to exploit a vulnerability + ImpactScore float64 // Representation of the effects of an exploited vulnerability relative to compromise in confidentiality, integrity, and availability + Vector string // A textual representation of the metric values used to determine the score +} + +type VulnerabilityMetadataStore interface { + VulnerabilityMetadataStoreReader + VulnerabilityMetadataStoreWriter +} + +type VulnerabilityMetadataStoreReader interface { + GetVulnerabilityMetadata(id, recordSource string) (*VulnerabilityMetadata, error) +} + +type VulnerabilityMetadataStoreWriter interface { + AddVulnerabilityMetadata(metadata ...VulnerabilityMetadata) error +} diff --git a/pkg/db/v2/id.go b/pkg/db/v2/id.go new file mode 100644 index 00000000..f162ea08 --- /dev/null +++ b/pkg/db/v2/id.go @@ -0,0 +1,28 @@ +package v2 + +import ( + "time" +) + +// ID represents identifying information for a DB and the data it contains. +type ID struct { + // BuildTimestamp is the timestamp used to define the age of the DB, ideally including the age of the data + // contained in the DB, not just when the DB file was created. 
+ BuildTimestamp time.Time + SchemaVersion int +} + +type IDReader interface { + GetID() (*ID, error) +} + +type IDWriter interface { + SetID(ID) error +} + +func NewID(age time.Time) ID { + return ID{ + BuildTimestamp: age.UTC(), + SchemaVersion: SchemaVersion, + } +} diff --git a/pkg/db/v2/namespace.go b/pkg/db/v2/namespace.go new file mode 100644 index 00000000..70ac1030 --- /dev/null +++ b/pkg/db/v2/namespace.go @@ -0,0 +1,30 @@ +package v2 + +import ( + "fmt" +) + +const ( + NVDNamespace = "nvd" +) + +func RecordSource(feed, group string) string { + switch feed { + case "github", "nvdv2": + return group + default: + return fmt.Sprintf("%s:%s", feed, group) + } +} + +func NamespaceForFeedGroup(feed, group string) (string, error) { + switch { + case feed == "vulnerabilities": + return group, nil + case feed == "github": + return group, nil + case feed == "nvdv2" && group == "nvdv2:cves": + return NVDNamespace, nil + } + return "", fmt.Errorf("feed=%q group=%q has no namespace mappings", feed, group) +} diff --git a/pkg/db/v2/namespace_test.go b/pkg/db/v2/namespace_test.go new file mode 100644 index 00000000..f4f4a2bb --- /dev/null +++ b/pkg/db/v2/namespace_test.go @@ -0,0 +1,49 @@ +package v2 + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNamespaceFromRecordSource(t *testing.T) { + tests := []struct { + Feed, Group string + Namespace string + }{ + { + Feed: "vulnerabilities", + Group: "ubuntu:20.04", + Namespace: "ubuntu:20.04", + }, + { + Feed: "vulnerabilities", + Group: "alpine:3.9", + Namespace: "alpine:3.9", + }, + { + Feed: "vulnerabilities", + Group: "sles:12.5", + Namespace: "sles:12.5", + }, + { + Feed: "nvdv2", + Group: "nvdv2:cves", + Namespace: "nvd", + }, + { + Feed: "github", + Group: "github:python", + Namespace: "github:python", + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("feed=%q group=%q namespace=%q", test.Feed, test.Group, test.Namespace), func(t *testing.T) { + actual, err := NamespaceForFeedGroup(test.Feed, test.Group) + assert.NoError(t, err) + assert.Equal(t, test.Namespace, actual) + }) + } +} diff --git a/pkg/db/v2/schema_version.go b/pkg/db/v2/schema_version.go new file mode 100644 index 00000000..86d7d191 --- /dev/null +++ b/pkg/db/v2/schema_version.go @@ -0,0 +1,3 @@ +package v2 + +const SchemaVersion = 2 diff --git a/pkg/db/v2/store.go b/pkg/db/v2/store.go new file mode 100644 index 00000000..ad5cb064 --- /dev/null +++ b/pkg/db/v2/store.go @@ -0,0 +1,19 @@ +package v2 + +type Store interface { + StoreReader + StoreWriter +} + +type StoreReader interface { + IDReader + VulnerabilityStoreReader + VulnerabilityMetadataStoreReader +} + +type StoreWriter interface { + IDWriter + VulnerabilityStoreWriter + VulnerabilityMetadataStoreWriter + Close() +} diff --git a/pkg/db/v2/store/model/id.go b/pkg/db/v2/store/model/id.go new file mode 100644 index 00000000..d3022546 --- /dev/null +++ b/pkg/db/v2/store/model/id.go @@ -0,0 +1,40 @@ +package model + +import ( + "fmt" + "time" + + v2 "github.com/anchore/grype-db/pkg/db/v2" +) + +const ( + IDTableName = "id" +) + +type IDModel struct { + BuildTimestamp string `gorm:"column:build_timestamp"` + SchemaVersion int `gorm:"column:schema_version"` +} + +func NewIDModel(id v2.ID) IDModel { + return IDModel{ + BuildTimestamp: id.BuildTimestamp.Format(time.RFC3339Nano), + SchemaVersion: id.SchemaVersion, + } +} + +func (IDModel) TableName() string { + return IDTableName +} + +func (m *IDModel) Inflate() (v2.ID, error) { + buildTime, err := time.Parse(time.RFC3339Nano, 
m.BuildTimestamp) + if err != nil { + return v2.ID{}, fmt.Errorf("unable to parse build timestamp (%+v): %w", m.BuildTimestamp, err) + } + + return v2.ID{ + BuildTimestamp: buildTime, + SchemaVersion: m.SchemaVersion, + }, nil +} diff --git a/pkg/db/v2/store/model/vulnerability.go b/pkg/db/v2/store/model/vulnerability.go new file mode 100644 index 00000000..578e6c7e --- /dev/null +++ b/pkg/db/v2/store/model/vulnerability.go @@ -0,0 +1,86 @@ +package model + +import ( + "encoding/json" + "fmt" + + v2 "github.com/anchore/grype-db/pkg/db/v2" +) + +const ( + VulnerabilityTableName = "vulnerability" + GetVulnerabilityIndexName = "get_vulnerability_index" +) + +// VulnerabilityModel is a struct used to serialize db.Vulnerability information into a sqlite3 DB. +type VulnerabilityModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id"` + RecordSource string `gorm:"column:record_source"` + PackageName string `gorm:"column:package_name; index:get_vulnerability_index"` + Namespace string `gorm:"column:namespace; index:get_vulnerability_index"` + VersionConstraint string `gorm:"column:version_constraint"` + VersionFormat string `gorm:"column:version_format"` + CPEs string `gorm:"column:cpes"` + ProxyVulnerabilities string `gorm:"column:proxy_vulnerabilities"` + FixedInVersion string `gorm:"column:fixed_in_version"` +} + +// NewVulnerabilityModel generates a new model from a db.Vulnerability struct. +func NewVulnerabilityModel(vulnerability v2.Vulnerability) VulnerabilityModel { + cpes, err := json.Marshal(vulnerability.CPEs) + if err != nil { + // TODO: just no + panic(err) + } + + proxy, err := json.Marshal(vulnerability.ProxyVulnerabilities) + if err != nil { + // TODO: just no + panic(err) + } + + return VulnerabilityModel{ + ID: vulnerability.ID, + PackageName: vulnerability.PackageName, + RecordSource: vulnerability.RecordSource, + Namespace: vulnerability.Namespace, + VersionConstraint: vulnerability.VersionConstraint, + VersionFormat: vulnerability.VersionFormat, + FixedInVersion: vulnerability.FixedInVersion, + CPEs: string(cpes), + ProxyVulnerabilities: string(proxy), + } +} + +// TableName returns the table which all db.Vulnerability model instances are stored into. +func (VulnerabilityModel) TableName() string { + return VulnerabilityTableName +} + +// Inflate generates a db.Vulnerability object from the serialized model instance. 
+func (m *VulnerabilityModel) Inflate() (v2.Vulnerability, error) { + var cpes []string + err := json.Unmarshal([]byte(m.CPEs), &cpes) + if err != nil { + return v2.Vulnerability{}, fmt.Errorf("unable to unmarshal CPEs (%+v): %w", m.CPEs, err) + } + + var proxy []string + err = json.Unmarshal([]byte(m.ProxyVulnerabilities), &proxy) + if err != nil { + return v2.Vulnerability{}, fmt.Errorf("unable to unmarshal proxy vulnerabilities (%+v): %w", m.ProxyVulnerabilities, err) + } + + return v2.Vulnerability{ + ID: m.ID, + RecordSource: m.RecordSource, + PackageName: m.PackageName, + Namespace: m.Namespace, + VersionConstraint: m.VersionConstraint, + VersionFormat: m.VersionFormat, + CPEs: cpes, + ProxyVulnerabilities: proxy, + FixedInVersion: m.FixedInVersion, + }, nil +} diff --git a/pkg/db/v2/store/model/vulnerability_metadata.go b/pkg/db/v2/store/model/vulnerability_metadata.go new file mode 100644 index 00000000..bb92cb84 --- /dev/null +++ b/pkg/db/v2/store/model/vulnerability_metadata.go @@ -0,0 +1,104 @@ +package model + +import ( + "database/sql" + "encoding/json" + "fmt" + + v2 "github.com/anchore/grype-db/pkg/db/v2" +) + +const ( + VulnerabilityMetadataTableName = "vulnerability_metadata" +) + +// VulnerabilityMetadataModel is a struct used to serialize db.VulnerabilityMetadata information into a sqlite3 DB. +type VulnerabilityMetadataModel struct { + ID string `gorm:"primary_key; column:id;"` + RecordSource string `gorm:"primary_key; column:record_source;"` + Severity string `gorm:"column:severity"` + Links string `gorm:"column:links"` + Description string `gorm:"column:description"` + CvssV2 sql.NullString `gorm:"column:cvss_v2"` + CvssV3 sql.NullString `gorm:"column:cvss_v3"` +} + +// NewVulnerabilityMetadataModel generates a new model from a db.VulnerabilityMetadata struct. +func NewVulnerabilityMetadataModel(metadata v2.VulnerabilityMetadata) VulnerabilityMetadataModel { + links, err := json.Marshal(metadata.Links) + if err != nil { + // TODO: just no + panic(err) + } + + var cvssV2Str sql.NullString + if metadata.CvssV2 != nil { + cvssV2, err := json.Marshal(*metadata.CvssV2) + if err != nil { + // TODO: just no + panic(err) + } + cvssV2Str.String = string(cvssV2) + cvssV2Str.Valid = true + } + + var cvssV3Str sql.NullString + if metadata.CvssV3 != nil { + cvssV3, err := json.Marshal(*metadata.CvssV3) + if err != nil { + // TODO: just no + panic(err) + } + cvssV3Str.String = string(cvssV3) + cvssV3Str.Valid = true + } + + return VulnerabilityMetadataModel{ + ID: metadata.ID, + RecordSource: metadata.RecordSource, + Severity: metadata.Severity, + Links: string(links), + Description: metadata.Description, + CvssV2: cvssV2Str, + CvssV3: cvssV3Str, + } +} + +// TableName returns the table which all db.VulnerabilityMetadata model instances are stored into. +func (VulnerabilityMetadataModel) TableName() string { + return VulnerabilityMetadataTableName +} + +// Inflate generates a db.VulnerabilityMetadataModel object from the serialized model instance. 
+func (m *VulnerabilityMetadataModel) Inflate() (v2.VulnerabilityMetadata, error) { + var links []string + var cvssV2, cvssV3 *v2.Cvss + + if err := json.Unmarshal([]byte(m.Links), &links); err != nil { + return v2.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal links (%+v): %w", m.Links, err) + } + + if m.CvssV2.Valid { + err := json.Unmarshal([]byte(m.CvssV2.String), &cvssV2) + if err != nil { + return v2.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvssV2 data (%+v): %w", m.CvssV2, err) + } + } + + if m.CvssV3.Valid { + err := json.Unmarshal([]byte(m.CvssV3.String), &cvssV3) + if err != nil { + return v2.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvssV3 data (%+v): %w", m.CvssV3, err) + } + } + + return v2.VulnerabilityMetadata{ + ID: m.ID, + RecordSource: m.RecordSource, + Severity: m.Severity, + Links: links, + Description: m.Description, + CvssV2: cvssV2, + CvssV3: cvssV3, + }, nil +} diff --git a/pkg/db/v2/store/store.go b/pkg/db/v2/store/store.go new file mode 100644 index 00000000..99de1604 --- /dev/null +++ b/pkg/db/v2/store/store.go @@ -0,0 +1,210 @@ +package store + +import ( + "fmt" + "sort" + + _ "github.com/glebarez/sqlite" // provide the sqlite dialect to gorm via import + "github.com/go-test/deep" + "github.com/scylladb/go-set/strset" + "gorm.io/gorm" + + "github.com/anchore/grype-db/pkg/db/internal/gormadapter" + v2 "github.com/anchore/grype-db/pkg/db/v2" + "github.com/anchore/grype-db/pkg/db/v2/store/model" +) + +// store holds an instance of the database connection +type store struct { + db *gorm.DB +} + +// New creates a new instance of the store. +func New(dbFilePath string, overwrite bool) (v2.Store, error) { + db, err := gormadapter.Open(dbFilePath, overwrite) + if err != nil { + return nil, err + } + if overwrite { + // TODO: automigrate could write to the database, + // we should be validating the database is the correct database based on the version in the ID table before + // automigrating + if err := db.AutoMigrate(&model.IDModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate ID model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMetadataModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Metadata model: %w", err) + } + } + + return &store{ + db: db, + }, nil +} + +// GetID fetches the metadata about the databases schema version and build time. +func (s *store) GetID() (*v2.ID, error) { + var models []model.IDModel + result := s.db.Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple DB IDs") + case len(models) == 1: + id, err := models[0].Inflate() + if err != nil { + return nil, err + } + return &id, nil + } + + return nil, nil +} + +// SetID stores the databases schema version and build time. +func (s *store) SetID(id v2.ID) error { + var ids []model.IDModel + + // replace the existing ID with the given one + s.db.Find(&ids).Delete(&ids) + + m := model.NewIDModel(id) + result := s.db.Create(&m) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add id (%d rows affected)", result.RowsAffected) + } + + return result.Error +} + +// GetVulnerability retrieves one or more vulnerabilities given a namespace and package name. 
+func (s *store) GetVulnerability(namespace, packageName string) ([]v2.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? AND package_name = ?", namespace, packageName).Find(&models) + + var vulnerabilities = make([]v2.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// AddVulnerability saves one or more vulnerabilities into the sqlite3 store. +func (s *store) AddVulnerability(vulnerabilities ...v2.Vulnerability) error { + for _, vulnerability := range vulnerabilities { + m := model.NewVulnerabilityModel(vulnerability) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability (%d rows affected)", result.RowsAffected) + } + } + return nil +} + +// GetVulnerabilityMetadata retrieves metadata for the given vulnerability ID relative to a specific record source. +func (s *store) GetVulnerabilityMetadata(id, recordSource string) (*v2.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + + result := s.db.Where(&model.VulnerabilityMetadataModel{ID: id, RecordSource: recordSource}).Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple metadatas for single ID=%q RecordSource=%q", id, recordSource) + case len(models) == 1: + metadata, err := models[0].Inflate() + if err != nil { + return nil, err + } + + return &metadata, nil + } + + return nil, nil +} + +// AddVulnerabilityMetadata stores one or more vulnerability metadata models into the sqlite DB. +func (s *store) AddVulnerabilityMetadata(metadata ...v2.VulnerabilityMetadata) error { + for _, m := range metadata { + existing, err := s.GetVulnerabilityMetadata(m.ID, m.RecordSource) + if err != nil { + return fmt.Errorf("failed to verify existing entry: %w", err) + } + + if existing != nil { + // merge with the existing entry + + cvssV3Diffs := deep.Equal(existing.CvssV3, m.CvssV3) + cvssV2Diffs := deep.Equal(existing.CvssV2, m.CvssV2) + + switch { + case existing.Severity != m.Severity: + return fmt.Errorf("existing metadata has mismatched severity (%q!=%q)", existing.Severity, m.Severity) + case existing.Description != m.Description: + return fmt.Errorf("existing metadata has mismatched description (%q!=%q)", existing.Description, m.Description) + case existing.CvssV2 != nil && len(cvssV2Diffs) > 0: + return fmt.Errorf("existing metadata has mismatched cvss-v2: %+v", cvssV2Diffs) + case existing.CvssV3 != nil && len(cvssV3Diffs) > 0: + return fmt.Errorf("existing metadata has mismatched cvss-v3: %+v", cvssV3Diffs) + default: + existing.CvssV2 = m.CvssV2 + existing.CvssV3 = m.CvssV3 + } + + links := strset.New(existing.Links...) 
+ for _, l := range m.Links { + links.Add(l) + } + + existing.Links = links.List() + sort.Strings(existing.Links) + + newModel := model.NewVulnerabilityMetadataModel(*existing) + result := s.db.Save(&newModel) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to merge vulnerability metadata (%d rows affected)", result.RowsAffected) + } + + if result.Error != nil { + return result.Error + } + } else { + // this is a new entry + newModel := model.NewVulnerabilityMetadataModel(m) + result := s.db.Create(&newModel) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability metadata (%d rows affected)", result.RowsAffected) + } + } + } + return nil +} + +func (s *store) Close() { + s.db.Exec("VACUUM;") +} diff --git a/pkg/db/v2/store/store_test.go b/pkg/db/v2/store/store_test.go new file mode 100644 index 00000000..75befa75 --- /dev/null +++ b/pkg/db/v2/store/store_test.go @@ -0,0 +1,494 @@ +package store + +import ( + "testing" + "time" + + "github.com/go-test/deep" + + v2 "github.com/anchore/grype-db/pkg/db/v2" + "github.com/anchore/grype-db/pkg/db/v2/store/model" +) + +func assertIDReader(t *testing.T, reader v2.IDReader, expected v2.ID) { + t.Helper() + if actual, err := reader.GetID(); err != nil { + t.Fatalf("failed to get ID: %+v", err) + } else { + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } +} + +func TestStore_GetID_SetID(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + expected := v2.ID{ + BuildTimestamp: time.Now().UTC(), + SchemaVersion: 2, + } + + if err = s.SetID(expected); err != nil { + t.Fatalf("failed to set ID: %+v", err) + } + + assertIDReader(t, s, expected) + +} + +func assertVulnerabilityReader(t *testing.T, reader v2.VulnerabilityStoreReader, namespace, name string, expected []v2.Vulnerability) { + if actual, err := reader.GetVulnerability(namespace, name); err != nil { + t.Fatalf("failed to get Vulnerability: %+v", err) + } else { + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulns: %d", len(actual)) + } + + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerability_SetVulnerability(t *testing.T) { + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := []v2.Vulnerability{ + { + ID: "my-cve-33333", + RecordSource: "record-source", + PackageName: "package-name-2", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + FixedInVersion: "2.0.1", + }, + { + ID: "my-other-cve-33333", + RecordSource: "record-source", + PackageName: "package-name-3", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + }, + } + + expected := []v2.Vulnerability{ + { + ID: "my-cve", + RecordSource: "record-source", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", 
"an-other-cve"}, + FixedInVersion: "1.0.1", + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + ProxyVulnerabilities: []string{"another-cve", "an-other-cve"}, + FixedInVersion: "4.0.5", + }, + } + + total := append(expected, extra...) + + if err = s.AddVulnerability(total...); err != nil { + t.Fatalf("failed to set Vulnerability: %+v", err) + } + + var allEntries []model.VulnerabilityModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityReader(t, s, expected[0].Namespace, expected[0].PackageName, expected) + +} + +func assertVulnerabilityMetadataReader(t *testing.T, reader v2.VulnerabilityMetadataStoreReader, id, recordSource string, expected v2.VulnerabilityMetadata) { + if actual, err := reader.GetVulnerabilityMetadata(id, recordSource); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + +} + +func TestStore_GetVulnerabilityMetadata_SetVulnerabilityMetadata(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + total := []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 1.1, + ExploitabilityScore: 2.2, + ImpactScore: 3.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NOT", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.3, + ExploitabilityScore: 2.1, + ImpactScore: 3.2, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NICE", + }, + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + } + + if err = s.AddVulnerabilityMetadata(total...); err != nil { + t.Fatalf("failed to set metadata: %+v", err) + } + + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + +} + +func TestStore_MergeVulnerabilityMetadata(t *testing.T) { + tests := []struct { + name string + add []v2.VulnerabilityMetadata + expected v2.VulnerabilityMetadata + err bool + }{ + { + name: "go-case", + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + expected: v2.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst 
description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + name: "merge-links", + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://google.com"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://yahoo.com"}, + }, + }, + expected: v2.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re", "https://google.com", "https://yahoo.com"}, + }, + }, + { + name: "bad-severity", + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "meh, push that for next tuesday...", + Links: []string{"https://redhat.com"}, + }, + }, + err: true, + }, + { + name: "mismatch-description", + err: true, + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "worst description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "mismatch-cvss2", + err: true, + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "mismatch-cvss3", + err: true, + add: []v2.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 2.5, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Severity: "pretty bad", + Links: []string{"https://ancho.re"}, + Description: "best description ever", + CvssV2: &v2.Cvss{ + BaseScore: 4.1, + ExploitabilityScore: 5.2, + ImpactScore: 6.3, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + CvssV3: &v2.Cvss{ + BaseScore: 1.4, + ExploitabilityScore: 0, + ImpactScore: 3.6, + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + // add each metadata in order + var theErr error + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + theErr = err + break + } + } + + if test.err && theErr == nil { + t.Fatalf("expected error but did not get one") + } else if !test.err && theErr != nil { + t.Fatalf("expected no error but got one: %+v", theErr) + } else if test.err && theErr != nil { + // test pass... + return + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + // get the resulting metadata object + if actual, err := s.GetVulnerabilityMetadata(test.expected.ID, test.expected.RecordSource); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + diffs := deep.Equal(&test.expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + }) + } +} diff --git a/pkg/db/v2/vulnerability.go b/pkg/db/v2/vulnerability.go new file mode 100644 index 00000000..f76d76f7 --- /dev/null +++ b/pkg/db/v2/vulnerability.go @@ -0,0 +1,14 @@ +package v2 + +// Vulnerability represents the minimum data fields necessary to perform package-to-vulnerability matching. This can represent a CVE, 3rd party advisory, or any source that relates back to a CVE. +type Vulnerability struct { + ID string // The identifier of the vulnerability or advisory + RecordSource string // The source of the vulnerability information + PackageName string // The name of the package that is vulnerable + Namespace string // The ecosystem where the package resides + VersionConstraint string // The version range which the given package is vulnerable + VersionFormat string // The format which all version fields should be interpreted as + CPEs []string // The CPEs which are considered vulnerable + ProxyVulnerabilities []string // IDs of other Vulnerabilities that are related to this one (this is how advisories relate to CVEs) + FixedInVersion string // The version which this particular vulnerability was fixed in +} diff --git a/pkg/db/v2/vulnerability_metadata.go b/pkg/db/v2/vulnerability_metadata.go new file mode 100644 index 00000000..d92395b5 --- /dev/null +++ b/pkg/db/v2/vulnerability_metadata.go @@ -0,0 +1,20 @@ +package v2 + +// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching. 
+type VulnerabilityMetadata struct { + ID string // The identifier of the vulnerability or advisory + RecordSource string // The source of the vulnerability information + Severity string // How severe the vulnerability is (valid values are defined by upstream sources currently) + Links []string // URLs to get more information about the vulnerability or advisory + Description string // Description of the vulnerability + CvssV2 *Cvss // Common Vulnerability Scoring System V2 values + CvssV3 *Cvss // Common Vulnerability Scoring System V3 values +} + +// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability. +type Cvss struct { + BaseScore float64 // Ranges from 0 - 10 and defines for qualities intrinsic to a vulnerability + ExploitabilityScore float64 // Indicator of how easy it may be for an attacker to exploit a vulnerability + ImpactScore float64 // Representation of the effects of an exploited vulnerability relative to compromise in confidentiality, integrity, and availability + Vector string // A textual representation of the metric values used to determine the score +} diff --git a/pkg/db/v2/vulnerability_metadata_store.go b/pkg/db/v2/vulnerability_metadata_store.go new file mode 100644 index 00000000..65b726e0 --- /dev/null +++ b/pkg/db/v2/vulnerability_metadata_store.go @@ -0,0 +1,14 @@ +package v2 + +type VulnerabilityMetadataStore interface { + VulnerabilityMetadataStoreReader + VulnerabilityMetadataStoreWriter +} + +type VulnerabilityMetadataStoreReader interface { + GetVulnerabilityMetadata(id, recordSource string) (*VulnerabilityMetadata, error) +} + +type VulnerabilityMetadataStoreWriter interface { + AddVulnerabilityMetadata(metadata ...VulnerabilityMetadata) error +} diff --git a/pkg/db/v2/vulnerability_store.go b/pkg/db/v2/vulnerability_store.go new file mode 100644 index 00000000..d3c18e64 --- /dev/null +++ b/pkg/db/v2/vulnerability_store.go @@ -0,0 +1,18 @@ +package v2 + +const VulnerabilityStoreFileName = "vulnerability.db" + +type VulnerabilityStore interface { + VulnerabilityStoreReader + VulnerabilityStoreWriter +} + +type VulnerabilityStoreReader interface { + // GetVulnerability retrieves vulnerabilities associated with a namespace and a package name + GetVulnerability(namespace, name string) ([]Vulnerability, error) +} + +type VulnerabilityStoreWriter interface { + // AddVulnerability inserts a new record of a vulnerability into the store + AddVulnerability(vulnerabilities ...Vulnerability) error +} diff --git a/pkg/db/v3/advisory.go b/pkg/db/v3/advisory.go new file mode 100644 index 00000000..2bd47843 --- /dev/null +++ b/pkg/db/v3/advisory.go @@ -0,0 +1,7 @@ +package v3 + +// Advisory represents published statements regarding a vulnerability (and potentially about it's resolution). 
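+// For example, a vendor or distro advisory identifier (such as an RHSA or GHSA ID) paired with a link to the
+// published advisory text.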
+type Advisory struct { + ID string + Link string +} diff --git a/pkg/db/v3/diff.go b/pkg/db/v3/diff.go new file mode 100644 index 00000000..d9d2cc52 --- /dev/null +++ b/pkg/db/v3/diff.go @@ -0,0 +1,16 @@ +package v3 + +type DiffReason = string + +const ( + DiffAdded DiffReason = "added" + DiffChanged DiffReason = "changed" + DiffRemoved DiffReason = "removed" +) + +type Diff struct { + Reason DiffReason `json:"reason"` + ID string `json:"id"` + Namespace string `json:"namespace"` + Packages []string `json:"packages"` +} diff --git a/pkg/db/v3/fix.go b/pkg/db/v3/fix.go new file mode 100644 index 00000000..e7d6269a --- /dev/null +++ b/pkg/db/v3/fix.go @@ -0,0 +1,16 @@ +package v3 + +type FixState string + +const ( + UnknownFixState FixState = "unknown" + FixedState FixState = "fixed" + NotFixedState FixState = "not-fixed" + WontFixState FixState = "wont-fix" +) + +// Fix represents all information about known fixes for a stated vulnerability. +type Fix struct { + Versions []string // The version(s) which this particular vulnerability was fixed in + State FixState +} diff --git a/pkg/db/v3/id.go b/pkg/db/v3/id.go new file mode 100644 index 00000000..1a5033e3 --- /dev/null +++ b/pkg/db/v3/id.go @@ -0,0 +1,28 @@ +package v3 + +import ( + "time" +) + +// ID represents identifying information for a DB and the data it contains. +type ID struct { + // BuildTimestamp is the timestamp used to define the age of the DB, ideally including the age of the data + // contained in the DB, not just when the DB file was created. + BuildTimestamp time.Time + SchemaVersion int +} + +type IDReader interface { + GetID() (*ID, error) +} + +type IDWriter interface { + SetID(ID) error +} + +func NewID(age time.Time) ID { + return ID{ + BuildTimestamp: age.UTC(), + SchemaVersion: SchemaVersion, + } +} diff --git a/pkg/db/v3/namespace.go b/pkg/db/v3/namespace.go new file mode 100644 index 00000000..b5d9e8e0 --- /dev/null +++ b/pkg/db/v3/namespace.go @@ -0,0 +1,138 @@ +package v3 + +import ( + "fmt" + "strings" + + "github.com/scylladb/go-set/strset" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype/grype/distro" + "github.com/anchore/grype/grype/pkg" + packageurl "github.com/anchore/packageurl-go" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +const ( + NVDNamespace = "nvd" + MSRCNamespacePrefix = "msrc" + VulnDBNamespace = "vulndb" +) + +func RecordSource(feed, group string) string { + return fmt.Sprintf("%s:%s", feed, group) +} + +func NamespaceForFeedGroup(feed, group string) (string, error) { + switch { + case feed == "vulnerabilities": + return group, nil + case feed == "github": + return group, nil + case feed == "nvdv2" && group == "nvdv2:cves": + return NVDNamespace, nil + case feed == "vulndb" && group == "vulndb:vulnerabilities": + return VulnDBNamespace, nil + case feed == "microsoft" && strings.HasPrefix(group, MSRCNamespacePrefix+":"): + return group, nil + } + return "", fmt.Errorf("feed=%q group=%q has no namespace mappings", feed, group) +} + +// NamespaceFromDistro returns the correct Feed Service namespace for the given +// distro. A namespace is a distinct identifier from the Feed Service, and it +// can be a combination of distro name and version(s), for example "amzn:8". +// This is critical to query the database and correlate the distro version with +// feed contents. Namespaces have to exist in the Feed Service, otherwise, +// this causes no results to be returned when the database is queried. 
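To make the feed/group mappings above concrete, a small sketch (assuming this package is imported as v3; the expected outputs follow directly from the switch cases above):

    fmt.Println(v3.RecordSource("vulnerabilities", "ubuntu:20.04")) // vulnerabilities:ubuntu:20.04

    ns, err := v3.NamespaceForFeedGroup("nvdv2", "nvdv2:cves")
    fmt.Println(ns, err) // nvd <nil>

    ns, err = v3.NamespaceForFeedGroup("microsoft", "msrc:11769")
    fmt.Println(ns, err) // msrc:11769 <nil>

NamespaceForDistro, defined next, covers the distro-derived namespaces in the same spirit.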
+func NamespaceForDistro(d *distro.Distro) string { + if d == nil { + return "" + } + + if d.IsRolling() { + return fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "rolling") + } + + var versionSegments []int + if d.Version != nil { + versionSegments = d.Version.Segments() + } + + if len(versionSegments) > 0 { + switch d.Type { + // derived from https://github.com/anchore/anchore-engine/blob/5bbbe6b9744f2fb806198ae5d6f0cfe3b367fd9d/anchore_engine/services/policy_engine/__init__.py#L149-L159 + case distro.CentOS, distro.RedHat, distro.Fedora, distro.RockyLinux, distro.AlmaLinux: + // TODO: there is no mapping of fedora version to RHEL latest version (only the name) + return fmt.Sprintf("rhel:%d", versionSegments[0]) + case distro.AmazonLinux: + return fmt.Sprintf("amzn:%d", versionSegments[0]) + case distro.OracleLinux: + return fmt.Sprintf("ol:%d", versionSegments[0]) + case distro.Alpine: + // XXX this assumes that a major and minor versions will always exist in Segments + return fmt.Sprintf("alpine:%d.%d", versionSegments[0], versionSegments[1]) + case distro.SLES: + return fmt.Sprintf("sles:%d.%d", versionSegments[0], versionSegments[1]) + case distro.Windows: + return fmt.Sprintf("%s:%d", MSRCNamespacePrefix, versionSegments[0]) + } + } + return fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), d.FullVersion()) +} + +func NamespacesIndexedByCPE() []string { + return []string{NVDNamespace, VulnDBNamespace} +} + +func NamespacePackageNamersForLanguage(l syftPkg.Language) map[string]NamerByPackage { + namespaces := make(map[string]NamerByPackage) + switch l { + case syftPkg.Ruby: + namespaces["github:gem"] = defaultPackageNamer + case syftPkg.Java: + namespaces["github:java"] = githubJavaPackageNamer + case syftPkg.JavaScript: + namespaces["github:npm"] = defaultPackageNamer + case syftPkg.Python: + namespaces["github:python"] = defaultPackageNamer + case syftPkg.Dotnet: + namespaces["github:nuget"] = defaultPackageNamer + default: + namespaces[fmt.Sprintf("github:%s", l)] = defaultPackageNamer + } + return namespaces +} + +type NamerByPackage func(p pkg.Package) []string + +func defaultPackageNamer(p pkg.Package) []string { + return []string{p.Name} +} + +func githubJavaPackageNamer(p pkg.Package) []string { + names := strset.New() + + // all github advisories are stored by ":" + if metadata, ok := p.Metadata.(pkg.JavaMetadata); ok { + if metadata.PomGroupID != "" { + if metadata.PomArtifactID != "" { + names.Add(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.PomArtifactID)) + } + if metadata.ManifestName != "" { + names.Add(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.ManifestName)) + } + } + } + + if p.PURL != "" { + purl, err := packageurl.FromString(p.PURL) + if err != nil { + log.Warnf("unable to extract GHSA java package information from purl=%q: %+v", p.PURL, err) + } else { + names.Add(fmt.Sprintf("%s:%s", purl.Namespace, purl.Name)) + } + } + + return names.List() +} diff --git a/pkg/db/v3/namespace_test.go b/pkg/db/v3/namespace_test.go new file mode 100644 index 00000000..9e67aa8a --- /dev/null +++ b/pkg/db/v3/namespace_test.go @@ -0,0 +1,523 @@ +package v3 + +import ( + "fmt" + "testing" + + "github.com/google/uuid" + "github.com/scylladb/go-set/strset" + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype/grype/distro" + "github.com/anchore/grype/grype/pkg" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func Test_NamespaceFromRecordSource(t *testing.T) { + tests := []struct { + Feed, Group string + Namespace string + }{ + { + Feed: 
"vulnerabilities", + Group: "ubuntu:20.04", + Namespace: "ubuntu:20.04", + }, + { + Feed: "vulnerabilities", + Group: "alpine:3.9", + Namespace: "alpine:3.9", + }, + { + Feed: "nvdv2", + Group: "nvdv2:cves", + Namespace: "nvd", + }, + { + Feed: "github", + Group: "github:python", + Namespace: "github:python", + }, + { + Feed: "vulndb", + Group: "vulndb:vulnerabilities", + Namespace: "vulndb", + }, + { + Feed: "microsoft", + Group: "msrc:11769", + Namespace: "msrc:11769", + }, + } + + for _, test := range tests { + t.Run(fmt.Sprintf("feed=%q group=%q namespace=%q", test.Feed, test.Group, test.Namespace), func(t *testing.T) { + actual, err := NamespaceForFeedGroup(test.Feed, test.Group) + assert.NoError(t, err) + assert.Equal(t, test.Namespace, actual) + }) + } +} + +func Test_NamespaceForDistro(t *testing.T) { + tests := []struct { + dist distro.Type + version string + expected string + }{ + // regression: https://github.com/anchore/grype/issues/221 + { + dist: distro.RedHat, + version: "8.3", + expected: "rhel:8", + }, + { + dist: distro.CentOS, + version: "8.3", + expected: "rhel:8", + }, + { + dist: distro.AmazonLinux, + version: "8.3", + expected: "amzn:8", + }, + { + dist: distro.OracleLinux, + version: "8.3", + expected: "ol:8", + }, + { + dist: distro.Fedora, + version: "31.1", + // TODO: this is incorrect and will be solved in a future issue (to map the fedora version to the rhel latest version) + expected: "rhel:31", + }, + // end of regression #221 + { + dist: distro.RedHat, + version: "8", + expected: "rhel:8", + }, + { + dist: distro.AmazonLinux, + version: "2", + expected: "amzn:2", + }, + { + dist: distro.OracleLinux, + version: "6", + expected: "ol:6", + }, + { + dist: distro.Alpine, + version: "1.3.1", + expected: "alpine:1.3", + }, + { + dist: distro.Debian, + version: "8", + expected: "debian:8", + }, + { + dist: distro.Fedora, + version: "31", + expected: "rhel:31", + }, + { + dist: distro.Busybox, + version: "3.1.1", + expected: "busybox:3.1.1", + }, + { + dist: distro.CentOS, + version: "7", + expected: "rhel:7", + }, + { + dist: distro.Ubuntu, + version: "18.04", + expected: "ubuntu:18.04", + }, + { + // TODO: this is not correct. This should be mapped to a feed source. + dist: distro.ArchLinux, + version: "", // ArchLinux doesn't expose a version + expected: "archlinux:rolling", + }, + { + // TODO: this is not correct. This should be mapped to a feed source. + dist: distro.OpenSuseLeap, + version: "15.2", + expected: "opensuseleap:15.2", + }, + { + // TODO: this is not correct. This should be mapped to a feed source. 
+ dist: distro.Photon, + version: "4.0", + expected: "photon:4.0", + }, + { + dist: distro.SLES, + version: "12.5", + expected: "sles:12.5", + }, + { + dist: distro.Windows, + version: "471816", + expected: "msrc:471816", + }, + { + dist: distro.RockyLinux, + version: "8.5", + expected: "rhel:8", + }, + { + dist: distro.AlmaLinux, + version: "8.5", + expected: "rhel:8", + }, + { + dist: distro.Gentoo, + version: "", // Gentoo is a rolling release + expected: "gentoo:rolling", + }, + { + dist: distro.Wolfi, + version: "2022yzblah", // Wolfi is a rolling release + expected: "wolfi:rolling", + }, + { + dist: distro.Chainguard, + expected: "chainguard:rolling", + }, + } + + observedDistros := strset.New() + allDistros := strset.New() + + for _, d := range distro.All { + allDistros.Add(d.String()) + } + + // TODO: what do we do with mariner + allDistros.Remove(distro.Mariner.String()) + + for _, test := range tests { + name := fmt.Sprintf("%s:%s", test.dist, test.version) + t.Run(name, func(t *testing.T) { + d, err := distro.New(test.dist, test.version, "") + assert.NoError(t, err) + observedDistros.Add(d.Type.String()) + assert.Equal(t, test.expected, NamespaceForDistro(d)) + }) + } + + assert.ElementsMatch(t, allDistros.List(), observedDistros.List(), "at least one distro doesn't have a corresponding test") +} + +func Test_NamespacesIndexedByCPE(t *testing.T) { + assert.ElementsMatch(t, NamespacesIndexedByCPE(), []string{"nvd", "vulndb"}) +} + +func Test_NamespacesForLanguage(t *testing.T) { + tests := []struct { + language syftPkg.Language + namerInput *pkg.Package + expectedNamespaces []string + expectedNames []string + }{ + // default languages + { + language: syftPkg.Rust, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:rust", + }, + expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.Go, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:go", + }, + expectedNames: []string{ + "a-name", + }, + }, + // supported languages + { + language: syftPkg.Ruby, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:gem", + }, + expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.JavaScript, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:npm", + }, + expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.Python, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:python", + }, + expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.Java, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + expectedNamespaces: []string{ + "github:java", + }, + expectedNames: []string{ + "g-id:art-id", + "g-id:man-name", + }, + }, + { + language: syftPkg.Dart, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:dart", + }, + expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.Dotnet, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + }, + expectedNamespaces: []string{ + "github:nuget", + }, + 
expectedNames: []string{ + "a-name", + }, + }, + { + language: syftPkg.Haskell, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "h-name", + }, + expectedNamespaces: []string{ + "github:haskell", + }, + expectedNames: []string{ + "h-name", + }, + }, + { + language: syftPkg.Elixir, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "e-name", + }, + expectedNamespaces: []string{ + "github:elixir", + }, + expectedNames: []string{ + "e-name", + }, + }, + { + language: syftPkg.Erlang, + namerInput: &pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "2-name", + }, + expectedNamespaces: []string{ + "github:erlang", + }, + expectedNames: []string{ + "2-name", + }, + }, + } + + observedLanguages := strset.New() + allLanguages := strset.New() + + for _, l := range syftPkg.AllLanguages { + allLanguages.Add(string(l)) + } + + // remove PHP, CPP for coverage as feed has not been updated + allLanguages.Remove(string(syftPkg.PHP)) + allLanguages.Remove(string(syftPkg.CPP)) + allLanguages.Remove(string(syftPkg.Swift)) + allLanguages.Remove(string(syftPkg.R)) + + for _, test := range tests { + t.Run(string(test.language), func(t *testing.T) { + observedLanguages.Add(string(test.language)) + var actualNamespaces, actualNames []string + namers := NamespacePackageNamersForLanguage(test.language) + for namespace, namerFn := range namers { + actualNamespaces = append(actualNamespaces, namespace) + actualNames = append(actualNames, namerFn(*test.namerInput)...) + } + assert.ElementsMatch(t, actualNamespaces, test.expectedNamespaces) + assert.ElementsMatch(t, actualNames, test.expectedNames) + }) + } + + assert.ElementsMatch(t, allLanguages.List(), observedLanguages.List(), "at least one language doesn't have a corresponding test") +} + +func Test_githubJavaPackageNamer(t *testing.T) { + tests := []struct { + name string + namerInput pkg.Package + expected []string + }{ + { + name: "both artifact and manifest", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + expected: []string{ + "g-id:art-id", + "g-id:man-name", + }, + }, + { + name: "no group id", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + ManifestName: "man-name", + }, + }, + expected: []string{}, + }, + { + name: "only manifest", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + expected: []string{ + "g-id:man-name", + }, + }, + { + name: "only artifact", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + expected: []string{ + "g-id:art-id", + }, + }, + { + name: "no artifact or manifest", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + }, + }, + expected: []string{}, + }, + { + name: "with valid purl", + namerInput: pkg.Package{ + ID: pkg.ID(uuid.NewString()), + Name: "a-name", + PURL: "pkg:maven/org.anchore/b-name@0.2", + }, + expected: []string{"org.anchore:b-name"}, + }, + { + name: "ignore invalid pURLs", + namerInput: pkg.Package{ + ID: 
pkg.ID(uuid.NewString()), + Name: "a-name", + PURL: "pkg:BAD/", + Metadata: pkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + expected: []string{ + "g-id:art-id", + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + assert.ElementsMatch(t, githubJavaPackageNamer(test.namerInput), test.expected) + }) + } +} diff --git a/pkg/db/v3/schema_version.go b/pkg/db/v3/schema_version.go new file mode 100644 index 00000000..5a3e5194 --- /dev/null +++ b/pkg/db/v3/schema_version.go @@ -0,0 +1,3 @@ +package v3 + +const SchemaVersion = 3 diff --git a/pkg/db/v3/store.go b/pkg/db/v3/store.go new file mode 100644 index 00000000..0817651c --- /dev/null +++ b/pkg/db/v3/store.go @@ -0,0 +1,24 @@ +package v3 + +type Store interface { + StoreReader + StoreWriter +} + +type StoreReader interface { + IDReader + DiffReader + VulnerabilityStoreReader + VulnerabilityMetadataStoreReader +} + +type StoreWriter interface { + IDWriter + VulnerabilityStoreWriter + VulnerabilityMetadataStoreWriter + Close() +} + +type DiffReader interface { + DiffStore(s StoreReader) (*[]Diff, error) +} diff --git a/pkg/db/v3/store/diff.go b/pkg/db/v3/store/diff.go new file mode 100644 index 00000000..55cf034f --- /dev/null +++ b/pkg/db/v3/store/diff.go @@ -0,0 +1,305 @@ +package store + +import ( + "github.com/wagoodman/go-partybus" + "github.com/wagoodman/go-progress" + + "github.com/anchore/grype-db/internal/bus" + v3 "github.com/anchore/grype-db/pkg/db/v3" + "github.com/anchore/grype/grype/event" + "github.com/anchore/grype/grype/event/monitor" +) + +type storeKey struct { + id string + namespace string + packageName string +} + +type PkgMap = map[storeKey][]string + +type storeVulnerabilityList struct { + items map[storeKey][]storeVulnerability + seen bool +} +type storeVulnerability struct { + item *v3.Vulnerability + seen bool +} +type storeMetadata struct { + item *v3.VulnerabilityMetadata + seen bool +} + +// create manual progress bars for tracking the database diff's progress +func trackDiff(total int64) (*progress.Manual, *progress.Manual, *progress.Stage) { + stageProgress := &progress.Manual{} + stageProgress.SetTotal(total) + differencesDiscovered := &progress.Manual{} + stager := &progress.Stage{} + + bus.Publish(partybus.Event{ + Type: event.DatabaseDiffingStarted, + Value: monitor.DBDiff{ + Stager: stager, + StageProgress: progress.Progressable(stageProgress), + DifferencesDiscovered: progress.Monitorable(differencesDiscovered), + }, + }) + return stageProgress, differencesDiscovered, stager +} + +// creates a map from an unpackaged key to a list of all packages associated with it +func buildVulnerabilityPkgsMap(models *[]v3.Vulnerability) *map[storeKey][]string { + storeMap := make(map[storeKey][]string) + for _, m := range *models { + model := m + k := getVulnerabilityParentKey(model) + if storeVuln, exists := storeMap[k]; exists { + storeMap[k] = append(storeVuln, model.PackageName) + } else { + storeMap[k] = []string{model.PackageName} + } + } + return &storeMap +} + +// creates a diff from the given key using the package maps information to populate +// the relevant packages affected by the update +func createDiff(baseStore, targetStore *PkgMap, key storeKey, reason v3.DiffReason) *v3.Diff { + pkgMap := make(map[string]struct{}) + + key.packageName = "" + if baseStore != nil { + if basePkgs, exists := (*baseStore)[key]; exists { + for _, pkg := range basePkgs { + pkgMap[pkg] = struct{}{} + } + } + } + if targetStore != nil { + if 
targetPkgs, exists := (*targetStore)[key]; exists { + for _, pkg := range targetPkgs { + pkgMap[pkg] = struct{}{} + } + } + } + pkgs := []string{} + for pkg := range pkgMap { + pkgs = append(pkgs, pkg) + } + + return &v3.Diff{ + Reason: reason, + ID: key.id, + Namespace: key.namespace, + Packages: pkgs, + } +} + +// gets an unpackaged key from a vulnerability +func getVulnerabilityParentKey(vuln v3.Vulnerability) storeKey { + return storeKey{vuln.ID, vuln.Namespace, ""} +} + +// gets a packaged key from a vulnerability +func getVulnerabilityKey(vuln v3.Vulnerability) storeKey { + return storeKey{vuln.ID, vuln.Namespace, vuln.PackageName} +} + +type VulnerabilitySet struct { + data map[storeKey]*storeVulnerabilityList +} + +func NewVulnerabilitySet(models *[]v3.Vulnerability) *VulnerabilitySet { + m := make(map[storeKey]*storeVulnerabilityList, len(*models)) + for _, mm := range *models { + model := mm + parentKey := getVulnerabilityParentKey(model) + vulnKey := getVulnerabilityKey(model) + if storeVuln, exists := m[parentKey]; exists { + if kk, exists := storeVuln.items[vulnKey]; exists { + storeVuln.items[vulnKey] = append(kk, storeVulnerability{ + item: &model, + seen: false, + }) + } else { + storeVuln.items[vulnKey] = []storeVulnerability{{&model, false}} + } + } else { + vuln := storeVulnerabilityList{ + items: make(map[storeKey][]storeVulnerability), + seen: false, + } + vuln.items[vulnKey] = []storeVulnerability{{&model, false}} + m[parentKey] = &vuln + } + } + return &VulnerabilitySet{ + data: m, + } +} + +func (v *VulnerabilitySet) in(item v3.Vulnerability) bool { + _, exists := v.data[getVulnerabilityParentKey(item)] + return exists +} + +func (v *VulnerabilitySet) match(item v3.Vulnerability) bool { + if parent, exists := v.data[getVulnerabilityParentKey(item)]; exists { + parent.seen = true + key := getVulnerabilityKey(item) + if children, exists := parent.items[key]; exists { + for idx, child := range children { + if item.Equal(*child.item) { + children[idx].seen = true + return true + } + } + } + } + return false +} + +func (v *VulnerabilitySet) getUnmatched() ([]storeKey, []storeKey) { + notSeen := []storeKey{} + notEntirelySeen := []storeKey{} + for k, item := range v.data { + if !item.seen { + notSeen = append(notSeen, k) + continue + } + componentLoop: + for _, components := range item.items { + for _, component := range components { + if !component.seen { + notEntirelySeen = append(notEntirelySeen, k) + break componentLoop + } + } + } + } + return notSeen, notEntirelySeen +} + +func diffVulnerabilities(baseModels, targetModels *[]v3.Vulnerability, basePkgsMap, targetPkgsMap *PkgMap, differentItems *progress.Manual) *map[string]*v3.Diff { + diffs := make(map[string]*v3.Diff) + m := NewVulnerabilitySet(baseModels) + + for _, tModel := range *targetModels { + targetModel := tModel + k := getVulnerabilityKey(targetModel) + if m.in(targetModel) { + matched := m.match(targetModel) + if !matched { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, v3.DiffChanged) + differentItems.Increment() + } + } else { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(nil, targetPkgsMap, k, v3.DiffAdded) + differentItems.Increment() + } + } + notSeen, partialSeen := m.getUnmatched() + for _, k := range partialSeen { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, 
v3.DiffChanged) + differentItems.Increment() + } + for _, k := range notSeen { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, nil, k, v3.DiffRemoved) + differentItems.Increment() + } + + return &diffs +} + +type MetadataSet struct { + data map[storeKey]*storeMetadata +} + +func NewMetadataSet(models *[]v3.VulnerabilityMetadata) *MetadataSet { + m := make(map[storeKey]*storeMetadata, len(*models)) + for _, mm := range *models { + model := mm + m[getMetadataKey(model)] = &storeMetadata{ + item: &model, + seen: false, + } + } + return &MetadataSet{ + data: m, + } +} + +func (v *MetadataSet) in(item v3.VulnerabilityMetadata) bool { + _, exists := v.data[getMetadataKey(item)] + return exists +} + +func (v *MetadataSet) match(item v3.VulnerabilityMetadata) bool { + if baseModel, exists := v.data[getMetadataKey(item)]; exists { + baseModel.seen = true + return baseModel.item.Equal(item) + } + return false +} + +func (v *MetadataSet) getUnmatched() []storeKey { + notSeen := []storeKey{} + for k, item := range v.data { + if !item.seen { + notSeen = append(notSeen, k) + } + } + return notSeen +} + +func diffVulnerabilityMetadata(baseModels, targetModels *[]v3.VulnerabilityMetadata, basePkgsMap, targetPkgsMap *PkgMap, differentItems *progress.Manual) *map[string]*v3.Diff { + diffs := make(map[string]*v3.Diff) + m := NewMetadataSet(baseModels) + + for _, tModel := range *targetModels { + targetModel := tModel + k := getMetadataKey(targetModel) + if m.in(targetModel) { + if !m.match(targetModel) { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, v3.DiffChanged) + differentItems.Increment() + } + } else { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(nil, targetPkgsMap, k, v3.DiffAdded) + differentItems.Increment() + } + } + for _, k := range m.getUnmatched() { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, nil, k, v3.DiffRemoved) + differentItems.Increment() + } + + return &diffs +} + +func getMetadataKey(metadata v3.VulnerabilityMetadata) storeKey { + return storeKey{metadata.ID, metadata.Namespace, ""} +} diff --git a/pkg/db/v3/store/diff_test.go b/pkg/db/v3/store/diff_test.go new file mode 100644 index 00000000..3d8b5b1c --- /dev/null +++ b/pkg/db/v3/store/diff_test.go @@ -0,0 +1,236 @@ +package store + +import ( + "os" + "sort" + "testing" + + "github.com/stretchr/testify/assert" + + v3 "github.com/anchore/grype-db/pkg/db/v3" +) + +func Test_GetAllVulnerabilities(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + //WHEN + result, err := s.GetAllVulnerabilities() + + //THEN + assert.NotNil(t, result) + assert.NoError(t, err) +} + +func Test_GetAllVulnerabilityMetadata(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + //WHEN + result, err := s.GetAllVulnerabilityMetadata() + + //THEN + assert.NotNil(t, result) + assert.NoError(t, err) +} + +func Test_Diff_Vulnerabilities(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + defer os.Remove(dbTempFile) + + s2, 
err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v3.Vulnerability{ + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.UnknownFixState, + }, + }, + } + targetVulns := []v3.Vulnerability{ + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:nomad", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:nomad:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.WontFixState, + }, + }, + } + expectedDiffs := []v3.Diff{ + { + Reason: v3.DiffChanged, + ID: "CVE-123-4567", + Namespace: "github:python", + Packages: []string{"pypi:requests"}, + }, + { + Reason: v3.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{"npm:axios"}, + }, + { + Reason: v3.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:go", + Packages: []string{"hashicorp:nomad"}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerability(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerability(vuln) + } + + //WHEN + result, err := s1.DiffStore(s2) + sort.SliceStable(*result, func(i, j int) bool { + return (*result)[i].ID < (*result)[j].ID + }) + + //THEN + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} + +func Test_Diff_Metadata(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + + s2, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v3.VulnerabilityMetadata{ + { + Namespace: "github:python", + ID: "CVE-123-4567", + DataSource: "nvd", + }, + { + Namespace: "github:python", + ID: "CVE-123-4567", + DataSource: "nvd", + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + DataSource: "nvd", + }, + } + targetVulns := []v3.VulnerabilityMetadata{ + { + Namespace: "github:go", + ID: "GHSA-....-....", + DataSource: "nvd", + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + DataSource: "vulndb", + }, + } + expectedDiffs := []v3.Diff{ + { + Reason: v3.DiffRemoved, + ID: "CVE-123-4567", + Namespace: "github:python", + Packages: []string{}, + }, + { + Reason: v3.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{}, + }, + { + Reason: v3.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:go", + Packages: []string{}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerabilityMetadata(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerabilityMetadata(vuln) + } + + //WHEN + result, err := s1.DiffStore(s2) + + //THEN + sort.SliceStable(*result, func(i, j int) bool { + return 
(*result)[i].ID < (*result)[j].ID + }) + + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} diff --git a/pkg/db/v3/store/model/id.go b/pkg/db/v3/store/model/id.go new file mode 100644 index 00000000..73a16adf --- /dev/null +++ b/pkg/db/v3/store/model/id.go @@ -0,0 +1,40 @@ +package model + +import ( + "fmt" + "time" + + v3 "github.com/anchore/grype-db/pkg/db/v3" +) + +const ( + IDTableName = "id" +) + +type IDModel struct { + BuildTimestamp string `gorm:"column:build_timestamp"` + SchemaVersion int `gorm:"column:schema_version"` +} + +func NewIDModel(id v3.ID) IDModel { + return IDModel{ + BuildTimestamp: id.BuildTimestamp.Format(time.RFC3339Nano), + SchemaVersion: id.SchemaVersion, + } +} + +func (IDModel) TableName() string { + return IDTableName +} + +func (m *IDModel) Inflate() (v3.ID, error) { + buildTime, err := time.Parse(time.RFC3339Nano, m.BuildTimestamp) + if err != nil { + return v3.ID{}, fmt.Errorf("unable to parse build timestamp (%+v): %w", m.BuildTimestamp, err) + } + + return v3.ID{ + BuildTimestamp: buildTime, + SchemaVersion: m.SchemaVersion, + }, nil +} diff --git a/pkg/db/v3/store/model/vulnerability.go b/pkg/db/v3/store/model/vulnerability.go new file mode 100644 index 00000000..2cdd86b2 --- /dev/null +++ b/pkg/db/v3/store/model/vulnerability.go @@ -0,0 +1,115 @@ +package model + +import ( + "encoding/json" + "fmt" + + v3 "github.com/anchore/grype-db/pkg/db/v3" +) + +const ( + VulnerabilityTableName = "vulnerability" + GetVulnerabilityIndexName = "get_vulnerability_index" +) + +// VulnerabilityModel is a struct used to serialize db.Vulnerability information into a sqlite3 DB. +type VulnerabilityModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id"` + PackageName string `gorm:"column:package_name; index:get_vulnerability_index"` + Namespace string `gorm:"column:namespace; index:get_vulnerability_index"` + VersionConstraint string `gorm:"column:version_constraint"` + VersionFormat string `gorm:"column:version_format"` + CPEs string `gorm:"column:cpes"` + RelatedVulnerabilities string `gorm:"column:related_vulnerabilities"` + FixedInVersions string `gorm:"column:fixed_in_versions"` + FixState string `gorm:"column:fix_state"` + Advisories string `gorm:"column:advisories"` +} + +// NewVulnerabilityModel generates a new model from a db.Vulnerability struct. +func NewVulnerabilityModel(vulnerability v3.Vulnerability) VulnerabilityModel { + cpes, err := json.Marshal(vulnerability.CPEs) + if err != nil { + // TODO: just no + panic(err) + } + + related, err := json.Marshal(vulnerability.RelatedVulnerabilities) + if err != nil { + // TODO: just no + panic(err) + } + + advisories, err := json.Marshal(vulnerability.Advisories) + if err != nil { + // TODO: just no + panic(err) + } + + fixedInVersions, err := json.Marshal(vulnerability.Fix.Versions) + if err != nil { + // TODO: just no + panic(err) + } + + return VulnerabilityModel{ + ID: vulnerability.ID, + PackageName: vulnerability.PackageName, + Namespace: vulnerability.Namespace, + VersionConstraint: vulnerability.VersionConstraint, + VersionFormat: vulnerability.VersionFormat, + FixedInVersions: string(fixedInVersions), + FixState: string(vulnerability.Fix.State), + Advisories: string(advisories), + CPEs: string(cpes), + RelatedVulnerabilities: string(related), + } +} + +// TableName returns the table which all db.Vulnerability model instances are stored into. 
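As an illustration of the ID model round trip above (the timestamp is arbitrary; assumes the model, v3, time, and fmt packages are imported):

    m := model.NewIDModel(v3.NewID(time.Now()))
    id, err := m.Inflate()             // parses the stored RFC3339Nano timestamp back into a v3.ID
    fmt.Println(id.SchemaVersion, err) // 3 <nil>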
+func (VulnerabilityModel) TableName() string { + return VulnerabilityTableName +} + +// Inflate generates a db.Vulnerability object from the serialized model instance. +func (m *VulnerabilityModel) Inflate() (v3.Vulnerability, error) { + var cpes []string + err := json.Unmarshal([]byte(m.CPEs), &cpes) + if err != nil { + return v3.Vulnerability{}, fmt.Errorf("unable to unmarshal CPEs (%+v): %w", m.CPEs, err) + } + + var related []v3.VulnerabilityReference + err = json.Unmarshal([]byte(m.RelatedVulnerabilities), &related) + if err != nil { + return v3.Vulnerability{}, fmt.Errorf("unable to unmarshal related vulnerabilities (%+v): %w", m.RelatedVulnerabilities, err) + } + + var advisories []v3.Advisory + err = json.Unmarshal([]byte(m.Advisories), &advisories) + if err != nil { + return v3.Vulnerability{}, fmt.Errorf("unable to unmarshal advisories (%+v): %w", m.Advisories, err) + } + + var versions []string + err = json.Unmarshal([]byte(m.FixedInVersions), &versions) + if err != nil { + return v3.Vulnerability{}, fmt.Errorf("unable to unmarshal versions (%+v): %w", m.FixedInVersions, err) + } + + return v3.Vulnerability{ + ID: m.ID, + PackageName: m.PackageName, + Namespace: m.Namespace, + VersionConstraint: m.VersionConstraint, + VersionFormat: m.VersionFormat, + CPEs: cpes, + RelatedVulnerabilities: related, + Fix: v3.Fix{ + Versions: versions, + State: v3.FixState(m.FixState), + }, + Advisories: advisories, + }, nil +} diff --git a/pkg/db/v3/store/model/vulnerability_metadata.go b/pkg/db/v3/store/model/vulnerability_metadata.go new file mode 100644 index 00000000..a793a1d6 --- /dev/null +++ b/pkg/db/v3/store/model/vulnerability_metadata.go @@ -0,0 +1,87 @@ +package model + +import ( + "encoding/json" + "fmt" + + v3 "github.com/anchore/grype-db/pkg/db/v3" +) + +const ( + VulnerabilityMetadataTableName = "vulnerability_metadata" +) + +// VulnerabilityMetadataModel is a struct used to serialize db.VulnerabilityMetadata information into a sqlite3 DB. +type VulnerabilityMetadataModel struct { + ID string `gorm:"primary_key; column:id;"` + Namespace string `gorm:"primary_key; column:namespace;"` + DataSource string `gorm:"column:data_source"` + RecordSource string `gorm:"column:record_source"` + Severity string `gorm:"column:severity"` + URLs string `gorm:"column:urls"` + Description string `gorm:"column:description"` + Cvss string `gorm:"column:cvss"` +} + +// NewVulnerabilityMetadataModel generates a new model from a db.VulnerabilityMetadata struct. +func NewVulnerabilityMetadataModel(metadata v3.VulnerabilityMetadata) VulnerabilityMetadataModel { + links, err := json.Marshal(metadata.URLs) + if err != nil { + // TODO: just no + panic(err) + } + + if metadata.Cvss == nil { + metadata.Cvss = make([]v3.Cvss, 0) + } + var cvssStr string + cvss, err := json.Marshal(metadata.Cvss) + if err != nil { + // TODO: just no + panic(err) + } + + cvssStr = string(cvss) + + return VulnerabilityMetadataModel{ + ID: metadata.ID, + Namespace: metadata.Namespace, + DataSource: metadata.DataSource, + RecordSource: metadata.RecordSource, + Severity: metadata.Severity, + URLs: string(links), + Description: metadata.Description, + Cvss: cvssStr, + } +} + +// TableName returns the table which all db.VulnerabilityMetadata model instances are stored into. +func (VulnerabilityMetadataModel) TableName() string { + return VulnerabilityMetadataTableName +} + +// Inflate generates a db.VulnerabilityMetadataModel object from the serialized model instance. 
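The metadata model follows the same serialize/inflate pattern; a sketch with placeholder values:

    in := v3.VulnerabilityMetadata{
        ID:        "CVE-0000-0000",
        Namespace: "nvd",
        Severity:  "High",
        URLs:      []string{"https://example.org/advisory"},
    }
    m := model.NewVulnerabilityMetadataModel(in) // URLs and Cvss are persisted as JSON-encoded strings
    out, err := m.Inflate()                      // decodes them back into a v3.VulnerabilityMetadata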
+func (m *VulnerabilityMetadataModel) Inflate() (v3.VulnerabilityMetadata, error) { + var links []string + var cvss []v3.Cvss + + if err := json.Unmarshal([]byte(m.URLs), &links); err != nil { + return v3.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal URLs (%+v): %w", m.URLs, err) + } + + err := json.Unmarshal([]byte(m.Cvss), &cvss) + if err != nil { + return v3.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvss data (%+v): %w", m.Cvss, err) + } + + return v3.VulnerabilityMetadata{ + ID: m.ID, + Namespace: m.Namespace, + DataSource: m.DataSource, + RecordSource: m.RecordSource, + Severity: m.Severity, + URLs: links, + Description: m.Description, + Cvss: cvss, + }, nil +} diff --git a/pkg/db/v3/store/store.go b/pkg/db/v3/store/store.go new file mode 100644 index 00000000..4225eb85 --- /dev/null +++ b/pkg/db/v3/store/store.go @@ -0,0 +1,308 @@ +package store + +import ( + "fmt" + "sort" + + _ "github.com/glebarez/sqlite" // provide the sqlite dialect to gorm via import + "github.com/go-test/deep" + "github.com/scylladb/go-set/strset" + "gorm.io/gorm" + + "github.com/anchore/grype-db/pkg/db/internal/gormadapter" + v3 "github.com/anchore/grype-db/pkg/db/v3" + "github.com/anchore/grype-db/pkg/db/v3/store/model" +) + +// store holds an instance of the database connection +type store struct { + db *gorm.DB +} + +// New creates a new instance of the store. +func New(dbFilePath string, overwrite bool) (v3.Store, error) { + db, err := gormadapter.Open(dbFilePath, overwrite) + if err != nil { + return nil, err + } + + if overwrite { + // TODO: automigrate could write to the database, + // we should be validating the database is the correct database based on the version in the ID table before + // automigrating + if err := db.AutoMigrate(&model.IDModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate ID model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMetadataModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Metadata model: %w", err) + } + } + + return &store{ + db: db, + }, nil +} + +// GetID fetches the metadata about the databases schema version and build time. +func (s *store) GetID() (*v3.ID, error) { + var models []model.IDModel + result := s.db.Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple DB IDs") + case len(models) == 1: + id, err := models[0].Inflate() + if err != nil { + return nil, err + } + return &id, nil + } + + return nil, nil +} + +// SetID stores the databases schema version and build time. +func (s *store) SetID(id v3.ID) error { + var ids []model.IDModel + + // replace the existing ID with the given one + s.db.Find(&ids).Delete(&ids) + + m := model.NewIDModel(id) + result := s.db.Create(&m) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add id (%d rows affected)", result.RowsAffected) + } + + return result.Error +} + +// GetVulnerability retrieves one or more vulnerabilities given a namespace and package name. +func (s *store) GetVulnerability(namespace, packageName string) ([]v3.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? 
AND package_name = ?", namespace, packageName).Find(&models) + + var vulnerabilities = make([]v3.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// AddVulnerability saves one or more vulnerabilities into the sqlite3 store. +func (s *store) AddVulnerability(vulnerabilities ...v3.Vulnerability) error { + for _, vulnerability := range vulnerabilities { + m := model.NewVulnerabilityModel(vulnerability) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability (%d rows affected)", result.RowsAffected) + } + } + return nil +} + +// GetVulnerabilityMetadata retrieves metadata for the given vulnerability ID relative to a specific record source. +func (s *store) GetVulnerabilityMetadata(id, namespace string) (*v3.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + + result := s.db.Where(&model.VulnerabilityMetadataModel{ID: id, Namespace: namespace}).Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple metadatas for single ID=%q Namespace=%q", id, namespace) + case len(models) == 1: + metadata, err := models[0].Inflate() + if err != nil { + return nil, err + } + + return &metadata, nil + } + + return nil, nil +} + +// AddVulnerabilityMetadata stores one or more vulnerability metadata models into the sqlite DB. +// +//nolint:gocognit +func (s *store) AddVulnerabilityMetadata(metadata ...v3.VulnerabilityMetadata) error { + for _, m := range metadata { + existing, err := s.GetVulnerabilityMetadata(m.ID, m.Namespace) + if err != nil { + return fmt.Errorf("failed to verify existing entry: %w", err) + } + + if existing != nil { + // merge with the existing entry + + switch { + case existing.Severity != m.Severity: + return fmt.Errorf("existing metadata has mismatched severity (%q!=%q)", existing.Severity, m.Severity) + case existing.Description != m.Description: + return fmt.Errorf("existing metadata has mismatched description (%q!=%q)", existing.Description, m.Description) + } + + incoming: + // go through all incoming CVSS and see if they are already stored. + // If they exist already in the database then skip adding them, + // preventing a duplicate + for _, incomingCvss := range m.Cvss { + for _, existingCvss := range existing.Cvss { + if len(deep.Equal(incomingCvss, existingCvss)) == 0 { + // duplicate found, so incoming CVSS shouldn't get added + continue incoming + } + } + // a duplicate CVSS entry wasn't found, so append the incoming CVSS + existing.Cvss = append(existing.Cvss, incomingCvss) + } + + links := strset.New(existing.URLs...) 
+ for _, l := range m.URLs { + links.Add(l) + } + + existing.URLs = links.List() + sort.Strings(existing.URLs) + + newModel := model.NewVulnerabilityMetadataModel(*existing) + result := s.db.Save(&newModel) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to merge vulnerability metadata (%d rows affected)", result.RowsAffected) + } + + if result.Error != nil { + return result.Error + } + } else { + // this is a new entry + newModel := model.NewVulnerabilityMetadataModel(m) + result := s.db.Create(&newModel) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability metadata (%d rows affected)", result.RowsAffected) + } + } + } + return nil +} + +// GetAllVulnerabilities gets all vulnerabilities in the database +func (s *store) GetAllVulnerabilities() (*[]v3.Vulnerability, error) { + var models []model.VulnerabilityModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + vulns := make([]v3.Vulnerability, len(models)) + for idx, m := range models { + vuln, err := m.Inflate() + if err != nil { + return nil, err + } + vulns[idx] = vuln + } + return &vulns, nil +} + +// GetAllVulnerabilityMetadata gets all vulnerability metadata in the database +func (s *store) GetAllVulnerabilityMetadata() (*[]v3.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + metadata := make([]v3.VulnerabilityMetadata, len(models)) + for idx, m := range models { + data, err := m.Inflate() + if err != nil { + return nil, err + } + metadata[idx] = data + } + return &metadata, nil +} + +// DiffStore creates a diff between the current sql database and the given store +func (s *store) DiffStore(targetStore v3.StoreReader) (*[]v3.Diff, error) { + // 7 stages, one for each step of the diff process (stages) + rowsProgress, diffItems, stager := trackDiff(7) + + stager.Current = "reading target vulnerabilities" + targetVulns, err := targetStore.GetAllVulnerabilities() + rowsProgress.Increment() + if err != nil { + return nil, err + } + + stager.Current = "reading base vulnerabilities" + baseVulns, err := s.GetAllVulnerabilities() + rowsProgress.Increment() + if err != nil { + return nil, err + } + + stager.Current = "preparing" + baseVulnPkgMap := buildVulnerabilityPkgsMap(baseVulns) + targetVulnPkgMap := buildVulnerabilityPkgsMap(targetVulns) + + stager.Current = "comparing vulnerabilities" + allDiffsMap := diffVulnerabilities(baseVulns, targetVulns, baseVulnPkgMap, targetVulnPkgMap, diffItems) + + stager.Current = "reading base metadata" + baseMetadata, err := s.GetAllVulnerabilityMetadata() + if err != nil { + return nil, err + } + rowsProgress.Increment() + + stager.Current = "reading target metadata" + targetMetadata, err := targetStore.GetAllVulnerabilityMetadata() + if err != nil { + return nil, err + } + rowsProgress.Increment() + + stager.Current = "comparing metadata" + metaDiffsMap := diffVulnerabilityMetadata(baseMetadata, targetMetadata, baseVulnPkgMap, targetVulnPkgMap, diffItems) + for k, diff := range *metaDiffsMap { + (*allDiffsMap)[k] = diff + } + allDiffs := []v3.Diff{} + for _, diff := range *allDiffsMap { + allDiffs = append(allDiffs, *diff) + } + + rowsProgress.SetCompleted() + diffItems.SetCompleted() + + return &allDiffs, nil +} + +func (s *store) Close() { + s.db.Exec("VACUUM;") +} diff --git a/pkg/db/v3/store/store_test.go b/pkg/db/v3/store/store_test.go new file 
mode 100644 index 00000000..9a8c1a4e --- /dev/null +++ b/pkg/db/v3/store/store_test.go @@ -0,0 +1,1174 @@ +package store + +import ( + "encoding/json" + "sort" + "testing" + "time" + + "github.com/go-test/deep" + "github.com/stretchr/testify/assert" + + v3 "github.com/anchore/grype-db/pkg/db/v3" + "github.com/anchore/grype-db/pkg/db/v3/store/model" +) + +func assertIDReader(t *testing.T, reader v3.IDReader, expected v3.ID) { + t.Helper() + if actual, err := reader.GetID(); err != nil { + t.Fatalf("failed to get ID: %+v", err) + } else { + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } +} + +func TestStore_GetID_SetID(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + expected := v3.ID{ + BuildTimestamp: time.Now().UTC(), + SchemaVersion: 2, + } + + if err = s.SetID(expected); err != nil { + t.Fatalf("failed to set ID: %+v", err) + } + + assertIDReader(t, s, expected) + +} + +func assertVulnerabilityReader(t *testing.T, reader v3.VulnerabilityStoreReader, namespace, name string, expected []v3.Vulnerability) { + if actual, err := reader.GetVulnerability(namespace, name); err != nil { + t.Fatalf("failed to get Vulnerability: %+v", err) + } else { + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulns: %d", len(actual)) + } + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerability_SetVulnerability(t *testing.T) { + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := []v3.Vulnerability{ + { + ID: "my-cve-33333", + PackageName: "package-name-2", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v3.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v3.Fix{ + Versions: []string{"2.0.1"}, + State: v3.FixedState, + }, + }, + { + ID: "my-other-cve-33333", + PackageName: "package-name-3", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v3.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v3.Fix{ + State: v3.NotFixedState, + }, + }, + } + + expected := []v3.Vulnerability{ + { + ID: "my-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v3.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v3.Fix{ + Versions: []string{"1.0.1"}, + State: v3.FixedState, + }, + }, + { + ID: "my-other-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v3.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v3.Fix{ + Versions: []string{"4.0.5"}, + State: v3.FixedState, + }, + }, + } + + total := 
append(expected, extra...) + + if err = s.AddVulnerability(total...); err != nil { + t.Fatalf("failed to set Vulnerability: %+v", err) + } + + var allEntries []model.VulnerabilityModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityReader(t, s, expected[0].Namespace, expected[0].PackageName, expected) + +} + +func assertVulnerabilityMetadataReader(t *testing.T, reader v3.VulnerabilityMetadataStoreReader, id, namespace string, expected v3.VulnerabilityMetadata) { + if actual, err := reader.GetVulnerabilityMetadata(id, namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else if actual == nil { + t.Fatalf("no metadata returned for id=%q namespace=%q", id, namespace) + } else { + sortMetadataCvss(actual.Cvss) + sortMetadataCvss(expected.Cvss) + + // make sure they both have the same number of CVSS entries - preventing a panic on later assertions + assert.Len(t, expected.Cvss, len(actual.Cvss)) + for idx, actualCvss := range actual.Cvss { + assert.Equal(t, actualCvss.Vector, expected.Cvss[idx].Vector) + assert.Equal(t, actualCvss.Version, expected.Cvss[idx].Version) + assert.Equal(t, actualCvss.Metrics, expected.Cvss[idx].Metrics) + + actualVendor, err := json.Marshal(actualCvss.VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + expectedVendor, err := json.Marshal(expected.Cvss[idx].VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + assert.Equal(t, string(actualVendor), string(expectedVendor)) + + } + + // nil the Cvss field because it is an interface - verification of Cvss + // has already happened at this point + expected.Cvss = nil + actual.Cvss = nil + assert.Equal(t, &expected, actual) + } + +} + +func sortMetadataCvss(cvss []v3.Cvss) { + sort.Slice(cvss, func(i, j int) bool { + // first, sort by Vector + if cvss[i].Vector > cvss[j].Vector { + return true + } + if cvss[i].Vector < cvss[j].Vector { + return false + } + // then try to sort by BaseScore if Vector is the same + return cvss[i].Metrics.BaseScore < cvss[j].Metrics.BaseScore + }) +} + +// CustomMetadata is effectively a noop, its values aren't meaningful and are +// mostly useful to ensure that any type can be stored and then retrieved for +// assertion in these test cases where custom vendor CVSS scores are used +type CustomMetadata struct { + SuperScore string + Vendor string +} + +func TestStore_GetVulnerabilityMetadata_SetVulnerabilityMetadata(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + total := []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + VendorMetadata: CustomMetadata{ + Vendor: "redhat", + SuperScore: "1000", + }, + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 1.1, + 2.2, + 3.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NOT", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.3, + 2.1, + 3.2, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NICE", + VendorMetadata: nil, + }, + }, + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { 
+ Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + } + + if err = s.AddVulnerabilityMetadata(total...); err != nil { + t.Fatalf("failed to set metadata: %+v", err) + } + + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + +} + +func TestStore_MergeVulnerabilityMetadata(t *testing.T) { + tests := []struct { + name string + add []v3.VulnerabilityMetadata + expected v3.VulnerabilityMetadata + err bool + }{ + { + name: "go-case", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "merge-links", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://google.com"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://yahoo.com"}, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re", "https://google.com", "https://yahoo.com"}, + Cvss: []v3.Cvss{}, + }, + }, + { + name: "bad-severity", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "meh, push that for next tuesday...", + URLs: []string{"https://redhat.com"}, + }, + }, + err: true, + }, + { + name: "mismatch-description", + err: true, + add: []v3.VulnerabilityMetadata{ + { + + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: 
"my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + }, + { + name: "mismatch-cvss2", + err: false, + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + }, + }, + }, + { + name: "mismatch-cvss3", + err: false, + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), 
+ Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + // add each metadata in order + var theErr error + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + theErr = err + break + } + } + + if test.err && theErr == nil { + t.Fatalf("expected error but did not get one") + } else if !test.err && theErr != nil { + t.Fatalf("expected no error but got one: %+v", theErr) + } else if test.err && theErr != nil { + // test pass... + return + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + // get the resulting metadata object + if actual, err := s.GetVulnerabilityMetadata(test.expected.ID, test.expected.Namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + diffs := deep.Equal(&test.expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + }) + } +} + +func TestCvssScoresInMetadata(t *testing.T) { + tests := []struct { + name string + add []v3.VulnerabilityMetadata + expected v3.VulnerabilityMetadata + }{ + { + name: "append-cvss", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "append-vendor-cvss", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "2.0", + Metrics: v3.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + { + name: "avoids-duplicate-cvss", + add: []v3.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v3.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v3.Cvss{ + { + Version: "3.0", + Metrics: v3.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create s: %+v", err) + } + + // add each metadata in order + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + t.Fatalf("unable to s vulnerability metadata: %+v", err) + } + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityMetadataReader(t, s, test.expected.ID, test.expected.Namespace, test.expected) + }) + } +} + +func Test_DiffStore(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + + s2, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v3.Vulnerability{ + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.UnknownFixState, + }, + }, + { 
+ Namespace: "nuget", + ID: "GHSA-****-******", + PackageName: "nuget:net", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.UnknownFixState, + }, + }, + { + Namespace: "hex", + ID: "GHSA-^^^^-^^^^^^", + PackageName: "hex:esbuild", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:hex:esbuild:*:*:*:*:*:*"}, + }, + } + baseMetadata := []v3.VulnerabilityMetadata{ + { + Namespace: "nuget", + ID: "GHSA-****-******", + DataSource: "nvd", + }, + } + targetVulns := []v3.Vulnerability{ + { + Namespace: "github:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:nomad", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:nomad:*:*:*:*:*"}, + }, + { + Namespace: "github:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:n", + VersionConstraint: "< 2.0 >= 1.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:n:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.WontFixState, + }, + }, + { + Namespace: "nuget", + ID: "GHSA-****-******", + PackageName: "nuget:net", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, + Fix: v3.Fix{ + State: v3.UnknownFixState, + }, + }, + } + expectedDiffs := []v3.Diff{ + { + Reason: v3.DiffChanged, + ID: "CVE-123-4567", + Namespace: "github:python", + Packages: []string{"pypi:requests"}, + }, + { + Reason: v3.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{"npm:axios"}, + }, + { + Reason: v3.DiffRemoved, + ID: "GHSA-****-******", + Namespace: "nuget", + Packages: []string{"nuget:net"}, + }, + { + Reason: v3.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:go", + Packages: []string{"hashicorp:n", "hashicorp:nomad"}, + }, + { + Reason: v3.DiffRemoved, + ID: "GHSA-^^^^-^^^^^^", + Namespace: "hex", + Packages: []string{"hex:esbuild"}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerability(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerability(vuln) + } + for _, meta := range baseMetadata { + s1.AddVulnerabilityMetadata(meta) + } + + //WHEN + result, err := s1.DiffStore(s2) + + //THEN + sort.SliceStable(*result, func(i, j int) bool { + return (*result)[i].ID < (*result)[j].ID + }) + for i := range *result { + sort.Strings((*result)[i].Packages) + } + + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} diff --git a/pkg/db/v3/vulnerability.go b/pkg/db/v3/vulnerability.go new file mode 100644 index 00000000..19f9e4a1 --- /dev/null +++ b/pkg/db/v3/vulnerability.go @@ -0,0 +1,96 @@ +package v3 + +import ( + "sort" + "strings" +) + +// Vulnerability represents the minimum data fields necessary to perform package-to-vulnerability matching. This can represent a CVE, 3rd party advisory, or any source that relates back to a CVE. 
+type Vulnerability struct { + ID string // The identifier of the vulnerability or advisory + PackageName string // The name of the package that is vulnerable + Namespace string // The ecosystem where the package resides + VersionConstraint string // The version range which the given package is vulnerable + VersionFormat string // The format which all version fields should be interpreted as + CPEs []string // The CPEs which are considered vulnerable + RelatedVulnerabilities []VulnerabilityReference // Other Vulnerabilities that are related to this one (e.g. GHSA relate to CVEs, or how distro CVE relates to NVD record) + Fix Fix // All information about fixed versions + Advisories []Advisory // Any vendor advisories about fixes or other notifications about this vulnerability +} + +type VulnerabilityReference struct { + ID string + Namespace string +} + +//nolint:gocognit +func (v *Vulnerability) Equal(vv Vulnerability) bool { + equal := v.ID == vv.ID && + v.PackageName == vv.PackageName && + v.Namespace == vv.Namespace && + v.VersionConstraint == vv.VersionConstraint && + v.VersionFormat == vv.VersionFormat && + len(v.CPEs) == len(vv.CPEs) && + len(v.RelatedVulnerabilities) == len(vv.RelatedVulnerabilities) && + len(v.Advisories) == len(vv.Advisories) && + v.Fix.State == vv.Fix.State && + len(v.Fix.Versions) == len(vv.Fix.Versions) + + if !equal { + return false + } + + sort.Strings(v.CPEs) + sort.Strings(vv.CPEs) + for idx, cpe := range v.CPEs { + if cpe != vv.CPEs[idx] { + return false + } + } + + sortedBaseRelVulns, sortedTargetRelVulns := sortRelatedVulns(v.RelatedVulnerabilities), sortRelatedVulns(vv.RelatedVulnerabilities) + for idx, item := range sortedBaseRelVulns { + if item != sortedTargetRelVulns[idx] { + return false + } + } + sortedBaseAdvisories, sortedTargetAdvisories := sortAdvisories(v.Advisories), sortAdvisories(vv.Advisories) + for idx, item := range sortedBaseAdvisories { + if item != sortedTargetAdvisories[idx] { + return false + } + } + sort.Strings(v.Fix.Versions) + sort.Strings(vv.Fix.Versions) + for idx, item := range v.Fix.Versions { + if item != vv.Fix.Versions[idx] { + return false + } + } + + return true +} + +func sortRelatedVulns(vulns []VulnerabilityReference) []VulnerabilityReference { + sort.SliceStable(vulns, func(i, j int) bool { + b1, b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(vulns[i].ID) + b1.WriteString(vulns[i].Namespace) + b2.WriteString(vulns[j].ID) + b2.WriteString(vulns[j].Namespace) + return b1.String() < b2.String() + }) + return vulns +} + +func sortAdvisories(advisories []Advisory) []Advisory { + sort.SliceStable(advisories, func(i, j int) bool { + b1, b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(advisories[i].ID) + b1.WriteString(advisories[i].Link) + b2.WriteString(advisories[j].ID) + b2.WriteString(advisories[j].Link) + return b1.String() < b2.String() + }) + return advisories +} diff --git a/pkg/db/v3/vulnerability_metadata.go b/pkg/db/v3/vulnerability_metadata.go new file mode 100644 index 00000000..b30f879c --- /dev/null +++ b/pkg/db/v3/vulnerability_metadata.go @@ -0,0 +1,76 @@ +package v3 + +import "reflect" + +// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching. 
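Illustrative aside (not part of the patch): the Equal method above sorts CPEs, related vulnerabilities, advisories, and fix versions before comparing, so element order does not affect equality. A minimal sketch of that behavior, with hypothetical CPE strings and imports elided:

    a := v3.Vulnerability{
        ID:          "CVE-2021-0001",
        PackageName: "requests",
        Namespace:   "github:python",
        CPEs:        []string{"cpe:2.3:a:x:requests:*:*:*:*:*:*:*:*", "cpe:2.3:a:y:requests:*:*:*:*:*:*:*:*"},
    }
    b := a // copy of the struct; give it the same CPEs in a different order
    b.CPEs = []string{"cpe:2.3:a:y:requests:*:*:*:*:*:*:*:*", "cpe:2.3:a:x:requests:*:*:*:*:*:*:*:*"}
    fmt.Println(a.Equal(b)) // true: both CPE slices are sorted before the element-wise comparison
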
+type VulnerabilityMetadata struct {
+	ID           string   // The identifier of the vulnerability or advisory
+	Namespace    string   // The namespace within which this entry is valid
+	DataSource   string   // A URL where the data was sourced from
+	RecordSource string   // The source of the vulnerability information (relative to the immediate upstream in the enterprise feedgroup)
+	Severity     string   // How severe the vulnerability is (valid values are currently defined by the upstream sources)
+	URLs         []string // URLs to get more information about the vulnerability or advisory
+	Description  string   // Description of the vulnerability
+	Cvss         []Cvss   // Common Vulnerability Scoring System values
+}
+
+// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability.
+type Cvss struct {
+	// VendorMetadata captures non-standard CVSS fields that vendors sometimes
+	// include when providing CVSS information. This vendor-specific metadata type
+	// allows that data to be captured and persisted into the database.
+	VendorMetadata interface{}
+	Metrics        CvssMetrics
+	Vector         string // A textual representation of the metric values used to determine the score
+	Version        string // The version of the CVSS spec, for example 2.0, 3.0, or 3.1
+}
+
+// CvssMetrics are the quantitative values that make up a CVSS score.
+type CvssMetrics struct {
+	// BaseScore ranges from 0 to 10 and defines qualities intrinsic to the severity of a vulnerability.
+	BaseScore float64
+	// ExploitabilityScore is a pointer to avoid having a 0 value by default.
+	// It is an indicator of how easy it may be for an attacker to exploit
+	// the vulnerability.
+	ExploitabilityScore *float64
+	// ImpactScore represents the effects of an exploited vulnerability
+	// relative to compromise in confidentiality, integrity, and availability.
+	// It is an optional parameter, which is why it is a pointer instead
+	// of a regular field.
+	ImpactScore *float64
+}
+
+func NewCvssMetrics(baseScore, exploitabilityScore, impactScore float64) CvssMetrics {
+	return CvssMetrics{
+		BaseScore:           baseScore,
+		ExploitabilityScore: &exploitabilityScore,
+		ImpactScore:         &impactScore,
+	}
+}
+
+func (v *VulnerabilityMetadata) Equal(vv VulnerabilityMetadata) bool {
+	equal := v.ID == vv.ID &&
+		v.Namespace == vv.Namespace &&
+		v.DataSource == vv.DataSource &&
+		v.RecordSource == vv.RecordSource &&
+		v.Severity == vv.Severity &&
+		v.Description == vv.Description &&
+		len(v.URLs) == len(vv.URLs) &&
+		len(v.Cvss) == len(vv.Cvss)
+
+	if !equal {
+		return false
+	}
+	for idx, u := range v.URLs {
+		if u != vv.URLs[idx] {
+			return false
+		}
+	}
+	for idx, item := range v.Cvss {
+		if !reflect.DeepEqual(item, vv.Cvss[idx]) {
+			return false
+		}
+	}
+
+	return true
+}
diff --git a/pkg/db/v3/vulnerability_metadata_store.go b/pkg/db/v3/vulnerability_metadata_store.go
new file mode 100644
index 00000000..83ba195f
--- /dev/null
+++ b/pkg/db/v3/vulnerability_metadata_store.go
@@ -0,0 +1,17 @@
+package v3
+
+type SerializedVulnerabilityMetadata = interface{}
+
+type VulnerabilityMetadataStore interface {
+	VulnerabilityMetadataStoreReader
+	VulnerabilityMetadataStoreWriter
+}
+
+type VulnerabilityMetadataStoreReader interface {
+	GetVulnerabilityMetadata(id, namespace string) (*VulnerabilityMetadata, error)
+	GetAllVulnerabilityMetadata() (*[]VulnerabilityMetadata, error)
+}
+
+type VulnerabilityMetadataStoreWriter interface {
+	AddVulnerabilityMetadata(metadata ...VulnerabilityMetadata) error
+}
diff --git a/pkg/db/v3/vulnerability_store.go b/pkg/db/v3/vulnerability_store.go
new file mode 100644
index 00000000..6abdd5da
--- /dev/null
+++ b/pkg/db/v3/vulnerability_store.go
@@ -0,0 +1,21 @@
+package v3
+
+const VulnerabilityStoreFileName = "vulnerability.db"
+
+type SerializedVulnerabilities = interface{}
+
+type VulnerabilityStore interface {
+	VulnerabilityStoreReader
+	VulnerabilityStoreWriter
+}
+
+type VulnerabilityStoreReader interface {
+	// GetVulnerability retrieves vulnerabilities associated with a namespace and a package name
+	GetVulnerability(namespace, name string) ([]Vulnerability, error)
+	GetAllVulnerabilities() (*[]Vulnerability, error)
+}
+
+type VulnerabilityStoreWriter interface {
+	// AddVulnerability inserts a new record of a vulnerability into the store
+	AddVulnerability(vulnerabilities ...Vulnerability) error
+}
diff --git a/pkg/db/v4/advisory.go b/pkg/db/v4/advisory.go
new file mode 100644
index 00000000..012c04dd
--- /dev/null
+++ b/pkg/db/v4/advisory.go
@@ -0,0 +1,7 @@
+package v4
+
+// Advisory represents published statements regarding a vulnerability (and potentially about its resolution).
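Illustrative aside (not part of the patch): ExploitabilityScore and ImpactScore are pointers so that an absent sub-score can be distinguished from a legitimate 0 value, while NewCvssMetrics is the convenience path when all three values are known. A minimal sketch with hypothetical scores, imports elided:

    full := v3.NewCvssMetrics(7.5, 3.9, 5.9)  // all three sub-scores reported by the source
    partial := v3.CvssMetrics{BaseScore: 7.5} // ExploitabilityScore/ImpactScore remain nil, meaning "not reported" rather than 0
    fmt.Println(*full.ExploitabilityScore, partial.ImpactScore == nil) // 3.9 true
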
+type Advisory struct { + ID string `json:"id"` + Link string `json:"link"` +} diff --git a/pkg/db/v4/diff.go b/pkg/db/v4/diff.go new file mode 100644 index 00000000..86df8738 --- /dev/null +++ b/pkg/db/v4/diff.go @@ -0,0 +1,16 @@ +package v4 + +type DiffReason = string + +const ( + DiffAdded DiffReason = "added" + DiffChanged DiffReason = "changed" + DiffRemoved DiffReason = "removed" +) + +type Diff struct { + Reason DiffReason `json:"reason"` + ID string `json:"id"` + Namespace string `json:"namespace"` + Packages []string `json:"packages"` +} diff --git a/pkg/db/v4/fix.go b/pkg/db/v4/fix.go new file mode 100644 index 00000000..e8dba213 --- /dev/null +++ b/pkg/db/v4/fix.go @@ -0,0 +1,16 @@ +package v4 + +type FixState string + +const ( + UnknownFixState FixState = "unknown" + FixedState FixState = "fixed" + NotFixedState FixState = "not-fixed" + WontFixState FixState = "wont-fix" +) + +// Fix represents all information about known fixes for a stated vulnerability. +type Fix struct { + Versions []string `json:"versions"` // The version(s) which this particular vulnerability was fixed in + State FixState `json:"state"` +} diff --git a/pkg/db/v4/id.go b/pkg/db/v4/id.go new file mode 100644 index 00000000..369c4c66 --- /dev/null +++ b/pkg/db/v4/id.go @@ -0,0 +1,28 @@ +package v4 + +import ( + "time" +) + +// ID represents identifying information for a DB and the data it contains. +type ID struct { + // BuildTimestamp is the timestamp used to define the age of the DB, ideally including the age of the data + // contained in the DB, not just when the DB file was created. + BuildTimestamp time.Time `json:"build_timestamp"` + SchemaVersion int `json:"schema_version"` +} + +type IDReader interface { + GetID() (*ID, error) +} + +type IDWriter interface { + SetID(ID) error +} + +func NewID(age time.Time) ID { + return ID{ + BuildTimestamp: age.UTC(), + SchemaVersion: SchemaVersion, + } +} diff --git a/pkg/db/v4/namespace/cpe/namespace.go b/pkg/db/v4/namespace/cpe/namespace.go new file mode 100644 index 00000000..9f0aa2b7 --- /dev/null +++ b/pkg/db/v4/namespace/cpe/namespace.go @@ -0,0 +1,54 @@ +package cpe + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/stock" +) + +const ID = "cpe" + +type Namespace struct { + provider string + resolver resolver.Resolver +} + +func NewNamespace(provider string) *Namespace { + return &Namespace{ + provider: provider, + resolver: &stock.Resolver{}, + } +} + +func FromString(namespaceStr string) (*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create CPE namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 2 { + return nil, fmt.Errorf("unable to create CPE namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create CPE namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + return NewNamespace(components[0]), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + return fmt.Sprintf("%s:%s", n.provider, ID) +} diff --git a/pkg/db/v4/namespace/cpe/namespace_test.go b/pkg/db/v4/namespace/cpe/namespace_test.go new file mode 100644 index 00000000..e4be6dc1 --- /dev/null +++ b/pkg/db/v4/namespace/cpe/namespace_test.go @@ -0,0 +1,51 @@ 
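Illustrative aside (not part of the patch): a v4 CPE namespace is serialized as the two-component string "<provider>:cpe", which is what FromString above parses and what the test file that follows exercises. A minimal sketch, assuming the cpe package added in this patch:

    ns, err := cpe.FromString("nvd:cpe")
    if err != nil {
        panic(err) // "nvd:cpe" has exactly two components and the expected "cpe" type, so no error is expected
    }
    fmt.Println(ns.Provider(), ns.String()) // nvd nvd:cpe
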
+package cpe + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: "abc.xyz:cpe", + result: NewNamespace("abc.xyz"), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := []struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create CPE namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create CPE namespace from single-component: incorrect number of components", + }, + { + namespaceString: "too:many:components", + errorMessage: "unable to create CPE namespace from too:many:components: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type", + errorMessage: "unable to create CPE namespace from wrong:namespace_type: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v4/namespace/distro/namespace.go b/pkg/db/v4/namespace/distro/namespace.go new file mode 100644 index 00000000..780f3934 --- /dev/null +++ b/pkg/db/v4/namespace/distro/namespace.go @@ -0,0 +1,67 @@ +package distro + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/stock" + "github.com/anchore/grype/grype/distro" +) + +const ID = "distro" + +type Namespace struct { + provider string + distroType distro.Type + version string + resolver resolver.Resolver +} + +func NewNamespace(provider string, distroType distro.Type, version string) *Namespace { + return &Namespace{ + provider: provider, + distroType: distroType, + version: version, + resolver: &stock.Resolver{}, + } +} + +func FromString(namespaceStr string) (*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create distro namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 4 { + return nil, fmt.Errorf("unable to create distro namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create distro namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + return NewNamespace(components[0], distro.Type(components[2]), components[3]), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) DistroType() distro.Type { + return n.distroType +} + +func (n *Namespace) Version() string { + return n.version +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + return fmt.Sprintf("%s:%s:%s:%s", n.provider, ID, n.distroType, n.version) +} diff --git a/pkg/db/v4/namespace/distro/namespace_test.go b/pkg/db/v4/namespace/distro/namespace_test.go new file mode 100644 index 00000000..f916d66b --- /dev/null +++ b/pkg/db/v4/namespace/distro/namespace_test.go @@ -0,0 +1,85 @@ +package distro + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + grypeDistro "github.com/anchore/grype/grype/distro" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: 
"alpine:distro:alpine:3.15", + result: NewNamespace("alpine", grypeDistro.Alpine, "3.15"), + }, + { + namespaceString: "redhat:distro:redhat:8", + result: NewNamespace("redhat", grypeDistro.RedHat, "8"), + }, + { + namespaceString: "abc.xyz:distro:unknown:abcd~~~", + result: NewNamespace("abc.xyz", grypeDistro.Type("unknown"), "abcd~~~"), + }, + { + namespaceString: "msrc:distro:windows:10111", + result: NewNamespace("msrc", grypeDistro.Type("windows"), "10111"), + }, + { + namespaceString: "amazon:distro:amazonlinux:2022", + result: NewNamespace("amazon", grypeDistro.AmazonLinux, "2022"), + }, + { + namespaceString: "amazon:distro:amazonlinux:2", + result: NewNamespace("amazon", grypeDistro.AmazonLinux, "2"), + }, + { + namespaceString: "wolfi:distro:wolfi:rolling", + result: NewNamespace("wolfi", grypeDistro.Wolfi, "rolling"), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := []struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create distro namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create distro namespace from single-component: incorrect number of components", + }, + { + namespaceString: "two:components", + errorMessage: "unable to create distro namespace from two:components: incorrect number of components", + }, + { + namespaceString: "still:not:enough", + errorMessage: "unable to create distro namespace from still:not:enough: incorrect number of components", + }, + { + namespaceString: "too:many:components:a:b", + errorMessage: "unable to create distro namespace from too:many:components:a:b: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type:a:b", + errorMessage: "unable to create distro namespace from wrong:namespace_type:a:b: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v4/namespace/from_string.go b/pkg/db/v4/namespace/from_string.go new file mode 100644 index 00000000..12690000 --- /dev/null +++ b/pkg/db/v4/namespace/from_string.go @@ -0,0 +1,34 @@ +package namespace + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v4/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v4/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v4/namespace/language" +) + +func FromString(namespaceStr string) (Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) < 1 { + return nil, fmt.Errorf("unable to create namespace from %s: incorrect number of components", namespaceStr) + } + + switch components[1] { + case cpe.ID: + return cpe.FromString(namespaceStr) + case distro.ID: + return distro.FromString(namespaceStr) + case language.ID: + return language.FromString(namespaceStr) + default: + return nil, fmt.Errorf("unable to create namespace from %s: unknown type %s", namespaceStr, components[1]) + } +} diff --git a/pkg/db/v4/namespace/from_string_test.go b/pkg/db/v4/namespace/from_string_test.go new file mode 100644 index 00000000..14b0ca07 --- /dev/null +++ b/pkg/db/v4/namespace/from_string_test.go @@ -0,0 +1,50 @@ +package namespace + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + 
"github.com/anchore/grype-db/pkg/db/v4/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v4/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v4/namespace/language" + grypeDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromString(t *testing.T) { + tests := []struct { + namespaceString string + result Namespace + }{ + { + namespaceString: "github:language:python", + result: language.NewNamespace("github", syftPkg.Python, ""), + }, + { + namespaceString: "github:language:python:python", + result: language.NewNamespace("github", syftPkg.Python, syftPkg.PythonPkg), + }, + { + namespaceString: "debian:distro:debian:8", + result: distro.NewNamespace("debian", grypeDistro.Debian, "8"), + }, + { + namespaceString: "unknown:distro:amazonlinux:2022.15", + result: distro.NewNamespace("unknown", grypeDistro.AmazonLinux, "2022.15"), + }, + { + namespaceString: "ns-1:distro:unknowndistro:abcdefg~~~", + result: distro.NewNamespace("ns-1", grypeDistro.Type("unknowndistro"), "abcdefg~~~"), + }, + { + namespaceString: "abc.xyz:cpe", + result: cpe.NewNamespace("abc.xyz"), + }, + } + + for _, test := range tests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } +} diff --git a/pkg/db/v4/namespace/index.go b/pkg/db/v4/namespace/index.go new file mode 100644 index 00000000..c91762fe --- /dev/null +++ b/pkg/db/v4/namespace/index.go @@ -0,0 +1,133 @@ +package namespace + +import ( + "fmt" + "strings" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/pkg/db/v4/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v4/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v4/namespace/language" + grypeDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +type Index struct { + all []Namespace + byLanguage map[syftPkg.Language][]*language.Namespace + byDistroKey map[string][]*distro.Namespace + cpe []*cpe.Namespace +} + +func FromStrings(namespaces []string) (*Index, error) { + all := make([]Namespace, 0) + byLanguage := make(map[syftPkg.Language][]*language.Namespace) + byDistroKey := make(map[string][]*distro.Namespace) + cpeNamespaces := make([]*cpe.Namespace, 0) + + for _, n := range namespaces { + ns, err := FromString(n) + + if err != nil { + log.Warnf("unable to create namespace object from namespace=%s: %+v", n, err) + continue + } + + all = append(all, ns) + + switch nsObj := ns.(type) { + case *language.Namespace: + l := nsObj.Language() + if _, ok := byLanguage[l]; !ok { + byLanguage[l] = make([]*language.Namespace, 0) + } + + byLanguage[l] = append(byLanguage[l], nsObj) + case *distro.Namespace: + distroKey := fmt.Sprintf("%s:%s", nsObj.DistroType(), nsObj.Version()) + if _, ok := byDistroKey[distroKey]; !ok { + byDistroKey[distroKey] = make([]*distro.Namespace, 0) + } + + byDistroKey[distroKey] = append(byDistroKey[distroKey], nsObj) + case *cpe.Namespace: + cpeNamespaces = append(cpeNamespaces, nsObj) + default: + log.Warnf("unable to index namespace=%s", n) + continue + } + } + + return &Index{ + all: all, + byLanguage: byLanguage, + byDistroKey: byDistroKey, + cpe: cpeNamespaces, + }, nil +} + +func (i *Index) NamespacesForLanguage(l syftPkg.Language) []*language.Namespace { + if _, ok := i.byLanguage[l]; ok { + return i.byLanguage[l] + } + + return nil +} + +func (i *Index) NamespacesForDistro(d *grypeDistro.Distro) []*distro.Namespace { + if d == nil { + return nil + } + + if d.IsRolling() { + distroKey := 
fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "rolling") + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + var versionSegments []int + if d.Version != nil { + versionSegments = d.Version.Segments() + } + + if len(versionSegments) > 0 { + // First attempt a direct match on distro full name and version + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), d.FullVersion()) + + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + + if len(versionSegments) == 3 { + // Try with only first two version components + distroKey = fmt.Sprintf("%s:%d.%d", strings.ToLower(d.Type.String()), versionSegments[0], versionSegments[1]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + + // Try using only major version component + distroKey = fmt.Sprintf("%s:%d", strings.ToLower(d.Type.String()), versionSegments[0]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + // Fall back into the manual mapping logic derived from + // https://github.com/anchore/enterprise/blob/eb71bc6686b9f4c92347a4e95bec828cee879197/anchore_engine/services/policy_engine/__init__.py#L127-L140 + switch d.Type { + case grypeDistro.CentOS, grypeDistro.RedHat, grypeDistro.Fedora, grypeDistro.RockyLinux, grypeDistro.AlmaLinux, grypeDistro.Gentoo: + // TODO: there is no mapping of fedora version to RHEL latest version (only the name) + distroKey = fmt.Sprintf("%s:%d", strings.ToLower(string(grypeDistro.RedHat)), versionSegments[0]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + } + + return nil +} + +func (i *Index) CPENamespaces() []*cpe.Namespace { + return i.cpe +} diff --git a/pkg/db/v4/namespace/index_test.go b/pkg/db/v4/namespace/index_test.go new file mode 100644 index 00000000..66abd8c6 --- /dev/null +++ b/pkg/db/v4/namespace/index_test.go @@ -0,0 +1,283 @@ +package namespace + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/v4/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v4/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v4/namespace/language" + osDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromStringSlice(t *testing.T) { + tests := []struct { + namespaces []string + byLanguage map[syftPkg.Language][]*language.Namespace + byDistroKey map[string][]*distro.Namespace + cpe []*cpe.Namespace + }{ + { + namespaces: []string{ + "github:language:python", + "github:language:python:conda", + "debian:distro:debian:8", + "alpine:distro:alpine:3.15", + "alpine:distro:alpine:3.16", + "msrc:distro:windows:12345", + "nvd:cpe", + "github:language:ruby", + "abc.xyz:language:ruby", + "1234.4567:language:unknown", + "---:cpe", + "another-provider:distro:alpine:3.15", + "another-provider:distro:alpine:3.16", + }, + byLanguage: map[syftPkg.Language][]*language.Namespace{ + syftPkg.Python: { + language.NewNamespace("github", syftPkg.Python, ""), + language.NewNamespace("github", syftPkg.Python, syftPkg.Type("conda")), + }, + syftPkg.Ruby: { + language.NewNamespace("github", syftPkg.Ruby, ""), + language.NewNamespace("abc.xyz", syftPkg.Ruby, ""), + }, + syftPkg.Language("unknown"): { + language.NewNamespace("1234.4567", syftPkg.Language("unknown"), ""), + }, + }, + byDistroKey: map[string][]*distro.Namespace{ + "debian:8": { + distro.NewNamespace("debian", osDistro.Debian, "8"), + }, + "alpine:3.15": { + distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), + distro.NewNamespace("another-provider", osDistro.Alpine, "3.15"), + 
}, + "alpine:3.16": { + distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), + distro.NewNamespace("another-provider", osDistro.Alpine, "3.16"), + }, + "windows:12345": { + distro.NewNamespace("msrc", osDistro.Windows, "12345"), + }, + }, + cpe: []*cpe.Namespace{ + cpe.NewNamespace("---"), + cpe.NewNamespace("nvd"), + }, + }, + } + + for _, test := range tests { + result, _ := FromStrings(test.namespaces) + assert.Len(t, result.all, len(test.namespaces)) + + for l, elems := range result.byLanguage { + assert.Contains(t, test.byLanguage, l) + assert.ElementsMatch(t, elems, test.byLanguage[l]) + } + + for d, elems := range result.byDistroKey { + assert.Contains(t, test.byDistroKey, d) + assert.ElementsMatch(t, elems, test.byDistroKey[d]) + } + + assert.ElementsMatch(t, result.cpe, test.cpe) + } +} + +func TestIndex_CPENamespaces(t *testing.T) { + tests := []struct { + namespaces []string + cpe []*cpe.Namespace + }{ + { + namespaces: []string{"nvd:cpe", "another-source:cpe", "x:distro:y:10"}, + cpe: []*cpe.Namespace{ + cpe.NewNamespace("nvd"), + cpe.NewNamespace("another-source"), + }, + }, + } + + for _, test := range tests { + result, _ := FromStrings(test.namespaces) + assert.Len(t, result.all, len(test.namespaces)) + assert.ElementsMatch(t, result.CPENamespaces(), test.cpe) + } +} + +func newDistro(t *testing.T, dt osDistro.Type, v string, idLikes []string) *osDistro.Distro { + distro, err := osDistro.New(dt, v, idLikes...) + assert.NoError(t, err) + return distro +} + +func TestIndex_NamespacesForDistro(t *testing.T) { + namespaceIndex, err := FromStrings([]string{ + "alpine:distro:alpine:3.15", + "alpine:distro:alpine:3.16", + "debian:distro:debian:8", + "amazon:distro:amazonlinux:2", + "amazon:distro:amazonlinux:2022", + "abc.xyz:distro:unknown:123.456", + "redhat:distro:redhat:8", + "redhat:distro:redhat:9", + "other-provider:distro:debian:8", + "other-provider:distro:redhat:9", + "suse:distro:sles:12.5", + "msrc:distro:windows:471816", + "ubuntu:distro:ubuntu:18.04", + "oracle:distro:oraclelinux:8", + "wolfi:distro:wolfi:rolling", + "chainguard:distro:chainguard:rolling", + "archlinux:distro:archlinux:rolling", + }) + + assert.NoError(t, err) + + tests := []struct { + distro *osDistro.Distro + namespaces []*distro.Namespace + }{ + { + distro: newDistro(t, osDistro.Alpine, "3.15.4", []string{"alpine"}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), + }, + }, + { + distro: newDistro(t, osDistro.Alpine, "3.16", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), + }, + }, + { + distro: newDistro(t, osDistro.Alpine, "3.16.4.5", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.Debian, "8.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("debian", osDistro.Debian, "8"), + distro.NewNamespace("other-provider", osDistro.Debian, "8"), + }, + }, + { + distro: newDistro(t, osDistro.RedHat, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + distro: newDistro(t, osDistro.CentOS, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + distro: newDistro(t, osDistro.AlmaLinux, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), 
+ distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + distro: newDistro(t, osDistro.RockyLinux, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + distro: newDistro(t, osDistro.SLES, "12.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("suse", osDistro.SLES, "12.5"), + }, + }, + { + distro: newDistro(t, osDistro.Windows, "471816", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("msrc", osDistro.Windows, "471816"), + }, + }, + { + distro: newDistro(t, osDistro.Ubuntu, "18.04", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("ubuntu", osDistro.Ubuntu, "18.04"), + }, + }, + { + distro: newDistro(t, osDistro.Fedora, "31.4", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.AmazonLinux, "2", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("amazon", osDistro.AmazonLinux, "2"), + }, + }, + { + distro: newDistro(t, osDistro.AmazonLinux, "2022", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("amazon", osDistro.AmazonLinux, "2022"), + }, + }, + { + distro: newDistro(t, osDistro.Mariner, "20.1", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.OracleLinux, "8", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("oracle", osDistro.OracleLinux, "8"), + }, + }, + { + distro: newDistro(t, osDistro.ArchLinux, "", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("archlinux", osDistro.ArchLinux, "rolling"), + }, + }, + { + // Gentoo is a rolling distro; however, because we currently have no namespaces populated for it in the + // index fixture, we expect to get nil + distro: newDistro(t, osDistro.Gentoo, "", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.OpenSuseLeap, "100", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.Photon, "20.1", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.Busybox, "20.1", []string{}), + namespaces: nil, + }, + { + distro: newDistro(t, osDistro.Wolfi, "20221011", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("wolfi", osDistro.Wolfi, "rolling"), + }, + }, + { + distro: newDistro(t, osDistro.Chainguard, "20230214", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("chainguard", osDistro.Chainguard, "rolling"), + }, + }, + } + + for _, test := range tests { + result := namespaceIndex.NamespacesForDistro(test.distro) + assert.ElementsMatch(t, result, test.namespaces) + } +} diff --git a/pkg/db/v4/namespace/language/namespace.go b/pkg/db/v4/namespace/language/namespace.go new file mode 100644 index 00000000..1623c578 --- /dev/null +++ b/pkg/db/v4/namespace/language/namespace.go @@ -0,0 +1,78 @@ +package language + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +const ID = "language" + +type Namespace struct { + provider string + language syftPkg.Language + packageType syftPkg.Type + resolver resolver.Resolver +} + +func NewNamespace(provider string, language syftPkg.Language, packageType syftPkg.Type) *Namespace { + r, _ := resolver.FromLanguage(language) + + return &Namespace{ + provider: provider, + language: language, + packageType: packageType, + resolver: r, + } +} + +func FromString(namespaceStr string) 
(*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create language namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 3 && len(components) != 4 { + return nil, fmt.Errorf("unable to create language namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create language namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + packageType := "" + + if len(components) == 4 { + packageType = components[3] + } + + return NewNamespace(components[0], syftPkg.Language(components[2]), syftPkg.Type(packageType)), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) Language() syftPkg.Language { + return n.language +} + +func (n *Namespace) PackageType() syftPkg.Type { + return n.packageType +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + if n.packageType != "" { + return fmt.Sprintf("%s:%s:%s:%s", n.provider, ID, n.language, n.packageType) + } + + return fmt.Sprintf("%s:%s:%s", n.provider, ID, n.language) +} diff --git a/pkg/db/v4/namespace/language/namespace_test.go b/pkg/db/v4/namespace/language/namespace_test.go new file mode 100644 index 00000000..35cd7424 --- /dev/null +++ b/pkg/db/v4/namespace/language/namespace_test.go @@ -0,0 +1,73 @@ +package language + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: "github:language:python", + result: NewNamespace("github", syftPkg.Python, ""), + }, + { + namespaceString: "github:language:ruby", + result: NewNamespace("github", syftPkg.Ruby, ""), + }, + { + namespaceString: "github:language:java", + result: NewNamespace("github", syftPkg.Java, ""), + }, + { + namespaceString: "abc.xyz:language:something", + result: NewNamespace("abc.xyz", syftPkg.Language("something"), ""), + }, + { + namespaceString: "abc.xyz:language:something:another-package-manager", + result: NewNamespace("abc.xyz", syftPkg.Language("something"), syftPkg.Type("another-package-manager")), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := []struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create language namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create language namespace from single-component: incorrect number of components", + }, + { + namespaceString: "two:components", + errorMessage: "unable to create language namespace from two:components: incorrect number of components", + }, + { + namespaceString: "too:many:components:a:b", + errorMessage: "unable to create language namespace from too:many:components:a:b: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type:a:b", + errorMessage: "unable to create language namespace from wrong:namespace_type:a:b: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v4/namespace/namespace.go b/pkg/db/v4/namespace/namespace.go new file 
mode 100644 index 00000000..3688b613 --- /dev/null +++ b/pkg/db/v4/namespace/namespace.go @@ -0,0 +1,11 @@ +package namespace + +import ( + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver" +) + +type Namespace interface { + Provider() string + Resolver() resolver.Resolver + String() string +} diff --git a/pkg/db/v4/pkg/resolver/from_language.go b/pkg/db/v4/pkg/resolver/from_language.go new file mode 100644 index 00000000..23f2b4e4 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/from_language.go @@ -0,0 +1,23 @@ +package resolver + +import ( + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/java" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/python" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/stock" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func FromLanguage(language syftPkg.Language) (Resolver, error) { + var r Resolver + + switch language { + case syftPkg.Python: + r = &python.Resolver{} + case syftPkg.Java: + r = &java.Resolver{} + default: + r = &stock.Resolver{} + } + + return r, nil +} diff --git a/pkg/db/v4/pkg/resolver/from_language_test.go b/pkg/db/v4/pkg/resolver/from_language_test.go new file mode 100644 index 00000000..f186c7c7 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/from_language_test.go @@ -0,0 +1,70 @@ +package resolver + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/java" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/python" + "github.com/anchore/grype-db/pkg/db/v4/pkg/resolver/stock" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromLanguage(t *testing.T) { + tests := []struct { + language syftPkg.Language + result Resolver + }{ + { + language: syftPkg.Python, + result: &python.Resolver{}, + }, + { + language: syftPkg.Java, + result: &java.Resolver{}, + }, + { + language: syftPkg.Ruby, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Dart, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Rust, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Go, + result: &stock.Resolver{}, + }, + { + language: syftPkg.JavaScript, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Dotnet, + result: &stock.Resolver{}, + }, + { + language: syftPkg.PHP, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Ruby, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Language("something-new"), + result: &stock.Resolver{}, + }, + } + + for _, test := range tests { + result, err := FromLanguage(test.language) + assert.NoError(t, err) + assert.Equal(t, result, test.result) + } +} diff --git a/pkg/db/v4/pkg/resolver/java/resolver.go b/pkg/db/v4/pkg/resolver/java/resolver.go new file mode 100644 index 00000000..bbfd5168 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/java/resolver.go @@ -0,0 +1,47 @@ +package java + +import ( + "fmt" + "strings" + + "github.com/scylladb/go-set/strset" + + "github.com/anchore/grype-db/internal/log" + grypePkg "github.com/anchore/grype/grype/pkg" + "github.com/anchore/packageurl-go" +) + +type Resolver struct { +} + +func (r *Resolver) Normalize(name string) string { + return strings.ToLower(name) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + names := strset.New() + + // The current default for the Java ecosystem is to use a Maven-like identifier of the form + // ":" + if metadata, ok := p.Metadata.(grypePkg.JavaMetadata); ok { + if metadata.PomGroupID != "" { + if metadata.PomArtifactID != "" { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.PomArtifactID))) + 
} + if metadata.ManifestName != "" { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.ManifestName))) + } + } + } + + if p.PURL != "" { + purl, err := packageurl.FromString(p.PURL) + if err != nil { + log.Warnf("unable to resolve java package identifier from purl=%q: %+v", p.PURL, err) + } else { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", purl.Namespace, purl.Name))) + } + } + + return names.List() +} diff --git a/pkg/db/v4/pkg/resolver/java/resolver_test.go b/pkg/db/v4/pkg/resolver/java/resolver_test.go new file mode 100644 index 00000000..a0e9b207 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/java/resolver_test.go @@ -0,0 +1,175 @@ +package java + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo.concurrency", + }, + { + packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test---1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd.-__.--.-___.__.--1234____----....xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} + +func TestResolver_Resolve(t *testing.T) { + tests := []struct { + name string + pkg grypePkg.Package + resolved []string + }{ + { + name: "both artifact and manifest 1", + pkg: grypePkg.Package{ + Name: "ABCD", + Version: "1.2.3.4", + Language: "java", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "virtual-path-info", + PomArtifactID: "pom-ARTIFACT-ID-info", + PomGroupID: "pom-group-ID-info", + ManifestName: "main-section-name-info", + }, + }, + resolved: []string{"pom-group-id-info:pom-artifact-id-info", "pom-group-id-info:main-section-name-info"}, + }, + { + name: "both artifact and manifest 2", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + resolved: []string{ + "g-id:art-id", + "g-id:man-name", + }, + }, + { + name: "no group id", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + ManifestName: "man-name", + }, + }, + resolved: []string{}, + }, + { + name: "only manifest", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + resolved: []string{ + "g-id:man-name", + }, + }, + { + name: "only artifact", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + resolved: []string{ + "g-id:art-id", + }, + }, + { + name: "no artifact or manifest", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + }, + }, + resolved: []string{}, + }, + { + name: "with valid purl", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + 
Name: "a-name", + PURL: "pkg:maven/org.anchore/b-name@0.2", + }, + resolved: []string{"org.anchore:b-name"}, + }, + { + name: "ignore invalid pURLs", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + PURL: "pkg:BAD/", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + resolved: []string{ + "g-id:art-id", + }, + }, + } + + resolver := Resolver{} + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolvedNames := resolver.Resolve(test.pkg) + assert.ElementsMatch(t, resolvedNames, test.resolved) + }) + } +} diff --git a/pkg/db/v4/pkg/resolver/python/resolver.go b/pkg/db/v4/pkg/resolver/python/resolver.go new file mode 100644 index 00000000..0145bf09 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/python/resolver.go @@ -0,0 +1,29 @@ +package python + +import ( + "regexp" + "strings" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver struct { +} + +func (r *Resolver) Normalize(name string) string { + // Canonical naming of packages within python is defined by PEP 503 at + // https://peps.python.org/pep-0503/#normalized-names, and this code is derived from + // the official python implementation of canonical naming at + // https://packaging.pypa.io/en/latest/_modules/packaging/utils.html#canonicalize_name + + return strings.ToLower(regexp.MustCompile(`[-_.]+`).ReplaceAllString(name, "-")) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + // Canonical naming of packages within python is defined by PEP 503 at + // https://peps.python.org/pep-0503/#normalized-names, and this code is derived from + // the official python implementation of canonical naming at + // https://packaging.pypa.io/en/latest/_modules/packaging/utils.html#canonicalize_name + + return []string{r.Normalize(p.Name)} +} diff --git a/pkg/db/v4/pkg/resolver/python/resolver_test.go b/pkg/db/v4/pkg/resolver/python/resolver_test.go new file mode 100644 index 00000000..f54aef42 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/python/resolver_test.go @@ -0,0 +1,42 @@ +package python + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo-concurrency", + }, + { + packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test-1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd-1234-xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} diff --git a/pkg/db/v4/pkg/resolver/resolver.go b/pkg/db/v4/pkg/resolver/resolver.go new file mode 100644 index 00000000..bc253a25 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/resolver.go @@ -0,0 +1,10 @@ +package resolver + +import ( + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver interface { + Normalize(string) string + Resolve(p grypePkg.Package) []string +} diff --git a/pkg/db/v4/pkg/resolver/stock/resolver.go b/pkg/db/v4/pkg/resolver/stock/resolver.go new file mode 100644 index 00000000..c1e38411 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/stock/resolver.go @@ -0,0 +1,18 @@ +package stock + +import ( + "strings" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver struct { +} + 
+func (r *Resolver) Normalize(name string) string { + return strings.ToLower(name) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + return []string{r.Normalize(p.Name)} +} diff --git a/pkg/db/v4/pkg/resolver/stock/resolver_test.go b/pkg/db/v4/pkg/resolver/stock/resolver_test.go new file mode 100644 index 00000000..699b5817 --- /dev/null +++ b/pkg/db/v4/pkg/resolver/stock/resolver_test.go @@ -0,0 +1,42 @@ +package stock + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo.concurrency", + }, + { + packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test---1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd.-__.--.-___.__.--1234____----....xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} diff --git a/pkg/db/v4/schema_version.go b/pkg/db/v4/schema_version.go new file mode 100644 index 00000000..3f74ff22 --- /dev/null +++ b/pkg/db/v4/schema_version.go @@ -0,0 +1,3 @@ +package v4 + +const SchemaVersion = 4 diff --git a/pkg/db/v4/store.go b/pkg/db/v4/store.go new file mode 100644 index 00000000..b0ac18c1 --- /dev/null +++ b/pkg/db/v4/store.go @@ -0,0 +1,30 @@ +package v4 + +type Store interface { + StoreReader + StoreWriter + DBCloser +} + +type StoreReader interface { + IDReader + DiffReader + VulnerabilityStoreReader + VulnerabilityMetadataStoreReader + VulnerabilityMatchExclusionStoreReader +} + +type StoreWriter interface { + IDWriter + VulnerabilityStoreWriter + VulnerabilityMetadataStoreWriter + VulnerabilityMatchExclusionStoreWriter +} + +type DiffReader interface { + DiffStore(s StoreReader) (*[]Diff, error) +} + +type DBCloser interface { + Close() +} diff --git a/pkg/db/v4/store/diff.go b/pkg/db/v4/store/diff.go new file mode 100644 index 00000000..37288bae --- /dev/null +++ b/pkg/db/v4/store/diff.go @@ -0,0 +1,305 @@ +package store + +import ( + "github.com/wagoodman/go-partybus" + "github.com/wagoodman/go-progress" + + "github.com/anchore/grype-db/internal/bus" + v4 "github.com/anchore/grype-db/pkg/db/v4" + "github.com/anchore/grype/grype/event" + "github.com/anchore/grype/grype/event/monitor" +) + +type storeKey struct { + id string + namespace string + packageName string +} + +type PkgMap = map[storeKey][]string + +type storeVulnerabilityList struct { + items map[storeKey][]storeVulnerability + seen bool +} +type storeVulnerability struct { + item *v4.Vulnerability + seen bool +} +type storeMetadata struct { + item *v4.VulnerabilityMetadata + seen bool +} + +// create manual progress bars for tracking the database diff's progress +func trackDiff(total int64) (*progress.Manual, *progress.Manual, *progress.Stage) { + stageProgress := &progress.Manual{} + stageProgress.SetTotal(total) + differencesDiscovered := &progress.Manual{} + stager := &progress.Stage{} + + bus.Publish(partybus.Event{ + Type: event.DatabaseDiffingStarted, + Value: monitor.DBDiff{ + Stager: stager, + StageProgress: progress.Progressable(stageProgress), + DifferencesDiscovered: progress.Monitorable(differencesDiscovered), + }, + }) + return stageProgress, differencesDiscovered, stager +} + +// creates a map from an unpackaged key to a list of 
all packages associated with it +func buildVulnerabilityPkgsMap(models *[]v4.Vulnerability) *map[storeKey][]string { + storeMap := make(map[storeKey][]string) + for _, m := range *models { + model := m + k := getVulnerabilityParentKey(model) + if storeVuln, exists := storeMap[k]; exists { + storeMap[k] = append(storeVuln, model.PackageName) + } else { + storeMap[k] = []string{model.PackageName} + } + } + return &storeMap +} + +// creates a diff from the given key using the package maps information to populate +// the relevant packages affected by the update +func createDiff(baseStore, targetStore *PkgMap, key storeKey, reason v4.DiffReason) *v4.Diff { + pkgMap := make(map[string]struct{}) + + key.packageName = "" + if baseStore != nil { + if basePkgs, exists := (*baseStore)[key]; exists { + for _, pkg := range basePkgs { + pkgMap[pkg] = struct{}{} + } + } + } + if targetStore != nil { + if targetPkgs, exists := (*targetStore)[key]; exists { + for _, pkg := range targetPkgs { + pkgMap[pkg] = struct{}{} + } + } + } + pkgs := []string{} + for pkg := range pkgMap { + pkgs = append(pkgs, pkg) + } + + return &v4.Diff{ + Reason: reason, + ID: key.id, + Namespace: key.namespace, + Packages: pkgs, + } +} + +// gets an unpackaged key from a vulnerability +func getVulnerabilityParentKey(vuln v4.Vulnerability) storeKey { + return storeKey{vuln.ID, vuln.Namespace, ""} +} + +// gets a packaged key from a vulnerability +func getVulnerabilityKey(vuln v4.Vulnerability) storeKey { + return storeKey{vuln.ID, vuln.Namespace, vuln.PackageName} +} + +type VulnerabilitySet struct { + data map[storeKey]*storeVulnerabilityList +} + +func NewVulnerabilitySet(models *[]v4.Vulnerability) *VulnerabilitySet { + m := make(map[storeKey]*storeVulnerabilityList, len(*models)) + for _, mm := range *models { + model := mm + parentKey := getVulnerabilityParentKey(model) + vulnKey := getVulnerabilityKey(model) + if storeVuln, exists := m[parentKey]; exists { + if kk, exists := storeVuln.items[vulnKey]; exists { + storeVuln.items[vulnKey] = append(kk, storeVulnerability{ + item: &model, + seen: false, + }) + } else { + storeVuln.items[vulnKey] = []storeVulnerability{{&model, false}} + } + } else { + vuln := storeVulnerabilityList{ + items: make(map[storeKey][]storeVulnerability), + seen: false, + } + vuln.items[vulnKey] = []storeVulnerability{{&model, false}} + m[parentKey] = &vuln + } + } + return &VulnerabilitySet{ + data: m, + } +} + +func (v *VulnerabilitySet) in(item v4.Vulnerability) bool { + _, exists := v.data[getVulnerabilityParentKey(item)] + return exists +} + +func (v *VulnerabilitySet) match(item v4.Vulnerability) bool { + if parent, exists := v.data[getVulnerabilityParentKey(item)]; exists { + parent.seen = true + key := getVulnerabilityKey(item) + if children, exists := parent.items[key]; exists { + for idx, child := range children { + if item.Equal(*child.item) { + children[idx].seen = true + return true + } + } + } + } + return false +} + +func (v *VulnerabilitySet) getUnmatched() ([]storeKey, []storeKey) { + notSeen := []storeKey{} + notEntirelySeen := []storeKey{} + for k, item := range v.data { + if !item.seen { + notSeen = append(notSeen, k) + continue + } + componentLoop: + for _, components := range item.items { + for _, component := range components { + if !component.seen { + notEntirelySeen = append(notEntirelySeen, k) + break componentLoop + } + } + } + } + return notSeen, notEntirelySeen +} + +func diffVulnerabilities(baseModels, targetModels *[]v4.Vulnerability, basePkgsMap, targetPkgsMap *PkgMap, 
differentItems *progress.Manual) *map[string]*v4.Diff { + diffs := make(map[string]*v4.Diff) + m := NewVulnerabilitySet(baseModels) + + for _, tModel := range *targetModels { + targetModel := tModel + k := getVulnerabilityKey(targetModel) + if m.in(targetModel) { + matched := m.match(targetModel) + if !matched { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, v4.DiffChanged) + differentItems.Increment() + } + } else { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(nil, targetPkgsMap, k, v4.DiffAdded) + differentItems.Increment() + } + } + notSeen, partialSeen := m.getUnmatched() + for _, k := range partialSeen { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, v4.DiffChanged) + differentItems.Increment() + } + for _, k := range notSeen { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, nil, k, v4.DiffRemoved) + differentItems.Increment() + } + + return &diffs +} + +type MetadataSet struct { + data map[storeKey]*storeMetadata +} + +func NewMetadataSet(models *[]v4.VulnerabilityMetadata) *MetadataSet { + m := make(map[storeKey]*storeMetadata, len(*models)) + for _, mm := range *models { + model := mm + m[getMetadataKey(model)] = &storeMetadata{ + item: &model, + seen: false, + } + } + return &MetadataSet{ + data: m, + } +} + +func (v *MetadataSet) in(item v4.VulnerabilityMetadata) bool { + _, exists := v.data[getMetadataKey(item)] + return exists +} + +func (v *MetadataSet) match(item v4.VulnerabilityMetadata) bool { + if baseModel, exists := v.data[getMetadataKey(item)]; exists { + baseModel.seen = true + return baseModel.item.Equal(item) + } + return false +} + +func (v *MetadataSet) getUnmatched() []storeKey { + notSeen := []storeKey{} + for k, item := range v.data { + if !item.seen { + notSeen = append(notSeen, k) + } + } + return notSeen +} + +func diffVulnerabilityMetadata(baseModels, targetModels *[]v4.VulnerabilityMetadata, basePkgsMap, targetPkgsMap *PkgMap, differentItems *progress.Manual) *map[string]*v4.Diff { + diffs := make(map[string]*v4.Diff) + m := NewMetadataSet(baseModels) + + for _, tModel := range *targetModels { + targetModel := tModel + k := getMetadataKey(targetModel) + if m.in(targetModel) { + if !m.match(targetModel) { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, targetPkgsMap, k, v4.DiffChanged) + differentItems.Increment() + } + } else { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(nil, targetPkgsMap, k, v4.DiffAdded) + differentItems.Increment() + } + } + for _, k := range m.getUnmatched() { + if _, exists := diffs[k.id+k.namespace]; exists { + continue + } + diffs[k.id+k.namespace] = createDiff(basePkgsMap, nil, k, v4.DiffRemoved) + differentItems.Increment() + } + + return &diffs +} + +func getMetadataKey(metadata v4.VulnerabilityMetadata) storeKey { + return storeKey{metadata.ID, metadata.Namespace, ""} +} diff --git a/pkg/db/v4/store/diff_test.go b/pkg/db/v4/store/diff_test.go new file mode 100644 index 00000000..1f8ccaec --- /dev/null +++ b/pkg/db/v4/store/diff_test.go @@ -0,0 +1,231 @@ +package store + +import ( + "sort" + "testing" + + "github.com/stretchr/testify/assert" + + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + 
+func Test_GetAllVulnerabilities(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + //WHEN + result, err := s.GetAllVulnerabilities() + + //THEN + assert.NotNil(t, result) + assert.NoError(t, err) +} + +func Test_GetAllVulnerabilityMetadata(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + //WHEN + result, err := s.GetAllVulnerabilityMetadata() + + //THEN + assert.NotNil(t, result) + assert.NoError(t, err) +} + +func Test_Diff_Vulnerabilities(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + s2, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v4.Vulnerability{ + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.UnknownFixState, + }, + }, + } + targetVulns := []v4.Vulnerability{ + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:language:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:nomad", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:nomad:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.WontFixState, + }, + }, + } + expectedDiffs := []v4.Diff{ + { + Reason: v4.DiffChanged, + ID: "CVE-123-4567", + Namespace: "github:language:python", + Packages: []string{"pypi:requests"}, + }, + { + Reason: v4.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{"npm:axios"}, + }, + { + Reason: v4.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:language:go", + Packages: []string{"hashicorp:nomad"}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerability(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerability(vuln) + } + + //WHEN + result, err := s1.DiffStore(s2) + sort.SliceStable(*result, func(i, j int) bool { + return (*result)[i].ID < (*result)[j].ID + }) + + //THEN + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} + +func Test_Diff_Metadata(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + + s2, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v4.VulnerabilityMetadata{ + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + DataSource: "nvd", + }, + { + Namespace: "github:language:python", + ID: 
"CVE-123-4567", + DataSource: "nvd", + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + DataSource: "nvd", + }, + } + targetVulns := []v4.VulnerabilityMetadata{ + { + Namespace: "github:language:go", + ID: "GHSA-....-....", + DataSource: "nvd", + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + DataSource: "vulndb", + }, + } + expectedDiffs := []v4.Diff{ + { + Reason: v4.DiffRemoved, + ID: "CVE-123-4567", + Namespace: "github:language:python", + Packages: []string{}, + }, + { + Reason: v4.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{}, + }, + { + Reason: v4.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:language:go", + Packages: []string{}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerabilityMetadata(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerabilityMetadata(vuln) + } + + //WHEN + result, err := s1.DiffStore(s2) + + //THEN + sort.SliceStable(*result, func(i, j int) bool { + return (*result)[i].ID < (*result)[j].ID + }) + + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} diff --git a/pkg/db/v4/store/model/id.go b/pkg/db/v4/store/model/id.go new file mode 100644 index 00000000..2b47d873 --- /dev/null +++ b/pkg/db/v4/store/model/id.go @@ -0,0 +1,40 @@ +package model + +import ( + "fmt" + "time" + + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + +const ( + IDTableName = "id" +) + +type IDModel struct { + BuildTimestamp string `gorm:"column:build_timestamp"` + SchemaVersion int `gorm:"column:schema_version"` +} + +func NewIDModel(id v4.ID) IDModel { + return IDModel{ + BuildTimestamp: id.BuildTimestamp.Format(time.RFC3339Nano), + SchemaVersion: id.SchemaVersion, + } +} + +func (IDModel) TableName() string { + return IDTableName +} + +func (m *IDModel) Inflate() (v4.ID, error) { + buildTime, err := time.Parse(time.RFC3339Nano, m.BuildTimestamp) + if err != nil { + return v4.ID{}, fmt.Errorf("unable to parse build timestamp (%+v): %w", m.BuildTimestamp, err) + } + + return v4.ID{ + BuildTimestamp: buildTime, + SchemaVersion: m.SchemaVersion, + }, nil +} diff --git a/pkg/db/v4/store/model/vulnerability.go b/pkg/db/v4/store/model/vulnerability.go new file mode 100644 index 00000000..40ee3fa9 --- /dev/null +++ b/pkg/db/v4/store/model/vulnerability.go @@ -0,0 +1,93 @@ +package model + +import ( + "encoding/json" + "fmt" + + sqlite "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + +const ( + VulnerabilityTableName = "vulnerability" + GetVulnerabilityIndexName = "get_vulnerability_index" +) + +// VulnerabilityModel is a struct used to serialize db.Vulnerability information into a sqlite3 DB. +type VulnerabilityModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id"` + PackageName string `gorm:"column:package_name; index:get_vulnerability_index"` + Namespace string `gorm:"column:namespace; index:get_vulnerability_index"` + VersionConstraint string `gorm:"column:version_constraint"` + VersionFormat string `gorm:"column:version_format"` + CPEs sqlite.NullString `gorm:"column:cpes; default:null"` + RelatedVulnerabilities sqlite.NullString `gorm:"column:related_vulnerabilities; default:null"` + FixedInVersions sqlite.NullString `gorm:"column:fixed_in_versions; default:null"` + FixState string `gorm:"column:fix_state"` + Advisories sqlite.NullString `gorm:"column:advisories; default:null"` +} + +// NewVulnerabilityModel generates a new model from a db.Vulnerability struct. 
+func NewVulnerabilityModel(vulnerability v4.Vulnerability) VulnerabilityModel { + return VulnerabilityModel{ + ID: vulnerability.ID, + PackageName: vulnerability.PackageName, + Namespace: vulnerability.Namespace, + VersionConstraint: vulnerability.VersionConstraint, + VersionFormat: vulnerability.VersionFormat, + FixedInVersions: sqlite.ToNullString(vulnerability.Fix.Versions), + FixState: string(vulnerability.Fix.State), + Advisories: sqlite.ToNullString(vulnerability.Advisories), + CPEs: sqlite.ToNullString(vulnerability.CPEs), + RelatedVulnerabilities: sqlite.ToNullString(vulnerability.RelatedVulnerabilities), + } +} + +// TableName returns the table which all db.Vulnerability model instances are stored into. +func (VulnerabilityModel) TableName() string { + return VulnerabilityTableName +} + +// Inflate generates a db.Vulnerability object from the serialized model instance. +func (m *VulnerabilityModel) Inflate() (v4.Vulnerability, error) { + var cpes []string + err := json.Unmarshal(m.CPEs.ToByteSlice(), &cpes) + if err != nil { + return v4.Vulnerability{}, fmt.Errorf("unable to unmarshal CPEs (%+v): %w", m.CPEs, err) + } + + var related []v4.VulnerabilityReference + err = json.Unmarshal(m.RelatedVulnerabilities.ToByteSlice(), &related) + if err != nil { + return v4.Vulnerability{}, fmt.Errorf("unable to unmarshal related vulnerabilities (%+v): %w", m.RelatedVulnerabilities, err) + } + + var advisories []v4.Advisory + + err = json.Unmarshal(m.Advisories.ToByteSlice(), &advisories) + if err != nil { + return v4.Vulnerability{}, fmt.Errorf("unable to unmarshal advisories (%+v): %w", m.Advisories, err) + } + + var versions []string + err = json.Unmarshal(m.FixedInVersions.ToByteSlice(), &versions) + if err != nil { + return v4.Vulnerability{}, fmt.Errorf("unable to unmarshal versions (%+v): %w", m.FixedInVersions, err) + } + + return v4.Vulnerability{ + ID: m.ID, + PackageName: m.PackageName, + Namespace: m.Namespace, + VersionConstraint: m.VersionConstraint, + VersionFormat: m.VersionFormat, + CPEs: cpes, + RelatedVulnerabilities: related, + Fix: v4.Fix{ + Versions: versions, + State: v4.FixState(m.FixState), + }, + Advisories: advisories, + }, nil +} diff --git a/pkg/db/v4/store/model/vulnerability_match_exclusion.go b/pkg/db/v4/store/model/vulnerability_match_exclusion.go new file mode 100644 index 00000000..60f341fa --- /dev/null +++ b/pkg/db/v4/store/model/vulnerability_match_exclusion.go @@ -0,0 +1,72 @@ +package model + +import ( + "encoding/json" + "fmt" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + +const ( + VulnerabilityMatchExclusionTableName = "vulnerability_match_exclusion" + GetVulnerabilityMatchExclusionIndexName = "get_vulnerability_match_exclusion_index" +) + +// VulnerabilityMatchExclusionModel is a struct used to serialize db.VulnerabilityMatchExclusion information into a sqlite3 DB. +type VulnerabilityMatchExclusionModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id; index:get_vulnerability_match_exclusion_index"` + Constraints sqlite.NullString `gorm:"column:constraints; default:null"` + Justification string `gorm:"column:justification"` +} + +// NewVulnerabilityMatchExclusionModel generates a new model from a db.VulnerabilityMatchExclusion struct. 
+func NewVulnerabilityMatchExclusionModel(v v4.VulnerabilityMatchExclusion) VulnerabilityMatchExclusionModel { + return VulnerabilityMatchExclusionModel{ + ID: v.ID, + Constraints: sqlite.ToNullString(v.Constraints), + Justification: v.Justification, + } +} + +// TableName returns the table which all db.VulnerabilityMatchExclusion model instances are stored into. +func (VulnerabilityMatchExclusionModel) TableName() string { + return VulnerabilityMatchExclusionTableName +} + +// Inflate generates a db.VulnerabilityMatchExclusion object from the serialized model instance. +func (m *VulnerabilityMatchExclusionModel) Inflate() (*v4.VulnerabilityMatchExclusion, error) { + // It's important that we only utilise exclusion constraints that are compatible with this version of Grype, + // so if any unknown fields are encountered then ignore that constraint. + + var constraints []v4.VulnerabilityMatchExclusionConstraint + err := json.Unmarshal(m.Constraints.ToByteSlice(), &constraints) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal vulnerability match exclusion constraints (%+v): %w", m.Constraints, err) + } + + var compatibleConstraints []v4.VulnerabilityMatchExclusionConstraint + + if len(constraints) > 0 { + for _, c := range constraints { + if !c.Usable() { + log.Debugf("skipping incompatible vulnerability match constraint for vuln id=%s, constraint=%+v", m.ID, c) + } else { + compatibleConstraints = append(compatibleConstraints, c) + } + } + + // If there were constraints and none were compatible, the entire record is not usable by this version of Grype + if len(compatibleConstraints) == 0 { + return nil, nil + } + } + + return &v4.VulnerabilityMatchExclusion{ + ID: m.ID, + Constraints: compatibleConstraints, + Justification: m.Justification, + }, nil +} diff --git a/pkg/db/v4/store/model/vulnerability_match_exclusion_test.go b/pkg/db/v4/store/model/vulnerability_match_exclusion_test.go new file mode 100644 index 00000000..6cc7747a --- /dev/null +++ b/pkg/db/v4/store/model/vulnerability_match_exclusion_test.go @@ -0,0 +1,201 @@ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + +func TestVulnerabilityMatchExclusionModel_Inflate(t *testing.T) { + tests := []struct { + name string + record *VulnerabilityMatchExclusionModel + result *v4.VulnerabilityMatchExclusion + }{ + { + name: "Nil constraint", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-12345", + Constraints: sqlite.ToNullString(nil), + Justification: "Who really knows?", + }, + result: &v4.VulnerabilityMatchExclusion{ + ID: "CVE-12345", + Constraints: nil, + Justification: "Who really knows?", + }, + }, + { + name: "Empty constraint array", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[]`, true), + Justification: "Always ignore", + }, + result: &v4.VulnerabilityMatchExclusion{ + ID: "CVE-919", + Constraints: nil, + Justification: "Always ignore", + }, + }, + { + name: "Single constraint", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"nvd:cpe"},"package":{"language":"python"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v4.VulnerabilityMatchExclusion{ + ID: "CVE-919", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ 
+ Namespace: "nvd:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "python", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Single unusable constraint with unknown vulnerability constraint field", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"nvd:cpe","something_new":"1234"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Single unusable constraint with unknown package constraint fields", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"package":{"name":"jim","another_field":"1234","x_y_z":"abc"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Single unusable constraint with unknown root-level constraint fields", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"x_y_z":{"name":"jim","another_field":"1234","x_y_z":"abc"},"package":{"name":"jim","another_field":"1234","x_y_z":"abc"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Multiple usable constraints", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"package":{"language":"python","version":"1000.0.1"}},{"vulnerability":{"namespace":"nvd:cpe"}},{"vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v4.VulnerabilityMatchExclusion{ + ID: "CVE-2025-152345", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "abc.xyz:language:ruby", + FixState: "wont-fix", + }, + Package: v4.PackageExclusionConstraint{ + Language: "ruby", + Type: "not-gem", + }, + }, + { + Package: v4.PackageExclusionConstraint{ + Language: "python", + Version: "1000.0.1", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "x", + }, + }, + { + Package: v4.PackageExclusionConstraint{ + Location: "/bin/x", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Multiple constraints with some unusable", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"a_b_c": "x","vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"package":{"language":"python","version":"1000.0.1"}},{"vulnerability":{"namespace":"nvd:cpe"}},{"vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x","nnnn":"no"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v4.VulnerabilityMatchExclusion{ + ID: "CVE-2025-152345", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Package: v4.PackageExclusionConstraint{ + Language: "python", + Version: "1000.0.1", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: 
"nvd:cpe", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "x", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Multiple constraints all unusable", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"a_b_c": "x","vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"a_b_c": "x","package":{"language":"python","version":"1000.0.1"}},{"a_b_c": "x","vulnerability":{"namespace":"nvd:cpe"}},{"a_b_c": "x","vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x","nnnn":"no"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result, err := test.record.Inflate() + assert.NoError(t, err) + assert.Equal(t, test.result, result) + }) + } +} diff --git a/pkg/db/v4/store/model/vulnerability_metadata.go b/pkg/db/v4/store/model/vulnerability_metadata.go new file mode 100644 index 00000000..7dd201a0 --- /dev/null +++ b/pkg/db/v4/store/model/vulnerability_metadata.go @@ -0,0 +1,74 @@ +package model + +import ( + "encoding/json" + "fmt" + + sqlite "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v4 "github.com/anchore/grype-db/pkg/db/v4" +) + +const ( + VulnerabilityMetadataTableName = "vulnerability_metadata" +) + +// VulnerabilityMetadataModel is a struct used to serialize db.VulnerabilityMetadata information into a sqlite3 DB. +type VulnerabilityMetadataModel struct { + ID string `gorm:"primary_key; column:id;"` + Namespace string `gorm:"primary_key; column:namespace;"` + DataSource string `gorm:"column:data_source"` + RecordSource string `gorm:"column:record_source"` + Severity string `gorm:"column:severity"` + URLs sqlite.NullString `gorm:"column:urls; default:null"` + Description string `gorm:"column:description"` + Cvss sqlite.NullString `gorm:"column:cvss; default:null"` +} + +// NewVulnerabilityMetadataModel generates a new model from a db.VulnerabilityMetadata struct. +func NewVulnerabilityMetadataModel(metadata v4.VulnerabilityMetadata) VulnerabilityMetadataModel { + if metadata.Cvss == nil { + metadata.Cvss = make([]v4.Cvss, 0) + } + + return VulnerabilityMetadataModel{ + ID: metadata.ID, + Namespace: metadata.Namespace, + DataSource: metadata.DataSource, + RecordSource: metadata.RecordSource, + Severity: metadata.Severity, + URLs: sqlite.ToNullString(metadata.URLs), + Description: metadata.Description, + Cvss: sqlite.ToNullString(metadata.Cvss), + } +} + +// TableName returns the table which all db.VulnerabilityMetadata model instances are stored into. +func (VulnerabilityMetadataModel) TableName() string { + return VulnerabilityMetadataTableName +} + +// Inflate generates a db.VulnerabilityMetadataModel object from the serialized model instance. 
+func (m *VulnerabilityMetadataModel) Inflate() (v4.VulnerabilityMetadata, error) { + var links []string + var cvss []v4.Cvss + + if err := json.Unmarshal(m.URLs.ToByteSlice(), &links); err != nil { + return v4.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal URLs (%+v): %w", m.URLs, err) + } + + err := json.Unmarshal(m.Cvss.ToByteSlice(), &cvss) + if err != nil { + return v4.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvss data (%+v): %w", m.Cvss, err) + } + + return v4.VulnerabilityMetadata{ + ID: m.ID, + Namespace: m.Namespace, + DataSource: m.DataSource, + RecordSource: m.RecordSource, + Severity: m.Severity, + URLs: links, + Description: m.Description, + Cvss: cvss, + }, nil +} diff --git a/pkg/db/v4/store/store.go b/pkg/db/v4/store/store.go new file mode 100644 index 00000000..93263695 --- /dev/null +++ b/pkg/db/v4/store/store.go @@ -0,0 +1,362 @@ +package store + +import ( + "fmt" + "sort" + + _ "github.com/glebarez/sqlite" // provide the sqlite dialect to gorm via import + "github.com/go-test/deep" + "github.com/scylladb/go-set/strset" + "gorm.io/gorm" + + "github.com/anchore/grype-db/pkg/db/internal/gormadapter" + v4 "github.com/anchore/grype-db/pkg/db/v4" + "github.com/anchore/grype-db/pkg/db/v4/store/model" +) + +// store holds an instance of the database connection +type store struct { + db *gorm.DB +} + +// New creates a new instance of the store. +func New(dbFilePath string, overwrite bool) (v4.Store, error) { + db, err := gormadapter.Open(dbFilePath, overwrite) + if err != nil { + return nil, err + } + + if overwrite { + // TODO: automigrate could write to the database, + // we should be validating the database is the correct database based on the version in the ID table before + // automigrating + if err := db.AutoMigrate(&model.IDModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate ID model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMetadataModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Metadata model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMatchExclusionModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Match Exclusion model: %w", err) + } + } + + return &store{ + db: db, + }, nil +} + +// GetID fetches the metadata about the database's schema version and build time. +func (s *store) GetID() (*v4.ID, error) { + var models []model.IDModel + result := s.db.Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple DB IDs") + case len(models) == 1: + id, err := models[0].Inflate() + if err != nil { + return nil, err + } + return &id, nil + } + + return nil, nil +} + +// SetID stores the database's schema version and build time. +func (s *store) SetID(id v4.ID) error { + var ids []model.IDModel + + // replace the existing ID with the given one + s.db.Find(&ids).Delete(&ids) + + m := model.NewIDModel(id) + result := s.db.Create(&m) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add id (%d rows affected)", result.RowsAffected) + } + + return result.Error +} + +// GetVulnerabilityNamespaces retrieves all possible namespaces from the database.
+func (s *store) GetVulnerabilityNamespaces() ([]string, error) { + var names []string + result := s.db.Model(&model.VulnerabilityMetadataModel{}).Distinct().Pluck("namespace", &names) + return names, result.Error +} + +// GetVulnerability retrieves vulnerabilities by namespace and package +func (s *store) GetVulnerability(namespace, packageName string) ([]v4.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? AND package_name = ?", namespace, packageName).Find(&models) + + var vulnerabilities = make([]v4.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// AddVulnerability saves one or more vulnerabilities into the sqlite3 store. +func (s *store) AddVulnerability(vulnerabilities ...v4.Vulnerability) error { + for _, vulnerability := range vulnerabilities { + m := model.NewVulnerabilityModel(vulnerability) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability (%d rows affected)", result.RowsAffected) + } + } + return nil +} + +// GetVulnerabilityMetadata retrieves metadata for the given vulnerability ID relative to a specific record source. +func (s *store) GetVulnerabilityMetadata(id, namespace string) (*v4.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + + result := s.db.Where(&model.VulnerabilityMetadataModel{ID: id, Namespace: namespace}).Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple metadatas for single ID=%q Namespace=%q", id, namespace) + case len(models) == 1: + metadata, err := models[0].Inflate() + if err != nil { + return nil, err + } + + return &metadata, nil + } + + return nil, nil +} + +// AddVulnerabilityMetadata stores one or more vulnerability metadata models into the sqlite DB. +// +//nolint:gocognit +func (s *store) AddVulnerabilityMetadata(metadata ...v4.VulnerabilityMetadata) error { + for _, m := range metadata { + existing, err := s.GetVulnerabilityMetadata(m.ID, m.Namespace) + if err != nil { + return fmt.Errorf("failed to verify existing entry: %w", err) + } + + if existing != nil { + // merge with the existing entry + + switch { + case existing.Severity != m.Severity: + return fmt.Errorf("existing metadata has mismatched severity (%q!=%q)", existing.Severity, m.Severity) + case existing.Description != m.Description: + return fmt.Errorf("existing metadata has mismatched description (%q!=%q)", existing.Description, m.Description) + } + + incoming: + // go through all incoming CVSS and see if they are already stored. + // If they exist already in the database then skip adding them, + // preventing a duplicate + for _, incomingCvss := range m.Cvss { + for _, existingCvss := range existing.Cvss { + if len(deep.Equal(incomingCvss, existingCvss)) == 0 { + // duplicate found, so incoming CVSS shouldn't get added + continue incoming + } + } + // a duplicate CVSS entry wasn't found, so append the incoming CVSS + existing.Cvss = append(existing.Cvss, incomingCvss) + } + + links := strset.New(existing.URLs...) 
+ for _, l := range m.URLs { + links.Add(l) + } + + existing.URLs = links.List() + sort.Strings(existing.URLs) + + newModel := model.NewVulnerabilityMetadataModel(*existing) + result := s.db.Save(&newModel) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to merge vulnerability metadata (%d rows affected)", result.RowsAffected) + } + + if result.Error != nil { + return result.Error + } + } else { + // this is a new entry + newModel := model.NewVulnerabilityMetadataModel(m) + result := s.db.Create(&newModel) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability metadata (%d rows affected)", result.RowsAffected) + } + } + } + return nil +} + +// GetVulnerabilityMatchExclusion retrieves one or more vulnerability match exclusion records given a vulnerability identifier. +func (s *store) GetVulnerabilityMatchExclusion(id string) ([]v4.VulnerabilityMatchExclusion, error) { + var models []model.VulnerabilityMatchExclusionModel + + result := s.db.Where("id = ?", id).Find(&models) + + var exclusions []v4.VulnerabilityMatchExclusion + for _, m := range models { + exclusion, err := m.Inflate() + if err != nil { + return nil, err + } + + if exclusion != nil { + exclusions = append(exclusions, *exclusion) + } + } + + return exclusions, result.Error +} + +// AddVulnerabilityMatchExclusion saves one or more vulnerability match exclusion records into the sqlite3 store. +func (s *store) AddVulnerabilityMatchExclusion(exclusions ...v4.VulnerabilityMatchExclusion) error { + for _, exclusion := range exclusions { + m := model.NewVulnerabilityMatchExclusionModel(exclusion) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability match exclusion (%d rows affected)", result.RowsAffected) + } + } + + return nil +} + +func (s *store) Close() { + s.db.Exec("VACUUM;") + + // only close the underlying connection if it could be retrieved + sqlDB, err := s.db.DB() + if err == nil { + _ = sqlDB.Close() + } +} + +// GetAllVulnerabilities gets all vulnerabilities in the database +func (s *store) GetAllVulnerabilities() (*[]v4.Vulnerability, error) { + var models []model.VulnerabilityModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + vulns := make([]v4.Vulnerability, len(models)) + for idx, m := range models { + vuln, err := m.Inflate() + if err != nil { + return nil, err + } + vulns[idx] = vuln + } + return &vulns, nil +} + +// GetAllVulnerabilityMetadata gets all vulnerability metadata in the database +func (s *store) GetAllVulnerabilityMetadata() (*[]v4.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + metadata := make([]v4.VulnerabilityMetadata, len(models)) + for idx, m := range models { + data, err := m.Inflate() + if err != nil { + return nil, err + } + metadata[idx] = data + } + return &metadata, nil +} + +// DiffStore creates a diff between the current sql database and the given store +func (s *store) DiffStore(targetStore v4.StoreReader) (*[]v4.Diff, error) { + // 7 stages, one for each step of the diff process (stages) + rowsProgress, diffItems, stager := trackDiff(7) + + stager.Current = "reading target vulnerabilities" + targetVulns, err := targetStore.GetAllVulnerabilities() + rowsProgress.Increment() + if err != nil { + return nil, err + } + + stager.Current = "reading base vulnerabilities" + baseVulns, err :=
s.GetAllVulnerabilities() + rowsProgress.Increment() + if err != nil { + return nil, err + } + + stager.Current = "preparing" + baseVulnPkgMap := buildVulnerabilityPkgsMap(baseVulns) + targetVulnPkgMap := buildVulnerabilityPkgsMap(targetVulns) + + stager.Current = "comparing vulnerabilities" + allDiffsMap := diffVulnerabilities(baseVulns, targetVulns, baseVulnPkgMap, targetVulnPkgMap, diffItems) + + stager.Current = "reading base metadata" + baseMetadata, err := s.GetAllVulnerabilityMetadata() + if err != nil { + return nil, err + } + rowsProgress.Increment() + + stager.Current = "reading target metadata" + targetMetadata, err := targetStore.GetAllVulnerabilityMetadata() + if err != nil { + return nil, err + } + rowsProgress.Increment() + + stager.Current = "comparing metadata" + metaDiffsMap := diffVulnerabilityMetadata(baseMetadata, targetMetadata, baseVulnPkgMap, targetVulnPkgMap, diffItems) + for k, diff := range *metaDiffsMap { + (*allDiffsMap)[k] = diff + } + allDiffs := []v4.Diff{} + for _, diff := range *allDiffsMap { + allDiffs = append(allDiffs, *diff) + } + + rowsProgress.SetCompleted() + diffItems.SetCompleted() + + return &allDiffs, nil +} diff --git a/pkg/db/v4/store/store_test.go b/pkg/db/v4/store/store_test.go new file mode 100644 index 00000000..7c1c8886 --- /dev/null +++ b/pkg/db/v4/store/store_test.go @@ -0,0 +1,1377 @@ +package store + +import ( + "encoding/json" + "sort" + "testing" + "time" + + "github.com/go-test/deep" + "github.com/stretchr/testify/assert" + + v4 "github.com/anchore/grype-db/pkg/db/v4" + "github.com/anchore/grype-db/pkg/db/v4/store/model" +) + +func assertIDReader(t *testing.T, reader v4.IDReader, expected v4.ID) { + t.Helper() + if actual, err := reader.GetID(); err != nil { + t.Fatalf("failed to get ID: %+v", err) + } else { + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } +} + +func TestStore_GetID_SetID(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + expected := v4.ID{ + BuildTimestamp: time.Now().UTC(), + SchemaVersion: 2, + } + + if err = s.SetID(expected); err != nil { + t.Fatalf("failed to set ID: %+v", err) + } + + assertIDReader(t, s, expected) + +} + +func assertVulnerabilityReader(t *testing.T, reader v4.VulnerabilityStoreReader, namespace, name string, expected []v4.Vulnerability) { + if actual, err := reader.GetVulnerability(namespace, name); err != nil { + t.Fatalf("failed to get Vulnerability: %+v", err) + } else { + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulns: %d", len(actual)) + } + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerability_SetVulnerability(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := []v4.Vulnerability{ + { + ID: "my-cve-33333", + PackageName: "package-name-2", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v4.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v4.Fix{ + Versions: []string{"2.0.1"}, + State: v4.FixedState, + }, + }, + { + ID: 
"my-other-cve-33333", + PackageName: "package-name-3", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v4.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v4.Fix{ + State: v4.NotFixedState, + }, + }, + } + + expected := []v4.Vulnerability{ + { + ID: "my-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v4.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v4.Fix{ + Versions: []string{"1.0.1"}, + State: v4.FixedState, + }, + }, + { + ID: "my-other-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: []v4.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v4.Fix{ + Versions: []string{"4.0.5"}, + State: v4.FixedState, + }, + }, + { + ID: "yet-another-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1000.0.0", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: nil, + Fix: v4.Fix{ + Versions: []string{"1000.0.1"}, + State: v4.FixedState, + }, + }, + { + ID: "yet-another-cve-with-advisories", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1000.0.0", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: nil, + Fix: v4.Fix{ + Versions: []string{"1000.0.1"}, + State: v4.FixedState, + }, + Advisories: []v4.Advisory{{ID: "ABC-12345", Link: "https://abc.xyz"}}, + }, + } + + total := append(expected, extra...) 
+ + if err = s.AddVulnerability(total...); err != nil { + t.Fatalf("failed to set Vulnerability: %+v", err) + } + + var allEntries []model.VulnerabilityModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityReader(t, s, expected[0].Namespace, expected[0].PackageName, expected) + +} + +func assertVulnerabilityMetadataReader(t *testing.T, reader v4.VulnerabilityMetadataStoreReader, id, namespace string, expected v4.VulnerabilityMetadata) { + if actual, err := reader.GetVulnerabilityMetadata(id, namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else if actual == nil { + t.Fatalf("no metadata returned for id=%q namespace=%q", id, namespace) + } else { + sortMetadataCvss(actual.Cvss) + sortMetadataCvss(expected.Cvss) + + // make sure they both have the same number of CVSS entries - preventing a panic on later assertions + assert.Len(t, expected.Cvss, len(actual.Cvss)) + for idx, actualCvss := range actual.Cvss { + assert.Equal(t, actualCvss.Vector, expected.Cvss[idx].Vector) + assert.Equal(t, actualCvss.Version, expected.Cvss[idx].Version) + assert.Equal(t, actualCvss.Metrics, expected.Cvss[idx].Metrics) + + actualVendor, err := json.Marshal(actualCvss.VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + expectedVendor, err := json.Marshal(expected.Cvss[idx].VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + assert.Equal(t, string(actualVendor), string(expectedVendor)) + + } + + // nil the Cvss field because it is an interface - verification of Cvss + // has already happened at this point + expected.Cvss = nil + actual.Cvss = nil + assert.Equal(t, &expected, actual) + } + +} + +func sortMetadataCvss(cvss []v4.Cvss) { + sort.Slice(cvss, func(i, j int) bool { + // first, sort by Vector + if cvss[i].Vector > cvss[j].Vector { + return true + } + if cvss[i].Vector < cvss[j].Vector { + return false + } + // then try to sort by BaseScore if Vector is the same + return cvss[i].Metrics.BaseScore < cvss[j].Metrics.BaseScore + }) +} + +// CustomMetadata is effectively a noop, its values aren't meaningful and are +// mostly useful to ensure that any type can be stored and then retrieved for +// assertion in these test cases where custom vendor CVSS scores are used +type CustomMetadata struct { + SuperScore string + Vendor string +} + +func TestStore_GetVulnerabilityMetadata_SetVulnerabilityMetadata(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + total := []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + VendorMetadata: CustomMetadata{ + Vendor: "redhat", + SuperScore: "1000", + }, + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 1.1, + 2.2, + 3.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NOT", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.3, + 2.1, + 3.2, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NICE", + VendorMetadata: nil, + }, + }, + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + 
Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + } + + if err = s.AddVulnerabilityMetadata(total...); err != nil { + t.Fatalf("failed to set metadata: %+v", err) + } + + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + +} + +func TestStore_MergeVulnerabilityMetadata(t *testing.T) { + tests := []struct { + name string + add []v4.VulnerabilityMetadata + expected v4.VulnerabilityMetadata + err bool + }{ + { + name: "go-case", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "merge-links", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://google.com"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://yahoo.com"}, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re", "https://google.com", "https://yahoo.com"}, + Cvss: []v4.Cvss{}, + }, + }, + { + name: "bad-severity", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "meh, push that for next tuesday...", + URLs: []string{"https://redhat.com"}, + }, + }, + err: true, + }, + { + name: "mismatch-description", + err: true, + add: []v4.VulnerabilityMetadata{ + { + + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: 
"record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + }, + { + name: "mismatch-cvss2", + err: false, + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + }, + }, + }, + { + name: "mismatch-cvss3", + err: false, + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + // add each metadata in order + var theErr error + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + theErr = err + break + } + } + + if test.err && theErr == nil { + t.Fatalf("expected error but did not get one") + } else if !test.err && theErr != nil { + t.Fatalf("expected no error but got one: %+v", theErr) + } else if test.err && theErr != nil { + // test pass... + return + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + // get the resulting metadata object + if actual, err := s.GetVulnerabilityMetadata(test.expected.ID, test.expected.Namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + diffs := deep.Equal(&test.expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + }) + } +} + +func TestCvssScoresInMetadata(t *testing.T) { + tests := []struct { + name string + add []v4.VulnerabilityMetadata + expected v4.VulnerabilityMetadata + }{ + { + name: "append-cvss", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "append-vendor-cvss", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "2.0", + Metrics: v4.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + { + name: "avoids-duplicate-cvss", + add: []v4.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v4.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v4.Cvss{ + { + Version: "3.0", + Metrics: v4.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create s: %+v", err) + } + + // add each metadata in order + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + t.Fatalf("unable to s vulnerability metadata: %+v", err) + } + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityMetadataReader(t, s, test.expected.ID, test.expected.Namespace, test.expected) + }) + } +} + +func assertVulnerabilityMatchExclusionReader(t *testing.T, reader v4.VulnerabilityMatchExclusionStoreReader, id string, expected []v4.VulnerabilityMatchExclusion) { + if actual, err := reader.GetVulnerabilityMatchExclusion(id); err != nil { + t.Fatalf("failed to get Vulnerability Match Exclusion: %+v", err) + } else { + t.Logf("%+v", actual) + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulnerability match exclusions: expected=%d, actual=%d", len(expected), len(actual)) + } + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerabilityMatchExclusion_SetVulnerabilityMatchExclusion(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := 
[]v4.VulnerabilityMatchExclusion{ + { + ID: "CVE-1234-14567", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "abc", + Language: "ruby", + Version: "1.2.3", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "abc", + Language: "ruby", + Version: "4.5.6", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "time-1", + Language: "ruby", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Name: "abc.xyz:nothing-of-interest", + Type: "java-archive", + }, + }, + }, + Justification: "Because I said so.", + }, + { + ID: "CVE-1234-10", + Constraints: nil, + Justification: "Because I said so.", + }, + } + + expected := []v4.VulnerabilityMatchExclusion{ + { + ID: "CVE-1234-9999999", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "python", + Name: "abc", + Version: "1.2.3", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "python", + Name: "abc", + Version: "4.5.6", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "python", + Name: "time-245", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Type: "npm", + Name: "everything", + }, + }, + }, + Justification: "This is a false positive", + }, + { + ID: "CVE-1234-9999999", + Constraints: []v4.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "go", + Type: "go-module", + Name: "abc", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + Namespace: "some-other-namespace:cpe", + }, + Package: v4.PackageExclusionConstraint{ + Language: "go", + Type: "go-module", + Name: "abc", + }, + }, + { + Vulnerability: v4.VulnerabilityExclusionConstraint{ + FixState: "wont-fix", + }, + }, + }, + Justification: "This is also a false positive", + }, + { + ID: "CVE-1234-9999999", + Justification: "global exclude", + }, + } + + total := append(expected, extra...) 
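+	// Write both the expected and extra exclusions, then verify the table holds one
+	// row per exclusion and that querying by the first expected ID returns only the
+	// exclusions recorded under that ID (the reader helper deep-compares each entry).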
+ + if err = s.AddVulnerabilityMatchExclusion(total...); err != nil { + t.Fatalf("failed to set Vulnerability Match Exclusion: %+v", err) + } + + var allEntries []model.VulnerabilityMatchExclusionModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + assertVulnerabilityMatchExclusionReader(t, s, expected[0].ID, expected) +} + +func Test_DiffStore(t *testing.T) { + //GIVEN + dbTempFile := t.TempDir() + + s1, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + dbTempFile = t.TempDir() + + s2, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + baseVulns := []v4.Vulnerability{ + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.UnknownFixState, + }, + }, + { + Namespace: "nuget", + ID: "GHSA-****-******", + PackageName: "nuget:net", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.UnknownFixState, + }, + }, + { + Namespace: "hex", + ID: "GHSA-^^^^-^^^^^^", + PackageName: "hex:esbuild", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:hex:esbuild:*:*:*:*:*:*"}, + }, + } + baseMetadata := []v4.VulnerabilityMetadata{ + { + Namespace: "nuget", + ID: "GHSA-****-******", + DataSource: "nvd", + }, + } + targetVulns := []v4.Vulnerability{ + { + Namespace: "github:language:python", + ID: "CVE-123-4567", + PackageName: "pypi:requests", + VersionConstraint: "< 2.0 >= 1.29", + CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, + }, + { + Namespace: "github:language:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:nomad", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:nomad:*:*:*:*:*"}, + }, + { + Namespace: "github:language:go", + ID: "GHSA-....-....", + PackageName: "hashicorp:n", + VersionConstraint: "< 2.0 >= 1.17", + CPEs: []string{"cpe:2.3:golang:hashicorp:n:*:*:*:*:*"}, + }, + { + Namespace: "npm", + ID: "CVE-123-7654", + PackageName: "npm:axios", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.WontFixState, + }, + }, + { + Namespace: "nuget", + ID: "GHSA-****-******", + PackageName: "nuget:net", + VersionConstraint: "< 3.0 >= 2.17", + CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, + Fix: v4.Fix{ + State: v4.UnknownFixState, + }, + }, + } + expectedDiffs := []v4.Diff{ + { + Reason: v4.DiffChanged, + ID: "CVE-123-4567", + Namespace: "github:language:python", + Packages: []string{"pypi:requests"}, + }, + { + Reason: v4.DiffChanged, + ID: "CVE-123-7654", + Namespace: "npm", + Packages: []string{"npm:axios"}, + }, + { + Reason: v4.DiffRemoved, + ID: "GHSA-****-******", + Namespace: "nuget", + Packages: []string{"nuget:net"}, + }, + { + Reason: v4.DiffAdded, + ID: "GHSA-....-....", + Namespace: "github:language:go", + Packages: []string{"hashicorp:n", "hashicorp:nomad"}, + }, + { + Reason: v4.DiffRemoved, + 
ID: "GHSA-^^^^-^^^^^^", + Namespace: "hex", + Packages: []string{"hex:esbuild"}, + }, + } + + for _, vuln := range baseVulns { + s1.AddVulnerability(vuln) + } + for _, vuln := range targetVulns { + s2.AddVulnerability(vuln) + } + for _, meta := range baseMetadata { + s1.AddVulnerabilityMetadata(meta) + } + + //WHEN + result, err := s1.DiffStore(s2) + + //THEN + sort.SliceStable(*result, func(i, j int) bool { + return (*result)[i].ID < (*result)[j].ID + }) + for i := range *result { + sort.Strings((*result)[i].Packages) + } + + assert.NoError(t, err) + assert.Equal(t, expectedDiffs, *result) +} diff --git a/pkg/db/v4/vulnerability.go b/pkg/db/v4/vulnerability.go new file mode 100644 index 00000000..2104b4a7 --- /dev/null +++ b/pkg/db/v4/vulnerability.go @@ -0,0 +1,96 @@ +package v4 + +import ( + "sort" + "strings" +) + +// Vulnerability represents the minimum data fields necessary to perform package-to-vulnerability matching. This can represent a CVE, 3rd party advisory, or any source that relates back to a CVE. +type Vulnerability struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + PackageName string `json:"package_name"` // The name of the package that is vulnerable + Namespace string `json:"namespace"` // The ecosystem where the package resides + VersionConstraint string `json:"version_constraint"` // The version range which the given package is vulnerable + VersionFormat string `json:"version_format"` // The format which all version fields should be interpreted as + CPEs []string `json:"cpes"` // The CPEs which are considered vulnerable + RelatedVulnerabilities []VulnerabilityReference `json:"related_vulnerabilities"` // Other Vulnerabilities that are related to this one (e.g. GHSA relate to CVEs, or how distro CVE relates to NVD record) + Fix Fix `json:"fix"` // All information about fixed versions + Advisories []Advisory `json:"advisories"` // Any vendor advisories about fixes or other notifications about this vulnerability +} + +type VulnerabilityReference struct { + ID string `json:"id"` + Namespace string `json:"namespace"` +} + +//nolint:gocognit +func (v *Vulnerability) Equal(vv Vulnerability) bool { + equal := v.ID == vv.ID && + v.PackageName == vv.PackageName && + v.Namespace == vv.Namespace && + v.VersionConstraint == vv.VersionConstraint && + v.VersionFormat == vv.VersionFormat && + len(v.CPEs) == len(vv.CPEs) && + len(v.RelatedVulnerabilities) == len(vv.RelatedVulnerabilities) && + len(v.Advisories) == len(vv.Advisories) && + v.Fix.State == vv.Fix.State && + len(v.Fix.Versions) == len(vv.Fix.Versions) + + if !equal { + return false + } + + sort.Strings(v.CPEs) + sort.Strings(vv.CPEs) + for idx, cpe := range v.CPEs { + if cpe != vv.CPEs[idx] { + return false + } + } + + sortedBaseRelVulns, sortedTargetRelVulns := sortRelatedVulns(v.RelatedVulnerabilities), sortRelatedVulns(vv.RelatedVulnerabilities) + for idx, item := range sortedBaseRelVulns { + if item != sortedTargetRelVulns[idx] { + return false + } + } + sortedBaseAdvisories, sortedTargetAdvisories := sortAdvisories(v.Advisories), sortAdvisories(vv.Advisories) + for idx, item := range sortedBaseAdvisories { + if item != sortedTargetAdvisories[idx] { + return false + } + } + sort.Strings(v.Fix.Versions) + sort.Strings(vv.Fix.Versions) + for idx, item := range v.Fix.Versions { + if item != vv.Fix.Versions[idx] { + return false + } + } + + return true +} + +func sortRelatedVulns(vulns []VulnerabilityReference) []VulnerabilityReference { + sort.SliceStable(vulns, func(i, j int) bool { + b1, 
b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(vulns[i].ID) + b1.WriteString(vulns[i].Namespace) + b2.WriteString(vulns[j].ID) + b2.WriteString(vulns[j].Namespace) + return b1.String() < b2.String() + }) + return vulns +} + +func sortAdvisories(advisories []Advisory) []Advisory { + sort.SliceStable(advisories, func(i, j int) bool { + b1, b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(advisories[i].ID) + b1.WriteString(advisories[i].Link) + b2.WriteString(advisories[j].ID) + b2.WriteString(advisories[j].Link) + return b1.String() < b2.String() + }) + return advisories +} diff --git a/pkg/db/v4/vulnerability_match_exclusion.go b/pkg/db/v4/vulnerability_match_exclusion.go new file mode 100644 index 00000000..6546c00a --- /dev/null +++ b/pkg/db/v4/vulnerability_match_exclusion.go @@ -0,0 +1,130 @@ +package v4 + +import ( + "encoding/json" +) + +// VulnerabilityMatchExclusion represents the minimum data fields necessary to automatically filter certain +// vulnerabilities from match results based on the specified constraints. +type VulnerabilityMatchExclusion struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + Constraints []VulnerabilityMatchExclusionConstraint `json:"constraints,omitempty"` // The constraints under which the exclusion applies + Justification string `json:"justification"` // Justification for the exclusion +} + +// VulnerabilityMatchExclusionConstraint describes criteria for which matches should be excluded +type VulnerabilityMatchExclusionConstraint struct { + Vulnerability VulnerabilityExclusionConstraint `json:"vulnerability,omitempty"` // Vulnerability exclusion criteria + Package PackageExclusionConstraint `json:"package,omitempty"` // Package exclusion criteria + ExtraFields map[string]interface{} `json:"-"` +} + +func (c VulnerabilityMatchExclusionConstraint) Usable() bool { + return len(c.ExtraFields) == 0 && c.Vulnerability.Usable() && c.Package.Usable() +} + +func (c *VulnerabilityMatchExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _vulnerabilityMatchExclusionConstraint VulnerabilityMatchExclusionConstraint + + // Unmarshal into an instance of the new type. + var _c _vulnerabilityMatchExclusionConstraint + if err := json.Unmarshal(data, &_c); err != nil { + return err + } + + if err := json.Unmarshal(data, &_c.ExtraFields); err != nil { + return err + } + + delete(_c.ExtraFields, "vulnerability") + delete(_c.ExtraFields, "package") + + if len(_c.ExtraFields) == 0 { + _c.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. + *c = VulnerabilityMatchExclusionConstraint(_c) + return nil +} + +// VulnerabilityExclusionConstraint describes criteria for excluding a match based on additional vulnerability components +type VulnerabilityExclusionConstraint struct { + Namespace string `json:"namespace,omitempty"` // Vulnerability namespace + FixState FixState `json:"fix_state,omitempty"` // Vulnerability fix state + ExtraFields map[string]interface{} `json:"-"` +} + +func (v VulnerabilityExclusionConstraint) Usable() bool { + return len(v.ExtraFields) == 0 +} + +func (v *VulnerabilityExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _vulnerabilityExclusionConstraint VulnerabilityExclusionConstraint + + // Unmarshal into an instance of the new type. 
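+	// The same document is then decoded a second time into the ExtraFields map so
+	// that every key is captured; the known keys ("namespace", "fix_state") are
+	// deleted afterwards, leaving only unrecognized fields behind. For example, a
+	// constraint of {"namespace": "nvd:cpe", "foo": "bar"} would retain
+	// {"foo": "bar"} in ExtraFields and cause Usable() to report false.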
+ var _v _vulnerabilityExclusionConstraint + if err := json.Unmarshal(data, &_v); err != nil { + return err + } + + if err := json.Unmarshal(data, &_v.ExtraFields); err != nil { + return err + } + + delete(_v.ExtraFields, "namespace") + delete(_v.ExtraFields, "fix_state") + + if len(_v.ExtraFields) == 0 { + _v.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. + *v = VulnerabilityExclusionConstraint(_v) + return nil +} + +// PackageExclusionConstraint describes criteria for excluding a match based on package components +type PackageExclusionConstraint struct { + Name string `json:"name,omitempty"` // Package name + Language string `json:"language,omitempty"` // The language ecosystem for a package + Type string `json:"type,omitempty"` // Package type + Version string `json:"version,omitempty"` // Package version + Location string `json:"location,omitempty"` // Package location + ExtraFields map[string]interface{} `json:"-"` +} + +func (p PackageExclusionConstraint) Usable() bool { + return len(p.ExtraFields) == 0 +} + +func (p *PackageExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _packageExclusionConstraint PackageExclusionConstraint + + // Unmarshal into an instance of the new type. + var _p _packageExclusionConstraint + if err := json.Unmarshal(data, &_p); err != nil { + return err + } + + if err := json.Unmarshal(data, &_p.ExtraFields); err != nil { + return err + } + + delete(_p.ExtraFields, "name") + delete(_p.ExtraFields, "language") + delete(_p.ExtraFields, "type") + delete(_p.ExtraFields, "version") + delete(_p.ExtraFields, "location") + + if len(_p.ExtraFields) == 0 { + _p.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. + *p = PackageExclusionConstraint(_p) + return nil +} diff --git a/pkg/db/v4/vulnerability_match_exclusion_store.go b/pkg/db/v4/vulnerability_match_exclusion_store.go new file mode 100644 index 00000000..f2b3ebb5 --- /dev/null +++ b/pkg/db/v4/vulnerability_match_exclusion_store.go @@ -0,0 +1,14 @@ +package v4 + +type VulnerabilityMatchExclusionStore interface { + VulnerabilityMatchExclusionStoreReader + VulnerabilityMatchExclusionStoreWriter +} + +type VulnerabilityMatchExclusionStoreReader interface { + GetVulnerabilityMatchExclusion(id string) ([]VulnerabilityMatchExclusion, error) +} + +type VulnerabilityMatchExclusionStoreWriter interface { + AddVulnerabilityMatchExclusion(exclusion ...VulnerabilityMatchExclusion) error +} diff --git a/pkg/db/v4/vulnerability_metadata.go b/pkg/db/v4/vulnerability_metadata.go new file mode 100644 index 00000000..c13f9090 --- /dev/null +++ b/pkg/db/v4/vulnerability_metadata.go @@ -0,0 +1,76 @@ +package v4 + +import "reflect" + +// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching. 
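+// Records are keyed by the (ID, Namespace) pair: when another record for the same
+// pair is added to the store, URLs are combined and new CVSS entries are appended
+// (identical ones are not duplicated), while a conflicting severity or description
+// is rejected with an error, as exercised by the store merge tests above.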
+type VulnerabilityMetadata struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + Namespace string `json:"namespace"` // Where this entry is valid within + DataSource string `json:"data_source"` // A URL where the data was sourced from + RecordSource string `json:"record_source"` // The source of the vulnerability information (relative to the immediate upstream in the enterprise feedgroup) + Severity string `json:"severity"` // How severe the vulnerability is (valid values are defined by upstream sources currently) + URLs []string `json:"urls"` // URLs to get more information about the vulnerability or advisory + Description string `json:"description"` // Description of the vulnerability + Cvss []Cvss `json:"cvss"` // Common Vulnerability Scoring System values +} + +// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability. +type Cvss struct { + // VendorMetadata captures non-standard CVSS fields that vendors can sometimes + // include when providing CVSS information. This vendor-specific metadata type + // allows to capture that data for persisting into the database + VendorMetadata interface{} `json:"vendor_metadata"` + Metrics CvssMetrics `json:"metrics"` + Vector string `json:"vector"` // A textual representation of the metric values used to determine the score + Version string `json:"version"` // The version of the CVSS spec, for example 2.0, 3.0, or 3.1 +} + +// CvssMetrics are the quantitative values that make up a CVSS score. +type CvssMetrics struct { + // BaseScore ranges from 0 - 10 and defines qualities intrinsic to the severity of a vulnerability. + BaseScore float64 `json:"base_score"` + // ExploitabilityScore is a pointer to avoid having a 0 value by default. + // It is an indicator of how easy it may be for an attacker to exploit + // a vulnerability + ExploitabilityScore *float64 `json:"exploitability_score"` + // ImpactScore represents the effects of an exploited vulnerability + // relative to compromise in confidentiality, integrity, and availability. 
+ // It is an optional parameter, so that is why it is a pointer instead of + // a regular field + ImpactScore *float64 `json:"impact_score"` +} + +func NewCvssMetrics(baseScore, exploitabilityScore, impactScore float64) CvssMetrics { + return CvssMetrics{ + BaseScore: baseScore, + ExploitabilityScore: &exploitabilityScore, + ImpactScore: &impactScore, + } +} + +func (v *VulnerabilityMetadata) Equal(vv VulnerabilityMetadata) bool { + equal := v.ID == vv.ID && + v.Namespace == vv.Namespace && + v.DataSource == vv.DataSource && + v.RecordSource == vv.RecordSource && + v.Severity == vv.Severity && + v.Description == vv.Description && + len(v.URLs) == len(vv.URLs) && + len(v.Cvss) == len(vv.Cvss) + + if !equal { + return false + } + for idx, cpe := range v.URLs { + if cpe != vv.URLs[idx] { + return false + } + } + for idx, item := range v.Cvss { + if !reflect.DeepEqual(item, vv.Cvss[idx]) { + return false + } + } + + return true +} diff --git a/pkg/db/v4/vulnerability_metadata_store.go b/pkg/db/v4/vulnerability_metadata_store.go new file mode 100644 index 00000000..9f99c46e --- /dev/null +++ b/pkg/db/v4/vulnerability_metadata_store.go @@ -0,0 +1,15 @@ +package v4 + +type VulnerabilityMetadataStore interface { + VulnerabilityMetadataStoreReader + VulnerabilityMetadataStoreWriter +} + +type VulnerabilityMetadataStoreReader interface { + GetVulnerabilityMetadata(id, namespace string) (*VulnerabilityMetadata, error) + GetAllVulnerabilityMetadata() (*[]VulnerabilityMetadata, error) +} + +type VulnerabilityMetadataStoreWriter interface { + AddVulnerabilityMetadata(metadata ...VulnerabilityMetadata) error +} diff --git a/pkg/db/v4/vulnerability_store.go b/pkg/db/v4/vulnerability_store.go new file mode 100644 index 00000000..84dd3c9f --- /dev/null +++ b/pkg/db/v4/vulnerability_store.go @@ -0,0 +1,21 @@ +package v4 + +const VulnerabilityStoreFileName = "vulnerability.db" + +type VulnerabilityStore interface { + VulnerabilityStoreReader + VulnerabilityStoreWriter +} + +type VulnerabilityStoreReader interface { + // GetVulnerabilityNamespaces retrieves unique list of vulnerability namespaces + GetVulnerabilityNamespaces() ([]string, error) + // GetVulnerability retrieves vulnerabilities by namespace and package + GetVulnerability(namespace, packageName string) ([]Vulnerability, error) + GetAllVulnerabilities() (*[]Vulnerability, error) +} + +type VulnerabilityStoreWriter interface { + // AddVulnerability inserts a new record of a vulnerability into the store + AddVulnerability(vulnerabilities ...Vulnerability) error +} diff --git a/pkg/db/v5/advisory.go b/pkg/db/v5/advisory.go new file mode 100644 index 00000000..f94176db --- /dev/null +++ b/pkg/db/v5/advisory.go @@ -0,0 +1,7 @@ +package v5 + +// Advisory represents published statements regarding a vulnerability (and potentially about it's resolution). +type Advisory struct { + ID string `json:"id"` + Link string `json:"link"` +} diff --git a/pkg/db/v5/fix.go b/pkg/db/v5/fix.go new file mode 100644 index 00000000..84f19be1 --- /dev/null +++ b/pkg/db/v5/fix.go @@ -0,0 +1,16 @@ +package v5 + +type FixState string + +const ( + UnknownFixState FixState = "unknown" + FixedState FixState = "fixed" + NotFixedState FixState = "not-fixed" + WontFixState FixState = "wont-fix" +) + +// Fix represents all information about known fixes for a stated vulnerability. 
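+// For example, an entry with State: FixedState and Versions: []string{"2.17.1"}
+// (illustrative values) records that the vulnerability was fixed in 2.17.1, while
+// a WontFixState entry typically carries no fix versions.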
+type Fix struct { + Versions []string `json:"versions"` // The version(s) which this particular vulnerability was fixed in + State FixState `json:"state"` +} diff --git a/pkg/db/v5/id.go b/pkg/db/v5/id.go new file mode 100644 index 00000000..98aabb2a --- /dev/null +++ b/pkg/db/v5/id.go @@ -0,0 +1,28 @@ +package v5 + +import ( + "time" +) + +// ID represents identifying information for a DB and the data it contains. +type ID struct { + // BuildTimestamp is the timestamp used to define the age of the DB, ideally including the age of the data + // contained in the DB, not just when the DB file was created. + BuildTimestamp time.Time `json:"build_timestamp"` + SchemaVersion int `json:"schema_version"` +} + +type IDReader interface { + GetID() (*ID, error) +} + +type IDWriter interface { + SetID(ID) error +} + +func NewID(age time.Time) ID { + return ID{ + BuildTimestamp: age.UTC(), + SchemaVersion: SchemaVersion, + } +} diff --git a/pkg/db/v5/namespace/cpe/namespace.go b/pkg/db/v5/namespace/cpe/namespace.go new file mode 100644 index 00000000..05c70617 --- /dev/null +++ b/pkg/db/v5/namespace/cpe/namespace.go @@ -0,0 +1,54 @@ +package cpe + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/stock" +) + +const ID = "cpe" + +type Namespace struct { + provider string + resolver resolver.Resolver +} + +func NewNamespace(provider string) *Namespace { + return &Namespace{ + provider: provider, + resolver: &stock.Resolver{}, + } +} + +func FromString(namespaceStr string) (*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create CPE namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 2 { + return nil, fmt.Errorf("unable to create CPE namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create CPE namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + return NewNamespace(components[0]), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + return fmt.Sprintf("%s:%s", n.provider, ID) +} diff --git a/pkg/db/v5/namespace/cpe/namespace_test.go b/pkg/db/v5/namespace/cpe/namespace_test.go new file mode 100644 index 00000000..e4be6dc1 --- /dev/null +++ b/pkg/db/v5/namespace/cpe/namespace_test.go @@ -0,0 +1,51 @@ +package cpe + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: "abc.xyz:cpe", + result: NewNamespace("abc.xyz"), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := []struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create CPE namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create CPE namespace from single-component: incorrect number of components", + }, + { + namespaceString: "too:many:components", + errorMessage: "unable to create CPE namespace from too:many:components: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type", + errorMessage: "unable to create CPE 
namespace from wrong:namespace_type: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v5/namespace/distro/namespace.go b/pkg/db/v5/namespace/distro/namespace.go new file mode 100644 index 00000000..2106b568 --- /dev/null +++ b/pkg/db/v5/namespace/distro/namespace.go @@ -0,0 +1,67 @@ +package distro + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/stock" + "github.com/anchore/grype/grype/distro" +) + +const ID = "distro" + +type Namespace struct { + provider string + distroType distro.Type + version string + resolver resolver.Resolver +} + +func NewNamespace(provider string, distroType distro.Type, version string) *Namespace { + return &Namespace{ + provider: provider, + distroType: distroType, + version: version, + resolver: &stock.Resolver{}, + } +} + +func FromString(namespaceStr string) (*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create distro namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 4 { + return nil, fmt.Errorf("unable to create distro namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create distro namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + return NewNamespace(components[0], distro.Type(components[2]), components[3]), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) DistroType() distro.Type { + return n.distroType +} + +func (n *Namespace) Version() string { + return n.version +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + return fmt.Sprintf("%s:%s:%s:%s", n.provider, ID, n.distroType, n.version) +} diff --git a/pkg/db/v5/namespace/distro/namespace_test.go b/pkg/db/v5/namespace/distro/namespace_test.go new file mode 100644 index 00000000..f916d66b --- /dev/null +++ b/pkg/db/v5/namespace/distro/namespace_test.go @@ -0,0 +1,85 @@ +package distro + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + grypeDistro "github.com/anchore/grype/grype/distro" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: "alpine:distro:alpine:3.15", + result: NewNamespace("alpine", grypeDistro.Alpine, "3.15"), + }, + { + namespaceString: "redhat:distro:redhat:8", + result: NewNamespace("redhat", grypeDistro.RedHat, "8"), + }, + { + namespaceString: "abc.xyz:distro:unknown:abcd~~~", + result: NewNamespace("abc.xyz", grypeDistro.Type("unknown"), "abcd~~~"), + }, + { + namespaceString: "msrc:distro:windows:10111", + result: NewNamespace("msrc", grypeDistro.Type("windows"), "10111"), + }, + { + namespaceString: "amazon:distro:amazonlinux:2022", + result: NewNamespace("amazon", grypeDistro.AmazonLinux, "2022"), + }, + { + namespaceString: "amazon:distro:amazonlinux:2", + result: NewNamespace("amazon", grypeDistro.AmazonLinux, "2"), + }, + { + namespaceString: "wolfi:distro:wolfi:rolling", + result: NewNamespace("wolfi", grypeDistro.Wolfi, "rolling"), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := 
[]struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create distro namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create distro namespace from single-component: incorrect number of components", + }, + { + namespaceString: "two:components", + errorMessage: "unable to create distro namespace from two:components: incorrect number of components", + }, + { + namespaceString: "still:not:enough", + errorMessage: "unable to create distro namespace from still:not:enough: incorrect number of components", + }, + { + namespaceString: "too:many:components:a:b", + errorMessage: "unable to create distro namespace from too:many:components:a:b: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type:a:b", + errorMessage: "unable to create distro namespace from wrong:namespace_type:a:b: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v5/namespace/from_string.go b/pkg/db/v5/namespace/from_string.go new file mode 100644 index 00000000..0d6c1a6c --- /dev/null +++ b/pkg/db/v5/namespace/from_string.go @@ -0,0 +1,34 @@ +package namespace + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v5/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v5/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v5/namespace/language" +) + +func FromString(namespaceStr string) (Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) < 1 { + return nil, fmt.Errorf("unable to create namespace from %s: incorrect number of components", namespaceStr) + } + + switch components[1] { + case cpe.ID: + return cpe.FromString(namespaceStr) + case distro.ID: + return distro.FromString(namespaceStr) + case language.ID: + return language.FromString(namespaceStr) + default: + return nil, fmt.Errorf("unable to create namespace from %s: unknown type %s", namespaceStr, components[1]) + } +} diff --git a/pkg/db/v5/namespace/from_string_test.go b/pkg/db/v5/namespace/from_string_test.go new file mode 100644 index 00000000..bc4eda0b --- /dev/null +++ b/pkg/db/v5/namespace/from_string_test.go @@ -0,0 +1,50 @@ +package namespace + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/v5/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v5/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v5/namespace/language" + grypeDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromString(t *testing.T) { + tests := []struct { + namespaceString string + result Namespace + }{ + { + namespaceString: "github:language:python", + result: language.NewNamespace("github", syftPkg.Python, ""), + }, + { + namespaceString: "github:language:python:python", + result: language.NewNamespace("github", syftPkg.Python, syftPkg.PythonPkg), + }, + { + namespaceString: "debian:distro:debian:8", + result: distro.NewNamespace("debian", grypeDistro.Debian, "8"), + }, + { + namespaceString: "unknown:distro:amazonlinux:2022.15", + result: distro.NewNamespace("unknown", grypeDistro.AmazonLinux, "2022.15"), + }, + { + namespaceString: "ns-1:distro:unknowndistro:abcdefg~~~", + result: distro.NewNamespace("ns-1", 
grypeDistro.Type("unknowndistro"), "abcdefg~~~"), + }, + { + namespaceString: "abc.xyz:cpe", + result: cpe.NewNamespace("abc.xyz"), + }, + } + + for _, test := range tests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } +} diff --git a/pkg/db/v5/namespace/index.go b/pkg/db/v5/namespace/index.go new file mode 100644 index 00000000..1befdf47 --- /dev/null +++ b/pkg/db/v5/namespace/index.go @@ -0,0 +1,183 @@ +package namespace + +import ( + "fmt" + "regexp" + "strings" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/pkg/db/v5/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v5/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v5/namespace/language" + grypeDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +var alpineVersionRegularExpression = regexp.MustCompile(`^(\d+)\.(\d+)\.(\d+)$`) + +type Index struct { + all []Namespace + byLanguage map[syftPkg.Language][]*language.Namespace + byDistroKey map[string][]*distro.Namespace + cpe []*cpe.Namespace +} + +func FromStrings(namespaces []string) (*Index, error) { + all := make([]Namespace, 0) + byLanguage := make(map[syftPkg.Language][]*language.Namespace) + byDistroKey := make(map[string][]*distro.Namespace) + cpeNamespaces := make([]*cpe.Namespace, 0) + + for _, n := range namespaces { + ns, err := FromString(n) + + if err != nil { + log.Warnf("unable to create namespace object from namespace=%s: %+v", n, err) + continue + } + + all = append(all, ns) + + switch nsObj := ns.(type) { + case *language.Namespace: + l := nsObj.Language() + if _, ok := byLanguage[l]; !ok { + byLanguage[l] = make([]*language.Namespace, 0) + } + + byLanguage[l] = append(byLanguage[l], nsObj) + case *distro.Namespace: + distroKey := fmt.Sprintf("%s:%s", nsObj.DistroType(), nsObj.Version()) + if _, ok := byDistroKey[distroKey]; !ok { + byDistroKey[distroKey] = make([]*distro.Namespace, 0) + } + + byDistroKey[distroKey] = append(byDistroKey[distroKey], nsObj) + case *cpe.Namespace: + cpeNamespaces = append(cpeNamespaces, nsObj) + default: + log.Warnf("unable to index namespace=%s", n) + continue + } + } + + return &Index{ + all: all, + byLanguage: byLanguage, + byDistroKey: byDistroKey, + cpe: cpeNamespaces, + }, nil +} + +func (i *Index) NamespacesForLanguage(l syftPkg.Language) []*language.Namespace { + if _, ok := i.byLanguage[l]; ok { + return i.byLanguage[l] + } + + return nil +} + +//nolint:funlen,gocognit +func (i *Index) NamespacesForDistro(d *grypeDistro.Distro) []*distro.Namespace { + if d == nil { + return nil + } + + if d.IsRolling() { + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "rolling") + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + var versionSegments []int + if d.Version != nil { + versionSegments = d.Version.Segments() + } + + if len(versionSegments) > 0 { + // Alpine is a special case since we can only match on x.y.z + // after this things like x.y and x are valid namespace selections + if d.Type == grypeDistro.Alpine { + if v := getAlpineNamespace(i, d, versionSegments); v != nil { + return v + } + } + + // Next attempt a direct match on distro full name and version + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), d.FullVersion()) + + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + + if len(versionSegments) == 3 { + // Try with only first two version components + distroKey = fmt.Sprintf("%s:%d.%d", strings.ToLower(d.Type.String()), 
versionSegments[0], versionSegments[1]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + + // Try using only major version component + distroKey = fmt.Sprintf("%s:%d", strings.ToLower(d.Type.String()), versionSegments[0]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + // Fall back into the manual mapping logic derived from + // https://github.com/anchore/enterprise/blob/eb71bc6686b9f4c92347a4e95bec828cee879197/anchore_engine/services/policy_engine/__init__.py#L127-L140 + switch d.Type { + case grypeDistro.CentOS, grypeDistro.RedHat, grypeDistro.Fedora, grypeDistro.RockyLinux, grypeDistro.AlmaLinux, grypeDistro.Gentoo: + // TODO: there is no mapping of fedora version to RHEL latest version (only the name) + distroKey = fmt.Sprintf("%s:%d", strings.ToLower(string(grypeDistro.RedHat)), versionSegments[0]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + } + + // Fall back to alpine:edge if no version segments found + // alpine:edge is labeled as alpine-x.x_alphaYYYYMMDD + if versionSegments == nil && d.Type == grypeDistro.Alpine { + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "edge") + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + if versionSegments == nil && d.Type == grypeDistro.Debian && d.RawVersion == "unstable" { + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "unstable") + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + return nil +} + +func getAlpineNamespace(i *Index, d *grypeDistro.Distro, versionSegments []int) []*distro.Namespace { + // check if distro version matches x.y.z + if alpineVersionRegularExpression.MatchString(d.RawVersion) { + // Get the first two version components + // TODO: should we update the namespaces in db generation to match x.y.z here? 
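+	// e.g. a distro reporting version "3.15.4" produces the key "alpine:3.15",
+	// which resolves to any indexed "alpine:distro:alpine:3.15" namespaces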
+ distroKey := fmt.Sprintf("%s:%d.%d", strings.ToLower(d.Type.String()), versionSegments[0], versionSegments[1]) + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + } + + // If the version does not match x.y.z then it is edge + // In this case it would have - or _ alpha,beta,etc + // https://github.com/anchore/grype/issues/964#issuecomment-1290888755 + distroKey := fmt.Sprintf("%s:%s", strings.ToLower(d.Type.String()), "edge") + if v, ok := i.byDistroKey[distroKey]; ok { + return v + } + + return nil +} + +func (i *Index) CPENamespaces() []*cpe.Namespace { + return i.cpe +} diff --git a/pkg/db/v5/namespace/index_test.go b/pkg/db/v5/namespace/index_test.go new file mode 100644 index 00000000..da6c8b75 --- /dev/null +++ b/pkg/db/v5/namespace/index_test.go @@ -0,0 +1,369 @@ +package namespace + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/v5/namespace/cpe" + "github.com/anchore/grype-db/pkg/db/v5/namespace/distro" + "github.com/anchore/grype-db/pkg/db/v5/namespace/language" + osDistro "github.com/anchore/grype/grype/distro" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromStringSlice(t *testing.T) { + tests := []struct { + namespaces []string + byLanguage map[syftPkg.Language][]*language.Namespace + byDistroKey map[string][]*distro.Namespace + cpe []*cpe.Namespace + }{ + { + namespaces: []string{ + "github:language:python", + "github:language:python:conda", + "debian:distro:debian:8", + "alpine:distro:alpine:3.15", + "alpine:distro:alpine:3.16", + "msrc:distro:windows:12345", + "nvd:cpe", + "github:language:ruby", + "abc.xyz:language:ruby", + "github:language:rust", + "something:language:rust", + "1234.4567:language:unknown", + "---:cpe", + "another-provider:distro:alpine:3.15", + "another-provider:distro:alpine:3.16", + }, + byLanguage: map[syftPkg.Language][]*language.Namespace{ + syftPkg.Python: { + language.NewNamespace("github", syftPkg.Python, ""), + language.NewNamespace("github", syftPkg.Python, syftPkg.Type("conda")), + }, + syftPkg.Ruby: { + language.NewNamespace("github", syftPkg.Ruby, ""), + language.NewNamespace("abc.xyz", syftPkg.Ruby, ""), + }, + syftPkg.Rust: { + language.NewNamespace("github", syftPkg.Rust, ""), + language.NewNamespace("something", syftPkg.Rust, ""), + }, + syftPkg.Language("unknown"): { + language.NewNamespace("1234.4567", syftPkg.Language("unknown"), ""), + }, + }, + byDistroKey: map[string][]*distro.Namespace{ + "debian:8": { + distro.NewNamespace("debian", osDistro.Debian, "8"), + }, + "alpine:3.15": { + distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), + distro.NewNamespace("another-provider", osDistro.Alpine, "3.15"), + }, + "alpine:3.16": { + distro.NewNamespace("alpine", osDistro.Alpine, "3.16"), + distro.NewNamespace("another-provider", osDistro.Alpine, "3.16"), + }, + "windows:12345": { + distro.NewNamespace("msrc", osDistro.Windows, "12345"), + }, + }, + cpe: []*cpe.Namespace{ + cpe.NewNamespace("---"), + cpe.NewNamespace("nvd"), + }, + }, + } + + for _, test := range tests { + result, _ := FromStrings(test.namespaces) + assert.Len(t, result.all, len(test.namespaces)) + + for l, elems := range result.byLanguage { + assert.Contains(t, test.byLanguage, l) + assert.ElementsMatch(t, elems, test.byLanguage[l]) + } + + for d, elems := range result.byDistroKey { + assert.Contains(t, test.byDistroKey, d) + assert.ElementsMatch(t, elems, test.byDistroKey[d]) + } + + assert.ElementsMatch(t, result.cpe, test.cpe) + } +} + +func TestIndex_CPENamespaces(t *testing.T) { + 
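+	// CPENamespaces should surface only the namespaces of the cpe type; the
+	// distro-typed entry in the input is indexed separately and must not appear here.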
tests := []struct { + namespaces []string + cpe []*cpe.Namespace + }{ + { + namespaces: []string{"nvd:cpe", "another-source:cpe", "x:distro:y:10"}, + cpe: []*cpe.Namespace{ + cpe.NewNamespace("nvd"), + cpe.NewNamespace("another-source"), + }, + }, + } + + for _, test := range tests { + result, _ := FromStrings(test.namespaces) + assert.Len(t, result.all, len(test.namespaces)) + assert.ElementsMatch(t, result.CPENamespaces(), test.cpe) + } +} + +func newDistro(t *testing.T, dt osDistro.Type, v string, idLikes []string) *osDistro.Distro { + distro, err := osDistro.New(dt, v, idLikes...) + assert.NoError(t, err) + return distro +} + +func TestIndex_NamespacesForDistro(t *testing.T) { + namespaceIndex, err := FromStrings([]string{ + "alpine:distro:alpine:3.15", + "alpine:distro:alpine:3.16", + "alpine:distro:alpine:edge", + "debian:distro:debian:8", + "debian:distro:debian:unstable", + "amazon:distro:amazonlinux:2", + "amazon:distro:amazonlinux:2022", + "abc.xyz:distro:unknown:123.456", + "redhat:distro:redhat:8", + "redhat:distro:redhat:9", + "other-provider:distro:debian:8", + "other-provider:distro:redhat:9", + "suse:distro:sles:12.5", + "msrc:distro:windows:471816", + "ubuntu:distro:ubuntu:18.04", + "oracle:distro:oraclelinux:8", + "wolfi:distro:wolfi:rolling", + "chainguard:distro:chainguard:rolling", + "archlinux:distro:archlinux:rolling", + }) + + assert.NoError(t, err) + + tests := []struct { + name string + distro *osDistro.Distro + namespaces []*distro.Namespace + }{ + { + name: "alpine patch version matches minor version namespace", + distro: newDistro(t, osDistro.Alpine, "3.15.4", []string{"alpine"}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "3.15"), + }, + }, + { + name: "alpine minor version with no patch should match edge", + distro: newDistro(t, osDistro.Alpine, "3.16", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "alpine rc version with no patch should match edge", + distro: newDistro(t, osDistro.Alpine, "3.16.4-r4", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + + { + name: "alpine edge version matches edge namespace", + distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1_alpha20221002", IDLike: []string{"alpine"}}, + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "alpine raw version matches edge with - character", + distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1-alpha20221002", IDLike: []string{"alpine"}}, + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "alpine raw version matches edge with - character no sha", + distro: newDistro(t, osDistro.Alpine, "3.17.1-alpha", []string{"alpine"}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "alpine raw version matches edge with _ character no sha", + // we don't create a newDistro from this since parsing the version fails + distro: &osDistro.Distro{Type: osDistro.Alpine, Version: nil, RawVersion: "3.17.1_alpha", IDLike: []string{"alpine"}}, + namespaces: []*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "alpine malformed version matches no namespace", + distro: newDistro(t, osDistro.Alpine, "3.16.4.5", []string{}), + namespaces: 
[]*distro.Namespace{ + distro.NewNamespace("alpine", osDistro.Alpine, "edge"), + }, + }, + { + name: "Debian minor version matches debian and other-provider namespaces", + distro: newDistro(t, osDistro.Debian, "8.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("debian", osDistro.Debian, "8"), + distro.NewNamespace("other-provider", osDistro.Debian, "8"), + }, + }, + { + name: "Redhat minor version matches redhat and other-provider namespaces", + distro: newDistro(t, osDistro.RedHat, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + name: "Centos minor version matches redhat and other-provider namespaces", + distro: newDistro(t, osDistro.CentOS, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + name: "Alma Linux minor version matches redhat and other-provider namespaces", + distro: newDistro(t, osDistro.AlmaLinux, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + name: "Rocky Linux minor version matches redhat and other-provider namespaces", + distro: newDistro(t, osDistro.RockyLinux, "9.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("redhat", osDistro.RedHat, "9"), + distro.NewNamespace("other-provider", osDistro.RedHat, "9"), + }, + }, + { + name: "SLES minor version matches suse namespace", + distro: newDistro(t, osDistro.SLES, "12.5", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("suse", osDistro.SLES, "12.5"), + }, + }, + { + name: "Windows version object matches msrc namespace with exact version", + distro: newDistro(t, osDistro.Windows, "471816", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("msrc", osDistro.Windows, "471816"), + }, + }, + { + name: "Ubuntu minor semvar matches ubuntu namespace with exact version", + distro: newDistro(t, osDistro.Ubuntu, "18.04", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("ubuntu", osDistro.Ubuntu, "18.04"), + }, + }, + { + name: "Fedora minor semvar will not match a namespace", + distro: newDistro(t, osDistro.Fedora, "31.4", []string{}), + namespaces: nil, + }, + { + name: "Amazon Linux Major semvar matches amazon namespace with exact version", + distro: newDistro(t, osDistro.AmazonLinux, "2", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("amazon", osDistro.AmazonLinux, "2"), + }, + }, + { + name: "Amazon Linux year version matches amazon namespace with exact uear", + distro: newDistro(t, osDistro.AmazonLinux, "2022", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("amazon", osDistro.AmazonLinux, "2022"), + }, + }, + { + name: "Mariner minor semvar matches no namespace", + distro: newDistro(t, osDistro.Mariner, "20.1", []string{}), + namespaces: nil, + }, + { + name: "Oracle Linux Major semvar matches oracle namespace with exact version", + distro: newDistro(t, osDistro.OracleLinux, "8", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("oracle", osDistro.OracleLinux, "8"), + }, + }, + { + + name: "Arch Linux matches archlinux rolling namespace", + distro: newDistro(t, osDistro.ArchLinux, "", []string{}), + namespaces: []*distro.Namespace{ + 
distro.NewNamespace("archlinux", osDistro.ArchLinux, "rolling"), + }, + }, + { + + name: "Wolfi matches wolfi rolling namespace", + distro: newDistro(t, osDistro.Wolfi, "20221011", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("wolfi", osDistro.Wolfi, "rolling"), + }, + }, + { + + name: "Chainguard matches chainguard rolling namespace", + distro: newDistro(t, osDistro.Chainguard, "20230214", []string{}), + namespaces: []*distro.Namespace{ + distro.NewNamespace("chainguard", osDistro.Chainguard, "rolling"), + }, + }, + { + + name: "Gentoo doesn't match any namespace since the gentoo rolling namespace doesn't exist in index", + distro: newDistro(t, osDistro.Gentoo, "", []string{}), + namespaces: nil, + }, + { + name: "Open Suse Leap semvar matches no namespace", + distro: newDistro(t, osDistro.OpenSuseLeap, "100", []string{}), + namespaces: nil, + }, + { + name: "Photon minor semvar no namespace", + distro: newDistro(t, osDistro.Photon, "20.1", []string{}), + namespaces: nil, + }, + { + name: "Busybox minor semvar matches no namespace", + distro: newDistro(t, osDistro.Busybox, "20.1", []string{}), + namespaces: nil, + }, + { + name: "debian unstable", + distro: &osDistro.Distro{ + Type: osDistro.Debian, + RawVersion: "unstable", + Version: nil, + }, + namespaces: []*distro.Namespace{ + distro.NewNamespace("debian", osDistro.Debian, "unstable"), + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + namespaces := namespaceIndex.NamespacesForDistro(test.distro) + assert.Equal(t, test.namespaces, namespaces) + }) + } +} diff --git a/pkg/db/v5/namespace/language/namespace.go b/pkg/db/v5/namespace/language/namespace.go new file mode 100644 index 00000000..1609d325 --- /dev/null +++ b/pkg/db/v5/namespace/language/namespace.go @@ -0,0 +1,78 @@ +package language + +import ( + "errors" + "fmt" + "strings" + + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +const ID = "language" + +type Namespace struct { + provider string + language syftPkg.Language + packageType syftPkg.Type + resolver resolver.Resolver +} + +func NewNamespace(provider string, language syftPkg.Language, packageType syftPkg.Type) *Namespace { + r, _ := resolver.FromLanguage(language) + + return &Namespace{ + provider: provider, + language: language, + packageType: packageType, + resolver: r, + } +} + +func FromString(namespaceStr string) (*Namespace, error) { + if namespaceStr == "" { + return nil, errors.New("unable to create language namespace from empty string") + } + + components := strings.Split(namespaceStr, ":") + + if len(components) != 3 && len(components) != 4 { + return nil, fmt.Errorf("unable to create language namespace from %s: incorrect number of components", namespaceStr) + } + + if components[1] != ID { + return nil, fmt.Errorf("unable to create language namespace from %s: type %s is incorrect", namespaceStr, components[1]) + } + + packageType := "" + + if len(components) == 4 { + packageType = components[3] + } + + return NewNamespace(components[0], syftPkg.Language(components[2]), syftPkg.Type(packageType)), nil +} + +func (n *Namespace) Provider() string { + return n.provider +} + +func (n *Namespace) Language() syftPkg.Language { + return n.language +} + +func (n *Namespace) PackageType() syftPkg.Type { + return n.packageType +} + +func (n *Namespace) Resolver() resolver.Resolver { + return n.resolver +} + +func (n Namespace) String() string { + if n.packageType != "" { + return 
fmt.Sprintf("%s:%s:%s:%s", n.provider, ID, n.language, n.packageType) + } + + return fmt.Sprintf("%s:%s:%s", n.provider, ID, n.language) +} diff --git a/pkg/db/v5/namespace/language/namespace_test.go b/pkg/db/v5/namespace/language/namespace_test.go new file mode 100644 index 00000000..faad7bd5 --- /dev/null +++ b/pkg/db/v5/namespace/language/namespace_test.go @@ -0,0 +1,77 @@ +package language + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromString(t *testing.T) { + successTests := []struct { + namespaceString string + result *Namespace + }{ + { + namespaceString: "github:language:python", + result: NewNamespace("github", syftPkg.Python, ""), + }, + { + namespaceString: "github:language:ruby", + result: NewNamespace("github", syftPkg.Ruby, ""), + }, + { + namespaceString: "github:language:java", + result: NewNamespace("github", syftPkg.Java, ""), + }, + { + namespaceString: "github:language:rust", + result: NewNamespace("github", syftPkg.Rust, ""), + }, + { + namespaceString: "abc.xyz:language:something", + result: NewNamespace("abc.xyz", syftPkg.Language("something"), ""), + }, + { + namespaceString: "abc.xyz:language:something:another-package-manager", + result: NewNamespace("abc.xyz", syftPkg.Language("something"), syftPkg.Type("another-package-manager")), + }, + } + + for _, test := range successTests { + result, _ := FromString(test.namespaceString) + assert.Equal(t, result, test.result) + } + + errorTests := []struct { + namespaceString string + errorMessage string + }{ + { + namespaceString: "", + errorMessage: "unable to create language namespace from empty string", + }, + { + namespaceString: "single-component", + errorMessage: "unable to create language namespace from single-component: incorrect number of components", + }, + { + namespaceString: "two:components", + errorMessage: "unable to create language namespace from two:components: incorrect number of components", + }, + { + namespaceString: "too:many:components:a:b", + errorMessage: "unable to create language namespace from too:many:components:a:b: incorrect number of components", + }, + { + namespaceString: "wrong:namespace_type:a:b", + errorMessage: "unable to create language namespace from wrong:namespace_type:a:b: type namespace_type is incorrect", + }, + } + + for _, test := range errorTests { + _, err := FromString(test.namespaceString) + assert.EqualError(t, err, test.errorMessage) + } +} diff --git a/pkg/db/v5/namespace/namespace.go b/pkg/db/v5/namespace/namespace.go new file mode 100644 index 00000000..5fcfb180 --- /dev/null +++ b/pkg/db/v5/namespace/namespace.go @@ -0,0 +1,11 @@ +package namespace + +import ( + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver" +) + +type Namespace interface { + Provider() string + Resolver() resolver.Resolver + String() string +} diff --git a/pkg/db/v5/pkg/qualifier/from_json.go b/pkg/db/v5/pkg/qualifier/from_json.go new file mode 100644 index 00000000..16dd5a64 --- /dev/null +++ b/pkg/db/v5/pkg/qualifier/from_json.go @@ -0,0 +1,52 @@ +package qualifier + +import ( + "encoding/json" + + "github.com/mitchellh/mapstructure" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/platformcpe" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/rpmmodularity" +) + +func FromJSON(data []byte) ([]Qualifier, error) { + var records []map[string]interface{} + if err := json.Unmarshal(data, &records); err != nil { + return nil, err + } + + var qualifiers 
[]Qualifier + + for _, r := range records { + k, ok := r["kind"] + + if !ok { + log.Warn("Skipping qualifier with no kind specified") + continue + } + + // create the specific kind of Qualifier + switch k { + case "rpm-modularity": + var q rpmmodularity.Qualifier + if err := mapstructure.Decode(r, &q); err != nil { + log.Warn("Error decoding rpm-modularity package qualifier: (%v)", err) + continue + } + qualifiers = append(qualifiers, q) + case "platform-cpe": + var q platformcpe.Qualifier + if err := mapstructure.Decode(r, &q); err != nil { + log.Warn("Error decoding platform-cpe package qualifier: (%v)", err) + continue + } + qualifiers = append(qualifiers, q) + default: + log.Debug("Skipping unsupported package qualifier: %s", k) + continue + } + } + + return qualifiers, nil +} diff --git a/pkg/db/v5/pkg/qualifier/platformcpe/qualifier.go b/pkg/db/v5/pkg/qualifier/platformcpe/qualifier.go new file mode 100644 index 00000000..772b8cbb --- /dev/null +++ b/pkg/db/v5/pkg/qualifier/platformcpe/qualifier.go @@ -0,0 +1,21 @@ +package platformcpe + +import ( + "fmt" + + "github.com/anchore/grype/grype/pkg/qualifier" + "github.com/anchore/grype/grype/pkg/qualifier/platformcpe" +) + +type Qualifier struct { + Kind string `json:"kind" mapstructure:"kind"` // Kind of qualifier + CPE string `json:"cpe,omitempty" mapstructure:"cpe,omitempty"` // CPE +} + +func (q Qualifier) Parse() qualifier.Qualifier { + return platformcpe.New(q.CPE) +} + +func (q Qualifier) String() string { + return fmt.Sprintf("kind: %s, cpe: %q", q.Kind, q.CPE) +} diff --git a/pkg/db/v5/pkg/qualifier/qualifier.go b/pkg/db/v5/pkg/qualifier/qualifier.go new file mode 100644 index 00000000..d010ead8 --- /dev/null +++ b/pkg/db/v5/pkg/qualifier/qualifier.go @@ -0,0 +1,12 @@ +package qualifier + +import ( + "fmt" + + "github.com/anchore/grype/grype/pkg/qualifier" +) + +type Qualifier interface { + fmt.Stringer + Parse() qualifier.Qualifier +} diff --git a/pkg/db/v5/pkg/qualifier/rpmmodularity/qualifier.go b/pkg/db/v5/pkg/qualifier/rpmmodularity/qualifier.go new file mode 100644 index 00000000..cb41b5eb --- /dev/null +++ b/pkg/db/v5/pkg/qualifier/rpmmodularity/qualifier.go @@ -0,0 +1,21 @@ +package rpmmodularity + +import ( + "fmt" + + "github.com/anchore/grype/grype/pkg/qualifier" + "github.com/anchore/grype/grype/pkg/qualifier/rpmmodularity" +) + +type Qualifier struct { + Kind string `json:"kind" mapstructure:"kind"` // Kind of qualifier + Module string `json:"module,omitempty" mapstructure:"module,omitempty"` // Modularity label +} + +func (q Qualifier) Parse() qualifier.Qualifier { + return rpmmodularity.New(q.Module) +} + +func (q Qualifier) String() string { + return fmt.Sprintf("kind: %s, module: %q", q.Kind, q.Module) +} diff --git a/pkg/db/v5/pkg/resolver/from_language.go b/pkg/db/v5/pkg/resolver/from_language.go new file mode 100644 index 00000000..69a04199 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/from_language.go @@ -0,0 +1,23 @@ +package resolver + +import ( + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/java" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/python" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/stock" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func FromLanguage(language syftPkg.Language) (Resolver, error) { + var r Resolver + + switch language { + case syftPkg.Python: + r = &python.Resolver{} + case syftPkg.Java: + r = &java.Resolver{} + default: + r = &stock.Resolver{} + } + + return r, nil +} diff --git a/pkg/db/v5/pkg/resolver/from_language_test.go 
b/pkg/db/v5/pkg/resolver/from_language_test.go new file mode 100644 index 00000000..362b9eeb --- /dev/null +++ b/pkg/db/v5/pkg/resolver/from_language_test.go @@ -0,0 +1,70 @@ +package resolver + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/java" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/python" + "github.com/anchore/grype-db/pkg/db/v5/pkg/resolver/stock" + syftPkg "github.com/anchore/syft/syft/pkg" +) + +func TestFromLanguage(t *testing.T) { + tests := []struct { + language syftPkg.Language + result Resolver + }{ + { + language: syftPkg.Python, + result: &python.Resolver{}, + }, + { + language: syftPkg.Java, + result: &java.Resolver{}, + }, + { + language: syftPkg.Ruby, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Dart, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Rust, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Go, + result: &stock.Resolver{}, + }, + { + language: syftPkg.JavaScript, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Dotnet, + result: &stock.Resolver{}, + }, + { + language: syftPkg.PHP, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Ruby, + result: &stock.Resolver{}, + }, + { + language: syftPkg.Language("something-new"), + result: &stock.Resolver{}, + }, + } + + for _, test := range tests { + result, err := FromLanguage(test.language) + assert.NoError(t, err) + assert.Equal(t, result, test.result) + } +} diff --git a/pkg/db/v5/pkg/resolver/java/resolver.go b/pkg/db/v5/pkg/resolver/java/resolver.go new file mode 100644 index 00000000..bbfd5168 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/java/resolver.go @@ -0,0 +1,47 @@ +package java + +import ( + "fmt" + "strings" + + "github.com/scylladb/go-set/strset" + + "github.com/anchore/grype-db/internal/log" + grypePkg "github.com/anchore/grype/grype/pkg" + "github.com/anchore/packageurl-go" +) + +type Resolver struct { +} + +func (r *Resolver) Normalize(name string) string { + return strings.ToLower(name) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + names := strset.New() + + // The current default for the Java ecosystem is to use a Maven-like identifier of the form + // ":" + if metadata, ok := p.Metadata.(grypePkg.JavaMetadata); ok { + if metadata.PomGroupID != "" { + if metadata.PomArtifactID != "" { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.PomArtifactID))) + } + if metadata.ManifestName != "" { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", metadata.PomGroupID, metadata.ManifestName))) + } + } + } + + if p.PURL != "" { + purl, err := packageurl.FromString(p.PURL) + if err != nil { + log.Warnf("unable to resolve java package identifier from purl=%q: %+v", p.PURL, err) + } else { + names.Add(r.Normalize(fmt.Sprintf("%s:%s", purl.Namespace, purl.Name))) + } + } + + return names.List() +} diff --git a/pkg/db/v5/pkg/resolver/java/resolver_test.go b/pkg/db/v5/pkg/resolver/java/resolver_test.go new file mode 100644 index 00000000..a0e9b207 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/java/resolver_test.go @@ -0,0 +1,175 @@ +package java + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo.concurrency", + }, + { + 
packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test---1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd.-__.--.-___.__.--1234____----....xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} + +func TestResolver_Resolve(t *testing.T) { + tests := []struct { + name string + pkg grypePkg.Package + resolved []string + }{ + { + name: "both artifact and manifest 1", + pkg: grypePkg.Package{ + Name: "ABCD", + Version: "1.2.3.4", + Language: "java", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "virtual-path-info", + PomArtifactID: "pom-ARTIFACT-ID-info", + PomGroupID: "pom-group-ID-info", + ManifestName: "main-section-name-info", + }, + }, + resolved: []string{"pom-group-id-info:pom-artifact-id-info", "pom-group-id-info:main-section-name-info"}, + }, + { + name: "both artifact and manifest 2", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + resolved: []string{ + "g-id:art-id", + "g-id:man-name", + }, + }, + { + name: "no group id", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + ManifestName: "man-name", + }, + }, + resolved: []string{}, + }, + { + name: "only manifest", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + ManifestName: "man-name", + }, + }, + resolved: []string{ + "g-id:man-name", + }, + }, + { + name: "only artifact", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + resolved: []string{ + "g-id:art-id", + }, + }, + { + name: "no artifact or manifest", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomGroupID: "g-id", + }, + }, + resolved: []string{}, + }, + { + name: "with valid purl", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + PURL: "pkg:maven/org.anchore/b-name@0.2", + }, + resolved: []string{"org.anchore:b-name"}, + }, + { + name: "ignore invalid pURLs", + pkg: grypePkg.Package{ + ID: grypePkg.ID(uuid.NewString()), + Name: "a-name", + PURL: "pkg:BAD/", + Metadata: grypePkg.JavaMetadata{ + VirtualPath: "v-path", + PomArtifactID: "art-id", + PomGroupID: "g-id", + }, + }, + resolved: []string{ + "g-id:art-id", + }, + }, + } + + resolver := Resolver{} + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + resolvedNames := resolver.Resolve(test.pkg) + assert.ElementsMatch(t, resolvedNames, test.resolved) + }) + } +} diff --git a/pkg/db/v5/pkg/resolver/python/resolver.go b/pkg/db/v5/pkg/resolver/python/resolver.go new file mode 100644 index 00000000..0145bf09 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/python/resolver.go @@ -0,0 +1,29 @@ +package python + +import ( + "regexp" + "strings" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver struct { +} + +func (r *Resolver) Normalize(name string) string { + // Canonical naming of packages 
within python is defined by PEP 503 at + // https://peps.python.org/pep-0503/#normalized-names, and this code is derived from + // the official python implementation of canonical naming at + // https://packaging.pypa.io/en/latest/_modules/packaging/utils.html#canonicalize_name + + return strings.ToLower(regexp.MustCompile(`[-_.]+`).ReplaceAllString(name, "-")) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + // Canonical naming of packages within python is defined by PEP 503 at + // https://peps.python.org/pep-0503/#normalized-names, and this code is derived from + // the official python implementation of canonical naming at + // https://packaging.pypa.io/en/latest/_modules/packaging/utils.html#canonicalize_name + + return []string{r.Normalize(p.Name)} +} diff --git a/pkg/db/v5/pkg/resolver/python/resolver_test.go b/pkg/db/v5/pkg/resolver/python/resolver_test.go new file mode 100644 index 00000000..f54aef42 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/python/resolver_test.go @@ -0,0 +1,42 @@ +package python + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo-concurrency", + }, + { + packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test-1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd-1234-xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} diff --git a/pkg/db/v5/pkg/resolver/resolver.go b/pkg/db/v5/pkg/resolver/resolver.go new file mode 100644 index 00000000..bc253a25 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/resolver.go @@ -0,0 +1,10 @@ +package resolver + +import ( + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver interface { + Normalize(string) string + Resolve(p grypePkg.Package) []string +} diff --git a/pkg/db/v5/pkg/resolver/stock/resolver.go b/pkg/db/v5/pkg/resolver/stock/resolver.go new file mode 100644 index 00000000..c1e38411 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/stock/resolver.go @@ -0,0 +1,18 @@ +package stock + +import ( + "strings" + + grypePkg "github.com/anchore/grype/grype/pkg" +) + +type Resolver struct { +} + +func (r *Resolver) Normalize(name string) string { + return strings.ToLower(name) +} + +func (r *Resolver) Resolve(p grypePkg.Package) []string { + return []string{r.Normalize(p.Name)} +} diff --git a/pkg/db/v5/pkg/resolver/stock/resolver_test.go b/pkg/db/v5/pkg/resolver/stock/resolver_test.go new file mode 100644 index 00000000..699b5817 --- /dev/null +++ b/pkg/db/v5/pkg/resolver/stock/resolver_test.go @@ -0,0 +1,42 @@ +package stock + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestResolver_Normalize(t *testing.T) { + tests := []struct { + packageName string + normalized string + }{ + { + packageName: "PyYAML", + normalized: "pyyaml", + }, + { + packageName: "oslo.concurrency", + normalized: "oslo.concurrency", + }, + { + packageName: "", + normalized: "", + }, + { + packageName: "test---1", + normalized: "test---1", + }, + { + packageName: "AbCd.-__.--.-___.__.--1234____----....XyZZZ", + normalized: "abcd.-__.--.-___.__.--1234____----....xyzzz", + }, + } + + resolver := Resolver{} + + for _, test := range tests { + resolvedNames := 
resolver.Normalize(test.packageName) + assert.Equal(t, resolvedNames, test.normalized) + } +} diff --git a/pkg/db/v5/schema_version.go b/pkg/db/v5/schema_version.go new file mode 100644 index 00000000..8956dd88 --- /dev/null +++ b/pkg/db/v5/schema_version.go @@ -0,0 +1,3 @@ +package v5 + +const SchemaVersion = 5 diff --git a/pkg/db/v5/store.go b/pkg/db/v5/store.go new file mode 100644 index 00000000..3118918f --- /dev/null +++ b/pkg/db/v5/store.go @@ -0,0 +1,25 @@ +package v5 + +type Store interface { + StoreReader + StoreWriter + DBCloser +} + +type StoreReader interface { + IDReader + VulnerabilityStoreReader + VulnerabilityMetadataStoreReader + VulnerabilityMatchExclusionStoreReader +} + +type StoreWriter interface { + IDWriter + VulnerabilityStoreWriter + VulnerabilityMetadataStoreWriter + VulnerabilityMatchExclusionStoreWriter +} + +type DBCloser interface { + Close() +} diff --git a/pkg/db/v5/store/model/id.go b/pkg/db/v5/store/model/id.go new file mode 100644 index 00000000..3a817f83 --- /dev/null +++ b/pkg/db/v5/store/model/id.go @@ -0,0 +1,40 @@ +package model + +import ( + "fmt" + "time" + + v5 "github.com/anchore/grype-db/pkg/db/v5" +) + +const ( + IDTableName = "id" +) + +type IDModel struct { + BuildTimestamp string `gorm:"column:build_timestamp"` + SchemaVersion int `gorm:"column:schema_version"` +} + +func NewIDModel(id v5.ID) IDModel { + return IDModel{ + BuildTimestamp: id.BuildTimestamp.Format(time.RFC3339Nano), + SchemaVersion: id.SchemaVersion, + } +} + +func (IDModel) TableName() string { + return IDTableName +} + +func (m *IDModel) Inflate() (v5.ID, error) { + buildTime, err := time.Parse(time.RFC3339Nano, m.BuildTimestamp) + if err != nil { + return v5.ID{}, fmt.Errorf("unable to parse build timestamp (%+v): %w", m.BuildTimestamp, err) + } + + return v5.ID{ + BuildTimestamp: buildTime, + SchemaVersion: m.SchemaVersion, + }, nil +} diff --git a/pkg/db/v5/store/model/vulnerability.go b/pkg/db/v5/store/model/vulnerability.go new file mode 100644 index 00000000..e59c9ae8 --- /dev/null +++ b/pkg/db/v5/store/model/vulnerability.go @@ -0,0 +1,102 @@ +package model + +import ( + "encoding/json" + "fmt" + + sqlite "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v5 "github.com/anchore/grype-db/pkg/db/v5" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier" +) + +const ( + VulnerabilityTableName = "vulnerability" + GetVulnerabilityIndexName = "get_vulnerability_index" +) + +// VulnerabilityModel is a struct used to serialize db.Vulnerability information into a sqlite3 DB. +type VulnerabilityModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id"` + PackageName string `gorm:"column:package_name; index:get_vulnerability_index"` + Namespace string `gorm:"column:namespace; index:get_vulnerability_index"` + PackageQualifiers sqlite.NullString `gorm:"column:package_qualifiers"` + VersionConstraint string `gorm:"column:version_constraint"` + VersionFormat string `gorm:"column:version_format"` + CPEs sqlite.NullString `gorm:"column:cpes; default:null"` + RelatedVulnerabilities sqlite.NullString `gorm:"column:related_vulnerabilities; default:null"` + FixedInVersions sqlite.NullString `gorm:"column:fixed_in_versions; default:null"` + FixState string `gorm:"column:fix_state"` + Advisories sqlite.NullString `gorm:"column:advisories; default:null"` +} + +// NewVulnerabilityModel generates a new model from a db.Vulnerability struct. 
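+// A minimal round-trip sketch (the field values are illustrative, not real data):
+//
+//	m := NewVulnerabilityModel(v5.Vulnerability{
+//		ID:                "CVE-2023-0001",
+//		PackageName:       "package-name",
+//		Namespace:         "my-namespace",
+//		VersionConstraint: "< 1.0",
+//		VersionFormat:     "semver",
+//	})
+//	vuln, err := m.Inflate() // Inflate (below) reverses this serialization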
+func NewVulnerabilityModel(vulnerability v5.Vulnerability) VulnerabilityModel { + return VulnerabilityModel{ + ID: vulnerability.ID, + PackageName: vulnerability.PackageName, + Namespace: vulnerability.Namespace, + PackageQualifiers: sqlite.ToNullString(vulnerability.PackageQualifiers), + VersionConstraint: vulnerability.VersionConstraint, + VersionFormat: vulnerability.VersionFormat, + FixedInVersions: sqlite.ToNullString(vulnerability.Fix.Versions), + FixState: string(vulnerability.Fix.State), + Advisories: sqlite.ToNullString(vulnerability.Advisories), + CPEs: sqlite.ToNullString(vulnerability.CPEs), + RelatedVulnerabilities: sqlite.ToNullString(vulnerability.RelatedVulnerabilities), + } +} + +// TableName returns the table which all db.Vulnerability model instances are stored into. +func (VulnerabilityModel) TableName() string { + return VulnerabilityTableName +} + +// Inflate generates a db.Vulnerability object from the serialized model instance. +func (m *VulnerabilityModel) Inflate() (v5.Vulnerability, error) { + var cpes []string + err := json.Unmarshal(m.CPEs.ToByteSlice(), &cpes) + if err != nil { + return v5.Vulnerability{}, fmt.Errorf("unable to unmarshal CPEs (%+v): %w", m.CPEs, err) + } + + var related []v5.VulnerabilityReference + err = json.Unmarshal(m.RelatedVulnerabilities.ToByteSlice(), &related) + if err != nil { + return v5.Vulnerability{}, fmt.Errorf("unable to unmarshal related vulnerabilities (%+v): %w", m.RelatedVulnerabilities, err) + } + + var advisories []v5.Advisory + + err = json.Unmarshal(m.Advisories.ToByteSlice(), &advisories) + if err != nil { + return v5.Vulnerability{}, fmt.Errorf("unable to unmarshal advisories (%+v): %w", m.Advisories, err) + } + + var versions []string + err = json.Unmarshal(m.FixedInVersions.ToByteSlice(), &versions) + if err != nil { + return v5.Vulnerability{}, fmt.Errorf("unable to unmarshal versions (%+v): %w", m.FixedInVersions, err) + } + + pkgQualifiers, err := qualifier.FromJSON(m.PackageQualifiers.ToByteSlice()) + if err != nil { + return v5.Vulnerability{}, fmt.Errorf("unable to unmarshal package_qualifiers (%+v): %w", m.PackageQualifiers, err) + } + + return v5.Vulnerability{ + ID: m.ID, + PackageName: m.PackageName, + PackageQualifiers: pkgQualifiers, + Namespace: m.Namespace, + VersionConstraint: m.VersionConstraint, + VersionFormat: m.VersionFormat, + CPEs: cpes, + RelatedVulnerabilities: related, + Fix: v5.Fix{ + Versions: versions, + State: v5.FixState(m.FixState), + }, + Advisories: advisories, + }, nil +} diff --git a/pkg/db/v5/store/model/vulnerability_match_exclusion.go b/pkg/db/v5/store/model/vulnerability_match_exclusion.go new file mode 100644 index 00000000..349e9ea2 --- /dev/null +++ b/pkg/db/v5/store/model/vulnerability_match_exclusion.go @@ -0,0 +1,72 @@ +package model + +import ( + "encoding/json" + "fmt" + + "github.com/anchore/grype-db/internal/log" + "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v5 "github.com/anchore/grype-db/pkg/db/v5" +) + +const ( + VulnerabilityMatchExclusionTableName = "vulnerability_match_exclusion" + GetVulnerabilityMatchExclusionIndexName = "get_vulnerability_match_exclusion_index" +) + +// VulnerabilityMatchExclusionModel is a struct used to serialize db.VulnerabilityMatchExclusion information into a sqlite3 DB. 
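+// The Constraints column holds a JSON array of v5.VulnerabilityMatchExclusionConstraint
+// values; a representative payload (borrowed from the tests for this model) looks like:
+//
+//	[{"vulnerability":{"namespace":"nvd:cpe"},"package":{"language":"python"}}]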
+type VulnerabilityMatchExclusionModel struct { + PK uint64 `gorm:"primary_key;auto_increment;"` + ID string `gorm:"column:id; index:get_vulnerability_match_exclusion_index"` + Constraints sqlite.NullString `gorm:"column:constraints; default:null"` + Justification string `gorm:"column:justification"` +} + +// NewVulnerabilityMatchExclusionModel generates a new model from a db.VulnerabilityMatchExclusion struct. +func NewVulnerabilityMatchExclusionModel(v v5.VulnerabilityMatchExclusion) VulnerabilityMatchExclusionModel { + return VulnerabilityMatchExclusionModel{ + ID: v.ID, + Constraints: sqlite.ToNullString(v.Constraints), + Justification: v.Justification, + } +} + +// TableName returns the table which all db.VulnerabilityMatchExclusion model instances are stored into. +func (VulnerabilityMatchExclusionModel) TableName() string { + return VulnerabilityMatchExclusionTableName +} + +// Inflate generates a db.VulnerabilityMatchExclusion object from the serialized model instance. +func (m *VulnerabilityMatchExclusionModel) Inflate() (*v5.VulnerabilityMatchExclusion, error) { + // It's important that we only utilise exclusion constraints that are compatible with this version of Grype, + // so if any unknown fields are encountered then ignore that constraint. + + var constraints []v5.VulnerabilityMatchExclusionConstraint + err := json.Unmarshal(m.Constraints.ToByteSlice(), &constraints) + if err != nil { + return nil, fmt.Errorf("unable to unmarshal vulnerability match exclusion constraints (%+v): %w", m.Constraints, err) + } + + var compatibleConstraints []v5.VulnerabilityMatchExclusionConstraint + + if len(constraints) > 0 { + for _, c := range constraints { + if !c.Usable() { + log.Debugf("skipping incompatible vulnerability match constraint for vuln id=%s, constraint=%+v", m.ID, c) + } else { + compatibleConstraints = append(compatibleConstraints, c) + } + } + + // If there were constraints and none were compatible, the entire record is not usable by this version of Grype + if len(compatibleConstraints) == 0 { + return nil, nil + } + } + + return &v5.VulnerabilityMatchExclusion{ + ID: m.ID, + Constraints: compatibleConstraints, + Justification: m.Justification, + }, nil +} diff --git a/pkg/db/v5/store/model/vulnerability_match_exclusion_test.go b/pkg/db/v5/store/model/vulnerability_match_exclusion_test.go new file mode 100644 index 00000000..95da565c --- /dev/null +++ b/pkg/db/v5/store/model/vulnerability_match_exclusion_test.go @@ -0,0 +1,201 @@ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v5 "github.com/anchore/grype-db/pkg/db/v5" +) + +func TestVulnerabilityMatchExclusionModel_Inflate(t *testing.T) { + tests := []struct { + name string + record *VulnerabilityMatchExclusionModel + result *v5.VulnerabilityMatchExclusion + }{ + { + name: "Nil constraint", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-12345", + Constraints: sqlite.ToNullString(nil), + Justification: "Who really knows?", + }, + result: &v5.VulnerabilityMatchExclusion{ + ID: "CVE-12345", + Constraints: nil, + Justification: "Who really knows?", + }, + }, + { + name: "Empty constraint array", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[]`, true), + Justification: "Always ignore", + }, + result: &v5.VulnerabilityMatchExclusion{ + ID: "CVE-919", + Constraints: nil, + Justification: "Always ignore", + }, + }, + { + name: "Single constraint", + record: 
&VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"nvd:cpe"},"package":{"language":"python"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v5.VulnerabilityMatchExclusion{ + ID: "CVE-919", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "python", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Single unusable constraint with unknown vulnerability constraint field", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"nvd:cpe","something_new":"1234"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Single unusable constraint with unknown package constraint fields", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"package":{"name":"jim","another_field":"1234","x_y_z":"abc"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Single unusable constraint with unknown root-level constraint fields", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-919", + Constraints: sqlite.NewNullString(`[{"x_y_z":{"name":"jim","another_field":"1234","x_y_z":"abc"},"package":{"name":"jim","another_field":"1234","x_y_z":"abc"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + { + name: "Multiple usable constraints", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"package":{"language":"python","version":"1000.0.1"}},{"vulnerability":{"namespace":"nvd:cpe"}},{"vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v5.VulnerabilityMatchExclusion{ + ID: "CVE-2025-152345", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "abc.xyz:language:ruby", + FixState: "wont-fix", + }, + Package: v5.PackageExclusionConstraint{ + Language: "ruby", + Type: "not-gem", + }, + }, + { + Package: v5.PackageExclusionConstraint{ + Language: "python", + Version: "1000.0.1", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "x", + }, + }, + { + Package: v5.PackageExclusionConstraint{ + Location: "/bin/x", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Multiple constraints with some unusable", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"a_b_c": 
"x","vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"package":{"language":"python","version":"1000.0.1"}},{"vulnerability":{"namespace":"nvd:cpe"}},{"vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x","nnnn":"no"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: &v5.VulnerabilityMatchExclusion{ + ID: "CVE-2025-152345", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Package: v5.PackageExclusionConstraint{ + Language: "python", + Version: "1000.0.1", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "nvd:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "x", + }, + }, + }, + Justification: "Python packages are not vulnerable", + }, + }, + { + name: "Multiple constraints all unusable", + record: &VulnerabilityMatchExclusionModel{ + PK: 0, + ID: "CVE-2025-152345", + Constraints: sqlite.NewNullString(`[{"a_b_c": "x","vulnerability":{"namespace":"abc.xyz:language:ruby","fix_state":"wont-fix"},"package":{"language":"ruby","type":"not-gem"}},{"a_b_c": "x","package":{"language":"python","version":"1000.0.1"}},{"a_b_c": "x","vulnerability":{"namespace":"nvd:cpe"}},{"a_b_c": "x","vulnerability":{"namespace":"nvd:cpe"},"package":{"name":"x"}},{"package":{"location":"/bin/x","nnnn":"no"}}]`, true), + Justification: "Python packages are not vulnerable", + }, + result: nil, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result, err := test.record.Inflate() + assert.NoError(t, err) + assert.Equal(t, test.result, result) + }) + } +} diff --git a/pkg/db/v5/store/model/vulnerability_metadata.go b/pkg/db/v5/store/model/vulnerability_metadata.go new file mode 100644 index 00000000..5e4f1f2b --- /dev/null +++ b/pkg/db/v5/store/model/vulnerability_metadata.go @@ -0,0 +1,74 @@ +package model + +import ( + "encoding/json" + "fmt" + + sqlite "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v5 "github.com/anchore/grype-db/pkg/db/v5" +) + +const ( + VulnerabilityMetadataTableName = "vulnerability_metadata" +) + +// VulnerabilityMetadataModel is a struct used to serialize db.VulnerabilityMetadata information into a sqlite3 DB. +type VulnerabilityMetadataModel struct { + ID string `gorm:"primary_key; column:id;"` + Namespace string `gorm:"primary_key; column:namespace;"` + DataSource string `gorm:"column:data_source"` + RecordSource string `gorm:"column:record_source"` + Severity string `gorm:"column:severity"` + URLs sqlite.NullString `gorm:"column:urls; default:null"` + Description string `gorm:"column:description"` + Cvss sqlite.NullString `gorm:"column:cvss; default:null"` +} + +// NewVulnerabilityMetadataModel generates a new model from a db.VulnerabilityMetadata struct. +func NewVulnerabilityMetadataModel(metadata v5.VulnerabilityMetadata) VulnerabilityMetadataModel { + if metadata.Cvss == nil { + metadata.Cvss = make([]v5.Cvss, 0) + } + + return VulnerabilityMetadataModel{ + ID: metadata.ID, + Namespace: metadata.Namespace, + DataSource: metadata.DataSource, + RecordSource: metadata.RecordSource, + Severity: metadata.Severity, + URLs: sqlite.ToNullString(metadata.URLs), + Description: metadata.Description, + Cvss: sqlite.ToNullString(metadata.Cvss), + } +} + +// TableName returns the table which all db.VulnerabilityMetadata model instances are stored into. 
+func (VulnerabilityMetadataModel) TableName() string { + return VulnerabilityMetadataTableName +} + +// Inflate generates a db.VulnerabilityMetadataModel object from the serialized model instance. +func (m *VulnerabilityMetadataModel) Inflate() (v5.VulnerabilityMetadata, error) { + var links []string + var cvss []v5.Cvss + + if err := json.Unmarshal(m.URLs.ToByteSlice(), &links); err != nil { + return v5.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal URLs (%+v): %w", m.URLs, err) + } + + err := json.Unmarshal(m.Cvss.ToByteSlice(), &cvss) + if err != nil { + return v5.VulnerabilityMetadata{}, fmt.Errorf("unable to unmarshal cvss data (%+v): %w", m.Cvss, err) + } + + return v5.VulnerabilityMetadata{ + ID: m.ID, + Namespace: m.Namespace, + DataSource: m.DataSource, + RecordSource: m.RecordSource, + Severity: m.Severity, + URLs: links, + Description: m.Description, + Cvss: cvss, + }, nil +} diff --git a/pkg/db/v5/store/model/vulnerability_test.go b/pkg/db/v5/store/model/vulnerability_test.go new file mode 100644 index 00000000..38123220 --- /dev/null +++ b/pkg/db/v5/store/model/vulnerability_test.go @@ -0,0 +1,167 @@ +package model + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/anchore/grype-db/pkg/db/internal/sqlite" + v5 "github.com/anchore/grype-db/pkg/db/v5" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/platformcpe" + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/rpmmodularity" +) + +func TestVulnerabilityModel_Inflate(t *testing.T) { + tests := []struct { + name string + record *VulnerabilityModel + result v5.Vulnerability + }{ + { + name: "nil package_qualifiers", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-12345", + PackageQualifiers: sqlite.ToNullString(nil), + }, + result: v5.Vulnerability{ + ID: "CVE-12345", + }, + }, + { + name: "Empty package_qualifiers array", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + }, + }, + { + name: "Single rpmmodularity package qualifier with no module specified", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "rpm-modularity"}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + PackageQualifiers: []qualifier.Qualifier{ + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "", + }, + }, + }, + }, + { + name: "Single rpmmodularity package qualifier with empty string module specified", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "rpm-modularity", "module": ""}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + PackageQualifiers: []qualifier.Qualifier{ + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "", + }, + }, + }, + }, + { + name: "Single rpmmodularity package qualifier with module specified", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "rpm-modularity", "module": "x.y.z:2000"}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + PackageQualifiers: []qualifier.Qualifier{ + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "x.y.z:2000", + }, + }, + }, + }, + { + name: "Single platformcpe package qualifier with cpe specified", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "platform-cpe", "cpe": 
"cpe:2.3:o:canonical:ubuntu_linux:19.10:*:*:*:*:*:*:*"}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + PackageQualifiers: []qualifier.Qualifier{ + platformcpe.Qualifier{ + Kind: "platform-cpe", + CPE: "cpe:2.3:o:canonical:ubuntu_linux:19.10:*:*:*:*:*:*:*", + }, + }, + }, + }, + { + name: "Single unrecognized package qualifier", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "unknown", "some-random-slice": [{"x": true}]}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + }, + }, + { + name: "Single package qualifier without kind specified", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"some-random-slice": [{"x": true}]}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + }, + }, + { + name: "Multiple package qualifiers", + record: &VulnerabilityModel{ + PK: 0, + ID: "CVE-919", + PackageQualifiers: sqlite.NewNullString(`[{"kind": "rpm-modularity"},{"kind": "rpm-modularity", "module": ""},{"kind": "rpm-modularity", "module": "x.y.z:2000"},{"kind": "unknown", "some-random-slice": [{"x": true}]}]`, true), + }, + result: v5.Vulnerability{ + ID: "CVE-919", + PackageQualifiers: []qualifier.Qualifier{ + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "", + }, + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "", + }, + rpmmodularity.Qualifier{ + Kind: "rpm-modularity", + Module: "x.y.z:2000", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + result, err := test.record.Inflate() + assert.NoError(t, err) + assert.Equal(t, test.result, result) + }) + } +} diff --git a/pkg/db/v5/store/store.go b/pkg/db/v5/store/store.go new file mode 100644 index 00000000..5d46f346 --- /dev/null +++ b/pkg/db/v5/store/store.go @@ -0,0 +1,324 @@ +package store + +import ( + "fmt" + "sort" + + _ "github.com/glebarez/sqlite" // provide the sqlite dialect to gorm via import + "github.com/go-test/deep" + "github.com/scylladb/go-set/strset" + "gorm.io/gorm" + + "github.com/anchore/grype-db/pkg/db/internal/gormadapter" + v5 "github.com/anchore/grype-db/pkg/db/v5" + "github.com/anchore/grype-db/pkg/db/v5/store/model" +) + +// store holds an instance of the database connection +type store struct { + db *gorm.DB +} + +// New creates a new instance of the store. +func New(dbFilePath string, overwrite bool) (v5.Store, error) { + db, err := gormadapter.Open(dbFilePath, overwrite) + if err != nil { + return nil, err + } + + if overwrite { + // TODO: automigrate could write to the database, + // we should be validating the database is the correct database based on the version in the ID table before + // automigrating + if err := db.AutoMigrate(&model.IDModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate ID model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMetadataModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Metadata model: %w", err) + } + if err := db.AutoMigrate(&model.VulnerabilityMatchExclusionModel{}); err != nil { + return nil, fmt.Errorf("unable to migrate Vulnerability Match Exclusion model: %w", err) + } + } + + return &store{ + db: db, + }, nil +} + +// GetID fetches the metadata about the databases schema version and build time. 
+func (s *store) GetID() (*v5.ID, error) { + var models []model.IDModel + result := s.db.Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple DB IDs") + case len(models) == 1: + id, err := models[0].Inflate() + if err != nil { + return nil, err + } + return &id, nil + } + + return nil, nil +} + +// SetID stores the databases schema version and build time. +func (s *store) SetID(id v5.ID) error { + var ids []model.IDModel + + // replace the existing ID with the given one + s.db.Find(&ids).Delete(&ids) + + m := model.NewIDModel(id) + result := s.db.Create(&m) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add id (%d rows affected)", result.RowsAffected) + } + + return result.Error +} + +// GetVulnerabilityNamespaces retrieves all possible namespaces from the database. +func (s *store) GetVulnerabilityNamespaces() ([]string, error) { + var names []string + result := s.db.Model(&model.VulnerabilityMetadataModel{}).Distinct().Pluck("namespace", &names) + return names, result.Error +} + +// GetVulnerability retrieves vulnerabilities by namespace and id +func (s *store) GetVulnerability(namespace, id string) ([]v5.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? AND id = ?", namespace, id).Find(&models) + + var vulnerabilities = make([]v5.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// SearchForVulnerabilities retrieves vulnerabilities by namespace and package +func (s *store) SearchForVulnerabilities(namespace, packageName string) ([]v5.Vulnerability, error) { + var models []model.VulnerabilityModel + + result := s.db.Where("namespace = ? AND package_name = ?", namespace, packageName).Find(&models) + + var vulnerabilities = make([]v5.Vulnerability, len(models)) + for idx, m := range models { + vulnerability, err := m.Inflate() + if err != nil { + return nil, err + } + vulnerabilities[idx] = vulnerability + } + + return vulnerabilities, result.Error +} + +// AddVulnerability saves one or more vulnerabilities into the sqlite3 store. +func (s *store) AddVulnerability(vulnerabilities ...v5.Vulnerability) error { + for _, vulnerability := range vulnerabilities { + m := model.NewVulnerabilityModel(vulnerability) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability (%d rows affected)", result.RowsAffected) + } + } + return nil +} + +// GetVulnerabilityMetadata retrieves metadata for the given vulnerability ID relative to a specific record source. +func (s *store) GetVulnerabilityMetadata(id, namespace string) (*v5.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + + result := s.db.Where(&model.VulnerabilityMetadataModel{ID: id, Namespace: namespace}).Find(&models) + if result.Error != nil { + return nil, result.Error + } + + switch { + case len(models) > 1: + return nil, fmt.Errorf("found multiple metadatas for single ID=%q Namespace=%q", id, namespace) + case len(models) == 1: + metadata, err := models[0].Inflate() + if err != nil { + return nil, err + } + + return &metadata, nil + } + + return nil, nil +} + +// AddVulnerabilityMetadata stores one or more vulnerability metadata models into the sqlite DB. 
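+// Adding a record for an (ID, Namespace) pair that already exists merges rather than
+// duplicates: URLs are unioned and sorted, CVSS entries are de-duplicated, and a
+// mismatched Severity or Description is rejected with an error. A sketch (values
+// illustrative):
+//
+//	_ = s.AddVulnerabilityMetadata(v5.VulnerabilityMetadata{ID: "CVE-1", Namespace: "ns", Severity: "High", URLs: []string{"https://a.example"}})
+//	_ = s.AddVulnerabilityMetadata(v5.VulnerabilityMetadata{ID: "CVE-1", Namespace: "ns", Severity: "High", URLs: []string{"https://b.example"}})
+//	// the stored record now lists both URLs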
+// +//nolint:gocognit +func (s *store) AddVulnerabilityMetadata(metadata ...v5.VulnerabilityMetadata) error { + for _, m := range metadata { + existing, err := s.GetVulnerabilityMetadata(m.ID, m.Namespace) + if err != nil { + return fmt.Errorf("failed to verify existing entry: %w", err) + } + + if existing != nil { + // merge with the existing entry + + switch { + case existing.Severity != m.Severity: + return fmt.Errorf("existing metadata has mismatched severity (%q!=%q)", existing.Severity, m.Severity) + case existing.Description != m.Description: + return fmt.Errorf("existing metadata has mismatched description (%q!=%q)", existing.Description, m.Description) + } + + incoming: + // go through all incoming CVSS and see if they are already stored. + // If they exist already in the database then skip adding them, + // preventing a duplicate + for _, incomingCvss := range m.Cvss { + for _, existingCvss := range existing.Cvss { + if len(deep.Equal(incomingCvss, existingCvss)) == 0 { + // duplicate found, so incoming CVSS shouldn't get added + continue incoming + } + } + // a duplicate CVSS entry wasn't found, so append the incoming CVSS + existing.Cvss = append(existing.Cvss, incomingCvss) + } + + links := strset.New(existing.URLs...) + for _, l := range m.URLs { + links.Add(l) + } + + existing.URLs = links.List() + sort.Strings(existing.URLs) + + newModel := model.NewVulnerabilityMetadataModel(*existing) + result := s.db.Save(&newModel) + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to merge vulnerability metadata (%d rows affected)", result.RowsAffected) + } + + if result.Error != nil { + return result.Error + } + } else { + // this is a new entry + newModel := model.NewVulnerabilityMetadataModel(m) + result := s.db.Create(&newModel) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability metadata (%d rows affected)", result.RowsAffected) + } + } + } + return nil +} + +// GetVulnerabilityMatchExclusion retrieves one or more vulnerability match exclusion records given a vulnerability identifier. +func (s *store) GetVulnerabilityMatchExclusion(id string) ([]v5.VulnerabilityMatchExclusion, error) { + var models []model.VulnerabilityMatchExclusionModel + + result := s.db.Where("id = ?", id).Find(&models) + + var exclusions []v5.VulnerabilityMatchExclusion + for _, m := range models { + exclusion, err := m.Inflate() + if err != nil { + return nil, err + } + + if exclusion != nil { + exclusions = append(exclusions, *exclusion) + } + } + + return exclusions, result.Error +} + +// AddVulnerabilityMatchExclusion saves one or more vulnerability match exclusion records into the sqlite3 store. 
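+// For example (the ID and justification are illustrative):
+//
+//	err := s.AddVulnerabilityMatchExclusion(v5.VulnerabilityMatchExclusion{
+//		ID:            "CVE-12345",
+//		Justification: "not applicable to this ecosystem",
+//	})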
+func (s *store) AddVulnerabilityMatchExclusion(exclusions ...v5.VulnerabilityMatchExclusion) error { + for _, exclusion := range exclusions { + m := model.NewVulnerabilityMatchExclusionModel(exclusion) + + result := s.db.Create(&m) + if result.Error != nil { + return result.Error + } + + if result.RowsAffected != 1 { + return fmt.Errorf("unable to add vulnerability match exclusion (%d rows affected)", result.RowsAffected) + } + } + + return nil +} + +func (s *store) Close() { + s.db.Exec("VACUUM;") + + sqlDB, err := s.db.DB() + if err != nil { + _ = sqlDB.Close() + } +} + +// GetAllVulnerabilities gets all vulnerabilities in the database +func (s *store) GetAllVulnerabilities() (*[]v5.Vulnerability, error) { + var models []model.VulnerabilityModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + vulns := make([]v5.Vulnerability, len(models)) + for idx, m := range models { + vuln, err := m.Inflate() + if err != nil { + return nil, err + } + vulns[idx] = vuln + } + return &vulns, nil +} + +// GetAllVulnerabilityMetadata gets all vulnerability metadata in the database +func (s *store) GetAllVulnerabilityMetadata() (*[]v5.VulnerabilityMetadata, error) { + var models []model.VulnerabilityMetadataModel + if result := s.db.Find(&models); result.Error != nil { + return nil, result.Error + } + metadata := make([]v5.VulnerabilityMetadata, len(models)) + for idx, m := range models { + data, err := m.Inflate() + if err != nil { + return nil, err + } + metadata[idx] = data + } + return &metadata, nil +} diff --git a/pkg/db/v5/store/store_test.go b/pkg/db/v5/store/store_test.go new file mode 100644 index 00000000..59ff9c01 --- /dev/null +++ b/pkg/db/v5/store/store_test.go @@ -0,0 +1,1375 @@ +package store + +import ( + "encoding/json" + "sort" + "testing" + "time" + + "github.com/go-test/deep" + "github.com/stretchr/testify/assert" + + v5 "github.com/anchore/grype-db/pkg/db/v5" + "github.com/anchore/grype-db/pkg/db/v5/store/model" +) + +func assertIDReader(t *testing.T, reader v5.IDReader, expected v5.ID) { + t.Helper() + if actual, err := reader.GetID(); err != nil { + t.Fatalf("failed to get ID: %+v", err) + } else { + diffs := deep.Equal(&expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } +} + +func TestStore_GetID_SetID(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + expected := v5.ID{ + BuildTimestamp: time.Now().UTC(), + SchemaVersion: 2, + } + + if err = s.SetID(expected); err != nil { + t.Fatalf("failed to set ID: %+v", err) + } + + assertIDReader(t, s, expected) + +} + +func assertVulnerabilityReader(t *testing.T, reader v5.VulnerabilityStoreReader, namespace, name string, expected []v5.Vulnerability) { + if actual, err := reader.SearchForVulnerabilities(namespace, name); err != nil { + t.Fatalf("failed to get Vulnerability: %+v", err) + } else { + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulns: %d", len(actual)) + } + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerability_SetVulnerability(t *testing.T) { + dbTempFile := t.TempDir() + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := []v5.Vulnerability{ + { + ID: "my-cve-33333", + PackageName: 
"package-name-2", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v5.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v5.Fix{ + Versions: []string{"2.0.1"}, + State: v5.FixedState, + }, + }, + { + ID: "my-other-cve-33333", + PackageName: "package-name-3", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v5.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v5.Fix{ + State: v5.NotFixedState, + }, + }, + } + + expected := []v5.Vulnerability{ + { + ID: "my-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1.0", + VersionFormat: "semver", + CPEs: []string{"a-cool-cpe"}, + RelatedVulnerabilities: []v5.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v5.Fix{ + Versions: []string{"1.0.1"}, + State: v5.FixedState, + }, + }, + { + ID: "my-other-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 509.2.2", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: []v5.VulnerabilityReference{ + { + ID: "another-cve", + Namespace: "nvd", + }, + { + ID: "an-other-cve", + Namespace: "nvd", + }, + }, + Fix: v5.Fix{ + Versions: []string{"4.0.5"}, + State: v5.FixedState, + }, + }, + { + ID: "yet-another-cve", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1000.0.0", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: nil, + Fix: v5.Fix{ + Versions: []string{"1000.0.1"}, + State: v5.FixedState, + }, + }, + { + ID: "yet-another-cve-with-advisories", + PackageName: "package-name", + Namespace: "my-namespace", + VersionConstraint: "< 1000.0.0", + VersionFormat: "semver", + CPEs: nil, + RelatedVulnerabilities: nil, + Fix: v5.Fix{ + Versions: []string{"1000.0.1"}, + State: v5.FixedState, + }, + Advisories: []v5.Advisory{{ID: "ABC-12345", Link: "https://abc.xyz"}}, + }, + } + + total := append(expected, extra...) 
+ + if err = s.AddVulnerability(total...); err != nil { + t.Fatalf("failed to set Vulnerability: %+v", err) + } + + var allEntries []model.VulnerabilityModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityReader(t, s, expected[0].Namespace, expected[0].PackageName, expected) + +} + +func assertVulnerabilityMetadataReader(t *testing.T, reader v5.VulnerabilityMetadataStoreReader, id, namespace string, expected v5.VulnerabilityMetadata) { + if actual, err := reader.GetVulnerabilityMetadata(id, namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else if actual == nil { + t.Fatalf("no metadata returned for id=%q namespace=%q", id, namespace) + } else { + sortMetadataCvss(actual.Cvss) + sortMetadataCvss(expected.Cvss) + + // make sure they both have the same number of CVSS entries - preventing a panic on later assertions + assert.Len(t, expected.Cvss, len(actual.Cvss)) + for idx, actualCvss := range actual.Cvss { + assert.Equal(t, actualCvss.Vector, expected.Cvss[idx].Vector) + assert.Equal(t, actualCvss.Version, expected.Cvss[idx].Version) + assert.Equal(t, actualCvss.Metrics, expected.Cvss[idx].Metrics) + + actualVendor, err := json.Marshal(actualCvss.VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + expectedVendor, err := json.Marshal(expected.Cvss[idx].VendorMetadata) + if err != nil { + t.Errorf("unable to marshal vendor metadata: %q", err) + } + assert.Equal(t, string(actualVendor), string(expectedVendor)) + + } + + // nil the Cvss field because it is an interface - verification of Cvss + // has already happened at this point + expected.Cvss = nil + actual.Cvss = nil + assert.Equal(t, &expected, actual) + } + +} + +func sortMetadataCvss(cvss []v5.Cvss) { + sort.Slice(cvss, func(i, j int) bool { + // first, sort by Vector + if cvss[i].Vector > cvss[j].Vector { + return true + } + if cvss[i].Vector < cvss[j].Vector { + return false + } + // then try to sort by BaseScore if Vector is the same + return cvss[i].Metrics.BaseScore < cvss[j].Metrics.BaseScore + }) +} + +// CustomMetadata is effectively a noop, its values aren't meaningful and are +// mostly useful to ensure that any type can be stored and then retrieved for +// assertion in these test cases where custom vendor CVSS scores are used +type CustomMetadata struct { + SuperScore string + Vendor string +} + +func TestStore_GetVulnerabilityMetadata_SetVulnerabilityMetadata(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + total := []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + VendorMetadata: CustomMetadata{ + Vendor: "redhat", + SuperScore: "1000", + }, + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 1.1, + 2.2, + 3.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NOT", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.3, + 2.1, + 3.2, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--NICE", + VendorMetadata: nil, + }, + }, + }, + { + ID: "my-other-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + 
Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + } + + if err = s.AddVulnerabilityMetadata(total...); err != nil { + t.Fatalf("failed to set metadata: %+v", err) + } + + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + +} + +func TestStore_MergeVulnerabilityMetadata(t *testing.T) { + tests := []struct { + name string + add []v5.VulnerabilityMetadata + expected v5.VulnerabilityMetadata + err bool + }{ + { + name: "go-case", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "merge-links", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://google.com"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://yahoo.com"}, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re", "https://google.com", "https://yahoo.com"}, + Cvss: []v5.Cvss{}, + }, + }, + { + name: "bad-severity", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "meh, push that for next tuesday...", + URLs: []string{"https://redhat.com"}, + }, + }, + err: true, + }, + { + name: "mismatch-description", + err: true, + add: []v5.VulnerabilityMetadata{ + { + + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: 
"record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + }, + { + name: "mismatch-cvss2", + err: false, + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:P--VERY", + }, + }, + }, + }, + { + name: "mismatch-cvss3", + err: false, + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "best description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 0, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + // add each metadata in order + var theErr error + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + theErr = err + break + } + } + + if test.err && theErr == nil { + t.Fatalf("expected error but did not get one") + } else if !test.err && theErr != nil { + t.Fatalf("expected no error but got one: %+v", theErr) + } else if test.err && theErr != nil { + // test pass... + return + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + // get the resulting metadata object + if actual, err := s.GetVulnerabilityMetadata(test.expected.ID, test.expected.Namespace); err != nil { + t.Fatalf("failed to get metadata: %+v", err) + } else { + diffs := deep.Equal(&test.expected, actual) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + }) + } +} + +func TestCvssScoresInMetadata(t *testing.T) { + tests := []struct { + name string + add []v5.VulnerabilityMetadata + expected v5.VulnerabilityMetadata + }{ + { + name: "append-cvss", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + { + name: "append-vendor-cvss", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: 
"AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + }, + { + Version: "2.0", + Metrics: v5.NewCvssMetrics( + 4.1, + 5.2, + 6.3, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--VERY", + VendorMetadata: CustomMetadata{ + SuperScore: "100", + Vendor: "debian", + }, + }, + }, + }, + }, + { + name: "avoids-duplicate-cvss", + add: []v5.VulnerabilityMetadata{ + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + { + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + expected: v5.VulnerabilityMetadata{ + ID: "my-cve", + RecordSource: "record-source", + Namespace: "namespace", + Severity: "pretty bad", + URLs: []string{"https://ancho.re"}, + Description: "worst description ever", + Cvss: []v5.Cvss{ + { + Version: "3.0", + Metrics: v5.NewCvssMetrics( + 1.4, + 2.5, + 3.6, + ), + Vector: "AV:N/AC:L/Au:N/C:P/I:P/A:P--GOOD", + }, + }, + }, + }, + } + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + dbTempDir := t.TempDir() + + s, err := New(dbTempDir, true) + if err != nil { + t.Fatalf("could not create s: %+v", err) + } + + // add each metadata in order + for _, metadata := range test.add { + err = s.AddVulnerabilityMetadata(metadata) + if err != nil { + t.Fatalf("unable to s vulnerability metadata: %+v", err) + } + } + + // ensure there is exactly one entry + var allEntries []model.VulnerabilityMetadataModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != 1 { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + + assertVulnerabilityMetadataReader(t, s, test.expected.ID, test.expected.Namespace, test.expected) + }) + } +} + +func assertVulnerabilityMatchExclusionReader(t *testing.T, reader v5.VulnerabilityMatchExclusionStoreReader, id string, expected []v5.VulnerabilityMatchExclusion) { + if actual, err := reader.GetVulnerabilityMatchExclusion(id); err != nil { + t.Fatalf("failed to get Vulnerability Match Exclusion: %+v", err) + } else { + t.Logf("%+v", actual) + if len(actual) != len(expected) { + t.Fatalf("unexpected number of vulnerability match exclusions: expected=%d, actual=%d", len(expected), len(actual)) + } + for idx := range actual { + diffs := deep.Equal(expected[idx], actual[idx]) + if len(diffs) > 0 { + for _, d := range diffs { + t.Errorf("Diff: %+v", d) + } + } + } + } +} + +func TestStore_GetVulnerabilityMatchExclusion_SetVulnerabilityMatchExclusion(t *testing.T) { + dbTempFile := t.TempDir() + + s, err := New(dbTempFile, true) + if err != nil { + t.Fatalf("could not create store: %+v", err) + } + + extra := 
[]v5.VulnerabilityMatchExclusion{ + { + ID: "CVE-1234-14567", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "abc", + Language: "ruby", + Version: "1.2.3", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "abc", + Language: "ruby", + Version: "4.5.6", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "time-1", + Language: "ruby", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "extra-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Name: "abc.xyz:nothing-of-interest", + Type: "java-archive", + }, + }, + }, + Justification: "Because I said so.", + }, + { + ID: "CVE-1234-10", + Constraints: nil, + Justification: "Because I said so.", + }, + } + + expected := []v5.VulnerabilityMatchExclusion{ + { + ID: "CVE-1234-9999999", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "python", + Name: "abc", + Version: "1.2.3", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "python", + Name: "abc", + Version: "4.5.6", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "python", + Name: "time-245", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Type: "npm", + Name: "everything", + }, + }, + }, + Justification: "This is a false positive", + }, + { + ID: "CVE-1234-9999999", + Constraints: []v5.VulnerabilityMatchExclusionConstraint{ + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "old-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "go", + Type: "go-module", + Name: "abc", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + Namespace: "some-other-namespace:cpe", + }, + Package: v5.PackageExclusionConstraint{ + Language: "go", + Type: "go-module", + Name: "abc", + }, + }, + { + Vulnerability: v5.VulnerabilityExclusionConstraint{ + FixState: "wont-fix", + }, + }, + }, + Justification: "This is also a false positive", + }, + { + ID: "CVE-1234-9999999", + Justification: "global exclude", + }, + } + + total := append(expected, extra...) 
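+	// note: all three "expected" exclusions intentionally share the ID "CVE-1234-9999999";
+	// GetVulnerabilityMatchExclusion returns every exclusion recorded for a given ID, which
+	// is what the reader assertion below verifies.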
+ + if err = s.AddVulnerabilityMatchExclusion(total...); err != nil { + t.Fatalf("failed to set Vulnerability Match Exclusion: %+v", err) + } + + var allEntries []model.VulnerabilityMatchExclusionModel + s.(*store).db.Find(&allEntries) + if len(allEntries) != len(total) { + t.Fatalf("unexpected number of entries: %d", len(allEntries)) + } + assertVulnerabilityMatchExclusionReader(t, s, expected[0].ID, expected) +} + +//func Test_DiffStore(t *testing.T) { +// //GIVEN +// dbTempFile := t.TempDir() +// s1, err := New(dbTempFile, true) +// if err != nil { +// t.Fatalf("could not create store: %+v", err) +// } +// dbTempFile = t.TempDir() +// s2, err := New(dbTempFile, true) +// if err != nil { +// t.Fatalf("could not create store: %+v", err) +// } +// +// baseVulns := []v5.Vulnerability{ +// { +// Namespace: "github:language:python", +// ID: "CVE-123-4567", +// PackageName: "pypi:requests", +// VersionConstraint: "< 2.0 >= 1.29", +// CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, +// }, +// { +// Namespace: "github:language:python", +// ID: "CVE-123-4567", +// PackageName: "pypi:requests", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, +// }, +// { +// Namespace: "npm", +// ID: "CVE-123-7654", +// PackageName: "npm:axios", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, +// Fix: v5.Fix{ +// State: v5.UnknownFixState, +// }, +// }, +// { +// Namespace: "nuget", +// ID: "GHSA-****-******", +// PackageName: "nuget:net", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, +// Fix: v5.Fix{ +// State: v5.UnknownFixState, +// }, +// }, +// { +// Namespace: "hex", +// ID: "GHSA-^^^^-^^^^^^", +// PackageName: "hex:esbuild", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:hex:esbuild:*:*:*:*:*:*"}, +// }, +// } +// baseMetadata := []v5.VulnerabilityMetadata{ +// { +// Namespace: "nuget", +// ID: "GHSA-****-******", +// DataSource: "nvd", +// }, +// } +// targetVulns := []v5.Vulnerability{ +// { +// Namespace: "github:language:python", +// ID: "CVE-123-4567", +// PackageName: "pypi:requests", +// VersionConstraint: "< 2.0 >= 1.29", +// CPEs: []string{"cpe:2.3:pypi:requests:*:*:*:*:*:*"}, +// }, +// { +// Namespace: "github:language:go", +// ID: "GHSA-....-....", +// PackageName: "hashicorp:nomad", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:golang:hashicorp:nomad:*:*:*:*:*"}, +// }, +// { +// Namespace: "github:language:go", +// ID: "GHSA-....-....", +// PackageName: "hashicorp:n", +// VersionConstraint: "< 2.0 >= 1.17", +// CPEs: []string{"cpe:2.3:golang:hashicorp:n:*:*:*:*:*"}, +// }, +// { +// Namespace: "npm", +// ID: "CVE-123-7654", +// PackageName: "npm:axios", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:npm:axios:*:*:*:*:*:*"}, +// Fix: v5.Fix{ +// State: v5.WontFixState, +// }, +// }, +// { +// Namespace: "nuget", +// ID: "GHSA-****-******", +// PackageName: "nuget:net", +// VersionConstraint: "< 3.0 >= 2.17", +// CPEs: []string{"cpe:2.3:nuget:net:*:*:*:*:*:*"}, +// Fix: v5.Fix{ +// State: v5.UnknownFixState, +// }, +// }, +// } +// expectedDiffs := []db.Diff{ +// { +// Reason: db.DiffChanged, +// ID: "CVE-123-4567", +// Namespace: "github:language:python", +// Packages: []string{"pypi:requests"}, +// }, +// { +// Reason: db.DiffChanged, +// ID: "CVE-123-7654", +// Namespace: "npm", +// Packages: []string{"npm:axios"}, +// }, +// { +// Reason: db.DiffRemoved, +// ID: "GHSA-****-******", +// 
Namespace: "nuget", +// Packages: []string{"nuget:net"}, +// }, +// { +// Reason: db.DiffAdded, +// ID: "GHSA-....-....", +// Namespace: "github:language:go", +// Packages: []string{"hashicorp:n", "hashicorp:nomad"}, +// }, +// { +// Reason: db.DiffRemoved, +// ID: "GHSA-^^^^-^^^^^^", +// Namespace: "hex", +// Packages: []string{"hex:esbuild"}, +// }, +// } +// +// for _, vuln := range baseVulns { +// s1.AddVulnerability(vuln) +// } +// for _, vuln := range targetVulns { +// s2.AddVulnerability(vuln) +// } +// for _, meta := range baseMetadata { +// s1.AddVulnerabilityMetadata(meta) +// } +// +// //WHEN +// result, err := s1.DiffStore(s2) +// +// //THEN +// sort.SliceStable(*result, func(i, j int) bool { +// return (*result)[i].ID < (*result)[j].ID +// }) +// for i := range *result { +// sort.Strings((*result)[i].Packages) +// } +// +// assert.NoError(t, err) +// assert.Equal(t, expectedDiffs, *result) +//} diff --git a/pkg/db/v5/vulnerability.go b/pkg/db/v5/vulnerability.go new file mode 100644 index 00000000..8d1f8cd6 --- /dev/null +++ b/pkg/db/v5/vulnerability.go @@ -0,0 +1,114 @@ +package v5 + +import ( + "sort" + "strings" + + "github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier" +) + +// Vulnerability represents the minimum data fields necessary to perform package-to-vulnerability matching. This can represent a CVE, 3rd party advisory, or any source that relates back to a CVE. +type Vulnerability struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + PackageName string `json:"package_name"` // The name of the package that is vulnerable + Namespace string `json:"namespace"` // The ecosystem where the package resides + PackageQualifiers []qualifier.Qualifier `json:"package_qualifiers"` // The qualifiers for determining if a package is vulnerable + VersionConstraint string `json:"version_constraint"` // The version range which the given package is vulnerable + VersionFormat string `json:"version_format"` // The format which all version fields should be interpreted as + CPEs []string `json:"cpes"` // The CPEs which are considered vulnerable + RelatedVulnerabilities []VulnerabilityReference `json:"related_vulnerabilities"` // Other Vulnerabilities that are related to this one (e.g. 
GHSA relate to CVEs, or how distro CVE relates to NVD record) + Fix Fix `json:"fix"` // All information about fixed versions + Advisories []Advisory `json:"advisories"` // Any vendor advisories about fixes or other notifications about this vulnerability +} + +type VulnerabilityReference struct { + ID string `json:"id"` + Namespace string `json:"namespace"` +} + +//nolint:gocognit +func (v *Vulnerability) Equal(vv Vulnerability) bool { + equal := v.ID == vv.ID && + v.PackageName == vv.PackageName && + v.Namespace == vv.Namespace && + len(v.PackageQualifiers) == len(vv.PackageQualifiers) && + v.VersionConstraint == vv.VersionConstraint && + v.VersionFormat == vv.VersionFormat && + len(v.CPEs) == len(vv.CPEs) && + len(v.RelatedVulnerabilities) == len(vv.RelatedVulnerabilities) && + len(v.Advisories) == len(vv.Advisories) && + v.Fix.State == vv.Fix.State && + len(v.Fix.Versions) == len(vv.Fix.Versions) + + if !equal { + return false + } + + sort.Strings(v.CPEs) + sort.Strings(vv.CPEs) + for idx, cpe := range v.CPEs { + if cpe != vv.CPEs[idx] { + return false + } + } + + sortedBaseRelVulns, sortedTargetRelVulns := sortRelatedVulns(v.RelatedVulnerabilities), sortRelatedVulns(vv.RelatedVulnerabilities) + for idx, item := range sortedBaseRelVulns { + if item != sortedTargetRelVulns[idx] { + return false + } + } + sortedBaseAdvisories, sortedTargetAdvisories := sortAdvisories(v.Advisories), sortAdvisories(vv.Advisories) + for idx, item := range sortedBaseAdvisories { + if item != sortedTargetAdvisories[idx] { + return false + } + } + sortedBasePkgQualifiers, sortedTargetPkgQualifiers := sortPackageQualifiers(v.PackageQualifiers), sortPackageQualifiers(vv.PackageQualifiers) + for idx, item := range sortedBasePkgQualifiers { + if item != sortedTargetPkgQualifiers[idx] { + return false + } + } + + sort.Strings(v.Fix.Versions) + sort.Strings(vv.Fix.Versions) + for idx, item := range v.Fix.Versions { + if item != vv.Fix.Versions[idx] { + return false + } + } + + return true +} + +func sortRelatedVulns(vulns []VulnerabilityReference) []VulnerabilityReference { + sort.SliceStable(vulns, func(i, j int) bool { + b1, b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(vulns[i].ID) + b1.WriteString(vulns[i].Namespace) + b2.WriteString(vulns[j].ID) + b2.WriteString(vulns[j].Namespace) + return b1.String() < b2.String() + }) + return vulns +} + +func sortAdvisories(advisories []Advisory) []Advisory { + sort.SliceStable(advisories, func(i, j int) bool { + b1, b2 := strings.Builder{}, strings.Builder{} + b1.WriteString(advisories[i].ID) + b1.WriteString(advisories[i].Link) + b2.WriteString(advisories[j].ID) + b2.WriteString(advisories[j].Link) + return b1.String() < b2.String() + }) + return advisories +} + +func sortPackageQualifiers(qualifiers []qualifier.Qualifier) []qualifier.Qualifier { + sort.SliceStable(qualifiers, func(i, j int) bool { + return qualifiers[i].String() < qualifiers[j].String() + }) + return qualifiers +} diff --git a/pkg/db/v5/vulnerability_match_exclusion.go b/pkg/db/v5/vulnerability_match_exclusion.go new file mode 100644 index 00000000..86d36f9c --- /dev/null +++ b/pkg/db/v5/vulnerability_match_exclusion.go @@ -0,0 +1,130 @@ +package v5 + +import ( + "encoding/json" +) + +// VulnerabilityMatchExclusion represents the minimum data fields necessary to automatically filter certain +// vulnerabilities from match results based on the specified constraints. 
+type VulnerabilityMatchExclusion struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + Constraints []VulnerabilityMatchExclusionConstraint `json:"constraints,omitempty"` // The constraints under which the exclusion applies + Justification string `json:"justification"` // Justification for the exclusion +} + +// VulnerabilityMatchExclusionConstraint describes criteria for which matches should be excluded +type VulnerabilityMatchExclusionConstraint struct { + Vulnerability VulnerabilityExclusionConstraint `json:"vulnerability,omitempty"` // Vulnerability exclusion criteria + Package PackageExclusionConstraint `json:"package,omitempty"` // Package exclusion criteria + ExtraFields map[string]interface{} `json:"-"` +} + +func (c VulnerabilityMatchExclusionConstraint) Usable() bool { + return len(c.ExtraFields) == 0 && c.Vulnerability.Usable() && c.Package.Usable() +} + +func (c *VulnerabilityMatchExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _vulnerabilityMatchExclusionConstraint VulnerabilityMatchExclusionConstraint + + // Unmarshal into an instance of the new type. + var _c _vulnerabilityMatchExclusionConstraint + if err := json.Unmarshal(data, &_c); err != nil { + return err + } + + if err := json.Unmarshal(data, &_c.ExtraFields); err != nil { + return err + } + + delete(_c.ExtraFields, "vulnerability") + delete(_c.ExtraFields, "package") + + if len(_c.ExtraFields) == 0 { + _c.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. + *c = VulnerabilityMatchExclusionConstraint(_c) + return nil +} + +// VulnerabilityExclusionConstraint describes criteria for excluding a match based on additional vulnerability components +type VulnerabilityExclusionConstraint struct { + Namespace string `json:"namespace,omitempty"` // Vulnerability namespace + FixState FixState `json:"fix_state,omitempty"` // Vulnerability fix state + ExtraFields map[string]interface{} `json:"-"` +} + +func (v VulnerabilityExclusionConstraint) Usable() bool { + return len(v.ExtraFields) == 0 +} + +func (v *VulnerabilityExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _vulnerabilityExclusionConstraint VulnerabilityExclusionConstraint + + // Unmarshal into an instance of the new type. + var _v _vulnerabilityExclusionConstraint + if err := json.Unmarshal(data, &_v); err != nil { + return err + } + + if err := json.Unmarshal(data, &_v.ExtraFields); err != nil { + return err + } + + delete(_v.ExtraFields, "namespace") + delete(_v.ExtraFields, "fix_state") + + if len(_v.ExtraFields) == 0 { + _v.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. 
+ *v = VulnerabilityExclusionConstraint(_v) + return nil +} + +// PackageExclusionConstraint describes criteria for excluding a match based on package components +type PackageExclusionConstraint struct { + Name string `json:"name,omitempty"` // Package name + Language string `json:"language,omitempty"` // The language ecosystem for a package + Type string `json:"type,omitempty"` // Package type + Version string `json:"version,omitempty"` // Package version + Location string `json:"location,omitempty"` // Package location + ExtraFields map[string]interface{} `json:"-"` +} + +func (p PackageExclusionConstraint) Usable() bool { + return len(p.ExtraFields) == 0 +} + +func (p *PackageExclusionConstraint) UnmarshalJSON(data []byte) error { + // Create a new type from the target type to avoid recursion. + type _packageExclusionConstraint PackageExclusionConstraint + + // Unmarshal into an instance of the new type. + var _p _packageExclusionConstraint + if err := json.Unmarshal(data, &_p); err != nil { + return err + } + + if err := json.Unmarshal(data, &_p.ExtraFields); err != nil { + return err + } + + delete(_p.ExtraFields, "name") + delete(_p.ExtraFields, "language") + delete(_p.ExtraFields, "type") + delete(_p.ExtraFields, "version") + delete(_p.ExtraFields, "location") + + if len(_p.ExtraFields) == 0 { + _p.ExtraFields = nil + } + + // Cast the new type instance to the original type and assign. + *p = PackageExclusionConstraint(_p) + return nil +} diff --git a/pkg/db/v5/vulnerability_match_exclusion_store.go b/pkg/db/v5/vulnerability_match_exclusion_store.go new file mode 100644 index 00000000..2a463289 --- /dev/null +++ b/pkg/db/v5/vulnerability_match_exclusion_store.go @@ -0,0 +1,14 @@ +package v5 + +type VulnerabilityMatchExclusionStore interface { + VulnerabilityMatchExclusionStoreReader + VulnerabilityMatchExclusionStoreWriter +} + +type VulnerabilityMatchExclusionStoreReader interface { + GetVulnerabilityMatchExclusion(id string) ([]VulnerabilityMatchExclusion, error) +} + +type VulnerabilityMatchExclusionStoreWriter interface { + AddVulnerabilityMatchExclusion(exclusion ...VulnerabilityMatchExclusion) error +} diff --git a/pkg/db/v5/vulnerability_metadata.go b/pkg/db/v5/vulnerability_metadata.go new file mode 100644 index 00000000..e37395cf --- /dev/null +++ b/pkg/db/v5/vulnerability_metadata.go @@ -0,0 +1,78 @@ +package v5 + +import "reflect" + +// VulnerabilityMetadata represents all vulnerability data that is not necessary to perform package-to-vulnerability matching. +type VulnerabilityMetadata struct { + ID string `json:"id"` // The identifier of the vulnerability or advisory + Namespace string `json:"namespace"` // Where this entry is valid within + DataSource string `json:"data_source"` // A URL where the data was sourced from + RecordSource string `json:"record_source"` // The source of the vulnerability information (relative to the immediate upstream in the enterprise feedgroup) + Severity string `json:"severity"` // How severe the vulnerability is (valid values are defined by upstream sources currently) + URLs []string `json:"urls"` // URLs to get more information about the vulnerability or advisory + Description string `json:"description"` // Description of the vulnerability + Cvss []Cvss `json:"cvss"` // Common Vulnerability Scoring System values +} + +// Cvss contains select Common Vulnerability Scoring System fields for a vulnerability. 
+type Cvss struct { + // VendorMetadata captures non-standard CVSS fields that vendors can sometimes + // include when providing CVSS information. This vendor-specific metadata type + // allows to capture that data for persisting into the database + VendorMetadata interface{} `json:"vendor_metadata"` + Metrics CvssMetrics `json:"metrics"` + Vector string `json:"vector"` // A textual representation of the metric values used to determine the score + Version string `json:"version"` // The version of the CVSS spec, for example 2.0, 3.0, or 3.1 + Source string `json:"source"` // Identifies the organization that provided the score + Type string `json:"type"` // Whether the source is a `primary` or `secondary` source +} + +// CvssMetrics are the quantitative values that make up a CVSS score. +type CvssMetrics struct { + // BaseScore ranges from 0 - 10 and defines qualities intrinsic to the severity of a vulnerability. + BaseScore float64 `json:"base_score"` + // ExploitabilityScore is a pointer to avoid having a 0 value by default. + // It is an indicator of how easy it may be for an attacker to exploit + // a vulnerability + ExploitabilityScore *float64 `json:"exploitability_score"` + // ImpactScore represents the effects of an exploited vulnerability + // relative to compromise in confidentiality, integrity, and availability. + // It is an optional parameter, so that is why it is a pointer instead of + // a regular field + ImpactScore *float64 `json:"impact_score"` +} + +func NewCvssMetrics(baseScore, exploitabilityScore, impactScore float64) CvssMetrics { + return CvssMetrics{ + BaseScore: baseScore, + ExploitabilityScore: &exploitabilityScore, + ImpactScore: &impactScore, + } +} + +func (v *VulnerabilityMetadata) Equal(vv VulnerabilityMetadata) bool { + equal := v.ID == vv.ID && + v.Namespace == vv.Namespace && + v.DataSource == vv.DataSource && + v.RecordSource == vv.RecordSource && + v.Severity == vv.Severity && + v.Description == vv.Description && + len(v.URLs) == len(vv.URLs) && + len(v.Cvss) == len(vv.Cvss) + + if !equal { + return false + } + for idx, cpe := range v.URLs { + if cpe != vv.URLs[idx] { + return false + } + } + for idx, item := range v.Cvss { + if !reflect.DeepEqual(item, vv.Cvss[idx]) { + return false + } + } + + return true +} diff --git a/pkg/db/v5/vulnerability_metadata_store.go b/pkg/db/v5/vulnerability_metadata_store.go new file mode 100644 index 00000000..d7578cae --- /dev/null +++ b/pkg/db/v5/vulnerability_metadata_store.go @@ -0,0 +1,15 @@ +package v5 + +type VulnerabilityMetadataStore interface { + VulnerabilityMetadataStoreReader + VulnerabilityMetadataStoreWriter +} + +type VulnerabilityMetadataStoreReader interface { + GetVulnerabilityMetadata(id, namespace string) (*VulnerabilityMetadata, error) + GetAllVulnerabilityMetadata() (*[]VulnerabilityMetadata, error) +} + +type VulnerabilityMetadataStoreWriter interface { + AddVulnerabilityMetadata(metadata ...VulnerabilityMetadata) error +} diff --git a/pkg/db/v5/vulnerability_store.go b/pkg/db/v5/vulnerability_store.go new file mode 100644 index 00000000..aa6a450c --- /dev/null +++ b/pkg/db/v5/vulnerability_store.go @@ -0,0 +1,23 @@ +package v5 + +const VulnerabilityStoreFileName = "vulnerability.db" + +type VulnerabilityStore interface { + VulnerabilityStoreReader + VulnerabilityStoreWriter +} + +type VulnerabilityStoreReader interface { + // GetVulnerabilityNamespaces retrieves unique list of vulnerability namespaces + GetVulnerabilityNamespaces() ([]string, error) + // GetVulnerability retrieves vulnerabilities 
by namespace and id + GetVulnerability(namespace, id string) ([]Vulnerability, error) + // SearchForVulnerabilities retrieves vulnerabilities by namespace and package + SearchForVulnerabilities(namespace, packageName string) ([]Vulnerability, error) + GetAllVulnerabilities() (*[]Vulnerability, error) +} + +type VulnerabilityStoreWriter interface { + // AddVulnerability inserts a new record of a vulnerability into the store + AddVulnerability(vulnerabilities ...Vulnerability) error +} diff --git a/pkg/process/build.go b/pkg/process/build.go index 39acf874..40418a1e 100644 --- a/pkg/process/build.go +++ b/pkg/process/build.go @@ -9,6 +9,11 @@ import ( "github.com/anchore/grype-db/internal/log" "github.com/anchore/grype-db/pkg/data" + grypeDBv1 "github.com/anchore/grype-db/pkg/db/v1" + grypeDBv2 "github.com/anchore/grype-db/pkg/db/v2" + grypeDBv3 "github.com/anchore/grype-db/pkg/db/v3" + grypeDBv4 "github.com/anchore/grype-db/pkg/db/v4" + grypeDBv5 "github.com/anchore/grype-db/pkg/db/v5" v1 "github.com/anchore/grype-db/pkg/process/v1" v2 "github.com/anchore/grype-db/pkg/process/v2" v3 "github.com/anchore/grype-db/pkg/process/v3" @@ -17,11 +22,6 @@ import ( "github.com/anchore/grype-db/pkg/provider" "github.com/anchore/grype-db/pkg/provider/entry" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDBv1 "github.com/anchore/grype/grype/db/v1" - grypeDBv2 "github.com/anchore/grype/grype/db/v2" - grypeDBv3 "github.com/anchore/grype/grype/db/v3" - grypeDBv4 "github.com/anchore/grype/grype/db/v4" - grypeDBv5 "github.com/anchore/grype/grype/db/v5" ) type BuildConfig struct { diff --git a/pkg/process/default_schema_version.go b/pkg/process/default_schema_version.go index 369d8ecf..7bbab621 100644 --- a/pkg/process/default_schema_version.go +++ b/pkg/process/default_schema_version.go @@ -1,5 +1,5 @@ package process -import grypeDB "github.com/anchore/grype/grype/db/v5" +import grypeDB "github.com/anchore/grype-db/pkg/db/v5" const DefaultSchemaVersion = grypeDB.SchemaVersion diff --git a/pkg/process/package.go b/pkg/process/package.go index 5ebf591b..e7cbce94 100644 --- a/pkg/process/package.go +++ b/pkg/process/package.go @@ -14,7 +14,7 @@ import ( "github.com/anchore/grype-db/internal/log" "github.com/anchore/grype-db/internal/tar" - "github.com/anchore/grype/grype/db" + "github.com/anchore/grype-db/pkg/db/curation" ) func randomString() (string, error) { @@ -27,7 +27,7 @@ func Package(dbDir, publishBaseURL, overrideArchiveExtension string) error { log.WithFields("from", dbDir, "url", publishBaseURL, "extension-override", overrideArchiveExtension).Info("packaging database") fs := afero.NewOsFs() - metadata, err := db.NewMetadataFromDir(fs, dbDir) + metadata, err := curation.NewMetadataFromDir(fs, dbDir) if err != nil { return err } @@ -87,13 +87,13 @@ func Package(dbDir, publishBaseURL, overrideArchiveExtension string) error { log.WithFields("path", tarPath).Info("created database archive") - entry, err := db.NewListingEntryFromArchive(fs, *metadata, tarPath, u) + entry, err := curation.NewListingEntryFromArchive(fs, *metadata, tarPath, u) if err != nil { return fmt.Errorf("unable to create listing entry from archive: %w", err) } - listing := db.NewListing(entry) - listingPath := path.Join(dbDir, db.ListingFileName) + listing := curation.NewListing(entry) + listingPath := path.Join(dbDir, curation.ListingFileName) if err = listing.Write(listingPath); err != nil { return err } diff --git a/pkg/process/v1/transformers/entry.go b/pkg/process/v1/transformers/entry.go index 172f7aa9..ea37addd 
100644 --- a/pkg/process/v1/transformers/entry.go +++ b/pkg/process/v1/transformers/entry.go @@ -2,7 +2,7 @@ package transformers import ( "github.com/anchore/grype-db/pkg/data" - grypeDB "github.com/anchore/grype/grype/db/v1" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" ) func NewEntries(vs []grypeDB.Vulnerability, metadata grypeDB.VulnerabilityMetadata) []data.Entry { diff --git a/pkg/process/v1/transformers/github/transform.go b/pkg/process/v1/transformers/github/transform.go index 75fbaf22..57646ee8 100644 --- a/pkg/process/v1/transformers/github/transform.go +++ b/pkg/process/v1/transformers/github/transform.go @@ -2,10 +2,10 @@ package github import ( "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" "github.com/anchore/grype-db/pkg/process/common" "github.com/anchore/grype-db/pkg/process/v1/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v1" ) const ( diff --git a/pkg/process/v1/transformers/github/transform_test.go b/pkg/process/v1/transformers/github/transform_test.go index d0cc3f1d..2f59a88a 100644 --- a/pkg/process/v1/transformers/github/transform_test.go +++ b/pkg/process/v1/transformers/github/transform_test.go @@ -7,9 +7,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v1" ) func TestUnmarshalGitHubEntries(t *testing.T) { diff --git a/pkg/process/v1/transformers/nvd/transform.go b/pkg/process/v1/transformers/nvd/transform.go index f5b241b4..71b00a56 100644 --- a/pkg/process/v1/transformers/nvd/transform.go +++ b/pkg/process/v1/transformers/nvd/transform.go @@ -3,12 +3,13 @@ package nvd import ( "strings" - "github.com/anchore/grype-db/internal" + "github.com/scylladb/go-set/strset" + "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" "github.com/anchore/grype-db/pkg/process/v1/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" "github.com/anchore/grype-db/pkg/provider/unmarshal/nvd" - grypeDB "github.com/anchore/grype/grype/db/v1" ) const ( @@ -35,7 +36,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) { // duplicate the vulnerabilities based on the set of unique packages the vulnerability is for for _, p := range uniquePkgs.All() { matches := uniquePkgs.Matches(p) - cpes := internal.NewStringSet() + cpes := strset.New() for _, m := range matches { cpes.Add(m.Criteria) } @@ -49,7 +50,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) { PackageName: p.Product, Namespace: "nvd", // should the vendor be here? or in other metadata? 
ProxyVulnerabilities: []string{}, - CPEs: cpes.ToSlice(), + CPEs: cpes.List(), } allVulns = append(allVulns, vuln) diff --git a/pkg/process/v1/transformers/nvd/transform_test.go b/pkg/process/v1/transformers/nvd/transform_test.go index 57e140b2..5b138302 100644 --- a/pkg/process/v1/transformers/nvd/transform_test.go +++ b/pkg/process/v1/transformers/nvd/transform_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v1" ) const recordSource = "nvdv2:cves" diff --git a/pkg/process/v1/transformers/os/transform.go b/pkg/process/v1/transformers/os/transform.go index dcb270e1..c5b5e472 100644 --- a/pkg/process/v1/transformers/os/transform.go +++ b/pkg/process/v1/transformers/os/transform.go @@ -5,10 +5,10 @@ import ( "strings" "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" "github.com/anchore/grype-db/pkg/process/common" "github.com/anchore/grype-db/pkg/process/v1/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v1" ) const ( diff --git a/pkg/process/v1/transformers/os/transform_test.go b/pkg/process/v1/transformers/os/transform_test.go index bd0334df..440d588d 100644 --- a/pkg/process/v1/transformers/os/transform_test.go +++ b/pkg/process/v1/transformers/os/transform_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v1" ) func TestUnmarshalVulnerabilitiesEntries(t *testing.T) { diff --git a/pkg/process/v1/writer.go b/pkg/process/v1/writer.go index 50dd1805..22a83b18 100644 --- a/pkg/process/v1/writer.go +++ b/pkg/process/v1/writer.go @@ -13,9 +13,9 @@ import ( "github.com/anchore/grype-db/internal/file" "github.com/anchore/grype-db/internal/log" "github.com/anchore/grype-db/pkg/data" - "github.com/anchore/grype/grype/db" - grypeDB "github.com/anchore/grype/grype/db/v1" - grypeDBStore "github.com/anchore/grype/grype/db/v1/store" + "github.com/anchore/grype-db/pkg/db/curation" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" + grypeDBStore "github.com/anchore/grype-db/pkg/db/v1/store" ) var _ data.Writer = (*writer)(nil) @@ -65,7 +65,7 @@ func (w writer) Write(entries ...data.Entry) error { return nil } -func (w writer) metadata() (*db.Metadata, error) { +func (w writer) metadata() (*curation.Metadata, error) { hashStr, err := file.ContentDigest(afero.NewOsFs(), w.dbPath, sha256.New()) if err != nil { return nil, fmt.Errorf("failed to hash database file (%s): %w", w.dbPath, err) @@ -76,7 +76,7 @@ func (w writer) metadata() (*db.Metadata, error) { return nil, fmt.Errorf("failed to fetch store ID: %w", err) } - metadata := db.Metadata{ + metadata := curation.Metadata{ Built: storeID.BuildTimestamp, Version: storeID.SchemaVersion, Checksum: "sha256:" + hashStr, @@ -91,7 +91,7 @@ func (w writer) Close() error { return err } - metadataPath := path.Join(filepath.Dir(w.dbPath), db.MetadataFileName) + metadataPath := path.Join(filepath.Dir(w.dbPath), curation.MetadataFileName) if err = metadata.Write(metadataPath); err != nil { return err } diff --git a/pkg/process/v1/writer_test.go 
b/pkg/process/v1/writer_test.go index 4593346b..a16b1f53 100644 --- a/pkg/process/v1/writer_test.go +++ b/pkg/process/v1/writer_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/anchore/grype-db/pkg/data" - grypeDB "github.com/anchore/grype/grype/db/v1" + grypeDB "github.com/anchore/grype-db/pkg/db/v1" ) var _ grypeDB.VulnerabilityMetadataStoreReader = (*mockReader)(nil) diff --git a/pkg/process/v2/transformers/entry.go b/pkg/process/v2/transformers/entry.go index 6109add5..a85a0208 100644 --- a/pkg/process/v2/transformers/entry.go +++ b/pkg/process/v2/transformers/entry.go @@ -2,7 +2,7 @@ package transformers import ( "github.com/anchore/grype-db/pkg/data" - grypeDB "github.com/anchore/grype/grype/db/v2" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" ) func NewEntries(vs []grypeDB.Vulnerability, metadata grypeDB.VulnerabilityMetadata) []data.Entry { diff --git a/pkg/process/v2/transformers/github/transform.go b/pkg/process/v2/transformers/github/transform.go index 873ac1a9..879f8e53 100644 --- a/pkg/process/v2/transformers/github/transform.go +++ b/pkg/process/v2/transformers/github/transform.go @@ -2,10 +2,10 @@ package github import ( "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" "github.com/anchore/grype-db/pkg/process/common" "github.com/anchore/grype-db/pkg/process/v2/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v2" ) const ( diff --git a/pkg/process/v2/transformers/github/transform_test.go b/pkg/process/v2/transformers/github/transform_test.go index c548233c..d59ef397 100644 --- a/pkg/process/v2/transformers/github/transform_test.go +++ b/pkg/process/v2/transformers/github/transform_test.go @@ -7,9 +7,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v2" ) func TestUnmarshalGitHubEntries(t *testing.T) { diff --git a/pkg/process/v2/transformers/nvd/transform.go b/pkg/process/v2/transformers/nvd/transform.go index bd189580..623ddb27 100644 --- a/pkg/process/v2/transformers/nvd/transform.go +++ b/pkg/process/v2/transformers/nvd/transform.go @@ -3,12 +3,13 @@ package nvd import ( "strings" - "github.com/anchore/grype-db/internal" + "github.com/scylladb/go-set/strset" + "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" "github.com/anchore/grype-db/pkg/process/v2/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" "github.com/anchore/grype-db/pkg/provider/unmarshal/nvd" - grypeDB "github.com/anchore/grype/grype/db/v2" ) const ( @@ -36,7 +37,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) { // duplicate the vulnerabilities based on the set of unique packages the vulnerability is for for _, p := range uniquePkgs.All() { matches := uniquePkgs.Matches(p) - cpes := internal.NewStringSet() + cpes := strset.New() for _, m := range matches { cpes.Add(m.Criteria) } @@ -50,7 +51,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) { PackageName: p.Product, Namespace: "nvd", // should the vendor be here? or in other metadata? 
ProxyVulnerabilities: []string{}, - CPEs: cpes.ToSlice(), + CPEs: cpes.List(), } allVulns = append(allVulns, vuln) diff --git a/pkg/process/v2/transformers/nvd/transform_test.go b/pkg/process/v2/transformers/nvd/transform_test.go index 7d10d18a..de0c10f4 100644 --- a/pkg/process/v2/transformers/nvd/transform_test.go +++ b/pkg/process/v2/transformers/nvd/transform_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v2" ) func TestUnmarshalVulnerabilitiesEntries(t *testing.T) { diff --git a/pkg/process/v2/transformers/os/transform.go b/pkg/process/v2/transformers/os/transform.go index f1c7dafc..261aa6f6 100644 --- a/pkg/process/v2/transformers/os/transform.go +++ b/pkg/process/v2/transformers/os/transform.go @@ -5,10 +5,10 @@ import ( "strings" "github.com/anchore/grype-db/pkg/data" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" "github.com/anchore/grype-db/pkg/process/common" "github.com/anchore/grype-db/pkg/process/v2/transformers" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v2" ) const ( diff --git a/pkg/process/v2/transformers/os/transform_test.go b/pkg/process/v2/transformers/os/transform_test.go index cae8160c..7a40c599 100644 --- a/pkg/process/v2/transformers/os/transform_test.go +++ b/pkg/process/v2/transformers/os/transform_test.go @@ -8,9 +8,9 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" testUtils "github.com/anchore/grype-db/pkg/process/tests" "github.com/anchore/grype-db/pkg/provider/unmarshal" - grypeDB "github.com/anchore/grype/grype/db/v2" ) func TestUnmarshalVulnerabilitiesEntries(t *testing.T) { diff --git a/pkg/process/v2/writer.go b/pkg/process/v2/writer.go index c8029d57..54903304 100644 --- a/pkg/process/v2/writer.go +++ b/pkg/process/v2/writer.go @@ -13,9 +13,9 @@ import ( "github.com/anchore/grype-db/internal/file" "github.com/anchore/grype-db/internal/log" "github.com/anchore/grype-db/pkg/data" - "github.com/anchore/grype/grype/db" - grypeDB "github.com/anchore/grype/grype/db/v2" - grypeDBStore "github.com/anchore/grype/grype/db/v2/store" + "github.com/anchore/grype-db/pkg/db/curation" + grypeDB "github.com/anchore/grype-db/pkg/db/v2" + grypeDBStore "github.com/anchore/grype-db/pkg/db/v2/store" ) var _ data.Writer = (*writer)(nil) @@ -65,7 +65,7 @@ func (w writer) Write(entries ...data.Entry) error { return nil } -func (w writer) metadata() (*db.Metadata, error) { +func (w writer) metadata() (*curation.Metadata, error) { hashStr, err := file.ContentDigest(afero.NewOsFs(), w.dbPath, sha256.New()) if err != nil { return nil, fmt.Errorf("failed to hash database file (%s): %w", w.dbPath, err) @@ -76,7 +76,7 @@ func (w writer) metadata() (*db.Metadata, error) { return nil, fmt.Errorf("failed to fetch store ID: %w", err) } - metadata := db.Metadata{ + metadata := curation.Metadata{ Built: storeID.BuildTimestamp, Version: storeID.SchemaVersion, Checksum: "sha256:" + hashStr, @@ -91,7 +91,7 @@ func (w writer) Close() error { return err } - metadataPath := path.Join(filepath.Dir(w.dbPath), db.MetadataFileName) + metadataPath := path.Join(filepath.Dir(w.dbPath), curation.MetadataFileName) if err = metadata.Write(metadataPath); err != nil { return err } diff --git 
diff --git a/pkg/process/v2/writer_test.go b/pkg/process/v2/writer_test.go
index f0327707..a151570c 100644
--- a/pkg/process/v2/writer_test.go
+++ b/pkg/process/v2/writer_test.go
@@ -7,7 +7,7 @@ import (
 	"github.com/stretchr/testify/assert"

 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v2"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v2"
 )

 var _ grypeDB.VulnerabilityMetadataStoreReader = (*mockReader)(nil)
diff --git a/pkg/process/v3/transformers/entry.go b/pkg/process/v3/transformers/entry.go
index ae8f3715..e3fc4185 100644
--- a/pkg/process/v3/transformers/entry.go
+++ b/pkg/process/v3/transformers/entry.go
@@ -2,7 +2,7 @@ package transformers

 import (
 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 )

 func NewEntries(vs []grypeDB.Vulnerability, metadata grypeDB.VulnerabilityMetadata) []data.Entry {
diff --git a/pkg/process/v3/transformers/github/transform.go b/pkg/process/v3/transformers/github/transform.go
index f9ec70cf..d8c31758 100644
--- a/pkg/process/v3/transformers/github/transform.go
+++ b/pkg/process/v3/transformers/github/transform.go
@@ -2,10 +2,10 @@ package github

 import (
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v3/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 const (
diff --git a/pkg/process/v3/transformers/github/transform_test.go b/pkg/process/v3/transformers/github/transform_test.go
index ce0bc28c..c975f44b 100644
--- a/pkg/process/v3/transformers/github/transform_test.go
+++ b/pkg/process/v3/transformers/github/transform_test.go
@@ -8,9 +8,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 func TestUnmarshalGitHubEntries(t *testing.T) {
diff --git a/pkg/process/v3/transformers/msrc/transform.go b/pkg/process/v3/transformers/msrc/transform.go
index d1a1ea48..a9ec90a9 100644
--- a/pkg/process/v3/transformers/msrc/transform.go
+++ b/pkg/process/v3/transformers/msrc/transform.go
@@ -4,10 +4,10 @@ import (
 	"fmt"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v3/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 const (
diff --git a/pkg/process/v3/transformers/msrc/transform_test.go b/pkg/process/v3/transformers/msrc/transform_test.go
index 4bb431c8..17822761 100644
--- a/pkg/process/v3/transformers/msrc/transform_test.go
+++ b/pkg/process/v3/transformers/msrc/transform_test.go
@@ -7,9 +7,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 func TestUnmarshalMsrcVulnerabilities(t *testing.T) {
diff --git a/pkg/process/v3/transformers/nvd/transform.go b/pkg/process/v3/transformers/nvd/transform.go
index 3b3f484c..5a98cd99 100644
--- a/pkg/process/v3/transformers/nvd/transform.go
+++ b/pkg/process/v3/transformers/nvd/transform.go
@@ -3,12 +3,13 @@ package nvd

 import (
 	"fmt"

-	"github.com/anchore/grype-db/internal"
+	"github.com/scylladb/go-set/strset"
+
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	"github.com/anchore/grype-db/pkg/process/v3/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal/nvd"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 const (
@@ -43,7 +44,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 	var allVulns []grypeDB.Vulnerability
 	for _, p := range uniquePkgs.All() {
 		matches := uniquePkgs.Matches(p)
-		cpes := internal.NewStringSet()
+		cpes := strset.New()
 		for _, m := range matches {
 			cpes.Add(m.Criteria)
 		}
@@ -55,7 +56,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 			VersionFormat: "unknown",
 			PackageName:   p.Product,
 			Namespace:     entryNamespace,
-			CPEs:          cpes.ToSlice(),
+			CPEs:          cpes.List(),
 			Fix: grypeDB.Fix{
 				State: grypeDB.UnknownFixState,
 			},
diff --git a/pkg/process/v3/transformers/nvd/transform_test.go b/pkg/process/v3/transformers/nvd/transform_test.go
index 1f136144..12125a2a 100644
--- a/pkg/process/v3/transformers/nvd/transform_test.go
+++ b/pkg/process/v3/transformers/nvd/transform_test.go
@@ -9,9 +9,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 func TestUnmarshalNVDVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v3/transformers/os/transform.go b/pkg/process/v3/transformers/os/transform.go
index 1ab8d60f..0a9b6b9b 100644
--- a/pkg/process/v3/transformers/os/transform.go
+++ b/pkg/process/v3/transformers/os/transform.go
@@ -5,10 +5,10 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v3/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 const (
diff --git a/pkg/process/v3/transformers/os/transform_test.go b/pkg/process/v3/transformers/os/transform_test.go
index 09834c2c..a36f3b61 100644
--- a/pkg/process/v3/transformers/os/transform_test.go
+++ b/pkg/process/v3/transformers/os/transform_test.go
@@ -8,10 +8,10 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/process/v3/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
 )

 func TestUnmarshalOSVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v3/writer.go b/pkg/process/v3/writer.go
index 051a55ee..9e5c4b72 100644
--- a/pkg/process/v3/writer.go
+++ b/pkg/process/v3/writer.go
@@ -13,9 +13,9 @@ import (
 	"github.com/anchore/grype-db/internal/file"
 	"github.com/anchore/grype-db/internal/log"
 	"github.com/anchore/grype-db/pkg/data"
-	"github.com/anchore/grype/grype/db"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
-	grypeDBStore "github.com/anchore/grype/grype/db/v3/store"
+	"github.com/anchore/grype-db/pkg/db/curation"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
+	grypeDBStore "github.com/anchore/grype-db/pkg/db/v3/store"
 )

 var _ data.Writer = (*writer)(nil)
@@ -66,7 +66,7 @@ func (w writer) Write(entries ...data.Entry) error {
 	return nil
 }

-func (w writer) metadata() (*db.Metadata, error) {
+func (w writer) metadata() (*curation.Metadata, error) {
 	hashStr, err := file.ContentDigest(afero.NewOsFs(), w.dbPath, sha256.New())
 	if err != nil {
 		return nil, fmt.Errorf("failed to hash database file (%s): %w", w.dbPath, err)
@@ -77,7 +77,7 @@ func (w writer) metadata() (*db.Metadata, error) {
 		return nil, fmt.Errorf("failed to fetch store ID: %w", err)
 	}

-	metadata := db.Metadata{
+	metadata := curation.Metadata{
 		Built:    storeID.BuildTimestamp,
 		Version:  storeID.SchemaVersion,
 		Checksum: "sha256:" + hashStr,
@@ -92,7 +92,7 @@ func (w writer) Close() error {
 		return err
 	}

-	metadataPath := path.Join(filepath.Dir(w.dbPath), db.MetadataFileName)
+	metadataPath := path.Join(filepath.Dir(w.dbPath), curation.MetadataFileName)
 	if err = metadata.Write(metadataPath); err != nil {
 		return err
 	}
diff --git a/pkg/process/v3/writer_test.go b/pkg/process/v3/writer_test.go
index 0f8cfb62..bed37122 100644
--- a/pkg/process/v3/writer_test.go
+++ b/pkg/process/v3/writer_test.go
@@ -7,7 +7,7 @@ import (
 	"github.com/stretchr/testify/assert"

 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v3"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v3"
 )

 var _ grypeDB.VulnerabilityMetadataStoreReader = (*mockReader)(nil)
diff --git a/pkg/process/v4/transformers/entry.go b/pkg/process/v4/transformers/entry.go
index a60d5daf..9b539399 100644
--- a/pkg/process/v4/transformers/entry.go
+++ b/pkg/process/v4/transformers/entry.go
@@ -2,7 +2,7 @@ package transformers

 import (
 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 )

 func NewEntries(vs []grypeDB.Vulnerability, metadata grypeDB.VulnerabilityMetadata) []data.Entry {
diff --git a/pkg/process/v4/transformers/github/transform.go b/pkg/process/v4/transformers/github/transform.go
index d1599f6d..a5559036 100644
--- a/pkg/process/v4/transformers/github/transform.go
+++ b/pkg/process/v4/transformers/github/transform.go
@@ -5,11 +5,11 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v4/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	"github.com/anchore/grype/grype/db/v4/namespace"
 	syftPkg "github.com/anchore/syft/syft/pkg"
 )
diff --git a/pkg/process/v4/transformers/github/transform_test.go b/pkg/process/v4/transformers/github/transform_test.go
index 4e5c1c85..ed4d7cb4 100644
--- a/pkg/process/v4/transformers/github/transform_test.go
+++ b/pkg/process/v4/transformers/github/transform_test.go
@@ -8,11 +8,11 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace/language"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	"github.com/anchore/grype/grype/db/v4/namespace"
-	"github.com/anchore/grype/grype/db/v4/namespace/language"
 	syftPkg "github.com/anchore/syft/syft/pkg"
 )
diff --git a/pkg/process/v4/transformers/matchexclusions/transform.go b/pkg/process/v4/transformers/matchexclusions/transform.go
index c4831be1..50f022cb 100644
--- a/pkg/process/v4/transformers/matchexclusions/transform.go
+++ b/pkg/process/v4/transformers/matchexclusions/transform.go
@@ -2,8 +2,8 @@ package matchexclusions

 import (
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
 )

 func Transform(matchExclusion unmarshal.MatchExclusion) ([]data.Entry, error) {
diff --git a/pkg/process/v4/transformers/msrc/transform.go b/pkg/process/v4/transformers/msrc/transform.go
index b15dd881..b0e7e0ce 100644
--- a/pkg/process/v4/transformers/msrc/transform.go
+++ b/pkg/process/v4/transformers/msrc/transform.go
@@ -5,11 +5,11 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v4/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	"github.com/anchore/grype/grype/db/v4/namespace"
 	"github.com/anchore/grype/grype/distro"
 )
diff --git a/pkg/process/v4/transformers/msrc/transform_test.go b/pkg/process/v4/transformers/msrc/transform_test.go
index d768204b..93ed0902 100644
--- a/pkg/process/v4/transformers/msrc/transform_test.go
+++ b/pkg/process/v4/transformers/msrc/transform_test.go
@@ -7,9 +7,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
 )

 func TestUnmarshalMsrcVulnerabilities(t *testing.T) {
diff --git a/pkg/process/v4/transformers/nvd/transform.go b/pkg/process/v4/transformers/nvd/transform.go
index c7bf18bc..b6db32c9 100644
--- a/pkg/process/v4/transformers/nvd/transform.go
+++ b/pkg/process/v4/transformers/nvd/transform.go
@@ -3,13 +3,14 @@ package nvd

 import (
 	"fmt"

-	"github.com/anchore/grype-db/internal"
+	"github.com/scylladb/go-set/strset"
+
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace"
 	"github.com/anchore/grype-db/pkg/process/v4/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal/nvd"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	"github.com/anchore/grype/grype/db/v4/namespace"
 )

 const (
@@ -56,7 +57,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 	var allVulns []grypeDB.Vulnerability
 	for _, p := range uniquePkgs.All() {
 		matches := uniquePkgs.Matches(p)
-		cpes := internal.NewStringSet()
+		cpes := strset.New()
 		for _, m := range matches {
 			cpes.Add(grypeNamespace.Resolver().Normalize(m.Criteria))
 		}
@@ -68,7 +69,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 			VersionFormat: "unknown",
 			PackageName:   grypeNamespace.Resolver().Normalize(p.Product),
 			Namespace:     entryNamespace,
-			CPEs:          cpes.ToSlice(),
+			CPEs:          cpes.List(),
 			Fix: grypeDB.Fix{
 				State: grypeDB.UnknownFixState,
 			},
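The internal.NewStringSet helper is replaced with github.com/scylladb/go-set/strset across the NVD transformers, with ToSlice() becoming List(). A standalone sketch of the pattern, using made-up CPE strings purely for illustration; strset.List() returns members in no particular order, which is why the v5 transformer below sorts the result before building the vulnerability record:

    package main

    import (
        "fmt"
        "sort"

        "github.com/scylladb/go-set/strset"
    )

    func main() {
        // collect CPE criteria, letting the set absorb duplicates just as the
        // transformers above do for each matched package
        cpes := strset.New()
        for _, criteria := range []string{
            "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*",
            "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*", // duplicate
            "cpe:2.3:a:vendor:product:2.0:*:*:*:*:*:*:*",
        } {
            cpes.Add(criteria)
        }

        // List() replaces the old ToSlice(); sort for deterministic output
        orderedCPEs := cpes.List()
        sort.Strings(orderedCPEs)
        fmt.Println(orderedCPEs)
    }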
diff --git a/pkg/process/v4/transformers/nvd/transform_test.go b/pkg/process/v4/transformers/nvd/transform_test.go
index 277527b8..0231f9bd 100644
--- a/pkg/process/v4/transformers/nvd/transform_test.go
+++ b/pkg/process/v4/transformers/nvd/transform_test.go
@@ -9,9 +9,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
 )

 func TestUnmarshalNVDVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v4/transformers/os/transform.go b/pkg/process/v4/transformers/os/transform.go
index 5a9796b4..8d11cdce 100644
--- a/pkg/process/v4/transformers/os/transform.go
+++ b/pkg/process/v4/transformers/os/transform.go
@@ -5,11 +5,11 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	"github.com/anchore/grype-db/pkg/db/v4/namespace"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v4/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	"github.com/anchore/grype/grype/db/v4/namespace"
 	"github.com/anchore/grype/grype/distro"
 )
diff --git a/pkg/process/v4/transformers/os/transform_test.go b/pkg/process/v4/transformers/os/transform_test.go
index 745916a8..b5c264bd 100644
--- a/pkg/process/v4/transformers/os/transform_test.go
+++ b/pkg/process/v4/transformers/os/transform_test.go
@@ -8,10 +8,10 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/process/v4/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
 )

 func TestUnmarshalOSVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v4/writer.go b/pkg/process/v4/writer.go
index 376b14a9..2c1e9853 100644
--- a/pkg/process/v4/writer.go
+++ b/pkg/process/v4/writer.go
@@ -13,9 +13,9 @@ import (
 	"github.com/anchore/grype-db/internal/file"
 	"github.com/anchore/grype-db/internal/log"
 	"github.com/anchore/grype-db/pkg/data"
-	"github.com/anchore/grype/grype/db"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
-	grypeDBStore "github.com/anchore/grype/grype/db/v4/store"
+	"github.com/anchore/grype-db/pkg/db/curation"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
+	grypeDBStore "github.com/anchore/grype-db/pkg/db/v4/store"
 )

 // TODO: add NVDNamespace const to grype.db package?
@@ -73,7 +73,7 @@ func (w writer) Write(entries ...data.Entry) error {
 	return nil
 }

-func (w writer) metadata() (*db.Metadata, error) {
+func (w writer) metadata() (*curation.Metadata, error) {
 	hashStr, err := file.ContentDigest(afero.NewOsFs(), w.dbPath, sha256.New())
 	if err != nil {
 		return nil, fmt.Errorf("failed to hash database file (%s): %w", w.dbPath, err)
@@ -84,7 +84,7 @@ func (w writer) metadata() (*db.Metadata, error) {
 		return nil, fmt.Errorf("failed to fetch store ID: %w", err)
 	}

-	metadata := db.Metadata{
+	metadata := curation.Metadata{
 		Built:    storeID.BuildTimestamp,
 		Version:  storeID.SchemaVersion,
 		Checksum: "sha256:" + hashStr,
@@ -99,7 +99,7 @@ func (w writer) Close() error {
 		return err
 	}

-	metadataPath := path.Join(filepath.Dir(w.dbPath), db.MetadataFileName)
+	metadataPath := path.Join(filepath.Dir(w.dbPath), curation.MetadataFileName)
 	if err = metadata.Write(metadataPath); err != nil {
 		return err
 	}
diff --git a/pkg/process/v4/writer_test.go b/pkg/process/v4/writer_test.go
index 76d5384e..2c744530 100644
--- a/pkg/process/v4/writer_test.go
+++ b/pkg/process/v4/writer_test.go
@@ -7,7 +7,7 @@ import (
 	"github.com/stretchr/testify/assert"

 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v4"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v4"
 )

 var _ grypeDB.VulnerabilityMetadataStoreReader = (*mockReader)(nil)
diff --git a/pkg/process/v5/transformers/entry.go b/pkg/process/v5/transformers/entry.go
index 842509da..0959031f 100644
--- a/pkg/process/v5/transformers/entry.go
+++ b/pkg/process/v5/transformers/entry.go
@@ -2,7 +2,7 @@ package transformers

 import (
 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
 )

 func NewEntries(vs []grypeDB.Vulnerability, metadata grypeDB.VulnerabilityMetadata) []data.Entry {
diff --git a/pkg/process/v5/transformers/github/transform.go b/pkg/process/v5/transformers/github/transform.go
index 91b31f7d..9fdac75b 100644
--- a/pkg/process/v5/transformers/github/transform.go
+++ b/pkg/process/v5/transformers/github/transform.go
@@ -5,11 +5,11 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/namespace"
 	syftPkg "github.com/anchore/syft/syft/pkg"
 )
diff --git a/pkg/process/v5/transformers/github/transform_test.go b/pkg/process/v5/transformers/github/transform_test.go
index c6727816..70c29b39 100644
--- a/pkg/process/v5/transformers/github/transform_test.go
+++ b/pkg/process/v5/transformers/github/transform_test.go
@@ -8,13 +8,13 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	v5 "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace/language"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	v5 "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/namespace"
-	"github.com/anchore/grype/grype/db/v5/namespace/language"
 	syftPkg "github.com/anchore/syft/syft/pkg"
 )
diff --git a/pkg/process/v5/transformers/matchexclusions/transform.go b/pkg/process/v5/transformers/matchexclusions/transform.go
index 577c71e3..3fd1ce1d 100644
--- a/pkg/process/v5/transformers/matchexclusions/transform.go
+++ b/pkg/process/v5/transformers/matchexclusions/transform.go
@@ -2,8 +2,8 @@ package matchexclusions

 import (
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
 )

 func Transform(matchExclusion unmarshal.MatchExclusion) ([]data.Entry, error) {
diff --git a/pkg/process/v5/transformers/msrc/transform.go b/pkg/process/v5/transformers/msrc/transform.go
index a35e17e1..c213bf93 100644
--- a/pkg/process/v5/transformers/msrc/transform.go
+++ b/pkg/process/v5/transformers/msrc/transform.go
@@ -4,11 +4,11 @@ import (
 	"fmt"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/namespace"
 	"github.com/anchore/grype/grype/distro"
 )
diff --git a/pkg/process/v5/transformers/msrc/transform_test.go b/pkg/process/v5/transformers/msrc/transform_test.go
index 081965c9..1b711e7e 100644
--- a/pkg/process/v5/transformers/msrc/transform_test.go
+++ b/pkg/process/v5/transformers/msrc/transform_test.go
@@ -7,9 +7,9 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
 )

 func TestUnmarshalMsrcVulnerabilities(t *testing.T) {
diff --git a/pkg/process/v5/transformers/nvd/transform.go b/pkg/process/v5/transformers/nvd/transform.go
index 72d92bbd..fd946bf2 100644
--- a/pkg/process/v5/transformers/nvd/transform.go
+++ b/pkg/process/v5/transformers/nvd/transform.go
@@ -3,15 +3,16 @@ package nvd

 import (
 	"sort"

-	"github.com/anchore/grype-db/internal"
+	"github.com/scylladb/go-set/strset"
+
 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/platformcpe"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal/nvd"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/namespace"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier/platformcpe"
 )

 func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
@@ -41,7 +42,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 	for _, p := range uniquePkgs.All() {
 		var qualifiers []qualifier.Qualifier
 		matches := uniquePkgs.Matches(p)
-		cpes := internal.NewStringSet()
+		cpes := strset.New()
 		for _, m := range matches {
 			cpes.Add(grypeNamespace.Resolver().Normalize(m.Criteria))
 		}
@@ -53,7 +54,7 @@ func Transform(vulnerability unmarshal.NVDVulnerability) ([]data.Entry, error) {
 			}}
 		}

-		orderedCPEs := cpes.ToSlice()
+		orderedCPEs := cpes.List()
 		sort.Strings(orderedCPEs)

 		// create vulnerability entry
diff --git a/pkg/process/v5/transformers/nvd/transform_test.go b/pkg/process/v5/transformers/nvd/transform_test.go
index 493d10b7..0f80d1c9 100644
--- a/pkg/process/v5/transformers/nvd/transform_test.go
+++ b/pkg/process/v5/transformers/nvd/transform_test.go
@@ -9,11 +9,11 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/platformcpe"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier/platformcpe"
 )

 func TestUnmarshalNVDVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v5/transformers/os/transform.go b/pkg/process/v5/transformers/os/transform.go
index 18fd7bab..64492650 100644
--- a/pkg/process/v5/transformers/os/transform.go
+++ b/pkg/process/v5/transformers/os/transform.go
@@ -5,13 +5,13 @@ import (
 	"strings"

 	"github.com/anchore/grype-db/pkg/data"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/namespace"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/rpmmodularity"
 	"github.com/anchore/grype-db/pkg/process/common"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/namespace"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier/rpmmodularity"
 	"github.com/anchore/grype/grype/distro"
 )
diff --git a/pkg/process/v5/transformers/os/transform_test.go b/pkg/process/v5/transformers/os/transform_test.go
index 55919589..ec6f9dd3 100644
--- a/pkg/process/v5/transformers/os/transform_test.go
+++ b/pkg/process/v5/transformers/os/transform_test.go
@@ -8,12 +8,12 @@ import (
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier"
+	"github.com/anchore/grype-db/pkg/db/v5/pkg/qualifier/rpmmodularity"
 	testUtils "github.com/anchore/grype-db/pkg/process/tests"
 	"github.com/anchore/grype-db/pkg/process/v5/transformers"
 	"github.com/anchore/grype-db/pkg/provider/unmarshal"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier"
-	"github.com/anchore/grype/grype/db/v5/pkg/qualifier/rpmmodularity"
 )

 func TestUnmarshalOSVulnerabilitiesEntries(t *testing.T) {
diff --git a/pkg/process/v5/writer.go b/pkg/process/v5/writer.go
index 4acf5a71..66da36ee 100644
--- a/pkg/process/v5/writer.go
+++ b/pkg/process/v5/writer.go
@@ -13,9 +13,9 @@ import (
 	"github.com/anchore/grype-db/internal/file"
 	"github.com/anchore/grype-db/internal/log"
 	"github.com/anchore/grype-db/pkg/data"
-	"github.com/anchore/grype/grype/db"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
-	grypeDBStore "github.com/anchore/grype/grype/db/v5/store"
+	"github.com/anchore/grype-db/pkg/db/curation"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
+	grypeDBStore "github.com/anchore/grype-db/pkg/db/v5/store"
 )

 // TODO: add NVDNamespace const to grype.db package?
@@ -74,7 +74,7 @@ func (w writer) Write(entries ...data.Entry) error {
 	return nil
 }

-func (w writer) metadata() (*db.Metadata, error) {
+func (w writer) metadata() (*curation.Metadata, error) {
 	hashStr, err := file.ContentDigest(afero.NewOsFs(), w.dbPath, sha256.New())
 	if err != nil {
 		return nil, fmt.Errorf("failed to hash database file (%s): %w", w.dbPath, err)
@@ -85,7 +85,7 @@ func (w writer) metadata() (*db.Metadata, error) {
 		return nil, fmt.Errorf("failed to fetch store ID: %w", err)
 	}

-	metadata := db.Metadata{
+	metadata := curation.Metadata{
 		Built:    storeID.BuildTimestamp,
 		Version:  storeID.SchemaVersion,
 		Checksum: "sha256:" + hashStr,
@@ -100,7 +100,7 @@ func (w writer) Close() error {
 		return err
 	}

-	metadataPath := path.Join(filepath.Dir(w.dbPath), db.MetadataFileName)
+	metadataPath := path.Join(filepath.Dir(w.dbPath), curation.MetadataFileName)
 	if err = metadata.Write(metadataPath); err != nil {
 		return err
 	}
diff --git a/pkg/process/v5/writer_test.go b/pkg/process/v5/writer_test.go
index 7c552361..1d3aaaad 100644
--- a/pkg/process/v5/writer_test.go
+++ b/pkg/process/v5/writer_test.go
@@ -7,7 +7,7 @@ import (
 	"github.com/stretchr/testify/assert"

 	"github.com/anchore/grype-db/pkg/data"
-	grypeDB "github.com/anchore/grype/grype/db/v5"
+	grypeDB "github.com/anchore/grype-db/pkg/db/v5"
 )

 var _ grypeDB.VulnerabilityMetadataStoreReader = (*mockReader)(nil)