From 9275cb51a02ab49f288ff4efd15b175189477792 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Mon, 20 Nov 2023 19:02:44 +0000 Subject: [PATCH 01/17] feat: add vulnerability report verifier --- .github/workflows/publish-package.yml | 1 + CONTRIBUTING.md | 2 +- Makefile | 27 +- ..._v1beta1_verifier_vulnerabilityreport.yaml | 14 + ...v1beta1_verifier_vulnerabilityreport2.yaml | 11 + go.mod | 1 + go.sum | 7 + httpserver/Dockerfile | 2 + .../samples/constraint.yaml | 11 + .../notation-nested-validation/template.yaml | 80 + .../samples/constraint.yaml | 11 + .../template.yaml | 82 + pkg/referrerstore/mocks/memory_store.go | 28 +- .../schemavalidation/schemas/sarif-2.1.0.json | 3012 +++++++++++++++++ .../schemavalidation/schemavalidation_test.go | 89 + .../schemavalidation/schemavalidator.go | 65 + .../schemavalidation/testdata/bad_schema.json | 2 + .../testdata/mismatch_schema.json | 964 ++++++ .../testdata/trivy_scan_report.json | 1889 +++++++++++ .../vulnerability_report.go | 400 +++ .../vulnerability_report_test.go | 955 ++++++ scripts/azure-ci-test.sh | 2 +- test/bats/cli-test.bats | 5 + test/bats/plugin-test.bats | 26 + .../config/vulnerabilityreport_config.json | 28 + 25 files changed, 7700 insertions(+), 14 deletions(-) create mode 100644 config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml create mode 100644 config/samples/config_v1beta1_verifier_vulnerabilityreport2.yaml create mode 100644 library/notation-nested-validation/samples/constraint.yaml create mode 100644 library/notation-nested-validation/template.yaml create mode 100644 library/vulnerability-report-validation/samples/constraint.yaml create mode 100644 library/vulnerability-report-validation/template.yaml create mode 100644 plugins/verifier/vulnerabilityreport/schemavalidation/schemas/sarif-2.1.0.json create mode 100644 plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go create mode 100644 
plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go create mode 100644 plugins/verifier/vulnerabilityreport/schemavalidation/testdata/bad_schema.json create mode 100644 plugins/verifier/vulnerabilityreport/schemavalidation/testdata/mismatch_schema.json create mode 100644 plugins/verifier/vulnerabilityreport/schemavalidation/testdata/trivy_scan_report.json create mode 100644 plugins/verifier/vulnerabilityreport/vulnerability_report.go create mode 100644 plugins/verifier/vulnerabilityreport/vulnerability_report_test.go create mode 100644 test/bats/tests/config/vulnerabilityreport_config.json diff --git a/.github/workflows/publish-package.yml b/.github/workflows/publish-package.yml index 4e0c90a65..6a6c8234e 100644 --- a/.github/workflows/publish-package.yml +++ b/.github/workflows/publish-package.yml @@ -73,6 +73,7 @@ jobs: --build-arg build_sbom=true \ --build-arg build_licensechecker=true \ --build-arg build_schemavalidator=true \ + --build-arg build_vulnerabilityreport=true \ --build-arg LDFLAGS="-X github.com/deislabs/ratify/internal/version.Version=$(TAG)" \ --label org.opencontainers.image.revision=${{ github.sha }} \ -t ${{ steps.prepare.outputs.ref }} \ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 136544164..4be2fc32d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -144,7 +144,7 @@ Follow the steps below to build and deploy a Ratify image with your private chan export REGISTRY=yourregistry docker buildx create --use -docker buildx build -f httpserver/Dockerfile --platform linux/amd64 --build-arg build_cosign=true --build-arg build_sbom=true --build-arg build_licensechecker=true --build-arg build_schemavalidator=true -t ${REGISTRY}/deislabs/ratify:yourtag . +docker buildx build -f httpserver/Dockerfile --platform linux/amd64 --build-arg build_cosign=true --build-arg build_sbom=true --build-arg build_licensechecker=true --build-arg build_schemavalidator=true --build-arg build_vulnerabilityreport=true -t ${REGISTRY}/deislabs/ratify:yourtag . 
docker build --progress=plain --build-arg KUBE_VERSION="1.25.0" --build-arg TARGETOS="linux" --build-arg TARGETARCH="amd64" -f crd.Dockerfile -t ${REGISTRY}/localbuildcrd:yourtag ./charts/ratify/crds ``` diff --git a/Makefile b/Makefile index 95f61c6d6..ef148c43e 100644 --- a/Makefile +++ b/Makefile @@ -32,6 +32,7 @@ SYFT_VERSION ?= v0.76.0 YQ_VERSION ?= v4.34.1 YQ_BINARY ?= yq_linux_amd64 ALPINE_IMAGE ?= alpine@sha256:93d5a28ff72d288d69b5997b8ba47396d2cbb62a72b5d87cd3351094b5d578a0 +ALPINE_IMAGE_VULNERABLE ?= alpine@sha256:25fad2a32ad1f6f510e528448ae1ec69a28ef81916a004d3629874104f8a7f70 REDIS_IMAGE_TAG ?= 7.0-debian-11 CERT_ROTATION_ENABLED ?= false REGO_POLICY_ENABLED ?= false @@ -68,6 +69,7 @@ build-plugins: go build -cover -coverpkg=github.com/deislabs/ratify/plugins/verifier/sample/... -o ./bin/plugins/ ./plugins/verifier/sample go build -cover -coverpkg=github.com/deislabs/ratify/plugins/verifier/sbom/... -o ./bin/plugins/ ./plugins/verifier/sbom go build -cover -coverpkg=github.com/deislabs/ratify/plugins/verifier/schemavalidator/... -o ./bin/plugins/ ./plugins/verifier/schemavalidator + go build -cover -coverpkg=github.com/deislabs/ratify/plugins/verifier/vulnerabilityreport/... 
-o ./bin/plugins/ ./plugins/verifier/vulnerabilityreport .PHONY: install install: @@ -139,7 +141,7 @@ test-e2e: generate-rotation-certs EXPIRING_CERT_DIR=.staging/rotation/expiring-certs CERT_DIR=.staging/rotation GATEKEEPER_VERSION=${GATEKEEPER_VERSION} bats -t ${BATS_PLUGIN_TESTS_FILE} .PHONY: test-e2e-cli -test-e2e-cli: e2e-dependencies e2e-create-local-registry e2e-notation-setup e2e-notation-leaf-cert-setup e2e-cosign-setup e2e-licensechecker-setup e2e-sbom-setup e2e-schemavalidator-setup +test-e2e-cli: e2e-dependencies e2e-create-local-registry e2e-notation-setup e2e-notation-leaf-cert-setup e2e-cosign-setup e2e-licensechecker-setup e2e-sbom-setup e2e-schemavalidator-setup e2e-vulnerabilityreport-setup rm ${GOCOVERDIR} -rf mkdir ${GOCOVERDIR} -p RATIFY_DIR=${INSTALL_DIR} TEST_REGISTRY=${TEST_REGISTRY} ${GITHUB_WORKSPACE}/bin/bats -t ${BATS_CLI_TESTS_FILE} @@ -401,6 +403,26 @@ e2e-schemavalidator-setup: ${TEST_REGISTRY}/all:v0 \ .staging/schemavalidator/trivy-scan.sarif:application/sarif+json +e2e-vulnerabilityreport-setup: + rm -rf .staging/vulnerabilityreport + mkdir -p .staging/vulnerabilityreport + + # Install Trivy + curl -L https://github.com/aquasecurity/trivy/releases/download/v0.47.0/trivy_0.47.0_Linux-64bit.tar.gz --output .staging/vulnerabilityreport/trivy.tar.gz + tar -zxf .staging/vulnerabilityreport/trivy.tar.gz -C .staging/vulnerabilityreport + + # Build/Push Image + printf 'FROM ${ALPINE_IMAGE_VULNERABLE}\nCMD ["echo", "vulnerabilityreport image"]' > .staging/vulnerabilityreport/Dockerfile + docker build --no-cache -t ${TEST_REGISTRY}/vulnerabilityreport:v0 .staging/vulnerabilityreport + docker push ${TEST_REGISTRY}/vulnerabilityreport:v0 + + # Create/Attach Scan Result + .staging/vulnerabilityreport/trivy image --format sarif --output .staging/vulnerabilityreport/trivy-sarif.json ${TEST_REGISTRY}/vulnerabilityreport:v0 + ${GITHUB_WORKSPACE}/bin/oras attach \ + --artifact-type application/sarif+json \ + ${TEST_REGISTRY}/vulnerabilityreport:v0 \ 
+ .staging/vulnerabilityreport/trivy-sarif.json:application/sarif+json + e2e-inlinecert-setup: rm -rf .staging/inlinecert mkdir -p .staging/inlinecert @@ -460,7 +482,7 @@ e2e-deploy-base-ratify: e2e-notation-setup e2e-notation-leaf-cert-setup e2e-inli rm mount_config.json -e2e-deploy-ratify: e2e-notation-setup e2e-notation-leaf-cert-setup e2e-cosign-setup e2e-cosign-setup e2e-licensechecker-setup e2e-sbom-setup e2e-schemavalidator-setup e2e-inlinecert-setup e2e-build-crd-image e2e-build-local-ratify-image e2e-helm-deploy-ratify +e2e-deploy-ratify: e2e-notation-setup e2e-notation-leaf-cert-setup e2e-cosign-setup e2e-cosign-setup e2e-licensechecker-setup e2e-sbom-setup e2e-schemavalidator-setup e2e-vulnerabilityreport-setup e2e-inlinecert-setup e2e-build-crd-image e2e-build-local-ratify-image e2e-helm-deploy-ratify e2e-build-local-ratify-image: docker build --progress=plain --no-cache \ @@ -468,6 +490,7 @@ e2e-build-local-ratify-image: --build-arg build_sbom=true \ --build-arg build_licensechecker=true \ --build-arg build_schemavalidator=true \ + --build-arg build_vulnerabilityreport=true \ -f ./httpserver/Dockerfile \ -t localbuild:test . 
kind load docker-image --name kind localbuild:test diff --git a/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml b/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml new file mode 100644 index 000000000..95c6dd678 --- /dev/null +++ b/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml @@ -0,0 +1,14 @@ +apiVersion: config.ratify.deislabs.io/v1beta1 +kind: Verifier +metadata: + name: verifier-vulnerabilityreport +spec: + name: vulnerabilityreport + artifactTypes: application/sarif+json + parameters: + maximumAge: 24h + disallowedSeverity: + - high + - critical + denylistCVEs: + - CVE-2021-44228 # Log4Shell diff --git a/config/samples/config_v1beta1_verifier_vulnerabilityreport2.yaml b/config/samples/config_v1beta1_verifier_vulnerabilityreport2.yaml new file mode 100644 index 000000000..e9b626e64 --- /dev/null +++ b/config/samples/config_v1beta1_verifier_vulnerabilityreport2.yaml @@ -0,0 +1,11 @@ +apiVersion: config.ratify.deislabs.io/v1beta1 +kind: Verifier +metadata: + name: verifier-vulnerabilityreport +spec: + name: vulnerabilityreport + artifactTypes: application/sarif+json + parameters: + maximumAge: 24h + denylistCVEs: + - CVE-2021-44228 # Log4Shell diff --git a/go.mod b/go.mod index 66974473b..abb965f53 100644 --- a/go.mod +++ b/go.mod @@ -34,6 +34,7 @@ require ( github.com/open-policy-agent/opa v0.58.0 github.com/opencontainers/go-digest v1.0.0 github.com/opencontainers/image-spec v1.1.0-rc5 + github.com/owenrumney/go-sarif/v2 v2.3.0 github.com/pkg/errors v0.9.1 github.com/sigstore/cosign/v2 v2.2.1 github.com/sigstore/sigstore v1.7.5 diff --git a/go.sum b/go.sum index 25df4c19a..3641f0def 100644 --- a/go.sum +++ b/go.sum @@ -148,6 +148,7 @@ github.com/aliyun/credentials-go v1.3.1 h1:uq/0v7kWrxmoLGpqjx7vtQ/s03f0zR//0br/x github.com/aliyun/credentials-go v1.3.1/go.mod h1:8jKYhQuDawt8x2+fusqa1Y6mPxemTsBEN04dgcAcYz0= github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092 
h1:aM1rlcoLz8y5B2r4tTLMiVTrMtpfY0O8EScKJxaSaEc= github.com/anchore/go-struct-converter v0.0.0-20221118182256-c68fdcfa2092/go.mod h1:rYqSE9HbjzpHTI74vwPvae4ZVYZd1lue2ta6xHPdblA= +github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q= github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= @@ -640,6 +641,9 @@ github.com/opencontainers/image-spec v1.1.0-rc5 h1:Ygwkfw9bpDvs+c9E34SdgGOj41dX/ github.com/opencontainers/image-spec v1.1.0-rc5/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= +github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= +github.com/owenrumney/go-sarif/v2 v2.3.0 h1:wP5yEpI53zr0v5cBmagXzLbHZp9Oylyo3AJDpfLBITs= +github.com/owenrumney/go-sarif/v2 v2.3.0/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE= github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4= github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= @@ -769,6 +773,8 @@ github.com/vbatts/tar-split v0.11.5 h1:3bHCTIheBm1qFTcgh9oPu+nNBtX+XJIupG/vacinC github.com/vbatts/tar-split v0.11.5/go.mod h1:yZbwRsSeGjusneWgA781EKej9HF8vme8okylkAeNKLk= github.com/veraison/go-cose v1.2.0 h1:Ok0Hr3GMAf8K/1NB4sV65QGgCiukG1w1QD+H5tmt0Ow= github.com/veraison/go-cose v1.2.0/go.mod h1:7ziE85vSq4ScFTg6wyoMXjucIGOf4JkFEZi/an96Ct4= +github.com/vmihailenco/msgpack/v4 
v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4= +github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= github.com/xanzy/go-gitlab v0.93.2 h1:kNNf3BYNYn/Zkig0B89fma12l36VLcYSGu7OnaRlRDg= @@ -804,6 +810,7 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zalando/go-keyring v0.2.2 h1:f0xmpYiSrHtSNAVgwip93Cg8tuF45HJM6rHq/A5RI/4= +github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= go.mongodb.org/mongo-driver v1.7.3/go.mod h1:NqaYOwnXWr5Pm7AOpO5QFxKJ503nbMse/R79oO62zWg= go.mongodb.org/mongo-driver v1.7.5/go.mod h1:VXEWRZ6URJIkUq2SCAyapmhH0ZLRBP+FT4xhp5Zvxng= go.mongodb.org/mongo-driver v1.10.0/go.mod h1:wsihk0Kdgv8Kqu1Anit4sfK+22vSFbUrAVEYRhCXrA8= diff --git a/httpserver/Dockerfile b/httpserver/Dockerfile index 7d20d75fe..abdfeaf98 100644 --- a/httpserver/Dockerfile +++ b/httpserver/Dockerfile @@ -13,6 +13,7 @@ ARG build_sbom ARG build_cosign ARG build_licensechecker ARG build_schemavalidator +ARG build_vulnerabilityreport ENV CGO_ENABLED=0 \ GOOS=${TARGETOS} \ @@ -30,6 +31,7 @@ RUN if [ "$build_sbom" = "true" ]; then go build -o /app/out/plugins/ /app/plugi RUN if [ "$build_cosign" = "true" ]; then go build -o /app/out/plugins/ /app/plugins/verifier/cosign; fi RUN if [ "$build_licensechecker" = "true" ]; then go build -o /app/out/plugins/ /app/plugins/verifier/licensechecker; fi RUN if [ "$build_schemavalidator" = "true" ]; then go build -o /app/out/plugins/ /app/plugins/verifier/schemavalidator; fi +RUN if [ "$build_vulnerabilityreport" = "true" ]; then go build -o /app/out/plugins/ 
/app/plugins/verifier/vulnerabilityreport; fi FROM $BASEIMAGE LABEL org.opencontainers.image.source https://github.com/deislabs/ratify diff --git a/library/notation-nested-validation/samples/constraint.yaml b/library/notation-nested-validation/samples/constraint.yaml new file mode 100644 index 000000000..70953230c --- /dev/null +++ b/library/notation-nested-validation/samples/constraint.yaml @@ -0,0 +1,11 @@ +apiVersion: constraints.gatekeeper.sh/v1beta1 +kind: NotationNestedValidation +metadata: + name: notation-nested-constraint +spec: + enforcementAction: deny + match: + kinds: + - apiGroups: [""] + kinds: ["Pod"] + namespaces: ["default"] diff --git a/library/notation-nested-validation/template.yaml b/library/notation-nested-validation/template.yaml new file mode 100644 index 000000000..6e34ad75a --- /dev/null +++ b/library/notation-nested-validation/template.yaml @@ -0,0 +1,80 @@ +apiVersion: templates.gatekeeper.sh/v1beta1 +kind: ConstraintTemplate +metadata: + name: notationnestedvalidation +spec: + crd: + spec: + names: + kind: NotationNestedValidation + validation: + openAPIV3Schema: + type: object + properties: + issuer: + type: string + targets: + - target: admission.k8s.gatekeeper.sh + rego: | + package notationnestedvalidation + import future.keywords.if + + remote_data := response { + images := [img | img = input.review.object.spec.containers[_].image] + images_init := [img | img = input.review.object.spec.initContainers[_].image] + images_ephemeral := [img | img = input.review.object.spec.ephemeralContainers[_].image] + other_images := array.concat(images_init, images_ephemeral) + all_images := array.concat(other_images, images) + response := external_data({"provider": "ratify-provider", "keys": all_images}) + } + + violation[{"msg": msg}] { + general_violation[{"result": msg}] + } + + # Check if there are any system errors + general_violation[{"result": result}] { + err := remote_data.system_error + err != "" + result := sprintf("System error 
calling external data provider: %s", [err]) + } + # Check if there are errors for any of the images + general_violation[{"result": result}] { + count(remote_data.errors) > 0 + result := sprintf("Error validating one or more images: %s", remote_data.errors) + } + + # Check if the success criteria is true + general_violation[{"result": result}] { + subject_validation := remote_data.responses[_] + subject_result := subject_validation[1] + failed_verify(subject_result) + result := sprintf("Subject failed verification: %s", [subject_validation[0]]) + } + + failed_verify(reports) if { + newReports := {"nestedResults": reports.verifierReports} + has_subject_failed_verify(newReports) + } + + has_subject_failed_verify(nestedReports) if { + [path, value] := walk(nestedReports) + path[count(path) - 1] == "nestedResults" + not notary_signature_pass_verify(value) + } + + notary_signature_pass_verify(nestedReports) if { + count_with_success := notary_signature_signature_count(nestedReports) + count_with_success > 0 + } + + notary_signature_signature_count(nestedReports) := number if { + sigs := [x | + some i + nestedReports[i].isSuccess == true + nestedReports[i].artifactType == "application/vnd.cncf.notary.signature" + x := nestedReports[i].subject + ] + number := count(sigs) + } + \ No newline at end of file diff --git a/library/vulnerability-report-validation/samples/constraint.yaml b/library/vulnerability-report-validation/samples/constraint.yaml new file mode 100644 index 000000000..6cba3bc08 --- /dev/null +++ b/library/vulnerability-report-validation/samples/constraint.yaml @@ -0,0 +1,11 @@ +apiVersion: constraints.gatekeeper.sh/v1beta1 +kind: VulnerabilityReportValidation +metadata: + name: vulnerability-report-validation-constraint +spec: + enforcementAction: deny + match: + kinds: + - apiGroups: [""] + kinds: ["Pod"] + namespaces: ["default"] \ No newline at end of file diff --git a/library/vulnerability-report-validation/template.yaml 
b/library/vulnerability-report-validation/template.yaml new file mode 100644 index 000000000..2cf29accb --- /dev/null +++ b/library/vulnerability-report-validation/template.yaml @@ -0,0 +1,82 @@ +apiVersion: templates.gatekeeper.sh/v1beta1 +kind: ConstraintTemplate +metadata: + name: vulnerabilityreportvalidation +spec: + crd: + spec: + names: + kind: VulnerabilityReportValidation + validation: + openAPIV3Schema: + type: object + properties: + issuer: + type: string + targets: + - target: admission.k8s.gatekeeper.sh + rego: | + package vulnerabilityreportvalidation + + # TODO: add support for custom reason message propagating to user + import future.keywords.if + import future.keywords.in + import future.keywords.every + + default require_signature := false # change to true to require notation signature on vulnerability report + + # Get data from Ratify + remote_data := response { + images := [img | img = input.review.object.spec.containers[_].image] + images_init := [img | img = input.review.object.spec.initContainers[_].image] + images_ephemeral := [img | img = input.review.object.spec.ephemeralContainers[_].image] + other_images := array.concat(images_init, images_ephemeral) + all_images := array.concat(other_images, images) + response := external_data({"provider": "ratify-provider", "keys": all_images}) + } + + violation[{"msg": msg}] { + general_violation[{"result": msg}] + } + + # Check if there are any system errors + general_violation[{"result": result}] { + err := remote_data.system_error + err != "" + result := sprintf("System error calling external data provider: %s", [err]) + } + + # Check if there are errors for any of the images + general_violation[{"result": result}] { + count(remote_data.errors) > 0 + result := sprintf("Error validating one or more images: %s", remote_data.errors) + } + + # Check if the success criteria is true + general_violation[{"result": result}] { + subject_validation := remote_data.responses[_] + subject_result := 
subject_validation[1] + vuln_results := [res | subject_result.verifierReports[i].name == "vulnerabilityreport"; res := subject_result.verifierReports[i]] + count(vuln_results) > 0 + not process_vuln_reports(vuln_results) + result := sprintf("Subject failed verification: %s", [subject_validation[0]]) + } + + process_vuln_reports(reports) if { + # At least one report must be valid + some vuln_report in reports + vuln_report.isSuccess == true + valid_signatures(vuln_report) + } + + valid_signatures(_) := true { + require_signature == false + } + + valid_signatures(report) := true { + require_signature + count(report.nestedResults) > 0 + some nestedResult in report.nestedResults + nestedResult.artifactType == "application/vnd.cncf.notary.signature" + nestedResult.isSuccess + } \ No newline at end of file diff --git a/pkg/referrerstore/mocks/memory_store.go b/pkg/referrerstore/mocks/memory_store.go index b887041fc..5f809e0ed 100644 --- a/pkg/referrerstore/mocks/memory_store.go +++ b/pkg/referrerstore/mocks/memory_store.go @@ -25,12 +25,14 @@ import ( v1 "github.com/opencontainers/image-spec/specs-go/v1" ) -type memoryTestStore struct { +type MemoryTestStore struct { Subjects map[digest.Digest]*ocispecs.SubjectDescriptor Referrers map[digest.Digest][]ocispecs.ReferenceDescriptor + Manifests map[digest.Digest]ocispecs.ReferenceManifest + Blobs map[digest.Digest][]byte } -func (store *memoryTestStore) ListReferrers(_ context.Context, _ common.Reference, _ []string, _ string, subjectDesc *ocispecs.SubjectDescriptor) (referrerstore.ListReferrersResult, error) { +func (store *MemoryTestStore) ListReferrers(_ context.Context, _ common.Reference, _ []string, _ string, subjectDesc *ocispecs.SubjectDescriptor) (referrerstore.ListReferrersResult, error) { // assume subjectDesc is given and good if item, ok := store.Referrers[subjectDesc.Digest]; ok { @@ -43,23 +45,29 @@ func (store *memoryTestStore) ListReferrers(_ context.Context, _ common.Referenc return 
referrerstore.ListReferrersResult{}, nil } -func (store *memoryTestStore) Name() string { +func (store *MemoryTestStore) Name() string { return "memoryTestStore" } -func (store *memoryTestStore) GetBlobContent(_ context.Context, _ common.Reference, _ digest.Digest) ([]byte, error) { +func (store *MemoryTestStore) GetBlobContent(_ context.Context, _ common.Reference, digest digest.Digest) ([]byte, error) { + if item, ok := store.Blobs[digest]; ok { + return item, nil + } return nil, nil } -func (store *memoryTestStore) GetReferenceManifest(_ context.Context, _ common.Reference, _ ocispecs.ReferenceDescriptor) (ocispecs.ReferenceManifest, error) { +func (store *MemoryTestStore) GetReferenceManifest(_ context.Context, _ common.Reference, desc ocispecs.ReferenceDescriptor) (ocispecs.ReferenceManifest, error) { + if item, ok := store.Manifests[desc.Digest]; ok { + return item, nil + } return ocispecs.ReferenceManifest{}, nil } -func (store *memoryTestStore) GetConfig() *config.StoreConfig { +func (store *MemoryTestStore) GetConfig() *config.StoreConfig { return &config.StoreConfig{} } -func (store *memoryTestStore) GetSubjectDescriptor(_ context.Context, subjectReference common.Reference) (*ocispecs.SubjectDescriptor, error) { +func (store *MemoryTestStore) GetSubjectDescriptor(_ context.Context, subjectReference common.Reference) (*ocispecs.SubjectDescriptor, error) { if item, ok := store.Subjects[subjectReference.Digest]; ok { return item, nil } @@ -67,8 +75,8 @@ func (store *memoryTestStore) GetSubjectDescriptor(_ context.Context, subjectRef return nil, fmt.Errorf("subject not found for %s", subjectReference.Digest) } -func createEmptyMemoryTestStore() *memoryTestStore { - return &memoryTestStore{Subjects: make(map[digest.Digest]*ocispecs.SubjectDescriptor), Referrers: make(map[digest.Digest][]ocispecs.ReferenceDescriptor)} +func createEmptyMemoryTestStore() *MemoryTestStore { + return &MemoryTestStore{Subjects: make(map[digest.Digest]*ocispecs.SubjectDescriptor), 
Referrers: make(map[digest.Digest][]ocispecs.ReferenceDescriptor)} } func CreateNewTestStoreForNestedSbom() referrerstore.ReferrerStore { @@ -87,7 +95,7 @@ const ( artifactMediaType = "application/vnd.oci.artifact.manifest.v1+json" ) -func addSignedImageWithSignedSbomToStore(store *memoryTestStore) { +func addSignedImageWithSignedSbomToStore(store *MemoryTestStore) { imageDigest := digest.NewDigestFromEncoded("sha256", "b556844e6e59451caf4429eb1de50aa7c50e4b1cc985f9f5893affe4b73f9935") sbomDigest := digest.NewDigestFromEncoded("sha256", "9393779549fca5758811d7cf0444ddb1b254cb24b44fe1cf80fac6fd3199817f") sbomSignatureDigest := digest.NewDigestFromEncoded("sha256", "ace31a6d260ee372caaed757b3411b634b2cecc379c31fda979dba4470699227") diff --git a/plugins/verifier/vulnerabilityreport/schemavalidation/schemas/sarif-2.1.0.json b/plugins/verifier/vulnerabilityreport/schemavalidation/schemas/sarif-2.1.0.json new file mode 100644 index 000000000..3deb87150 --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/schemas/sarif-2.1.0.json @@ -0,0 +1,3012 @@ +{ + "$id": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": false, + "definitions": { + "address": { + "description": "A physical or virtual address, or a range of addresses, in an 'addressable region' (memory or a binary file).", + "additionalProperties": false, + "type": "object", + "properties": { + "absoluteAddress": { + "description": "The address expressed as a byte offset from the start of the addressable region.", + "type": "integer", + "minimum": -1, + "default": -1 + }, + "relativeAddress": { + "description": "The address expressed as a byte offset from the absolute address of the top-most parent object.", + "type": "integer" + }, + "length": { + "description": "The number of bytes in this range of addresses.", + "type": "integer" + }, + "kind": { + "description": "An 
open-ended string that identifies the address kind. 'data', 'function', 'header','instruction', 'module', 'page', 'section', 'segment', 'stack', 'stackFrame', 'table' are well-known values.", + "type": "string" + }, + "name": { + "description": "A name that is associated with the address, e.g., '.text'.", + "type": "string" + }, + "fullyQualifiedName": { + "description": "A human-readable fully qualified name that is associated with the address.", + "type": "string" + }, + "offsetFromParent": { + "description": "The byte offset of this address from the absolute or relative address of the parent object.", + "type": "integer" + }, + "index": { + "description": "The index within run.addresses of the cached object for this address.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "parentIndex": { + "description": "The index within run.addresses of the parent object.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "properties": { + "description": "Key/value pairs that provide additional information about the address.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "artifact": { + "description": "A single artifact. 
In some cases, this artifact might be nested within another artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A short description of the artifact.", + "$ref": "#/definitions/message" + }, + "location": { + "description": "The location of the artifact.", + "$ref": "#/definitions/artifactLocation" + }, + "parentIndex": { + "description": "Identifies the index of the immediate parent of the artifact, if this artifact is nested.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "offset": { + "description": "The offset in bytes of the artifact within its containing artifact.", + "type": "integer", + "minimum": 0 + }, + "length": { + "description": "The length of the artifact in bytes.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "roles": { + "description": "The role or roles played by the artifact in the analysis.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "enum": [ + "analysisTarget", + "attachment", + "responseFile", + "resultFile", + "standardStream", + "tracedFile", + "unmodified", + "modified", + "added", + "deleted", + "renamed", + "uncontrolled", + "driver", + "extension", + "translation", + "taxonomy", + "policy", + "referencedOnCommandLine", + "memoryContents", + "directory", + "userSpecifiedConfiguration", + "toolSpecifiedConfiguration", + "debugOutputFile" + ] + } + }, + "mimeType": { + "description": "The MIME type (RFC 2045) of the artifact.", + "type": "string", + "pattern": "[^/]+/.+" + }, + "contents": { + "description": "The contents of the artifact.", + "$ref": "#/definitions/artifactContent" + }, + "encoding": { + "description": "Specifies the encoding for an artifact object that refers to a text file.", + "type": "string" + }, + "sourceLanguage": { + "description": "Specifies the source language for any artifact object that refers to a text file that contains source code.", + "type": "string" + }, + 
"hashes": { + "description": "A dictionary, each of whose keys is the name of a hash function and each of whose values is the hashed value of the artifact produced by the specified hash function.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "lastModifiedTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the artifact was most recently modified. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the artifact.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "artifactChange": { + "description": "A change to a single artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "artifactLocation": { + "description": "The location of the artifact to change.", + "$ref": "#/definitions/artifactLocation" + }, + "replacements": { + "description": "An array of replacement objects, each of which represents the replacement of a single region in a single artifact specified by 'artifactLocation'.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/replacement" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the change.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "artifactLocation", + "replacements" + ] + }, + "artifactContent": { + "description": "Represents the contents of an artifact.", + "type": "object", + "additionalProperties": false, + "properties": { + "text": { + "description": "UTF-8-encoded content from a text artifact.", + "type": "string" + }, + "binary": { + "description": "MIME Base64-encoded content from a binary artifact, or from a text artifact in its original encoding.", + "type": "string" + }, + "rendered": { + "description": "An alternate rendered representation of the 
artifact (e.g., a decompiled representation of a binary region).", + "$ref": "#/definitions/multiformatMessageString" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the artifact content.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "artifactLocation": { + "description": "Specifies the location of an artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "uri": { + "description": "A string containing a valid relative or absolute URI.", + "type": "string", + "format": "uri-reference" + }, + "uriBaseId": { + "description": "A string which indirectly specifies the absolute URI with respect to which a relative URI in the \"uri\" property is interpreted.", + "type": "string" + }, + "index": { + "description": "The index within the run artifacts array of the artifact object associated with the artifact location.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "description": { + "description": "A short description of the artifact location.", + "$ref": "#/definitions/message" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the artifact location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "attachment": { + "description": "An artifact relevant to a result.", + "type": "object", + "additionalProperties": false, + "properties": { + "description": { + "description": "A message describing the role played by the attachment.", + "$ref": "#/definitions/message" + }, + "artifactLocation": { + "description": "The location of the attachment.", + "$ref": "#/definitions/artifactLocation" + }, + "regions": { + "description": "An array of regions of interest within the attachment.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/region" + } + }, + "rectangles": { + "description": "An array of rectangles specifying areas of interest within the image.", + 
"type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/rectangle" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the attachment.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "artifactLocation" + ] + }, + "codeFlow": { + "description": "A set of threadFlows which together describe a pattern of code execution relevant to detecting a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "message": { + "description": "A message relevant to the code flow.", + "$ref": "#/definitions/message" + }, + "threadFlows": { + "description": "An array of one or more unique threadFlow objects, each of which describes the progress of a program through a thread of execution.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/threadFlow" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the code flow.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "threadFlows" + ] + }, + "configurationOverride": { + "description": "Information about how a specific rule or notification was reconfigured at runtime.", + "type": "object", + "additionalProperties": false, + "properties": { + "configuration": { + "description": "Specifies how the rule or notification was configured during the scan.", + "$ref": "#/definitions/reportingConfiguration" + }, + "descriptor": { + "description": "A reference used to locate the descriptor whose configuration was overridden.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the configuration override.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "configuration", + "descriptor" + ] + }, + "conversion": { + "description": "Describes how a converter transformed 
the output of a static analysis tool from the analysis tool's native output format into the SARIF format.", + "additionalProperties": false, + "type": "object", + "properties": { + "tool": { + "description": "A tool object that describes the converter.", + "$ref": "#/definitions/tool" + }, + "invocation": { + "description": "An invocation object that describes the invocation of the converter.", + "$ref": "#/definitions/invocation" + }, + "analysisToolLogFiles": { + "description": "The locations of the analysis tool's per-run log files.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the conversion.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "tool" + ] + }, + "edge": { + "description": "Represents a directed edge in a graph.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "A string that uniquely identifies the edge within its graph.", + "type": "string" + }, + "label": { + "description": "A short description of the edge.", + "$ref": "#/definitions/message" + }, + "sourceNodeId": { + "description": "Identifies the source node (the node at which the edge starts).", + "type": "string" + }, + "targetNodeId": { + "description": "Identifies the target node (the node at which the edge ends).", + "type": "string" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the edge.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "id", + "sourceNodeId", + "targetNodeId" + ] + }, + "edgeTraversal": { + "description": "Represents the traversal of a single edge during a graph traversal.", + "type": "object", + "additionalProperties": false, + "properties": { + "edgeId": { + "description": "Identifies the edge being traversed.", + "type": "string" + }, + 
"message": { + "description": "A message to display to the user as the edge is traversed.", + "$ref": "#/definitions/message" + }, + "finalState": { + "description": "The values of relevant expressions after the edge has been traversed.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "stepOverEdgeCount": { + "description": "The number of edge traversals necessary to return from a nested graph.", + "type": "integer", + "minimum": 0 + }, + "properties": { + "description": "Key/value pairs that provide additional information about the edge traversal.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "edgeId" + ] + }, + "exception": { + "description": "Describes a runtime exception encountered during the execution of an analysis tool.", + "type": "object", + "additionalProperties": false, + "properties": { + "kind": { + "type": "string", + "description": "A string that identifies the kind of exception, for example, the fully qualified type name of an object that was thrown, or the symbolic name of a signal." 
+ }, + "message": { + "description": "A message that describes the exception.", + "type": "string" + }, + "stack": { + "description": "The sequence of function calls leading to the exception.", + "$ref": "#/definitions/stack" + }, + "innerExceptions": { + "description": "An array of exception objects each of which is considered a cause of this exception.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/exception" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the exception.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "externalProperties": { + "description": "The top-level element of an external property file.", + "type": "object", + "additionalProperties": false, + "properties": { + "schema": { + "description": "The URI of the JSON schema corresponding to the version of the external property file format.", + "type": "string", + "format": "uri" + }, + "version": { + "description": "The SARIF format version of this external properties object.", + "enum": [ + "2.1.0" + ] + }, + "guid": { + "description": "A stable, unique identifer for this external properties object, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "runGuid": { + "description": "A stable, unique identifer for the run associated with this external properties object, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "conversion": { + "description": "A conversion object that will be merged with a separate run.", + "$ref": "#/definitions/conversion" + }, + "graphs": { + "description": "An array of graph objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": true, + "items": { + "$ref": 
"#/definitions/graph" + } + }, + "externalizedProperties": { + "description": "Key/value pairs that provide additional information that will be merged with a separate run.", + "$ref": "#/definitions/propertyBag" + }, + "artifacts": { + "description": "An array of artifact objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifact" + } + }, + "invocations": { + "description": "Describes the invocation of the analysis tool that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/invocation" + } + }, + "logicalLocations": { + "description": "An array of logical locations such as namespaces, types or functions that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + "threadFlowLocations": { + "description": "An array of threadFlowLocation objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + "results": { + "description": "An array of result objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/result" + } + }, + "taxonomies": { + "description": "Tool taxonomies that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "driver": { + "description": "The analysis tool object that will be merged with a separate run.", + "$ref": "#/definitions/toolComponent" + }, + "extensions": { + "description": "Tool extensions that will be merged with a separate run.", + "type": "array", + 
"minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "policies": { + "description": "Tool policies that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "translations": { + "description": "Tool translations that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "addresses": { + "description": "Addresses that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/address" + } + }, + "webRequests": { + "description": "Requests that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webRequest" + } + }, + "webResponses": { + "description": "Responses that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webResponse" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the external properties.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "externalPropertyFileReference": { + "description": "Contains information that enables a SARIF consumer to locate the external property file that contains the value of an externalized property associated with the run.", + "type": "object", + "additionalProperties": false, + "properties": { + "location": { + "description": "The location of the external property file.", + "$ref": "#/definitions/artifactLocation" + }, + "guid": { + "description": "A stable, unique identifer for the external property file in the form of a GUID.", + "type": "string", + "pattern": 
"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "itemCount": { + "description": "A non-negative integer specifying the number of items contained in the external property file.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "properties": { + "description": "Key/value pairs that provide additional information about the external property file.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [ + { + "required": [ + "location" + ] + }, + { + "required": [ + "guid" + ] + } + ] + }, + "externalPropertyFileReferences": { + "description": "References to external property files that should be inlined with the content of a root log file.", + "additionalProperties": false, + "type": "object", + "properties": { + "conversion": { + "description": "An external property file containing a run.conversion object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + "graphs": { + "description": "An array of external property files containing a run.graphs object to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "externalizedProperties": { + "description": "An external property file containing a run.properties object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + "artifacts": { + "description": "An array of external property files containing run.artifacts arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "invocations": { + "description": "An array of external property files containing run.invocations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": 
[], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "logicalLocations": { + "description": "An array of external property files containing run.logicalLocations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "threadFlowLocations": { + "description": "An array of external property files containing run.threadFlowLocations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "results": { + "description": "An array of external property files containing run.results arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "taxonomies": { + "description": "An array of external property files containing run.taxonomies arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "addresses": { + "description": "An array of external property files containing run.addresses arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "driver": { + "description": "An external property file containing a run.driver object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + "extensions": { + "description": "An array of external property files containing run.extensions arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": 
[], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "policies": { + "description": "An array of external property files containing run.policies arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "translations": { + "description": "An array of external property files containing run.translations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "webRequests": { + "description": "An array of external property files containing run.requests arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "webResponses": { + "description": "An array of external property files containing run.responses arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the external property files.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "fix": { + "description": "A proposed fix for the problem represented by a result object. A fix specifies a set of artifacts to modify. 
For each artifact, it specifies a set of bytes to remove, and provides a set of new bytes to replace them.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A message that describes the proposed fix, enabling viewers to present the proposed change to an end user.", + "$ref": "#/definitions/message" + }, + "artifactChanges": { + "description": "One or more artifact changes that comprise a fix for a result.", + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifactChange" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the fix.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "artifactChanges" + ] + }, + "graph": { + "description": "A network of nodes and directed edges that describes some aspect of the structure of the code (for example, a call graph).", + "type": "object", + "additionalProperties": false, + "properties": { + "description": { + "description": "A description of the graph.", + "$ref": "#/definitions/message" + }, + "nodes": { + "description": "An array of node objects representing the nodes of the graph.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/node" + } + }, + "edges": { + "description": "An array of edge objects representing the edges of the graph.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/edge" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the graph.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "graphTraversal": { + "description": "Represents a path through a graph.", + "type": "object", + "additionalProperties": false, + "properties": { + "runGraphIndex": { + "description": "The index within the run.graphs to be associated with the result.", + "type": 
"integer", + "default": -1, + "minimum": -1 + }, + "resultGraphIndex": { + "description": "The index within the result.graphs to be associated with the result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "description": { + "description": "A description of this graph traversal.", + "$ref": "#/definitions/message" + }, + "initialState": { + "description": "Values of relevant expressions at the start of the graph traversal that may change during graph traversal.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "immutableState": { + "description": "Values of relevant expressions at the start of the graph traversal that remain constant for the graph traversal.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "edgeTraversals": { + "description": "The sequences of edges traversed by this graph traversal.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/edgeTraversal" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the graph traversal.", + "$ref": "#/definitions/propertyBag" + } + }, + "oneOf": [ + { + "required": [ + "runGraphIndex" + ] + }, + { + "required": [ + "resultGraphIndex" + ] + } + ] + }, + "invocation": { + "description": "The runtime environment of the analysis tool run.", + "additionalProperties": false, + "type": "object", + "properties": { + "commandLine": { + "description": "The command line used to invoke the tool.", + "type": "string" + }, + "arguments": { + "description": "An array of strings, containing in order the command line arguments passed to the tool from the operating system.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "type": "string" + } + }, + "responseFiles": { + "description": "The locations of any response files specified on the tool's command 
line.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + "startTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the invocation started. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + "endTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the invocation ended. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + "exitCode": { + "description": "The process exit code.", + "type": "integer" + }, + "ruleConfigurationOverrides": { + "description": "An array of configurationOverride objects that describe rules related runtime overrides.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": true, + "items": { + "$ref": "#/definitions/configurationOverride" + } + }, + "notificationConfigurationOverrides": { + "description": "An array of configurationOverride objects that describe notifications related runtime overrides.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": true, + "items": { + "$ref": "#/definitions/configurationOverride" + } + }, + "toolExecutionNotifications": { + "description": "A list of runtime conditions detected by the tool during the analysis.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/notification" + } + }, + "toolConfigurationNotifications": { + "description": "A list of conditions detected by the tool that are relevant to the tool's configuration.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/notification" + } + }, + "exitCodeDescription": { + "description": "The reason for the process exit.", + "type": "string" + }, + "exitSignalName": { + "description": "The name of 
the signal that caused the process to exit.", + "type": "string" + }, + "exitSignalNumber": { + "description": "The numeric value of the signal that caused the process to exit.", + "type": "integer" + }, + "processStartFailureMessage": { + "description": "The reason given by the operating system that the process failed to start.", + "type": "string" + }, + "executionSuccessful": { + "description": "Specifies whether the tool's execution completed successfully.", + "type": "boolean" + }, + "machine": { + "description": "The machine on which the invocation occurred.", + "type": "string" + }, + "account": { + "description": "The account under which the invocation occurred.", + "type": "string" + }, + "processId": { + "description": "The id of the process in which the invocation occurred.", + "type": "integer" + }, + "executableLocation": { + "description": "An absolute URI specifying the location of the executable that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + "workingDirectory": { + "description": "The working directory for the invocation.", + "$ref": "#/definitions/artifactLocation" + }, + "environmentVariables": { + "description": "The environment variables associated with the analysis tool process, expressed as key/value pairs.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "stdin": { + "description": "A file containing the standard input stream to the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + "stdout": { + "description": "A file containing the standard output stream from the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + "stderr": { + "description": "A file containing the standard error stream from the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + "stdoutStderr": { + "description": "A file containing the interleaved standard output and standard error stream from the process that was invoked.", + "$ref": 
"#/definitions/artifactLocation" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the invocation.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "executionSuccessful" + ] + }, + "location": { + "description": "A location within a programming artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "id": { + "description": "Value that distinguishes this location from all other locations within a single result object.", + "type": "integer", + "minimum": -1, + "default": -1 + }, + "physicalLocation": { + "description": "Identifies the artifact and region.", + "$ref": "#/definitions/physicalLocation" + }, + "logicalLocations": { + "description": "The logical locations associated with the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + "message": { + "description": "A message relevant to the location.", + "$ref": "#/definitions/message" + }, + "annotations": { + "description": "A set of regions relevant to the location.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/region" + } + }, + "relationships": { + "description": "An array of objects that describe relationships between this location and others.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/locationRelationship" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "locationRelationship": { + "description": "Information about the relation of one location to another.", + "type": "object", + "additionalProperties": false, + "properties": { + "target": { + "description": "A reference to the related location.", + "type": "integer", + "minimum": 0 + }, + "kinds": { + 
"description": "A set of distinct strings that categorize the relationship. Well-known kinds include 'includes', 'isIncludedBy' and 'relevant'.", + "type": "array", + "default": [ + "relevant" + ], + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "description": { + "description": "A description of the location relationship.", + "$ref": "#/definitions/message" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the location relationship.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "target" + ] + }, + "logicalLocation": { + "description": "A logical location of a construct that produced a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "name": { + "description": "Identifies the construct in which the result occurred. For example, this property might contain the name of a class or a method.", + "type": "string" + }, + "index": { + "description": "The index within the logical locations array.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "fullyQualifiedName": { + "description": "The human-readable fully qualified name of the logical location.", + "type": "string" + }, + "decoratedName": { + "description": "The machine-readable name for the logical location, such as a mangled function name provided by a C++ compiler that encodes calling convention, return type and other details along with the function name.", + "type": "string" + }, + "parentIndex": { + "description": "Identifies the index of the immediate parent of the construct in which the result was detected. For example, this property might point to a logical location that represents the namespace that holds a type.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "kind": { + "description": "The type of construct this logical location component refers to. 
Should be one of 'function', 'member', 'module', 'namespace', 'parameter', 'resource', 'returnType', 'type', 'variable', 'object', 'array', 'property', 'value', 'element', 'text', 'attribute', 'comment', 'declaration', 'dtd' or 'processingInstruction', if any of those accurately describe the construct.", + "type": "string" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the logical location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "message": { + "description": "Encapsulates a message intended to be read by the end user.", + "type": "object", + "additionalProperties": false, + "properties": { + "text": { + "description": "A plain text message string.", + "type": "string" + }, + "markdown": { + "description": "A Markdown message string.", + "type": "string" + }, + "id": { + "description": "The identifier for this message.", + "type": "string" + }, + "arguments": { + "description": "An array of strings to substitute into the message string.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "type": "string" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the message.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [ + { + "required": [ + "text" + ] + }, + { + "required": [ + "id" + ] + } + ] + }, + "multiformatMessageString": { + "description": "A message string or message format string rendered in multiple formats.", + "type": "object", + "additionalProperties": false, + "properties": { + "text": { + "description": "A plain text message string or format string.", + "type": "string" + }, + "markdown": { + "description": "A Markdown message string or format string.", + "type": "string" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the message.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "text" + ] + }, + "node": { + 
"description": "Represents a node in a graph.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "A string that uniquely identifies the node within its graph.", + "type": "string" + }, + "label": { + "description": "A short description of the node.", + "$ref": "#/definitions/message" + }, + "location": { + "description": "A code location associated with the node.", + "$ref": "#/definitions/location" + }, + "children": { + "description": "Array of child nodes.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/node" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the node.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "id" + ] + }, + "notification": { + "description": "Describes a condition relevant to the tool itself, as opposed to being relevant to a target being analyzed by the tool.", + "type": "object", + "additionalProperties": false, + "properties": { + "locations": { + "description": "The locations relevant to this notification.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + "message": { + "description": "A message that describes the condition that was encountered.", + "$ref": "#/definitions/message" + }, + "level": { + "description": "A value specifying the severity level of the notification.", + "default": "warning", + "enum": [ + "none", + "note", + "warning", + "error" + ] + }, + "threadId": { + "description": "The thread identifier of the code that generated the notification.", + "type": "integer" + }, + "timeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the analysis tool generated the notification.", + "type": "string", + "format": "date-time" + }, + "exception": { + "description": "The runtime exception, if any, relevant to this 
notification.", + "$ref": "#/definitions/exception" + }, + "descriptor": { + "description": "A reference used to locate the descriptor relevant to this notification.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + "associatedRule": { + "description": "A reference used to locate the rule descriptor associated with this notification.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the notification.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "message" + ] + }, + "physicalLocation": { + "description": "A physical location relevant to a result. Specifies a reference to a programming artifact together with a range of bytes or characters within that artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "address": { + "description": "The address of the location.", + "$ref": "#/definitions/address" + }, + "artifactLocation": { + "description": "The location of the artifact.", + "$ref": "#/definitions/artifactLocation" + }, + "region": { + "description": "Specifies a portion of the artifact.", + "$ref": "#/definitions/region" + }, + "contextRegion": { + "description": "Specifies a portion of the artifact that encloses the region. 
Allows a viewer to display additional context around the region.", + "$ref": "#/definitions/region" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the physical location.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [ + { + "required": [ + "address" + ] + }, + { + "required": [ + "artifactLocation" + ] + } + ] + }, + "propertyBag": { + "description": "Key/value pairs that provide additional information about the object.", + "type": "object", + "additionalProperties": true, + "properties": { + "tags": { + "description": "A set of distinct strings that provide additional information.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + } + } + }, + "rectangle": { + "description": "An area within an image.", + "additionalProperties": false, + "type": "object", + "properties": { + "top": { + "description": "The Y coordinate of the top edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + "left": { + "description": "The X coordinate of the left edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + "bottom": { + "description": "The Y coordinate of the bottom edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + "right": { + "description": "The X coordinate of the right edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + "message": { + "description": "A message relevant to the rectangle.", + "$ref": "#/definitions/message" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the rectangle.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "region": { + "description": "A region within an artifact where a result was detected.", + "additionalProperties": false, + "type": "object", + "properties": { + "startLine": { + "description": "The line number of the 
first character in the region.", + "type": "integer", + "minimum": 1 + }, + "startColumn": { + "description": "The column number of the first character in the region.", + "type": "integer", + "minimum": 1 + }, + "endLine": { + "description": "The line number of the last character in the region.", + "type": "integer", + "minimum": 1 + }, + "endColumn": { + "description": "The column number of the character following the end of the region.", + "type": "integer", + "minimum": 1 + }, + "charOffset": { + "description": "The zero-based offset from the beginning of the artifact of the first character in the region.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "charLength": { + "description": "The length of the region in characters.", + "type": "integer", + "minimum": 0 + }, + "byteOffset": { + "description": "The zero-based offset from the beginning of the artifact of the first byte in the region.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "byteLength": { + "description": "The length of the region in bytes.", + "type": "integer", + "minimum": 0 + }, + "snippet": { + "description": "The portion of the artifact contents within the specified region.", + "$ref": "#/definitions/artifactContent" + }, + "message": { + "description": "A message relevant to the region.", + "$ref": "#/definitions/message" + }, + "sourceLanguage": { + "description": "Specifies the source language, if any, of the portion of the artifact specified by the region object.", + "type": "string" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the region.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "replacement": { + "description": "The replacement of a single region of an artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "deletedRegion": { + "description": "The region of the artifact to delete.", + "$ref": "#/definitions/region" + }, + "insertedContent": { + "description": 
"The content to insert at the location specified by the 'deletedRegion' property.", + "$ref": "#/definitions/artifactContent" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the replacement.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "deletedRegion" + ] + }, + "reportingDescriptor": { + "description": "Metadata that describes a specific report produced by the tool, as part of the analysis it provides or its runtime reporting.", + "additionalProperties": false, + "type": "object", + "properties": { + "id": { + "description": "A stable, opaque identifier for the report.", + "type": "string" + }, + "deprecatedIds": { + "description": "An array of stable, opaque identifiers by which this report was known in some previous version of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "guid": { + "description": "A unique identifer for the reporting descriptor in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "deprecatedGuids": { + "description": "An array of unique identifies in the form of a GUID by which this report was known in some previous version of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + } + }, + "name": { + "description": "A report identifier that is understandable to an end user.", + "type": "string" + }, + "deprecatedNames": { + "description": "An array of readable identifiers by which this report was known in some previous version of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "shortDescription": { + "description": "A concise description of the report. 
Should be a single sentence that is understandable when visible space is limited to a single line of text.", + "$ref": "#/definitions/multiformatMessageString" + }, + "fullDescription": { + "description": "A description of the report. Should, as far as possible, provide details sufficient to enable resolution of any problem indicated by the result.", + "$ref": "#/definitions/multiformatMessageString" + }, + "messageStrings": { + "description": "A set of name/value pairs with arbitrary names. Each value is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "defaultConfiguration": { + "description": "Default reporting configuration information.", + "$ref": "#/definitions/reportingConfiguration" + }, + "helpUri": { + "description": "A URI where the primary documentation for the report can be found.", + "type": "string", + "format": "uri" + }, + "help": { + "description": "Provides the primary documentation for the report, useful when there is no online documentation.", + "$ref": "#/definitions/multiformatMessageString" + }, + "relationships": { + "description": "An array of objects that describe relationships between this reporting descriptor and others.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorRelationship" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the report.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "id" + ] + }, + "reportingConfiguration": { + "description": "Information about a rule or notification that can be configured at runtime.", + "type": "object", + 
"additionalProperties": false, + "properties": { + "enabled": { + "description": "Specifies whether the report may be produced during the scan.", + "type": "boolean", + "default": true + }, + "level": { + "description": "Specifies the failure level for the report.", + "default": "warning", + "enum": [ + "none", + "note", + "warning", + "error" + ] + }, + "rank": { + "description": "Specifies the relative priority of the report. Used for analysis output only.", + "type": "number", + "default": -1, + "minimum": -1, + "maximum": 100 + }, + "parameters": { + "description": "Contains configuration information specific to a report.", + "$ref": "#/definitions/propertyBag" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the reporting configuration.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "reportingDescriptorReference": { + "description": "Information about how to locate a relevant reporting descriptor.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "The id of the descriptor.", + "type": "string" + }, + "index": { + "description": "The index into an array of descriptors in toolComponent.ruleDescriptors, toolComponent.notificationDescriptors, or toolComponent.taxonomyDescriptors, depending on context.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "guid": { + "description": "A guid that uniquely identifies the descriptor.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "toolComponent": { + "description": "A reference used to locate the toolComponent associated with the descriptor.", + "$ref": "#/definitions/toolComponentReference" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the reporting descriptor reference.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [ + { + "required": [ + "index" + ] 
+ }, + { + "required": [ + "guid" + ] + }, + { + "required": [ + "id" + ] + } + ] + }, + "reportingDescriptorRelationship": { + "description": "Information about the relation of one reporting descriptor to another.", + "type": "object", + "additionalProperties": false, + "properties": { + "target": { + "description": "A reference to the related reporting descriptor.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + "kinds": { + "description": "A set of distinct strings that categorize the relationship. Well-known kinds include 'canPrecede', 'canFollow', 'willPrecede', 'willFollow', 'superset', 'subset', 'equal', 'disjoint', 'relevant', and 'incomparable'.", + "type": "array", + "default": [ + "relevant" + ], + "uniqueItems": true, + "items": { + "type": "string" + } + }, + "description": { + "description": "A description of the reporting descriptor relationship.", + "$ref": "#/definitions/message" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the reporting descriptor reference.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "target" + ] + }, + "result": { + "description": "A result produced by an analysis tool.", + "additionalProperties": false, + "type": "object", + "properties": { + "ruleId": { + "description": "The stable, unique identifier of the rule, if any, to which this result is relevant.", + "type": "string" + }, + "ruleIndex": { + "description": "The index within the tool component rules array of the rule object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "rule": { + "description": "A reference used to locate the rule descriptor relevant to this result.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + "kind": { + "description": "A value that categorizes results by evaluation state.", + "default": "fail", + "enum": [ + "notApplicable", + "pass", + "fail", + "review", + "open", + "informational" + ] + }, + 
"level": { + "description": "A value specifying the severity level of the result.", + "default": "warning", + "enum": [ + "none", + "note", + "warning", + "error" + ] + }, + "message": { + "description": "A message that describes the result. The first sentence of the message only will be displayed when visible space is limited.", + "$ref": "#/definitions/message" + }, + "analysisTarget": { + "description": "Identifies the artifact that the analysis tool was instructed to scan. This need not be the same as the artifact where the result actually occurred.", + "$ref": "#/definitions/artifactLocation" + }, + "locations": { + "description": "The set of locations where the result was detected. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + "guid": { + "description": "A stable, unique identifer for the result in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "correlationGuid": { + "description": "A stable, unique identifier for the equivalence class of logically identical results to which this result belongs, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "occurrenceCount": { + "description": "A positive integer specifying the number of times this logically unique result was observed in this run.", + "type": "integer", + "minimum": 1 + }, + "partialFingerprints": { + "description": "A set of strings that contribute to the stable, unique identity of the result.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "fingerprints": { + "description": "A set of strings each of which individually defines a 
stable, unique identity for the result.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "stacks": { + "description": "An array of 'stack' objects relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/stack" + } + }, + "codeFlows": { + "description": "An array of 'codeFlow' objects relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/codeFlow" + } + }, + "graphs": { + "description": "An array of zero or more unique graph objects associated with the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graph" + } + }, + "graphTraversals": { + "description": "An array of one or more unique 'graphTraversal' objects.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graphTraversal" + } + }, + "relatedLocations": { + "description": "A set of locations relevant to this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + "suppressions": { + "description": "A set of suppressions relevant to this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/suppression" + } + }, + "baselineState": { + "description": "The state of a result relative to a baseline of a previous run.", + "enum": [ + "new", + "unchanged", + "updated", + "absent" + ] + }, + "rank": { + "description": "A number representing the priority or importance of the result.", + "type": "number", + "default": -1, + "minimum": -1, + "maximum": 100 + }, + "attachments": { + "description": "A set of artifacts relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": 
"#/definitions/attachment" + } + }, + "hostedViewerUri": { + "description": "An absolute URI at which the result can be viewed.", + "type": "string", + "format": "uri" + }, + "workItemUris": { + "description": "The URIs of the work items associated with this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string", + "format": "uri" + } + }, + "provenance": { + "description": "Information about how and when the result was detected.", + "$ref": "#/definitions/resultProvenance" + }, + "fixes": { + "description": "An array of 'fix' objects, each of which represents a proposed fix to the problem indicated by the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/fix" + } + }, + "taxa": { + "description": "An array of references to taxonomy reporting descriptors that are applicable to the result.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorReference" + } + }, + "webRequest": { + "description": "A web request associated with this result.", + "$ref": "#/definitions/webRequest" + }, + "webResponse": { + "description": "A web response associated with this result.", + "$ref": "#/definitions/webResponse" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the result.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "message" + ] + }, + "resultProvenance": { + "description": "Contains information about how and when a result was detected.", + "additionalProperties": false, + "type": "object", + "properties": { + "firstDetectionTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the result was first detected. 
See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + "lastDetectionTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the result was most recently detected. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + "firstDetectionRunGuid": { + "description": "A GUID-valued string equal to the automationDetails.guid property of the run in which the result was first detected.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "lastDetectionRunGuid": { + "description": "A GUID-valued string equal to the automationDetails.guid property of the run in which the result was most recently detected.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "invocationIndex": { + "description": "The index within the run.invocations array of the invocation object which describes the tool invocation that detected the result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "conversionSources": { + "description": "An array of physicalLocation objects which specify the portions of an analysis tool's output that a converter transformed into the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/physicalLocation" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the result.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "run": { + "description": "Describes a single run of an analysis tool, and contains the reported output of that run.", + "additionalProperties": false, + "type": "object", + "properties": { + "tool": { + "description": "Information about the tool or tool pipeline that generated the 
results in this run. A run can only contain results produced by a single tool or tool pipeline. A run can aggregate results from multiple log files, as long as context around the tool run (tool command-line arguments and the like) is identical for all aggregated files.", + "$ref": "#/definitions/tool" + }, + "invocations": { + "description": "Describes the invocation of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/invocation" + } + }, + "conversion": { + "description": "A conversion object that describes how a converter transformed an analysis tool's native reporting format into the SARIF format.", + "$ref": "#/definitions/conversion" + }, + "language": { + "description": "The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase culture code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). 
The casing is recommended but not required (in order for this data to conform to RFC5646).", + "type": "string", + "default": "en-US", + "pattern": "^[a-zA-Z]{2}|^[a-zA-Z]{2}-[a-zA-Z]{2}?$" + }, + "versionControlProvenance": { + "description": "Specifies the revision in version control of the artifacts that were scanned.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/versionControlDetails" + } + }, + "originalUriBaseIds": { + "description": "The artifact location specified by each uriBaseId symbol on the machine where the tool originally ran.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/artifactLocation" + } + }, + "artifacts": { + "description": "An array of artifact objects relevant to the run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifact" + } + }, + "logicalLocations": { + "description": "An array of logical locations such as namespaces, types or functions.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + "graphs": { + "description": "An array of zero or more unique graph objects associated with the run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graph" + } + }, + "results": { + "description": "The set of results contained in an SARIF log. The results array can be omitted when a run is solely exporting rules metadata. 
It must be present (but may be empty) if a log file represents an actual scan.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/result" + } + }, + "automationDetails": { + "description": "Automation details that describe this run.", + "$ref": "#/definitions/runAutomationDetails" + }, + "runAggregates": { + "description": "Automation details that describe the aggregate of runs to which this run belongs.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/runAutomationDetails" + } + }, + "baselineGuid": { + "description": "The 'guid' property of a previous SARIF 'run' that comprises the baseline that was used to compute result 'baselineState' properties for the run.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "redactionTokens": { + "description": "An array of strings used to replace sensitive information in a redaction-aware property.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + }, + "defaultEncoding": { + "description": "Specifies the default encoding for any artifact object that refers to a text file.", + "type": "string" + }, + "defaultSourceLanguage": { + "description": "Specifies the default source language for any artifact object that refers to a text file that contains source code.", + "type": "string" + }, + "newlineSequences": { + "description": "An ordered list of character sequences that were treated as line breaks when computing region information for the run.", + "type": "array", + "minItems": 1, + "uniqueItems": true, + "default": [ + "\r\n", + "\n" + ], + "items": { + "type": "string" + } + }, + "columnKind": { + "description": "Specifies the unit in which the tool measures columns.", + "enum": [ + "utf16CodeUnits", + "unicodeCodePoints" + ] + }, + 
"externalPropertyFileReferences": { + "description": "References to external property files that should be inlined with the content of a root log file.", + "$ref": "#/definitions/externalPropertyFileReferences" + }, + "threadFlowLocations": { + "description": "An array of threadFlowLocation objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + "taxonomies": { + "description": "An array of toolComponent objects relevant to a taxonomy in which results are categorized.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "addresses": { + "description": "Addresses associated with this run instance, if any.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/address" + } + }, + "translations": { + "description": "The set of available translations of the localized data provided by the tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "policies": { + "description": "Contains configurations that may potentially override both reportingDescriptor.defaultConfiguration (the tool's default severities) and invocation.configurationOverrides (severities established at run-time from the command line).", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "webRequests": { + "description": "An array of request objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webRequest" + } + }, + "webResponses": { + "description": "An array of response objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + 
"default": [], + "items": { + "$ref": "#/definitions/webResponse" + } + }, + "specialLocations": { + "description": "A specialLocations object that defines locations of special significance to SARIF consumers.", + "$ref": "#/definitions/specialLocations" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the run.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "tool" + ] + }, + "runAutomationDetails": { + "description": "Information that describes a run's identity and role within an engineering system process.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A description of the identity and role played within the engineering system by this object's containing run object.", + "$ref": "#/definitions/message" + }, + "id": { + "description": "A hierarchical string that uniquely identifies this object's containing run object.", + "type": "string" + }, + "guid": { + "description": "A stable, unique identifer for this object's containing run object in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "correlationGuid": { + "description": "A stable, unique identifier for the equivalence class of runs to which this object's containing run object belongs in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the run automation details.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "specialLocations": { + "description": "Defines locations of special significance to SARIF consumers.", + "type": "object", + "additionalProperties": false, + "properties": { + "displayBase": { + "description": "Provides a suggestion to SARIF consumers to display file 
paths relative to the specified location.", + "$ref": "#/definitions/artifactLocation" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the special locations.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "stack": { + "description": "A call stack that is relevant to a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "message": { + "description": "A message relevant to this call stack.", + "$ref": "#/definitions/message" + }, + "frames": { + "description": "An array of stack frames that represents a sequence of calls, rendered in reverse chronological order, that comprise the call stack.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/stackFrame" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the stack.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "frames" + ] + }, + "stackFrame": { + "description": "A function call within a stack trace.", + "additionalProperties": false, + "type": "object", + "properties": { + "location": { + "description": "The location to which this stack frame refers.", + "$ref": "#/definitions/location" + }, + "module": { + "description": "The name of the module that contains the code of this stack frame.", + "type": "string" + }, + "threadId": { + "description": "The thread identifier of the stack frame.", + "type": "integer" + }, + "parameters": { + "description": "The parameters of the call that is executing.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "type": "string", + "default": [] + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the stack frame.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "suppression": { + "description": "A suppression that is relevant to a result.", + "additionalProperties": false, + 
"type": "object", + "properties": { + "guid": { + "description": "A stable, unique identifer for the suppression in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "kind": { + "description": "A string that indicates where the suppression is persisted.", + "enum": [ + "inSource", + "external" + ] + }, + "status": { + "description": "A string that indicates the review status of the suppression.", + "enum": [ + "accepted", + "underReview", + "rejected" + ] + }, + "justification": { + "description": "A string representing the justification for the suppression.", + "type": "string" + }, + "location": { + "description": "Identifies the location associated with the suppression.", + "$ref": "#/definitions/location" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the suppression.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "kind" + ] + }, + "threadFlow": { + "description": "Describes a sequence of code locations that specify a path through a single thread of execution such as an operating system or fiber.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "An string that uniquely identifies the threadFlow within the codeFlow in which it occurs.", + "type": "string" + }, + "message": { + "description": "A message relevant to the thread flow.", + "$ref": "#/definitions/message" + }, + "initialState": { + "description": "Values of relevant expressions at the start of the thread flow that may change during thread flow execution.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "immutableState": { + "description": "Values of relevant expressions at the start of the thread flow that remain constant.", + "type": "object", + "additionalProperties": { + "$ref": 
"#/definitions/multiformatMessageString" + } + }, + "locations": { + "description": "A temporally ordered array of 'threadFlowLocation' objects, each of which describes a location visited by the tool while producing the result.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the thread flow.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "locations" + ] + }, + "threadFlowLocation": { + "description": "A location visited by an analysis tool while simulating or monitoring the execution of a program.", + "additionalProperties": false, + "type": "object", + "properties": { + "index": { + "description": "The index within the run threadFlowLocations array.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "location": { + "description": "The code location.", + "$ref": "#/definitions/location" + }, + "stack": { + "description": "The call stack leading to this location.", + "$ref": "#/definitions/stack" + }, + "kinds": { + "description": "A set of distinct strings that categorize the thread flow location. 
Well-known kinds include 'acquire', 'release', 'enter', 'exit', 'call', 'return', 'branch', 'implicit', 'false', 'true', 'caution', 'danger', 'unknown', 'unreachable', 'taint', 'function', 'handler', 'lock', 'memory', 'resource', 'scope' and 'value'.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + }, + "taxa": { + "description": "An array of references to rule or taxonomy reporting descriptors that are applicable to the thread flow location.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorReference" + } + }, + "module": { + "description": "The name of the module that contains the code that is executing.", + "type": "string" + }, + "state": { + "description": "A dictionary, each of whose keys specifies a variable or expression, the associated value of which represents the variable or expression value. For an annotation of kind 'continuation', for example, this dictionary might hold the current assumed values of a set of global variables.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "nestingLevel": { + "description": "An integer representing a containment hierarchy within the thread flow.", + "type": "integer", + "minimum": 0 + }, + "executionOrder": { + "description": "An integer representing the temporal order in which execution reached this location.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "executionTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which this location was executed.", + "type": "string", + "format": "date-time" + }, + "importance": { + "description": "Specifies the importance of this location in understanding the code flow in which it occurs. The order from most to least important is \"essential\", \"important\", \"unimportant\". 
Default: \"important\".", + "enum": [ + "important", + "essential", + "unimportant" + ], + "default": "important" + }, + "webRequest": { + "description": "A web request associated with this thread flow location.", + "$ref": "#/definitions/webRequest" + }, + "webResponse": { + "description": "A web response associated with this thread flow location.", + "$ref": "#/definitions/webResponse" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the threadflow location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "tool": { + "description": "The analysis tool that was run.", + "additionalProperties": false, + "type": "object", + "properties": { + "driver": { + "description": "The analysis tool that was run.", + "$ref": "#/definitions/toolComponent" + }, + "extensions": { + "description": "Tool extensions that contributed to or reconfigured the analysis tool that was run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the tool.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "driver" + ] + }, + "toolComponent": { + "description": "A component, such as a plug-in or the driver, of the analysis tool that was run.", + "additionalProperties": false, + "type": "object", + "properties": { + "guid": { + "description": "A unique identifer for the tool component in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "name": { + "description": "The name of the tool component.", + "type": "string" + }, + "organization": { + "description": "The organization or company that produced the tool component.", + "type": "string" + }, + "product": { + "description": "A product suite to which the tool component belongs.", + "type": 
"string" + }, + "productSuite": { + "description": "A localizable string containing the name of the suite of products to which the tool component belongs.", + "type": "string" + }, + "shortDescription": { + "description": "A brief description of the tool component.", + "$ref": "#/definitions/multiformatMessageString" + }, + "fullDescription": { + "description": "A comprehensive description of the tool component.", + "$ref": "#/definitions/multiformatMessageString" + }, + "fullName": { + "description": "The name of the tool component along with its version and any other useful identifying information, such as its locale.", + "type": "string" + }, + "version": { + "description": "The tool component version, in whatever format the component natively provides.", + "type": "string" + }, + "semanticVersion": { + "description": "The tool component version in the format specified by Semantic Versioning 2.0.", + "type": "string" + }, + "dottedQuadFileVersion": { + "description": "The binary version of the tool component's primary executable file expressed as four non-negative integers separated by a period (for operating systems that express file versions in this way).", + "type": "string", + "pattern": "[0-9]+(\\.[0-9]+){3}" + }, + "releaseDateUtc": { + "description": "A string specifying the UTC date (and optionally, the time) of the component's release.", + "type": "string" + }, + "downloadUri": { + "description": "The absolute URI from which the tool component can be downloaded.", + "type": "string", + "format": "uri" + }, + "informationUri": { + "description": "The absolute URI at which information about this version of the tool component can be found.", + "type": "string", + "format": "uri" + }, + "globalMessageStrings": { + "description": "A dictionary, each of whose keys is a resource identifier and each of whose values is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. 
The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + "notifications": { + "description": "An array of reportingDescriptor objects relevant to the notifications related to the configuration and runtime execution of the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + "rules": { + "description": "An array of reportingDescriptor objects relevant to the analysis performed by the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + "taxa": { + "description": "An array of reportingDescriptor objects relevant to the definitions of both standalone and tool-defined taxonomies.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + "locations": { + "description": "An array of the artifactLocation objects associated with the tool component.", + "type": "array", + "minItems": 0, + "default": [], + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + "language": { + "description": "The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase language code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). 
The casing is recommended but not required (in order for this data to conform to RFC5646).", + "type": "string", + "default": "en-US", + "pattern": "^[a-zA-Z]{2}|^[a-zA-Z]{2}-[a-zA-Z]{2}?$" + }, + "contents": { + "description": "The kinds of data contained in this object.", + "type": "array", + "uniqueItems": true, + "default": [ + "localizedData", + "nonLocalizedData" + ], + "items": { + "enum": [ + "localizedData", + "nonLocalizedData" + ] + } + }, + "isComprehensive": { + "description": "Specifies whether this object contains a complete definition of the localizable and/or non-localizable data for this component, as opposed to including only data that is relevant to the results persisted to this log file.", + "type": "boolean", + "default": false + }, + "localizedDataSemanticVersion": { + "description": "The semantic version of the localized strings defined in this component; maintained by components that provide translations.", + "type": "string" + }, + "minimumRequiredLocalizedDataSemanticVersion": { + "description": "The minimum value of localizedDataSemanticVersion required in translations consumed by this component; used by components that consume translations.", + "type": "string" + }, + "associatedComponent": { + "description": "The component which is strongly associated with this component. For a translation, this refers to the component which has been translated. 
For an extension, this is the driver that provides the extension's plugin model.", + "$ref": "#/definitions/toolComponentReference" + }, + "translationMetadata": { + "description": "Translation metadata, required for a translation, not populated by other component types.", + "$ref": "#/definitions/translationMetadata" + }, + "supportedTaxonomies": { + "description": "An array of toolComponentReference objects to declare the taxonomies supported by the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponentReference" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the tool component.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "name" + ] + }, + "toolComponentReference": { + "description": "Identifies a particular toolComponent object, either the driver or an extension.", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "description": "The 'name' property of the referenced toolComponent.", + "type": "string" + }, + "index": { + "description": "An index into the referenced toolComponent in tool.extensions.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "guid": { + "description": "The 'guid' property of the referenced toolComponent.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the toolComponentReference.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "translationMetadata": { + "description": "Provides additional metadata related to translation.", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "description": "The name associated with the translation metadata.", + "type": "string" + }, + "fullName": { + "description": "The full 
name associated with the translation metadata.", + "type": "string" + }, + "shortDescription": { + "description": "A brief description of the translation metadata.", + "$ref": "#/definitions/multiformatMessageString" + }, + "fullDescription": { + "description": "A comprehensive description of the translation metadata.", + "$ref": "#/definitions/multiformatMessageString" + }, + "downloadUri": { + "description": "The absolute URI from which the translation metadata can be downloaded.", + "type": "string", + "format": "uri" + }, + "informationUri": { + "description": "The absolute URI from which information related to the translation metadata can be downloaded.", + "type": "string", + "format": "uri" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the translation metadata.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "name" + ] + }, + "versionControlDetails": { + "description": "Specifies the information necessary to retrieve a desired revision from a version control system.", + "type": "object", + "additionalProperties": false, + "properties": { + "repositoryUri": { + "description": "The absolute URI of the repository.", + "type": "string", + "format": "uri" + }, + "revisionId": { + "description": "A string that uniquely and permanently identifies the revision within the repository.", + "type": "string" + }, + "branch": { + "description": "The name of a branch containing the revision.", + "type": "string" + }, + "revisionTag": { + "description": "A tag that has been applied to the revision.", + "type": "string" + }, + "asOfTimeUtc": { + "description": "A Coordinated Universal Time (UTC) date and time that can be used to synchronize an enlistment to the state of the repository at that time.", + "type": "string", + "format": "date-time" + }, + "mappedTo": { + "description": "The location in the local file system to which the root of the repository was mapped at the time of the analysis.", + 
"$ref": "#/definitions/artifactLocation" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the version control details.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "repositoryUri" + ] + }, + "webRequest": { + "description": "Describes an HTTP request.", + "type": "object", + "additionalProperties": false, + "properties": { + "index": { + "description": "The index within the run.webRequests array of the request object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "protocol": { + "description": "The request protocol. Example: 'http'.", + "type": "string" + }, + "version": { + "description": "The request version. Example: '1.1'.", + "type": "string" + }, + "target": { + "description": "The target of the request.", + "type": "string" + }, + "method": { + "description": "The HTTP method. Well-known values are 'GET', 'PUT', 'POST', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'.", + "type": "string" + }, + "headers": { + "description": "The request headers.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "parameters": { + "description": "The request parameters.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "body": { + "description": "The body of the request.", + "$ref": "#/definitions/artifactContent" + }, + "properties": { + "description": "Key/value pairs that provide additional information about the request.", + "$ref": "#/definitions/propertyBag" + } + } + }, + "webResponse": { + "description": "Describes the response to an HTTP request.", + "type": "object", + "additionalProperties": false, + "properties": { + "index": { + "description": "The index within the run.webResponses array of the response object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + "protocol": { + "description": "The response protocol. 
Example: 'http'.", + "type": "string" + }, + "version": { + "description": "The response version. Example: '1.1'.", + "type": "string" + }, + "statusCode": { + "description": "The response status code. Example: 451.", + "type": "integer" + }, + "reasonPhrase": { + "description": "The response reason. Example: 'Not found'.", + "type": "string" + }, + "headers": { + "description": "The response headers.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "body": { + "description": "The body of the response.", + "$ref": "#/definitions/artifactContent" + }, + "noResponseReceived": { + "description": "Specifies whether a response was received from the server.", + "type": "boolean", + "default": false + }, + "properties": { + "description": "Key/value pairs that provide additional information about the response.", + "$ref": "#/definitions/propertyBag" + } + } + } + }, + "description": "Static Analysis Results Format (SARIF) Version 2.1.0 JSON Schema: a standard format for the output of static analysis tools.", + "properties": { + "$schema": { + "description": "The URI of the JSON schema corresponding to the version.", + "type": "string", + "format": "uri" + }, + "version": { + "description": "The SARIF format version of this log file.", + "enum": [ + "2.1.0" + ] + }, + "runs": { + "description": "The set of runs contained in this log file.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/run" + } + }, + "inlineExternalProperties": { + "description": "References to external property files that share data between runs.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/externalProperties" + } + }, + "properties": { + "description": "Key/value pairs that provide additional information about the log file.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": [ + "version", + "runs" + ], + "title": "Static Analysis Results Format (SARIF) 
Version 2.1.0 JSON Schema", + "type": "object" +} \ No newline at end of file diff --git a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go new file mode 100644 index 000000000..91c050e87 --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go @@ -0,0 +1,89 @@ +/* +Copyright The Ratify Authors. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package schemavalidation + +import ( + "os" + "testing" +) + +var schemaURL = "https://json.schemastore.org/sarif-2.1.0-rtm.5.json" +var schemaFileBytes []byte +var schemaFileMismatchBytes []byte +var schemaFileBadBytes []byte +var trivyScanReport []byte + +func init() { + trivyScanReport, _ = os.ReadFile("./testdata/trivy_scan_report.json") + schemaFileBytes, _ = os.ReadFile("./schemas/sarif-2.1.0.json") + schemaFileMismatchBytes, _ = os.ReadFile("./testdata/mismatch_schema.json") + schemaFileBadBytes, _ = os.ReadFile("./testdata/bad_schema.json") +} + +// TestProperSchemaValidates tests that the proper schema validates +func TestProperSchemaValidates(t *testing.T) { + expected := true + result := Validate(schemaURL, trivyScanReport) == nil + + if expected != result { + t.Logf("expected: %v, got: %v", expected, result) + t.FailNow() + } +} + +// TestInvalidSchemaFailsValidation tests that an invalid schema fails validation +func TestInvalidSchemaFailsValidation(t *testing.T) { + expected := 
false + result := Validate("bad schema", trivyScanReport) == nil + + if expected != result { + t.Logf("expected: %v, got: %v", expected, result) + t.FailNow() + } +} + +// TestProperSchemaValidatesFromFile tests that the proper schema validates from a file +func TestProperSchemaValidatesFromFile(t *testing.T) { + expected := true + result := ValidateAgainstOfflineSchema(schemaFileBytes, trivyScanReport) == nil + + if expected != result { + t.Logf("expected: %v, got: %v", expected, result) + t.FailNow() + } +} + +// TestSchemaMismatchFromFile tests that a schema mismatch fails validation from a file +func TestSchemaMismatchFromFile(t *testing.T) { + expected := false + result := ValidateAgainstOfflineSchema(schemaFileMismatchBytes, trivyScanReport) == nil + + if expected != result { + t.Logf("expected: %v, got: %v", expected, result) + t.FailNow() + } +} + +// TestBadSchemaValidatesFromFile tests that a bad schema fails validation from a file +func TestBadSchemaValidatesFromFile(t *testing.T) { + expected := false + result := ValidateAgainstOfflineSchema(schemaFileBadBytes, trivyScanReport) == nil + + if expected != result { + t.Logf("expected: %v, got: %v", expected, result) + t.FailNow() + } +} diff --git a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go new file mode 100644 index 000000000..74ab85ecb --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go @@ -0,0 +1,65 @@ +/* +Copyright The Ratify Authors. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// Validate validates JSON content (as a byte slice) against a schema referenced by URL or canonical file path
+1,964 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "http://spdx.org/rdf/terms/2.3", + "title": "SPDX 2.3", + "type": "object", + "properties": { + "SPDXID": { + "type": "string", + "description": "Uniquely identify any element in an SPDX document which may be referenced by other elements." + }, + "annotations": { + "description": "Provide additional information about an SpdxElement.", + "type": "array", + "items": { + "type": "object", + "properties": { + "annotationDate": { + "description": "Identify when the comment was made. This is to be specified according to the combined date and time in the UTC format, as specified in the ISO 8601 standard.", + "type": "string" + }, + "annotationType": { + "description": "Type of the annotation.", + "type": "string", + "enum": [ + "OTHER", + "REVIEW" + ] + }, + "annotator": { + "description": "This field identifies the person, organization, or tool that has commented on a file, package, snippet, or the entire document.", + "type": "string" + }, + "comment": { + "type": "string" + } + }, + "required": [ + "annotationDate", + "annotationType", + "annotator", + "comment" + ], + "additionalProperties": false, + "description": "An Annotation is a comment on an SpdxItem by an agent." + } + }, + "comment": { + "type": "string" + }, + "creationInfo": { + "type": "object", + "properties": { + "comment": { + "type": "string" + }, + "created": { + "description": "Identify when the SPDX document was originally created. The date is to be specified according to combined date and time in UTC format as specified in ISO 8601 standard.", + "type": "string" + }, + "creators": { + "description": "Identify who (or what, in the case of a tool) created the SPDX document. If the SPDX document was created by an individual, indicate the person's name. If the SPDX document was created on behalf of a company or organization, indicate the entity name. 
If the SPDX document was created using a software tool, indicate the name and version for that tool. If multiple participants or tools were involved, use multiple instances of this field. Person name or organization name may be designated as “anonymous” if appropriate.", + "minItems": 1, + "type": "array", + "items": { + "description": "Identify who (or what, in the case of a tool) created the SPDX document. If the SPDX document was created by an individual, indicate the person's name. If the SPDX document was created on behalf of a company or organization, indicate the entity name. If the SPDX document was created using a software tool, indicate the name and version for that tool. If multiple participants or tools were involved, use multiple instances of this field. Person name or organization name may be designated as “anonymous” if appropriate.", + "type": "string" + } + }, + "licenseListVersion": { + "description": "An optional field for creators of the SPDX file to provide the version of the SPDX License List used when the SPDX file was created.", + "type": "string" + } + }, + "required": [ + "created", + "creators" + ], + "additionalProperties": false, + "description": "One instance is required for each SPDX file produced. It provides the necessary information for forward and backward compatibility for processing tools." + }, + "dataLicense": { + "description": "License expression for dataLicense. See SPDX Annex D for the license expression syntax. Compliance with the SPDX specification includes populating the SPDX fields therein with data related to such fields (\"SPDX-Metadata\"). The SPDX specification contains numerous fields where an SPDX document creator may provide relevant explanatory text in SPDX-Metadata. Without opining on the lawfulness of \"database rights\" (in jurisdictions where applicable), such explanatory text is copyrightable subject matter in most Berne Convention countries. 
By using the SPDX specification, or any portion hereof, you hereby agree that any copyright rights (as determined by your jurisdiction) in any SPDX-Metadata, including without limitation explanatory text, shall be subject to the terms of the Creative Commons CC0 1.0 Universal license. For SPDX-Metadata not containing any copyright rights, you hereby agree and acknowledge that the SPDX-Metadata is provided to you \"as-is\" and without any representations or warranties of any kind concerning the SPDX-Metadata, express, implied, statutory or otherwise, including without limitation warranties of title, merchantability, fitness for a particular purpose, non-infringement, or the absence of latent or other defects, accuracy, or the presence or absence of errors, whether or not discoverable, all to the greatest extent permissible under applicable law.", + "type": "string" + }, + "externalDocumentRefs": { + "description": "Identify any external SPDX documents referenced within this SPDX document.", + "type": "array", + "items": { + "type": "object", + "properties": { + "checksum": { + "type": "object", + "properties": { + "algorithm": { + "description": "Identifies the algorithm used to produce the subject Checksum. Currently, SHA-1 is the only supported algorithm. It is anticipated that other algorithms will be supported at a later time.", + "type": "string", + "enum": [ + "SHA1", + "BLAKE3", + "SHA3-384", + "SHA256", + "SHA384", + "BLAKE2b-512", + "BLAKE2b-256", + "SHA3-512", + "MD2", + "ADLER32", + "MD4", + "SHA3-256", + "BLAKE2b-384", + "SHA512", + "MD6", + "MD5", + "SHA224" + ] + }, + "checksumValue": { + "description": "The checksumValue property provides a lower case hexidecimal encoded digest value produced using a specific algorithm.", + "type": "string" + } + }, + "required": [ + "algorithm", + "checksumValue" + ], + "additionalProperties": false, + "description": "A Checksum is value that allows the contents of a file to be authenticated. 
Even small changes to the content of the file will change its checksum. This class allows the results of a variety of checksum and cryptographic message digest algorithms to be represented." + }, + "externalDocumentId": { + "description": "externalDocumentId is a string containing letters, numbers, ., - and/or + which uniquely identifies an external document within this document.", + "type": "string" + }, + "spdxDocument": { + "description": "SPDX ID for SpdxDocument. A property containing an SPDX document.", + "type": "string" + } + }, + "required": [ + "checksum", + "externalDocumentId", + "spdxDocument" + ], + "additionalProperties": false, + "description": "Information about an external SPDX document reference including the checksum. This allows for verification of the external references." + } + }, + "hasExtractedLicensingInfos": { + "description": "Indicates that a particular ExtractedLicensingInfo was defined in the subject SpdxDocument.", + "type": "array", + "items": { + "type": "object", + "properties": { + "comment": { + "type": "string" + }, + "crossRefs": { + "description": "Cross Reference Detail for a license SeeAlso URL", + "type": "array", + "items": { + "type": "object", + "properties": { + "isLive": { + "description": "Indicate a URL is still a live accessible location on the public internet", + "type": "boolean" + }, + "isValid": { + "description": "True if the URL is a valid well formed URL", + "type": "boolean" + }, + "isWayBackLink": { + "description": "True if the License SeeAlso URL points to a Wayback archive", + "type": "boolean" + }, + "match": { + "description": "Status of a License List SeeAlso URL reference if it refers to a website that matches the license text.", + "type": "string" + }, + "order": { + "description": "The ordinal order of this element within a list", + "type": "integer" + }, + "timestamp": { + "description": "Timestamp", + "type": "string" + }, + "url": { + "description": "URL Reference", + "type": "string" + } + }, 
+ "required": [ + "url" + ], + "additionalProperties": false, + "description": "Cross reference details for the a URL reference" + } + }, + "extractedText": { + "description": "Provide a copy of the actual text of the license reference extracted from the package, file or snippet that is associated with the License Identifier to aid in future analysis.", + "type": "string" + }, + "licenseId": { + "description": "A human readable short form license identifier for a license. The license ID is either on the standard license list or the form \"LicenseRef-[idString]\" where [idString] is a unique string containing letters, numbers, \".\" or \"-\". When used within a license expression, the license ID can optionally include a reference to an external document in the form \"DocumentRef-[docrefIdString]:LicenseRef-[idString]\" where docRefIdString is an ID for an external document reference.", + "type": "string" + }, + "name": { + "description": "Identify name of this SpdxElement.", + "type": "string" + }, + "seeAlsos": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "extractedText", + "licenseId" + ], + "additionalProperties": false, + "description": "An ExtractedLicensingInfo represents a license or licensing notice that was found in a package, file or snippet. Any license text that is recognized as a license may be represented as a License rather than an ExtractedLicensingInfo." + } + }, + "name": { + "description": "Identify name of this SpdxElement.", + "type": "string" + }, + "revieweds": { + "description": "Reviewed", + "type": "array", + "items": { + "type": "object", + "properties": { + "comment": { + "type": "string" + }, + "reviewDate": { + "description": "The date and time at which the SpdxDocument was reviewed. This value must be in UTC and have 'Z' as its timezone indicator.", + "type": "string" + }, + "reviewer": { + "description": "The name and, optionally, contact information of the person who performed the review. 
Values of this property must conform to the agent and tool syntax. The reviewer property is deprecated in favor of Annotation with an annotationType review.", + "type": "string" + } + }, + "required": [ + "reviewDate" + ], + "additionalProperties": false, + "description": "This class has been deprecated in favor of an Annotation with an Annotation type of review." + } + }, + "spdxVersion": { + "description": "Provide a reference number that can be used to understand how to parse and interpret the rest of the file. It will enable both future changes to the specification and to support backward compatibility. The version number consists of a major and minor version indicator. The major field will be incremented when incompatible changes between versions are made (one or more sections are created, modified or deleted). The minor field will be incremented when backwards compatible changes are made.", + "type": "string" + }, + "documentNamespace": { + "type": "string", + "description": "The URI provides an unambiguous mechanism for other SPDX documents to reference SPDX elements within this SPDX document." + }, + "documentDescribes": { + "description": "Packages, files and/or Snippets described by this SPDX document", + "type": "array", + "items": { + "type": "string", + "description": "SPDX ID for each Package, File, or Snippet." + } + }, + "packages": { + "description": "Packages referenced in the SPDX document", + "type": "array", + "items": { + "type": "object", + "properties": { + "SPDXID": { + "type": "string", + "description": "Uniquely identify any element in an SPDX document which may be referenced by other elements." + }, + "annotations": { + "description": "Provide additional information about an SpdxElement.", + "type": "array", + "items": { + "type": "object", + "properties": { + "annotationDate": { + "description": "Identify when the comment was made. 
This is to be specified according to the combined date and time in the UTC format, as specified in the ISO 8601 standard.", + "type": "string" + }, + "annotationType": { + "description": "Type of the annotation.", + "type": "string", + "enum": [ + "OTHER", + "REVIEW" + ] + }, + "annotator": { + "description": "This field identifies the person, organization, or tool that has commented on a file, package, snippet, or the entire document.", + "type": "string" + }, + "comment": { + "type": "string" + } + }, + "required": [ + "annotationDate", + "annotationType", + "annotator", + "comment" + ], + "additionalProperties": false, + "description": "An Annotation is a comment on an SpdxItem by an agent." + } + }, + "attributionTexts": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "array", + "items": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). 
The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "string" + } + }, + "builtDate": { + "description": "This field provides a place for recording the actual date the package was built.", + "type": "string" + }, + "checksums": { + "description": "The checksum property provides a mechanism that can be used to verify that the contents of a File or Package have not changed.", + "type": "array", + "items": { + "type": "object", + "properties": { + "algorithm": { + "description": "Identifies the algorithm used to produce the subject Checksum. Currently, SHA-1 is the only supported algorithm. It is anticipated that other algorithms will be supported at a later time.", + "type": "string", + "enum": [ + "SHA1", + "BLAKE3", + "SHA3-384", + "SHA256", + "SHA384", + "BLAKE2b-512", + "BLAKE2b-256", + "SHA3-512", + "MD2", + "ADLER32", + "MD4", + "SHA3-256", + "BLAKE2b-384", + "SHA512", + "MD6", + "MD5", + "SHA224" + ] + }, + "checksumValue": { + "description": "The checksumValue property provides a lower case hexidecimal encoded digest value produced using a specific algorithm.", + "type": "string" + } + }, + "required": [ + "algorithm", + "checksumValue" + ], + "additionalProperties": false, + "description": "A Checksum is value that allows the contents of a file to be authenticated. Even small changes to the content of the file will change its checksum. This class allows the results of a variety of checksum and cryptographic message digest algorithms to be represented." 
+ } + }, + "comment": { + "type": "string" + }, + "copyrightText": { + "description": "The text of copyright declarations recited in the package, file or snippet.\n\nIf the copyrightText field is not present, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "description": { + "description": "Provides a detailed description of the package.", + "type": "string" + }, + "downloadLocation": { + "description": "The URI at which this package is available for download. Private (i.e., not publicly reachable) URIs are acceptable as values of this property. The values http://spdx.org/rdf/terms#none and http://spdx.org/rdf/terms#noassertion may be used to specify that the package is not downloadable or that no attempt was made to determine its download location, respectively.", + "type": "string" + }, + "externalRefs": { + "description": "An External Reference allows a Package to reference an external source of additional information, metadata, enumerations, asset identifiers, or downloadable content believed to be relevant to the Package.", + "type": "array", + "items": { + "type": "object", + "properties": { + "comment": { + "type": "string" + }, + "referenceCategory": { + "description": "Category for the external reference", + "type": "string", + "enum": [ + "OTHER", + "PERSISTENT-ID", + "PERSISTENT_ID", + "SECURITY", + "PACKAGE-MANAGER", + "PACKAGE_MANAGER" + ] + }, + "referenceLocator": { + "description": "The unique string with no spaces necessary to access the package-specific information, metadata, or content within the target location. The format of the locator is subject to constraints defined by the .", + "type": "string" + }, + "referenceType": { + "description": "Type of the external reference. 
These are defined in an appendix in the SPDX specification.", + "type": "string" + } + }, + "required": [ + "referenceCategory", + "referenceLocator", + "referenceType" + ], + "additionalProperties": false, + "description": "An External Reference allows a Package to reference an external source of additional information, metadata, enumerations, asset identifiers, or downloadable content believed to be relevant to the Package." + } + }, + "filesAnalyzed": { + "description": "Indicates whether the file content of this package has been available for or subjected to analysis when creating the SPDX document. If false indicates packages that represent metadata or URI references to a project, product, artifact, distribution or a component. If set to false, the package must not contain any files.", + "type": "boolean" + }, + "hasFiles": { + "description": "Indicates that a particular file belongs to a package.", + "type": "array", + "items": { + "description": "SPDX ID for File. Indicates that a particular file belongs to a package.", + "type": "string" + } + }, + "homepage": { + "type": "string" + }, + "licenseComments": { + "description": "The licenseComments property allows the preparer of the SPDX document to describe why the licensing in spdx:licenseConcluded was chosen.", + "type": "string" + }, + "licenseConcluded": { + "description": "License expression for licenseConcluded. See SPDX Annex D for the license expression syntax. The licensing that the preparer of this SPDX document has concluded, based on the evidence, actually applies to the SPDX Item.\n\nIf the licenseConcluded field is not present for an SPDX Item, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "licenseDeclared": { + "description": "License expression for licenseDeclared. See SPDX Annex D for the license expression syntax. The licensing that the creators of the software in the package, or the packager, have declared. 
Declarations by the original software creator should be preferred, if they exist.", + "type": "string" + }, + "licenseInfoFromFiles": { + "description": "The licensing information that was discovered directly within the package. There will be an instance of this property for each distinct value of alllicenseInfoInFile properties of all files contained in the package.\n\nIf the licenseInfoFromFiles field is not present for a package and filesAnalyzed property for that same package is true or omitted, it implies an equivalent meaning to NOASSERTION.", + "type": "array", + "items": { + "description": "License expression for licenseInfoFromFiles. See SPDX Annex D for the license expression syntax. The licensing information that was discovered directly within the package. There will be an instance of this property for each distinct value of alllicenseInfoInFile properties of all files contained in the package.\n\nIf the licenseInfoFromFiles field is not present for a package and filesAnalyzed property for that same package is true or omitted, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + } + }, + "name": { + "description": "Identify name of this SpdxElement.", + "type": "string" + }, + "originator": { + "description": "The name and, optionally, contact information of the person or organization that originally created the package. Values of this property must conform to the agent and tool syntax.", + "type": "string" + }, + "packageFileName": { + "description": "The base name of the package file name. For example, zlib-1.2.5.tar.gz.", + "type": "string" + }, + "packageVerificationCode": { + "type": "object", + "properties": { + "packageVerificationCodeExcludedFiles": { + "description": "A file that was excluded when calculating the package verification code. This is usually a file containing SPDX data regarding the package. If a package contains more than one SPDX file all SPDX files must be excluded from the package verification code. 
If this is not done it would be impossible to correctly calculate the verification codes in both files.", + "type": "array", + "items": { + "description": "A file that was excluded when calculating the package verification code. This is usually a file containing SPDX data regarding the package. If a package contains more than one SPDX file all SPDX files must be excluded from the package verification code. If this is not done it would be impossible to correctly calculate the verification codes in both files.", + "type": "string" + } + }, + "packageVerificationCodeValue": { + "description": "The actual package verification code as a hex encoded value.", + "type": "string" + } + }, + "required": [ + "packageVerificationCodeValue" + ], + "additionalProperties": false, + "description": "A manifest based verification code (the algorithm is defined in section 4.7 of the full specification) of the SPDX Item. This allows consumers of this data and/or database to determine if an SPDX item they have in hand is identical to the SPDX item from which the data was produced. This algorithm works even if the SPDX document is included in the SPDX item." + }, + "primaryPackagePurpose": { + "description": "This field provides information about the primary purpose of the identified package. 
Package Purpose is intrinsic to how the package is being used rather than the content of the package.", + "type": "string", + "enum": [ + "OTHER", + "INSTALL", + "ARCHIVE", + "FIRMWARE", + "APPLICATION", + "FRAMEWORK", + "LIBRARY", + "CONTAINER", + "SOURCE", + "DEVICE", + "OPERATING_SYSTEM", + "FILE" + ] + }, + "releaseDate": { + "description": "This field provides a place for recording the date the package was released.", + "type": "string" + }, + "sourceInfo": { + "description": "Allows the producer(s) of the SPDX document to describe how the package was acquired and/or changed from the original source.", + "type": "string" + }, + "summary": { + "description": "Provides a short description of the package.", + "type": "string" + }, + "supplier": { + "description": "The name and, optionally, contact information of the person or organization who was the immediate supplier of this package to the recipient. The supplier may be different than originator when the software has been repackaged. Values of this property must conform to the agent and tool syntax.", + "type": "string" + }, + "validUntilDate": { + "description": "This field provides a place for recording the end of the support period for a package from the supplier.", + "type": "string" + }, + "versionInfo": { + "description": "Provides an indication of the version of the package that is described by this SpdxDocument.", + "type": "string" + } + }, + "required": [ + "SPDXID", + "downloadLocation", + "name" + ], + "additionalProperties": false + } + }, + "files": { + "description": "Files referenced in the SPDX document", + "type": "array", + "items": { + "type": "object", + "properties": { + "SPDXID": { + "type": "string", + "description": "Uniquely identify any element in an SPDX document which may be referenced by other elements." 
+ }, + "annotations": { + "description": "Provide additional information about an SpdxElement.", + "type": "array", + "items": { + "type": "object", + "properties": { + "annotationDate": { + "description": "Identify when the comment was made. This is to be specified according to the combined date and time in the UTC format, as specified in the ISO 8601 standard.", + "type": "string" + }, + "annotationType": { + "description": "Type of the annotation.", + "type": "string", + "enum": [ + "OTHER", + "REVIEW" + ] + }, + "annotator": { + "description": "This field identifies the person, organization, or tool that has commented on a file, package, snippet, or the entire document.", + "type": "string" + }, + "comment": { + "type": "string" + } + }, + "required": [ + "annotationDate", + "annotationType", + "annotator", + "comment" + ], + "additionalProperties": false, + "description": "An Annotation is a comment on an SpdxItem by an agent." + } + }, + "artifactOfs": { + "description": "Indicates the project in which the SpdxElement originated. Tools must preserve doap:homepage and doap:name properties and the URI (if one is known) of doap:Project resources that are values of this property. All other properties of doap:Projects are not directly supported by SPDX and may be dropped when translating to or from some SPDX formats.", + "type": "array", + "items": { + "type": "object" + } + }, + "attributionTexts": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). 
The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "array", + "items": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "string" + } + }, + "checksums": { + "description": "The checksum property provides a mechanism that can be used to verify that the contents of a File or Package have not changed.", + "minItems": 1, + "type": "array", + "items": { + "type": "object", + "properties": { + "algorithm": { + "description": "Identifies the algorithm used to produce the subject Checksum. Currently, SHA-1 is the only supported algorithm. It is anticipated that other algorithms will be supported at a later time.", + "type": "string", + "enum": [ + "SHA1", + "BLAKE3", + "SHA3-384", + "SHA256", + "SHA384", + "BLAKE2b-512", + "BLAKE2b-256", + "SHA3-512", + "MD2", + "ADLER32", + "MD4", + "SHA3-256", + "BLAKE2b-384", + "SHA512", + "MD6", + "MD5", + "SHA224" + ] + }, + "checksumValue": { + "description": "The checksumValue property provides a lower case hexidecimal encoded digest value produced using a specific algorithm.", + "type": "string" + } + }, + "required": [ + "algorithm", + "checksumValue" + ], + "additionalProperties": false, + "description": "A Checksum is value that allows the contents of a file to be authenticated. Even small changes to the content of the file will change its checksum. 
This class allows the results of a variety of checksum and cryptographic message digest algorithms to be represented." + } + }, + "comment": { + "type": "string" + }, + "copyrightText": { + "description": "The text of copyright declarations recited in the package, file or snippet.\n\nIf the copyrightText field is not present, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "fileContributors": { + "description": "This field provides a place for the SPDX file creator to record file contributors. Contributors could include names of copyright holders and/or authors who may not be copyright holders yet contributed to the file content.", + "type": "array", + "items": { + "description": "This field provides a place for the SPDX file creator to record file contributors. Contributors could include names of copyright holders and/or authors who may not be copyright holders yet contributed to the file content.", + "type": "string" + } + }, + "fileDependencies": { + "description": "This field is deprecated since SPDX 2.0 in favor of using Section 7 which provides more granularity about relationships.", + "type": "array", + "items": { + "description": "SPDX ID for File. 
This field is deprecated since SPDX 2.0 in favor of using Section 7 which provides more granularity about relationships.", + "type": "string" + } + }, + "fileName": { + "description": "The name of the file relative to the root of the package.", + "type": "string" + }, + "fileTypes": { + "description": "The type of the file.", + "type": "array", + "items": { + "description": "The type of the file.", + "type": "string", + "enum": [ + "OTHER", + "DOCUMENTATION", + "IMAGE", + "VIDEO", + "ARCHIVE", + "SPDX", + "APPLICATION", + "SOURCE", + "BINARY", + "TEXT", + "AUDIO" + ] + } + }, + "licenseComments": { + "description": "The licenseComments property allows the preparer of the SPDX document to describe why the licensing in spdx:licenseConcluded was chosen.", + "type": "string" + }, + "licenseConcluded": { + "description": "License expression for licenseConcluded. See SPDX Annex D for the license expression syntax. The licensing that the preparer of this SPDX document has concluded, based on the evidence, actually applies to the SPDX Item.\n\nIf the licenseConcluded field is not present for an SPDX Item, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "licenseInfoInFiles": { + "description": "Licensing information that was discovered directly in the subject file. This is also considered a declared license for the file.\n\nIf the licenseInfoInFile field is not present for a file, it implies an equivalent meaning to NOASSERTION.", + "type": "array", + "items": { + "description": "License expression for licenseInfoInFile. See SPDX Annex D for the license expression syntax. Licensing information that was discovered directly in the subject file. 
This is also considered a declared license for the file.\n\nIf the licenseInfoInFile field is not present for a file, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + } + }, + "noticeText": { + "description": "This field provides a place for the SPDX file creator to record potential legal notices found in the file. This may or may not include copyright statements.", + "type": "string" + } + }, + "required": [ + "SPDXID", + "checksums", + "fileName" + ], + "additionalProperties": false + } + }, + "snippets": { + "description": "Snippets referenced in the SPDX document", + "type": "array", + "items": { + "type": "object", + "properties": { + "SPDXID": { + "type": "string", + "description": "Uniquely identify any element in an SPDX document which may be referenced by other elements." + }, + "annotations": { + "description": "Provide additional information about an SpdxElement.", + "type": "array", + "items": { + "type": "object", + "properties": { + "annotationDate": { + "description": "Identify when the comment was made. This is to be specified according to the combined date and time in the UTC format, as specified in the ISO 8601 standard.", + "type": "string" + }, + "annotationType": { + "description": "Type of the annotation.", + "type": "string", + "enum": [ + "OTHER", + "REVIEW" + ] + }, + "annotator": { + "description": "This field identifies the person, organization, or tool that has commented on a file, package, snippet, or the entire document.", + "type": "string" + }, + "comment": { + "type": "string" + } + }, + "required": [ + "annotationDate", + "annotationType", + "annotator", + "comment" + ], + "additionalProperties": false, + "description": "An Annotation is a comment on an SpdxItem by an agent." + } + }, + "attributionTexts": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. 
This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "array", + "items": { + "description": "This field provides a place for the SPDX data creator to record acknowledgements that may be required to be communicated in some contexts. This is not meant to include the actual complete license text (see licenseConculded and licenseDeclared), and may or may not include copyright notices (see also copyrightText). The SPDX data creator may use this field to record other acknowledgements, such as particular clauses from license texts, which may be necessary or desirable to reproduce.", + "type": "string" + } + }, + "comment": { + "type": "string" + }, + "copyrightText": { + "description": "The text of copyright declarations recited in the package, file or snippet.\n\nIf the copyrightText field is not present, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "licenseComments": { + "description": "The licenseComments property allows the preparer of the SPDX document to describe why the licensing in spdx:licenseConcluded was chosen.", + "type": "string" + }, + "licenseConcluded": { + "description": "License expression for licenseConcluded. See SPDX Annex D for the license expression syntax. The licensing that the preparer of this SPDX document has concluded, based on the evidence, actually applies to the SPDX Item.\n\nIf the licenseConcluded field is not present for an SPDX Item, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + }, + "licenseInfoInSnippets": { + "description": "Licensing information that was discovered directly in the subject snippet. 
This is also considered a declared license for the snippet.\n\nIf the licenseInfoInSnippet field is not present for a snippet, it implies an equivalent meaning to NOASSERTION.", + "type": "array", + "items": { + "description": "License expression for licenseInfoInSnippet. See SPDX Annex D for the license expression syntax. Licensing information that was discovered directly in the subject snippet. This is also considered a declared license for the snippet.\n\nIf the licenseInfoInSnippet field is not present for a snippet, it implies an equivalent meaning to NOASSERTION.", + "type": "string" + } + }, + "name": { + "description": "Identify name of this SpdxElement.", + "type": "string" + }, + "ranges": { + "description": "This field defines the byte range in the original host file (in X.2) that the snippet information applies to", + "minItems": 1, + "type": "array", + "items": { + "type": "object", + "properties": { + "endPointer": { + "type": "object", + "properties": { + "reference": { + "description": "SPDX ID for File", + "type": "string" + }, + "offset": { + "type": "integer", + "description": "Byte offset in the file" + }, + "lineNumber": { + "type": "integer", + "description": "line number offset in the file" + } + }, + "required": [ + "reference" + ], + "additionalProperties": false + }, + "startPointer": { + "type": "object", + "properties": { + "reference": { + "description": "SPDX ID for File", + "type": "string" + }, + "offset": { + "type": "integer", + "description": "Byte offset in the file" + }, + "lineNumber": { + "type": "integer", + "description": "line number offset in the file" + } + }, + "required": [ + "reference" + ], + "additionalProperties": false + } + }, + "required": [ + "endPointer", + "startPointer" + ], + "additionalProperties": false + } + }, + "snippetFromFile": { + "description": "SPDX ID for File. File containing the SPDX element (e.g. 
the file contaning a snippet).", + "type": "string" + } + }, + "required": [ + "SPDXID", + "name", + "ranges", + "snippetFromFile" + ], + "additionalProperties": false + } + }, + "relationships": { + "description": "Relationships referenced in the SPDX document", + "type": "array", + "items": { + "type": "object", + "properties": { + "spdxElementId": { + "type": "string", + "description": "Id to which the SPDX element is related" + }, + "comment": { + "type": "string" + }, + "relatedSpdxElement": { + "description": "SPDX ID for SpdxElement. A related SpdxElement.", + "type": "string" + }, + "relationshipType": { + "description": "Describes the type of relationship between two SPDX elements.", + "type": "string", + "enum": [ + "VARIANT_OF", + "COPY_OF", + "PATCH_FOR", + "TEST_DEPENDENCY_OF", + "CONTAINED_BY", + "DATA_FILE_OF", + "OPTIONAL_COMPONENT_OF", + "ANCESTOR_OF", + "GENERATES", + "CONTAINS", + "OPTIONAL_DEPENDENCY_OF", + "FILE_ADDED", + "REQUIREMENT_DESCRIPTION_FOR", + "DEV_DEPENDENCY_OF", + "DEPENDENCY_OF", + "BUILD_DEPENDENCY_OF", + "DESCRIBES", + "PREREQUISITE_FOR", + "HAS_PREREQUISITE", + "PROVIDED_DEPENDENCY_OF", + "DYNAMIC_LINK", + "DESCRIBED_BY", + "METAFILE_OF", + "DEPENDENCY_MANIFEST_OF", + "PATCH_APPLIED", + "RUNTIME_DEPENDENCY_OF", + "TEST_OF", + "TEST_TOOL_OF", + "DEPENDS_ON", + "SPECIFICATION_FOR", + "FILE_MODIFIED", + "DISTRIBUTION_ARTIFACT", + "AMENDS", + "DOCUMENTATION_OF", + "GENERATED_FROM", + "STATIC_LINK", + "OTHER", + "BUILD_TOOL_OF", + "TEST_CASE_OF", + "PACKAGE_OF", + "DESCENDANT_OF", + "FILE_DELETED", + "EXPANDED_FROM_ARCHIVE", + "DEV_TOOL_OF", + "EXAMPLE_OF" + ] + } + }, + "required": [ + "spdxElementId", + "relatedSpdxElement", + "relationshipType" + ], + "additionalProperties": false + } + } + }, + "required": [ + "SPDXID", + "creationInfo", + "dataLicense", + "name", + "spdxVersion", + "documentNamespace" + ], + "additionalProperties": false +} \ No newline at end of file diff --git 
a/plugins/verifier/vulnerabilityreport/schemavalidation/testdata/trivy_scan_report.json b/plugins/verifier/vulnerabilityreport/schemavalidation/testdata/trivy_scan_report.json new file mode 100644 index 000000000..92ee881f4 --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/testdata/trivy_scan_report.json @@ -0,0 +1,1889 @@ +{ + "version": "2.1.0", + "$schema": "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", + "runs": [ + { + "tool": { + "driver": { + "fullName": "Trivy Vulnerability Scanner", + "informationUri": "https://github.com/aquasecurity/trivy", + "name": "Trivy", + "rules": [ + { + "id": "CVE-2021-36159", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "" + }, + "fullDescription": { + "text": "libfetch before 2021-07-26, as used in apk-tools, xbps, and other products, mishandles numeric strings for the FTP and HTTP protocols. The FTP passive mode implementation allows an out-of-bounds read because strtol is used to parse the relevant numbers into address bytes. It does not check if the line ends prematurely. If it does, the for-loop condition checks for the '\\0' terminator one byte too late." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-36159", + "help": { + "text": "Vulnerability CVE-2021-36159\nSeverity: CRITICAL\nPackage: apk-tools\nFixed Version: 2.10.7-r0\nLink: [CVE-2021-36159](https://avd.aquasec.com/nvd/cve-2021-36159)\nlibfetch before 2021-07-26, as used in apk-tools, xbps, and other products, mishandles numeric strings for the FTP and HTTP protocols. The FTP passive mode implementation allows an out-of-bounds read because strtol is used to parse the relevant numbers into address bytes. It does not check if the line ends prematurely. 
If it does, the for-loop condition checks for the '\\0' terminator one byte too late.", + "markdown": "**Vulnerability CVE-2021-36159**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|CRITICAL|apk-tools|2.10.7-r0|[CVE-2021-36159](https://avd.aquasec.com/nvd/cve-2021-36159)|\n\nlibfetch before 2021-07-26, as used in apk-tools, xbps, and other products, mishandles numeric strings for the FTP and HTTP protocols. The FTP passive mode implementation allows an out-of-bounds read because strtol is used to parse the relevant numbers into address bytes. It does not check if the line ends prematurely. If it does, the for-loop condition checks for the '\\0' terminator one byte too late." + }, + "properties": { + "precision": "very-high", + "security-severity": "9.1", + "tags": [ + "vulnerability", + "security", + "CRITICAL" + ] + } + }, + { + "id": "CVE-2021-30139", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "" + }, + "fullDescription": { + "text": "In Alpine Linux apk-tools before 2.12.5, the tarball parser allows a buffer overflow and crash." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-30139", + "help": { + "text": "Vulnerability CVE-2021-30139\nSeverity: HIGH\nPackage: apk-tools\nFixed Version: 2.10.6-r0\nLink: [CVE-2021-30139](https://avd.aquasec.com/nvd/cve-2021-30139)\nIn Alpine Linux apk-tools before 2.12.5, the tarball parser allows a buffer overflow and crash.", + "markdown": "**Vulnerability CVE-2021-30139**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|apk-tools|2.10.6-r0|[CVE-2021-30139](https://avd.aquasec.com/nvd/cve-2021-30139)|\n\nIn Alpine Linux apk-tools before 2.12.5, the tarball parser allows a buffer overflow and crash." 
+ }, + "properties": { + "precision": "very-high", + "security-severity": "7.5", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-28831", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: invalid free or segmentation fault via malformed gzip data" + }, + "fullDescription": { + "text": "decompress_gunzip.c in BusyBox through 1.32.1 mishandles the error bit on the huft_build result pointer, with a resultant invalid free or segmentation fault, via malformed gzip data." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-28831", + "help": { + "text": "Vulnerability CVE-2021-28831\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r10\nLink: [CVE-2021-28831](https://avd.aquasec.com/nvd/cve-2021-28831)\ndecompress_gunzip.c in BusyBox through 1.32.1 mishandles the error bit on the huft_build result pointer, with a resultant invalid free or segmentation fault, via malformed gzip data.", + "markdown": "**Vulnerability CVE-2021-28831**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r10|[CVE-2021-28831](https://avd.aquasec.com/nvd/cve-2021-28831)|\n\ndecompress_gunzip.c in BusyBox through 1.32.1 mishandles the error bit on the huft_build result pointer, with a resultant invalid free or segmentation fault, via malformed gzip data." 
+ }, + "properties": { + "precision": "very-high", + "security-severity": "7.5", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42378", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_i()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_i function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42378", + "help": { + "text": "Vulnerability CVE-2021-42378\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42378](https://avd.aquasec.com/nvd/cve-2021-42378)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_i function", + "markdown": "**Vulnerability CVE-2021-42378**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42378](https://avd.aquasec.com/nvd/cve-2021-42378)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_i function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42379", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the next_input_file()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a 
crafted awk pattern in the next_input_file function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42379", + "help": { + "text": "Vulnerability CVE-2021-42379\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42379](https://avd.aquasec.com/nvd/cve-2021-42379)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the next_input_file function", + "markdown": "**Vulnerability CVE-2021-42379**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42379](https://avd.aquasec.com/nvd/cve-2021-42379)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the next_input_file function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42380", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the clrvar()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the clrvar function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42380", + "help": { + "text": "Vulnerability CVE-2021-42380\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42380](https://avd.aquasec.com/nvd/cve-2021-42380)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the clrvar function", + "markdown": "**Vulnerability 
CVE-2021-42380**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42380](https://avd.aquasec.com/nvd/cve-2021-42380)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the clrvar function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42381", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the hash_init()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the hash_init function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42381", + "help": { + "text": "Vulnerability CVE-2021-42381\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42381](https://avd.aquasec.com/nvd/cve-2021-42381)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the hash_init function", + "markdown": "**Vulnerability CVE-2021-42381**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42381](https://avd.aquasec.com/nvd/cve-2021-42381)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the hash_init function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42382", + "name": "OsPackageVulnerability", + 
"shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_s()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_s function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42382", + "help": { + "text": "Vulnerability CVE-2021-42382\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42382](https://avd.aquasec.com/nvd/cve-2021-42382)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_s function", + "markdown": "**Vulnerability CVE-2021-42382**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42382](https://avd.aquasec.com/nvd/cve-2021-42382)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the getvar_s function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42383", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42383", + "help": { + "text": "Vulnerability CVE-2021-42383\nSeverity: 
HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42383](https://avd.aquasec.com/nvd/cve-2021-42383)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function", + "markdown": "**Vulnerability CVE-2021-42383**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42383](https://avd.aquasec.com/nvd/cve-2021-42383)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42384", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the handle_special()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the handle_special function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42384", + "help": { + "text": "Vulnerability CVE-2021-42384\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42384](https://avd.aquasec.com/nvd/cve-2021-42384)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the handle_special function", + "markdown": "**Vulnerability CVE-2021-42384**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42384](https://avd.aquasec.com/nvd/cve-2021-42384)|\n\nA use-after-free in Busybox's awk applet leads 
to denial of service and possibly code execution when processing a crafted awk pattern in the handle_special function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42385", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate()" + }, + "fullDescription": { + "text": "A use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42385", + "help": { + "text": "Vulnerability CVE-2021-42385\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42385](https://avd.aquasec.com/nvd/cve-2021-42385)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function", + "markdown": "**Vulnerability CVE-2021-42385**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42385](https://avd.aquasec.com/nvd/cve-2021-42385)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the evaluate function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42386", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: use-after-free in awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the nvalloc()" + }, + "fullDescription": { + "text": "A 
use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the nvalloc function" + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42386", + "help": { + "text": "Vulnerability CVE-2021-42386\nSeverity: HIGH\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42386](https://avd.aquasec.com/nvd/cve-2021-42386)\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the nvalloc function", + "markdown": "**Vulnerability CVE-2021-42386**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|ssl_client|1.31.1-r11|[CVE-2021-42386](https://avd.aquasec.com/nvd/cve-2021-42386)|\n\nA use-after-free in Busybox's awk applet leads to denial of service and possibly code execution when processing a crafted awk pattern in the nvalloc function" + }, + "properties": { + "precision": "very-high", + "security-severity": "7.2", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-42374", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "busybox: out-of-bounds read in unlzma applet leads to information leak and denial of service when crafted LZMA-compressed input is decompressed" + }, + "fullDescription": { + "text": "An out-of-bounds heap read in Busybox's unlzma applet leads to information leak and denial of service when crafted LZMA-compressed input is decompressed. 
This can be triggered by any applet/format that" + }, + "defaultConfiguration": { + "level": "warning" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-42374", + "help": { + "text": "Vulnerability CVE-2021-42374\nSeverity: MEDIUM\nPackage: ssl_client\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42374](https://avd.aquasec.com/nvd/cve-2021-42374)\nAn out-of-bounds heap read in Busybox's unlzma applet leads to information leak and denial of service when crafted LZMA-compressed input is decompressed. This can be triggered by any applet/format that", + "markdown": "**Vulnerability CVE-2021-42374**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|MEDIUM|ssl_client|1.31.1-r11|[CVE-2021-42374](https://avd.aquasec.com/nvd/cve-2021-42374)|\n\nAn out-of-bounds heap read in Busybox's unlzma applet leads to information leak and denial of service when crafted LZMA-compressed input is decompressed. This can be triggered by any applet/format that" + }, + "properties": { + "precision": "very-high", + "security-severity": "5.3", + "tags": [ + "vulnerability", + "security", + "MEDIUM" + ] + } + }, + { + "id": "CVE-2021-3711", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: SM2 Decryption Buffer Overflow" + }, + "fullDescription": { + "text": "In order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the "out" parameter can be NULL and, on exit, the "outlen" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the "out" parameter. 
A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. The location of the buffer is application dependent but is typically heap allocated. Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k)." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-3711", + "help": { + "text": "Vulnerability CVE-2021-3711\nSeverity: CRITICAL\nPackage: libssl1.1\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3711](https://avd.aquasec.com/nvd/cve-2021-3711)\nIn order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the \"out\" parameter can be NULL and, on exit, the \"outlen\" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the \"out\" parameter. A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. 
This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. The location of the buffer is application dependent but is typically heap allocated. Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k).", + "markdown": "**Vulnerability CVE-2021-3711**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|CRITICAL|libssl1.1|1.1.1l-r0|[CVE-2021-3711](https://avd.aquasec.com/nvd/cve-2021-3711)|\n\nIn order to decrypt SM2 encrypted data an application is expected to call the API function EVP_PKEY_decrypt(). Typically an application will call this function twice. The first time, on entry, the \"out\" parameter can be NULL and, on exit, the \"outlen\" parameter is populated with the buffer size required to hold the decrypted plaintext. The application can then allocate a sufficiently sized buffer and call EVP_PKEY_decrypt() again, but this time passing a non-NULL value for the \"out\" parameter. A bug in the implementation of the SM2 decryption code means that the calculation of the buffer size required to hold the plaintext returned by the first call to EVP_PKEY_decrypt() can be smaller than the actual size required by the second call. This can lead to a buffer overflow when EVP_PKEY_decrypt() is called by the application a second time with a buffer that is too small. A malicious attacker who is able present SM2 content for decryption to an application could cause attacker chosen data to overflow the buffer by up to a maximum of 62 bytes altering the contents of other data held after the buffer, possibly changing application behaviour or causing the application to crash. 
The location of the buffer is application dependent but is typically heap allocated. Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k)." + }, + "properties": { + "precision": "very-high", + "security-severity": "9.8", + "tags": [ + "vulnerability", + "security", + "CRITICAL" + ] + } + }, + { + "id": "CVE-2020-1967", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: Segmentation fault in SSL_check_chain causes denial of service" + }, + "fullDescription": { + "text": "Server or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the "signature_algorithms_cert" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. This issue did not affect OpenSSL versions prior to 1.1.1d. Fixed in OpenSSL 1.1.1g (Affected 1.1.1d-1.1.1f)." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2020-1967", + "help": { + "text": "Vulnerability CVE-2020-1967\nSeverity: HIGH\nPackage: libssl1.1\nFixed Version: 1.1.1g-r0\nLink: [CVE-2020-1967](https://avd.aquasec.com/nvd/cve-2020-1967)\nServer or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. This issue did not affect OpenSSL versions prior to 1.1.1d. 
Fixed in OpenSSL 1.1.1g (Affected 1.1.1d-1.1.1f).", + "markdown": "**Vulnerability CVE-2020-1967**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|libssl1.1|1.1.1g-r0|[CVE-2020-1967](https://avd.aquasec.com/nvd/cve-2020-1967)|\n\nServer or client applications that call the SSL_check_chain() function during or after a TLS 1.3 handshake may crash due to a NULL pointer dereference as a result of incorrect handling of the \"signature_algorithms_cert\" TLS extension. The crash occurs if an invalid or unrecognised signature algorithm is received from the peer. This could be exploited by a malicious peer in a Denial of Service attack. OpenSSL version 1.1.1d, 1.1.1e, and 1.1.1f are affected by this issue. This issue did not affect OpenSSL versions prior to 1.1.1d. Fixed in OpenSSL 1.1.1g (Affected 1.1.1d-1.1.1f)." + }, + "properties": { + "precision": "very-high", + "security-severity": "7.5", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-23840", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: integer overflow in CipherUpdate" + }, + "fullDescription": { + "text": "Calls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissible length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. 
Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x)." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-23840", + "help": { + "text": "Vulnerability CVE-2021-23840\nSeverity: HIGH\nPackage: libssl1.1\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23840](https://avd.aquasec.com/nvd/cve-2021-23840)\nCalls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissible length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x).", + "markdown": "**Vulnerability CVE-2021-23840**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|libssl1.1|1.1.1j-r0|[CVE-2021-23840](https://avd.aquasec.com/nvd/cve-2021-23840)|\n\nCalls to EVP_CipherUpdate, EVP_EncryptUpdate and EVP_DecryptUpdate may overflow the output length argument in some cases where the input length is close to the maximum permissible length for an integer on the platform. In such cases the return value from the function call will be 1 (indicating success), but the output length value will be negative. This could cause applications to behave incorrectly or crash. OpenSSL versions 1.1.1i and below are affected by this issue. 
Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x)." + }, + "properties": { + "precision": "very-high", + "security-severity": "7.5", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-3450", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: CA certificate check bypass with X509_V_FLAG_X509_STRICT" + }, + "fullDescription": { + "text": "The X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a "purpose" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named "purpose" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. 
In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. Fixed in OpenSSL 1.1.1k (Affected 1.1.1h-1.1.1j)." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-3450", + "help": { + "text": "Vulnerability CVE-2021-3450\nSeverity: HIGH\nPackage: libssl1.1\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3450](https://avd.aquasec.com/nvd/cve-2021-3450)\nThe X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. 
In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. Fixed in OpenSSL 1.1.1k (Affected 1.1.1h-1.1.1j).", + "markdown": "**Vulnerability CVE-2021-3450**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|libssl1.1|1.1.1k-r0|[CVE-2021-3450](https://avd.aquasec.com/nvd/cve-2021-3450)|\n\nThe X509_V_FLAG_X509_STRICT flag enables additional security checks of the certificates present in a certificate chain. It is not set by default. Starting from OpenSSL version 1.1.1h a check to disallow certificates in the chain that have explicitly encoded elliptic curve parameters was added as an additional strict check. An error in the implementation of this check meant that the result of a previous check to confirm that certificates in the chain are valid CA certificates was overwritten. This effectively bypasses the check that non-CA certificates must not be able to issue other certificates. If a \"purpose\" has been configured then there is a subsequent opportunity for checks that the certificate is a valid CA. All of the named \"purpose\" values implemented in libcrypto perform this check. Therefore, where a purpose is set the certificate chain will still be rejected even when the strict flag has been used. A purpose is set by default in libssl client and server certificate verification routines, but it can be overridden or removed by an application. 
In order to be affected, an application must explicitly set the X509_V_FLAG_X509_STRICT verification flag and either not set a purpose for the certificate verification or, in the case of TLS client or server applications, override the default purpose. OpenSSL versions 1.1.1h and newer are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. Fixed in OpenSSL 1.1.1k (Affected 1.1.1h-1.1.1j)." + }, + "properties": { + "precision": "very-high", + "security-severity": "7.4", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2021-3712", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: Read buffer overruns processing ASN.1 strings" + }, + "fullDescription": { + "text": "ASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own "d2i" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the "data" and "length" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. 
Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the "data" field, then a read buffer overrun can occur. The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext). Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k). Fixed in OpenSSL 1.0.2za (Affected 1.0.2-1.0.2y)." + }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-3712", + "help": { + "text": "Vulnerability CVE-2021-3712\nSeverity: HIGH\nPackage: libssl1.1\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3712](https://avd.aquasec.com/nvd/cve-2021-3712)\nASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own \"d2i\" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. 
However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the \"data\" and \"length\" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the \"data\" field, then a read buffer overrun can occur. The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext). Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k). Fixed in OpenSSL 1.0.2za (Affected 1.0.2-1.0.2y).", + "markdown": "**Vulnerability CVE-2021-3712**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|HIGH|libssl1.1|1.1.1l-r0|[CVE-2021-3712](https://avd.aquasec.com/nvd/cve-2021-3712)|\n\nASN.1 strings are represented internally within OpenSSL as an ASN1_STRING structure which contains a buffer holding the string data and a field holding the buffer length. 
This contrasts with normal C strings which are repesented as a buffer for the string data which is terminated with a NUL (0) byte. Although not a strict requirement, ASN.1 strings that are parsed using OpenSSL's own \"d2i\" functions (and other similar parsing functions) as well as any string whose value has been set with the ASN1_STRING_set() function will additionally NUL terminate the byte array in the ASN1_STRING structure. However, it is possible for applications to directly construct valid ASN1_STRING structures which do not NUL terminate the byte array by directly setting the \"data\" and \"length\" fields in the ASN1_STRING array. This can also happen by using the ASN1_STRING_set0() function. Numerous OpenSSL functions that print ASN.1 data have been found to assume that the ASN1_STRING byte array will be NUL terminated, even though this is not guaranteed for strings that have been directly constructed. Where an application requests an ASN.1 structure to be printed, and where that ASN.1 structure contains ASN1_STRINGs that have been directly constructed by the application without NUL terminating the \"data\" field, then a read buffer overrun can occur. The same thing can also occur during name constraints processing of certificates (for example if a certificate has been directly constructed by the application instead of loading it via the OpenSSL parsing functions, and the certificate contains non NUL terminated ASN1_STRING structures). It can also occur in the X509_get1_email(), X509_REQ_get1_email() and X509_get1_ocsp() functions. If a malicious actor can cause an application to directly construct an ASN1_STRING and then process it through one of the affected OpenSSL functions then this issue could be hit. This might result in a crash (causing a Denial of Service attack). It could also result in the disclosure of private memory contents (such as private keys, or sensitive plaintext). Fixed in OpenSSL 1.1.1l (Affected 1.1.1-1.1.1k). 
Fixed in OpenSSL 1.0.2za (Affected 1.0.2-1.0.2y)." + }, + "properties": { + "precision": "very-high", + "security-severity": "7.4", + "tags": [ + "vulnerability", + "security", + "HIGH" + ] + } + }, + { + "id": "CVE-2020-1971", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: EDIPARTYNAME NULL pointer de-reference" + }, + "fullDescription": { + "text": "The X.509 GeneralName type is a generic type for representing different types of names. One of those name types is known as EDIPartyName. OpenSSL provides a function GENERAL_NAME_cmp which compares different instances of a GENERAL_NAME to see if they are equal or not. This function behaves incorrectly when both GENERAL_NAMEs contain an EDIPARTYNAME. A NULL pointer dereference and a crash may occur leading to a possible denial of service attack. OpenSSL itself uses the GENERAL_NAME_cmp function for two purposes: 1) Comparing CRL distribution point names between an available CRL and a CRL distribution point embedded in an X509 certificate 2) When verifying that a timestamp response token signer matches the timestamp authority name (exposed via the API functions TS_RESP_verify_response and TS_RESP_verify_token) If an attacker can control both items being compared then that attacker could trigger a crash. For example if the attacker can trick a client or server into checking a malicious certificate against a malicious CRL then this may occur. Note that some applications automatically download CRLs based on a URL embedded in a certificate. This checking happens prior to the signatures on the certificate and CRL being verified. OpenSSL's s_server, s_client and verify tools have support for the "-crl_download" option which implements automatic CRL downloading and this attack has been demonstrated to work against those tools. Note that an unrelated bug means that affected versions of OpenSSL cannot parse or construct correct encodings of EDIPARTYNAME. 
However it is possible to construct a malformed EDIPARTYNAME that OpenSSL's parser will accept and hence trigger this attack. All OpenSSL 1.1.1 and 1.0.2 versions are affected by this issue. Other OpenSSL releases are out of support and have not been checked. Fixed in OpenSSL 1.1.1i (Affected 1.1.1-1.1.1h). Fixed in OpenSSL 1.0.2x (Affected 1.0.2-1.0.2w)." + }, + "defaultConfiguration": { + "level": "warning" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2020-1971", + "help": { + "text": "Vulnerability CVE-2020-1971\nSeverity: MEDIUM\nPackage: libssl1.1\nFixed Version: 1.1.1i-r0\nLink: [CVE-2020-1971](https://avd.aquasec.com/nvd/cve-2020-1971)\nThe X.509 GeneralName type is a generic type for representing different types of names. One of those name types is known as EDIPartyName. OpenSSL provides a function GENERAL_NAME_cmp which compares different instances of a GENERAL_NAME to see if they are equal or not. This function behaves incorrectly when both GENERAL_NAMEs contain an EDIPARTYNAME. A NULL pointer dereference and a crash may occur leading to a possible denial of service attack. OpenSSL itself uses the GENERAL_NAME_cmp function for two purposes: 1) Comparing CRL distribution point names between an available CRL and a CRL distribution point embedded in an X509 certificate 2) When verifying that a timestamp response token signer matches the timestamp authority name (exposed via the API functions TS_RESP_verify_response and TS_RESP_verify_token) If an attacker can control both items being compared then that attacker could trigger a crash. For example if the attacker can trick a client or server into checking a malicious certificate against a malicious CRL then this may occur. Note that some applications automatically download CRLs based on a URL embedded in a certificate. This checking happens prior to the signatures on the certificate and CRL being verified. 
OpenSSL's s_server, s_client and verify tools have support for the \"-crl_download\" option which implements automatic CRL downloading and this attack has been demonstrated to work against those tools. Note that an unrelated bug means that affected versions of OpenSSL cannot parse or construct correct encodings of EDIPARTYNAME. However it is possible to construct a malformed EDIPARTYNAME that OpenSSL's parser will accept and hence trigger this attack. All OpenSSL 1.1.1 and 1.0.2 versions are affected by this issue. Other OpenSSL releases are out of support and have not been checked. Fixed in OpenSSL 1.1.1i (Affected 1.1.1-1.1.1h). Fixed in OpenSSL 1.0.2x (Affected 1.0.2-1.0.2w).", + "markdown": "**Vulnerability CVE-2020-1971**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|MEDIUM|libssl1.1|1.1.1i-r0|[CVE-2020-1971](https://avd.aquasec.com/nvd/cve-2020-1971)|\n\nThe X.509 GeneralName type is a generic type for representing different types of names. One of those name types is known as EDIPartyName. OpenSSL provides a function GENERAL_NAME_cmp which compares different instances of a GENERAL_NAME to see if they are equal or not. This function behaves incorrectly when both GENERAL_NAMEs contain an EDIPARTYNAME. A NULL pointer dereference and a crash may occur leading to a possible denial of service attack. OpenSSL itself uses the GENERAL_NAME_cmp function for two purposes: 1) Comparing CRL distribution point names between an available CRL and a CRL distribution point embedded in an X509 certificate 2) When verifying that a timestamp response token signer matches the timestamp authority name (exposed via the API functions TS_RESP_verify_response and TS_RESP_verify_token) If an attacker can control both items being compared then that attacker could trigger a crash. For example if the attacker can trick a client or server into checking a malicious certificate against a malicious CRL then this may occur. 
Note that some applications automatically download CRLs based on a URL embedded in a certificate. This checking happens prior to the signatures on the certificate and CRL being verified. OpenSSL's s_server, s_client and verify tools have support for the \"-crl_download\" option which implements automatic CRL downloading and this attack has been demonstrated to work against those tools. Note that an unrelated bug means that affected versions of OpenSSL cannot parse or construct correct encodings of EDIPARTYNAME. However it is possible to construct a malformed EDIPARTYNAME that OpenSSL's parser will accept and hence trigger this attack. All OpenSSL 1.1.1 and 1.0.2 versions are affected by this issue. Other OpenSSL releases are out of support and have not been checked. Fixed in OpenSSL 1.1.1i (Affected 1.1.1-1.1.1h). Fixed in OpenSSL 1.0.2x (Affected 1.0.2-1.0.2w)." + }, + "properties": { + "precision": "very-high", + "security-severity": "5.9", + "tags": [ + "vulnerability", + "security", + "MEDIUM" + ] + } + }, + { + "id": "CVE-2021-23841", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: NULL pointer dereference in X509_issuer_and_serial_hash()" + }, + "fullDescription": { + "text": "The OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. 
Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x)." + }, + "defaultConfiguration": { + "level": "warning" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-23841", + "help": { + "text": "Vulnerability CVE-2021-23841\nSeverity: MEDIUM\nPackage: libssl1.1\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23841](https://avd.aquasec.com/nvd/cve-2021-23841)\nThe OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). 
Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x).", + "markdown": "**Vulnerability CVE-2021-23841**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|MEDIUM|libssl1.1|1.1.1j-r0|[CVE-2021-23841](https://avd.aquasec.com/nvd/cve-2021-23841)|\n\nThe OpenSSL public API function X509_issuer_and_serial_hash() attempts to create a unique hash value based on the issuer and serial number data contained within an X509 certificate. However it fails to correctly handle any errors that may occur while parsing the issuer field (which might occur if the issuer field is maliciously constructed). This may subsequently result in a NULL pointer deref and a crash leading to a potential denial of service attack. The function X509_issuer_and_serial_hash() is never directly called by OpenSSL itself so applications are only vulnerable if they use this function directly and they use it on certificates that may have been obtained from untrusted sources. OpenSSL versions 1.1.1i and below are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1j. OpenSSL versions 1.0.2x and below are affected by this issue. However OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.1.1j (Affected 1.1.1-1.1.1i). Fixed in OpenSSL 1.0.2y (Affected 1.0.2-1.0.2x)." + }, + "properties": { + "precision": "very-high", + "security-severity": "5.9", + "tags": [ + "vulnerability", + "security", + "MEDIUM" + ] + } + }, + { + "id": "CVE-2021-3449", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: NULL pointer dereference in signature_algorithms processing" + }, + "fullDescription": { + "text": "An OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. 
If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. Fixed in OpenSSL 1.1.1k (Affected 1.1.1-1.1.1j)." + }, + "defaultConfiguration": { + "level": "warning" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-3449", + "help": { + "text": "Vulnerability CVE-2021-3449\nSeverity: MEDIUM\nPackage: libssl1.1\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3449](https://avd.aquasec.com/nvd/cve-2021-3449)\nAn OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. 
Fixed in OpenSSL 1.1.1k (Affected 1.1.1-1.1.1j).", + "markdown": "**Vulnerability CVE-2021-3449**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|MEDIUM|libssl1.1|1.1.1k-r0|[CVE-2021-3449](https://avd.aquasec.com/nvd/cve-2021-3449)|\n\nAn OpenSSL TLS server may crash if sent a maliciously crafted renegotiation ClientHello message from a client. If a TLSv1.2 renegotiation ClientHello omits the signature_algorithms extension (where it was present in the initial ClientHello), but includes a signature_algorithms_cert extension then a NULL pointer dereference will result, leading to a crash and a denial of service attack. A server is only vulnerable if it has TLSv1.2 and renegotiation enabled (which is the default configuration). OpenSSL TLS clients are not impacted by this issue. All OpenSSL 1.1.1 versions are affected by this issue. Users of these versions should upgrade to OpenSSL 1.1.1k. OpenSSL 1.0.2 is not impacted by this issue. Fixed in OpenSSL 1.1.1k (Affected 1.1.1-1.1.1j)." + }, + "properties": { + "precision": "very-high", + "security-severity": "5.9", + "tags": [ + "vulnerability", + "security", + "MEDIUM" + ] + } + }, + { + "id": "CVE-2021-23839", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "openssl: incorrect SSLv2 rollback protection" + }, + "fullDescription": { + "text": "OpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. 
both client and server support greater than SSLv2, and yet this is the version that is being requested). The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.0.2y (Affected 1.0.2s-1.0.2x)." 
+ }, + "defaultConfiguration": { + "level": "note" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2021-23839", + "help": { + "text": "Vulnerability CVE-2021-23839\nSeverity: LOW\nPackage: libssl1.1\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23839](https://avd.aquasec.com/nvd/cve-2021-23839)\nOpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. both client and server support greater than SSLv2, and yet this is the version that is being requested). The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. 
Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.0.2y (Affected 1.0.2s-1.0.2x).", + "markdown": "**Vulnerability CVE-2021-23839**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|LOW|libssl1.1|1.1.1j-r0|[CVE-2021-23839](https://avd.aquasec.com/nvd/cve-2021-23839)|\n\nOpenSSL 1.0.2 supports SSLv2. If a client attempts to negotiate SSLv2 with a server that is configured to support both SSLv2 and more recent SSL and TLS versions then a check is made for a version rollback attack when unpadding an RSA signature. Clients that support SSL or TLS versions greater than SSLv2 are supposed to use a special form of padding. A server that supports greater than SSLv2 is supposed to reject connection attempts from a client where this special form of padding is present, because this indicates that a version rollback has occurred (i.e. both client and server support greater than SSLv2, and yet this is the version that is being requested). The implementation of this padding check inverted the logic so that the connection attempt is accepted if the padding is present, and rejected if it is absent. This means that such as server will accept a connection if a version rollback attack has occurred. Further the server will erroneously reject a connection if a normal SSLv2 connection attempt is made. Only OpenSSL 1.0.2 servers from version 1.0.2s to 1.0.2x are affected by this issue. 
In order to be vulnerable a 1.0.2 server must: 1) have configured SSLv2 support at compile time (this is off by default), 2) have configured SSLv2 support at runtime (this is off by default), 3) have configured SSLv2 ciphersuites (these are not in the default ciphersuite list) OpenSSL 1.1.1 does not have SSLv2 support and therefore is not vulnerable to this issue. The underlying error is in the implementation of the RSA_padding_check_SSLv23() function. This also affects the RSA_SSLV23_PADDING padding mode used by various other functions. Although 1.1.1 does not support SSLv2 the RSA_padding_check_SSLv23() function still exists, as does the RSA_SSLV23_PADDING padding mode. Applications that directly call that function or use that padding mode will encounter this issue. However since there is no support for the SSLv2 protocol in 1.1.1 this is considered a bug and not a security issue in that version. OpenSSL 1.0.2 is out of support and no longer receiving public updates. Premium support customers of OpenSSL 1.0.2 should upgrade to 1.0.2y. Other users should upgrade to 1.1.1j. Fixed in OpenSSL 1.0.2y (Affected 1.0.2s-1.0.2x)." + }, + "properties": { + "precision": "very-high", + "security-severity": "3.7", + "tags": [ + "vulnerability", + "security", + "LOW" + ] + } + }, + { + "id": "CVE-2020-28928", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "In musl libc through 1.2.1, wcsnrtombs mishandles particular combinati ..." + }, + "fullDescription": { + "text": "In musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow)." 
+ }, + "defaultConfiguration": { + "level": "warning" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2020-28928", + "help": { + "text": "Vulnerability CVE-2020-28928\nSeverity: MEDIUM\nPackage: musl-utils\nFixed Version: 1.1.24-r3\nLink: [CVE-2020-28928](https://avd.aquasec.com/nvd/cve-2020-28928)\nIn musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow).", + "markdown": "**Vulnerability CVE-2020-28928**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|MEDIUM|musl-utils|1.1.24-r3|[CVE-2020-28928](https://avd.aquasec.com/nvd/cve-2020-28928)|\n\nIn musl libc through 1.2.1, wcsnrtombs mishandles particular combinations of destination buffer size and source character limit, as demonstrated by an invalid write access (buffer overflow)." + }, + "properties": { + "precision": "very-high", + "security-severity": "5.5", + "tags": [ + "vulnerability", + "security", + "MEDIUM" + ] + } + }, + { + "id": "CVE-2022-37434", + "name": "OsPackageVulnerability", + "shortDescription": { + "text": "zlib: heap-based buffer over-read and overflow in inflate() in inflate.c via a large gzip header extra field" + }, + "fullDescription": { + "text": "zlib through 1.2.12 has a heap-based buffer over-read or buffer overflow in inflate in inflate.c via a large gzip header extra field. NOTE: only applications that call inflateGetHeader are affected. Some common applications bundle the affected zlib source code but may be unable to call inflateGetHeader (e.g., see the nodejs/node reference)." 
+ }, + "defaultConfiguration": { + "level": "error" + }, + "helpUri": "https://avd.aquasec.com/nvd/cve-2022-37434", + "help": { + "text": "Vulnerability CVE-2022-37434\nSeverity: CRITICAL\nPackage: zlib\nFixed Version: 1.2.11-r4\nLink: [CVE-2022-37434](https://avd.aquasec.com/nvd/cve-2022-37434)\nzlib through 1.2.12 has a heap-based buffer over-read or buffer overflow in inflate in inflate.c via a large gzip header extra field. NOTE: only applications that call inflateGetHeader are affected. Some common applications bundle the affected zlib source code but may be unable to call inflateGetHeader (e.g., see the nodejs/node reference).", + "markdown": "**Vulnerability CVE-2022-37434**\n| Severity | Package | Fixed Version | Link |\n| --- | --- | --- | --- |\n|CRITICAL|zlib|1.2.11-r4|[CVE-2022-37434](https://avd.aquasec.com/nvd/cve-2022-37434)|\n\nzlib through 1.2.12 has a heap-based buffer over-read or buffer overflow in inflate in inflate.c via a large gzip header extra field. NOTE: only applications that call inflateGetHeader are affected. Some common applications bundle the affected zlib source code but may be unable to call inflateGetHeader (e.g., see the nodejs/node reference)." 
+ }, + "properties": { + "precision": "very-high", + "security-severity": "9.8", + "tags": [ + "vulnerability", + "security", + "CRITICAL" + ] + } + } + ], + "version": "0.35.0" + } + }, + "results": [ + { + "ruleId": "CVE-2021-36159", + "ruleIndex": 0, + "level": "error", + "message": { + "text": "Package: apk-tools\nInstalled Version: 2.10.4-r3\nVulnerability CVE-2021-36159\nSeverity: CRITICAL\nFixed Version: 2.10.7-r0\nLink: [CVE-2021-36159](https://avd.aquasec.com/nvd/cve-2021-36159)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: apk-tools@2.10.4-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-30139", + "ruleIndex": 1, + "level": "error", + "message": { + "text": "Package: apk-tools\nInstalled Version: 2.10.4-r3\nVulnerability CVE-2021-30139\nSeverity: HIGH\nFixed Version: 2.10.6-r0\nLink: [CVE-2021-30139](https://avd.aquasec.com/nvd/cve-2021-30139)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: apk-tools@2.10.4-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-28831", + "ruleIndex": 2, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-28831\nSeverity: HIGH\nFixed Version: 1.31.1-r10\nLink: [CVE-2021-28831](https://avd.aquasec.com/nvd/cve-2021-28831)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42378", + 
"ruleIndex": 3, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42378\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42378](https://avd.aquasec.com/nvd/cve-2021-42378)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42379", + "ruleIndex": 4, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42379\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42379](https://avd.aquasec.com/nvd/cve-2021-42379)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42380", + "ruleIndex": 5, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42380\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42380](https://avd.aquasec.com/nvd/cve-2021-42380)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42381", + "ruleIndex": 6, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42381\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42381](https://avd.aquasec.com/nvd/cve-2021-42381)" 
+ }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42382", + "ruleIndex": 7, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42382\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42382](https://avd.aquasec.com/nvd/cve-2021-42382)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42383", + "ruleIndex": 8, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42383\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42383](https://avd.aquasec.com/nvd/cve-2021-42383)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42384", + "ruleIndex": 9, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42384\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42384](https://avd.aquasec.com/nvd/cve-2021-42384)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": 
"helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42385", + "ruleIndex": 10, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42385\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42385](https://avd.aquasec.com/nvd/cve-2021-42385)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42386", + "ruleIndex": 11, + "level": "error", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42386\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42386](https://avd.aquasec.com/nvd/cve-2021-42386)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42374", + "ruleIndex": 12, + "level": "warning", + "message": { + "text": "Package: busybox\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42374\nSeverity: MEDIUM\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42374](https://avd.aquasec.com/nvd/cve-2021-42374)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: busybox@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-3711", + "ruleIndex": 13, + "level": "error", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3711\nSeverity: 
CRITICAL\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3711](https://avd.aquasec.com/nvd/cve-2021-3711)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2020-1967", + "ruleIndex": 14, + "level": "error", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2020-1967\nSeverity: HIGH\nFixed Version: 1.1.1g-r0\nLink: [CVE-2020-1967](https://avd.aquasec.com/nvd/cve-2020-1967)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23840", + "ruleIndex": 15, + "level": "error", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23840\nSeverity: HIGH\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23840](https://avd.aquasec.com/nvd/cve-2021-23840)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3450", + "ruleIndex": 16, + "level": "error", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3450\nSeverity: HIGH\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3450](https://avd.aquasec.com/nvd/cve-2021-3450)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + 
}, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3712", + "ruleIndex": 17, + "level": "error", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3712\nSeverity: HIGH\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3712](https://avd.aquasec.com/nvd/cve-2021-3712)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2020-1971", + "ruleIndex": 18, + "level": "warning", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2020-1971\nSeverity: MEDIUM\nFixed Version: 1.1.1i-r0\nLink: [CVE-2020-1971](https://avd.aquasec.com/nvd/cve-2020-1971)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23841", + "ruleIndex": 19, + "level": "warning", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23841\nSeverity: MEDIUM\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23841](https://avd.aquasec.com/nvd/cve-2021-23841)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3449", + 
"ruleIndex": 20, + "level": "warning", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3449\nSeverity: MEDIUM\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3449](https://avd.aquasec.com/nvd/cve-2021-3449)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23839", + "ruleIndex": 21, + "level": "note", + "message": { + "text": "Package: libcrypto1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23839\nSeverity: LOW\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23839](https://avd.aquasec.com/nvd/cve-2021-23839)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libcrypto1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3711", + "ruleIndex": 13, + "level": "error", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3711\nSeverity: CRITICAL\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3711](https://avd.aquasec.com/nvd/cve-2021-3711)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2020-1967", + "ruleIndex": 14, + "level": "error", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2020-1967\nSeverity: HIGH\nFixed Version: 1.1.1g-r0\nLink: 
[CVE-2020-1967](https://avd.aquasec.com/nvd/cve-2020-1967)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23840", + "ruleIndex": 15, + "level": "error", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23840\nSeverity: HIGH\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23840](https://avd.aquasec.com/nvd/cve-2021-23840)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3450", + "ruleIndex": 16, + "level": "error", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3450\nSeverity: HIGH\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3450](https://avd.aquasec.com/nvd/cve-2021-3450)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3712", + "ruleIndex": 17, + "level": "error", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3712\nSeverity: HIGH\nFixed Version: 1.1.1l-r0\nLink: [CVE-2021-3712](https://avd.aquasec.com/nvd/cve-2021-3712)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + 
"endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2020-1971", + "ruleIndex": 18, + "level": "warning", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2020-1971\nSeverity: MEDIUM\nFixed Version: 1.1.1i-r0\nLink: [CVE-2020-1971](https://avd.aquasec.com/nvd/cve-2020-1971)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23841", + "ruleIndex": 19, + "level": "warning", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23841\nSeverity: MEDIUM\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23841](https://avd.aquasec.com/nvd/cve-2021-23841)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-3449", + "ruleIndex": 20, + "level": "warning", + "message": { + "text": "Package: libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-3449\nSeverity: MEDIUM\nFixed Version: 1.1.1k-r0\nLink: [CVE-2021-3449](https://avd.aquasec.com/nvd/cve-2021-3449)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2021-23839", + "ruleIndex": 21, + "level": "note", + "message": { + "text": "Package: 
libssl1.1\nInstalled Version: 1.1.1d-r3\nVulnerability CVE-2021-23839\nSeverity: LOW\nFixed Version: 1.1.1j-r0\nLink: [CVE-2021-23839](https://avd.aquasec.com/nvd/cve-2021-23839)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: libssl1.1@1.1.1d-r3" + } + } + ] + }, + { + "ruleId": "CVE-2020-28928", + "ruleIndex": 22, + "level": "warning", + "message": { + "text": "Package: musl\nInstalled Version: 1.1.24-r0\nVulnerability CVE-2020-28928\nSeverity: MEDIUM\nFixed Version: 1.1.24-r3\nLink: [CVE-2020-28928](https://avd.aquasec.com/nvd/cve-2020-28928)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: musl@1.1.24-r0" + } + } + ] + }, + { + "ruleId": "CVE-2020-28928", + "ruleIndex": 22, + "level": "warning", + "message": { + "text": "Package: musl-utils\nInstalled Version: 1.1.24-r0\nVulnerability CVE-2020-28928\nSeverity: MEDIUM\nFixed Version: 1.1.24-r3\nLink: [CVE-2020-28928](https://avd.aquasec.com/nvd/cve-2020-28928)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: musl-utils@1.1.24-r0" + } + } + ] + }, + { + "ruleId": "CVE-2021-28831", + "ruleIndex": 2, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-28831\nSeverity: HIGH\nFixed Version: 1.31.1-r10\nLink: [CVE-2021-28831](https://avd.aquasec.com/nvd/cve-2021-28831)" + }, + "locations": [ + { + "physicalLocation": { + 
"artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42378", + "ruleIndex": 3, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42378\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42378](https://avd.aquasec.com/nvd/cve-2021-42378)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42379", + "ruleIndex": 4, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42379\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42379](https://avd.aquasec.com/nvd/cve-2021-42379)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42380", + "ruleIndex": 5, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42380\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42380](https://avd.aquasec.com/nvd/cve-2021-42380)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + 
} + } + ] + }, + { + "ruleId": "CVE-2021-42381", + "ruleIndex": 6, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42381\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42381](https://avd.aquasec.com/nvd/cve-2021-42381)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42382", + "ruleIndex": 7, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42382\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42382](https://avd.aquasec.com/nvd/cve-2021-42382)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42383", + "ruleIndex": 8, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42383\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42383](https://avd.aquasec.com/nvd/cve-2021-42383)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42384", + "ruleIndex": 9, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42384\nSeverity: HIGH\nFixed Version: 
1.31.1-r11\nLink: [CVE-2021-42384](https://avd.aquasec.com/nvd/cve-2021-42384)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42385", + "ruleIndex": 10, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42385\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42385](https://avd.aquasec.com/nvd/cve-2021-42385)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42386", + "ruleIndex": 11, + "level": "error", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42386\nSeverity: HIGH\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42386](https://avd.aquasec.com/nvd/cve-2021-42386)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2021-42374", + "ruleIndex": 12, + "level": "warning", + "message": { + "text": "Package: ssl_client\nInstalled Version: 1.31.1-r9\nVulnerability CVE-2021-42374\nSeverity: MEDIUM\nFixed Version: 1.31.1-r11\nLink: [CVE-2021-42374](https://avd.aquasec.com/nvd/cve-2021-42374)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + 
"startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: ssl_client@1.31.1-r9" + } + } + ] + }, + { + "ruleId": "CVE-2022-37434", + "ruleIndex": 23, + "level": "error", + "message": { + "text": "Package: zlib\nInstalled Version: 1.2.11-r3\nVulnerability CVE-2022-37434\nSeverity: CRITICAL\nFixed Version: 1.2.11-r4\nLink: [CVE-2022-37434](https://avd.aquasec.com/nvd/cve-2022-37434)" + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "helloworld", + "uriBaseId": "ROOTPATH" + }, + "region": { + "startLine": 1, + "startColumn": 1, + "endLine": 1, + "endColumn": 1 + } + }, + "message": { + "text": "helloworld: zlib@1.2.11-r3" + } + } + ] + } + ], + "columnKind": "utf16CodeUnits", + "originalUriBaseIds": { + "ROOTPATH": { + "uri": "file:///" + } + } + } + ] +} \ No newline at end of file diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go new file mode 100644 index 000000000..f0b0d6e4b --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -0,0 +1,400 @@ +/* +Copyright The Ratify Authors. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package main + +import ( + "context" + "embed" + "encoding/json" + "fmt" + "regexp" + "strings" + "time" + + "github.com/deislabs/ratify/pkg/common" + "github.com/deislabs/ratify/pkg/ocispecs" + "github.com/deislabs/ratify/pkg/referrerstore" + _ "github.com/deislabs/ratify/pkg/referrerstore/oras" + "github.com/deislabs/ratify/pkg/verifier" + "github.com/deislabs/ratify/pkg/verifier/plugin/skel" + "github.com/deislabs/ratify/plugins/verifier/vulnerabilityreport/schemavalidation" + imagespec "github.com/opencontainers/image-spec/specs-go/v1" + "github.com/owenrumney/go-sarif/v2/sarif" +) + +//go:embed schemavalidation/schemas +var embeddedFS embed.FS + +const ( + SarifMediaType string = "application/sarif+json" + SarifOfflineFilePath string = "schemavalidation/schemas/sarif-2.1.0.json" + TrivyScannerName string = "trivy" + GrypeScannerName string = "grype" + CreatedAnnotation string = imagespec.AnnotationCreated + SeverityRegex = `Severity:\s*(\w+)` +) + +type PluginConfig struct { + Name string `json:"name"` + SchemaURL string `json:"schemaURL,omitempty"` + MaximumAge string `json:"maximumAge,omitempty"` + DisallowedSeverity []string `json:"disallowedSeverity,omitempty"` + Passthrough bool `json:"passthrough,omitempty"` + DenylistCVEs []string `json:"denylistCVEs,omitempty"` +} + +type PluginInputConfig struct { + Config PluginConfig `json:"config"` +} + +func main() { + skel.PluginMain("vulnerabilityreport", "1.0.0", VerifyReference, []string{"1.0.0"}) +} + +func parseInput(stdin []byte) (*PluginConfig, error) { + conf := PluginInputConfig{} + + if err := json.Unmarshal(stdin, &conf); err != nil { + return nil, fmt.Errorf("failed to parse stdin for the input: %w", err) + } + + return &conf.Config, nil +} + +func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, referenceDescriptor ocispecs.ReferenceDescriptor, referrerStore referrerstore.ReferrerStore) (*verifier.VerifierResult, error) { + input, err := parseInput(args.StdinData) + if 
err != nil { + return nil, err + } + + // check report is newer than allowed maximum age + if input.MaximumAge != "" { + ok, created, err := validateMaximumAge(input.MaximumAge, referenceDescriptor) + if err != nil { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: error validating maximum age:[%v]", err.Error()), + }, nil + } + if !ok { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: report is older than maximum age:[%s]", input.MaximumAge), + Extensions: map[string]interface{}{ + CreatedAnnotation: created, + }, + }, nil + } + } + + ctx := context.Background() + + referenceManifest, err := referrerStore.GetReferenceManifest(ctx, subjectReference, referenceDescriptor) + if err != nil { + return nil, fmt.Errorf("error fetching reference manifest for subject: %s reference descriptor: %v", subjectReference, referenceDescriptor.Descriptor) + } + + if len(referenceManifest.Blobs) == 0 { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: no blobs found for referrer %s@%s", subjectReference.Path, referenceDescriptor.Digest.String()), + }, nil + } + + blobDesc := referenceManifest.Blobs[0] + refBlob, err := referrerStore.GetBlobContent(ctx, subjectReference, blobDesc.Digest) + if err != nil { + return nil, fmt.Errorf("error fetching blob for subject:[%s] digest:[%s]", subjectReference, blobDesc.Digest) + } + + // skip all validation if passthrough is enabled + if input.Passthrough { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: true, + Message: "vulnerability report validation skipped", + Extensions: map[string]interface{}{ + "passthrough": true, + "report": string(refBlob), + }, + }, nil + } + + // validate json schema + err = verifyJSONSchema(blobDesc.MediaType, refBlob, 
input.SchemaURL) + if err != nil { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],media type:[%s],parse errors:[%v]", blobDesc.Digest, blobDesc.MediaType, err.Error()), + }, nil + } + + if blobDesc.MediaType == SarifMediaType { + return processSarifReport(input, input.Name, refBlob) + } + + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: true, + Message: "vulnerability report validation succeeded", + }, nil +} + +// verifyJSONSchema validates the json schema of the report +// if schemaURL is empty, it will use the offline schema embedded in binary +// currently only support for sarif reports +func verifyJSONSchema(mediaType string, refBlob []byte, schemaURL string) error { + if mediaType == SarifMediaType { + // decide online or offline schema type + if schemaURL != "" { + return schemavalidation.Validate(schemaURL, refBlob) + } + schemaFileBytes, err := embeddedFS.ReadFile(SarifOfflineFilePath) + if err != nil { + return fmt.Errorf("error reading offline schema file:[%s]", SarifOfflineFilePath) + } + return schemavalidation.ValidateAgainstOfflineSchema(schemaFileBytes, refBlob) + } + return fmt.Errorf("media type not configured for plugin:[%s]", mediaType) +} + +// processSarifReport processes the sarif report running individual validations as configured +func processSarifReport(input *PluginConfig, verifierName string, blob []byte) (*verifier.VerifierResult, error) { + sarifReport, err := sarif.FromBytes(blob) + if err != nil { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: error parsing sarif report:[%v]", err.Error()), + }, nil + } + // verify that there is at least one run in the report + if len(sarifReport.Runs) < 1 { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: "vulnerability 
report validation failed: no runs found in sarif report", + }, nil + } + scannerName := strings.ToLower(sarifReport.Runs[0].Tool.Driver.Name) + if len(input.DenylistCVEs) > 0 { + verifierReport, err := verifyDenyListCVEs(input.Name, scannerName, sarifReport, input.DenylistCVEs) + if err != nil { + return nil, err + } + if !verifierReport.IsSuccess { + return verifierReport, nil + } + } + if len(input.DisallowedSeverity) > 0 { + verifierReport, err := verifyDisallowedSeverities(input.Name, scannerName, sarifReport, input.DisallowedSeverity) + if err != nil { + return nil, err + } + if !verifierReport.IsSuccess { + return verifierReport, nil + } + } + + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: true, + Message: "vulnerability report validation succeeded", + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil +} + +// verifyDenyListCVEs verifies that the report does not contain any deny-listed CVEs +func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sarif.Report, denylistCVEs []string) (*verifier.VerifierResult, error) { + denylistCVESet := make(map[string]bool) + denylistViolations := []string{} + + // convert denylistCVEs to a set for easy lookup + for _, cve := range denylistCVEs { + denylistCVESet[strings.ToLower(cve)] = false + } + + // iterate over the results and check which cves are deny-listed + for _, result := range sarifReport.Runs[0].Results { + if result.RuleID == nil || *result.RuleID == "" { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", result), + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil + } + ruleIDLower := strings.ToLower(*result.RuleID) + if _, ok := denylistCVESet[ruleIDLower]; ok { + denylistCVESet[ruleIDLower] = true + } + } + + // iterate over the denylistCVESet and add the deny-listed cves to the list of 
violations + for cve, isDenylisted := range denylistCVESet { + if isDenylisted { + denylistViolations = append(denylistViolations, cve) + } + } + + if len(denylistViolations) > 0 { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Extensions: map[string]interface{}{ + "scanner": scannerName, + "denylistCVEs": denylistViolations, + }, + Message: "vulnerability report validation failed", + }, nil + } + + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: true, + Message: "vulnerability report validation succeeded", + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil +} + +// verifyDisallowedSeverities verifies that the report does not contain any disallowed severity levels +func verifyDisallowedSeverities(verifierName string, scannerName string, sarifReport *sarif.Report, disallowedSeverity []string) (*verifier.VerifierResult, error) { + ruleMap := make(map[string]*sarif.ReportingDescriptor) + violatingRules := make([]sarif.ReportingDescriptor, 0) + // create a map of rule id to rule for easy lookup + for _, rule := range sarifReport.Runs[0].Tool.Driver.Rules { + ruleMap[rule.ID] = rule + } + // iterate over the results and check if the severity is disallowed + for _, result := range sarifReport.Runs[0].Results { + if result.RuleID == nil || *result.RuleID == "" { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", result), + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil + } + rule, ok := ruleMap[*result.RuleID] + if !ok { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: rule not found for result:[%v]", result), + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil + } + severity, err := extractSeverity(scannerName, *rule) + if 
err != nil { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: error extracting severity:[%v]", err.Error()), + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil + } + // check if the severity is disallowed and add it to the list of violating rules + for _, disallowed := range disallowedSeverity { + if strings.EqualFold(severity, disallowed) { + violatingRules = append(violatingRules, *rule) + } + } + } + // if there are violating rules, return them as custom extension field + if len(violatingRules) > 0 { + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: false, + Extensions: map[string]interface{}{ + "scanner": scannerName, + "severityViolations": violatingRules, + }, + Message: "vulnerability report validation failed", + }, nil + } + return &verifier.VerifierResult{ + Name: verifierName, + IsSuccess: true, + Message: "vulnerability report validation succeeded", + Extensions: map[string]interface{}{ + "scanner": scannerName, + }, + }, nil +} + +// extractSeverity extracts the severity from the rule help text using regex +// relies on the help text being in the format "Severity: " +// currently only supports trivy and grype scanners +func extractSeverity(scannerName string, rule sarif.ReportingDescriptor) (string, error) { + if scannerName == TrivyScannerName || scannerName == GrypeScannerName { + if rule.Help == nil || rule.Help.Text == nil || *rule.Help.Text == "" { + return "", fmt.Errorf("help text not found for rule:[%s]", rule.ID) + } + re := regexp.MustCompile(SeverityRegex) + match := re.FindStringSubmatch(*rule.Help.Text) + if len(match) < 2 { + return "", fmt.Errorf("severity not found in help text:[%s]", *rule.Help.Text) + } + return strings.ToLower(match[1]), nil + } + return "", fmt.Errorf("scanner not supported:[%s]", scannerName) +} + +// validateMaximumAge validates that the report is newer than the allowed maximum 
age +// extracts the created timestamp from the descriptor annotations +func validateMaximumAge(maximumAge string, descriptor ocispecs.ReferenceDescriptor) (bool, string, error) { + // check if annotations exist + if descriptor.Annotations == nil { + return false, "", fmt.Errorf("no annotations found for descriptor:[%v]", descriptor) + } + // check if created annotation exists + created, ok := descriptor.Annotations[CreatedAnnotation] + if !ok { + return false, "", fmt.Errorf("created annotation not found for descriptor:[%v]", descriptor) + } + // check if created annotation is a valid timestamp + createdTime, err := time.Parse(time.RFC3339, created) + if err != nil { + return false, "", fmt.Errorf("error parsing created timestamp:[%s]", created) + } + // check if maxium age is a valid duration + duration, err := time.ParseDuration(maximumAge) + if err != nil { + return false, "", fmt.Errorf("error parsing maximum age:[%s]", maximumAge) + } + // check if created timestamp is older than maximum age + if time.Since(createdTime) > duration { + return false, created, nil + } + + return true, created, nil +} diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go new file mode 100644 index 000000000..6b508e12f --- /dev/null +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -0,0 +1,955 @@ +/* +Copyright The Ratify Authors. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package main + +import ( + "fmt" + "testing" + "time" + + "github.com/deislabs/ratify/pkg/common" + "github.com/deislabs/ratify/pkg/ocispecs" + "github.com/deislabs/ratify/pkg/referrerstore/mocks" + "github.com/deislabs/ratify/pkg/verifier/plugin/skel" + "github.com/opencontainers/go-digest" + oci "github.com/opencontainers/image-spec/specs-go/v1" + "github.com/owenrumney/go-sarif/v2/sarif" +) + +const sampleSarifReport string = `{ + "version": "2.1.0", + "$schema": "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", + "runs": [ + { + "tool": { + "driver": { + "name": "grype", + "version": "0.71.0", + "informationUri": "https://github.com/anchore/grype", + "rules": [ + { + "id": "CVE-2022-48174-busybox", + "name": "ApkMatcherExactDirectMatch", + "help": { + "text": "Vulnerability CVE-2022-48174\nSeverity: critical\nPackage: busybox\nVersion: 1.36.1-r0\nFix Version: 1.36.1-r1\nType: apk\nLocation: /lib/apk/db/installed\nData Namespace: alpine:distro:alpine:3.18\nLink: [CVE-2022-48174](http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-48174)", + "markdown": "**Vulnerability CVE-2022-48174**\n| Severity | Package | Version | Fix Version | Type | Location | Data Namespace | Link |\n| --- | --- | --- | --- | --- | --- | --- | --- |\n| critical | busybox | 1.36.1-r0 | 1.36.1-r1 | apk | /lib/apk/db/installed | alpine:distro:alpine:3.18 | [CVE-2022-48174](http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2022-48174) |\n" + }, + "properties": { + "security-severity": "9.8" + } + } + ] + } + }, + "results": [ + { + "ruleId": "CVE-2022-48174-busybox", + "message": { + "text": "The path /lib/apk/db/installed reports busybox at version 1.36.1-r0 which is a vulnerable (apk) package installed in the container" + } + } + ] + } + ] +}` + +// TestVerifyReference tests the VerifyReference function +func TestVerifyReference(t *testing.T) { + manifestDigest := digest.FromString("test_manifest_digest") + blobDigest := digest.FromString("test_blob_digest") + type args 
struct { + stdinData string + referenceManifest ocispecs.ReferenceManifest + blobContent string + } + type want struct { + message string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "invalid stdin data", + args: args{ + stdinData: "invalid", + referenceManifest: ocispecs.ReferenceManifest{}, + blobContent: sampleSarifReport, + }, + want: want{ + err: fmt.Errorf("failed to parse stdin for the input: %s", "invalid character 'i' looking for beginning of value"), + }, + }, + { + name: "invalid max age", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "1d"}}`, + referenceManifest: ocispecs.ReferenceManifest{}, + blobContent: sampleSarifReport, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: error validating maximum age:[%s]", "no annotations found for descriptor:[{{ sha256:b2f67b016d3c646f025099b363b4f83a56a44d067a846be74e8866342c56f216 0 [] map[] [] } }]"), + }, + }, + { + name: "expired max age", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h"}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Add(time.Hour * -30).Format(time.RFC3339), + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: report is older than maximum age:[%s]", "24h"), + }, + }, + { + name: "no blobs in manifest", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h"}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Format(time.RFC3339), + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: no blobs found for referrer %s@%s", "test_subject_path", manifestDigest.String()), + }, + }, + { + name: 
"passthrough enabled", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h", "passthrough": true}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Format(time.RFC3339), + }, + Blobs: []oci.Descriptor{ + { + MediaType: SarifMediaType, + Digest: blobDigest, + }, + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: "vulnerability report validation skipped", + }, + }, + { + name: "invalid json schema", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h"}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Format(time.RFC3339), + }, + Blobs: []oci.Descriptor{ + { + MediaType: SarifMediaType, + Digest: blobDigest, + }, + }, + }, + blobContent: "{}", + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],media type:[%s],parse errors:[%v]", blobDigest, SarifMediaType, "version is required:runs is required:"), + }, + }, + { + name: "valid report", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h"}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Format(time.RFC3339), + }, + Blobs: []oci.Descriptor{ + { + MediaType: SarifMediaType, + Digest: blobDigest, + }, + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: "vulnerability report validation succeeded", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cmdArgs := skel.CmdArgs{ + Version: "1.0.0", + Subject: "test_subject", + StdinData: []byte(tt.args.stdinData), + } + testStore := &mocks.MemoryTestStore{ + Manifests: map[digest.Digest]ocispecs.ReferenceManifest{manifestDigest: tt.args.referenceManifest}, + Blobs: 
map[digest.Digest][]byte{blobDigest: []byte(tt.args.blobContent)}, + } + subjectRef := common.Reference{ + Path: "test_subject_path", + Original: "test_subject", + } + refDesc := ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Digest: manifestDigest, + Annotations: tt.args.referenceManifest.Annotations, + }, + } + verifierResult, err := VerifyReference(&cmdArgs, subjectRef, refDesc, testStore) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("verifyReference() error = %v, wantErr %v", err, tt.want.err) + return + } + if verifierResult != nil && verifierResult.Message != tt.want.message { + t.Errorf("verifyReference() verifier report message = %s, want %s", verifierResult.Message, tt.want.message) + return + } + }) + } +} + +// TestVerifyJSONSchema tests the verifyJSONSchema function +func TestVerifyJSONSchema(t *testing.T) { + type args struct { + mediaType string + refBlobContent string + schemaURL string + } + type want struct { + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "unsupported media type", + args: args{ + mediaType: "unsupported", + }, + want: want{ + err: fmt.Errorf("media type not configured for plugin:[%s]", "unsupported"), + }, + }, + { + name: "online verification success", + args: args{ + mediaType: SarifMediaType, + refBlobContent: sampleSarifReport, + schemaURL: "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", + }, + want: want{ + err: nil, + }, + }, + { + name: "offline verification success", + args: args{ + mediaType: SarifMediaType, + refBlobContent: sampleSarifReport, + }, + want: want{ + err: nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := verifyJSONSchema(tt.args.mediaType, []byte(tt.args.refBlobContent), tt.args.schemaURL) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("verifyJSONSchema() error = %v, wantErr %v", err, tt.want.err) + return + } + }) + } +} + +// TestProcessSarifReport tests 
the processSarifReport function +func TestProcessSarifReport(t *testing.T) { + type args struct { + input PluginConfig + blobContent string + } + type want struct { + message string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "invalid blob", + args: args{ + input: PluginConfig{}, + blobContent: "invalid", + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: error parsing sarif report:[%s]", "invalid character 'i' looking for beginning of value"), + err: nil, + }, + }, + { + name: "no runs in sarif report", + args: args{ + input: PluginConfig{}, + blobContent: `{ + "version": "2.1.0", + "$schema": "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", + "runs": [] +}`, + }, + want: want{ + message: "vulnerability report validation failed: no runs found in sarif report", + err: nil, + }, + }, + { + name: "deny list CVE found", + args: args{ + input: PluginConfig{ + Name: "test_verifier", + DenylistCVEs: []string{"CVE-2022-48174-busybox"}, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: "vulnerability report validation failed", + err: nil, + }, + }, + { + name: "disallowed severity CVE found", + args: args{ + input: PluginConfig{ + Name: "test_verifier", + DenylistCVEs: []string{"CVE-2022-48174"}, + DisallowedSeverity: []string{ + "critical", + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: "vulnerability report validation failed", + err: nil, + }, + }, + { + name: "vulnerability report validation succeeded", + args: args{ + input: PluginConfig{ + Name: "test_verifier", + DenylistCVEs: []string{"CVE-2022-48174"}, + DisallowedSeverity: []string{ + "high", + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: "vulnerability report validation succeeded", + err: nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + verifierReport, err := processSarifReport(&tt.args.input, "sample_verifier", 
[]byte(tt.args.blobContent)) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("processSarifReport() error = %v, wantErr %v", err, tt.want.err) + return + } + if verifierReport.Message != tt.want.message { + t.Errorf("processSarifReport() verifier report message = %s, want %s", verifierReport.Message, tt.want.message) + return + } + }) + } +} + +// TestVerifyDenyListCVEs tests the verifyDenyListCVEs function +func TestVerifyDenyListCVEs(t *testing.T) { + validRuleID := "CVE-2021-1234" + type args struct { + denyListCVEs []string + sarifReport sarif.Report + } + type want struct { + message string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "rule ID does not exist for result", + args: args{ + denyListCVEs: []string{validRuleID}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + }, + }, + }, + }, + Results: []*sarif.Result{ + {}, + }, + }, + }, + }, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", &sarif.Result{}), + err: nil, + }, + }, + { + name: "deny list CVEs found", + args: args{ + denyListCVEs: []string{validRuleID}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &validRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: "vulnerability report validation failed", + err: nil, + }, + }, + { + name: "no deny list CVEs found", + args: args{ + denyListCVEs: []string{"CVE-2021-123456"}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: 
validRuleID, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &validRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: "vulnerability report validation succeeded", + err: nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + verifierReport, err := verifyDenyListCVEs("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.denyListCVEs) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("verifyDenyListCVEs() error = %v, wantErr %v", err, tt.want.err) + return + } + if verifierReport.Message != tt.want.message { + t.Errorf("verifyDenyListCVEs() verifier report message = %s, want %s", verifierReport.Message, tt.want.message) + return + } + }) + } +} + +// TestVerifyDisallowedSeverities tests the verifyDisallowedSeverities function +func TestVerifyDisallowedSeverities(t *testing.T) { + validSeverityText := "Severity: HIGH" + invalidSeverityText := "invalid severity text" + validRuleID := "RULEID" + invalidRuleID := "invalid_rule_id" + type args struct { + disallowedSeverities []string + sarifReport sarif.Report + } + type want struct { + message string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "rule ID does not exist for result", + args: args{ + disallowedSeverities: []string{"high"}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: "scanner_name", + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + Help: &sarif.MultiformatMessageString{ + Text: &validSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + }, + }, + Results: []*sarif.Result{ + {}, + }, + }, + }, + }, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", &sarif.Result{}), + err: nil, + }, + }, + { + name: "rule ID not found for result", + args: args{ + disallowedSeverities: []string{"high"}, + sarifReport: 
sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: "scanner_name", + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + Help: &sarif.MultiformatMessageString{ + Text: &validSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &invalidRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: rule not found for result:[%v]", &sarif.Result{RuleID: &invalidRuleID}), + err: nil, + }, + }, + { + name: "invalid severity extraction", + args: args{ + disallowedSeverities: []string{"high"}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + Help: &sarif.MultiformatMessageString{ + Text: &invalidSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &validRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: error extracting severity:[severity not found in help text:[%s]]", invalidSeverityText), + err: nil, + }, + }, + { + name: "disallowed severities found", + args: args{ + disallowedSeverities: []string{"high"}, + sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + Help: &sarif.MultiformatMessageString{ + Text: &validSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &validRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: "vulnerability report validation failed", + err: nil, + }, + }, + { + name: "vulnerability report validation succeeded", + args: args{ + disallowedSeverities: []string{"critical"}, + 
sarifReport: sarif.Report{ + Runs: []*sarif.Run{ + { + Tool: sarif.Tool{ + Driver: &sarif.ToolComponent{ + Name: TrivyScannerName, + Rules: []*sarif.ReportingDescriptor{ + { + ID: validRuleID, + Help: &sarif.MultiformatMessageString{ + Text: &validSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + }, + }, + Results: []*sarif.Result{ + { + RuleID: &validRuleID, + }, + }, + }, + }, + }, + }, + want: want{ + message: "vulnerability report validation succeeded", + err: nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + verifierReport, err := verifyDisallowedSeverities("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.disallowedSeverities) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("verifyDisallowedSeverities() error = %v, wantErr %v", err, tt.want.err) + return + } + if verifierReport.Message != tt.want.message { + t.Errorf("verifyDisallowedSeverities() verifier report message = %s, want %s", verifierReport.Message, tt.want.message) + return + } + }) + } +} + +// TestExtractSeverity tests the extractSeverity function +func TestExtractSeverity(t *testing.T) { + validSeverityText := "Severity: HIGH" + invalidSeverityText := "invalid severity text" + type args struct { + scannerName string + rule sarif.ReportingDescriptor + } + type want struct { + severity string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "unsupported scanner", + args: args{ + scannerName: "unsupported", + rule: sarif.ReportingDescriptor{}, + }, + want: want{ + severity: "", + err: fmt.Errorf("scanner not supported:[%s]", "unsupported"), + }, + }, + { + name: "no rule help text", + args: args{ + scannerName: TrivyScannerName, + rule: sarif.ReportingDescriptor{ + ID: "RULEID", + }, + }, + want: want{ + severity: "", + err: fmt.Errorf("help text not found for rule:[%s]", "RULEID"), + }, + }, + { + name: "severity not found in help text", + args: args{ + 
scannerName: TrivyScannerName, + rule: sarif.ReportingDescriptor{ + ID: "RULEID", + Help: &sarif.MultiformatMessageString{ + Text: &invalidSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + want: want{ + severity: "", + err: fmt.Errorf("severity not found in help text:[%s]", invalidSeverityText), + }, + }, + { + name: "severity not found in help text", + args: args{ + scannerName: TrivyScannerName, + rule: sarif.ReportingDescriptor{ + ID: "RULEID", + Help: &sarif.MultiformatMessageString{ + Text: &validSeverityText, + PropertyBag: *sarif.NewPropertyBag(), + }, + }, + }, + want: want{ + severity: "high", + err: nil, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + severity, err := extractSeverity(tt.args.scannerName, tt.args.rule) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("extractSeverity() error = %v, wantErr %v", err, tt.want.err) + return + } + if severity != tt.want.severity { + t.Errorf("extractSeverity() severity = %v, want %v", severity, tt.want.severity) + } + }) + } +} + +// TestValidateMaxiumAge tests the validateMaximumAge function +func TestValidateMaxiumAge(t *testing.T) { + timeNow := time.Now().Format(time.RFC3339) + type args struct { + maximumAge string + desc ocispecs.ReferenceDescriptor + } + type want struct { + valid bool + created string + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "valid maximum age, no annotations", + args: args{ + maximumAge: "24h", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{}, + }, + }, + want: want{ + valid: false, + err: fmt.Errorf("no annotations found for descriptor:[%v]", ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{}}), + created: "", + }, + }, + { + name: "valid maximum age, empty annotations", + args: args{ + maximumAge: "24h", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Annotations: map[string]string{}, + }, + }, + }, + want: 
want{ + valid: false, + err: fmt.Errorf("created annotation not found for descriptor:[%v]", ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{Annotations: map[string]string{}}}), + created: "", + }, + }, + { + name: "valid maximum age, invalid created annotation", + args: args{ + maximumAge: "24h", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Annotations: map[string]string{ + "org.opencontainers.image.created": "invalid", + }, + }, + }, + }, + want: want{ + valid: false, + err: fmt.Errorf("error parsing created timestamp:[%s]", "invalid"), + created: "", + }, + }, + { + name: "invalid maximum age, valid created annotation", + args: args{ + maximumAge: "1d", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Annotations: map[string]string{ + "org.opencontainers.image.created": "2021-01-01T00:00:00Z", + }, + }, + }, + }, + want: want{ + valid: false, + err: fmt.Errorf("error parsing maximum age:[%s]", "1d"), + created: "", + }, + }, + { + name: "valid maximum age, valid created annotation, created is older than maximum age", + args: args{ + maximumAge: "24h", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Annotations: map[string]string{ + "org.opencontainers.image.created": "2021-01-05T00:00:00Z", + }, + }, + }, + }, + want: want{ + valid: false, + err: nil, + created: "2021-01-05T00:00:00Z", + }, + }, + { + name: "valid maximum age, valid created annotation, created is newer than maximum age", + args: args{ + maximumAge: "24h", + desc: ocispecs.ReferenceDescriptor{ + Descriptor: oci.Descriptor{ + Annotations: map[string]string{ + "org.opencontainers.image.created": timeNow, + }, + }, + }, + }, + want: want{ + valid: true, + err: nil, + created: timeNow, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + valid, created, err := validateMaximumAge(tt.args.maximumAge, tt.args.desc) + if err != nil && err.Error() != tt.want.err.Error() { + 
t.Errorf("validateMaxiumAge() error = %v, wantErr %v", err, tt.want.err) + return + } + if valid != tt.want.valid { + t.Errorf("validateMaxiumAge() valid = %v, want %v", valid, tt.want.valid) + } + if created != tt.want.created { + t.Errorf("validateMaxiumAge() created = %v, want %v", created, tt.want.created) + } + }) + } +} diff --git a/scripts/azure-ci-test.sh b/scripts/azure-ci-test.sh index 82758c2b9..020d0f7ce 100755 --- a/scripts/azure-ci-test.sh +++ b/scripts/azure-ci-test.sh @@ -40,7 +40,7 @@ REGISTRY="${ACR_NAME}.azurecr.io" build_push_to_acr() { echo "Building and pushing images to ACR" - docker build --progress=plain --no-cache --build-arg build_cosign=true --build-arg build_sbom=true --build-arg build_licensechecker=true --build-arg build_schemavalidator=true -f ./httpserver/Dockerfile -t "${ACR_NAME}.azurecr.io/test/localbuild:${TAG}" . + docker build --progress=plain --no-cache --build-arg build_cosign=true --build-arg build_sbom=true --build-arg build_licensechecker=true --build-arg build_schemavalidator=true --build-arg build_vulnerabilityreport=true -f ./httpserver/Dockerfile -t "${ACR_NAME}.azurecr.io/test/localbuild:${TAG}" . 
docker push "${REGISTRY}/test/localbuild:${TAG}" docker build --progress=plain --no-cache --build-arg KUBE_VERSION=${KUBERNETES_VERSION} --build-arg TARGETOS="linux" --build-arg TARGETARCH="amd64" -f crd.Dockerfile -t "${ACR_NAME}.azurecr.io/test/localbuildcrd:${TAG}" ./charts/ratify/crds diff --git a/test/bats/cli-test.bats b/test/bats/cli-test.bats index 988abb808..2b4865d43 100644 --- a/test/bats/cli-test.bats +++ b/test/bats/cli-test.bats @@ -60,6 +60,11 @@ load helpers assert_cmd_verify_success } +@test "vulnerabilityreport verifier test" { + run bin/ratify verify -c $RATIFY_DIR/vulnerabilityreport_config.json -s $TEST_REGISTRY/vulnerabilityreport:v0 + assert_cmd_verify_success +} + @test "sbom/notary/cosign/licensechecker verifiers test" { run bin/ratify verify -c $RATIFY_DIR/config.json -s $TEST_REGISTRY/all:v0 assert_cmd_verify_success diff --git a/test/bats/plugin-test.bats b/test/bats/plugin-test.bats index 02cdc0564..e45f09c8a 100644 --- a/test/bats/plugin-test.bats +++ b/test/bats/plugin-test.bats @@ -168,6 +168,32 @@ SLEEP_TIME=1 assert_failure } +@test "vulnerabilityreport verifier test" { + teardown() { + echo "cleaning up" + wait_for_process ${WAIT_TIME} ${SLEEP_TIME} 'kubectl delete verifiers.config.ratify.deislabs.io/verifier-vulnerabilityreport --namespace default --ignore-not-found=true' + wait_for_process ${WAIT_TIME} ${SLEEP_TIME} 'kubectl delete pod vulnerabilityreport --namespace default --force --ignore-not-found=true' + wait_for_process ${WAIT_TIME} ${SLEEP_TIME} 'kubectl delete pod vulnerabilityreport2 --namespace default --force --ignore-not-found=true' + } + + run kubectl apply -f ./library/default/template.yaml + assert_success + sleep 5 + run kubectl apply -f ./library/default/samples/constraint.yaml + assert_success + sleep 5 + + run kubectl apply -f ./config/samples/config_v1beta1_verifier_vulnerabilityreport2.yaml + sleep 5 + run kubectl run vulnerabilityreport --namespace default --image=registry:5000/vulnerabilityreport:v0 + 
assert_success + sleep 15 + run kubectl apply -f ./config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml + sleep 5 + run kubectl run vulnerabilityreport2 --namespace default --image=registry:5000/vulnerabilityreport:v0 + assert_failure +} + @test "sbom/notary/cosign/licensechecker/schemavalidator verifiers test" { teardown() { echo "cleaning up" diff --git a/test/bats/tests/config/vulnerabilityreport_config.json b/test/bats/tests/config/vulnerabilityreport_config.json new file mode 100644 index 000000000..9fff4426e --- /dev/null +++ b/test/bats/tests/config/vulnerabilityreport_config.json @@ -0,0 +1,28 @@ +{ + "store": { + "version": "1.0.0", + "plugins": [ + { + "name": "oras", + "useHttp": true + } + ] + }, + "policy": { + "version": "1.0.0", + "plugin": { + "name": "configPolicy" + } + }, + "verifier": { + "version": "1.0.0", + "plugins": [ + { + "name": "vulnerabilityreport", + "artifactTypes": "application/sarif+json", + "maximumAge": "24h", + "denylistCVEs": ["CVE-2021-44228"] + } + ] + } +} \ No newline at end of file From 748e7b4d3cc1534cf021674dc3b3ea72ebc44212 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Mon, 27 Nov 2023 22:42:57 +0000 Subject: [PATCH 02/17] address comments; add timestamp to all reports --- Makefile | 5 +- .../vulnerability_report.go | 120 +++++++++++---- .../vulnerability_report_test.go | 139 +++++++++++------- 3 files changed, 176 insertions(+), 88 deletions(-) diff --git a/Makefile b/Makefile index 6fe062200..5cd1b75fd 100644 --- a/Makefile +++ b/Makefile @@ -51,6 +51,7 @@ REDIS_IMAGE_TAG ?= 7.0-debian-11 CERT_ROTATION_ENABLED ?= false REGO_POLICY_ENABLED ?= false SBOM_TOOL_VERSION ?=v1.2.0 +TRIVY_VERSION ?= 0.47.0 # ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary. 
ENVTEST_K8S_VERSION = 1.24.2 @@ -398,7 +399,7 @@ e2e-schemavalidator-setup: mkdir -p .staging/schemavalidator # Install Trivy - curl -L https://github.com/aquasecurity/trivy/releases/download/v0.35.0/trivy_0.35.0_Linux-64bit.tar.gz --output .staging/schemavalidator/trivy.tar.gz + curl -L https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_Linux-64bit.tar.gz --output .staging/schemavalidator/trivy.tar.gz tar -zxf .staging/schemavalidator/trivy.tar.gz -C .staging/schemavalidator # Build/Push Images @@ -422,7 +423,7 @@ e2e-vulnerabilityreport-setup: mkdir -p .staging/vulnerabilityreport # Install Trivy - curl -L https://github.com/aquasecurity/trivy/releases/download/v0.47.0/trivy_0.47.0_Linux-64bit.tar.gz --output .staging/vulnerabilityreport/trivy.tar.gz + curl -L https://github.com/aquasecurity/trivy/releases/download/v${TRIVY_VERSION}/trivy_${TRIVY_VERSION}_Linux-64bit.tar.gz --output .staging/vulnerabilityreport/trivy.tar.gz tar -zxf .staging/vulnerabilityreport/trivy.tar.gz -C .staging/vulnerabilityreport # Build/Push Image diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index f0b0d6e4b..9ce6d59c0 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -80,14 +80,27 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe return nil, err } + // extract created timestamp from descriptor annotations + createdTime, err := extractCreationTimestamp(referenceDescriptor) + if err != nil { + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("vulnerability report validation failed: error extracting create timestamp annotation:[%v]", err.Error()), + }, nil + } + // check report is newer than allowed maximum age if input.MaximumAge != "" { - ok, created, err := 
validateMaximumAge(input.MaximumAge, referenceDescriptor) + ok, err := validateMaximumAge(input.MaximumAge, createdTime) if err != nil { return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: error validating maximum age:[%v]", err.Error()), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } if !ok { @@ -96,7 +109,7 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: report is older than maximum age:[%s]", input.MaximumAge), Extensions: map[string]interface{}{ - CreatedAnnotation: created, + CreatedAnnotation: createdTime, }, }, nil } @@ -106,7 +119,14 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe referenceManifest, err := referrerStore.GetReferenceManifest(ctx, subjectReference, referenceDescriptor) if err != nil { - return nil, fmt.Errorf("error fetching reference manifest for subject: %s reference descriptor: %v", subjectReference, referenceDescriptor.Descriptor) + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("error fetching reference manifest for subject: %s reference descriptor: %v", subjectReference, referenceDescriptor.Descriptor), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, + }, nil } if len(referenceManifest.Blobs) == 0 { @@ -114,13 +134,23 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe Name: input.Name, IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: no blobs found for referrer %s@%s", subjectReference.Path, referenceDescriptor.Digest.String()), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } blobDesc := referenceManifest.Blobs[0] refBlob, err := referrerStore.GetBlobContent(ctx, subjectReference, 
blobDesc.Digest) if err != nil { - return nil, fmt.Errorf("error fetching blob for subject:[%s] digest:[%s]", subjectReference, blobDesc.Digest) + return &verifier.VerifierResult{ + Name: input.Name, + IsSuccess: false, + Message: fmt.Sprintf("error fetching blob for subject:[%s] digest:[%s]", subjectReference, blobDesc.Digest), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, + }, nil } // skip all validation if passthrough is enabled @@ -130,8 +160,9 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe IsSuccess: true, Message: "vulnerability report validation skipped", Extensions: map[string]interface{}{ - "passthrough": true, - "report": string(refBlob), + CreatedAnnotation: createdTime, + "passthrough": true, + "report": string(refBlob), }, }, nil } @@ -143,17 +174,23 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe Name: input.Name, IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],media type:[%s],parse errors:[%v]", blobDesc.Digest, blobDesc.MediaType, err.Error()), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } if blobDesc.MediaType == SarifMediaType { - return processSarifReport(input, input.Name, refBlob) + return processSarifReport(input, input.Name, refBlob, createdTime) } return &verifier.VerifierResult{ Name: input.Name, IsSuccess: true, Message: "vulnerability report validation succeeded", + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } @@ -176,13 +213,16 @@ func verifyJSONSchema(mediaType string, refBlob []byte, schemaURL string) error } // processSarifReport processes the sarif report running individual validations as configured -func processSarifReport(input *PluginConfig, verifierName string, blob []byte) (*verifier.VerifierResult, error) { +func processSarifReport(input *PluginConfig, verifierName string, 
blob []byte, createdTime time.Time) (*verifier.VerifierResult, error) { sarifReport, err := sarif.FromBytes(blob) if err != nil { return &verifier.VerifierResult{ Name: verifierName, IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: error parsing sarif report:[%v]", err.Error()), + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } // verify that there is at least one run in the report @@ -191,11 +231,14 @@ func processSarifReport(input *PluginConfig, verifierName string, blob []byte) ( Name: verifierName, IsSuccess: false, Message: "vulnerability report validation failed: no runs found in sarif report", + Extensions: map[string]interface{}{ + CreatedAnnotation: createdTime, + }, }, nil } scannerName := strings.ToLower(sarifReport.Runs[0].Tool.Driver.Name) if len(input.DenylistCVEs) > 0 { - verifierReport, err := verifyDenyListCVEs(input.Name, scannerName, sarifReport, input.DenylistCVEs) + verifierReport, err := verifyDenyListCVEs(input.Name, scannerName, sarifReport, input.DenylistCVEs, createdTime) if err != nil { return nil, err } @@ -204,7 +247,7 @@ func processSarifReport(input *PluginConfig, verifierName string, blob []byte) ( } } if len(input.DisallowedSeverity) > 0 { - verifierReport, err := verifyDisallowedSeverities(input.Name, scannerName, sarifReport, input.DisallowedSeverity) + verifierReport, err := verifyDisallowedSeverities(input.Name, scannerName, sarifReport, input.DisallowedSeverity, createdTime) if err != nil { return nil, err } @@ -218,13 +261,14 @@ func processSarifReport(input *PluginConfig, verifierName string, blob []byte) ( IsSuccess: true, Message: "vulnerability report validation succeeded", Extensions: map[string]interface{}{ - "scanner": scannerName, + CreatedAnnotation: createdTime, + "scanner": scannerName, }, }, nil } // verifyDenyListCVEs verifies that the report does not contain any deny-listed CVEs -func verifyDenyListCVEs(verifierName string, scannerName string, 
sarifReport *sarif.Report, denylistCVEs []string) (*verifier.VerifierResult, error) { +func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sarif.Report, denylistCVEs []string, createdTime time.Time) (*verifier.VerifierResult, error) { denylistCVESet := make(map[string]bool) denylistViolations := []string{} @@ -241,7 +285,8 @@ func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sa IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", result), Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } @@ -263,8 +308,9 @@ func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sa Name: verifierName, IsSuccess: false, Extensions: map[string]interface{}{ - "scanner": scannerName, - "denylistCVEs": denylistViolations, + "scanner": scannerName, + "denylistCVEs": denylistViolations, + CreatedAnnotation: createdTime, }, Message: "vulnerability report validation failed", }, nil @@ -275,13 +321,14 @@ func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sa IsSuccess: true, Message: "vulnerability report validation succeeded", Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } // verifyDisallowedSeverities verifies that the report does not contain any disallowed severity levels -func verifyDisallowedSeverities(verifierName string, scannerName string, sarifReport *sarif.Report, disallowedSeverity []string) (*verifier.VerifierResult, error) { +func verifyDisallowedSeverities(verifierName string, scannerName string, sarifReport *sarif.Report, disallowedSeverity []string, createdTime time.Time) (*verifier.VerifierResult, error) { ruleMap := make(map[string]*sarif.ReportingDescriptor) violatingRules := make([]sarif.ReportingDescriptor, 0) // create a map of rule id to 
rule for easy lookup @@ -296,7 +343,8 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: rule id not found for result:[%v]", result), Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } @@ -307,7 +355,8 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: rule not found for result:[%v]", result), Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } @@ -318,7 +367,8 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe IsSuccess: false, Message: fmt.Sprintf("vulnerability report validation failed: error extracting severity:[%v]", err.Error()), Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } @@ -337,6 +387,7 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe Extensions: map[string]interface{}{ "scanner": scannerName, "severityViolations": violatingRules, + CreatedAnnotation: createdTime, }, Message: "vulnerability report validation failed", }, nil @@ -346,7 +397,8 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe IsSuccess: true, Message: "vulnerability report validation succeeded", Extensions: map[string]interface{}{ - "scanner": scannerName, + "scanner": scannerName, + CreatedAnnotation: createdTime, }, }, nil } @@ -369,32 +421,36 @@ func extractSeverity(scannerName string, rule sarif.ReportingDescriptor) (string return "", fmt.Errorf("scanner not supported:[%s]", scannerName) } -// validateMaximumAge validates that the report is newer than the allowed maximum age -// extracts the created timestamp from 
the descriptor annotations -func validateMaximumAge(maximumAge string, descriptor ocispecs.ReferenceDescriptor) (bool, string, error) { - // check if annotations exist +// extractCreationTimestamp extracts the created timestamp from the descriptor annotations +// verifies that the created timestamp is a valid timestamp in RFC3339 format +func extractCreationTimestamp(descriptor ocispecs.ReferenceDescriptor) (time.Time, error) { if descriptor.Annotations == nil { - return false, "", fmt.Errorf("no annotations found for descriptor:[%v]", descriptor) + return time.Time{}, fmt.Errorf("no annotations found for descriptor:[%v]", descriptor) } - // check if created annotation exists created, ok := descriptor.Annotations[CreatedAnnotation] if !ok { - return false, "", fmt.Errorf("created annotation not found for descriptor:[%v]", descriptor) + return time.Time{}, fmt.Errorf("created annotation not found for descriptor:[%v]", descriptor) } // check if created annotation is a valid timestamp createdTime, err := time.Parse(time.RFC3339, created) if err != nil { - return false, "", fmt.Errorf("error parsing created timestamp:[%s]", created) + return time.Time{}, fmt.Errorf("error parsing created timestamp:[%s]", created) } + return createdTime, nil +} + +// validateMaximumAge validates that the report is newer than the allowed maximum age +// using the caller-provided creation timestamp (see extractCreationTimestamp) +func validateMaximumAge(maximumAge string, createdTime time.Time) (bool, error) { // check if maxium age is a valid duration duration, err := time.ParseDuration(maximumAge) if err != nil { - return false, "", fmt.Errorf("error parsing maximum age:[%s]", maximumAge) + return false, fmt.Errorf("error parsing maximum age:[%s]", maximumAge) } // check if created timestamp is older than maximum age if time.Since(createdTime) > duration { - return false, created, nil + return false, nil } - return true, created, nil + return true, nil } diff --git
a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go index 6b508e12f..6e1d4b314 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -96,14 +96,29 @@ func TestVerifyReference(t *testing.T) { }, }, { - name: "invalid max age", + name: "invalid created timestamp", args: args{ - stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "1d"}}`, + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "24h"}}`, referenceManifest: ocispecs.ReferenceManifest{}, blobContent: sampleSarifReport, }, want: want{ - message: fmt.Sprintf("vulnerability report validation failed: error validating maximum age:[%s]", "no annotations found for descriptor:[{{ sha256:b2f67b016d3c646f025099b363b4f83a56a44d067a846be74e8866342c56f216 0 [] map[] [] } }]"), + message: fmt.Sprintf("vulnerability report validation failed: error extracting create timestamp annotation:[%s]", "no annotations found for descriptor:[{{ sha256:b2f67b016d3c646f025099b363b4f83a56a44d067a846be74e8866342c56f216 0 [] map[] [] } }]"), + }, + }, + { + name: "invalid max age", + args: args{ + stdinData: `{"config":{"name": "vulnerabilityreport", "maximumAge": "1d"}}`, + referenceManifest: ocispecs.ReferenceManifest{ + Annotations: map[string]string{ + "org.opencontainers.image.created": time.Now().Add(time.Hour * -30).Format(time.RFC3339), + }, + }, + blobContent: sampleSarifReport, + }, + want: want{ + message: fmt.Sprintf("vulnerability report validation failed: error validating maximum age:[%s]", "error parsing maximum age:[1d]"), }, }, { @@ -383,7 +398,7 @@ func TestProcessSarifReport(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - verifierReport, err := processSarifReport(&tt.args.input, "sample_verifier", []byte(tt.args.blobContent)) + verifierReport, err := 
processSarifReport(&tt.args.input, "sample_verifier", []byte(tt.args.blobContent), time.Now()) if err != nil && err.Error() != tt.want.err.Error() { t.Errorf("processSarifReport() error = %v, wantErr %v", err, tt.want.err) return @@ -506,7 +521,7 @@ func TestVerifyDenyListCVEs(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - verifierReport, err := verifyDenyListCVEs("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.denyListCVEs) + verifierReport, err := verifyDenyListCVEs("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.denyListCVEs, time.Now()) if err != nil && err.Error() != tt.want.err.Error() { t.Errorf("verifyDenyListCVEs() error = %v, wantErr %v", err, tt.want.err) return @@ -714,7 +729,7 @@ func TestVerifyDisallowedSeverities(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - verifierReport, err := verifyDisallowedSeverities("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.disallowedSeverities) + verifierReport, err := verifyDisallowedSeverities("test_verifier", TrivyScannerName, &tt.args.sarifReport, tt.args.disallowedSeverities, time.Now()) if err != nil && err.Error() != tt.want.err.Error() { t.Errorf("verifyDisallowedSeverities() error = %v, wantErr %v", err, tt.want.err) return @@ -817,16 +832,14 @@ func TestExtractSeverity(t *testing.T) { } } -// TestValidateMaxiumAge tests the validateMaximumAge function -func TestValidateMaxiumAge(t *testing.T) { - timeNow := time.Now().Format(time.RFC3339) +// TestExtractCreationTimestamp tests the extractCreationTimestamp function +func TestExtractCreationTimestamp(t *testing.T) { + timestamp, _ := time.Parse(time.RFC3339, "2021-01-01T00:00:00Z") type args struct { - maximumAge string - desc ocispecs.ReferenceDescriptor + desc ocispecs.ReferenceDescriptor } type want struct { - valid bool - created string + created time.Time err error } tests := []struct { @@ -835,23 +848,20 @@ func 
TestValidateMaxiumAge(t *testing.T) { want want }{ { - name: "valid maximum age, no annotations", + name: "invalid: no annotations", args: args{ - maximumAge: "24h", desc: ocispecs.ReferenceDescriptor{ Descriptor: oci.Descriptor{}, }, }, want: want{ - valid: false, err: fmt.Errorf("no annotations found for descriptor:[%v]", ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{}}), - created: "", + created: time.Time{}, }, }, { - name: "valid maximum age, empty annotations", + name: "invalid: empty annotations", args: args{ - maximumAge: "24h", desc: ocispecs.ReferenceDescriptor{ Descriptor: oci.Descriptor{ Annotations: map[string]string{}, @@ -859,15 +869,13 @@ func TestValidateMaxiumAge(t *testing.T) { }, }, want: want{ - valid: false, err: fmt.Errorf("created annotation not found for descriptor:[%v]", ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{Annotations: map[string]string{}}}), - created: "", + created: time.Time{}, }, }, { - name: "valid maximum age, invalid created annotation", + name: "invalid created annotation", args: args{ - maximumAge: "24h", desc: ocispecs.ReferenceDescriptor{ Descriptor: oci.Descriptor{ Annotations: map[string]string{ @@ -877,15 +885,13 @@ func TestValidateMaxiumAge(t *testing.T) { }, }, want: want{ - valid: false, err: fmt.Errorf("error parsing created timestamp:[%s]", "invalid"), - created: "", + created: time.Time{}, }, }, { - name: "invalid maximum age, valid created annotation", + name: "valid created annotation", args: args{ - maximumAge: "1d", desc: ocispecs.ReferenceDescriptor{ Descriptor: oci.Descriptor{ Annotations: map[string]string{ @@ -895,51 +901,79 @@ func TestValidateMaxiumAge(t *testing.T) { }, }, want: want{ - valid: false, - err: fmt.Errorf("error parsing maximum age:[%s]", "1d"), - created: "", + err: nil, + created: timestamp, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + created, err :=
extractCreationTimestamp(tt.args.desc) + if err != nil && err.Error() != tt.want.err.Error() { + t.Errorf("extractCreationTimestamp() error = %v, wantErr %v", err, tt.want.err) + return + } + if created != tt.want.created { + t.Errorf("extractCreationTimestamp() created = %v, want %v", created, tt.want.created) + } + }) + } +} + +// TestValidateMaxiumAge tests the validateMaximumAge function +func TestValidateMaxiumAge(t *testing.T) { + timestamp, _ := time.Parse(time.RFC3339, "2021-01-01T00:00:00Z") + timestampDelayed, _ := time.Parse(time.RFC3339, "2021-01-05T00:00:00Z") + type args struct { + maximumAge string + createTime time.Time + } + type want struct { + valid bool + err error + } + tests := []struct { + name string + args args + want want + }{ + { + name: "invalid maximum age, valid created annotation", + args: args{ + maximumAge: "1d", + createTime: timestamp, + }, + want: want{ + valid: false, + err: fmt.Errorf("error parsing maximum age:[%s]", "1d"), }, }, { name: "valid maximum age, valid created annotation, created is older than maximum age", args: args{ maximumAge: "24h", - desc: ocispecs.ReferenceDescriptor{ - Descriptor: oci.Descriptor{ - Annotations: map[string]string{ - "org.opencontainers.image.created": "2021-01-05T00:00:00Z", - }, - }, - }, + createTime: timestampDelayed, }, want: want{ - valid: false, - err: nil, - created: "2021-01-05T00:00:00Z", + valid: false, + err: nil, }, }, { name: "valid maximum age, valid created annotation, created is newer than maximum age", args: args{ maximumAge: "24h", - desc: ocispecs.ReferenceDescriptor{ - Descriptor: oci.Descriptor{ - Annotations: map[string]string{ - "org.opencontainers.image.created": timeNow, - }, - }, - }, + createTime: time.Now(), }, want: want{ - valid: true, - err: nil, - created: timeNow, + valid: true, + err: nil, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - valid, created, err := validateMaximumAge(tt.args.maximumAge, tt.args.desc) + valid, err := 
validateMaximumAge(tt.args.maximumAge, tt.args.createTime) if err != nil && err.Error() != tt.want.err.Error() { t.Errorf("validateMaxiumAge() error = %v, wantErr %v", err, tt.want.err) return @@ -947,9 +981,6 @@ func TestValidateMaxiumAge(t *testing.T) { if valid != tt.want.valid { t.Errorf("validateMaxiumAge() valid = %v, want %v", valid, tt.want.valid) } - if created != tt.want.created { - t.Errorf("validateMaxiumAge() created = %v, want %v", created, tt.want.created) - } }) } } From 5b807eda648e98d6f353a9dc45298409544b7adc Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Tue, 28 Nov 2023 00:18:10 +0000 Subject: [PATCH 03/17] address more comments --- .../verifier/vulnerabilityreport/vulnerability_report.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index 9ce6d59c0..859dfef7b 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -122,7 +122,7 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, - Message: fmt.Sprintf("error fetching reference manifest for subject: %s reference descriptor: %v", subjectReference, referenceDescriptor.Descriptor), + Message: fmt.Sprintf("error fetching reference manifest for subject: %s reference descriptor: %v: [%v]", subjectReference, referenceDescriptor.Descriptor, err.Error()), Extensions: map[string]interface{}{ CreatedAnnotation: createdTime, }, @@ -146,7 +146,7 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, - Message: fmt.Sprintf("error fetching blob for subject:[%s] digest:[%s]", subjectReference, blobDesc.Digest), + Message: fmt.Sprintf("error fetching blob for subject:[%s] 
digest:[%s]: [%v]", subjectReference, blobDesc.Digest, err.Error()), Extensions: map[string]interface{}{ CreatedAnnotation: createdTime, }, @@ -168,8 +168,7 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe } // validate json schema - err = verifyJSONSchema(blobDesc.MediaType, refBlob, input.SchemaURL) - if err != nil { + if err := verifyJSONSchema(blobDesc.MediaType, refBlob, input.SchemaURL); err != nil { return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, From 90e2218f99a5135e3d89500f2b32f512addde47f Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Tue, 28 Nov 2023 18:33:05 +0000 Subject: [PATCH 04/17] update schema validator --- .../schemavalidation/schemavalidation_test.go | 10 +++--- .../schemavalidation/schemavalidator.go | 31 ++++++------------- .../vulnerability_report.go | 4 +-- 3 files changed, 17 insertions(+), 28 deletions(-) diff --git a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go index 91c050e87..143c9546b 100644 --- a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidation_test.go @@ -36,7 +36,7 @@ func init() { // TestProperSchemaValidates tests that the proper schema validates func TestProperSchemaValidates(t *testing.T) { expected := true - result := Validate(schemaURL, trivyScanReport) == nil + result := LoadAndValidateOnlineSchema(schemaURL, trivyScanReport) == nil if expected != result { t.Logf("expected: %v, got: %v", expected, result) @@ -47,7 +47,7 @@ func TestProperSchemaValidates(t *testing.T) { // TestInvalidSchemaFailsValidation tests that an invalid schema fails validation func TestInvalidSchemaFailsValidation(t *testing.T) { expected := false - result := Validate("bad schema", trivyScanReport) == nil + result := LoadAndValidateOnlineSchema("bad schema", trivyScanReport) 
== nil if expected != result { t.Logf("expected: %v, got: %v", expected, result) @@ -58,7 +58,7 @@ func TestInvalidSchemaFailsValidation(t *testing.T) { // TestProperSchemaValidatesFromFile tests that the proper schema validates from a file func TestProperSchemaValidatesFromFile(t *testing.T) { expected := true - result := ValidateAgainstOfflineSchema(schemaFileBytes, trivyScanReport) == nil + result := LoadAndValidateOfflineSchema(schemaFileBytes, trivyScanReport) == nil if expected != result { t.Logf("expected: %v, got: %v", expected, result) @@ -69,7 +69,7 @@ func TestProperSchemaValidatesFromFile(t *testing.T) { // TestSchemaMismatchFromFile tests that a schema mismatch fails validation from a file func TestSchemaMismatchFromFile(t *testing.T) { expected := false - result := ValidateAgainstOfflineSchema(schemaFileMismatchBytes, trivyScanReport) == nil + result := LoadAndValidateOfflineSchema(schemaFileMismatchBytes, trivyScanReport) == nil if expected != result { t.Logf("expected: %v, got: %v", expected, result) @@ -80,7 +80,7 @@ func TestSchemaMismatchFromFile(t *testing.T) { // TestBadSchemaValidatesFromFile tests that a bad schema fails validation from a file func TestBadSchemaValidatesFromFile(t *testing.T) { expected := false - result := ValidateAgainstOfflineSchema(schemaFileBadBytes, trivyScanReport) == nil + result := LoadAndValidateOfflineSchema(schemaFileBadBytes, trivyScanReport) == nil if expected != result { t.Logf("expected: %v, got: %v", expected, result) diff --git a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go index 74ab85ecb..0a014051e 100644 --- a/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go +++ b/plugins/verifier/vulnerabilityreport/schemavalidation/schemavalidator.go @@ -23,33 +23,22 @@ import ( ) // Validates content from a byte array against a URL schema or from a canonical file path -func Validate(schema 
string, content []byte) error { +func LoadAndValidateOnlineSchema(schema string, content []byte) error { sl := gojsonschema.NewReferenceLoader(schema) - dl := gojsonschema.NewBytesLoader(content) - - result, err := gojsonschema.Validate(sl, dl) - - if err != nil { - return err - } - - if result.Valid() { - return nil - } - var e string - for _, desc := range result.Errors() { - e += fmt.Sprintf("%s:", desc.Description()) - } - return errors.New(e) + return validate(sl, content) } // Validates content from a byte array against a schema from a byte array // This is useful for testing and restricted environments as it allows loading of schemas from files -func ValidateAgainstOfflineSchema(schema []byte, content []byte) error { +func LoadAndValidateOfflineSchema(schema []byte, content []byte) error { sl := gojsonschema.NewBytesLoader(schema) - dl := gojsonschema.NewBytesLoader(content) + return validate(sl, content) +} - result, err := gojsonschema.Validate(sl, dl) +// Validates content from a byte array against a schema loaded +func validate(schemaLoader gojsonschema.JSONLoader, content []byte) error { + dl := gojsonschema.NewBytesLoader(content) + result, err := gojsonschema.Validate(schemaLoader, dl) if err != nil { return err } @@ -59,7 +48,7 @@ func ValidateAgainstOfflineSchema(schema []byte, content []byte) error { } var e string for _, desc := range result.Errors() { - e += fmt.Sprintf("%s:", desc.Description()) + e += fmt.Sprintf("%s: ", desc.Description()) } return errors.New(e) } diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index 859dfef7b..e461b58e9 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -200,13 +200,13 @@ func verifyJSONSchema(mediaType string, refBlob []byte, schemaURL string) error if mediaType == SarifMediaType { // decide online or offline schema type if 
schemaURL != "" { - return schemavalidation.Validate(schemaURL, refBlob) + return schemavalidation.LoadAndValidateOnlineSchema(schemaURL, refBlob) } schemaFileBytes, err := embeddedFS.ReadFile(SarifOfflineFilePath) if err != nil { return fmt.Errorf("error reading offline schema file:[%s]", SarifOfflineFilePath) } - return schemavalidation.ValidateAgainstOfflineSchema(schemaFileBytes, refBlob) + return schemavalidation.LoadAndValidateOfflineSchema(schemaFileBytes, refBlob) } return fmt.Errorf("media type not configured for plugin:[%s]", mediaType) } From 665c853f0f507c99975b116ee3a601eefe25d3f6 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Tue, 28 Nov 2023 19:08:15 +0000 Subject: [PATCH 05/17] switch to artifact type --- .../vulnerabilityreport/vulnerability_report.go | 14 +++++++------- .../vulnerability_report_test.go | 16 ++++++++-------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index e461b58e9..d4a9fbb6e 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -39,7 +39,7 @@ import ( var embeddedFS embed.FS const ( - SarifMediaType string = "application/sarif+json" + SarifArtifactType string = "application/sarif+json" SarifOfflineFilePath string = "schemavalidation/schemas/sarif-2.1.0.json" TrivyScannerName string = "trivy" GrypeScannerName string = "grype" @@ -168,18 +168,18 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe } // validate json schema - if err := verifyJSONSchema(blobDesc.MediaType, refBlob, input.SchemaURL); err != nil { + if err := verifyJSONSchema(referenceDescriptor.ArtifactType, refBlob, input.SchemaURL); err != nil { return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, - Message: fmt.Sprintf("vulnerability report validation failed: schema validation failed 
for digest:[%s],media type:[%s],parse errors:[%v]", blobDesc.Digest, blobDesc.MediaType, err.Error()), + Message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],artifact type:[%s],parse errors:[%v]", blobDesc.Digest, referenceDescriptor.ArtifactType, err.Error()), Extensions: map[string]interface{}{ CreatedAnnotation: createdTime, }, }, nil } - if blobDesc.MediaType == SarifMediaType { + if referenceDescriptor.ArtifactType == SarifArtifactType { return processSarifReport(input, input.Name, refBlob, createdTime) } @@ -196,8 +196,8 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe // verifyJSONSchema validates the json schema of the report // if schemaURL is empty, it will use the offline schema embedded in binary // currently only support for sarif reports -func verifyJSONSchema(mediaType string, refBlob []byte, schemaURL string) error { - if mediaType == SarifMediaType { +func verifyJSONSchema(artifactType string, refBlob []byte, schemaURL string) error { + if artifactType == SarifArtifactType { // decide online or offline schema type if schemaURL != "" { return schemavalidation.LoadAndValidateOnlineSchema(schemaURL, refBlob) @@ -208,7 +208,7 @@ func verifyJSONSchema(mediaType string, refBlob []byte, schemaURL string) error } return schemavalidation.LoadAndValidateOfflineSchema(schemaFileBytes, refBlob) } - return fmt.Errorf("media type not configured for plugin:[%s]", mediaType) + return fmt.Errorf("media type not configured for plugin:[%s]", artifactType) } // processSarifReport processes the sarif report running individual validations as configured diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go index 6e1d4b314..942136e47 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -103,7 
+103,7 @@ func TestVerifyReference(t *testing.T) { blobContent: sampleSarifReport, }, want: want{ - message: fmt.Sprintf("vulnerability report validation failed: error extracting create timestamp annotation:[%s]", "no annotations found for descriptor:[{{ sha256:b2f67b016d3c646f025099b363b4f83a56a44d067a846be74e8866342c56f216 0 [] map[] [] } }]"), + message: fmt.Sprintf("vulnerability report validation failed: error extracting create timestamp annotation:[%s]", "no annotations found for descriptor:[{{ sha256:b2f67b016d3c646f025099b363b4f83a56a44d067a846be74e8866342c56f216 0 [] map[] [] } application/sarif+json}]"), }, }, { @@ -161,7 +161,7 @@ func TestVerifyReference(t *testing.T) { }, Blobs: []oci.Descriptor{ { - MediaType: SarifMediaType, + MediaType: SarifArtifactType, Digest: blobDigest, }, }, @@ -182,15 +182,14 @@ func TestVerifyReference(t *testing.T) { }, Blobs: []oci.Descriptor{ { - MediaType: SarifMediaType, - Digest: blobDigest, + Digest: blobDigest, }, }, }, blobContent: "{}", }, want: want{ - message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],media type:[%s],parse errors:[%v]", blobDigest, SarifMediaType, "version is required:runs is required:"), + message: fmt.Sprintf("vulnerability report validation failed: schema validation failed for digest:[%s],artifact type:[%s],parse errors:[%v]", blobDigest, SarifArtifactType, "version is required: runs is required: "), }, }, { @@ -203,7 +202,7 @@ func TestVerifyReference(t *testing.T) { }, Blobs: []oci.Descriptor{ { - MediaType: SarifMediaType, + MediaType: SarifArtifactType, Digest: blobDigest, }, }, @@ -235,6 +234,7 @@ func TestVerifyReference(t *testing.T) { Digest: manifestDigest, Annotations: tt.args.referenceManifest.Annotations, }, + ArtifactType: SarifArtifactType, } verifierResult, err := VerifyReference(&cmdArgs, subjectRef, refDesc, testStore) if err != nil && err.Error() != tt.want.err.Error() { @@ -276,7 +276,7 @@ func TestVerifyJSONSchema(t 
*testing.T) { { name: "online verification success", args: args{ - mediaType: SarifMediaType, + mediaType: SarifArtifactType, refBlobContent: sampleSarifReport, schemaURL: "https://json.schemastore.org/sarif-2.1.0-rtm.5.json", }, @@ -287,7 +287,7 @@ func TestVerifyJSONSchema(t *testing.T) { { name: "offline verification success", args: args{ - mediaType: SarifMediaType, + mediaType: SarifArtifactType, refBlobContent: sampleSarifReport, }, want: want{ From dfebb1e9652e3f48b4e6af5bdada1759d1734e45 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 00:24:01 +0000 Subject: [PATCH 06/17] add support for latest report filtering --- .../template.yaml | 46 +++++++++++-------- .../vulnerability_report.go | 2 +- 2 files changed, 29 insertions(+), 19 deletions(-) diff --git a/library/vulnerability-report-validation/template.yaml b/library/vulnerability-report-validation/template.yaml index 2cf29accb..60dbfe0b2 100644 --- a/library/vulnerability-report-validation/template.yaml +++ b/library/vulnerability-report-validation/template.yaml @@ -41,9 +41,9 @@ spec: # Check if there are any system errors general_violation[{"result": result}] { - err := remote_data.system_error - err != "" - result := sprintf("System error calling external data provider: %s", [err]) + err := remote_data.system_error + err != "" + result := sprintf("System error calling external data provider: %s", [err]) } # Check if there are errors for any of the images @@ -54,29 +54,39 @@ spec: # Check if the success criteria is true general_violation[{"result": result}] { - subject_validation := remote_data.responses[_] - subject_result := subject_validation[1] - vuln_results := [res | subject_result.verifierReports[i].name == "vulnerabilityreport"; res := subject_result.verifierReports[i]] - count(vuln_results) > 0 - not process_vuln_reports(vuln_results) - result := sprintf("Subject failed verification: %s", [subject_validation[0]]) + subject_validation := remote_data.responses[_] + 
subject_result := subject_validation[1] + not process_vuln_reports(subject_result) + result := sprintf("Subject failed verification: %s", [subject_validation[0]]) } - process_vuln_reports(reports) if { - # At least one report must be valid - some vuln_report in reports - vuln_report.isSuccess == true - valid_signatures(vuln_report) + process_vuln_reports(subject_result) if { + # collect verifier reports from vulnerabilityreport verifier + vuln_results := [res | subject_result.verifierReports[i].name == "vulnerabilityreport"; res := subject_result.verifierReports[i]] + count(vuln_results) > 0 + # calculate the timestamp between current time and creation time + timestamp_diff_results_map := {diff_in_ns: i | diff_in_ns := time.now_ns() - time.parse_rfc3339_ns(vuln_results[i].extensions["org.opencontainers.image.created"])} + count(timestamp_diff_results_map) > 0 + # extract time difference durations into separate array to find global minimum + timestamp_diff_results_arr := [key | timestamp_diff_results_map[key]] + smallest_timestamp_diff := min(timestamp_diff_results_arr) + # validate latest report + process_vuln_report(vuln_results[timestamp_diff_results_map[smallest_timestamp_diff]]) + } + + process_vuln_report(report) if { + report.isSuccess == true + valid_signatures(report) } valid_signatures(_) := true { - require_signature == false + require_signature == false } valid_signatures(report) := true { require_signature count(report.nestedResults) > 0 - some nestedResult in report.nestedResults - nestedResult.artifactType == "application/vnd.cncf.notary.signature" - nestedResult.isSuccess + some nestedResult in report.nestedResults + nestedResult.artifactType == "application/vnd.cncf.notary.signature" + nestedResult.isSuccess } \ No newline at end of file diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index d4a9fbb6e..22166e1f5 100644 --- 
a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -133,7 +133,7 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe return &verifier.VerifierResult{ Name: input.Name, IsSuccess: false, - Message: fmt.Sprintf("vulnerability report validation failed: no blobs found for referrer %s@%s", subjectReference.Path, referenceDescriptor.Digest.String()), + Message: fmt.Sprintf("vulnerability report validation failed: no layers found in manifest for referrer %s@%s", subjectReference.Path, referenceDescriptor.Digest.String()), Extensions: map[string]interface{}{ CreatedAnnotation: createdTime, }, From 8b521b8dbec7ce8e26d87b86daafeeb3e86813db Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 00:26:16 +0000 Subject: [PATCH 07/17] small indent --- library/vulnerability-report-validation/template.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/vulnerability-report-validation/template.yaml b/library/vulnerability-report-validation/template.yaml index 60dbfe0b2..62c128007 100644 --- a/library/vulnerability-report-validation/template.yaml +++ b/library/vulnerability-report-validation/template.yaml @@ -48,8 +48,8 @@ spec: # Check if there are errors for any of the images general_violation[{"result": result}] { - count(remote_data.errors) > 0 - result := sprintf("Error validating one or more images: %s", remote_data.errors) + count(remote_data.errors) > 0 + result := sprintf("Error validating one or more images: %s", remote_data.errors) } # Check if the success criteria is true From 10b6a481eb69ec556084c469f7ddfc83d478b1e0 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 01:02:26 +0000 Subject: [PATCH 08/17] fix test --- .../verifier/vulnerabilityreport/vulnerability_report_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go index 942136e47..0ba04fb34 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -148,7 +148,7 @@ func TestVerifyReference(t *testing.T) { blobContent: sampleSarifReport, }, want: want{ - message: fmt.Sprintf("vulnerability report validation failed: no blobs found for referrer %s@%s", "test_subject_path", manifestDigest.String()), + message: fmt.Sprintf("vulnerability report validation failed: no layers found in manifest for referrer %s@%s", "test_subject_path", manifestDigest.String()), }, }, { From c55d8403408e9bcf1e413bed0f8fb3d796328914 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 18:38:29 +0000 Subject: [PATCH 09/17] rename notary project sig --- library/notation-nested-validation/template.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/library/notation-nested-validation/template.yaml b/library/notation-nested-validation/template.yaml index 6e34ad75a..6dcdb1a9d 100644 --- a/library/notation-nested-validation/template.yaml +++ b/library/notation-nested-validation/template.yaml @@ -60,15 +60,15 @@ spec: has_subject_failed_verify(nestedReports) if { [path, value] := walk(nestedReports) path[count(path) - 1] == "nestedResults" - not notary_signature_pass_verify(value) + not notary_project_signature_pass_verify(value) } - notary_signature_pass_verify(nestedReports) if { - count_with_success := notary_signature_signature_count(nestedReports) + notary_project_signature_pass_verify(nestedReports) if { + count_with_success := notary_project_signature_count(nestedReports) count_with_success > 0 } - notary_signature_signature_count(nestedReports) := number if { + notary_project_signature_count(nestedReports) := number if { sigs := [x | some i nestedReports[i].isSuccess == true From 
8168fb553561abbbc7d58161f97a7a20bda80048 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 18:57:17 +0000 Subject: [PATCH 10/17] add comments to vuln report template --- .../vulnerability-report-validation/template.yaml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/library/vulnerability-report-validation/template.yaml b/library/vulnerability-report-validation/template.yaml index 62c128007..cce437bc3 100644 --- a/library/vulnerability-report-validation/template.yaml +++ b/library/vulnerability-report-validation/template.yaml @@ -18,12 +18,20 @@ spec: rego: | package vulnerabilityreportvalidation - # TODO: add support for custom reason message propagating to user + # This template defines policy for vulnerability report validation. + # It checks the following: + # - If there are any system errors + # - If there are errors for any of the images + # - There is at least one vulnerability report that was verified + # - Only considers the latest vulnerability report + # - The latest vulnerability report is valid (isSuccess = true) + # - The latest vulnerability report has a valid notary project signature (if require_signature = true) + import future.keywords.if import future.keywords.in import future.keywords.every - default require_signature := false # change to true to require notation signature on vulnerability report + default require_signature := false # change to true to require notary project signature on vulnerability report # Get data from Ratify remote_data := response { From d2197885c5c8785a02c331d5e4a488a94875ec12 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Wed, 29 Nov 2023 18:59:18 +0000 Subject: [PATCH 11/17] add comments to nested notation template --- library/notation-nested-validation/template.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/library/notation-nested-validation/template.yaml b/library/notation-nested-validation/template.yaml index 6dcdb1a9d..39db14e82 100644 --- 
a/library/notation-nested-validation/template.yaml +++ b/library/notation-nested-validation/template.yaml @@ -17,6 +17,14 @@ spec: - target: admission.k8s.gatekeeper.sh rego: | package notationnestedvalidation + + # This template defines policy for notation nested validation. + # It checks the following: + # - If there are any system errors + # - If there are errors for any of the images + # - Each image has a valid notary project signature + # - Each nested artifact has a valid notary project signature + import future.keywords.if remote_data := response { From c1b3f4e9a220a607b800574c32944d482377cd26 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 17:47:04 +0000 Subject: [PATCH 12/17] override new go package license --- .github/licenserc.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/licenserc.yml b/.github/licenserc.yml index 6a6db0e1f..5acebebf1 100644 --- a/.github/licenserc.yml +++ b/.github/licenserc.yml @@ -57,3 +57,6 @@ dependency: - name: github.com/rcrowley/go-metrics # TODO: remove this when library is removed or under compatible license version: v0.0.0-20201227073835-cf1acfcdf475 license: BSD-2-Clause + - name: github.com/owenrumney/go-sarif/v2 # TODO: remove this when library is under a compatible license + version: v2.3.0 + license: Apache-2.0 From d88e13e9a319f199df384b132fc26800200d6e56 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 18:21:33 +0000 Subject: [PATCH 13/17] update to make create time stamp configurable --- .../template.yaml | 2 +- .../vulnerability_report.go | 37 +++++++++++-------- .../vulnerability_report_test.go | 4 +- 3 files changed, 24 insertions(+), 19 deletions(-) diff --git a/library/vulnerability-report-validation/template.yaml b/library/vulnerability-report-validation/template.yaml index cce437bc3..b12dab136 100644 --- a/library/vulnerability-report-validation/template.yaml +++ b/library/vulnerability-report-validation/template.yaml @@ -73,7 +73,7 @@ spec: 
vuln_results := [res | subject_result.verifierReports[i].name == "vulnerabilityreport"; res := subject_result.verifierReports[i]] count(vuln_results) > 0 # calculate the timestamp between current time and creation time - timestamp_diff_results_map := {diff_in_ns: i | diff_in_ns := time.now_ns() - time.parse_rfc3339_ns(vuln_results[i].extensions["org.opencontainers.image.created"])} + timestamp_diff_results_map := {diff_in_ns: i | diff_in_ns := time.now_ns() - time.parse_rfc3339_ns(vuln_results[i].extensions["createdAt"])} count(timestamp_diff_results_map) > 0 # extract time difference durations into separate array to find global minimum timestamp_diff_results_arr := [key | timestamp_diff_results_map[key]] diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index 22166e1f5..60ecaa356 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -39,21 +39,23 @@ import ( var embeddedFS embed.FS const ( - SarifArtifactType string = "application/sarif+json" - SarifOfflineFilePath string = "schemavalidation/schemas/sarif-2.1.0.json" - TrivyScannerName string = "trivy" - GrypeScannerName string = "grype" - CreatedAnnotation string = imagespec.AnnotationCreated - SeverityRegex = `Severity:\s*(\w+)` + SarifArtifactType string = "application/sarif+json" + SarifOfflineFilePath string = "schemavalidation/schemas/sarif-2.1.0.json" + TrivyScannerName string = "trivy" + GrypeScannerName string = "grype" + CreatedAnnotation string = "createdAt" + DefaultCreatedAnnotation string = imagespec.AnnotationCreated + SeverityRegex = `Severity:\s*(\w+)` ) type PluginConfig struct { - Name string `json:"name"` - SchemaURL string `json:"schemaURL,omitempty"` - MaximumAge string `json:"maximumAge,omitempty"` - DisallowedSeverity []string `json:"disallowedSeverity,omitempty"` - Passthrough bool `json:"passthrough,omitempty"` - 
DenylistCVEs []string `json:"denylistCVEs,omitempty"` + Name string `json:"name"` + SchemaURL string `json:"schemaURL,omitempty"` + CreatedAnnotationName string `json:"createdAnnotationName,omitempty"` + MaximumAge string `json:"maximumAge,omitempty"` + DisallowedSeverity []string `json:"disallowedSeverity,omitempty"` + Passthrough bool `json:"passthrough,omitempty"` + DenylistCVEs []string `json:"denylistCVEs,omitempty"` } type PluginInputConfig struct { @@ -81,7 +83,10 @@ func VerifyReference(args *skel.CmdArgs, subjectReference common.Reference, refe } // extract created timestamp from descriptor annotations - createdTime, err := extractCreationTimestamp(referenceDescriptor) + if input.CreatedAnnotationName == "" { + input.CreatedAnnotationName = DefaultCreatedAnnotation + } + createdTime, err := extractCreationTimestamp(input.CreatedAnnotationName, referenceDescriptor) if err != nil { return &verifier.VerifierResult{ Name: input.Name, @@ -422,13 +427,13 @@ func extractSeverity(scannerName string, rule sarif.ReportingDescriptor) (string // extractCreationTimestamp extracts the created timestamp from the descriptor annotations // verifies that the created timestamp is a valid timestamp in RFC3339 format -func extractCreationTimestamp(descriptor ocispecs.ReferenceDescriptor) (time.Time, error) { +func extractCreationTimestamp(createdAnnotationName string, descriptor ocispecs.ReferenceDescriptor) (time.Time, error) { if descriptor.Annotations == nil { return time.Time{}, fmt.Errorf("no annotations found for descriptor:[%v]", descriptor) } - created, ok := descriptor.Annotations[CreatedAnnotation] + created, ok := descriptor.Annotations[createdAnnotationName] if !ok { - return time.Time{}, fmt.Errorf("created annotation not found for descriptor:[%v]", descriptor) + return time.Time{}, fmt.Errorf("created annotation [%s] not found for descriptor:[%v]", createdAnnotationName, descriptor) } // check if created annotation is a valid timestamp createdTime, err := 
time.Parse(time.RFC3339, created) diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go index 0ba04fb34..bc34b5cb7 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -869,7 +869,7 @@ func TestExtractCreationTimestamp(t *testing.T) { }, }, want: want{ - err: fmt.Errorf("created annotation not found for descriptor:[%v]", ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{Annotations: map[string]string{}}}), + err: fmt.Errorf("created annotation [%s] not found for descriptor:[%v]", DefaultCreatedAnnotation, ocispecs.ReferenceDescriptor{Descriptor: oci.Descriptor{Annotations: map[string]string{}}}), created: time.Time{}, }, }, @@ -908,7 +908,7 @@ func TestExtractCreationTimestamp(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - created, err := extractCreationTimestamp(tt.args.desc) + created, err := extractCreationTimestamp(DefaultCreatedAnnotation, tt.args.desc) if err != nil && err.Error() != tt.want.err.Error() { t.Errorf("extractCreationTimestamp() error = %v, wantErr %v", err, tt.want.err) return From 497c82edef9a3f13c4951a1ec038f7a26c02a6b1 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 18:25:41 +0000 Subject: [PATCH 14/17] rename disallowedSeverity to disallowedSeverities --- .../config_v1beta1_verifier_vulnerabilityreport.yaml | 2 +- .../vulnerabilityreport/vulnerability_report.go | 10 +++++----- .../vulnerabilityreport/vulnerability_report_test.go | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml b/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml index 95c6dd678..b82a68b01 100644 --- a/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml +++ 
b/config/samples/config_v1beta1_verifier_vulnerabilityreport.yaml @@ -7,7 +7,7 @@ spec: artifactTypes: application/sarif+json parameters: maximumAge: 24h - disallowedSeverity: + disallowedSeverities: - high - critical denylistCVEs: diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report.go b/plugins/verifier/vulnerabilityreport/vulnerability_report.go index 60ecaa356..60d0cd8c1 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report.go @@ -53,7 +53,7 @@ type PluginConfig struct { SchemaURL string `json:"schemaURL,omitempty"` CreatedAnnotationName string `json:"createdAnnotationName,omitempty"` MaximumAge string `json:"maximumAge,omitempty"` - DisallowedSeverity []string `json:"disallowedSeverity,omitempty"` + DisallowedSeverities []string `json:"disallowedSeverities,omitempty"` Passthrough bool `json:"passthrough,omitempty"` DenylistCVEs []string `json:"denylistCVEs,omitempty"` } @@ -250,8 +250,8 @@ func processSarifReport(input *PluginConfig, verifierName string, blob []byte, c return verifierReport, nil } } - if len(input.DisallowedSeverity) > 0 { - verifierReport, err := verifyDisallowedSeverities(input.Name, scannerName, sarifReport, input.DisallowedSeverity, createdTime) + if len(input.DisallowedSeverities) > 0 { + verifierReport, err := verifyDisallowedSeverities(input.Name, scannerName, sarifReport, input.DisallowedSeverities, createdTime) if err != nil { return nil, err } @@ -332,7 +332,7 @@ func verifyDenyListCVEs(verifierName string, scannerName string, sarifReport *sa } // verifyDisallowedSeverities verifies that the report does not contain any disallowed severity levels -func verifyDisallowedSeverities(verifierName string, scannerName string, sarifReport *sarif.Report, disallowedSeverity []string, createdTime time.Time) (*verifier.VerifierResult, error) { +func verifyDisallowedSeverities(verifierName string, scannerName string, sarifReport *sarif.Report, 
disallowedSeverities []string, createdTime time.Time) (*verifier.VerifierResult, error) { ruleMap := make(map[string]*sarif.ReportingDescriptor) violatingRules := make([]sarif.ReportingDescriptor, 0) // create a map of rule id to rule for easy lookup @@ -377,7 +377,7 @@ func verifyDisallowedSeverities(verifierName string, scannerName string, sarifRe }, nil } // check if the severity is disallowed and add it to the list of violating rules - for _, disallowed := range disallowedSeverity { + for _, disallowed := range disallowedSeverities { if strings.EqualFold(severity, disallowed) { violatingRules = append(violatingRules, *rule) } diff --git a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go index bc34b5cb7..d06c5af91 100644 --- a/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go +++ b/plugins/verifier/vulnerabilityreport/vulnerability_report_test.go @@ -367,7 +367,7 @@ func TestProcessSarifReport(t *testing.T) { input: PluginConfig{ Name: "test_verifier", DenylistCVEs: []string{"CVE-2022-48174"}, - DisallowedSeverity: []string{ + DisallowedSeverities: []string{ "critical", }, }, @@ -384,7 +384,7 @@ func TestProcessSarifReport(t *testing.T) { input: PluginConfig{ Name: "test_verifier", DenylistCVEs: []string{"CVE-2022-48174"}, - DisallowedSeverity: []string{ + DisallowedSeverities: []string{ "high", }, }, From c44187a00aa14141dacfc926bf2756c712c5d3bc Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 23:51:19 +0000 Subject: [PATCH 15/17] add rego policy library for rego policy provider --- library/rego/README.md | 3 + .../rego/vulnerability-report-validation.rego | 57 +++++++++++++++++++ 2 files changed, 60 insertions(+) create mode 100644 library/rego/README.md create mode 100644 library/rego/vulnerability-report-validation.rego diff --git a/library/rego/README.md b/library/rego/README.md new file mode 100644 index 000000000..dfb5e56cd --- /dev/null 
+++ b/library/rego/README.md @@ -0,0 +1,3 @@ +# Ratify Rego Policies + +This folder contains `.rego` files that containe rego policies to be used with Ratify's [Rego Policy Provider](https://ratify.dev/docs/1.0/reference/crds/policies#regopolicy) \ No newline at end of file diff --git a/library/rego/vulnerability-report-validation.rego b/library/rego/vulnerability-report-validation.rego new file mode 100644 index 000000000..765a12709 --- /dev/null +++ b/library/rego/vulnerability-report-validation.rego @@ -0,0 +1,57 @@ +package ratify.policy + +import future.keywords.if +import future.keywords.in +import future.keywords.every + +# This template defines policy for vulnerability report validation. +# It checks the following: +# - If there are any system errors +# - If there are errors for any of the images +# - There is at least one vulnerability report that was verified +# - Only considers the latest vulnerability report +# - The latest vulnerability report is valid (isSuccess = true) +# - The latest vulnerability report has a valid notary project signature (if require_signature = true) + +default require_signature := false # change to true to require notary project signature on vulnerability report +default valid := false + +# all artifacts MUST be valid +valid { + not failed_verify(input) +} + +failed_verify(reports) { + not process_vuln_reports(reports) +} + +process_vuln_reports(subject_result) if { + # collect verifier reports from vulnerabilityreport verifier + vuln_results := [res | subject_result.verifierReports[i].verifierReports[j].name == "vulnerabilityreport"; res := subject_result.verifierReports[i].verifierReports[j]] + count(vuln_results) > 0 + # calculate the timestamp between current time and creation time + timestamp_diff_results_map := {diff_in_ns: i | diff_in_ns := time.now_ns() - time.parse_rfc3339_ns(vuln_results[i].extensions["createdAt"])} + count(timestamp_diff_results_map) > 0 + # extract time difference durations into separate array to 
find global minimum + timestamp_diff_results_arr := [key | timestamp_diff_results_map[key]] + smallest_timestamp_diff := min(timestamp_diff_results_arr) + # validate latest report + process_vuln_report(vuln_results[timestamp_diff_results_map[smallest_timestamp_diff]]) +} + +process_vuln_report(report) if { + report.isSuccess == true + valid_signatures(report) +} + +valid_signatures(_) := true { + require_signature == false +} + +valid_signatures(report) := true { + require_signature + count(report.nestedResults) > 0 + some nestedResult in report.nestedResults + nestedResult.artifactType == "application/vnd.cncf.notary.signature" + nestedResult.isSuccess +} \ No newline at end of file From 8966de2d4c5db194bc5283476d4cabec251b09b0 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 23:52:53 +0000 Subject: [PATCH 16/17] small typo --- library/rego/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/rego/README.md b/library/rego/README.md index dfb5e56cd..8f21f7892 100644 --- a/library/rego/README.md +++ b/library/rego/README.md @@ -1,3 +1,3 @@ # Ratify Rego Policies -This folder contains `.rego` files that containe rego policies to be used with Ratify's [Rego Policy Provider](https://ratify.dev/docs/1.0/reference/crds/policies#regopolicy) \ No newline at end of file +This folder contains `.rego` files that contain rego policies to be used ONLY with Ratify's [Rego Policy Provider](https://ratify.dev/docs/1.0/reference/crds/policies#regopolicy) \ No newline at end of file From 3f22d047ed0eea855dac6819afa85ca1b17dff21 Mon Sep 17 00:00:00 2001 From: Akash Singhal Date: Thu, 30 Nov 2023 23:54:45 +0000 Subject: [PATCH 17/17] add license header --- library/rego/vulnerability-report-validation.rego | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/library/rego/vulnerability-report-validation.rego b/library/rego/vulnerability-report-validation.rego index 765a12709..98aedca8d 100644 --- 
a/library/rego/vulnerability-report-validation.rego +++ b/library/rego/vulnerability-report-validation.rego @@ -1,3 +1,16 @@ +# Copyright The Ratify Authors. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + package ratify.policy import future.keywords.if