From a664743b7cb1477f10b0d98588cb048dc5f02686 Mon Sep 17 00:00:00 2001 From: siddhikhapare <81567515+siddhikhapare@users.noreply.github.com> Date: Wed, 29 May 2024 04:54:01 +0530 Subject: [PATCH 1/8] Added test for copy-ns-labels policy (#967) * Added test for copy-ns-labels policy Signed-off-by: siddhikhapare * files fixed Signed-off-by: siddhikhapare * error fixed Signed-off-by: siddhikhapare * issue resolved Signed-off-by: siddhikhapare * resource removed Signed-off-by: siddhikhapare * chainsaw-test file fixed Signed-off-by: siddhikhapare * container image name changed Signed-off-by: siddhikhapare * files added Signed-off-by: siddhikhapare * files deleted Signed-off-by: siddhikhapare * test for label added Signed-off-by: siddhikhapare * test file modified Signed-off-by: siddhikhapare * Errors fixed Signed-off-by: siddhikhapare * fixing lint error Signed-off-by: siddhikhapare * fixing test error Signed-off-by: siddhikhapare --------- Signed-off-by: siddhikhapare --- .../.chainsaw-test/chainsaw-test.yaml | 28 ++++++++ .../.chainsaw-test/ns.yaml | 7 ++ .../.chainsaw-test/patchresource.yaml | 66 +++++++++++++++++++ .../.chainsaw-test/policy-ready.yaml | 6 ++ .../.chainsaw-test/resource.yaml | 60 +++++++++++++++++ 5 files changed, 167 insertions(+) create mode 100644 other/copy-namespace-labels/.chainsaw-test/chainsaw-test.yaml create mode 100644 other/copy-namespace-labels/.chainsaw-test/ns.yaml create mode 100644 other/copy-namespace-labels/.chainsaw-test/patchresource.yaml create mode 100644 other/copy-namespace-labels/.chainsaw-test/policy-ready.yaml create mode 100644 other/copy-namespace-labels/.chainsaw-test/resource.yaml diff --git a/other/copy-namespace-labels/.chainsaw-test/chainsaw-test.yaml b/other/copy-namespace-labels/.chainsaw-test/chainsaw-test.yaml new file mode 100644 index 000000000..125cccbe9 --- /dev/null +++ b/other/copy-namespace-labels/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,28 @@ +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test 
+metadata: + name: copy-namespace-labels +spec: + steps: + - name: step-01 + try: + - apply: + file: ns.yaml + - apply: + file: ../copy-namespace-labels.yaml + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: resource.yaml + - assert: + file: patchresource.yaml + - error: + resource: + apiVersion: apps/v1 + kind: Deployment + metadata: + namespace: within-ns + labels: + kubernetes.io/metadata.name: "within-ns" \ No newline at end of file diff --git a/other/copy-namespace-labels/.chainsaw-test/ns.yaml b/other/copy-namespace-labels/.chainsaw-test/ns.yaml new file mode 100644 index 000000000..5940d9141 --- /dev/null +++ b/other/copy-namespace-labels/.chainsaw-test/ns.yaml @@ -0,0 +1,7 @@ +apiVersion: v1 +kind: Namespace +metadata: + name: within-ns + labels: + owner: "any-corp" + env: dev diff --git a/other/copy-namespace-labels/.chainsaw-test/patchresource.yaml b/other/copy-namespace-labels/.chainsaw-test/patchresource.yaml new file mode 100644 index 000000000..a2f8ea14b --- /dev/null +++ b/other/copy-namespace-labels/.chainsaw-test/patchresource.yaml @@ -0,0 +1,66 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-1 + namespace: within-ns + labels: + app: good-app + owner: "any-corp" + env: dev +spec: + replicas: 1 + selector: + matchLabels: + app: good-app + template: + metadata: + labels: + app: good-app + spec: + containers: + - name: good-app-deploy + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-3 + namespace: within-ns + labels: + owner: "any-corp" + env: dev +spec: + replicas: 1 + selector: + matchLabels: + app: my-app03 + template: + metadata: + labels: + app: my-app03 + spec: + containers: + - name: my-app03-deploy + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-4 + namespace: within-ns + labels: + owner: "any-corp" + env: dev +spec: + replicas: 1 + selector: + matchLabels: + app: my-app04 + template: + 
metadata: + labels: + app: my-app04 + spec: + containers: + - name: my-app04-deploy + image: busybox:1.35 \ No newline at end of file diff --git a/other/copy-namespace-labels/.chainsaw-test/policy-ready.yaml b/other/copy-namespace-labels/.chainsaw-test/policy-ready.yaml new file mode 100644 index 000000000..1fed6971c --- /dev/null +++ b/other/copy-namespace-labels/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,6 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: copy-namespace-labels +status: + ready: true diff --git a/other/copy-namespace-labels/.chainsaw-test/resource.yaml b/other/copy-namespace-labels/.chainsaw-test/resource.yaml new file mode 100644 index 000000000..b1b58993e --- /dev/null +++ b/other/copy-namespace-labels/.chainsaw-test/resource.yaml @@ -0,0 +1,60 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-1 + namespace: within-ns + labels: + app: good-app +spec: + replicas: 1 + selector: + matchLabels: + app: good-app + template: + metadata: + labels: + app: good-app + spec: + containers: + - name: good-app-deploy + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-3 + namespace: within-ns + labels: + owner: "dev-team" +spec: + replicas: 1 + selector: + matchLabels: + app: my-app03 + template: + metadata: + labels: + app: my-app03 + spec: + containers: + - name: my-app03-deploy + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: my-deployment-4 + namespace: within-ns +spec: + replicas: 1 + selector: + matchLabels: + app: my-app04 + template: + metadata: + labels: + app: my-app04 + spec: + containers: + - name: my-app04-deploy + image: busybox:1.35 From 91819eb103ba14ead87a0ce51ac588f7cd900a87 Mon Sep 17 00:00:00 2001 From: Marcus Date: Wed, 29 May 2024 01:41:22 +0200 Subject: [PATCH 2/8] Improve policy description (#1005) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - StatefulSet wasn't 
mentioned in artifact-hub.yaml - Previous description stated problems if minAvailable is lower than the replica definition, but the opposite is true. Signed-off-by: Marcus Kraßmann --- .../artifact-hub.yaml | 14 +++++++------- .../require-replicas-allow-disruption.yaml | 4 ++-- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/other/require-replicas-allow-disruption/artifact-hub.yaml b/other/require-replicas-allow-disruption/artifact-hub.yaml index 747a72121..05992824d 100644 --- a/other/require-replicas-allow-disruption/artifact-hub.yaml +++ b/other/require-replicas-allow-disruption/artifact-hub.yaml @@ -1,12 +1,12 @@ name: require-replicas-allow-disruption version: 1.0.0 -displayName: PodDisruptionBudget maxUnavailable Non-Zero with Deployments +displayName: PodDisruptionBudget maxUnavailable Non-Zero with Deployments/StatefulSets createdAt: "2024-03-03T16:54:00.000Z" description: >- Existing PodDisruptionBudgets can apply to all future matching Pod controllers. - If the minAvailable field is defined for such matching PDBs and is lower than the - replica count of a new Deployment, then availability could be negatively impacted. - This policy specifies that Deployment replicas exceed the minAvailable value of all + If the minAvailable field is defined for such matching PDBs and the replica count of a new + Deployment or StatefulSet is lower than that, then availability could be negatively impacted. + This policy specifies that Deployment/StatefulSet replicas exceed the minAvailable value of all matching PodDisruptionBudgets which specify minAvailable as a number and not percentage. install: |- ```shell @@ -17,9 +17,9 @@ keywords: - Sample readme: | Existing PodDisruptionBudgets can apply to all future matching Pod controllers. - If the minAvailable field is defined for such matching PDBs and is lower than the - replica count of a new Deployment, then availability could be negatively impacted. 
- This policy specifies that Deployment replicas exceed the minAvailable value of all + If the minAvailable field is defined for such matching PDBs and the replica count of a new + Deployment or StatefulSet is lower than that, then availability could be negatively impacted. + This policy specifies that Deployment/StatefulSet replicas exceed the minAvailable value of all matching PodDisruptionBudgets which specify minAvailable as a number and not percentage. Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ diff --git a/other/require-replicas-allow-disruption/require-replicas-allow-disruption.yaml b/other/require-replicas-allow-disruption/require-replicas-allow-disruption.yaml index 26b158ad5..74a047941 100644 --- a/other/require-replicas-allow-disruption/require-replicas-allow-disruption.yaml +++ b/other/require-replicas-allow-disruption/require-replicas-allow-disruption.yaml @@ -10,8 +10,8 @@ metadata: policies.kyverno.io/subject: PodDisruptionBudget, Deployment, StatefulSet policies.kyverno.io/description: >- Existing PodDisruptionBudgets can apply to all future matching Pod controllers. - If the minAvailable field is defined for such matching PDBs and is lower than the - replica count of a new Deployment or StatefulSet, then availability could be negatively impacted. + If the minAvailable field is defined for such matching PDBs and the replica count of a new + Deployment or StatefulSet is lower than that, then availability could be negatively impacted. This policy specifies that Deployment/StatefulSet replicas exceed the minAvailable value of all matching PodDisruptionBudgets which specify minAvailable as a number and not percentage. 
spec: From c4fa43f5d3bdaab680ec52aaee97b47a3cec6982 Mon Sep 17 00:00:00 2001 From: Chip Zoller Date: Tue, 28 May 2024 20:10:22 -0400 Subject: [PATCH 3/8] fix AH metadata (#1036) Signed-off-by: Chip Zoller --- .../kasten-generate-policy-by-preset-label/artifacthub-pkg.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml b/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml index 52a146446..3a0f687fc 100644 --- a/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml +++ b/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml @@ -8,7 +8,6 @@ description: >- Use with "kasten-validate-ns-by-preset-label" policy to require "dataprotection" labeling on new namespaces. install: |- ```shell - kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/kasten/kasten-generate-policy-by-preset-label/create-kasten-policies-clusterrole.yaml kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/kasten/kasten-generate-policy-by-preset-label/kasten-generate-policy-by-preset-label.yaml ``` keywords: From 8d8580ba67271169c417688955b4ac9e1dd3cbc5 Mon Sep 17 00:00:00 2001 From: Chip Zoller Date: Tue, 28 May 2024 20:29:28 -0400 Subject: [PATCH 4/8] remove oras setup...installed by default (#1037) Signed-off-by: Chip Zoller --- .github/workflows/test.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9dcf0b959..e73607084 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -88,8 +88,6 @@ jobs: [ $(uname -m) = aarch64 ] && curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.20.0/kind-linux-arm64 chmod +x ./kind sudo mv ./kind /usr/local/bin/kind - - name: Install oras CLI - uses: oras-project/setup-oras@ee7dbe1144cb00080a89497f937dae78f85fce29 # v1.1.0 - name: Install latest Kyverno CLI uses: kyverno/action-install-cli@fcee92fca5c883169ef9927acf543e0b5fc58289 # v0.2.0 - 
name: Create kind cluster From 5da950e17d8c0246bfd3a1d60f8524f712db62e7 Mon Sep 17 00:00:00 2001 From: Matt Bator Date: Tue, 28 May 2024 20:36:05 -0400 Subject: [PATCH 5/8] Updating Kasten data protection by label policy (#997) * Update data protection by label policy Signed-off-by: Matt Bator * Naming updates to chainsaw test Signed-off-by: Matt Bator * Replace audit with Audit, update digest Signed-off-by: Matt Bator --------- Signed-off-by: Matt Bator --- .../.kyverno-test/kyverno-test.yaml | 21 ---- .../.kyverno-test/nginx-deployment.yaml | 60 ----------- .../artifacthub-pkg.yml | 22 ---- .../k10-data-protection-by-label.yaml | 33 ------ .../chainsaw-step-01-assert-1.yaml | 2 +- .../chainsaw-step-01-assert-2.yaml | 0 .../.chainsaw-test/chainsaw-test.yaml | 6 +- .../deployment-bad-badlabel.yaml | 0 .../deployment-bad-nolabel.yaml | 0 .../.chainsaw-test/deployment-good.yaml | 2 +- .../nginx-deployment-invalid.yaml | 0 .../.chainsaw-test/ns.yaml | 0 .../.chainsaw-test/ss-bad-badlabel.yaml | 0 .../.chainsaw-test/ss-bad-nolabel.yaml | 0 .../.chainsaw-test/ss-good.yaml | 2 +- .../.kyverno-test/kyverno-test.yaml | 33 ++++++ .../.kyverno-test/nginx-deployment.yaml | 100 ++++++++++++++++++ .../artifacthub-pkg.yml | 26 +++++ .../kasten-data-protection-by-label.yaml | 38 +++++++ 19 files changed, 203 insertions(+), 142 deletions(-) delete mode 100644 kasten/k10-data-protection-by-label/.kyverno-test/kyverno-test.yaml delete mode 100644 kasten/k10-data-protection-by-label/.kyverno-test/nginx-deployment.yaml delete mode 100644 kasten/k10-data-protection-by-label/artifacthub-pkg.yml delete mode 100644 kasten/k10-data-protection-by-label/k10-data-protection-by-label.yaml rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/chainsaw-step-01-assert-1.yaml (66%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/chainsaw-step-01-assert-2.yaml (100%) rename kasten/{k10-data-protection-by-label 
=> kasten-data-protection-by-label}/.chainsaw-test/chainsaw-test.yaml (91%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/deployment-bad-badlabel.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/deployment-bad-nolabel.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/deployment-good.yaml (97%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/nginx-deployment-invalid.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/ns.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/ss-bad-badlabel.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/ss-bad-nolabel.yaml (100%) rename kasten/{k10-data-protection-by-label => kasten-data-protection-by-label}/.chainsaw-test/ss-good.yaml (96%) create mode 100644 kasten/kasten-data-protection-by-label/.kyverno-test/kyverno-test.yaml create mode 100644 kasten/kasten-data-protection-by-label/.kyverno-test/nginx-deployment.yaml create mode 100644 kasten/kasten-data-protection-by-label/artifacthub-pkg.yml create mode 100644 kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml diff --git a/kasten/k10-data-protection-by-label/.kyverno-test/kyverno-test.yaml b/kasten/k10-data-protection-by-label/.kyverno-test/kyverno-test.yaml deleted file mode 100644 index 58f925a36..000000000 --- a/kasten/k10-data-protection-by-label/.kyverno-test/kyverno-test.yaml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: cli.kyverno.io/v1alpha1 -kind: Test -metadata: - name: kyverno_data_protection_tests -policies: -- ../k10-data-protection-by-label.yaml -resources: -- nginx-deployment.yaml -results: -- kind: Deployment - policy: k10-data-protection-by-label - resources: - - 
nginx-deployment-invalid - result: fail - rule: k10-data-protection-by-label -- kind: Deployment - policy: k10-data-protection-by-label - resources: - - nginx-deployment - result: pass - rule: k10-data-protection-by-label diff --git a/kasten/k10-data-protection-by-label/.kyverno-test/nginx-deployment.yaml b/kasten/k10-data-protection-by-label/.kyverno-test/nginx-deployment.yaml deleted file mode 100644 index 9ceb00a66..000000000 --- a/kasten/k10-data-protection-by-label/.kyverno-test/nginx-deployment.yaml +++ /dev/null @@ -1,60 +0,0 @@ -kind: Namespace -apiVersion: v1 -metadata: - name: nginx - labels: - name: nginx - purpose: production - dataprotection: k10-goldpolicy - immutable: enabled ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: nginx-deployment - namespace: nginx - labels: - app: nginx - purpose: production - dataprotection: k10-goldpolicy # set a policy to use our 'gold' standard data protection policy (generate-gold-backup-policy) - immutable: enabled -spec: - replicas: 3 - selector: - matchLabels: - app: nginx - template: - metadata: - labels: - app: nginx - spec: - containers: - - name: nginx - image: nginx:1.14.2 - ports: - - containerPort: 80 ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: nginx-deployment-invalid - namespace: nginx - labels: - app: nginx - purpose: production - dataprotection: none # invalid named K10 Policy!! 
-spec: - replicas: 3 - selector: - matchLabels: - app: nginx - template: - metadata: - labels: - app: nginx - spec: - containers: - - name: nginx - image: nginx:1.14.2 - ports: - - containerPort: 80 diff --git a/kasten/k10-data-protection-by-label/artifacthub-pkg.yml b/kasten/k10-data-protection-by-label/artifacthub-pkg.yml deleted file mode 100644 index fc23cdbe9..000000000 --- a/kasten/k10-data-protection-by-label/artifacthub-pkg.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: k10-data-protection-by-label -version: 1.0.0 -displayName: Check Data Protection By Label -createdAt: "2023-04-10T20:12:53.000Z" -description: >- - Check the 'dataprotection' label that production Deployments and StatefulSet have a named K10 Policy. Use in combination with 'generate' ClusterPolicy to 'generate' a specific K10 Policy by name. -install: |- - ```shell - kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/kasten/k10-data-protection-by-label/k10-data-protection-by-label.yaml - ``` -keywords: - - kyverno - - Kasten K10 by Veeam -readme: | - Check the 'dataprotection' label that production Deployments and StatefulSet have a named K10 Policy. Use in combination with 'generate' ClusterPolicy to 'generate' a specific K10 Policy by name. 
- - Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ -annotations: - kyverno/category: "Kasten K10 by Veeam" - kyverno/kubernetesVersion: "1.21-1.22" - kyverno/subject: "Deployment, StatefulSet" -digest: 7f4a303a81cd673f876f42a4c8bf74f4d197f0a005907f2b3d09f3745bb749dc diff --git a/kasten/k10-data-protection-by-label/k10-data-protection-by-label.yaml b/kasten/k10-data-protection-by-label/k10-data-protection-by-label.yaml deleted file mode 100644 index 03e0606cf..000000000 --- a/kasten/k10-data-protection-by-label/k10-data-protection-by-label.yaml +++ /dev/null @@ -1,33 +0,0 @@ -apiVersion: kyverno.io/v1 -kind: ClusterPolicy -metadata: - name: k10-data-protection-by-label - annotations: - policies.kyverno.io/title: Check Data Protection By Label - policies.kyverno.io/category: Kasten K10 by Veeam - kyverno.io/kyverno-version: 1.6.2 - policies.kyverno.io/minversion: 1.6.2 - kyverno.io/kubernetes-version: "1.21-1.22" - policies.kyverno.io/subject: Deployment, StatefulSet - policies.kyverno.io/description: >- - Check the 'dataprotection' label that production Deployments and StatefulSet have a named K10 Policy. - Use in combination with 'generate' ClusterPolicy to 'generate' a specific K10 Policy by name. 
-spec: - validationFailureAction: audit - rules: - - name: k10-data-protection-by-label - match: - any: - - resources: - kinds: - - Deployment - - StatefulSet - selector: - matchLabels: - purpose: production - validate: - message: "Deployments and StatefulSets that specify 'dataprotection' label must have a valid k10-?* name (use labels: dataprotection: k10-)" - pattern: - metadata: - labels: - dataprotection: "k10-*" diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml similarity index 66% rename from kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml index 4e8dfe8c2..e9f379862 100755 --- a/kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml +++ b/kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-1.yaml @@ -1,6 +1,6 @@ apiVersion: kyverno.io/v1 kind: ClusterPolicy metadata: - name: k10-data-protection-by-label + name: kasten-data-protection-by-label status: ready: true diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-2.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-2.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-2.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-step-01-assert-2.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml similarity index 91% rename from kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml index 107d75e22..ce89f5974 100755 --- 
a/kasten/k10-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml +++ b/kasten/kasten-data-protection-by-label/.chainsaw-test/chainsaw-test.yaml @@ -3,19 +3,19 @@ apiVersion: chainsaw.kyverno.io/v1alpha1 kind: Test metadata: creationTimestamp: null - name: k10-data-protection-by-label + name: kasten-data-protection-by-label spec: steps: - name: step-01 try: - apply: - file: ../k10-data-protection-by-label.yaml + file: ../kasten-data-protection-by-label.yaml - patch: resource: apiVersion: kyverno.io/v1 kind: ClusterPolicy metadata: - name: k10-data-protection-by-label + name: kasten-data-protection-by-label spec: validationFailureAction: Enforce - assert: diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/deployment-bad-badlabel.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-bad-badlabel.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/deployment-bad-badlabel.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-bad-badlabel.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/deployment-bad-nolabel.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-bad-nolabel.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/deployment-bad-nolabel.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-bad-nolabel.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/deployment-good.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-good.yaml similarity index 97% rename from kasten/k10-data-protection-by-label/.chainsaw-test/deployment-good.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-good.yaml index dcf3c489e..f197557ee 100644 --- a/kasten/k10-data-protection-by-label/.chainsaw-test/deployment-good.yaml +++ b/kasten/kasten-data-protection-by-label/.chainsaw-test/deployment-good.yaml @@ -6,7 +6,7 @@ 
metadata: labels: app: busybox purpose: production - dataprotection: k10-goldpolicy + dataprotection: kasten-example spec: replicas: 1 selector: diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/nginx-deployment-invalid.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/nginx-deployment-invalid.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/nginx-deployment-invalid.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/nginx-deployment-invalid.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/ns.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/ns.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/ns.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/ns.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/ss-bad-badlabel.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/ss-bad-badlabel.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/ss-bad-badlabel.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/ss-bad-badlabel.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/ss-bad-nolabel.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/ss-bad-nolabel.yaml similarity index 100% rename from kasten/k10-data-protection-by-label/.chainsaw-test/ss-bad-nolabel.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/ss-bad-nolabel.yaml diff --git a/kasten/k10-data-protection-by-label/.chainsaw-test/ss-good.yaml b/kasten/kasten-data-protection-by-label/.chainsaw-test/ss-good.yaml similarity index 96% rename from kasten/k10-data-protection-by-label/.chainsaw-test/ss-good.yaml rename to kasten/kasten-data-protection-by-label/.chainsaw-test/ss-good.yaml index bc6216c38..848d14d3a 100644 --- a/kasten/k10-data-protection-by-label/.chainsaw-test/ss-good.yaml +++ 
b/kasten/kasten-data-protection-by-label/.chainsaw-test/ss-good.yaml @@ -5,7 +5,7 @@ metadata: namespace: k10-dplabel-ns labels: purpose: production - dataprotection: k10-silverpolicy + dataprotection: kasten-example spec: selector: matchLabels: diff --git a/kasten/kasten-data-protection-by-label/.kyverno-test/kyverno-test.yaml b/kasten/kasten-data-protection-by-label/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..6476031b7 --- /dev/null +++ b/kasten/kasten-data-protection-by-label/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,33 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: kyverno_data_protection_tests +policies: +- ../kasten-data-protection-by-label.yaml +resources: +- nginx-deployment.yaml +results: +- kind: Deployment + policy: kasten-data-protection-by-label + resources: + - nginx-deployment-invalid + result: fail + rule: kasten-data-protection-by-label +- kind: Deployment + policy: kasten-data-protection-by-label + resources: + - nginx-deployment-pass + result: pass + rule: kasten-data-protection-by-label +- kind: Deployment + policy: kasten-data-protection-by-label + resources: + - nginx-deployment-none + result: pass + rule: kasten-data-protection-by-label +- kind: Deployment + policy: kasten-data-protection-by-label + resources: + - nginx-deployment-skipped + result: skip + rule: kasten-data-protection-by-label \ No newline at end of file diff --git a/kasten/kasten-data-protection-by-label/.kyverno-test/nginx-deployment.yaml b/kasten/kasten-data-protection-by-label/.kyverno-test/nginx-deployment.yaml new file mode 100644 index 000000000..b3f7adf08 --- /dev/null +++ b/kasten/kasten-data-protection-by-label/.kyverno-test/nginx-deployment.yaml @@ -0,0 +1,100 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment-pass + namespace: nginx + labels: + app: nginx + purpose: production + dataprotection: kasten-example + immutable: enabled +spec: + replicas: 3 + selector: + matchLabels: + app: 
nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment-none + namespace: nginx + labels: + app: nginx + purpose: production + dataprotection: none +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment-invalid + namespace: nginx + labels: + app: nginx + purpose: production + dataprotection: invalid +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment-skipped + namespace: nginx + labels: + app: nginx + purpose: test + dataprotection: kasten-example +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.14.2 + ports: + - containerPort: 80 diff --git a/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml b/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml new file mode 100644 index 000000000..12530c412 --- /dev/null +++ b/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml @@ -0,0 +1,26 @@ +name: kasten-data-protection-by-label +version: 1.0.1 +displayName: Check Data Protection By Label +createdAt: "2023-05-07T00:00:00.000Z" +description: >- + Check the 'dataprotection' label for production Deployments and StatefulSet workloads. + + Use in combination with 'kasten-generate-example-backup-policy' policy to generate a Kasten policy for the workload namespace, if it doesn't already exist. 
+install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml + ``` +keywords: + - kyverno + - Veeam Kasten +readme: | + Check the 'dataprotection' label for production Deployments and StatefulSet workloads. + + Use in combination with 'kasten-generate-example-backup-policy' policy to generate a Kasten policy for the workload namespace, if it doesn't already exist. + + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Veeam Kasten" + kyverno/kubernetesVersion: "1.24-1.30" + kyverno/subject: "Deployment, StatefulSet" +digest: c25638fd2f76aa212e1bb6c56c3b743ffb4c3d6819fb6d130149d8a0e0e989d9 diff --git a/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml b/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml new file mode 100644 index 000000000..e7ec4c100 --- /dev/null +++ b/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml @@ -0,0 +1,38 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: kasten-data-protection-by-label + annotations: + policies.kyverno.io/title: Check Data Protection By Label + policies.kyverno.io/category: Veeam Kasten + kyverno.io/kyverno-version: 1.12.1 + policies.kyverno.io/minversion: 1.6.2 + kyverno.io/kubernetes-version: "1.24-1.30" + policies.kyverno.io/subject: Deployment, StatefulSet + policies.kyverno.io/description: >- + Check the 'dataprotection' label for production Deployments and StatefulSet workloads. + + Use in combination with 'kasten-generate-example-backup-policy' policy to generate a Kasten policy for the workload namespace, if it doesn't already exist. 
+spec: + validationFailureAction: Audit + rules: + - name: kasten-data-protection-by-label + match: + any: + - resources: + kinds: + - Deployment + - StatefulSet + selector: + matchLabels: + purpose: production + validate: + message: >- + "Deployments and StatefulSets with 'purpose=production' label must specify a valid 'dataprotection' label: + + "dataprotection=kasten-example" - + "dataprotection=none" - No local snapshots or backups + pattern: + metadata: + labels: + dataprotection: "kasten-example|none" From c89faeac0411a59f01d86e30baa2cc2f3181c2db Mon Sep 17 00:00:00 2001 From: Chip Zoller Date: Wed, 29 May 2024 10:02:17 -0400 Subject: [PATCH 6/8] fix indentation issues (#1038) Signed-off-by: Chip Zoller --- kasten/kasten-3-2-1-backup/artifacthub-pkg.yml | 3 +-- kasten/kasten-3-2-1-backup/kasten-3-2-1-backup.yaml | 1 - kasten/kasten-data-protection-by-label/artifacthub-pkg.yml | 3 +-- .../kasten-data-protection-by-label.yaml | 1 - .../artifacthub-pkg.yml | 4 +--- .../kasten-generate-policy-by-preset-label.yaml | 1 - 6 files changed, 3 insertions(+), 10 deletions(-) diff --git a/kasten/kasten-3-2-1-backup/artifacthub-pkg.yml b/kasten/kasten-3-2-1-backup/artifacthub-pkg.yml index 8306e860d..7e939214b 100644 --- a/kasten/kasten-3-2-1-backup/artifacthub-pkg.yml +++ b/kasten/kasten-3-2-1-backup/artifacthub-pkg.yml @@ -4,7 +4,6 @@ displayName: Check Kasten 3-2-1 Backup Policy createdAt: "2023-05-07T00:00:00.000Z" description: >- The 3-2-1 rule of data protection recommends that you have at least 3 copies of data, on 2 different storage targets, with 1 being offsite. This approach ensures a health mix of redundancy options for data recovery of the application for localized & multi-region cloud failures or compromise. In Kubernetes, this translates to the original running resources, a local snapshot, and a copy of all application resources and volume data exported to an external repository. 
- This policy accomplishes 3-2-1 validation by ensuring each policy contains both 'action: backup' and 'action: export'. install: |- ```shell @@ -23,4 +22,4 @@ annotations: kyverno/category: "Veeam Kasten" kyverno/kubernetesVersion: "1.24-1.30" kyverno/subject: "Policy" -digest: 45c8d345b2188ec47fe8b38a417726b7eae951edf18d770abdb602faec7d30a4 +digest: ae3f8af7d3708b5bcbc4e0a5fb368f5100441a85923dad8f096b367f279462a4 diff --git a/kasten/kasten-3-2-1-backup/kasten-3-2-1-backup.yaml b/kasten/kasten-3-2-1-backup/kasten-3-2-1-backup.yaml index e898dd10d..6dcb48468 100644 --- a/kasten/kasten-3-2-1-backup/kasten-3-2-1-backup.yaml +++ b/kasten/kasten-3-2-1-backup/kasten-3-2-1-backup.yaml @@ -12,7 +12,6 @@ metadata: policies.kyverno.io/subject: Policy policies.kyverno.io/description: >- The 3-2-1 rule of data protection recommends that you have at least 3 copies of data, on 2 different storage targets, with 1 being offsite. This approach ensures a health mix of redundancy options for data recovery of the application for localized & multi-region cloud failures or compromise. In Kubernetes, this translates to the original running resources, a local snapshot, and a copy of all application resources and volume data exported to an external repository. - This policy accomplishes 3-2-1 validation by ensuring each policy contains both 'action: backup' and 'action: export'. spec: validationFailureAction: Audit diff --git a/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml b/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml index 12530c412..ae58b9e7f 100644 --- a/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml +++ b/kasten/kasten-data-protection-by-label/artifacthub-pkg.yml @@ -4,7 +4,6 @@ displayName: Check Data Protection By Label createdAt: "2023-05-07T00:00:00.000Z" description: >- Check the 'dataprotection' label for production Deployments and StatefulSet workloads. 
- Use in combination with 'kasten-generate-example-backup-policy' policy to generate a Kasten policy for the workload namespace, if it doesn't already exist. install: |- ```shell @@ -23,4 +22,4 @@ annotations: kyverno/category: "Veeam Kasten" kyverno/kubernetesVersion: "1.24-1.30" kyverno/subject: "Deployment, StatefulSet" -digest: c25638fd2f76aa212e1bb6c56c3b743ffb4c3d6819fb6d130149d8a0e0e989d9 +digest: 8751cca18f18d7a2cd1b923e84b805580af363b1aff8766fc4f3f231d6026601 diff --git a/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml b/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml index e7ec4c100..3db97db3b 100644 --- a/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml +++ b/kasten/kasten-data-protection-by-label/kasten-data-protection-by-label.yaml @@ -11,7 +11,6 @@ metadata: policies.kyverno.io/subject: Deployment, StatefulSet policies.kyverno.io/description: >- Check the 'dataprotection' label for production Deployments and StatefulSet workloads. - Use in combination with 'kasten-generate-example-backup-policy' policy to generate a Kasten policy for the workload namespace, if it doesn't already exist. spec: validationFailureAction: Audit diff --git a/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml b/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml index 3a0f687fc..43e1aa102 100644 --- a/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml +++ b/kasten/kasten-generate-policy-by-preset-label/artifacthub-pkg.yml @@ -15,12 +15,10 @@ keywords: - Veeam Kasten readme: | Generates a Kasten policy for a new namespace that includes a valid "dataprotection" label, if the policy does not already exist. - Use with "kasten-validate-ns-by-preset-label" policy to require "dataprotection" labeling on new namespaces. 
- Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ annotations: kyverno/category: "Veeam Kasten" kyverno/kubernetesVersion: "1.24-1.30" kyverno/subject: "Policy" -digest: 99bafb98c4f5f3c93423414fc8c6bbed9226b6e056d5d65d2d2873290e86d1df +digest: bd6c752cc28abd28792b579956bdddc69864ab0ffae4dd95b3d47de6977b0aae diff --git a/kasten/kasten-generate-policy-by-preset-label/kasten-generate-policy-by-preset-label.yaml b/kasten/kasten-generate-policy-by-preset-label/kasten-generate-policy-by-preset-label.yaml index c558b6b77..8d8da97b6 100644 --- a/kasten/kasten-generate-policy-by-preset-label/kasten-generate-policy-by-preset-label.yaml +++ b/kasten/kasten-generate-policy-by-preset-label/kasten-generate-policy-by-preset-label.yaml @@ -19,7 +19,6 @@ metadata: kyverno.io/kubernetes-version: "1.24-1.30" policies.kyverno.io/description: >- Generates a Kasten policy for a new namespace that includes a valid "dataprotection" label, if the policy does not already exist. - Use with "kasten-validate-ns-by-preset-label" policy to require "dataprotection" labeling on new namespaces. 
spec: rules: From 8ce32807e0bf76bbe5d6daab1611704fd645fab7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Charles-Edouard=20Br=C3=A9t=C3=A9ch=C3=A9?= Date: Thu, 30 May 2024 01:17:57 +0200 Subject: [PATCH 7/8] chore: bump chainsaw to v0.2.3 (#1029) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: bump chainsaw to v0.2.2 Signed-off-by: Charles-Edouard Brétéché * fix cel tests Signed-off-by: Charles-Edouard Brétéché * fix cel tests Signed-off-by: Charles-Edouard Brétéché * bump Signed-off-by: Charles-Edouard Brétéché --------- Signed-off-by: Charles-Edouard Brétéché --- .github/workflows/test.yml | 2 +- .../allowed-annotations/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../allowed-pod-priorities/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ other-cel/check-env-vars/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 3 +++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-all-secrets/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../enforce-pod-duration/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../ensure-probes-different/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../ensure-readonly-hostpath/.chainsaw-test/chainsaw-test.yaml | 2 ++ other-cel/forbid-cpu-limits/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../imagepullpolicy-always/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../ingress-host-match-tls/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../limit-containers-per-pod/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../limit-hostpath-type-pv/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../metadata-match-regex/.chainsaw-test/chainsaw-test.yaml | 2 ++ other-cel/pdb-maxunavailable/.chainsaw-test/chainsaw-test.yaml | 2 ++ other-cel/prevent-bare-pods/.chainsaw-test/chainsaw-test.yaml | 2 ++ 
other-cel/prevent-cr8escape/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-capabilities/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-host-namespaces/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-host-path/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-host-ports/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-host-process/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../disallow-selinux/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../restrict-seccomp/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../restrict-sysctls/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../.chainsaw-test/chainsaw-test.yaml | 2 ++ .../require-run-as-nonroot/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../restrict-seccomp-strict/.chainsaw-test/chainsaw-test.yaml | 2 ++ .../restrict-volume-types/.chainsaw-test/chainsaw-test.yaml | 2 ++ 40 files changed, 80 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e73607084..ec3bbc5a5 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -107,7 +107,7 @@ jobs: set -e kubectl apply -f ./.chainsaw/crds - name: Install Chainsaw - uses: kyverno/action-install-chainsaw@995cddaee7702e849270b84fa44cdcebe7462da8 # v0.1.9 + uses: kyverno/action-install-chainsaw@ef2517389320aae0fd7c067aa14b060eef08b76d # v0.2.3 - name: Test with Chainsaw env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/other-cel/allowed-annotations/.chainsaw-test/chainsaw-test.yaml b/other-cel/allowed-annotations/.chainsaw-test/chainsaw-test.yaml index 0b3284bbd..d139ab184 100755 --- a/other-cel/allowed-annotations/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/allowed-annotations/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: allowed-annotations spec: + # disable 
templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/allowed-pod-priorities/.chainsaw-test/chainsaw-test.yaml b/other-cel/allowed-pod-priorities/.chainsaw-test/chainsaw-test.yaml index 8bd44427b..01a9a8915 100755 --- a/other-cel/allowed-pod-priorities/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/allowed-pod-priorities/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: allowed-pod-priorities spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/block-ephemeral-containers/.chainsaw-test/chainsaw-test.yaml b/other-cel/block-ephemeral-containers/.chainsaw-test/chainsaw-test.yaml index bf3b869c1..0edd78c5c 100755 --- a/other-cel/block-ephemeral-containers/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/block-ephemeral-containers/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: block-ephemeral-containers spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/check-env-vars/.chainsaw-test/chainsaw-test.yaml b/other-cel/check-env-vars/.chainsaw-test/chainsaw-test.yaml index 898591ce7..eea78f49c 100755 --- a/other-cel/check-env-vars/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/check-env-vars/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: check-env-vars spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/check-serviceaccount-secrets/.chainsaw-test/chainsaw-test.yaml b/other-cel/check-serviceaccount-secrets/.chainsaw-test/chainsaw-test.yaml index 92cb193cf..2313e8dcd 100644 --- a/other-cel/check-serviceaccount-secrets/.chainsaw-test/chainsaw-test.yaml +++ 
b/other-cel/check-serviceaccount-secrets/.chainsaw-test/chainsaw-test.yaml @@ -1,8 +1,11 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json apiVersion: chainsaw.kyverno.io/v1alpha1 kind: Test metadata: name: check-service-accounts spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/deny-secret-service-account-token-type/.chainsaw-test/chainsaw-test.yaml b/other-cel/deny-secret-service-account-token-type/.chainsaw-test/chainsaw-test.yaml index 701fc765e..b6fd9e46a 100644 --- a/other-cel/deny-secret-service-account-token-type/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/deny-secret-service-account-token-type/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: deny-secret-service-account-token-type spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/disallow-all-secrets/.chainsaw-test/chainsaw-test.yaml b/other-cel/disallow-all-secrets/.chainsaw-test/chainsaw-test.yaml index a735a05e2..1dba881c9 100755 --- a/other-cel/disallow-all-secrets/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/disallow-all-secrets/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-all-secrets spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/disallow-localhost-services/.chainsaw-test/chainsaw-test.yaml b/other-cel/disallow-localhost-services/.chainsaw-test/chainsaw-test.yaml index a6d9212c5..929abb7c9 100755 --- a/other-cel/disallow-localhost-services/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/disallow-localhost-services/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: 
disallow-localhost-services spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/disallow-secrets-from-env-vars/.chainsaw-test/chainsaw-test.yaml b/other-cel/disallow-secrets-from-env-vars/.chainsaw-test/chainsaw-test.yaml index d52d2fab8..2bf63b63b 100755 --- a/other-cel/disallow-secrets-from-env-vars/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/disallow-secrets-from-env-vars/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-secrets-from-env-vars spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/docker-socket-requires-label/.chainsaw-test/chainsaw-test.yaml b/other-cel/docker-socket-requires-label/.chainsaw-test/chainsaw-test.yaml index 0fb756232..524a22b97 100755 --- a/other-cel/docker-socket-requires-label/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/docker-socket-requires-label/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: docker-socket-requires-label spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/enforce-pod-duration/.chainsaw-test/chainsaw-test.yaml b/other-cel/enforce-pod-duration/.chainsaw-test/chainsaw-test.yaml index 18c018e87..8ea615288 100755 --- a/other-cel/enforce-pod-duration/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/enforce-pod-duration/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: enforce-pod-duration spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/ensure-probes-different/.chainsaw-test/chainsaw-test.yaml b/other-cel/ensure-probes-different/.chainsaw-test/chainsaw-test.yaml index 119c554b3..efd20a119 100755 --- 
a/other-cel/ensure-probes-different/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/ensure-probes-different/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: ensure-probes-different spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/ensure-readonly-hostpath/.chainsaw-test/chainsaw-test.yaml b/other-cel/ensure-readonly-hostpath/.chainsaw-test/chainsaw-test.yaml index d6e330389..63f225da1 100755 --- a/other-cel/ensure-readonly-hostpath/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/ensure-readonly-hostpath/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: ensure-readonly-hostpath spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/forbid-cpu-limits/.chainsaw-test/chainsaw-test.yaml b/other-cel/forbid-cpu-limits/.chainsaw-test/chainsaw-test.yaml index 5025169a3..407a15fa3 100755 --- a/other-cel/forbid-cpu-limits/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/forbid-cpu-limits/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: forbid-cpu-limits spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/imagepullpolicy-always/.chainsaw-test/chainsaw-test.yaml b/other-cel/imagepullpolicy-always/.chainsaw-test/chainsaw-test.yaml index aadc8a25e..57f982b6f 100755 --- a/other-cel/imagepullpolicy-always/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/imagepullpolicy-always/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: imagepullpolicy-always spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git 
a/other-cel/ingress-host-match-tls/.chainsaw-test/chainsaw-test.yaml b/other-cel/ingress-host-match-tls/.chainsaw-test/chainsaw-test.yaml index ab409abe4..598ffcf65 100755 --- a/other-cel/ingress-host-match-tls/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/ingress-host-match-tls/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: ingress-host-match-tls spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/limit-containers-per-pod/.chainsaw-test/chainsaw-test.yaml b/other-cel/limit-containers-per-pod/.chainsaw-test/chainsaw-test.yaml index 9722cde91..f8fb80346 100755 --- a/other-cel/limit-containers-per-pod/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/limit-containers-per-pod/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: limit-containers-per-pod spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/limit-hostpath-type-pv/.chainsaw-test/chainsaw-test.yaml b/other-cel/limit-hostpath-type-pv/.chainsaw-test/chainsaw-test.yaml index 1d18ea171..2fbaea93a 100755 --- a/other-cel/limit-hostpath-type-pv/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/limit-hostpath-type-pv/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: limit-hostpath-type-pv spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/memory-requests-equal-limits/.chainsaw-test/chainsaw-test.yaml b/other-cel/memory-requests-equal-limits/.chainsaw-test/chainsaw-test.yaml index a6646a301..bc0c04e69 100755 --- a/other-cel/memory-requests-equal-limits/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/memory-requests-equal-limits/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null 
name: memory-requests-equal-limits spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/metadata-match-regex/.chainsaw-test/chainsaw-test.yaml b/other-cel/metadata-match-regex/.chainsaw-test/chainsaw-test.yaml index 51f02a908..5bca0a41c 100755 --- a/other-cel/metadata-match-regex/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/metadata-match-regex/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: metadata-match-regex spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/pdb-maxunavailable/.chainsaw-test/chainsaw-test.yaml b/other-cel/pdb-maxunavailable/.chainsaw-test/chainsaw-test.yaml index 84811c9fc..6a50f10cb 100755 --- a/other-cel/pdb-maxunavailable/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/pdb-maxunavailable/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: pdb-maxunavailable spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/prevent-bare-pods/.chainsaw-test/chainsaw-test.yaml b/other-cel/prevent-bare-pods/.chainsaw-test/chainsaw-test.yaml index 71881a8c8..e5a3052f4 100755 --- a/other-cel/prevent-bare-pods/.chainsaw-test/chainsaw-test.yaml +++ b/other-cel/prevent-bare-pods/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: prevent-naked-pods spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/other-cel/prevent-cr8escape/.chainsaw-test/chainsaw-test.yaml b/other-cel/prevent-cr8escape/.chainsaw-test/chainsaw-test.yaml index 7816e6083..fa078154f 100755 --- a/other-cel/prevent-cr8escape/.chainsaw-test/chainsaw-test.yaml +++ 
b/other-cel/prevent-cr8escape/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: prevent-cr8escape spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-capabilities/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-capabilities/.chainsaw-test/chainsaw-test.yaml index cce6560d8..9b73b5a5b 100755 --- a/pod-security-cel/baseline/disallow-capabilities/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-capabilities/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-capabilities spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-host-namespaces/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-host-namespaces/.chainsaw-test/chainsaw-test.yaml index 87e85a658..efad5c669 100755 --- a/pod-security-cel/baseline/disallow-host-namespaces/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-host-namespaces/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-host-namespaces spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-host-path/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-host-path/.chainsaw-test/chainsaw-test.yaml index 38f23efe7..f23c82367 100755 --- a/pod-security-cel/baseline/disallow-host-path/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-host-path/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-host-path spec: + # disable templating because it can cause issues with CEL expressions + 
template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-host-ports-range/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-host-ports-range/.chainsaw-test/chainsaw-test.yaml index 886dbce6f..172a4e811 100755 --- a/pod-security-cel/baseline/disallow-host-ports-range/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-host-ports-range/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-host-ports-range spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-host-ports/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-host-ports/.chainsaw-test/chainsaw-test.yaml index 1a96c924c..31558d00d 100755 --- a/pod-security-cel/baseline/disallow-host-ports/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-host-ports/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-host-ports spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-host-process/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-host-process/.chainsaw-test/chainsaw-test.yaml index cde2fc7e2..e5c9e4bac 100755 --- a/pod-security-cel/baseline/disallow-host-process/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-host-process/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-host-process spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-privileged-containers/.chainsaw-test/chainsaw-test.yaml 
b/pod-security-cel/baseline/disallow-privileged-containers/.chainsaw-test/chainsaw-test.yaml index f910a750f..c3e53ad5d 100755 --- a/pod-security-cel/baseline/disallow-privileged-containers/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-privileged-containers/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-privileged-containers spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/disallow-selinux/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/disallow-selinux/.chainsaw-test/chainsaw-test.yaml index e2f0fe20e..111502ae4 100755 --- a/pod-security-cel/baseline/disallow-selinux/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/disallow-selinux/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-selinux spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/restrict-seccomp/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/restrict-seccomp/.chainsaw-test/chainsaw-test.yaml index 78b7a6a97..1d3da5b2a 100755 --- a/pod-security-cel/baseline/restrict-seccomp/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/baseline/restrict-seccomp/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: restrict-seccomp spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/baseline/restrict-sysctls/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/baseline/restrict-sysctls/.chainsaw-test/chainsaw-test.yaml index 4e65167e3..b71ac1a59 100755 --- a/pod-security-cel/baseline/restrict-sysctls/.chainsaw-test/chainsaw-test.yaml +++ 
b/pod-security-cel/baseline/restrict-sysctls/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: restrict-sysctls spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/disallow-capabilities-strict/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/disallow-capabilities-strict/.chainsaw-test/chainsaw-test.yaml index dae67f0d6..369c57496 100755 --- a/pod-security-cel/restricted/disallow-capabilities-strict/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/disallow-capabilities-strict/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-capabilities-strict spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/disallow-privilege-escalation/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/disallow-privilege-escalation/.chainsaw-test/chainsaw-test.yaml index 9a5dd38fd..4298a819d 100755 --- a/pod-security-cel/restricted/disallow-privilege-escalation/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/disallow-privilege-escalation/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: disallow-privilege-escalation spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/require-run-as-non-root-user/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/require-run-as-non-root-user/.chainsaw-test/chainsaw-test.yaml index 0aad0df1c..181d9a737 100755 --- a/pod-security-cel/restricted/require-run-as-non-root-user/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/require-run-as-non-root-user/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: 
creationTimestamp: null name: require-run-as-non-root-user spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/require-run-as-nonroot/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/require-run-as-nonroot/.chainsaw-test/chainsaw-test.yaml index 0cedcf00a..de6e6ad77 100755 --- a/pod-security-cel/restricted/require-run-as-nonroot/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/require-run-as-nonroot/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: require-run-as-nonroot spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/restrict-seccomp-strict/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/restrict-seccomp-strict/.chainsaw-test/chainsaw-test.yaml index 2bfa71adf..54a934167 100755 --- a/pod-security-cel/restricted/restrict-seccomp-strict/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/restrict-seccomp-strict/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: restrict-seccomp-strict spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: diff --git a/pod-security-cel/restricted/restrict-volume-types/.chainsaw-test/chainsaw-test.yaml b/pod-security-cel/restricted/restrict-volume-types/.chainsaw-test/chainsaw-test.yaml index 083c28d2e..a1f934ab1 100755 --- a/pod-security-cel/restricted/restrict-volume-types/.chainsaw-test/chainsaw-test.yaml +++ b/pod-security-cel/restricted/restrict-volume-types/.chainsaw-test/chainsaw-test.yaml @@ -5,6 +5,8 @@ metadata: creationTimestamp: null name: restrict-volume-types spec: + # disable templating because it can cause issues with CEL expressions + template: false steps: - name: step-01 try: From 
c2fc68a11a14c6150f90d04ca23d687e6cb42ee0 Mon Sep 17 00:00:00 2001 From: Chandan-DK Date: Thu, 30 May 2024 10:09:12 +0530 Subject: [PATCH 8/8] feat: add other policies in CEL expressions - Part 4 (#964) * add CI test for other-cel re(c-q) directories Signed-off-by: Chandan-DK * copy require-annotations Signed-off-by: Chandan-DK * add kyverno tests for require-annotations Signed-off-by: Chandan-DK * convert require-annotations to cel Signed-off-by: Chandan-DK * copy require-deployments-have-multiple-replicas Signed-off-by: Chandan-DK * add fail kyverno tests for require-deployments-have-multiple-replica Signed-off-by: Chandan-DK * convert require-deployments-have-multiple-replicas to cel Signed-off-by: Chandan-DK * copy require-image-checksum Signed-off-by: Chandan-DK * convert require-image-checksum to cel Signed-off-by: Chandan-DK * copy require-ingress-https Signed-off-by: Chandan-DK * add kyverno tests for require-ingress-https Signed-off-by: Chandan-DK * convert require-ingress-https to cel Signed-off-by: Chandan-DK * copy require-pod-priorityclassname Signed-off-by: Chandan-DK * convert require-pod-priorityclassname to cel Signed-off-by: Chandan-DK * copy require-qos-burstable Signed-off-by: Chandan-DK * convert require-qos-burstable to cel Signed-off-by: Chandan-DK * copy require-storageclass Signed-off-by: Chandan-DK * convert require-storageclass to cel Signed-off-by: Chandan-DK * rename files for clarity Signed-off-by: Chandan-DK * add a new line at the end of files Signed-off-by: Chandan-DK * update digest Signed-off-by: Chandan-DK * add corresponding kyverno tests in other folder Signed-off-by: Chandan-DK * use has() in cel expression for readability Signed-off-by: Chandan-DK * remove extra whitespace and line Signed-off-by: Chandan-DK * resolve conflicts Signed-off-by: Chandan-DK * remove duplicate tests Signed-off-by: Chandan-DK --------- Signed-off-by: Chandan-DK Co-authored-by: Mariam Fahmy --- .github/workflows/test.yml | 1 + 
.../.chainsaw-test/chainsaw-test.yaml | 39 ++++ .../.chainsaw-test/pod-bad.yaml | 20 ++ .../.chainsaw-test/pod-good.yaml | 11 ++ .../.chainsaw-test/podcontroller-bad.yaml | 39 ++++ .../.chainsaw-test/podcontroller-good.yaml | 41 ++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.kyverno-test/kyverno-test.yaml | 50 +++++ .../require-annotations/artifacthub-pkg.yml | 24 +++ .../require-annotations.yaml | 34 ++++ .../.chainsaw-test/chainsaw-test.yaml | 32 ++++ .../.chainsaw-test/deploy-bad.yaml | 68 +++++++ .../.chainsaw-test/deploy-good.yaml | 46 +++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.kyverno-test/kyverno-test.yaml | 22 +++ .../.kyverno-test/resource.yaml | 41 ++++ .../artifacthub-pkg.yml | 24 +++ ...re-deployments-have-multiple-replicas.yaml | 31 +++ .../.chainsaw-test/chainsaw-test.yaml | 39 ++++ .../.chainsaw-test/pod-bad.yaml | 22 +++ .../.chainsaw-test/pod-good.yaml | 22 +++ .../.chainsaw-test/podcontroller-bad.yaml | 40 ++++ .../.chainsaw-test/podcontroller-good.yaml | 40 ++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.kyverno-test/kyverno-test.yaml | 34 ++++ .../.kyverno-test/resource.yaml | 65 +++++++ .../artifacthub-pkg.yml | 24 +++ .../require-image-checksum.yaml | 31 +++ .../.chainsaw-test/chainsaw-test.yaml | 32 ++++ .../.chainsaw-test/ingress-bad.yaml | 121 ++++++++++++ .../.chainsaw-test/ingress-good.yaml | 59 ++++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.kyverno-test/kyverno-test.yaml | 38 ++++ .../.kyverno-test/resource.yaml | 180 ++++++++++++++++++ .../require-ingress-https/artifacthub-pkg.yml | 24 +++ .../require-ingress-https.yaml | 46 +++++ .../.chainsaw-test/chainsaw-test.yaml | 41 ++++ .../.chainsaw-test/pc.yaml | 8 + .../.chainsaw-test/pod-bad.yaml | 19 ++ .../.chainsaw-test/pod-good.yaml | 10 + .../.chainsaw-test/podcontroller-bad.yaml | 33 ++++ .../.chainsaw-test/podcontroller-good.yaml | 35 ++++ .../.chainsaw-test/policy-ready.yaml | 10 + .../.kyverno-test/kyverno-test.yaml | 22 +++ 
.../.kyverno-test/resource.yaml | 23 +++ .../artifacthub-pkg.yml | 25 +++ .../require-pod-priorityclassname.yaml | 34 ++++ .../.chainsaw-test/chainsaw-test.yaml | 39 ++++ .../.chainsaw-test/pod-bad.yaml | 24 +++ .../.chainsaw-test/pod-good.yaml | 81 ++++++++ .../.chainsaw-test/podcontroller-bad.yaml | 37 ++++ .../.chainsaw-test/podcontroller-good.yaml | 43 +++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.kyverno-test/kyverno-test.yaml | 25 +++ .../.kyverno-test/resource.yaml | 59 ++++++ .../require-qos-burstable/artifacthub-pkg.yml | 25 +++ .../require-qos-burstable.yaml | 38 ++++ .../.chainsaw-test/chainsaw-test.yaml | 39 ++++ .../.chainsaw-test/policy-ready.yaml | 7 + .../.chainsaw-test/pvc-bad.yaml | 18 ++ .../.chainsaw-test/pvc-good.yaml | 18 ++ .../.chainsaw-test/ss-bad.yaml | 34 ++++ .../.chainsaw-test/ss-good.yaml | 47 +++++ .../.kyverno-test/kyverno-test.yaml | 35 ++++ .../.kyverno-test/resource.yaml | 127 ++++++++++++ .../require-storageclass/artifacthub-pkg.yml | 25 +++ .../require-storageclass.yaml | 47 +++++ .../.kyverno-test/kyverno-test.yaml | 50 +++++ .../.kyverno-test/kyverno-test.yaml | 9 +- .../.kyverno-test/resource.yaml | 28 ++- .../.kyverno-test/kyverno-test.yaml | 38 ++++ .../.kyverno-test/resource.yaml | 180 ++++++++++++++++++ 72 files changed, 2698 insertions(+), 10 deletions(-) create mode 100755 other-cel/require-annotations/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-annotations/.chainsaw-test/pod-bad.yaml create mode 100644 other-cel/require-annotations/.chainsaw-test/pod-good.yaml create mode 100644 other-cel/require-annotations/.chainsaw-test/podcontroller-bad.yaml create mode 100644 other-cel/require-annotations/.chainsaw-test/podcontroller-good.yaml create mode 100755 other-cel/require-annotations/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-annotations/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-annotations/artifacthub-pkg.yml create mode 100644 
other-cel/require-annotations/require-annotations.yaml create mode 100755 other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-bad.yaml create mode 100644 other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-good.yaml create mode 100755 other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml create mode 100644 other-cel/require-deployments-have-multiple-replicas/artifacthub-pkg.yml create mode 100644 other-cel/require-deployments-have-multiple-replicas/require-deployments-have-multiple-replicas.yaml create mode 100755 other-cel/require-image-checksum/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-image-checksum/.chainsaw-test/pod-bad.yaml create mode 100644 other-cel/require-image-checksum/.chainsaw-test/pod-good.yaml create mode 100644 other-cel/require-image-checksum/.chainsaw-test/podcontroller-bad.yaml create mode 100644 other-cel/require-image-checksum/.chainsaw-test/podcontroller-good.yaml create mode 100755 other-cel/require-image-checksum/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-image-checksum/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-image-checksum/.kyverno-test/resource.yaml create mode 100644 other-cel/require-image-checksum/artifacthub-pkg.yml create mode 100644 other-cel/require-image-checksum/require-image-checksum.yaml create mode 100755 other-cel/require-ingress-https/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-ingress-https/.chainsaw-test/ingress-bad.yaml create mode 100644 other-cel/require-ingress-https/.chainsaw-test/ingress-good.yaml create mode 100755 
other-cel/require-ingress-https/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-ingress-https/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-ingress-https/.kyverno-test/resource.yaml create mode 100644 other-cel/require-ingress-https/artifacthub-pkg.yml create mode 100644 other-cel/require-ingress-https/require-ingress-https.yaml create mode 100755 other-cel/require-pod-priorityclassname/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/pc.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/pod-bad.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/pod-good.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-bad.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-good.yaml create mode 100644 other-cel/require-pod-priorityclassname/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-pod-priorityclassname/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-pod-priorityclassname/.kyverno-test/resource.yaml create mode 100644 other-cel/require-pod-priorityclassname/artifacthub-pkg.yml create mode 100644 other-cel/require-pod-priorityclassname/require-pod-priorityclassname.yaml create mode 100755 other-cel/require-qos-burstable/.chainsaw-test/chainsaw-test.yaml create mode 100644 other-cel/require-qos-burstable/.chainsaw-test/pod-bad.yaml create mode 100644 other-cel/require-qos-burstable/.chainsaw-test/pod-good.yaml create mode 100644 other-cel/require-qos-burstable/.chainsaw-test/podcontroller-bad.yaml create mode 100644 other-cel/require-qos-burstable/.chainsaw-test/podcontroller-good.yaml create mode 100755 other-cel/require-qos-burstable/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-qos-burstable/.kyverno-test/kyverno-test.yaml create mode 100644 
other-cel/require-qos-burstable/.kyverno-test/resource.yaml create mode 100644 other-cel/require-qos-burstable/artifacthub-pkg.yml create mode 100644 other-cel/require-qos-burstable/require-qos-burstable.yaml create mode 100755 other-cel/require-storageclass/.chainsaw-test/chainsaw-test.yaml create mode 100755 other-cel/require-storageclass/.chainsaw-test/policy-ready.yaml create mode 100644 other-cel/require-storageclass/.chainsaw-test/pvc-bad.yaml create mode 100644 other-cel/require-storageclass/.chainsaw-test/pvc-good.yaml create mode 100644 other-cel/require-storageclass/.chainsaw-test/ss-bad.yaml create mode 100644 other-cel/require-storageclass/.chainsaw-test/ss-good.yaml create mode 100644 other-cel/require-storageclass/.kyverno-test/kyverno-test.yaml create mode 100644 other-cel/require-storageclass/.kyverno-test/resource.yaml create mode 100644 other-cel/require-storageclass/artifacthub-pkg.yml create mode 100644 other-cel/require-storageclass/require-storageclass.yaml create mode 100644 other/require-annotations/.kyverno-test/kyverno-test.yaml create mode 100644 other/require-ingress-https/.kyverno-test/kyverno-test.yaml create mode 100644 other/require-ingress-https/.kyverno-test/resource.yaml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ec3bbc5a5..c97b7ca47 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -55,6 +55,7 @@ jobs: - ^other$/^[m-q] - ^other-cel$/^[m-q] - ^other$/^re[c-q] + - ^other-cel$/^re[c-q] - ^other$/^res - ^other$/^[s-z] - ^pod-security$ diff --git a/other-cel/require-annotations/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-annotations/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..7d0558015 --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,39 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: 
chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-annotations +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-annotations.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-annotations + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: pod-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: pod-bad.yaml + - apply: + file: podcontroller-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: podcontroller-bad.yaml + diff --git a/other-cel/require-annotations/.chainsaw-test/pod-bad.yaml b/other-cel/require-annotations/.chainsaw-test/pod-bad.yaml new file mode 100644 index 000000000..feba52874 --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/pod-bad.yaml @@ -0,0 +1,20 @@ +apiVersion: v1 +kind: Pod +metadata: + annotations: + corp.org/department: "" + name: badpod01 +spec: + containers: + - name: busybox + image: busybox:1.35 +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod02 +spec: + containers: + - name: busybox + image: busybox:1.35 + diff --git a/other-cel/require-annotations/.chainsaw-test/pod-good.yaml b/other-cel/require-annotations/.chainsaw-test/pod-good.yaml new file mode 100644 index 000000000..2b0268b45 --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/pod-good.yaml @@ -0,0 +1,11 @@ +apiVersion: v1 +kind: Pod +metadata: + annotations: + corp.org/department: "foo" + name: goodpod01 +spec: + containers: + - name: busybox + image: busybox:1.35 + diff --git a/other-cel/require-annotations/.chainsaw-test/podcontroller-bad.yaml b/other-cel/require-annotations/.chainsaw-test/podcontroller-bad.yaml new file mode 100644 index 000000000..0b0e44020 --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/podcontroller-bad.yaml @@ -0,0 +1,39 @@ +apiVersion: apps/v1 +kind: Deployment 
+metadata: + labels: + app: busybox + name: baddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + annotations: + corp.org/department: "" + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: badcronjob01 +spec: + schedule: "* * * * *" + jobTemplate: + spec: + template: + metadata: + spec: + containers: + - name: busybox + image: busybox:1.35 + restartPolicy: OnFailure + diff --git a/other-cel/require-annotations/.chainsaw-test/podcontroller-good.yaml b/other-cel/require-annotations/.chainsaw-test/podcontroller-good.yaml new file mode 100644 index 000000000..c3b2c08ab --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/podcontroller-good.yaml @@ -0,0 +1,41 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: gooddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + annotations: + corp.org/department: "foo" + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: goodcronjob01 +spec: + schedule: "* * * * *" + jobTemplate: + spec: + template: + metadata: + annotations: + corp.org/department: "foo" + spec: + containers: + - name: busybox + image: busybox:1.35 + restartPolicy: OnFailure + diff --git a/other-cel/require-annotations/.chainsaw-test/policy-ready.yaml b/other-cel/require-annotations/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..d01618467 --- /dev/null +++ b/other-cel/require-annotations/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-annotations +status: + ready: true + diff --git a/other-cel/require-annotations/.kyverno-test/kyverno-test.yaml b/other-cel/require-annotations/.kyverno-test/kyverno-test.yaml 
new file mode 100644 index 000000000..8cf9c591a --- /dev/null +++ b/other-cel/require-annotations/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,50 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-annotations +policies: +- ../require-annotations.yaml +resources: +- ../.chainsaw-test/pod-bad.yaml +- ../.chainsaw-test/pod-good.yaml +- ../.chainsaw-test/podcontroller-bad.yaml +- ../.chainsaw-test/podcontroller-good.yaml +results: +- kind: Pod + policy: require-annotations + resources: + - badpod01 + - badpod02 + result: fail + rule: check-for-annotation +- kind: Pod + policy: require-annotations + resources: + - goodpod01 + result: pass + rule: check-for-annotation +- kind: Deployment + policy: require-annotations + resources: + - baddeployment01 + result: fail + rule: check-for-annotation +- kind: CronJob + policy: require-annotations + resources: + - badcronjob01 + result: fail + rule: check-for-annotation +- kind: Deployment + policy: require-annotations + resources: + - gooddeployment01 + result: pass + rule: check-for-annotation +- kind: CronJob + policy: require-annotations + resources: + - goodcronjob01 + result: pass + rule: check-for-annotation + diff --git a/other-cel/require-annotations/artifacthub-pkg.yml b/other-cel/require-annotations/artifacthub-pkg.yml new file mode 100644 index 000000000..eee0e31f1 --- /dev/null +++ b/other-cel/require-annotations/artifacthub-pkg.yml @@ -0,0 +1,24 @@ +name: require-annotations-cel +version: 1.0.0 +displayName: Require Annotations in CEL expressions +description: >- + Define and use annotations that identify semantic attributes of your application or Deployment. A common set of annotations allows tools to work collaboratively, describing objects in a common manner that all tools can understand. The recommended annotations describe applications in a way that can be queried. This policy validates that the annotation `corp.org/department` is specified with some value. 
+install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-annotations/require-annotations.yaml + ``` +keywords: + - kyverno + - Other + - CEL Expressions +readme: | + Define and use annotations that identify semantic attributes of your application or Deployment. A common set of annotations allows tools to work collaboratively, describing objects in a common manner that all tools can understand. The recommended annotations describe applications in a way that can be queried. This policy validates that the annotation `corp.org/department` is specified with some value. + + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Other in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Pod, Annotation" +digest: dc8408d4a7a929f2f142b174a2ea06148f4dbd65ab16d70870a2213919dadf9d +createdAt: "2024-04-09T15:56:35Z" + diff --git a/other-cel/require-annotations/require-annotations.yaml b/other-cel/require-annotations/require-annotations.yaml new file mode 100644 index 000000000..1ff29c235 --- /dev/null +++ b/other-cel/require-annotations/require-annotations.yaml @@ -0,0 +1,34 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-annotations + annotations: + policies.kyverno.io/title: Require Annotations in CEL expressions + policies.kyverno.io/category: Other in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: Pod, Annotation + kyverno.io/kyverno-version: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + Define and use annotations that identify semantic attributes of your application or Deployment. + A common set of annotations allows tools to work collaboratively, describing objects in a common manner that + all tools can understand. 
The recommended annotations describe applications in a way that can be + queried. This policy validates that the annotation `corp.org/department` is specified with some value. +spec: + validationFailureAction: Audit + background: true + rules: + - name: check-for-annotation + match: + any: + - resources: + kinds: + - Pod + validate: + cel: + expressions: + - expression: >- + has(object.metadata.annotations) && + 'corp.org/department' in object.metadata.annotations && object.metadata.annotations['corp.org/department'] != '' + message: "The annotation `corp.org/department` is required." + diff --git a/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..4fdd0c552 --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,32 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-deployments-have-multiple-replicas +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-deployments-have-multiple-replicas.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: deployment-has-multiple-replicas + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: deploy-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: deploy-bad.yaml + diff --git a/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-bad.yaml b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-bad.yaml new file mode 100644 index 000000000..fe9b249f6 --- /dev/null +++ 
b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-bad.yaml @@ -0,0 +1,68 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: baddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: baddeployment02 +spec: + replicas: 0 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: baddeployment03 +spec: + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox:1.35 + diff --git a/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-good.yaml b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-good.yaml new file mode 100644 index 000000000..40e56f064 --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/deploy-good.yaml @@ -0,0 +1,46 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: gooddeployment01 +spec: + replicas: 2 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox:1.35 +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: gooddeployment02 +spec: + replicas: 3 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + 
app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox:1.35 + diff --git a/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/policy-ready.yaml b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..c9b84bffe --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: deployment-has-multiple-replicas +status: + ready: true + diff --git a/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml b/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..07bccb8b6 --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,22 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: deployment-has-multiple-replicas +policies: +- ../require-deployments-have-multiple-replicas.yaml +resources: +- resource.yaml +results: +- kind: Deployment + policy: deployment-has-multiple-replicas + resources: + - mydeploygood + result: pass + rule: deployment-has-multiple-replicas +- kind: Deployment + policy: deployment-has-multiple-replicas + resources: + - mydeploybad + result: fail + rule: deployment-has-multiple-replicas + diff --git a/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml b/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml new file mode 100644 index 000000000..6fc0aa939 --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml @@ -0,0 +1,41 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mydeploygood +spec: + replicas: 2 + selector: + matchLabels: + app: myapp + template: + metadata: + labels: + app: myapp + spec: + 
containers: + - name: nginx + image: nginx + ports: + - containerPort: 80 + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mydeploybad +spec: + replicas: 1 + selector: + matchLabels: + app: myapp + template: + metadata: + labels: + app: myapp + spec: + containers: + - name: nginx + image: nginx + ports: + - containerPort: 80 + diff --git a/other-cel/require-deployments-have-multiple-replicas/artifacthub-pkg.yml b/other-cel/require-deployments-have-multiple-replicas/artifacthub-pkg.yml new file mode 100644 index 000000000..bb5f2ecbb --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/artifacthub-pkg.yml @@ -0,0 +1,24 @@ +name: require-deployments-have-multiple-replicas-cel +version: 1.0.0 +displayName: Require Multiple Replicas in CEL expressions +description: >- + Deployments with a single replica cannot be highly available and thus the application may suffer downtime if that one replica goes down. This policy validates that Deployments have more than one replica. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-deployments-have-multiple-replicas/require-deployments-have-multiple-replicas.yaml + ``` +keywords: + - kyverno + - Sample + - CEL Expressions +readme: | + Deployments with a single replica cannot be highly available and thus the application may suffer downtime if that one replica goes down. This policy validates that Deployments have more than one replica. 
+ + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Sample in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Deployment" +digest: 5c1e1b6bdb837cfba211615438cd50fa38b78c559ae43f4a791f5558f873b5d3 +createdAt: "2024-04-09T16:03:47Z" + diff --git a/other-cel/require-deployments-have-multiple-replicas/require-deployments-have-multiple-replicas.yaml b/other-cel/require-deployments-have-multiple-replicas/require-deployments-have-multiple-replicas.yaml new file mode 100644 index 000000000..5eaf97659 --- /dev/null +++ b/other-cel/require-deployments-have-multiple-replicas/require-deployments-have-multiple-replicas.yaml @@ -0,0 +1,31 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: deployment-has-multiple-replicas + annotations: + policies.kyverno.io/title: Require Multiple Replicas in CEL expressions + policies.kyverno.io/category: Sample in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: Deployment + policies.kyverno.io/minversion: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + Deployments with a single replica cannot be highly available and thus the application + may suffer downtime if that one replica goes down. This policy validates that Deployments + have more than one replica. +spec: + validationFailureAction: Audit + background: true + rules: + - name: deployment-has-multiple-replicas + match: + any: + - resources: + kinds: + - Deployment + validate: + cel: + expressions: + - expression: "object.spec.replicas > 1" + message: "Deployments should have more than one replica to ensure availability." 
+ diff --git a/other-cel/require-image-checksum/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-image-checksum/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..24fd96589 --- /dev/null +++ b/other-cel/require-image-checksum/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,39 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-image-checksum +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-image-checksum.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-image-checksum + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: pod-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: pod-bad.yaml + - apply: + file: podcontroller-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: podcontroller-bad.yaml + diff --git a/other-cel/require-image-checksum/.chainsaw-test/pod-bad.yaml b/other-cel/require-image-checksum/.chainsaw-test/pod-bad.yaml new file mode 100644 index 000000000..40f2785ec --- /dev/null +++ b/other-cel/require-image-checksum/.chainsaw-test/pod-bad.yaml @@ -0,0 +1,22 @@ +apiVersion: v1 +kind: Pod +metadata: + name: badpod01 +spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox02 + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod02 +spec: + containers: + - name: busybox + image: busybox + - name: bb + image: busybox:latest + diff --git a/other-cel/require-image-checksum/.chainsaw-test/pod-good.yaml b/other-cel/require-image-checksum/.chainsaw-test/pod-good.yaml new file mode 100644 index 000000000..926cef9f4 --- /dev/null +++ 
b/other-cel/require-image-checksum/.chainsaw-test/pod-good.yaml @@ -0,0 +1,22 @@ +apiVersion: v1 +kind: Pod +metadata: + name: goodpod01 +spec: + containers: + - name: busybox + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc + - name: busybox02 + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod02 +spec: + containers: + - name: busybox + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc + - name: nginx + image: nginx@sha256:1bb5c4b86cb7c1e9f0209611dc2135d8a2c1c3a6436163970c99193787d067ea + diff --git a/other-cel/require-image-checksum/.chainsaw-test/podcontroller-bad.yaml b/other-cel/require-image-checksum/.chainsaw-test/podcontroller-bad.yaml new file mode 100644 index 000000000..eac23fb1b --- /dev/null +++ b/other-cel/require-image-checksum/.chainsaw-test/podcontroller-bad.yaml @@ -0,0 +1,40 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: baddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: nginx + image: nginx@sha256:1bb5c4b86cb7c1e9f0209611dc2135d8a2c1c3a6436163970c99193787d067ea +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: badcronjob01 +spec: + schedule: "* * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: busybox + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc + - name: bb + image: busybox:latest + restartPolicy: OnFailure + diff --git a/other-cel/require-image-checksum/.chainsaw-test/podcontroller-good.yaml b/other-cel/require-image-checksum/.chainsaw-test/podcontroller-good.yaml new file mode 100644 index 000000000..05e49a52c --- /dev/null +++ 
b/other-cel/require-image-checksum/.chainsaw-test/podcontroller-good.yaml @@ -0,0 +1,40 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + labels: + app: busybox + name: gooddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: busybox + strategy: {} + template: + metadata: + labels: + app: busybox + spec: + containers: + - name: busybox + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc + - name: nginx + image: nginx@sha256:1bb5c4b86cb7c1e9f0209611dc2135d8a2c1c3a6436163970c99193787d067ea +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: goodcronjob01 +spec: + schedule: "* * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: busybox + image: busybox@sha256:67a8ef886e2ca4055f00e7cd13aedb9b24148c1451a6832d16fcc997a157eedc + - name: nginx + image: nginx@sha256:1bb5c4b86cb7c1e9f0209611dc2135d8a2c1c3a6436163970c99193787d067ea + restartPolicy: OnFailure + diff --git a/other-cel/require-image-checksum/.chainsaw-test/policy-ready.yaml b/other-cel/require-image-checksum/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..af8d87a62 --- /dev/null +++ b/other-cel/require-image-checksum/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-image-checksum +status: + ready: true + diff --git a/other-cel/require-image-checksum/.kyverno-test/kyverno-test.yaml b/other-cel/require-image-checksum/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..c8e21c333 --- /dev/null +++ b/other-cel/require-image-checksum/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,34 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-image-checksum +policies: +- ../require-image-checksum.yaml +resources: +- resource.yaml +results: +- kind: CronJob + policy: require-image-checksum + resources: + - hello + result: fail + rule: require-image-checksum +- kind: Pod + policy: require-image-checksum 
+ resources: + - myapp-pod-2 + result: fail + rule: require-image-checksum +- kind: Deployment + policy: require-image-checksum + resources: + - mydeploy + result: pass + rule: require-image-checksum +- kind: Pod + policy: require-image-checksum + resources: + - myapp-pod-1 + result: pass + rule: require-image-checksum + diff --git a/other-cel/require-image-checksum/.kyverno-test/resource.yaml b/other-cel/require-image-checksum/.kyverno-test/resource.yaml new file mode 100644 index 000000000..1b0a8ab89 --- /dev/null +++ b/other-cel/require-image-checksum/.kyverno-test/resource.yaml @@ -0,0 +1,65 @@ +apiVersion: v1 +kind: Pod +metadata: + name: myapp-pod-1 + labels: + app: myapp-1 +spec: + containers: + - name: nginx + image: nginx@sha256:353c20f74d9b6aee359f30e8e4f69c3d7eaea2f610681c4a95849a2fd7c497f9 + +--- +apiVersion: v1 +kind: Pod +metadata: + name: myapp-pod-2 + labels: + app: myapp-2 +spec: + containers: + - name: nginx + image: nginx + +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mydeploy +spec: + replicas: 2 + selector: + matchLabels: + app: myapp + template: + metadata: + labels: + app: myapp + spec: + containers: + - name: nginx + image: nginx@sha256:353c20f74d9b6aee359f30e8e4f69c3d7eaea2f610681c4a95849a2fd7c497f9 + ports: + - containerPort: 80 + +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: hello +spec: + schedule: "*/1 * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: hello + image: busybox + imagePullPolicy: IfNotPresent + command: + - /bin/sh + - -c + - date; echo Hello from the Kubernetes cluster + restartPolicy: OnFailure + diff --git a/other-cel/require-image-checksum/artifacthub-pkg.yml b/other-cel/require-image-checksum/artifacthub-pkg.yml new file mode 100644 index 000000000..0ebbe4a86 --- /dev/null +++ b/other-cel/require-image-checksum/artifacthub-pkg.yml @@ -0,0 +1,24 @@ +name: require-image-checksum-cel +version: 1.0.0 +displayName: Require Images Use Checksums in CEL expressions 
+description: >- + Use of a SHA checksum when pulling an image is often preferable because tags are mutable and can be overwritten. This policy checks to ensure that all images use SHA checksums rather than tags. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-image-checksum/require-image-checksum.yaml + ``` +keywords: + - kyverno + - Sample + - CEL Expressions +readme: | + Use of a SHA checksum when pulling an image is often preferable because tags are mutable and can be overwritten. This policy checks to ensure that all images use SHA checksums rather than tags. + + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Sample in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Pod" +digest: e9c64e577e3f4d255489ce34191ee29c4180fae774a60eceaff1d12c1e716891 +createdAt: "2024-04-10T18:21:59Z" + diff --git a/other-cel/require-image-checksum/require-image-checksum.yaml b/other-cel/require-image-checksum/require-image-checksum.yaml new file mode 100644 index 000000000..43f87b64c --- /dev/null +++ b/other-cel/require-image-checksum/require-image-checksum.yaml @@ -0,0 +1,31 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-image-checksum + annotations: + policies.kyverno.io/title: Require Images Use Checksums in CEL expressions + policies.kyverno.io/category: Sample in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: Pod + policies.kyverno.io/minversion: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + Use of a SHA checksum when pulling an image is often preferable because tags + are mutable and can be overwritten. This policy checks to ensure that all images + use SHA checksums rather than tags. 
+spec: + validationFailureAction: Audit + background: true + rules: + - name: require-image-checksum + match: + any: + - resources: + kinds: + - Pod + validate: + cel: + expressions: + - expression: "object.spec.containers.all(container, container.image.contains('@'))" + message: "Images must use checksums rather than tags." + diff --git a/other-cel/require-ingress-https/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-ingress-https/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..1ea002383 --- /dev/null +++ b/other-cel/require-ingress-https/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,32 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-ingress-https +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-ingress-https.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-ingress-https + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: ingress-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: ingress-bad.yaml + diff --git a/other-cel/require-ingress-https/.chainsaw-test/ingress-bad.yaml b/other-cel/require-ingress-https/.chainsaw-test/ingress-bad.yaml new file mode 100644 index 000000000..b48c26ecc --- /dev/null +++ b/other-cel/require-ingress-https/.chainsaw-test/ingress-bad.yaml @@ -0,0 +1,121 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "true" + name: badingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + http: + paths: + - backend: + service: + name: demo-svc + port: + number: 8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: 
networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress02 + annotations: + kyverno.io/foo: bar +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress03 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: badingress04 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + diff --git a/other-cel/require-ingress-https/.chainsaw-test/ingress-good.yaml b/other-cel/require-ingress-https/.chainsaw-test/ingress-good.yaml new file mode 100644 index 000000000..151a757ec --- /dev/null +++ b/other-cel/require-ingress-https/.chainsaw-test/ingress-good.yaml @@ -0,0 +1,59 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: goodingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + https: + paths: + - backend: + service: + name: demo-svc + port: + number: 
8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kubernetes.io/ingress.allow-http: "false" + kyverno.io/foo: bar + name: goodingress02 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 + diff --git a/other-cel/require-ingress-https/.chainsaw-test/policy-ready.yaml b/other-cel/require-ingress-https/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..77b55948d --- /dev/null +++ b/other-cel/require-ingress-https/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-ingress-https +status: + ready: true + diff --git a/other-cel/require-ingress-https/.kyverno-test/kyverno-test.yaml b/other-cel/require-ingress-https/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..530b4fe66 --- /dev/null +++ b/other-cel/require-ingress-https/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,38 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-ingress-https +policies: +- ../require-ingress-https.yaml +resources: +- resource.yaml +results: +- kind: Ingress + policy: require-ingress-https + resources: + - goodingress01 + - goodingress02 + result: pass + rule: has-annotation +- kind: Ingress + policy: require-ingress-https + resources: + - goodingress01 + - goodingress02 + result: pass + rule: has-tls +- kind: Ingress + policy: require-ingress-https + resources: + - badingress01 + - badingress02 + - badingress03 + result: fail + rule: has-annotation +- kind: Ingress + policy: require-ingress-https + resources: + - badingress04 + result: fail + rule: has-tls + 
diff --git a/other-cel/require-ingress-https/.kyverno-test/resource.yaml b/other-cel/require-ingress-https/.kyverno-test/resource.yaml new file mode 100644 index 000000000..a97cba4e7 --- /dev/null +++ b/other-cel/require-ingress-https/.kyverno-test/resource.yaml @@ -0,0 +1,180 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "true" + name: badingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + http: + paths: + - backend: + service: + name: demo-svc + port: + number: 8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress02 + annotations: + kyverno.io/foo: bar +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress03 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: badingress04 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + 
pathType: Prefix + backend: + service: + name: test + port: + number: 80 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: goodingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + https: + paths: + - backend: + service: + name: demo-svc + port: + number: 8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kubernetes.io/ingress.allow-http: "false" + kyverno.io/foo: bar + name: goodingress02 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 + diff --git a/other-cel/require-ingress-https/artifacthub-pkg.yml b/other-cel/require-ingress-https/artifacthub-pkg.yml new file mode 100644 index 000000000..711e1b678 --- /dev/null +++ b/other-cel/require-ingress-https/artifacthub-pkg.yml @@ -0,0 +1,24 @@ +name: require-ingress-https-cel +version: 1.0.0 +displayName: Require Ingress HTTPS in CEL expressions +description: >- + Ingress resources should only allow secure traffic by disabling HTTP and therefore only allowing HTTPS. This policy requires that all Ingress resources set the annotation `kubernetes.io/ingress.allow-http` to `"false"` and specify TLS in the spec. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-ingress-https/require-ingress-https.yaml + ``` +keywords: + - kyverno + - Other + - CEL Expressions +readme: | + Ingress resources should only allow secure traffic by disabling HTTP and therefore only allowing HTTPS. 
This policy requires that all Ingress resources set the annotation `kubernetes.io/ingress.allow-http` to `"false"` and specify TLS in the spec. + + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Other in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Ingress" +digest: fb50d3603fbd348e84ce2e64c06e313c9c028daa640893f3c95a9e28c27687c0 +createdAt: "2024-04-10T18:31:27Z" + diff --git a/other-cel/require-ingress-https/require-ingress-https.yaml b/other-cel/require-ingress-https/require-ingress-https.yaml new file mode 100644 index 000000000..e2d8d866c --- /dev/null +++ b/other-cel/require-ingress-https/require-ingress-https.yaml @@ -0,0 +1,46 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-ingress-https + annotations: + policies.kyverno.io/title: Require Ingress HTTPS in CEL expressions + policies.kyverno.io/category: Other in CEL + policies.kyverno.io/severity: medium + kyverno.io/kyverno-version: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/subject: Ingress + policies.kyverno.io/description: >- + Ingress resources should only allow secure traffic by disabling + HTTP and therefore only allowing HTTPS. This policy requires that all + Ingress resources set the annotation `kubernetes.io/ingress.allow-http` to + `"false"` and specify TLS in the spec. +spec: + background: true + validationFailureAction: Audit + rules: + - name: has-annotation + match: + any: + - resources: + kinds: + - Ingress + validate: + cel: + expressions: + - expression: >- + has(object.metadata.annotations) && + 'kubernetes.io/ingress.allow-http' in object.metadata.annotations && + object.metadata.annotations['kubernetes.io/ingress.allow-http'] == 'false' + message: "The kubernetes.io/ingress.allow-http annotation must be set to false." 
+ - name: has-tls + match: + any: + - resources: + kinds: + - Ingress + validate: + cel: + expressions: + - expression: "has(object.spec.tls)" + message: "TLS must be defined." + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..71fb21d54 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,41 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-pod-priorityclassname +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-pod-priorityclassname.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-pod-priorityclassname + spec: + validationFailureAction: Enforce + - apply: + file: pc.yaml + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: pod-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: pod-bad.yaml + - apply: + file: podcontroller-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: podcontroller-bad.yaml + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/pc.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/pc.yaml new file mode 100644 index 000000000..8128d99f9 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/pc.yaml @@ -0,0 +1,8 @@ +apiVersion: scheduling.k8s.io/v1 +kind: PriorityClass +metadata: + name: high +value: 1234 +globalDefault: false +description: "This priority class should be used for XYZ service pods only." 
+ diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-bad.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-bad.yaml new file mode 100644 index 000000000..87f71c387 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-bad.yaml @@ -0,0 +1,19 @@ +apiVersion: v1 +kind: Pod +metadata: + name: badpod01 +spec: + containers: + - name: busybox + image: busybox:1.35 + priorityClassName: "" +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod02 +spec: + containers: + - name: busybox + image: busybox:1.35 + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-good.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-good.yaml new file mode 100644 index 000000000..e2d653623 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/pod-good.yaml @@ -0,0 +1,10 @@ +apiVersion: v1 +kind: Pod +metadata: + name: goodpod01 +spec: + containers: + - name: busybox + image: busybox:1.35 + priorityClassName: high + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-bad.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-bad.yaml new file mode 100644 index 000000000..84125da65 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-bad.yaml @@ -0,0 +1,33 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: baddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: app + template: + metadata: + labels: + app: app + spec: + containers: + - name: busybox + image: busybox:1.35 +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: badcronjob01 +spec: + schedule: "*/1 * * * *" + jobTemplate: + spec: + template: + spec: + restartPolicy: OnFailure + containers: + - name: busybox + image: busybox:1.35 + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-good.yaml 
b/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-good.yaml new file mode 100644 index 000000000..006d70fb8 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/podcontroller-good.yaml @@ -0,0 +1,35 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: gooddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + app: app + template: + metadata: + labels: + app: app + spec: + containers: + - name: busybox + image: busybox:1.35 + priorityClassName: high +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: goodcronjob01 +spec: + schedule: "*/1 * * * *" + jobTemplate: + spec: + template: + spec: + restartPolicy: OnFailure + containers: + - name: busybox + image: busybox:1.35 + priorityClassName: high + diff --git a/other-cel/require-pod-priorityclassname/.chainsaw-test/policy-ready.yaml b/other-cel/require-pod-priorityclassname/.chainsaw-test/policy-ready.yaml new file mode 100644 index 000000000..45ee903fd --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,10 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-pod-priorityclassname +status: + conditions: + - reason: Succeeded + status: "True" + type: Ready + diff --git a/other-cel/require-pod-priorityclassname/.kyverno-test/kyverno-test.yaml b/other-cel/require-pod-priorityclassname/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..525314f70 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,22 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-pod-priorityclassname +policies: +- ../require-pod-priorityclassname.yaml +resources: +- resource.yaml +results: +- kind: Pod + policy: require-pod-priorityclassname + resources: + - badpod01 + result: fail + rule: check-priorityclassname +- kind: Pod + policy: require-pod-priorityclassname + resources: + - goodpod01 + result: pass + rule: 
check-priorityclassname + diff --git a/other-cel/require-pod-priorityclassname/.kyverno-test/resource.yaml b/other-cel/require-pod-priorityclassname/.kyverno-test/resource.yaml new file mode 100644 index 000000000..43c3c3d46 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/.kyverno-test/resource.yaml @@ -0,0 +1,23 @@ +apiVersion: v1 +kind: Pod +metadata: + name: goodpod01 + labels: + app: myapp +spec: + priorityClassName: foo + containers: + - name: goproxy + image: registry.k8s.io/goproxy:0.1 +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod01 + labels: + app: myapp +spec: + containers: + - name: goproxy + image: registry.k8s.io/goproxy:0.1 + diff --git a/other-cel/require-pod-priorityclassname/artifacthub-pkg.yml b/other-cel/require-pod-priorityclassname/artifacthub-pkg.yml new file mode 100644 index 000000000..20700714c --- /dev/null +++ b/other-cel/require-pod-priorityclassname/artifacthub-pkg.yml @@ -0,0 +1,25 @@ +name: require-pod-priorityclassname-cel +version: 1.0.0 +displayName: Require Pod priorityClassName in CEL expressions +description: >- + A Pod may optionally specify a priorityClassName which indicates the scheduling priority relative to others. This requires creation of a PriorityClass object in advance. With this created, a Pod may set this field to that value. In a multi-tenant environment, it is often desired to require this priorityClassName be set to make certain tenant scheduling guarantees. This policy requires that a Pod defines the priorityClassName field with some value. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-pod-priorityclassname/require-pod-priorityclassname.yaml + ``` +keywords: + - kyverno + - Multi-Tenancy + - EKS Best Practices + - CEL Expressions +readme: | + A Pod may optionally specify a priorityClassName which indicates the scheduling priority relative to others. This requires creation of a PriorityClass object in advance. 
With this created, a Pod may set this field to that value. In a multi-tenant environment, it is often desired to require this priorityClassName be set to make certain tenant scheduling guarantees. This policy requires that a Pod defines the priorityClassName field with some value. + + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Multi-Tenancy, EKS Best Practices in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Pod" +digest: dad8ffaa48df075faa27733ac7a6a88ac644864bc1a1a5693ddb443775148279 +createdAt: "2024-04-11T17:46:06Z" + diff --git a/other-cel/require-pod-priorityclassname/require-pod-priorityclassname.yaml b/other-cel/require-pod-priorityclassname/require-pod-priorityclassname.yaml new file mode 100644 index 000000000..f8a37b3a0 --- /dev/null +++ b/other-cel/require-pod-priorityclassname/require-pod-priorityclassname.yaml @@ -0,0 +1,34 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-pod-priorityclassname + annotations: + policies.kyverno.io/title: Require Pod priorityClassName in CEL expressions + policies.kyverno.io/category: Multi-Tenancy, EKS Best Practices in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: Pod + kyverno.io/kyverno-version: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + A Pod may optionally specify a priorityClassName which indicates the scheduling + priority relative to others. This requires creation of a PriorityClass object in advance. + With this created, a Pod may set this field to that value. In a multi-tenant environment, + it is often desired to require this priorityClassName be set to make certain tenant + scheduling guarantees. This policy requires that a Pod defines the priorityClassName field + with some value. 
+spec: + validationFailureAction: Audit + background: true + rules: + - name: check-priorityclassname + match: + any: + - resources: + kinds: + - Pod + validate: + cel: + expressions: + - expression: "has(object.spec.priorityClassName) && object.spec.priorityClassName != ''" + message: "Pods must define the priorityClassName field." + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-qos-burstable/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..e6092e657 --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,39 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-qos-burstable +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-qos-burstable.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-qos-burstable + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: pod-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: pod-bad.yaml + - apply: + file: podcontroller-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: podcontroller-bad.yaml + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/pod-bad.yaml b/other-cel/require-qos-burstable/.chainsaw-test/pod-bad.yaml new file mode 100644 index 000000000..022973982 --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/pod-bad.yaml @@ -0,0 +1,24 @@ +apiVersion: v1 +kind: Pod +metadata: + name: badpod01 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod02 + labels: + app: myapp +spec: + containers: + - name: busybox + image: 
busybox:1.35 + - name: busybox-again + image: busybox:1.35 + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/pod-good.yaml b/other-cel/require-qos-burstable/.chainsaw-test/pod-good.yaml new file mode 100644 index 000000000..dffe62ec2 --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/pod-good.yaml @@ -0,0 +1,81 @@ +apiVersion: v1 +kind: Pod +metadata: + name: goodpod01 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 + resources: + requests: + memory: "50Mi" + cpu: "100m" + limits: + memory: "100Mi" +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod02 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox-again + image: busybox:1.35 + resources: + limits: + memory: "100Mi" +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod03 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 + resources: + requests: + memory: "100Mi" + - name: busybox-again + image: busybox:1.35 +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod04 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox-again + image: busybox:1.35 + resources: + requests: + cpu: "1" +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod05 + labels: + app: myapp +spec: + containers: + - name: busybox + image: busybox:1.35 + resources: + limits: + cpu: "1" + - name: busybox-again + image: busybox:1.35 + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-bad.yaml b/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-bad.yaml new file mode 100644 index 000000000..2f64b134f --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-bad.yaml @@ -0,0 +1,37 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: baddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + foo: bar + template: + metadata: + labels: + foo: bar + spec: + containers: + 
- name: busybox + image: busybox:1.35 + - name: busybox-again + image: busybox:1.35 +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: badcronjob01 +spec: + schedule: "*/1 * * * *" + jobTemplate: + spec: + template: + spec: + restartPolicy: OnFailure + containers: + - name: busybox + image: busybox:1.35 + - name: busybox-again + image: busybox:1.35 + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-good.yaml b/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-good.yaml new file mode 100644 index 000000000..02687caa1 --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/podcontroller-good.yaml @@ -0,0 +1,43 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: gooddeployment01 +spec: + replicas: 1 + selector: + matchLabels: + foo: bar + template: + metadata: + labels: + foo: bar + spec: + containers: + - name: busybox + image: busybox:1.35 + - name: busybox-again + image: busybox:1.35 + resources: + limits: + memory: "100Mi" +--- +apiVersion: batch/v1 +kind: CronJob +metadata: + name: goodcronjob01 +spec: + schedule: "*/1 * * * *" + jobTemplate: + spec: + template: + spec: + restartPolicy: OnFailure + containers: + - name: busybox + image: busybox:1.35 + resources: + requests: + cpu: "1" + - name: busybox-again + image: busybox:1.35 + diff --git a/other-cel/require-qos-burstable/.chainsaw-test/policy-ready.yaml b/other-cel/require-qos-burstable/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..100b07c2d --- /dev/null +++ b/other-cel/require-qos-burstable/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-qos-burstable +status: + ready: true + diff --git a/other-cel/require-qos-burstable/.kyverno-test/kyverno-test.yaml b/other-cel/require-qos-burstable/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..c76aef176 --- /dev/null +++ 
b/other-cel/require-qos-burstable/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,25 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-qos-burstable +policies: +- ../require-qos-burstable.yaml +resources: +- resource.yaml +results: +- kind: Pod + policy: require-qos-burstable + resources: + - badpod01 + - badpod02 + result: fail + rule: burstable +- kind: Pod + policy: require-qos-burstable + resources: + - goodpod01 + - goodpod02 + - goodpod03 + result: pass + rule: burstable + diff --git a/other-cel/require-qos-burstable/.kyverno-test/resource.yaml b/other-cel/require-qos-burstable/.kyverno-test/resource.yaml new file mode 100644 index 000000000..1d2a59eac --- /dev/null +++ b/other-cel/require-qos-burstable/.kyverno-test/resource.yaml @@ -0,0 +1,59 @@ +apiVersion: v1 +kind: Pod +metadata: + name: goodpod01 +spec: + containers: + - name: qos-demo-ctr + image: thisdoesnotexist:1.1.1 + resources: + requests: + memory: "200Mi" +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod02 +spec: + containers: + - name: qos-demo-ctr + image: thisdoesnotexist:1.1.1 + resources: + limits: + memory: "200Mi" +--- +apiVersion: v1 +kind: Pod +metadata: + name: goodpod03 +spec: + containers: + - name: qos-demo-ctr + image: thisdoesnotexist:1.1.1 + resources: + requests: + memory: "200Mi" + cpu: "700m" + - name: seconddemo + image: thisdoesnotexist:1.1.1 +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod01 +spec: + containers: + - name: qos-demo-ctr + image: thisdoesnotexist:1.1.1 +--- +apiVersion: v1 +kind: Pod +metadata: + name: badpod02 +spec: + containers: + - name: qos-demo-ctr + image: thisdoesnotexist:1.1.1 + - name: second + image: thisdoesnotexist:1.1.1 + diff --git a/other-cel/require-qos-burstable/artifacthub-pkg.yml b/other-cel/require-qos-burstable/artifacthub-pkg.yml new file mode 100644 index 000000000..f80147354 --- /dev/null +++ b/other-cel/require-qos-burstable/artifacthub-pkg.yml @@ -0,0 +1,25 @@ +name: require-qos-burstable-cel 
+version: 1.0.0 +displayName: Require QoS Burstable in CEL expressions +description: >- + Pod Quality of Service (QoS) is a mechanism to ensure Pods receive certain priority guarantees based upon the resources they define. When a Pod has at least one container which defines either requests or limits for either memory or CPU, Kubernetes grants the QoS class as burstable if it does not otherwise qualify for a QoS class of guaranteed. This policy requires that a Pod meet the criteria to qualify for a QoS of burstable. This policy is provided with the intention that users will need to control its scope by using exclusions, preconditions, and other policy language mechanisms. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-qos-burstable/require-qos-burstable.yaml + ``` +keywords: + - kyverno + - Other + - Multi-Tenancy + - CEL Expressions +readme: | + Pod Quality of Service (QoS) is a mechanism to ensure Pods receive certain priority guarantees based upon the resources they define. When a Pod has at least one container which defines either requests or limits for either memory or CPU, Kubernetes grants the QoS class as burstable if it does not otherwise qualify for a QoS class of guaranteed. This policy requires that a Pod meet the criteria to qualify for a QoS of burstable. This policy is provided with the intention that users will need to control its scope by using exclusions, preconditions, and other policy language mechanisms. 
+ + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Other, Multi-Tenancy in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "Pod" +digest: 3e93eb4eee26bb198811f9441bca6ef58dfb848e6446ccb38156b534a16fe16b +createdAt: "2024-04-11T17:54:50Z" + diff --git a/other-cel/require-qos-burstable/require-qos-burstable.yaml b/other-cel/require-qos-burstable/require-qos-burstable.yaml new file mode 100644 index 000000000..329007c28 --- /dev/null +++ b/other-cel/require-qos-burstable/require-qos-burstable.yaml @@ -0,0 +1,38 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-qos-burstable + annotations: + policies.kyverno.io/title: Require QoS Burstable in CEL expressions + policies.kyverno.io/category: Other, Multi-Tenancy in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: Pod + kyverno.io/kyverno-version: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + Pod Quality of Service (QoS) is a mechanism to ensure Pods receive certain + priority guarantees based upon the resources they define. When a Pod has at least + one container which defines either requests or limits for either memory or CPU, + Kubernetes grants the QoS class as burstable if it does not otherwise qualify for a QoS class of guaranteed. + This policy requires that a Pod meet the criteria to qualify for a QoS of burstable. + This policy is provided with the intention that users will need to control its scope by using + exclusions, preconditions, and other policy language mechanisms. 
+spec: + validationFailureAction: Audit + background: true + rules: + - name: burstable + match: + any: + - resources: + kinds: + - Pod + validate: + cel: + expressions: + - expression: >- + object.spec.containers.exists(container, + has(container.resources) && + (has(container.resources.requests) || has(container.resources.limits))) + message: "At least one container in the Pod must define either requests or limits for either CPU or memory." + diff --git a/other-cel/require-storageclass/.chainsaw-test/chainsaw-test.yaml b/other-cel/require-storageclass/.chainsaw-test/chainsaw-test.yaml new file mode 100755 index 000000000..e27410e3e --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/chainsaw-test.yaml @@ -0,0 +1,39 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/kyverno/chainsaw/main/.schemas/json/test-chainsaw-v1alpha1.json +apiVersion: chainsaw.kyverno.io/v1alpha1 +kind: Test +metadata: + creationTimestamp: null + name: require-storageclass +spec: + steps: + - name: step-01 + try: + - apply: + file: ../require-storageclass.yaml + - patch: + resource: + apiVersion: kyverno.io/v1 + kind: ClusterPolicy + metadata: + name: require-storageclass + spec: + validationFailureAction: Enforce + - assert: + file: policy-ready.yaml + - name: step-02 + try: + - apply: + file: ss-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: ss-bad.yaml + - apply: + file: pvc-good.yaml + - apply: + expect: + - check: + ($error != null): true + file: pvc-bad.yaml + diff --git a/other-cel/require-storageclass/.chainsaw-test/policy-ready.yaml b/other-cel/require-storageclass/.chainsaw-test/policy-ready.yaml new file mode 100755 index 000000000..512228993 --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/policy-ready.yaml @@ -0,0 +1,7 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-storageclass +status: + ready: true + diff --git 
a/other-cel/require-storageclass/.chainsaw-test/pvc-bad.yaml b/other-cel/require-storageclass/.chainsaw-test/pvc-bad.yaml new file mode 100644 index 000000000..e0f9f948c --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/pvc-bad.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: badpvc +spec: + accessModes: + - ReadWriteOnce + volumeMode: Filesystem + resources: + requests: + storage: 1Gi + storageClassName: "" + selector: + matchLabels: + release: "stable" + matchExpressions: + - {key: environment, operator: In, values: [dev]} + diff --git a/other-cel/require-storageclass/.chainsaw-test/pvc-good.yaml b/other-cel/require-storageclass/.chainsaw-test/pvc-good.yaml new file mode 100644 index 000000000..498f27c83 --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/pvc-good.yaml @@ -0,0 +1,18 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: goodpvc +spec: + accessModes: + - ReadWriteOnce + volumeMode: Filesystem + resources: + requests: + storage: 1Gi + storageClassName: slow + selector: + matchLabels: + release: "stable" + matchExpressions: + - {key: environment, operator: In, values: [dev]} + diff --git a/other-cel/require-storageclass/.chainsaw-test/ss-bad.yaml b/other-cel/require-storageclass/.chainsaw-test/ss-bad.yaml new file mode 100644 index 000000000..74251b466 --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/ss-bad.yaml @@ -0,0 +1,34 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: badss +spec: + selector: + matchLabels: + app: nginx + serviceName: "nginx" + replicas: 1 + template: + metadata: + labels: + app: nginx + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: busybox + image: busybox:1.35 + ports: + - containerPort: 80 + name: web + volumeMounts: + - name: www + mountPath: /usr/share/nginx/html + volumeClaimTemplates: + - metadata: + name: www + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Gi + 
diff --git a/other-cel/require-storageclass/.chainsaw-test/ss-good.yaml b/other-cel/require-storageclass/.chainsaw-test/ss-good.yaml new file mode 100644 index 000000000..45a6b7429 --- /dev/null +++ b/other-cel/require-storageclass/.chainsaw-test/ss-good.yaml @@ -0,0 +1,47 @@ +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: goodss01 +spec: + selector: + matchLabels: + app: nginx + replicas: 1 + template: + metadata: + labels: + app: nginx + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: busybox + image: busybox:1.35 + volumeClaimTemplates: + - metadata: + name: www + spec: + accessModes: [ "ReadWriteOnce" ] + storageClassName: "my-storage-class" + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: goodss02 +spec: + selector: + matchLabels: + app: busybox + replicas: 1 + template: + metadata: + labels: + app: busybox + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: busybox + image: busybox:1.35 + diff --git a/other-cel/require-storageclass/.kyverno-test/kyverno-test.yaml b/other-cel/require-storageclass/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..168cfd341 --- /dev/null +++ b/other-cel/require-storageclass/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,35 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-storageclass +policies: +- ../require-storageclass.yaml +resources: +- resource.yaml +results: +- kind: PersistentVolumeClaim + policy: require-storageclass + resources: + - badpvc + result: fail + rule: pvc-storageclass +- kind: PersistentVolumeClaim + policy: require-storageclass + resources: + - goodpvc + result: pass + rule: pvc-storageclass +- kind: StatefulSet + policy: require-storageclass + resources: + - badss + result: fail + rule: ss-storageclass +- kind: StatefulSet + policy: require-storageclass + resources: + - goodss + - goodss-novct + result: pass + rule: ss-storageclass + diff --git 
a/other-cel/require-storageclass/.kyverno-test/resource.yaml b/other-cel/require-storageclass/.kyverno-test/resource.yaml new file mode 100644 index 000000000..d2bca0d00 --- /dev/null +++ b/other-cel/require-storageclass/.kyverno-test/resource.yaml @@ -0,0 +1,127 @@ +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: badpvc +spec: + accessModes: + - ReadWriteOnce + volumeMode: Filesystem + resources: + requests: + storage: 8Gi + selector: + matchLabels: + release: "stable" + matchExpressions: + - {key: environment, operator: In, values: [dev]} +--- +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: goodpvc +spec: + accessModes: + - ReadWriteOnce + volumeMode: Filesystem + resources: + requests: + storage: 8Gi + storageClassName: slow + selector: + matchLabels: + release: "stable" + matchExpressions: + - {key: environment, operator: In, values: [dev]} +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: goodss +spec: + selector: + matchLabels: + app: nginx + serviceName: "nginx" + replicas: 3 + template: + metadata: + labels: + app: nginx + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: nginx + image: thisdoesnotexist:0.8 + ports: + - containerPort: 80 + name: web + volumeMounts: + - name: www + mountPath: /usr/share/nginx/html + volumeClaimTemplates: + - metadata: + name: www + spec: + accessModes: [ "ReadWriteOnce" ] + storageClassName: "my-storage-class" + resources: + requests: + storage: 1Gi +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: goodss-novct +spec: + selector: + matchLabels: + app: nginx + serviceName: "nginx" + replicas: 3 + template: + metadata: + labels: + app: nginx + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: nginx + image: thisdoesnotexist:0.8 + ports: + - containerPort: 80 + name: web +--- +apiVersion: apps/v1 +kind: StatefulSet +metadata: + name: badss +spec: + selector: + matchLabels: + app: nginx + serviceName: "nginx" + replicas: 3 + template: + 
metadata: + labels: + app: nginx + spec: + terminationGracePeriodSeconds: 10 + containers: + - name: nginx + image: thisdoesnotexist:0.8 + ports: + - containerPort: 80 + name: web + volumeMounts: + - name: www + mountPath: /usr/share/nginx/html + volumeClaimTemplates: + - metadata: + name: www + spec: + accessModes: [ "ReadWriteOnce" ] + resources: + requests: + storage: 1Gi + diff --git a/other-cel/require-storageclass/artifacthub-pkg.yml b/other-cel/require-storageclass/artifacthub-pkg.yml new file mode 100644 index 000000000..b12cc6d8e --- /dev/null +++ b/other-cel/require-storageclass/artifacthub-pkg.yml @@ -0,0 +1,25 @@ +name: require-storageclass-cel +version: 1.0.0 +displayName: Require StorageClass in CEL expressions +description: >- + PersistentVolumeClaims (PVCs) and StatefulSets may optionally define a StorageClass to dynamically provision storage. In a multi-tenancy environment where StorageClasses are far more common, it is often better to require storage only be provisioned from these StorageClasses. This policy requires that PVCs and StatefulSets define the storageClassName field with some value. +install: |- + ```shell + kubectl apply -f https://raw.githubusercontent.com/kyverno/policies/main/other-cel/require-storageclass/require-storageclass.yaml + ``` +keywords: + - kyverno + - Other + - Multi-Tenancy + - CEL Expressions +readme: | + PersistentVolumeClaims (PVCs) and StatefulSets may optionally define a StorageClass to dynamically provision storage. In a multi-tenancy environment where StorageClasses are far more common, it is often better to require storage only be provisioned from these StorageClasses. This policy requires that PVCs and StatefulSets define the storageClassName field with some value. 
+ + Refer to the documentation for more details on Kyverno annotations: https://artifacthub.io/docs/topics/annotations/kyverno/ +annotations: + kyverno/category: "Other, Multi-Tenancy in CEL" + kyverno/kubernetesVersion: "1.26-1.27" + kyverno/subject: "PersistentVolumeClaim, StatefulSet" +digest: 3ff19239688cf47b624bbd62d02624153b27059c6bed26ca290599eb2810ccf6 +createdAt: "2024-04-11T18:06:16Z" + diff --git a/other-cel/require-storageclass/require-storageclass.yaml b/other-cel/require-storageclass/require-storageclass.yaml new file mode 100644 index 000000000..ff0240ab2 --- /dev/null +++ b/other-cel/require-storageclass/require-storageclass.yaml @@ -0,0 +1,47 @@ +apiVersion: kyverno.io/v1 +kind: ClusterPolicy +metadata: + name: require-storageclass + annotations: + policies.kyverno.io/title: Require StorageClass in CEL expressions + policies.kyverno.io/category: Other, Multi-Tenancy in CEL + policies.kyverno.io/severity: medium + policies.kyverno.io/subject: PersistentVolumeClaim, StatefulSet + kyverno.io/kyverno-version: 1.11.0 + kyverno.io/kubernetes-version: "1.26-1.27" + policies.kyverno.io/description: >- + PersistentVolumeClaims (PVCs) and StatefulSets may optionally define a StorageClass + to dynamically provision storage. In a multi-tenancy environment where StorageClasses are + far more common, it is often better to require storage only be provisioned from these + StorageClasses. This policy requires that PVCs and StatefulSets containing + volumeClaimTemplates define the storageClassName field with some value. +spec: + validationFailureAction: Audit + background: true + rules: + - name: pvc-storageclass + match: + any: + - resources: + kinds: + - PersistentVolumeClaim + validate: + cel: + expressions: + - expression: "has(object.spec.storageClassName) && object.spec.storageClassName != ''" + message: "PersistentVolumeClaims must define a storageClassName." 
+ - name: ss-storageclass + match: + any: + - resources: + kinds: + - StatefulSet + validate: + cel: + expressions: + - expression: >- + !has(object.spec.volumeClaimTemplates) || + object.spec.volumeClaimTemplates.all(volumeClaimTemplate, + has(volumeClaimTemplate.spec.storageClassName) && volumeClaimTemplate.spec.storageClassName != '') + message: "StatefulSets must define a storageClassName." + diff --git a/other/require-annotations/.kyverno-test/kyverno-test.yaml b/other/require-annotations/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..8cf9c591a --- /dev/null +++ b/other/require-annotations/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,50 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-annotations +policies: +- ../require-annotations.yaml +resources: +- ../.chainsaw-test/pod-bad.yaml +- ../.chainsaw-test/pod-good.yaml +- ../.chainsaw-test/podcontroller-bad.yaml +- ../.chainsaw-test/podcontroller-good.yaml +results: +- kind: Pod + policy: require-annotations + resources: + - badpod01 + - badpod02 + result: fail + rule: check-for-annotation +- kind: Pod + policy: require-annotations + resources: + - goodpod01 + result: pass + rule: check-for-annotation +- kind: Deployment + policy: require-annotations + resources: + - baddeployment01 + result: fail + rule: check-for-annotation +- kind: CronJob + policy: require-annotations + resources: + - badcronjob01 + result: fail + rule: check-for-annotation +- kind: Deployment + policy: require-annotations + resources: + - gooddeployment01 + result: pass + rule: check-for-annotation +- kind: CronJob + policy: require-annotations + resources: + - goodcronjob01 + result: pass + rule: check-for-annotation + diff --git a/other/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml b/other/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml index f4755fdc2..07bccb8b6 100644 --- 
a/other/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml +++ b/other/require-deployments-have-multiple-replicas/.kyverno-test/kyverno-test.yaml @@ -10,6 +10,13 @@ results: - kind: Deployment policy: deployment-has-multiple-replicas resources: - - mydeploy + - mydeploygood result: pass rule: deployment-has-multiple-replicas +- kind: Deployment + policy: deployment-has-multiple-replicas + resources: + - mydeploybad + result: fail + rule: deployment-has-multiple-replicas + diff --git a/other/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml b/other/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml index 85fef6f89..6fc0aa939 100644 --- a/other/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml +++ b/other/require-deployments-have-multiple-replicas/.kyverno-test/resource.yaml @@ -1,7 +1,7 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: mydeploy + name: mydeploygood spec: replicas: 2 selector: @@ -19,13 +19,23 @@ spec: - containerPort: 80 --- -apiVersion: v1 -kind: Pod +apiVersion: apps/v1 +kind: Deployment metadata: - labels: - foo: bar - name: myapp-pod + name: mydeploybad spec: - containers: - - image: nginx - name: nginx \ No newline at end of file + replicas: 1 + selector: + matchLabels: + app: myapp + template: + metadata: + labels: + app: myapp + spec: + containers: + - name: nginx + image: nginx + ports: + - containerPort: 80 + diff --git a/other/require-ingress-https/.kyverno-test/kyverno-test.yaml b/other/require-ingress-https/.kyverno-test/kyverno-test.yaml new file mode 100644 index 000000000..530b4fe66 --- /dev/null +++ b/other/require-ingress-https/.kyverno-test/kyverno-test.yaml @@ -0,0 +1,38 @@ +apiVersion: cli.kyverno.io/v1alpha1 +kind: Test +metadata: + name: require-ingress-https +policies: +- ../require-ingress-https.yaml +resources: +- resource.yaml +results: +- kind: Ingress + policy: require-ingress-https + resources: + - goodingress01 + - 
goodingress02 + result: pass + rule: has-annotation +- kind: Ingress + policy: require-ingress-https + resources: + - goodingress01 + - goodingress02 + result: pass + rule: has-tls +- kind: Ingress + policy: require-ingress-https + resources: + - badingress01 + - badingress02 + - badingress03 + result: fail + rule: has-annotation +- kind: Ingress + policy: require-ingress-https + resources: + - badingress04 + result: fail + rule: has-tls + diff --git a/other/require-ingress-https/.kyverno-test/resource.yaml b/other/require-ingress-https/.kyverno-test/resource.yaml new file mode 100644 index 000000000..a97cba4e7 --- /dev/null +++ b/other/require-ingress-https/.kyverno-test/resource.yaml @@ -0,0 +1,180 @@ +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "true" + name: badingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + http: + paths: + - backend: + service: + name: demo-svc + port: + number: 8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress02 + annotations: + kyverno.io/foo: bar +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: badingress03 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - 
endpoint01 + - endpoint02 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: badingress04 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + http: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kyverno.io/foo: bar + kubernetes.io/ingress.allow-http: "false" + name: goodingress01 +spec: + ingressClassName: someingress + rules: + - host: endpoint01 + https: + paths: + - backend: + service: + name: demo-svc + port: + number: 8080 + path: / + pathType: Prefix + tls: + - hosts: + - endpoint01 +--- +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + annotations: + kubernetes.io/ingress.allow-http: "false" + kyverno.io/foo: bar + name: goodingress02 +spec: + ingressClassName: nginx-int + rules: + - host: endpoint01 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + - host: endpoint02 + https: + paths: + - path: /testpath + pathType: Prefix + backend: + service: + name: test + port: + number: 80 + tls: + - hosts: + - endpoint01 + - endpoint02 +