From 528801571f005e5670f0b604c0a362aa2ab48d6a Mon Sep 17 00:00:00 2001 From: Wen Zhou Date: Wed, 28 Feb 2024 14:20:47 +0100 Subject: [PATCH] Revert "DSC, DSCI: add validating webhook (#711)" This reverts commit 9992e2af8e68f5168c38f062eeb1f216d6f7f058. Signed-off-by: Wen Zhou (cherry picked from commit 03b468bfb6cf9fa4c756e84b5e8fd11c29cfe6ef) --- Makefile | 4 +- PROJECT | 6 - ...er.opendatahub.io_datascienceclusters.yaml | 36 +-- ...ion.opendatahub.io_dscinitializations.yaml | 2 - ...s-operator-webhook-service_v1_service.yaml | 24 -- .../rhods-operator.clusterserviceversion.yaml | 69 ++--- ...ion.opendatahub.io_dscinitializations.yaml | 2 - config/default/kustomization.yaml | 5 +- config/default/manager_webhook_patch.yaml | 23 -- .../rhods-operator.clusterserviceversion.yaml | 6 - config/webhook/kustomization.yaml | 9 - config/webhook/kustomizeconfig.yaml | 25 -- config/webhook/manifests.yaml | 29 --- config/webhook/service.yaml | 22 -- .../certconfigmapgenerator_controller.go | 5 +- .../datasciencecluster_controller.go | 24 +- .../dscinitialization_controller.go | 26 +- .../dscinitialization_test.go | 35 ++- controllers/webhook/webhook.go | 111 -------- controllers/webhook/webhook_suite_test.go | 244 ------------------ main.go | 19 +- tests/e2e/controller_setup_test.go | 4 +- tests/e2e/dsc_creation_test.go | 59 ----- tests/e2e/helper_test.go | 8 +- 24 files changed, 129 insertions(+), 668 deletions(-) delete mode 100644 bundle/manifests/redhat-ods-operator-webhook-service_v1_service.yaml delete mode 100644 config/default/manager_webhook_patch.yaml delete mode 100644 config/webhook/kustomization.yaml delete mode 100644 config/webhook/kustomizeconfig.yaml delete mode 100644 config/webhook/manifests.yaml delete mode 100644 config/webhook/service.yaml delete mode 100644 controllers/webhook/webhook.go delete mode 100644 controllers/webhook/webhook_suite_test.go diff --git a/Makefile b/Makefile index 430584e9cbd..b2357975a45 100644 --- a/Makefile +++ b/Makefile @@ -138,7 +138,9 @@ endef .PHONY: manifests manifests: controller-gen ## Generate WebhookConfiguration, ClusterRole and CustomResourceDefinition objects. - $(CONTROLLER_GEN) rbac:roleName=controller-manager-role crd:ignoreUnexportedFields=true webhook paths="./..." output:crd:artifacts:config=config/crd/bases +# TODO: enable below when we do webhook +# $(CONTROLLER_GEN) rbac:roleName=controller-manager-role crd webhook paths="./..." output:crd:artifacts:config=config/crd/bases + $(CONTROLLER_GEN) rbac:roleName=controller-manager-role crd:ignoreUnexportedFields=true paths="./..." 
output:crd:artifacts:config=config/crd/bases $(call fetch-external-crds,github.com/openshift/api,route/v1) $(call fetch-external-crds,github.com/openshift/api,user/v1) diff --git a/PROJECT b/PROJECT index 4bfc6c97792..cbb40d6a5b2 100644 --- a/PROJECT +++ b/PROJECT @@ -21,9 +21,6 @@ resources: kind: DSCInitialization path: github.com/opendatahub-io/opendatahub-operator/v2/apis/dscinitialization/v1 version: v1 - webhooks: - validation: true - webhookVersion: v1 - api: crdVersion: v1 namespaced: false @@ -33,7 +30,4 @@ resources: kind: DataScienceCluster path: github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1 version: v1 - webhooks: - validation: true - webhookVersion: v1 version: "3" diff --git a/bundle/manifests/datasciencecluster.opendatahub.io_datascienceclusters.yaml b/bundle/manifests/datasciencecluster.opendatahub.io_datascienceclusters.yaml index 9608c6de4aa..ba962a1d9a1 100644 --- a/bundle/manifests/datasciencecluster.opendatahub.io_datascienceclusters.yaml +++ b/bundle/manifests/datasciencecluster.opendatahub.io_datascienceclusters.yaml @@ -62,12 +62,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -105,12 +105,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -149,12 +149,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -207,12 +207,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -310,12 +310,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -354,12 +354,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. 
Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -397,12 +397,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -440,12 +440,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array @@ -483,12 +483,12 @@ spec: description: 'sourcePath is the subpath within contextDir where kustomize builds start. Examples include any sub-folder or path: `base`, `overlays/dev`, - `default`, `odh` etc.' + `default`, `odh` etc' type: string uri: default: "" description: uri is the URI point to a git repo - with tag/branch. e.g. https://github.com/org/repo/tarball/ + with tag/branch. e.g https://github.com/org/repo/tarball/ type: string type: object type: array diff --git a/bundle/manifests/dscinitialization.opendatahub.io_dscinitializations.yaml b/bundle/manifests/dscinitialization.opendatahub.io_dscinitializations.yaml index 6b9514bb1e5..8ae3397ba85 100644 --- a/bundle/manifests/dscinitialization.opendatahub.io_dscinitializations.yaml +++ b/bundle/manifests/dscinitialization.opendatahub.io_dscinitializations.yaml @@ -131,7 +131,6 @@ spec: field. properties: customCABundle: - default: "" description: A custom CA bundle that will be available for all components in the Data Science Cluster(DSC). This bundle will be stored in odh-trusted-ca-bundle ConfigMap .data.odh-ca-bundle.crt . 
@@ -147,7 +146,6 @@ spec: pattern: ^(Managed|Unmanaged|Force|Removed)$ type: string required: - - customCABundle - managementState type: object required: diff --git a/bundle/manifests/redhat-ods-operator-webhook-service_v1_service.yaml b/bundle/manifests/redhat-ods-operator-webhook-service_v1_service.yaml deleted file mode 100644 index 218c45fefcc..00000000000 --- a/bundle/manifests/redhat-ods-operator-webhook-service_v1_service.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: v1 -kind: Service -metadata: - annotations: - service.beta.openshift.io/inject-cabundle: "true" - service.beta.openshift.io/serving-cert-secret-name: redhat-ods-operator-controller-webhook-cert - creationTimestamp: null - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/created-by: rhods-operator - app.kubernetes.io/instance: webhook-service - app.kubernetes.io/managed-by: kustomize - app.kubernetes.io/name: service - app.kubernetes.io/part-of: rhods-operator - name: redhat-ods-operator-webhook-service -spec: - ports: - - port: 443 - protocol: TCP - targetPort: 9443 - selector: - control-plane: controller-manager -status: - loadBalancer: {} diff --git a/bundle/manifests/rhods-operator.clusterserviceversion.yaml b/bundle/manifests/rhods-operator.clusterserviceversion.yaml index 8de96754492..2c0409aaab5 100644 --- a/bundle/manifests/rhods-operator.clusterserviceversion.yaml +++ b/bundle/manifests/rhods-operator.clusterserviceversion.yaml @@ -136,15 +136,15 @@ metadata: }, "kserve": { "managementState": "Managed", - "serving": { - "ingressGateway": { - "certificate": { - "type": "SelfSigned" - } - }, - "managementState": "Managed", - "name": "knative-serving" - } + "serving": { + "ingressGateway": { + "certificate": { + "type": "SelfSigned" + } + }, + "managementState": "Managed", + "name": "knative-serving" + } }, "kueue": { "managementState": "Removed" @@ -165,7 +165,7 @@ metadata: operators.operatorframework.io/internal-objects: '[dscinitialization.opendatahub.io]' operators.operatorframework.io/project_layout: go.kubebuilder.io/v3 repository: https://github.com/red-hat-data-services/rhods-operator - name: rhods-operator.v2.8.0 + name: rhods-operator.v2.4.0 namespace: placeholder spec: apiservicedefinitions: {} @@ -1744,6 +1744,18 @@ spec: - patch - update - watch + - apiGroups: + - authentication.k8s.io + resources: + - tokenreviews + verbs: + - create + - apiGroups: + - authorization.k8s.io + resources: + - subjectaccessreviews + verbs: + - create serviceAccountName: redhat-ods-operator-controller-manager deployments: - label: @@ -1778,10 +1790,6 @@ spec: initialDelaySeconds: 15 periodSeconds: 20 name: manager - ports: - - containerPort: 9443 - name: webhook-server - protocol: TCP readinessProbe: httpGet: path: /readyz @@ -1795,15 +1803,6 @@ spec: requests: cpu: 500m memory: 256Mi - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - volumeMounts: - - mountPath: /tmp/k8s-webhook-server/serving-certs - name: cert - readOnly: true securityContext: runAsNonRoot: true serviceAccountName: redhat-ods-operator-controller-manager @@ -1862,6 +1861,7 @@ spec: - training - kserve - distributed-workloads + - trustyai links: - name: Red Hat OpenShift AI url: https://www.redhat.com/en/technologies/cloud-computing/openshift/openshift-ai @@ -1869,26 +1869,3 @@ spec: provider: name: Red Hat version: 2.8.0 - webhookdefinitions: - - admissionReviewVersions: - - v1 - containerPort: 443 - deploymentName: redhat-ods-operator-controller-manager - failurePolicy: Fail - generateName: 
operator.opendatahub.io - rules: - - apiGroups: - - datasciencecluster.opendatahub.io - - dscinitialization.opendatahub.io - apiVersions: - - v1 - operations: - - CREATE - - UPDATE - resources: - - datascienceclusters - - dscinitializations - sideEffects: None - targetPort: 9443 - type: ValidatingAdmissionWebhook - webhookPath: /validate-opendatahub-io-v1 diff --git a/config/crd/bases/dscinitialization.opendatahub.io_dscinitializations.yaml b/config/crd/bases/dscinitialization.opendatahub.io_dscinitializations.yaml index a821a61ff36..48621d88cc7 100644 --- a/config/crd/bases/dscinitialization.opendatahub.io_dscinitializations.yaml +++ b/config/crd/bases/dscinitialization.opendatahub.io_dscinitializations.yaml @@ -132,7 +132,6 @@ spec: field. properties: customCABundle: - default: "" description: A custom CA bundle that will be available for all components in the Data Science Cluster(DSC). This bundle will be stored in odh-trusted-ca-bundle ConfigMap .data.odh-ca-bundle.crt . @@ -148,7 +147,6 @@ spec: pattern: ^(Managed|Unmanaged|Force|Removed)$ type: string required: - - customCABundle - managementState type: object required: diff --git a/config/default/kustomization.yaml b/config/default/kustomization.yaml index 1a7fe00e8c9..fc1b6912725 100644 --- a/config/default/kustomization.yaml +++ b/config/default/kustomization.yaml @@ -21,7 +21,7 @@ resources: - ../manager # [WEBHOOK] To enable webhook, uncomment all the sections with [WEBHOOK] prefix including the one in # crd/kustomization.yaml -- ../webhook +#- ../webhook # [CERTMANAGER] To enable cert-manager, uncomment all sections with 'CERTMANAGER'. 'WEBHOOK' components are required. #- ../certmanager # [PROMETHEUS] To enable prometheus monitor, uncomment all sections with 'PROMETHEUS'. @@ -37,7 +37,6 @@ resources: # [WEBHOOK] To enable webhook, uncomment all the sections with [WEBHOOK] prefix including the one in # crd/kustomization.yaml -# Moved below to patches #- manager_webhook_patch.yaml # [CERTMANAGER] To enable cert-manager, uncomment all sections with 'CERTMANAGER'. @@ -77,5 +76,3 @@ resources: patches: - path: manager_auth_proxy_patch.yaml -# [WEBHOOK] -- path: manager_webhook_patch.yaml diff --git a/config/default/manager_webhook_patch.yaml b/config/default/manager_webhook_patch.yaml deleted file mode 100644 index d76a0133d0d..00000000000 --- a/config/default/manager_webhook_patch.yaml +++ /dev/null @@ -1,23 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: controller-manager - namespace: system -spec: - template: - spec: - containers: - - name: manager - ports: - - containerPort: 9443 - name: webhook-server - protocol: TCP - volumeMounts: - - mountPath: /tmp/k8s-webhook-server/serving-certs - name: cert - readOnly: true - volumes: - - name: cert - secret: - defaultMode: 420 - secretName: redhat-ods-operator-controller-webhook-cert diff --git a/config/manifests/bases/rhods-operator.clusterserviceversion.yaml b/config/manifests/bases/rhods-operator.clusterserviceversion.yaml index c1b51f64ac4..c5f07b4ea18 100644 --- a/config/manifests/bases/rhods-operator.clusterserviceversion.yaml +++ b/config/manifests/bases/rhods-operator.clusterserviceversion.yaml @@ -97,12 +97,6 @@ spec: e.g. it provides unified authentication giving a Single Sign On experience. displayName: Service Mesh path: serviceMesh - - description: When set to `Managed`, adds odh-trusted-ca-bundle Configmap to - all namespaces that includes cluster-wide Trusted CA Bundle in .data["ca-bundle.crt"]. 
- Additionally, this fields allows admins to add custom CA bundles to the - configmap using the .CustomCABundle field. - displayName: Trusted CABundle - path: trustedCABundle - description: Internal development useful field to test customizations. This is not recommended to be used in production environment. displayName: Dev Flags diff --git a/config/webhook/kustomization.yaml b/config/webhook/kustomization.yaml deleted file mode 100644 index 8428859f524..00000000000 --- a/config/webhook/kustomization.yaml +++ /dev/null @@ -1,9 +0,0 @@ -resources: -- manifests.yaml -- service.yaml - -commonAnnotations: - service.beta.openshift.io/inject-cabundle: "true" - -configurations: -- kustomizeconfig.yaml diff --git a/config/webhook/kustomizeconfig.yaml b/config/webhook/kustomizeconfig.yaml deleted file mode 100644 index 25e21e3c963..00000000000 --- a/config/webhook/kustomizeconfig.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# the following config is for teaching kustomize where to look at when substituting vars. -# It requires kustomize v2.1.0 or newer to work properly. -nameReference: -- kind: Service - version: v1 - fieldSpecs: - - kind: MutatingWebhookConfiguration - group: admissionregistration.k8s.io - path: webhooks/clientConfig/service/name - - kind: ValidatingWebhookConfiguration - group: admissionregistration.k8s.io - path: webhooks/clientConfig/service/name - -namespace: -- kind: MutatingWebhookConfiguration - group: admissionregistration.k8s.io - path: webhooks/clientConfig/service/namespace - create: true -- kind: ValidatingWebhookConfiguration - group: admissionregistration.k8s.io - path: webhooks/clientConfig/service/namespace - create: true - -varReference: -- path: metadata/annotations diff --git a/config/webhook/manifests.yaml b/config/webhook/manifests.yaml deleted file mode 100644 index 5595314bffb..00000000000 --- a/config/webhook/manifests.yaml +++ /dev/null @@ -1,29 +0,0 @@ ---- -apiVersion: admissionregistration.k8s.io/v1 -kind: ValidatingWebhookConfiguration -metadata: - creationTimestamp: null - name: validating-webhook-configuration -webhooks: -- admissionReviewVersions: - - v1 - clientConfig: - service: - name: webhook-service - namespace: system - path: /validate-opendatahub-io-v1 - failurePolicy: Fail - name: operator.opendatahub.io - rules: - - apiGroups: - - datasciencecluster.opendatahub.io - - dscinitialization.opendatahub.io - apiVersions: - - v1 - operations: - - CREATE - - UPDATE - resources: - - datascienceclusters - - dscinitializations - sideEffects: None diff --git a/config/webhook/service.yaml b/config/webhook/service.yaml deleted file mode 100644 index 72c652ddaae..00000000000 --- a/config/webhook/service.yaml +++ /dev/null @@ -1,22 +0,0 @@ - -apiVersion: v1 -kind: Service -metadata: - labels: - app.kubernetes.io/name: service - app.kubernetes.io/instance: webhook-service - app.kubernetes.io/component: webhook - app.kubernetes.io/created-by: rhods-operator - app.kubernetes.io/part-of: rhods-operator - app.kubernetes.io/managed-by: kustomize - name: webhook-service - namespace: system - annotations: - service.beta.openshift.io/serving-cert-secret-name: redhat-ods-operator-controller-webhook-cert -spec: - ports: - - port: 443 - protocol: TCP - targetPort: 9443 - selector: - control-plane: controller-manager diff --git a/controllers/certconfigmapgenerator/certconfigmapgenerator_controller.go b/controllers/certconfigmapgenerator/certconfigmapgenerator_controller.go index 2b9c94dc182..17732592d39 100644 --- 
a/controllers/certconfigmapgenerator/certconfigmapgenerator_controller.go +++ b/controllers/certconfigmapgenerator/certconfigmapgenerator_controller.go @@ -49,7 +49,7 @@ func (r *CertConfigmapGeneratorReconciler) SetupWithManager(mgr ctrl.Manager) er // ca bundle in every new namespace created. func (r *CertConfigmapGeneratorReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { // Request includes namespace that is newly created or where odh-trusted-ca-bundle configmap is updated. - r.Log.Info("Reconciling certConfigMapGenerator.", "CertConfigMapGenerator Request.Namespace", req.NamespacedName) + r.Log.Info("Reconciling certConfigMapGenerator.", " CertConfigMapGenerator Request.Namespace", req.NamespacedName) // Get namespace instance userNamespace := &corev1.Namespace{} err := r.Client.Get(ctx, client.ObjectKey{Name: req.Namespace}, userNamespace) @@ -71,6 +71,9 @@ func (r *CertConfigmapGeneratorReconciler) Reconcile(ctx context.Context, req ct return ctrl.Result{}, nil case 1: dsciInstance = &dsciInstances.Items[0] + default: + message := "only one instance of DSCInitialization object is allowed" + return ctrl.Result{}, errors.New(message) } if dsciInstance.Spec.TrustedCABundle.ManagementState != operatorv1.Managed { diff --git a/controllers/datasciencecluster/datasciencecluster_controller.go b/controllers/datasciencecluster/datasciencecluster_controller.go index 878a0230ec9..0340d4bd5eb 100644 --- a/controllers/datasciencecluster/datasciencecluster_controller.go +++ b/controllers/datasciencecluster/datasciencecluster_controller.go @@ -82,7 +82,7 @@ const ( // Reconcile is part of the main kubernetes reconciliation loop which aims to // move the current state of the cluster closer to the desired state. -func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { //nolint:gocyclo +func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl.Result, error) { //nolint:gocyclo,maintidx r.Log.Info("Reconciling DataScienceCluster resources", "Request.Name", req.Name) instances := &dsc.DataScienceClusterList{} @@ -138,6 +138,19 @@ func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.R return reconcile.Result{Requeue: true}, nil } + if len(instances.Items) > 1 { + message := fmt.Sprintf("only one instance of DataScienceCluster object is allowed. 
Update existing instance %s", req.Name) + err := errors.New(message) + _ = r.reportError(err, instance, message) + + _, _ = r.updateStatus(ctx, instance, func(saved *dsc.DataScienceCluster) { + status.SetErrorCondition(&saved.Status.Conditions, status.DuplicateDataScienceCluster, message) + saved.Status.Phase = status.PhaseError + }) + + return ctrl.Result{}, err + } + // Verify a valid DSCInitialization instance is created dsciInstances := &dsci.DSCInitializationList{} err = r.Client.List(ctx, dsciInstances) @@ -148,7 +161,7 @@ func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.R } // Update phase to error state if DataScienceCluster is created without valid DSCInitialization - switch len(dsciInstances.Items) { // only handle number as 0 or 1, others won't be existed since webhook block creation + switch len(dsciInstances.Items) { case 0: reason := status.ReconcileFailed message := "Failed to get a valid DSCInitialization instance" @@ -164,6 +177,13 @@ func (r *DataScienceClusterReconciler) Reconcile(ctx context.Context, req ctrl.R case 1: dscInitializationSpec := dsciInstances.Items[0].Spec dscInitializationSpec.DeepCopyInto(r.DataScienceCluster.DSCISpec) + default: + message := "only one instance of DSCInitialization object is allowed" + _, _ = r.updateStatus(ctx, instance, func(saved *dsc.DataScienceCluster) { + status.SetErrorCondition(&saved.Status.Conditions, status.DuplicateDSCInitialization, message) + saved.Status.Phase = status.PhaseError + }) + return ctrl.Result{}, errors.New(message) } if instance.ObjectMeta.DeletionTimestamp.IsZero() { diff --git a/controllers/dscinitialization/dscinitialization_controller.go b/controllers/dscinitialization/dscinitialization_controller.go index cd076d5cee2..be34f146816 100644 --- a/controllers/dscinitialization/dscinitialization_controller.go +++ b/controllers/dscinitialization/dscinitialization_controller.go @@ -19,6 +19,8 @@ package dscinitialization import ( "context" + "errors" + "fmt" "path/filepath" "reflect" @@ -87,11 +89,33 @@ func (r *DSCInitializationReconciler) Reconcile(ctx context.Context, req ctrl.Re } var instance *dsciv1.DSCInitialization - switch { // only handle number as 0 or 1, others won't be existed since webhook block creation + switch { case len(instances.Items) == 0: return ctrl.Result{}, nil case len(instances.Items) == 1: instance = &instances.Items[0] + case len(instances.Items) > 1: + // find out the one by created timestamp and use it as the default one + earliestDSCI := &instances.Items[0] + for _, instance := range instances.Items { + currentDSCI := instance + if currentDSCI.CreationTimestamp.Before(&earliestDSCI.CreationTimestamp) { + earliestDSCI = &currentDSCI + } + } + message := fmt.Sprintf("only one instance of DSCInitialization object is allowed. 
Please delete other instances than %s", earliestDSCI.Name) + // update all instances Message and Status + for _, deletionInstance := range instances.Items { + deletionInstance := deletionInstance + if deletionInstance.Name != earliestDSCI.Name { + _, _ = r.updateStatus(ctx, &deletionInstance, func(saved *dsciv1.DSCInitialization) { + status.SetErrorCondition(&saved.Status.Conditions, status.DuplicateDSCInitialization, message) + saved.Status.Phase = status.PhaseError + }) + } + } + + return ctrl.Result{}, errors.New(message) } if instance.ObjectMeta.DeletionTimestamp.IsZero() { diff --git a/controllers/dscinitialization/dscinitialization_test.go b/controllers/dscinitialization/dscinitialization_test.go index 17b510c4d2c..6a726a1998b 100644 --- a/controllers/dscinitialization/dscinitialization_test.go +++ b/controllers/dscinitialization/dscinitialization_test.go @@ -20,7 +20,6 @@ import ( const ( workingNamespace = "test-operator-ns" - applicationName = "default-dsci" applicationNamespace = "test-application-ns" configmapName = "odh-common-config" monitoringNamespace = "test-monitoring-ns" @@ -30,10 +29,10 @@ const ( var _ = Describe("DataScienceCluster initialization", func() { Context("Creation of related resources", func() { // must be default as instance name, or it will break - + const applicationName = "default-dsci" BeforeEach(func() { // when - desiredDsci := createDSCI(operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). @@ -120,7 +119,7 @@ var _ = Describe("DataScienceCluster initialization", func() { const applicationName = "default-dsci" It("Should not create monitoring namespace if monitoring is disabled", func() { // when - desiredDsci := createDSCI(operatorv1.Removed, operatorv1.Managed, monitoringNamespace2) + desiredDsci := createDSCI(applicationName, operatorv1.Removed, operatorv1.Managed, monitoringNamespace2) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). @@ -136,7 +135,7 @@ var _ = Describe("DataScienceCluster initialization", func() { }) It("Should create default monitoring namespace if monitoring enabled", func() { // when - desiredDsci := createDSCI(operatorv1.Managed, operatorv1.Managed, monitoringNamespace2) + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace2) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). 
@@ -155,6 +154,22 @@ var _ = Describe("DataScienceCluster initialization", func() { Context("Handling existing resources", func() { AfterEach(cleanupResources) + const applicationName = "default-dsci" + + It("Should not have more than one DSCI instance in the cluster", func() { + + anotherApplicationName := "default2" + // given + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) + // when + desiredDsci2 := createDSCI(anotherApplicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + // then + Eventually(dscInitializationIsReady(anotherApplicationName, workingNamespace, desiredDsci2)). + WithTimeout(timeout). + WithPolling(interval). + Should(BeFalse()) + }) It("Should not update rolebinding if it exists", func() { applicationName := envtestutil.AppendRandomNameTo("rolebinding-test") @@ -184,7 +199,7 @@ var _ = Describe("DataScienceCluster initialization", func() { Should(BeTrue()) // when - desiredDsci := createDSCI(operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). @@ -225,7 +240,7 @@ var _ = Describe("DataScienceCluster initialization", func() { Should(BeTrue()) // when - desiredDsci := createDSCI(operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). @@ -262,7 +277,7 @@ var _ = Describe("DataScienceCluster initialization", func() { Should(BeTrue()) // when - desiredDsci := createDSCI(operatorv1.Managed, operatorv1.Managed, monitoringNamespace) + desiredDsci := createDSCI(applicationName, operatorv1.Managed, operatorv1.Managed, monitoringNamespace) Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) foundDsci := &dsci.DSCInitialization{} Eventually(dscInitializationIsReady(applicationName, workingNamespace, foundDsci)). @@ -334,14 +349,14 @@ func objectExists(ns string, name string, obj client.Object) func() bool { //nol } } -func createDSCI(enableMonitoring operatorv1.ManagementState, enableTrustedCABundle operatorv1.ManagementState, monitoringNS string) *dsci.DSCInitialization { +func createDSCI(appName string, enableMonitoring operatorv1.ManagementState, enableTrustedCABundle operatorv1.ManagementState, monitoringNS string) *dsci.DSCInitialization { return &dsci.DSCInitialization{ TypeMeta: metav1.TypeMeta{ Kind: "DSCInitialization", APIVersion: "v1", }, ObjectMeta: metav1.ObjectMeta{ - Name: applicationName, + Name: appName, Namespace: workingNamespace, }, Spec: dsci.DSCInitializationSpec{ diff --git a/controllers/webhook/webhook.go b/controllers/webhook/webhook.go deleted file mode 100644 index f730dc78703..00000000000 --- a/controllers/webhook/webhook.go +++ /dev/null @@ -1,111 +0,0 @@ -/* -Copyright 2023. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package webhook - -import ( - "context" - "fmt" - "net/http" - - admissionv1 "k8s.io/api/admission/v1" - "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - "k8s.io/apimachinery/pkg/runtime/schema" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" - "sigs.k8s.io/controller-runtime/pkg/webhook" - "sigs.k8s.io/controller-runtime/pkg/webhook/admission" -) - -var log = ctrl.Log.WithName("odh-controller-webhook") - -//+kubebuilder:webhook:path=/validate-opendatahub-io-v1,mutating=false,failurePolicy=fail,sideEffects=None,groups=datasciencecluster.opendatahub.io;dscinitialization.opendatahub.io,resources=datascienceclusters;dscinitializations,verbs=create;update,versions=v1,name=operator.opendatahub.io,admissionReviewVersions=v1 -//nolint:lll - -type OpenDataHubWebhook struct { - client client.Client - decoder *admission.Decoder -} - -func (w *OpenDataHubWebhook) SetupWithManager(mgr ctrl.Manager) { - hookServer := mgr.GetWebhookServer() - odhWebhook := &webhook.Admission{ - Handler: w, - } - hookServer.Register("/validate-opendatahub-io-v1", odhWebhook) -} - -func (w *OpenDataHubWebhook) InjectDecoder(d *admission.Decoder) error { - w.decoder = d - return nil -} - -func (w *OpenDataHubWebhook) InjectClient(c client.Client) error { - w.client = c - return nil -} - -func (w *OpenDataHubWebhook) checkDupCreation(ctx context.Context, req admission.Request) admission.Response { - if req.Operation != admissionv1.Create { - return admission.Allowed(fmt.Sprintf("duplication check: skipping %v request", req.Operation)) - } - - switch req.Kind.Kind { - case "DataScienceCluster": - case "DSCInitialization": - default: - log.Info("Got wrong kind", "kind", req.Kind.Kind) - return admission.Errored(http.StatusBadRequest, nil) - } - - gvk := schema.GroupVersionKind{ - Group: req.Kind.Group, - Version: req.Kind.Version, - Kind: req.Kind.Kind, - } - - list := &unstructured.UnstructuredList{} - list.SetGroupVersionKind(gvk) - - if err := w.client.List(ctx, list); err != nil { - return admission.Errored(http.StatusBadRequest, err) - } - - // if len == 1 now creation of #2 is being handled - if len(list.Items) > 0 { - return admission.Denied(fmt.Sprintf("Only one instance of %s object is allowed", req.Kind.Kind)) - } - - return admission.Allowed(fmt.Sprintf("%s duplication check passed", req.Kind.Kind)) -} - -func (w *OpenDataHubWebhook) Handle(ctx context.Context, req admission.Request) admission.Response { - var resp admission.Response - - // Handle only Create and Update - if req.Operation == admissionv1.Delete || req.Operation == admissionv1.Connect { - msg := fmt.Sprintf("ODH skipping %v request", req.Operation) - log.Info(msg) - return admission.Allowed(msg) - } - - resp = w.checkDupCreation(ctx, req) - if !resp.Allowed { - return resp - } - - return admission.Allowed(fmt.Sprintf("%s allowed", req.Kind.Kind)) -} diff --git a/controllers/webhook/webhook_suite_test.go b/controllers/webhook/webhook_suite_test.go deleted file mode 100644 index 95ce437c10c..00000000000 --- a/controllers/webhook/webhook_suite_test.go +++ /dev/null @@ -1,244 +0,0 @@ -/* -Copyright 2023. 
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package webhook_test - -import ( - "context" - "crypto/tls" - "fmt" - "net" - "path/filepath" - "testing" - "time" - - operatorv1 "github.com/openshift/api/operator/v1" - admissionv1beta1 "k8s.io/api/admission/v1beta1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/client-go/rest" - ctrl "sigs.k8s.io/controller-runtime" - "sigs.k8s.io/controller-runtime/pkg/client" - "sigs.k8s.io/controller-runtime/pkg/envtest" - logf "sigs.k8s.io/controller-runtime/pkg/log" - "sigs.k8s.io/controller-runtime/pkg/log/zap" - - dsc "github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1" - dsci "github.com/opendatahub-io/opendatahub-operator/v2/apis/dscinitialization/v1" - "github.com/opendatahub-io/opendatahub-operator/v2/components" - "github.com/opendatahub-io/opendatahub-operator/v2/components/codeflare" - "github.com/opendatahub-io/opendatahub-operator/v2/components/dashboard" - "github.com/opendatahub-io/opendatahub-operator/v2/components/datasciencepipelines" - "github.com/opendatahub-io/opendatahub-operator/v2/components/kserve" - "github.com/opendatahub-io/opendatahub-operator/v2/components/modelmeshserving" - "github.com/opendatahub-io/opendatahub-operator/v2/components/ray" - "github.com/opendatahub-io/opendatahub-operator/v2/components/trustyai" - "github.com/opendatahub-io/opendatahub-operator/v2/components/workbenches" - "github.com/opendatahub-io/opendatahub-operator/v2/controllers/webhook" - - . "github.com/onsi/ginkgo/v2" - . "github.com/onsi/gomega" -) - -const ( - namespace = "webhook-test-ns" - nameBase = "webhook-test" -) - -// These tests use Ginkgo (BDD-style Go testing framework). Refer to -// http://onsi.github.io/ginkgo/ to learn more about Ginkgo. - -var cfg *rest.Config -var k8sClient client.Client -var testEnv *envtest.Environment -var ctx context.Context -var cancel context.CancelFunc - -func TestAPIs(t *testing.T) { - RegisterFailHandler(Fail) - - RunSpecs(t, "Webhook Suite") -} - -var _ = BeforeSuite(func() { - logf.SetLogger(zap.New(zap.WriteTo(GinkgoWriter), zap.UseDevMode(true))) - - ctx, cancel = context.WithCancel(context.TODO()) - - By("bootstrapping test environment") - testEnv = &envtest.Environment{ - CRDDirectoryPaths: []string{filepath.Join("..", "..", "config", "crd", "bases")}, - ErrorIfCRDPathMissing: false, - WebhookInstallOptions: envtest.WebhookInstallOptions{ - Paths: []string{filepath.Join("..", "..", "config", "webhook")}, - }, - } - - var err error - // cfg is defined in this file globally. 
- cfg, err = testEnv.Start() - Expect(err).NotTo(HaveOccurred()) - Expect(cfg).NotTo(BeNil()) - - scheme := runtime.NewScheme() - // DSCI - err = dsci.AddToScheme(scheme) - Expect(err).NotTo(HaveOccurred()) - // DSC - err = dsc.AddToScheme(scheme) - Expect(err).NotTo(HaveOccurred()) - // Webhook - err = admissionv1beta1.AddToScheme(scheme) - Expect(err).NotTo(HaveOccurred()) - - k8sClient, err = client.New(cfg, client.Options{Scheme: scheme}) - Expect(err).NotTo(HaveOccurred()) - Expect(k8sClient).NotTo(BeNil()) - - // start webhook server using Manager - webhookInstallOptions := &testEnv.WebhookInstallOptions - mgr, err := ctrl.NewManager(cfg, ctrl.Options{ - Scheme: scheme, - Host: webhookInstallOptions.LocalServingHost, - Port: webhookInstallOptions.LocalServingPort, - CertDir: webhookInstallOptions.LocalServingCertDir, - LeaderElection: false, - MetricsBindAddress: "0", - }) - Expect(err).NotTo(HaveOccurred()) - - (&webhook.OpenDataHubWebhook{}).SetupWithManager(mgr) - - //+kubebuilder:scaffold:webhook - - go func() { - defer GinkgoRecover() - err = mgr.Start(ctx) - Expect(err).NotTo(HaveOccurred()) - }() - - // wait for the webhook server to get ready - dialer := &net.Dialer{Timeout: time.Second} - addrPort := fmt.Sprintf("%s:%d", webhookInstallOptions.LocalServingHost, webhookInstallOptions.LocalServingPort) - Eventually(func() error { - conn, err := tls.DialWithDialer(dialer, "tcp", addrPort, &tls.Config{InsecureSkipVerify: true}) //nolint:gosec - if err != nil { - return err - } - conn.Close() - return nil - }).Should(Succeed()) - -}) - -var _ = AfterSuite(func() { - cancel() - By("tearing down the test environment") - err := testEnv.Stop() - Expect(err).NotTo(HaveOccurred()) -}) - -var _ = Describe("DSC/DSCI webhook", func() { - It("Should not have more than one DSCI instance in the cluster", func() { - desiredDsci := newDSCI(nameBase + "-dsci-1") - Expect(k8sClient.Create(context.Background(), desiredDsci)).Should(Succeed()) - desiredDsci2 := newDSCI(nameBase + "-dsci-2") - Expect(k8sClient.Create(context.Background(), desiredDsci2)).ShouldNot(Succeed()) - }) - - It("Should block creation of second DSC instance", func() { - dscSpec := newDSC(nameBase+"-dsc-1", namespace) - Expect(k8sClient.Create(context.Background(), dscSpec)).Should(Succeed()) - dscSpec = newDSC(nameBase+"-dsc-2", namespace) - Expect(k8sClient.Create(context.Background(), dscSpec)).ShouldNot(Succeed()) - }) -}) - -func newDSCI(appName string) *dsci.DSCInitialization { - monitoringNS := "monitoring-namespace" - return &dsci.DSCInitialization{ - TypeMeta: metav1.TypeMeta{ - Kind: "DSCInitialization", - APIVersion: "v1", - }, - ObjectMeta: metav1.ObjectMeta{ - Name: appName, - Namespace: namespace, - }, - Spec: dsci.DSCInitializationSpec{ - ApplicationsNamespace: namespace, - Monitoring: dsci.Monitoring{ - Namespace: monitoringNS, - ManagementState: operatorv1.Managed, - }, - TrustedCABundle: dsci.TrustedCABundleSpec{ - ManagementState: operatorv1.Managed, - }, - }, - } -} -func newDSC(name string, namespace string) *dsc.DataScienceCluster { - return &dsc.DataScienceCluster{ - ObjectMeta: metav1.ObjectMeta{ - Name: name, - Namespace: namespace, - }, - Spec: dsc.DataScienceClusterSpec{ - Components: dsc.Components{ - Dashboard: dashboard.Dashboard{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - Workbenches: workbenches.Workbenches{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - ModelMeshServing: modelmeshserving.ModelMeshServing{ - 
Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - DataSciencePipelines: datasciencepipelines.DataSciencePipelines{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - Kserve: kserve.Kserve{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - CodeFlare: codeflare.CodeFlare{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - Ray: ray.Ray{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - TrustyAI: trustyai.TrustyAI{ - Component: components.Component{ - ManagementState: operatorv1.Removed, - }, - }, - }, - }, - } -} diff --git a/main.go b/main.go index 0d411c4c575..cf76380826c 100644 --- a/main.go +++ b/main.go @@ -17,7 +17,6 @@ limitations under the License. package main import ( - "context" "flag" "os" @@ -46,7 +45,6 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client/config" "sigs.k8s.io/controller-runtime/pkg/healthz" "sigs.k8s.io/controller-runtime/pkg/log/zap" - "sigs.k8s.io/controller-runtime/pkg/manager" kfdefv1 "github.com/opendatahub-io/opendatahub-operator/apis/kfdef.apps.kubeflow.org/v1" dsc "github.com/opendatahub-io/opendatahub-operator/v2/apis/datasciencecluster/v1" @@ -56,7 +54,6 @@ import ( datascienceclustercontrollers "github.com/opendatahub-io/opendatahub-operator/v2/controllers/datasciencecluster" dscicontr "github.com/opendatahub-io/opendatahub-operator/v2/controllers/dscinitialization" "github.com/opendatahub-io/opendatahub-operator/v2/controllers/secretgenerator" - "github.com/opendatahub-io/opendatahub-operator/v2/controllers/webhook" "github.com/opendatahub-io/opendatahub-operator/v2/pkg/deploy" "github.com/opendatahub-io/opendatahub-operator/v2/pkg/upgrade" ) @@ -140,8 +137,6 @@ func main() { os.Exit(1) } - (&webhook.OpenDataHubWebhook{}).SetupWithManager(mgr) - if err = (&dscicontr.DSCInitializationReconciler{ Client: mgr.GetClient(), Scheme: mgr.GetScheme(), @@ -208,18 +203,8 @@ func main() { // Check if user opted for disabling DSC configuration _, disableDSCConfig := os.LookupEnv("DISABLE_DSC_CONFIG") if !disableDSCConfig { - var createDefaultDSCIFunc manager.RunnableFunc = func(ctx context.Context) error { - err := upgrade.CreateDefaultDSCI(setupClient, platform, dscApplicationsNamespace, dscMonitoringNamespace) - if err != nil { - setupLog.Error(err, "unable to create initial setup for the operator") - } - return err - } - - err := mgr.Add(createDefaultDSCIFunc) - if err != nil { - setupLog.Error(err, "error scheduling DSCI creation") - os.Exit(1) + if err = upgrade.CreateDefaultDSCI(setupClient, platform, dscApplicationsNamespace, dscMonitoringNamespace); err != nil { + setupLog.Error(err, "unable to create initial setup for the operator") } } diff --git a/tests/e2e/controller_setup_test.go b/tests/e2e/controller_setup_test.go index b5f058c4957..bb005142fce 100644 --- a/tests/e2e/controller_setup_test.go +++ b/tests/e2e/controller_setup_test.go @@ -76,9 +76,9 @@ func NewTestContext() (*testContext, error) { //nolint:golint,revive // Only use } // setup DSCI CR since we do not create automatically by operator - testDSCI := setupDSCICR("e2e-test-dsci") + testDSCI := setupDSCICR() // Setup DataScienceCluster CR - testDSC := setupDSCInstance("e2e-test") + testDSC := setupDSCInstance() return &testContext{ cfg: config, diff --git a/tests/e2e/dsc_creation_test.go b/tests/e2e/dsc_creation_test.go index 5c2e078f78c..b5fcc71b888 100644 --- a/tests/e2e/dsc_creation_test.go +++ 
b/tests/e2e/dsc_creation_test.go @@ -13,9 +13,6 @@ import ( autoscalingv1 "k8s.io/api/autoscaling/v1" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - "k8s.io/apimachinery/pkg/runtime" - "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/client-go/util/retry" @@ -37,16 +34,10 @@ func creationTestSuite(t *testing.T) { err = testCtx.testDSCICreation() require.NoError(t, err, "error creating DSCI CR") }) - t.Run("Creation of more than one of DSCInitialization instance", func(t *testing.T) { - testCtx.testDSCIDuplication(t) - }) t.Run("Creation of DataScienceCluster instance", func(t *testing.T) { err = testCtx.testDSCCreation() require.NoError(t, err, "error creating DataScienceCluster instance") }) - t.Run("Creation of more than one of DataScienceCluster instance", func(t *testing.T) { - testCtx.testDSCDuplication(t) - }) t.Run("Validate all deployed components", func(t *testing.T) { err = testCtx.testAllApplicationCreation(t) require.NoError(t, err, "error testing deployments for DataScienceCluster: "+testCtx.testDsc.Name) @@ -141,56 +132,6 @@ func (tc *testContext) testDSCCreation() error { return nil } -func (tc *testContext) requireInstalled(t *testing.T, gvk schema.GroupVersionKind) { - t.Helper() - list := &unstructured.UnstructuredList{} - list.SetGroupVersionKind(gvk) - - err := tc.customClient.List(tc.ctx, list) - require.NoErrorf(t, err, "Could not get %s list", gvk.Kind) - - require.Greaterf(t, len(list.Items), 0, "%s has not been installed", gvk.Kind) -} - -func (tc *testContext) testDuplication(t *testing.T, gvk schema.GroupVersionKind, o any) { - t.Helper() - tc.requireInstalled(t, gvk) - - u, err := runtime.DefaultUnstructuredConverter.ToUnstructured(o) - require.NoErrorf(t, err, "Could not unstructure %s", gvk.Kind) - - obj := &unstructured.Unstructured{ - Object: u, - } - obj.SetGroupVersionKind(gvk) - - err = tc.customClient.Create(tc.ctx, obj) - - require.Errorf(t, err, "Could create second %s", gvk.Kind) -} - -func (tc *testContext) testDSCIDuplication(t *testing.T) { //nolint:thelper - gvk := schema.GroupVersionKind{ - Group: "dscinitialization.opendatahub.io", - Version: "v1", - Kind: "DSCInitialization", - } - dup := setupDSCICR("e2e-test-dsci-dup") - - tc.testDuplication(t, gvk, dup) -} - -func (tc *testContext) testDSCDuplication(t *testing.T) { //nolint:thelper - gvk := schema.GroupVersionKind{ - Group: "datasciencecluster.opendatahub.io", - Version: "v1", - Kind: "DataScienceCluster", - } - dup := setupDSCInstance("e2e-test-dup") - - tc.testDuplication(t, gvk, dup) -} - func (tc *testContext) testAllApplicationCreation(t *testing.T) error { //nolint:funlen,thelper // Validate test instance is in Ready state diff --git a/tests/e2e/helper_test.go b/tests/e2e/helper_test.go index c426d13feeb..592a95cff4b 100644 --- a/tests/e2e/helper_test.go +++ b/tests/e2e/helper_test.go @@ -53,10 +53,10 @@ func (tc *testContext) waitForControllerDeployment(name string, replicas int32) return err } -func setupDSCICR(name string) *dsci.DSCInitialization { +func setupDSCICR() *dsci.DSCInitialization { dsciTest := &dsci.DSCInitialization{ ObjectMeta: metav1.ObjectMeta{ - Name: name, + Name: "e2e-test-dsci", }, Spec: dsci.DSCInitializationSpec{ ApplicationsNamespace: "opendatahub", @@ -73,10 +73,10 @@ func setupDSCICR(name string) *dsci.DSCInitialization { return dsciTest } -func setupDSCInstance(name string) *dsc.DataScienceCluster { 
+func setupDSCInstance() *dsc.DataScienceCluster { dscTest := &dsc.DataScienceCluster{ ObjectMeta: metav1.ObjectMeta{ - Name: name, + Name: "e2e-test-dsc", }, Spec: dsc.DataScienceClusterSpec{ Components: dsc.Components{