diff --git a/.github/resources/dspa-lite/dspa.yaml b/.github/resources/dspa-lite/dspa.yaml
index ee057c24..0bb03a4c 100644
--- a/.github/resources/dspa-lite/dspa.yaml
+++ b/.github/resources/dspa-lite/dspa.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: test-dspa
diff --git a/README.md b/README.md
index 8d16794e..1540b896 100644
--- a/README.md
+++ b/README.md
@@ -285,7 +285,7 @@ To understand how these components interact with each other please refer to the
 To deploy a standalone MariaDB metadata database (rather than providing your own database connection details), simply add a `mariaDB` item under `spec.database` in your DSPA definition with a `deploy` key set to `true`. All other fields are defaultable/optional, see [All Fields DSPA Example](config/samples/v2/dspa-all-fields/dspa_all_fields.yaml) for full details. Note that this component is mutually exclusive with externally-provided databases (defined by `spec.database.externalDB`).
 
 ```yaml
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
@@ -302,7 +302,7 @@ spec:
 To deploy a Minio Object Storage component (rather than providing your own object storage connection details), simply add a `minio` item under `spec.objectStorage` in your DSPA definition with an `image` key set to a valid minio component container image. All other fields are defaultable/optional, see [All Fields DSPA Example](config/samples/v2/dspa-all-fields/dspa_all_fields.yaml) for full details. Note that this component is mutually exclusive with externally-provided object stores (defined by `spec.objectStorage.externalStorage`).
 
 ```yaml
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
@@ -320,7 +320,7 @@ spec:
 To deploy the standalone DS Pipelines UI component, simply add a `spec.mlpipelineUI` item to your DSPA with an `image` key set to a valid ui component container image. All other fields are defaultable/optional, see [All Fields DSPA Example](config/samples/v2/dspa-all-fields/dspa_all_fields.yaml) for full details.
 
 ```yaml
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
@@ -337,7 +337,7 @@ spec:
 To deploy the ML Metadata artifact lineage/metadata component, simply add a `spec.mlmd` item to your DSPA with `deploy` set to `true`. All other fields are defaultable/optional, see [All Fields DSPA Example](config/samples/v2/dspa-all-fields/dspa_all_fields.yaml) for full details.
 
 ```yaml
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
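For orientation, here is a combined sketch of the externally-provided counterparts under the new `v1` API; the hostnames, bucket, and Secret names are hypothetical:

```yaml
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: sample
spec:
  database:
    externalDB:
      host: mysql.example.com          # hypothetical host
      port: "3306"
      username: mlpipeline
      pipelineDBName: mlpipeline
      passwordSecret:
        name: my-db-secret             # hypothetical Secret
        key: password
  objectStorage:
    externalStorage:
      host: s3.us-east-1.amazonaws.com
      bucket: my-dsp-bucket            # hypothetical bucket
      scheme: https
      s3CredentialsSecret:
        secretName: my-s3-secret       # hypothetical Secret
        accessKey: AWS_ACCESS_KEY_ID   # key *names* inside the Secret
        secretKey: AWS_SECRET_ACCESS_KEY
```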
diff --git a/api/v1/dspipeline_types.go b/api/v1/dspipeline_types.go
new file mode 100644
index 00000000..8f625331
--- /dev/null
+++ b/api/v1/dspipeline_types.go
@@ -0,0 +1,394 @@
+/*
+Copyright 2023.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package v1
+
+import (
+    "k8s.io/apimachinery/pkg/api/resource"
+    metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+)
+
+type DSPASpec struct {
+    // DS Pipelines API Server configuration.
+    // +kubebuilder:default:={deploy: true}
+    *APIServer `json:"apiServer,omitempty"`
+    // DS Pipelines PersistenceAgent configuration.
+    // +kubebuilder:default:={deploy: true}
+    *PersistenceAgent `json:"persistenceAgent,omitempty"`
+    // DS Pipelines Scheduled Workflow configuration.
+    // +kubebuilder:default:={deploy: true}
+    *ScheduledWorkflow `json:"scheduledWorkflow,omitempty"`
+    // Database specifies database configurations, used for DS Pipelines metadata tracking. Specify either the default MariaDB deployment, or configure your own External SQL DB.
+    // +kubebuilder:default:={mariaDB: {deploy: true}}
+    *Database `json:"database,omitempty"`
+    // Deploy the standalone KFP UI with DS Pipelines. This feature is unsupported, and primarily used for exploration, testing, and development purposes.
+    // +kubebuilder:validation:Optional
+    *MlPipelineUI `json:"mlpipelineUI"`
+    // ObjectStorage specifies Object Store configurations, used for DS Pipelines artifact passing and storage. Specify either your own External Storage (e.g. AWS S3), or use the default Minio deployment (unsupported, primarily for development and testing).
+    // +kubebuilder:validation:Required
+    *ObjectStorage `json:"objectStorage"`
+    *MLMD `json:"mlmd,omitempty"`
+    // +kubebuilder:validation:Optional
+    // +kubebuilder:default:="v2"
+    DSPVersion string `json:"dspVersion,omitempty"`
+
+    // PodToPodTLS: set to true to enable TLS communication between DSPA components (pods), or to false to disable it. Defaults to true. Only supported in DSP V2 on OpenShift.
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    PodToPodTLS *bool `json:"podToPodTLS"`
+
+    // WorkflowController is an argo-specific component that manages a DSPA's Workflow objects and handles the orchestration of them with the central Argo server
+    // +kubebuilder:validation:Optional
+    *WorkflowController `json:"workflowController,omitempty"`
+}
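Because of the `+kubebuilder:default` markers above, a near-empty DSPA still gets an API server, persistence agent, scheduled workflow, and MariaDB; only `objectStorage` must be supplied. A minimal sketch (the minio image reference is hypothetical):

```yaml
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: sample
spec:
  dspVersion: v2
  podToPodTLS: true        # default; TLS between DSPA pods (DSP V2 on OpenShift only)
  objectStorage:           # the one required field
    minio:
      image: quay.io/opendatahub/minio:latest   # hypothetical image
```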
+
+type APIServer struct {
+    // Enable DS Pipelines Operator management of DSP API Server. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    // Specify a custom image for DSP API Server.
+    Image string `json:"image,omitempty"`
+    // Create an Openshift Route for this DSP API Server. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    EnableRoute bool `json:"enableOauth"`
+    // Include sample pipelines with the deployment of this DSP API Server. Default: false
+    // +kubebuilder:default:=false
+    // +kubebuilder:validation:Optional
+    EnableSamplePipeline bool `json:"enableSamplePipeline"`
+    ArgoLauncherImage string `json:"argoLauncherImage,omitempty"`
+    ArgoDriverImage string `json:"argoDriverImage,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+
+    // If the Object store/DB is behind a TLS secured connection that is
+    // unrecognized by the host OpenShift/K8s cluster, then you can
+    // provide a PEM formatted CA bundle to be injected into the DSP
+    // server pod to trust this connection. CA Bundle should be provided
+    // as values within configmaps, mapped to keys.
+    CABundle *CABundle `json:"cABundle,omitempty"`
+
+    // CustomServerConfig is a custom config file that you can provide
+    // for the api server to use instead.
+    CustomServerConfig *ScriptConfigMap `json:"customServerConfigMap,omitempty"`
+
+    // When specified, the `data` contents of the `kfp-launcher` ConfigMap that DSPO writes
+    // will be fully replaced with the `data` contents of the ConfigMap specified here.
+    // This allows the user to fully replace the `data` contents of the kfp-launcher ConfigMap.
+    // The `kfp-launcher` component requires a ConfigMap to exist in the namespace
+    // where it runs (i.e. the namespace where pipelines run). This ConfigMap contains
+    // object storage configuration, as well as pipeline root (object store root path
+    // where artifacts will be uploaded) configuration. Currently this ConfigMap *must*
+    // be named "kfp-launcher". We currently deploy a default copy of the kfp-launcher
+    // ConfigMap via DSPO, but a user may want to provide their own ConfigMap configuration,
+    // so that they can specify multiple object storage sources and paths.
+    // +kubebuilder:validation:Optional
+    CustomKfpLauncherConfigMap string `json:"customKfpLauncherConfigMap,omitempty"`
+
+    // This is the path where the ca bundle will be mounted in the
+    // pipeline server and user executor pods
+    // +kubebuilder:validation:Optional
+    CABundleFileMountPath string `json:"caBundleFileMountPath"`
+    // This is the filename of the ca bundle that will be created in the
+    // pipeline server and user executor pods
+    // +kubebuilder:validation:Optional
+    CABundleFileName string `json:"caBundleFileName"`
+
+    // The expiry time (seconds) for artifact download links when
+    // querying the dsp server via /apis/v2beta1/artifacts/{id}?share_url=true
+    // Default: 60
+    // +kubebuilder:default:=60
+    // +kubebuilder:validation:Optional
+    ArtifactSignedURLExpirySeconds *int `json:"artifactSignedURLExpirySeconds"`
+}
+
+type CABundle struct {
+    // +kubebuilder:validation:Required
+    ConfigMapName string `json:"configMapName"`
+    // Key should map to a CA bundle. The key is also used to name
+    // the CA bundle file (e.g. ca-bundle.crt)
+    // +kubebuilder:validation:Required
+    ConfigMapKey string `json:"configMapKey"`
+}
+
+type ScriptConfigMap struct {
+    Name string `json:"name,omitempty"`
+    Key string `json:"key,omitempty"`
+}
+
+type PersistenceAgent struct {
+    // Enable DS Pipelines Operator management of Persistence Agent. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    // Specify a custom image for DSP PersistenceAgent.
+    Image string `json:"image,omitempty"`
+    // Number of workers for the Persistence Agent sync job. Default: 2
+    // +kubebuilder:default:=2
+    NumWorkers int `json:"numWorkers,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+}
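The `cABundle` and `customKfpLauncherConfigMap` fields on APIServer above are easiest to read with a concrete manifest. A sketch with hypothetical names; the `defaultPipelineRoot` key is only illustrative of kfp-launcher `data` contents:

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: my-kfp-launcher-overrides   # hypothetical; its `data` fully replaces the generated kfp-launcher data
data:
  defaultPipelineRoot: s3://my-dsp-bucket/pipelines   # illustrative content
---
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: sample
spec:
  apiServer:
    cABundle:
      configMapName: my-ca-bundle   # hypothetical ConfigMap holding the PEM bundle
      configMapKey: ca-bundle.crt   # the key also names the mounted file
    customKfpLauncherConfigMap: my-kfp-launcher-overrides
```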
+
+type ScheduledWorkflow struct {
+    // Enable DS Pipelines Operator management of ScheduledWorkflow. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    // Specify a custom image for DSP ScheduledWorkflow controller.
+    Image string `json:"image,omitempty"`
+    // Specify the Cron timezone used for ScheduledWorkflow PipelineRuns. Default: UTC
+    // +kubebuilder:default:=UTC
+    CronScheduleTimezone string `json:"cronScheduleTimezone,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+}
+
+type MlPipelineUI struct {
+    // Enable DS Pipelines Operator management of KFP UI. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    ConfigMapName string `json:"configMap,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+    // Specify a custom image for KFP UI pod.
+    // +kubebuilder:validation:Required
+    Image string `json:"image"`
+}
+
+type Database struct {
+    *MariaDB `json:"mariaDB,omitempty"`
+    *ExternalDB `json:"externalDB,omitempty"`
+
+    // +kubebuilder:validation:Optional
+    // CustomExtraParams allow users to further customize the sql dsn parameters used by the Pipeline Server
+    // when opening a connection with the Database.
+    // ref: https://github.com/go-sql-driver/mysql?tab=readme-ov-file#dsn-data-source-name
+    //
+    // Value must be a JSON string. For example, to disable tls for Pipeline Server DB connection
+    // the user can provide a string: {"tls":"false"}
+    //
+    // If updating post DSPA deployment, then a manual restart of the pipeline server pod will be required
+    // so the new configmap may be consumed.
+    CustomExtraParams *string `json:"customExtraParams,omitempty"`
+
+    // Default: false
+    // +kubebuilder:default:=false
+    // +kubebuilder:validation:Optional
+    DisableHealthCheck bool `json:"disableHealthCheck"`
+}
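Since `CustomExtraParams` above must be a JSON string embedded in YAML, the quoting is the easy part to get wrong; a sketch:

```yaml
spec:
  database:
    # JSON string of go-sql-driver DSN parameters; this example disables TLS
    # for the Pipeline Server's DB connection.
    customExtraParams: '{"tls":"false"}'
```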
+
+type MariaDB struct {
+    // Enable DS Pipelines Operator management of MariaDB. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    // Specify a custom image for DSP MariaDB pod.
+    Image string `json:"image,omitempty"`
+    // The MariaDB username that will be created. Should match `^[a-zA-Z0-9_]+`. Default: mlpipeline
+    // +kubebuilder:default:=mlpipeline
+    // +kubebuilder:validation:Pattern=`^[a-zA-Z0-9_]+$`
+    Username string `json:"username,omitempty"`
+    PasswordSecret *SecretKeyValue `json:"passwordSecret,omitempty"`
+    // +kubebuilder:default:=mlpipeline
+    // The database name that will be created. Should match `^[a-zA-Z0-9_]+`. Default: mlpipeline
+    // +kubebuilder:validation:Pattern=`^[a-zA-Z0-9_]+$`
+    DBName string `json:"pipelineDBName,omitempty"`
+    // Customize the size of the PVC created for the default MariaDB instance. Default: 10Gi
+    // +kubebuilder:default:="10Gi"
+    PVCSize resource.Quantity `json:"pvcSize,omitempty"`
+    // Volume Mode Filesystem storageClass to use for PVC creation
+    // +kubebuilder:validation:Optional
+    StorageClassName string `json:"storageClassName,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+}
+
+type ExternalDB struct {
+    // +kubebuilder:validation:Required
+    Host string `json:"host"`
+    Port string `json:"port"`
+    Username string `json:"username"`
+    DBName string `json:"pipelineDBName"`
+    PasswordSecret *SecretKeyValue `json:"passwordSecret"`
+}
+
+type ObjectStorage struct {
+    // Enable DS Pipelines Operator management of Minio. Setting Deploy to false disables operator reconciliation.
+    *Minio `json:"minio,omitempty"`
+    *ExternalStorage `json:"externalStorage,omitempty"`
+    // Default: false
+    // +kubebuilder:default:=false
+    // +kubebuilder:validation:Optional
+    DisableHealthCheck bool `json:"disableHealthCheck"`
+    // Enable an external route so the object storage is reachable from outside the cluster. Default: false
+    // +kubebuilder:default:=false
+    // +kubebuilder:validation:Optional
+    EnableExternalRoute bool `json:"enableExternalRoute"`
+}
+
+type Minio struct {
+    // Enable DS Pipelines Operator management of Minio. Setting Deploy to false disables operator reconciliation. Default: true
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    // Provide the Bucket name that will be used to store artifacts in S3. If the provided bucket does not exist, DSP Apiserver will attempt to create it. As such the credentials provided should have sufficient permissions to create buckets. Default: mlpipeline
+    // +kubebuilder:default:=mlpipeline
+    Bucket string `json:"bucket,omitempty"`
+    // Credentials for the S3 user (e.g. IAM user credentials stored in a k8s Secret). Note that the S3 user should have the permissions to create a bucket if the provided bucket does not exist.
+    *S3CredentialSecret `json:"s3CredentialsSecret,omitempty"`
+    // Customize the size of the PVC created for the Minio instance. Default: 10Gi
+    // +kubebuilder:default:="10Gi"
+    PVCSize resource.Quantity `json:"pvcSize,omitempty"`
+    // Volume Mode Filesystem storageClass to use for PVC creation
+    // +kubebuilder:validation:Optional
+    StorageClassName string `json:"storageClassName,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+    // Specify a custom image for Minio pod.
+    // +kubebuilder:validation:Required
+    Image string `json:"image"`
+}
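A sketch of the development-only Minio path described above; the image reference and Secret name are hypothetical:

```yaml
spec:
  objectStorage:
    minio:
      image: quay.io/opendatahub/minio:latest   # hypothetical image reference
      bucket: mlpipeline                        # created by the API server if it does not exist
      s3CredentialsSecret:
        secretName: my-minio-secret             # hypothetical Secret
        accessKey: accesskey                    # key names inside that Secret
        secretKey: secretkey
```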
+
+type MLMD struct {
+    // Enable DS Pipelines Operator management of MLMD. Setting Deploy to false disables operator reconciliation. Default: false
+    // +kubebuilder:default:=false
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    *Envoy `json:"envoy,omitempty"`
+    *GRPC `json:"grpc,omitempty"`
+}
+
+type Envoy struct {
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+    Image string `json:"image,omitempty"`
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    DeployRoute bool `json:"deployRoute"`
+}
+
+type GRPC struct {
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+    Image string `json:"image,omitempty"`
+    // +kubebuilder:validation:Optional
+    Port string `json:"port"`
+}
+
+type Writer struct {
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+    // +kubebuilder:validation:Required
+    Image string `json:"image"`
+}
+
+type WorkflowController struct {
+    // +kubebuilder:default:=true
+    // +kubebuilder:validation:Optional
+    Deploy bool `json:"deploy"`
+    Image string `json:"image,omitempty"`
+    ArgoExecImage string `json:"argoExecImage,omitempty"`
+    CustomConfig string `json:"customConfig,omitempty"`
+    // Specify custom Pod resource requirements for this component.
+    Resources *ResourceRequirements `json:"resources,omitempty"`
+}
+
+// ResourceRequirements structures compute resource requirements.
+// Replaces ResourceRequirements from corev1 which also includes optional storage field.
+// We handle storage field separately, and should not include it as a subfield for Resources.
+type ResourceRequirements struct {
+    Limits *Resources `json:"limits,omitempty"`
+    Requests *Resources `json:"requests,omitempty"`
+}
+
+type Resources struct {
+    CPU resource.Quantity `json:"cpu,omitempty"`
+    Memory resource.Quantity `json:"memory,omitempty"`
+}
+
+type ExternalStorage struct {
+    // +kubebuilder:validation:Required
+    Host string `json:"host"`
+    Bucket string `json:"bucket"`
+    Scheme string `json:"scheme"`
+    // +kubebuilder:validation:Optional
+    Region string `json:"region"`
+    // Subpath where objects should be stored for this DSPA
+    // +kubebuilder:validation:Optional
+    BasePath string `json:"basePath"`
+    *S3CredentialSecret `json:"s3CredentialsSecret"`
+    // +kubebuilder:validation:Optional
+    Secure *bool `json:"secure"`
+    // +kubebuilder:validation:Optional
+    Port string `json:"port"`
+}
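The `S3CredentialSecret` type below is a common stumbling point: `accessKey` and `secretKey` name *keys inside* the referenced Secret, not the credential values themselves. A sketch with hypothetical names:

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: my-s3-secret              # hypothetical
stringData:
  AWS_ACCESS_KEY_ID: AKIA...      # the actual credential values live here
  AWS_SECRET_ACCESS_KEY: "..."
---
# In the DSPA, reference the key *names*, not the values:
spec:
  objectStorage:
    externalStorage:
      s3CredentialsSecret:
        secretName: my-s3-secret
        accessKey: AWS_ACCESS_KEY_ID
        secretKey: AWS_SECRET_ACCESS_KEY
```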
+
+type S3CredentialSecret struct {
+    // +kubebuilder:validation:Required
+    // The name of the Secret where the AccessKey and SecretKey are defined.
+    SecretName string `json:"secretName"`
+    // The "Keys" in the k8sSecret key/value pairs. Not to be confused with the values.
+    AccessKey string `json:"accessKey"`
+    SecretKey string `json:"secretKey"`
+}
+
+type SecretKeyValue struct {
+    // +kubebuilder:validation:Required
+    Name string `json:"name"`
+    Key string `json:"key"`
+}
+
+type DSPAStatus struct {
+    // +kubebuilder:validation:Optional
+    Components ComponentStatus `json:"components,omitempty"`
+    Conditions []metav1.Condition `json:"conditions,omitempty"`
+}
+
+type ComponentStatus struct {
+    // +kubebuilder:validation:Optional
+    MLMDProxy ComponentDetailStatus `json:"mlmdProxy,omitempty"`
+    APIServer ComponentDetailStatus `json:"apiServer,omitempty"`
+}
+
+type ComponentDetailStatus struct {
+    Url string `json:"url,omitempty"`
+    // +kubebuilder:validation:Optional
+    ExternalUrl string `json:"externalUrl,omitempty"`
+}
+
+// +kubebuilder:object:root=true
+// +kubebuilder:subresource:status
+// +kubebuilder:resource:shortName=dspa
+// +kubebuilder:storageversion
+
+type DataSciencePipelinesApplication struct {
+    metav1.TypeMeta `json:",inline"`
+    metav1.ObjectMeta `json:"metadata,omitempty"`
+    Spec DSPASpec `json:"spec,omitempty"`
+    Status DSPAStatus `json:"status,omitempty"`
+}
+
+// +kubebuilder:object:root=true
+
+type DataSciencePipelinesApplicationList struct {
+    metav1.TypeMeta `json:",inline"`
+    metav1.ListMeta `json:"metadata,omitempty"`
+    Items []DataSciencePipelinesApplication `json:"items"`
+}
+
+func init() {
+    SchemeBuilder.Register(&DataSciencePipelinesApplication{}, &DataSciencePipelinesApplicationList{})
+}
diff --git a/api/v1/groupversion_info.go b/api/v1/groupversion_info.go
new file mode 100644
index 00000000..291141ff
--- /dev/null
+++ b/api/v1/groupversion_info.go
@@ -0,0 +1,36 @@
+/*
+Copyright 2023.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+// Package v1 contains API Schema definitions for the datasciencepipelinesapplications v1 API group
+// +kubebuilder:object:generate=true
+// +groupName=datasciencepipelinesapplications.opendatahub.io
+package v1
+
+import (
+    "k8s.io/apimachinery/pkg/runtime/schema"
+    "sigs.k8s.io/controller-runtime/pkg/scheme"
+)
+
+var (
+    // GroupVersion is group version used to register these objects
+    GroupVersion = schema.GroupVersion{Group: "datasciencepipelinesapplications.opendatahub.io", Version: "v1"}
+
+    // SchemeBuilder is used to add go types to the GroupVersionKind scheme
+    SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion}
+
+    // AddToScheme adds the types in this group-version to the given scheme.
+    AddToScheme = SchemeBuilder.AddToScheme
+)
diff --git a/api/v1/zz_generated.deepcopy.go b/api/v1/zz_generated.deepcopy.go
new file mode 100644
index 00000000..f3788e5f
--- /dev/null
+++ b/api/v1/zz_generated.deepcopy.go
@@ -0,0 +1,655 @@
+//go:build !ignore_autogenerated
+// +build !ignore_autogenerated
+
+/*
+Copyright 2023.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by controller-gen. DO NOT EDIT. + +package v1 + +import ( + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + runtime "k8s.io/apimachinery/pkg/runtime" +) + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *APIServer) DeepCopyInto(out *APIServer) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } + if in.CABundle != nil { + in, out := &in.CABundle, &out.CABundle + *out = new(CABundle) + **out = **in + } + if in.CustomServerConfig != nil { + in, out := &in.CustomServerConfig, &out.CustomServerConfig + *out = new(ScriptConfigMap) + **out = **in + } + if in.ArtifactSignedURLExpirySeconds != nil { + in, out := &in.ArtifactSignedURLExpirySeconds, &out.ArtifactSignedURLExpirySeconds + *out = new(int) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new APIServer. +func (in *APIServer) DeepCopy() *APIServer { + if in == nil { + return nil + } + out := new(APIServer) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *CABundle) DeepCopyInto(out *CABundle) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new CABundle. +func (in *CABundle) DeepCopy() *CABundle { + if in == nil { + return nil + } + out := new(CABundle) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ComponentDetailStatus) DeepCopyInto(out *ComponentDetailStatus) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentDetailStatus. +func (in *ComponentDetailStatus) DeepCopy() *ComponentDetailStatus { + if in == nil { + return nil + } + out := new(ComponentDetailStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ComponentStatus) DeepCopyInto(out *ComponentStatus) { + *out = *in + out.MLMDProxy = in.MLMDProxy + out.APIServer = in.APIServer +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ComponentStatus. +func (in *ComponentStatus) DeepCopy() *ComponentStatus { + if in == nil { + return nil + } + out := new(ComponentStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *DSPASpec) DeepCopyInto(out *DSPASpec) { + *out = *in + if in.APIServer != nil { + in, out := &in.APIServer, &out.APIServer + *out = new(APIServer) + (*in).DeepCopyInto(*out) + } + if in.PersistenceAgent != nil { + in, out := &in.PersistenceAgent, &out.PersistenceAgent + *out = new(PersistenceAgent) + (*in).DeepCopyInto(*out) + } + if in.ScheduledWorkflow != nil { + in, out := &in.ScheduledWorkflow, &out.ScheduledWorkflow + *out = new(ScheduledWorkflow) + (*in).DeepCopyInto(*out) + } + if in.Database != nil { + in, out := &in.Database, &out.Database + *out = new(Database) + (*in).DeepCopyInto(*out) + } + if in.MlPipelineUI != nil { + in, out := &in.MlPipelineUI, &out.MlPipelineUI + *out = new(MlPipelineUI) + (*in).DeepCopyInto(*out) + } + if in.ObjectStorage != nil { + in, out := &in.ObjectStorage, &out.ObjectStorage + *out = new(ObjectStorage) + (*in).DeepCopyInto(*out) + } + if in.MLMD != nil { + in, out := &in.MLMD, &out.MLMD + *out = new(MLMD) + (*in).DeepCopyInto(*out) + } + if in.PodToPodTLS != nil { + in, out := &in.PodToPodTLS, &out.PodToPodTLS + *out = new(bool) + **out = **in + } + if in.WorkflowController != nil { + in, out := &in.WorkflowController, &out.WorkflowController + *out = new(WorkflowController) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DSPASpec. +func (in *DSPASpec) DeepCopy() *DSPASpec { + if in == nil { + return nil + } + out := new(DSPASpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DSPAStatus) DeepCopyInto(out *DSPAStatus) { + *out = *in + out.Components = in.Components + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]metav1.Condition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DSPAStatus. +func (in *DSPAStatus) DeepCopy() *DSPAStatus { + if in == nil { + return nil + } + out := new(DSPAStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *DataSciencePipelinesApplication) DeepCopyInto(out *DataSciencePipelinesApplication) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + in.Status.DeepCopyInto(&out.Status) +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSciencePipelinesApplication. +func (in *DataSciencePipelinesApplication) DeepCopy() *DataSciencePipelinesApplication { + if in == nil { + return nil + } + out := new(DataSciencePipelinesApplication) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *DataSciencePipelinesApplication) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *DataSciencePipelinesApplicationList) DeepCopyInto(out *DataSciencePipelinesApplicationList) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ListMeta.DeepCopyInto(&out.ListMeta) + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]DataSciencePipelinesApplication, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new DataSciencePipelinesApplicationList. +func (in *DataSciencePipelinesApplicationList) DeepCopy() *DataSciencePipelinesApplicationList { + if in == nil { + return nil + } + out := new(DataSciencePipelinesApplicationList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *DataSciencePipelinesApplicationList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Database) DeepCopyInto(out *Database) { + *out = *in + if in.MariaDB != nil { + in, out := &in.MariaDB, &out.MariaDB + *out = new(MariaDB) + (*in).DeepCopyInto(*out) + } + if in.ExternalDB != nil { + in, out := &in.ExternalDB, &out.ExternalDB + *out = new(ExternalDB) + (*in).DeepCopyInto(*out) + } + if in.CustomExtraParams != nil { + in, out := &in.CustomExtraParams, &out.CustomExtraParams + *out = new(string) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Database. +func (in *Database) DeepCopy() *Database { + if in == nil { + return nil + } + out := new(Database) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Envoy) DeepCopyInto(out *Envoy) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Envoy. +func (in *Envoy) DeepCopy() *Envoy { + if in == nil { + return nil + } + out := new(Envoy) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ExternalDB) DeepCopyInto(out *ExternalDB) { + *out = *in + if in.PasswordSecret != nil { + in, out := &in.PasswordSecret, &out.PasswordSecret + *out = new(SecretKeyValue) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalDB. +func (in *ExternalDB) DeepCopy() *ExternalDB { + if in == nil { + return nil + } + out := new(ExternalDB) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ExternalStorage) DeepCopyInto(out *ExternalStorage) { + *out = *in + if in.S3CredentialSecret != nil { + in, out := &in.S3CredentialSecret, &out.S3CredentialSecret + *out = new(S3CredentialSecret) + **out = **in + } + if in.Secure != nil { + in, out := &in.Secure, &out.Secure + *out = new(bool) + **out = **in + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExternalStorage. 
+func (in *ExternalStorage) DeepCopy() *ExternalStorage { + if in == nil { + return nil + } + out := new(ExternalStorage) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *GRPC) DeepCopyInto(out *GRPC) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GRPC. +func (in *GRPC) DeepCopy() *GRPC { + if in == nil { + return nil + } + out := new(GRPC) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MLMD) DeepCopyInto(out *MLMD) { + *out = *in + if in.Envoy != nil { + in, out := &in.Envoy, &out.Envoy + *out = new(Envoy) + (*in).DeepCopyInto(*out) + } + if in.GRPC != nil { + in, out := &in.GRPC, &out.GRPC + *out = new(GRPC) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MLMD. +func (in *MLMD) DeepCopy() *MLMD { + if in == nil { + return nil + } + out := new(MLMD) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MariaDB) DeepCopyInto(out *MariaDB) { + *out = *in + if in.PasswordSecret != nil { + in, out := &in.PasswordSecret, &out.PasswordSecret + *out = new(SecretKeyValue) + **out = **in + } + out.PVCSize = in.PVCSize.DeepCopy() + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MariaDB. +func (in *MariaDB) DeepCopy() *MariaDB { + if in == nil { + return nil + } + out := new(MariaDB) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Minio) DeepCopyInto(out *Minio) { + *out = *in + if in.S3CredentialSecret != nil { + in, out := &in.S3CredentialSecret, &out.S3CredentialSecret + *out = new(S3CredentialSecret) + **out = **in + } + out.PVCSize = in.PVCSize.DeepCopy() + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Minio. +func (in *Minio) DeepCopy() *Minio { + if in == nil { + return nil + } + out := new(Minio) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *MlPipelineUI) DeepCopyInto(out *MlPipelineUI) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MlPipelineUI. +func (in *MlPipelineUI) DeepCopy() *MlPipelineUI { + if in == nil { + return nil + } + out := new(MlPipelineUI) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *ObjectStorage) DeepCopyInto(out *ObjectStorage) { + *out = *in + if in.Minio != nil { + in, out := &in.Minio, &out.Minio + *out = new(Minio) + (*in).DeepCopyInto(*out) + } + if in.ExternalStorage != nil { + in, out := &in.ExternalStorage, &out.ExternalStorage + *out = new(ExternalStorage) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ObjectStorage. +func (in *ObjectStorage) DeepCopy() *ObjectStorage { + if in == nil { + return nil + } + out := new(ObjectStorage) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PersistenceAgent) DeepCopyInto(out *PersistenceAgent) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PersistenceAgent. +func (in *PersistenceAgent) DeepCopy() *PersistenceAgent { + if in == nil { + return nil + } + out := new(PersistenceAgent) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ResourceRequirements) DeepCopyInto(out *ResourceRequirements) { + *out = *in + if in.Limits != nil { + in, out := &in.Limits, &out.Limits + *out = new(Resources) + (*in).DeepCopyInto(*out) + } + if in.Requests != nil { + in, out := &in.Requests, &out.Requests + *out = new(Resources) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ResourceRequirements. +func (in *ResourceRequirements) DeepCopy() *ResourceRequirements { + if in == nil { + return nil + } + out := new(ResourceRequirements) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Resources) DeepCopyInto(out *Resources) { + *out = *in + out.CPU = in.CPU.DeepCopy() + out.Memory = in.Memory.DeepCopy() +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Resources. +func (in *Resources) DeepCopy() *Resources { + if in == nil { + return nil + } + out := new(Resources) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *S3CredentialSecret) DeepCopyInto(out *S3CredentialSecret) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new S3CredentialSecret. +func (in *S3CredentialSecret) DeepCopy() *S3CredentialSecret { + if in == nil { + return nil + } + out := new(S3CredentialSecret) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScheduledWorkflow) DeepCopyInto(out *ScheduledWorkflow) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScheduledWorkflow. 
+func (in *ScheduledWorkflow) DeepCopy() *ScheduledWorkflow { + if in == nil { + return nil + } + out := new(ScheduledWorkflow) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *ScriptConfigMap) DeepCopyInto(out *ScriptConfigMap) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ScriptConfigMap. +func (in *ScriptConfigMap) DeepCopy() *ScriptConfigMap { + if in == nil { + return nil + } + out := new(ScriptConfigMap) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *SecretKeyValue) DeepCopyInto(out *SecretKeyValue) { + *out = *in +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretKeyValue. +func (in *SecretKeyValue) DeepCopy() *SecretKeyValue { + if in == nil { + return nil + } + out := new(SecretKeyValue) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *WorkflowController) DeepCopyInto(out *WorkflowController) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new WorkflowController. +func (in *WorkflowController) DeepCopy() *WorkflowController { + if in == nil { + return nil + } + out := new(WorkflowController) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Writer) DeepCopyInto(out *Writer) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(ResourceRequirements) + (*in).DeepCopyInto(*out) + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Writer. +func (in *Writer) DeepCopy() *Writer { + if in == nil { + return nil + } + out := new(Writer) + in.DeepCopyInto(out) + return out +} diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go index 657e0a2a..6a01914a 100644 --- a/api/v1alpha1/dspipeline_types.go +++ b/api/v1alpha1/dspipeline_types.go @@ -424,10 +424,10 @@ type ComponentDetailStatus struct { ExternalUrl string `json:"externalUrl,omitempty"` } -//+kubebuilder:object:root=true -//+kubebuilder:subresource:status -//+kubebuilder:resource:shortName=dspa - +// +kubebuilder:object:root=true +// +kubebuilder:subresource:status +// +kubebuilder:resource:shortName=dspa +// +kubebuilder:deprecatedversion:warning="datasciencepipelinesapplications.opendatahub.io/v1alpha1 is deprecated." 
type DataSciencePipelinesApplication struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` diff --git a/config/base/kustomization.yaml b/config/base/kustomization.yaml index cc3a5ef5..4cfe5112 100644 --- a/config/base/kustomization.yaml +++ b/config/base/kustomization.yaml @@ -17,20 +17,21 @@ configMapGenerator: envs: - params.env vars: - - name: IMAGES_APISERVER + # Images + - name: IMAGES_DSPO objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_APISERVER - - name: IMAGES_ARTIFACT + fieldpath: data.IMAGES_DSPO + - name: IMAGES_APISERVER objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_ARTIFACT + fieldpath: data.IMAGES_APISERVER - name: IMAGES_OAUTHPROXY objref: kind: ConfigMap @@ -38,13 +39,13 @@ vars: apiVersion: v1 fieldref: fieldpath: data.IMAGES_OAUTHPROXY - - name: IMAGES_PERSISTENTAGENT + - name: IMAGES_PERSISTENCEAGENT objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_PERSISTENTAGENT + fieldpath: data.IMAGES_PERSISTENCEAGENT - name: IMAGES_SCHEDULEDWORKFLOW objref: kind: ConfigMap @@ -52,55 +53,57 @@ vars: apiVersion: v1 fieldref: fieldpath: data.IMAGES_SCHEDULEDWORKFLOW - - name: IMAGES_CACHE + - name: IMAGES_MARIADB objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_CACHE - - name: IMAGES_MOVERESULTSIMAGE + fieldpath: data.IMAGES_MARIADB + - name: IMAGES_MLMDENVOY objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_MOVERESULTSIMAGE - - name: IMAGES_MARIADB + fieldpath: data.IMAGES_MLMDENVOY + - name: IMAGES_MLMDGRPC objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_MARIADB - - name: IMAGES_MLMDENVOY + fieldpath: data.IMAGES_MLMDGRPC + - name: IMAGES_LAUNCHER objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_MLMDENVOY - - name: IMAGES_MLMDGRPC + fieldpath: data.IMAGES_LAUNCHER + - name: IMAGES_DRIVER objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_MLMDGRPC - - name: IMAGES_MLMDWRITER + fieldpath: data.IMAGES_DRIVER + - name: IMAGES_ARGO_EXEC objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_MLMDWRITER - - name: IMAGES_DSPO + fieldpath: data.IMAGES_ARGO_EXEC + - name: IMAGES_ARGO_WORKFLOWCONTROLLER objref: kind: ConfigMap name: dspo-parameters apiVersion: v1 fieldref: - fieldpath: data.IMAGES_DSPO + fieldpath: data.IMAGES_ARGO_WORKFLOWCONTROLLER + + # DSPO level configs - name: ZAP_LOG_LEVEL objref: kind: ConfigMap @@ -136,69 +139,6 @@ vars: apiVersion: v1 fieldref: fieldpath: data.DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT - - name: IMAGESV2_ARGO_APISERVER - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_APISERVER - - name: IMAGESV2_ARGO_PERSISTENCEAGENT - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_PERSISTENCEAGENT - - name: IMAGESV2_ARGO_SCHEDULEDWORKFLOW - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_SCHEDULEDWORKFLOW - - name: IMAGESV2_ARGO_MLMDENVOY - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_MLMDENVOY - - name: IMAGESV2_ARGO_MLMDGRPC - objref: - kind: ConfigMap - name: 
dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_MLMDGRPC - - name: IMAGESV2_ARGO_ARGOEXEC - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_ARGOEXEC - - name: IMAGESV2_ARGO_WORKFLOWCONTROLLER - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.IMAGESV2_ARGO_WORKFLOWCONTROLLER - - name: V2_LAUNCHER_IMAGE - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.V2_LAUNCHER_IMAGE - - name: V2_DRIVER_IMAGE - objref: - kind: ConfigMap - name: dspo-parameters - apiVersion: v1 - fieldref: - fieldpath: data.V2_DRIVER_IMAGE - name: DSPO_APISERVER_INCLUDE_OWNERREFERENCE objref: kind: ConfigMap diff --git a/config/base/params.env b/config/base/params.env index df130560..8b50425e 100644 --- a/config/base/params.env +++ b/config/base/params.env @@ -1,24 +1,15 @@ -IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:v1.6.3 -IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:v1.6.3 -IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:v1.6.3 -IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:v1.6.3 -IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:v1.6.3 -IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:v1.6.3 -IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:v1.6.3 IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest -V2_LAUNCHER_IMAGE=quay.io/opendatahub/ds-pipelines-launcher:latest -V2_DRIVER_IMAGE=quay.io/opendatahub/ds-pipelines-driver:latest -IMAGESV2_ARGO_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest -IMAGESV2_ARGO_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest -IMAGESV2_ARGO_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest -IMAGESV2_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:v3.4.17-upstream -IMAGESV2_ARGO_ARGOEXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:v3.4.17-upstream -IMAGESV2_ARGO_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest -IMAGESV2_ARGO_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a +IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest +IMAGES_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest +IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest +IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:v3.4.17-upstream +IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:v3.4.17-upstream +IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher:latest +IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver:latest +IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest +IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:02b834fd74da71ec37f6a5c0d10aac9a679d1a0f4e510c4f77723ef2367e858a IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1 IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:4f8d66597feeb32bb18699326029f9a71a5aca4a57679d636b876377c2e95695 -IMAGES_CACHE=registry.redhat.io/ubi8/ubi-minimal@sha256:5d2d4d4dbec470f8ffb679915e2a8ae25ad754cd9193fa966deee1ecb7b3ee00 
-IMAGES_MOVERESULTSIMAGE=registry.redhat.io/ubi8/ubi-micro@sha256:396baed3d689157d96aa7d8988fdfea7eb36684c8335eb391cf1952573e689c1 ZAP_LOG_LEVEL=info MAX_CONCURRENT_RECONCILES=10 DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index b263ef3c..bf6e01e3 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -1,26 +1,15 @@ Images: ApiServer: $(IMAGES_APISERVER) - Artifact: $(IMAGES_ARTIFACT) - OAuthProxy: $(IMAGES_OAUTHPROXY) - PersistentAgent: $(IMAGES_PERSISTENTAGENT) + PersistentAgent: $(IMAGES_PERSISTENCEAGENT) ScheduledWorkflow: $(IMAGES_SCHEDULEDWORKFLOW) - Cache: $(IMAGES_CACHE) - MoveResultsImage: $(IMAGES_MOVERESULTSIMAGE) - MariaDB: $(IMAGES_MARIADB) MlmdEnvoy: $(IMAGES_MLMDENVOY) MlmdGRPC: $(IMAGES_MLMDGRPC) - MlmdWriter: $(IMAGES_MLMDWRITER) -ImagesV2: - Argo: - ApiServer: $(IMAGESV2_ARGO_APISERVER) - PersistentAgent: $(IMAGESV2_ARGO_PERSISTENCEAGENT) - ScheduledWorkflow: $(IMAGESV2_ARGO_SCHEDULEDWORKFLOW) - MlmdEnvoy: $(IMAGESV2_ARGO_MLMDENVOY) - MlmdGRPC: $(IMAGESV2_ARGO_MLMDGRPC) - WorkflowController: $(IMAGESV2_ARGO_WORKFLOWCONTROLLER) - ArgoExecImage: $(IMAGESV2_ARGO_ARGOEXEC) - ArgoLauncherImage: $(V2_LAUNCHER_IMAGE) - ArgoDriverImage: $(V2_DRIVER_IMAGE) + ArgoExecImage: $(IMAGES_ARGO_EXEC) + ArgoWorkflowController: $(IMAGES_ARGO_WORKFLOWCONTROLLER) + LauncherImage: $(IMAGES_LAUNCHER) + DriverImage: $(IMAGES_DRIVER) + OAuthProxy: $(IMAGES_OAUTHPROXY) + MariaDB: $(IMAGES_MARIADB) DSPO: HealthCheck: Database: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index ac888ab9..380aaeb5 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -17,7 +17,850 @@ spec: singular: datasciencepipelinesapplication scope: Namespaced versions: - - name: v1alpha1 + - name: v1 + schema: + openAPIV3Schema: + properties: + apiVersion: + description: 'APIVersion defines the versioned schema of this representation + of an object. Servers should convert recognized schemas to the latest + internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources' + type: string + kind: + description: 'Kind is a string value representing the REST resource this + object represents. Servers may infer this from the endpoint the client + submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds' + type: string + metadata: + type: object + spec: + properties: + apiServer: + default: + deploy: true + description: DS Pipelines API Server configuration. 
+                properties:
+                  argoDriverImage:
+                    type: string
+                  argoLauncherImage:
+                    type: string
+                  artifactSignedURLExpirySeconds:
+                    default: 60
+                    description: 'The expiry time (seconds) for artifact download
+                      links when querying the dsp server via /apis/v2beta1/artifacts/{id}?share_url=true
+                      Default: 60'
+                    type: integer
+                  cABundle:
+                    description: If the Object store/DB is behind a TLS secured connection
+                      that is unrecognized by the host OpenShift/K8s cluster, then
+                      you can provide a PEM formatted CA bundle to be injected into
+                      the DSP server pod to trust this connection. CA Bundle should
+                      be provided as values within configmaps, mapped to keys.
+                    properties:
+                      configMapKey:
+                        description: Key should map to a CA bundle. The key is also
+                          used to name the CA bundle file (e.g. ca-bundle.crt)
+                        type: string
+                      configMapName:
+                        type: string
+                    required:
+                    - configMapKey
+                    - configMapName
+                    type: object
+                  caBundleFileMountPath:
+                    description: This is the path where the ca bundle will be mounted
+                      in the pipeline server and user executor pods
+                    type: string
+                  caBundleFileName:
+                    description: This is the filename of the ca bundle that will be
+                      created in the pipeline server and user executor pods
+                    type: string
+                  customKfpLauncherConfigMap:
+                    description: When specified, the `data` contents of the `kfp-launcher`
+                      ConfigMap that DSPO writes will be fully replaced with the `data`
+                      contents of the ConfigMap specified here. This allows the user
+                      to fully replace the `data` contents of the kfp-launcher ConfigMap.
+                      The `kfp-launcher` component requires a ConfigMap to exist in
+                      the namespace where it runs (i.e. the namespace where pipelines
+                      run). This ConfigMap contains object storage configuration,
+                      as well as pipeline root (object store root path where artifacts
+                      will be uploaded) configuration. Currently this ConfigMap *must*
+                      be named "kfp-launcher". We currently deploy a default copy
+                      of the kfp-launcher ConfigMap via DSPO, but a user may want
+                      to provide their own ConfigMap configuration, so that they can
+                      specify multiple object storage sources and paths.
+                    type: string
+                  customServerConfigMap:
+                    description: CustomServerConfig is a custom config file that you
+                      can provide for the api server to use instead.
+                    properties:
+                      key:
+                        type: string
+                      name:
+                        type: string
+                    type: object
+                  deploy:
+                    default: true
+                    description: 'Enable DS Pipelines Operator management of DSP API
+                      Server. Setting Deploy to false disables operator reconciliation.
+                      Default: true'
+                    type: boolean
+                  enableOauth:
+                    default: true
+                    description: 'Create an Openshift Route for this DSP API Server.
+                      Default: true'
+                    type: boolean
+                  enableSamplePipeline:
+                    default: false
+                    description: 'Include sample pipelines with the deployment of
+                      this DSP API Server. Default: false'
+                    type: boolean
+                  image:
+                    description: Specify a custom image for DSP API Server.
+                    type: string
+                  resources:
+                    description: Specify custom Pod resource requirements for this
+                      component.
+                    properties:
+                      limits:
+                        properties:
+                          cpu:
+                            anyOf:
+                            - type: integer
+                            - type: string
+                            pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                            x-kubernetes-int-or-string: true
+                          memory:
+                            anyOf:
+                            - type: integer
+                            - type: string
+                            pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                            x-kubernetes-int-or-string: true
+                        type: object
+                      requests:
+                        properties:
+                          cpu:
+                            anyOf:
+                            - type: integer
+                            - type: string
+                            pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                            x-kubernetes-int-or-string: true
+                          memory:
+                            anyOf:
+                            - type: integer
+                            - type: string
+                            pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                            x-kubernetes-int-or-string: true
+                        type: object
+                    type: object
+                type: object
+              database:
+                default:
+                  mariaDB:
+                    deploy: true
+                description: Database specifies database configurations, used for
+                  DS Pipelines metadata tracking. Specify either the default MariaDB
+                  deployment, or configure your own External SQL DB.
+                properties:
+                  customExtraParams:
+                    description: "CustomExtraParams allow users to further customize
+                      the sql dsn parameters used by the Pipeline Server when opening
+                      a connection with the Database. ref: https://github.com/go-sql-driver/mysql?tab=readme-ov-file#dsn-data-source-name
+                      \n Value must be a JSON string. For example, to disable tls
+                      for Pipeline Server DB connection the user can provide a string:
+                      {\"tls\":\"false\"} \n If updating post DSPA deployment, then
+                      a manual restart of the pipeline server pod will be required
+                      so the new configmap may be consumed."
+                    type: string
+                  disableHealthCheck:
+                    default: false
+                    description: 'Default: false'
+                    type: boolean
+                  externalDB:
+                    properties:
+                      host:
+                        type: string
+                      passwordSecret:
+                        properties:
+                          key:
+                            type: string
+                          name:
+                            type: string
+                        required:
+                        - key
+                        - name
+                        type: object
+                      pipelineDBName:
+                        type: string
+                      port:
+                        type: string
+                      username:
+                        type: string
+                    required:
+                    - host
+                    - passwordSecret
+                    - pipelineDBName
+                    - port
+                    - username
+                    type: object
+                  mariaDB:
+                    properties:
+                      deploy:
+                        default: true
+                        description: 'Enable DS Pipelines Operator management of MariaDB.
+                          Setting Deploy to false disables operator reconciliation.
+                          Default: true'
+                        type: boolean
+                      image:
+                        description: Specify a custom image for DSP MariaDB pod.
+                        type: string
+                      passwordSecret:
+                        properties:
+                          key:
+                            type: string
+                          name:
+                            type: string
+                        required:
+                        - key
+                        - name
+                        type: object
+                      pipelineDBName:
+                        default: mlpipeline
+                        description: 'The database name that will be created. Should
+                          match `^[a-zA-Z0-9_]+`. Default: mlpipeline'
+                        pattern: ^[a-zA-Z0-9_]+$
+                        type: string
+                      pvcSize:
+                        anyOf:
+                        - type: integer
+                        - type: string
+                        default: 10Gi
+                        description: 'Customize the size of the PVC created for the
+                          default MariaDB instance. Default: 10Gi'
+                        pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                        x-kubernetes-int-or-string: true
+                      resources:
+                        description: Specify custom Pod resource requirements for
+                          this component.
+                      properties:
+                        limits:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                        requests:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                      type: object
+                    storageClassName:
+                      description: Volume Mode Filesystem storageClass to use for
+                        PVC creation
+                      type: string
+                    username:
+                      default: mlpipeline
+                      description: 'The MariaDB username that will be created. Should
+                        match `^[a-zA-Z0-9_]+`. Default: mlpipeline'
+                      pattern: ^[a-zA-Z0-9_]+$
+                      type: string
+                  type: object
+              type: object
+            dspVersion:
+              default: v2
+              type: string
+            mlmd:
+              properties:
+                deploy:
+                  default: false
+                  description: 'Enable DS Pipelines Operator management of MLMD.
+                    Setting Deploy to false disables operator reconciliation. Default:
+                    false'
+                  type: boolean
+                envoy:
+                  properties:
+                    deployRoute:
+                      default: true
+                      type: boolean
+                    image:
+                      type: string
+                    resources:
+                      description: ResourceRequirements structures compute resource
+                        requirements. It replaces ResourceRequirements from corev1,
+                        which also includes an optional storage field; we handle
+                        the storage field separately, so it is not included as a
+                        subfield of Resources.
+                      properties:
+                        limits:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                        requests:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                      type: object
+                  type: object
+                grpc:
+                  properties:
+                    image:
+                      type: string
+                    port:
+                      type: string
+                    resources:
+                      description: ResourceRequirements structures compute resource
+                        requirements. It replaces ResourceRequirements from corev1,
+                        which also includes an optional storage field; we handle
+                        the storage field separately, so it is not included as a
+                        subfield of Resources.
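+                        # Illustrative sketch (not part of the generated schema):
+                        # enabling MLMD with its Envoy proxy route. Images are left
+                        # unset on the assumption that the operator falls back to
+                        # its configured default images when they are omitted:
+                        #   spec:
+                        #     mlmd:
+                        #       deploy: true
+                        #       envoy:
+                        #         deployRoute: true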
+                      properties:
+                        limits:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                        requests:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                      type: object
+                  type: object
+              type: object
+            mlpipelineUI:
+              description: Deploy the KFP UI with DS Pipelines UI. This feature
+                is unsupported, and primarily used for exploration, testing, and
+                development purposes.
+              properties:
+                configMap:
+                  type: string
+                deploy:
+                  default: true
+                  description: 'Enable DS Pipelines Operator management of KFP UI.
+                    Setting Deploy to false disables operator reconciliation. Default:
+                    true'
+                  type: boolean
+                image:
+                  description: Specify a custom image for KFP UI pod.
+                  type: string
+                resources:
+                  description: Specify custom Pod resource requirements for this
+                    component.
+                  properties:
+                    limits:
+                      properties:
+                        cpu:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                        memory:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                      type: object
+                    requests:
+                      properties:
+                        cpu:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                        memory:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                      type: object
+                  type: object
+              required:
+              - image
+              type: object
+            objectStorage:
+              description: ObjectStorage specifies Object Store configurations,
+                used for DS Pipelines artifact passing and storage. Specify either
+                your own External Storage (e.g. AWS S3), or use the default Minio
+                deployment (unsupported, primarily for development and testing).
+              properties:
+                disableHealthCheck:
+                  default: false
+                  description: 'Default: false'
+                  type: boolean
+                enableExternalRoute:
+                  default: false
+                  description: 'Enable an external route so the object storage is
+                    reachable from outside the cluster. Default: false'
+                  type: boolean
+                externalStorage:
+                  properties:
+                    basePath:
+                      description: Subpath where objects should be stored for this
+                        DSPA
+                      type: string
+                    bucket:
+                      type: string
+                    host:
+                      type: string
+                    port:
+                      type: string
+                    region:
+                      type: string
+                    s3CredentialsSecret:
+                      properties:
+                        accessKey:
+                          description: The "Keys" in the k8sSecret key/value pairs.
+                            Not to be confused with the values.
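+                            # Illustrative sketch (not part of the generated
+                            # schema): external S3 storage wired to a Secret; the
+                            # host, bucket, Secret name, and key names are
+                            # hypothetical. Note accessKey/secretKey name the keys
+                            # inside the Secret, not the credential values:
+                            #   spec:
+                            #     objectStorage:
+                            #       externalStorage:
+                            #         host: s3.us-east-1.amazonaws.com
+                            #         scheme: https
+                            #         bucket: my-artifacts
+                            #         s3CredentialsSecret:
+                            #           secretName: storage-creds
+                            #           accessKey: AWS_ACCESS_KEY_ID
+                            #           secretKey: AWS_SECRET_ACCESS_KEY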
+                          type: string
+                        secretKey:
+                          type: string
+                        secretName:
+                          description: The name of the Secret where the AccessKey
+                            and SecretKey are defined.
+                          type: string
+                      required:
+                      - accessKey
+                      - secretKey
+                      - secretName
+                      type: object
+                    scheme:
+                      type: string
+                    secure:
+                      type: boolean
+                  required:
+                  - bucket
+                  - host
+                  - s3CredentialsSecret
+                  - scheme
+                  type: object
+                minio:
+                  description: Enable DS Pipelines Operator management of Minio.
+                    Setting Deploy to false disables operator reconciliation.
+                  properties:
+                    bucket:
+                      default: mlpipeline
+                      description: 'Provide the Bucket name that will be used to
+                        store artifacts in S3. If the provided bucket does not exist,
+                        the DSP Apiserver will attempt to create it. As such, the
+                        credentials provided should have sufficient permissions to
+                        create buckets. Default: mlpipeline'
+                      type: string
+                    deploy:
+                      default: true
+                      description: 'Enable DS Pipelines Operator management of Minio.
+                        Setting Deploy to false disables operator reconciliation.
+                        Default: true'
+                      type: boolean
+                    image:
+                      description: Specify a custom image for Minio pod.
+                      type: string
+                    pvcSize:
+                      anyOf:
+                      - type: integer
+                      - type: string
+                      default: 10Gi
+                      description: 'Customize the size of the PVC created for the
+                        Minio instance. Default: 10Gi'
+                      pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                      x-kubernetes-int-or-string: true
+                    resources:
+                      description: Specify custom Pod resource requirements for
+                        this component.
+                      properties:
+                        limits:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                        requests:
+                          properties:
+                            cpu:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                            memory:
+                              anyOf:
+                              - type: integer
+                              - type: string
+                              pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                              x-kubernetes-int-or-string: true
+                          type: object
+                      type: object
+                    s3CredentialsSecret:
+                      description: Credentials for the S3 user (e.g. IAM user credentials
+                        stored in a k8s Secret). Note that the S3 user should have
+                        the permissions to create a bucket if the provided bucket
+                        does not exist.
+                      properties:
+                        accessKey:
+                          description: The "Keys" in the k8sSecret key/value pairs.
+                            Not to be confused with the values.
+                          type: string
+                        secretKey:
+                          type: string
+                        secretName:
+                          description: The name of the Secret where the AccessKey
+                            and SecretKey are defined.
+                          type: string
+                      required:
+                      - accessKey
+                      - secretKey
+                      - secretName
+                      type: object
+                    storageClassName:
+                      description: Volume Mode Filesystem storageClass to use for
+                        PVC creation
+                      type: string
+                  required:
+                  - image
+                  type: object
+              type: object
+            persistenceAgent:
+              default:
+                deploy: true
+              description: DS Pipelines PersistenceAgent configuration.
+              properties:
+                deploy:
+                  default: true
+                  description: 'Enable DS Pipelines Operator management of Persistence
+                    Agent. Setting Deploy to false disables operator reconciliation.
+                    Default: true'
+                  type: boolean
+                image:
+                  description: Specify a custom image for DSP PersistenceAgent.
+                  type: string
+                numWorkers:
+                  default: 2
+                  description: 'Number of workers for the Persistence Agent sync
+                    job. Default: 2'
+                  type: integer
+                resources:
+                  description: Specify custom Pod resource requirements for this
+                    component.
+                  properties:
+                    limits:
+                      properties:
+                        cpu:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                        memory:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                      type: object
+                    requests:
+                      properties:
+                        cpu:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                        memory:
+                          anyOf:
+                          - type: integer
+                          - type: string
+                          pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$
+                          x-kubernetes-int-or-string: true
+                      type: object
+                  type: object
+              type: object
+            podToPodTLS:
+              default: true
+              description: Set podToPodTLS to "true" or "false" to enable or disable
+                TLS communication between DSPA components (pods). Defaults to "true",
+                enabling TLS between all pods. Only supported in DSP V2 on OpenShift.
+              type: boolean
+            scheduledWorkflow:
+              default:
+                deploy: true
+              description: DS Pipelines Scheduled Workflow configuration.
+              properties:
+                cronScheduleTimezone:
+                  default: UTC
+                  description: 'Specify the Cron timezone used for ScheduledWorkflow
+                    PipelineRuns. Default: UTC'
+                  type: string
+                deploy:
+                  default: true
+                  description: 'Enable DS Pipelines Operator management of ScheduledWorkflow.
+                    Setting Deploy to false disables operator reconciliation. Default:
+                    true'
+                  type: boolean
+                image:
+                  description: Specify a custom image for DSP ScheduledWorkflow
+                    controller.
+                  type: string
+                resources:
+                  description: Specify custom Pod resource requirements for this
+                    component.
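+                  # Illustrative sketch (not part of the generated schema):
+                  # overriding the cron timezone used for scheduled PipelineRuns;
+                  # the timezone value is hypothetical:
+                  #   spec:
+                  #     scheduledWorkflow:
+                  #       cronScheduleTimezone: America/New_York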
+ properties: + limits: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object + type: object + workflowController: + description: WorkflowController is an argo-specific component that + manages a DSPA's Workflow objects and handles the orchestration + of them with the central Argo server + properties: + argoExecImage: + type: string + customConfig: + type: string + deploy: + default: true + type: boolean + image: + type: string + resources: + description: Specify custom Pod resource requirements for this + component. + properties: + limits: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + requests: + properties: + cpu: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + memory: + anyOf: + - type: integer + - type: string + pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ + x-kubernetes-int-or-string: true + type: object + type: object + type: object + required: + - objectStorage + type: object + status: + properties: + components: + properties: + apiServer: + properties: + externalUrl: + type: string + url: + type: string + type: object + mlmdProxy: + properties: + externalUrl: + type: string + url: + type: string + type: object + type: object + conditions: + items: + description: "Condition contains details for one aspect of the current + state of this API Resource. --- This struct is intended for direct + use as an array at the field path .status.conditions. For example, + \n type FooStatus struct{ // Represents the observations of a + foo's current state. // Known .status.conditions.type are: \"Available\", + \"Progressing\", and \"Degraded\" // +patchMergeKey=type // +patchStrategy=merge + // +listType=map // +listMapKey=type Conditions []metav1.Condition + `json:\"conditions,omitempty\" patchStrategy:\"merge\" patchMergeKey:\"type\" + protobuf:\"bytes,1,rep,name=conditions\"` \n // other fields }" + properties: + lastTransitionTime: + description: lastTransitionTime is the last time the condition + transitioned from one status to another. This should be when + the underlying condition changed. 
If that is not known, then + using the time when the API field changed is acceptable. + format: date-time + type: string + message: + description: message is a human readable message indicating + details about the transition. This may be an empty string. + maxLength: 32768 + type: string + observedGeneration: + description: observedGeneration represents the .metadata.generation + that the condition was set based upon. For instance, if .metadata.generation + is currently 12, but the .status.conditions[x].observedGeneration + is 9, the condition is out of date with respect to the current + state of the instance. + format: int64 + minimum: 0 + type: integer + reason: + description: reason contains a programmatic identifier indicating + the reason for the condition's last transition. Producers + of specific condition types may define expected values and + meanings for this field, and whether the values are considered + a guaranteed API. The value should be a CamelCase string. + This field may not be empty. + maxLength: 1024 + minLength: 1 + pattern: ^[A-Za-z]([A-Za-z0-9_,:]*[A-Za-z0-9_])?$ + type: string + status: + description: status of the condition, one of True, False, Unknown. + enum: + - "True" + - "False" + - Unknown + type: string + type: + description: type of condition in CamelCase or in foo.example.com/CamelCase. + --- Many .condition.type values are consistent across resources + like Available, but because arbitrary conditions can be useful + (see .node.status.conditions), the ability to deconflict is + important. The regex it matches is (dns1123SubdomainFmt/)?(qualifiedNameFmt) + maxLength: 316 + pattern: ^([a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*/)?(([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9])$ + type: string + required: + - lastTransitionTime + - message + - reason + - status + - type + type: object + type: array + type: object + type: object + served: true + storage: true + subresources: + status: {} + - deprecated: true + deprecationWarning: datasciencepipelinesapplications.opendatahub.io/v1alpha1 is + deprecated. 
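+    # The v1alpha1 API remains served for backward compatibility but is no
+    # longer the storage version (see the storage flip below); existing
+    # DataSciencePipelinesApplication manifests should move to apiVersion
+    # datasciencepipelinesapplications.opendatahub.io/v1.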
+    name: v1alpha1
     schema:
       openAPIV3Schema:
         properties:
@@ -974,6 +1817,6 @@ spec:
             type: object
         type: object
     served: true
-    storage: true
+    storage: false
     subresources:
       status: {}
diff --git a/config/internal/apiserver/default/artifact_script.yaml.tmpl b/config/internal/apiserver/default/artifact_script.yaml.tmpl
deleted file mode 100644
index 15320a76..00000000
--- a/config/internal/apiserver/default/artifact_script.yaml.tmpl
+++ /dev/null
@@ -1,44 +0,0 @@
-apiVersion: v1
-data:
-  artifact_script: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-        workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-        workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-        artifact_name=$(basename $2)
-
-        aws_cp() {
-{{ if .CustomCABundle }}
-          aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} --ca-bundle {{ .PiplinesCABundleMountPath }} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-{{ else }}
-          aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-{{ end }}
-        }
-
-        if [ -f "$workspace_dest/$artifact_name" ]; then
-            echo sending to: ${workspace_dest}/${artifact_name}
-            tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-            aws_cp $1
-        elif [ -f "$2" ]; then
-            tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-            aws_cp $1
-        else
-            echo "$2 file does not exist. Skip artifact tracking for $1"
-        fi
-    }
-    push_log() {
-        cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-        push_artifact main-log step-main.log
-    }
-    strip_eof() {
-        if [ -f "$2" ]; then
-            awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-        fi
-    }
-kind: ConfigMap
-metadata:
-  name: ds-pipeline-artifact-script-{{ .Name }}
-  namespace: {{.Namespace}}
-  labels:
-    app: ds-pipeline-{{.Name}}
-    component: data-science-pipelines
diff --git a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl
index 8a8cdd76..ee555d76 100644
--- a/config/internal/apiserver/default/deployment.yaml.tmpl
+++ b/config/internal/apiserver/default/deployment.yaml.tmpl
@@ -59,10 +59,9 @@ spec:
           - name: SSL_CERT_DIR
             value: {{.CustomSSLCertDir}}
         {{ end }}
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "{{.APIServer.AutoUpdatePipelineDefaultVersion}}"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "{{.APIServer.DBConfigConMaxLifetimeSec}}"
+          # Visualization server is something we don't deploy,
+          # but this env is required in KFP, even though
+          # it is not used.
- name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -124,8 +123,6 @@ spec: - name: METADATA_TLS_ENABLED value: "true" {{ end }} - {{ if (eq .DSPVersion "v2") }} - ## Argo-Specific Env Vars ## - name: EXECUTIONTYPE value: Workflow - name: DB_DRIVER_NAME @@ -143,40 +140,6 @@ spec: value: "{{.DBConnection.Host}}" - name: DBCONFIG_MYSQLCONFIG_PORT value: "{{.DBConnection.Port}}" - {{ else }} - ## Tekton-Specific Env Vars ## - - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "{{.APIServer.CacheImage}}" - - name: MOVERESULTS_IMAGE - value: "{{.APIServer.MoveResultsImage}}" - - name: ARTIFACT_IMAGE - value: "{{.APIServer.ArtifactImage}}" - - name: ARTIFACT_BUCKET - value: "{{.ObjectStorageConnection.Bucket}}" - - name: ARTIFACT_ENDPOINT - value: "{{.ObjectStorageConnection.Endpoint}}" - - name: ARTIFACT_SCRIPT - valueFrom: - configMapKeyRef: - key: "{{ .APIServer.ArtifactScriptConfigMap.Key }}" - name: "{{ .APIServer.ArtifactScriptConfigMap.Name }}" - - name: ARCHIVE_LOGS - value: "{{.APIServer.ArchiveLogs}}" - - name: TRACK_ARTIFACTS - value: "{{.APIServer.TrackArtifacts}}" - - name: STRIP_EOF - value: "{{.APIServer.StripEOF}}" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "{{.APIServer.InjectDefaultScript}}" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "{{.APIServer.ApplyTektonCustomResource}}" - - name: TERMINATE_STATUS - value: "{{.APIServer.TerminateStatus}}" - {{ end }} image: {{.APIServer.Image}} # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl b/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl index 0915e31d..13396692 100644 --- a/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl +++ b/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl @@ -35,22 +35,6 @@ rules: - update - patch - delete - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - runs - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - kubeflow.org resources: @@ -74,18 +58,6 @@ rules: - tokenreviews verbs: - create - - apiGroups: - - custom.tekton.dev - resources: - - pipelineloops - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - image.openshift.io resources: diff --git a/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl b/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl index 7d827331..c4358c42 100644 --- a/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl +++ b/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl @@ -90,22 +90,6 @@ rules: - seldondeployments verbs: - '*' - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - runs - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - ray.io resources: diff --git a/config/internal/apiserver/default/server-config.yaml.tmpl b/config/internal/apiserver/default/server-config.yaml.tmpl index ce92a506..d019a523 100644 --- a/config/internal/apiserver/default/server-config.yaml.tmpl +++ b/config/internal/apiserver/default/server-config.yaml.tmpl @@ -8,7 +8,6 @@ metadata: component: data-science-pipelines data: config.json: | -{{ if eq .DSPVersion "v2" }} 
{ "DBConfig": { "MySQLConfig": { @@ -26,16 +25,3 @@ data: "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", "InitConnectionTimeout": "6m" } -{{ else }} - { - "DBConfig": { - "DriverName": "mysql", - "ConMaxLifeTime": "120s", - "ExtraParams": {{ .DBConnection.ExtraParams }} - }, - "ObjectStoreConfig": { - "PipelinePath": "pipelines" - }, - "InitConnectionTimeout": "6m" - } -{{ end }} diff --git a/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl b/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl index 86281971..05d07cde 100644 --- a/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl +++ b/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl @@ -1,4 +1,3 @@ -{{ if (eq .DSPVersion "v2") }} apiVersion: v1 kind: ConfigMap metadata: @@ -253,559 +252,3 @@ data: schemaVersion: 0.0.1 schemaVersion: 2.1.0 sdkVersion: kfp-2.7.0 -{{ else }} -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-pipeline-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-{{.Name}} - component: data-science-pipelines -data: - iris-pipeline-compiled.yaml: |- - apiVersion: tekton.dev/v1beta1 - kind: PipelineRun - metadata: - name: iris-pipeline - annotations: - tekton.dev/output_artifacts: '{"data-prep": [{"key": "artifacts/$PIPELINERUN/data-prep/X_test.tgz", - "name": "data-prep-X_test", "path": "/tmp/outputs/X_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/X_train.tgz", - "name": "data-prep-X_train", "path": "/tmp/outputs/X_train/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_test.tgz", - "name": "data-prep-y_test", "path": "/tmp/outputs/y_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_train.tgz", - "name": "data-prep-y_train", "path": "/tmp/outputs/y_train/data"}], "evaluate-model": - [{"key": "artifacts/$PIPELINERUN/evaluate-model/mlpipeline-metrics.tgz", "name": - "mlpipeline-metrics", "path": "/tmp/outputs/mlpipeline_metrics/data"}], "train-model": - [{"key": "artifacts/$PIPELINERUN/train-model/model.tgz", "name": "train-model-model", - "path": "/tmp/outputs/model/data"}]}' - tekton.dev/input_artifacts: '{"evaluate-model": [{"name": "data-prep-X_test", - "parent_task": "data-prep"}, {"name": "data-prep-y_test", "parent_task": "data-prep"}, - {"name": "train-model-model", "parent_task": "train-model"}], "train-model": - [{"name": "data-prep-X_train", "parent_task": "data-prep"}, {"name": "data-prep-y_train", - "parent_task": "data-prep"}], "validate-model": [{"name": "train-model-model", - "parent_task": "train-model"}]}' - tekton.dev/artifact_bucket: mlpipeline - tekton.dev/artifact_endpoint: ${MINIO_SERVICE_SERVICE_HOST}:${MINIO_SERVICE_SERVICE_PORT} - tekton.dev/artifact_endpoint_scheme: http:// - tekton.dev/artifact_items: '{"data-prep": [["X_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test"], - ["X_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train"], - ["y_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test"], - ["y_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train"]], - "evaluate-model": [["mlpipeline-metrics", "/tmp/outputs/mlpipeline_metrics/data"]], - "train-model": [["model", "$(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model"]], - "validate-model": []}' - sidecar.istio.io/inject: "false" - tekton.dev/template: '' - pipelines.kubeflow.org/big_data_passing_format: 
$(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME - pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "iris-model", "name": - "model_obc", "optional": true, "type": "String"}], "name": "Iris Pipeline"}' - labels: - pipelines.kubeflow.org/pipelinename: '' - pipelines.kubeflow.org/generation: '' - spec: - params: - - name: model_obc - value: iris-model - pipelineSpec: - params: - - name: model_obc - default: iris-model - tasks: - - name: data-prep - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train - - --X-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test - - --y-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train - - --y-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def data_prep( - X_train_file, - X_test_file, - y_train_file, - y_test_file, - ): - import pickle - - import pandas as pd - - from sklearn import datasets - from sklearn.model_selection import train_test_split - - def get_iris_data(): - iris = datasets.load_iris() - data = pd.DataFrame( - { - "sepalLength": iris.data[:, 0], - "sepalWidth": iris.data[:, 1], - "petalLength": iris.data[:, 2], - "petalWidth": iris.data[:, 3], - "species": iris.target, - } - ) - - print("Initial Dataset:") - print(data.head()) - - return data - - def create_training_set(dataset, test_size = 0.3): - # Features - X = dataset[["sepalLength", "sepalWidth", "petalLength", "petalWidth"]] - # Labels - y = dataset["species"] - - # Split dataset into training set and test set - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=test_size, random_state=11 - ) - - return X_train, X_test, y_train, y_test - - def save_pickle(object_file, target_object): - with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - dataset = get_iris_data() - X_train, X_test, y_train, y_test = create_training_set(dataset) - - save_pickle(X_train_file, X_train) - save_pickle(X_test_file, X_test) - save_pickle(y_train_file, y_train) - save_pickle(y_test_file, y_test) - - import argparse - _parser = argparse.ArgumentParser(prog='Data prep', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--X-test", dest="X_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = data_prep(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - 
env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train $(results.X-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test $(results.X-test.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train $(results.y-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test $(results.y-test.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - results: - - name: X-test - description: /tmp/outputs/X_test/data - - name: X-train - description: /tmp/outputs/X_train/data - - name: taskrun-name - - name: y-test - description: /tmp/outputs/y_test/data - - name: y-train - description: /tmp/outputs/y_train/data - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Data prep", "outputs": - [{"name": "X_train"}, {"name": "X_test"}, {"name": "y_train"}, {"name": - "y_test"}], "version": "Data prep@sha256=5aeb512900f57983c9f643ec30ddb4ccc66490a443269b51ce0a67d57cb373b0"}' - workspaces: - - name: data-prep - workspaces: - - name: data-prep - workspace: iris-pipeline - - name: train-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_train - - --y-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_train - - --model - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def train_model( - X_train_file, - y_train_file, - model_file, - ): - import pickle - - from sklearn.ensemble import RandomForestClassifier - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - def save_pickle(object_file, target_object): - 
with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - def train_iris(X_train, y_train): - model = RandomForestClassifier(n_estimators=100) - model.fit(X_train, y_train) - - return model - - X_train = load_pickle(X_train_file) - y_train = load_pickle(y_train_file) - - model = train_iris(X_train, y_train) - - save_pickle(model_file, model) - - import argparse - _parser = argparse.ArgumentParser(prog='Train model', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = train_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model $(results.model.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - results: - - name: model - description: /tmp/outputs/model/data - - name: taskrun-name - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Train model", - "outputs": [{"name": "model"}], "version": "Train model@sha256=cb1fbd399ee5849dcdfaafced23a0496cae1d5861795062b22512b766ec418ce"}' - workspaces: - - name: train-model - workspaces: - - name: train-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - name: evaluate-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_test - - --y-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_test - - --model - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - - --mlpipeline-metrics - - /tmp/outputs/mlpipeline_metrics/data - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - 
program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def evaluate_model( - X_test_file, - y_test_file, - model_file, - mlpipeline_metrics_file, - ): - import json - import pickle - - from sklearn.metrics import accuracy_score - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - X_test = load_pickle(X_test_file) - y_test = load_pickle(y_test_file) - model = load_pickle(model_file) - - y_pred = model.predict(X_test) - - accuracy_score_metric = accuracy_score(y_test, y_pred) - print(f"Accuracy: {accuracy_score_metric}") - - metrics = { - "metrics": [ - { - "name": "accuracy-score", - "numberValue": accuracy_score_metric, - "format": "PERCENTAGE", - }, - ] - } - - with open(mlpipeline_metrics_file, "w") as f: - json.dump(metrics, f) - - import argparse - _parser = argparse.ArgumentParser(prog='Evaluate model', description='') - _parser.add_argument("--X-test", dest="X_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--mlpipeline-metrics", dest="mlpipeline_metrics_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = evaluate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - - name: train-model-trname - stepTemplate: - volumeMounts: - - name: mlpipeline-metrics - mountPath: /tmp/outputs/mlpipeline_metrics - volumes: - - name: mlpipeline-metrics - emptyDir: {} - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Evaluate model", - "outputs": [{"name": "mlpipeline_metrics", "type": "Metrics"}], "version": - "Evaluate model@sha256=f398e65faecc6f5a4ba11a2c78d8a2274e3ede205a0e199c8bb615531a3abd4a"}' - workspaces: - - name: evaluate-model - workspaces: - - name: evaluate-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - train-model - - name: validate-model - params: - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --model - - $(workspaces.validate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def validate_model(model_file): - import pickle - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - model = load_pickle(model_file) - - input_values = [[5, 3, 1.6, 0.2]] - - print(f"Performing test prediction on 
{input_values}") - result = model.predict(input_values) - - print(f"Response: {result}") - - import argparse - _parser = argparse.ArgumentParser(prog='Validate model', description='') - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = validate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: train-model-trname - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Validate model", - "outputs": [], "version": "Validate model@sha256=53d18ff94fc8f164e7d8455f2c87fa7fdac17e7502502aaa52012e4247d089ee"}' - workspaces: - - name: validate-model - workspaces: - - name: validate-model - workspace: iris-pipeline - runAfter: - - train-model - workspaces: - - name: iris-pipeline - workspaces: - - name: iris-pipeline - volumeClaimTemplate: - spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 2Gi -{{ end }} diff --git a/config/internal/common/argo/policy.yaml.tmpl b/config/internal/common/default/policy.yaml.tmpl similarity index 100% rename from config/internal/common/argo/policy.yaml.tmpl rename to config/internal/common/default/policy.yaml.tmpl diff --git a/config/internal/common/tekton/policy.yaml.tmpl b/config/internal/common/tekton/policy.yaml.tmpl deleted file mode 100644 index b750639f..00000000 --- a/config/internal/common/tekton/policy.yaml.tmpl +++ /dev/null @@ -1,84 +0,0 @@ -apiVersion: networking.k8s.io/v1 -kind: NetworkPolicy -metadata: - name: ds-pipelines-{{.Name}} - namespace: {{.Namespace}} -spec: - podSelector: - matchLabels: - app: {{.APIServerDefaultResourceName}} - component: data-science-pipelines - policyTypes: - - Ingress - ingress: - # Match all sources for oauth endpoint - - ports: - - protocol: TCP - port: 8443 - # We only allow DSPA components to communicate - # by bypassing oauth proxy, all external - # traffic should go through oauth proxy - - from: - - namespaceSelector: - matchLabels: - name: openshift-user-workload-monitoring - - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: redhat-ods-monitoring - - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: openshift-pipelines - - podSelector: - matchLabels: - app.kubernetes.io/managed-by: tekton-pipelines - pipelines.kubeflow.org/v2_component: 'true' - - podSelector: - matchLabels: - app: mariadb-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: minio-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-ui-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-persistenceagent-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-scheduledworkflow-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-envoy-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-grpc-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - opendatahub.io/workbenches: 'true' - ports: - - protocol: TCP - port: 8888 - - protocol: TCP - 
port: 8887 - - ports: - - protocol: TCP - port: 8080 - from: - - podSelector: - matchLabels: - app.kubernetes.io/name: data-science-pipelines-operator-driver - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: openshift-pipelines diff --git a/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl deleted file mode 100644 index 39068eaf..00000000 --- a/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl +++ /dev/null @@ -1,75 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} -spec: - replicas: 1 - selector: - matchLabels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} - template: - metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} - spec: - containers: - - env: - - name: NAMESPACE_TO_WATCH - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: PIPELINE_RUNTIME - value: tekton - - name: ARCHIVE_LOGS - value: "{{.APIServer.ArchiveLogs}}" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-{{.Name}}" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "{{.MLMD.GRPC.Port}}" - image: "{{.MLMD.Writer.Image}}" - name: main - livenessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - {{ if .MLMD.Writer.Resources.Requests }} - requests: - {{ if .MLMD.Writer.Resources.Requests.CPU }} - cpu: {{.MLMD.Writer.Resources.Requests.CPU}} - {{ end }} - {{ if .MLMD.Writer.Resources.Requests.Memory }} - memory: {{.MLMD.Writer.Resources.Requests.Memory}} - {{ end }} - {{ end }} - {{ if .MLMD.Writer.Resources.Limits }} - limits: - {{ if .MLMD.Writer.Resources.Limits.CPU }} - cpu: {{.MLMD.Writer.Resources.Limits.CPU}} - {{ end }} - {{ if .MLMD.Writer.Resources.Limits.Memory }} - memory: {{.MLMD.Writer.Resources.Limits.Memory}} - {{ end }} - {{ end }} - serviceAccountName: ds-pipeline-metadata-writer-{{.Name}} diff --git a/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl deleted file mode 100644 index 05becbf3..00000000 --- a/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - -rules: - - apiGroups: - - "" - resources: - - pods - verbs: - - get - - list - - watch - - update - - patch - - apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - get - - list - - watch - - update - - patch - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - verbs: - - get - - list - - watch - - update - - patch diff --git a/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl deleted file mode 100644 index 1a96fd35..00000000 --- 
a/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ds-pipeline-metadata-writer-{{.Name}} -subjects: - - kind: ServiceAccount - name: ds-pipeline-metadata-writer-{{.Name}} diff --git a/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl deleted file mode 100644 index f4613182..00000000 --- a/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines diff --git a/config/internal/mlpipelines-ui/role.yaml.tmpl b/config/internal/mlpipelines-ui/role.yaml.tmpl index f2cfe591..6838676c 100644 --- a/config/internal/mlpipelines-ui/role.yaml.tmpl +++ b/config/internal/mlpipelines-ui/role.yaml.tmpl @@ -34,21 +34,6 @@ rules: verbs: - get - list - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - route.openshift.io verbs: diff --git a/config/internal/persistence-agent/deployment.yaml.tmpl b/config/internal/persistence-agent/deployment.yaml.tmpl index 9c91bc8d..ac1711a8 100644 --- a/config/internal/persistence-agent/deployment.yaml.tmpl +++ b/config/internal/persistence-agent/deployment.yaml.tmpl @@ -35,11 +35,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - {{ if eq .DSPVersion "v2" }} value: Workflow - {{ else }} - value: PipelineRun - {{ end }} {{ if .PodToPodTLS }} - name: SSL_CERT_DIR value: "/etc/pki/tls/certs:/var/run/secrets/kubernetes.io/serviceaccount/" @@ -96,14 +92,11 @@ spec: memory: {{.PersistenceAgent.Resources.Limits.Memory}} {{ end }} {{ end }} - {{ if eq .DSPVersion "v2" }} volumeMounts: - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token name: persistenceagent-sa-token subPath: ds-pipeline-persistenceagent-{{.Name}}-token - {{ end }} serviceAccountName: ds-pipeline-persistenceagent-{{.Name}} - {{ if eq .DSPVersion "v2" }} volumes: - name: persistenceagent-sa-token projected: @@ -112,4 +105,3 @@ spec: audience: pipelines.kubeflow.org expirationSeconds: 3600 path: ds-pipeline-persistenceagent-{{.Name}}-token - {{ end }} diff --git a/config/internal/persistence-agent/role.yaml.tmpl b/config/internal/persistence-agent/role.yaml.tmpl index 454a32bf..174a2c39 100644 --- a/config/internal/persistence-agent/role.yaml.tmpl +++ b/config/internal/persistence-agent/role.yaml.tmpl @@ -23,17 +23,3 @@ rules: - get - list - watch - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - verbs: - - create - - get - - list - - watch - - update - - patch - - delete diff --git a/config/internal/scheduled-workflow/deployment.yaml.tmpl b/config/internal/scheduled-workflow/deployment.yaml.tmpl index 241b3e40..a46e5523 100644 --- a/config/internal/scheduled-workflow/deployment.yaml.tmpl +++ b/config/internal/scheduled-workflow/deployment.yaml.tmpl @@ -28,8 +28,6 @@ spec: value: "{{.Namespace}}" - 
            name: CRON_SCHEDULE_TIMEZONE
            value: "{{.ScheduledWorkflow.CronScheduleTimezone}}"
-         - name: EXECUTIONTYPE
-           value: PipelineRun
        image: "{{.ScheduledWorkflow.Image}}"
        # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
        name: ds-pipeline-scheduledworkflow
diff --git a/config/internal/scheduled-workflow/role.yaml.tmpl b/config/internal/scheduled-workflow/role.yaml.tmpl
index a4785ae8..748de502 100644
--- a/config/internal/scheduled-workflow/role.yaml.tmpl
+++ b/config/internal/scheduled-workflow/role.yaml.tmpl
@@ -39,31 +39,3 @@ rules:
   verbs:
   - create
   - patch
-- apiGroups:
-  - tekton.dev
-  resources:
-  - pipelineruns
-  - taskruns
-  - conditions
-  - runs
-  - tasks
-  verbs:
-  - create
-  - get
-  - list
-  - watch
-  - update
-  - patch
-  - delete
-- apiGroups:
-  - custom.tekton.dev
-  resources:
-  - pipelineloops
-  verbs:
-  - create
-  - get
-  - list
-  - watch
-  - update
-  - patch
-  - delete
diff --git a/config/internal/workflow-controller/configmap.yaml.tmpl b/config/internal/workflow-controller/configmap.yaml.tmpl
index e703d83e..46c8766f 100644
--- a/config/internal/workflow-controller/configmap.yaml.tmpl
+++ b/config/internal/workflow-controller/configmap.yaml.tmpl
@@ -12,7 +12,7 @@ metadata:
   namespace: {{.Namespace}}
 data:
   artifactRepository: |
-    archiveLogs: {{.APIServer.ArchiveLogs}}
+    archiveLogs: false
     s3:
       endpoint: "{{.ObjectStorageConnection.Endpoint}}"
       bucket: "{{.ObjectStorageConnection.Bucket}}"
diff --git a/config/manager/manager.yaml b/config/manager/manager.yaml
index f028ff04..484d6284 100644
--- a/config/manager/manager.yaml
+++ b/config/manager/manager.yaml
@@ -39,44 +39,26 @@ spec:
           # Env vars are prioritized over --config
           - name: IMAGES_APISERVER
             value: $(IMAGES_APISERVER)
-          - name: IMAGES_ARTIFACT
-            value: $(IMAGES_ARTIFACT)
-          - name: IMAGES_OAUTHPROXY
-            value: $(IMAGES_OAUTHPROXY)
-          - name: IMAGES_PERSISTENTAGENT
-            value: $(IMAGES_PERSISTENTAGENT)
+          - name: IMAGES_PERSISTENCEAGENT
+            value: $(IMAGES_PERSISTENCEAGENT)
           - name: IMAGES_SCHEDULEDWORKFLOW
             value: $(IMAGES_SCHEDULEDWORKFLOW)
-          - name: IMAGES_CACHE
-            value: $(IMAGES_CACHE)
-          - name: IMAGES_MOVERESULTSIMAGE
-            value: $(IMAGES_MOVERESULTSIMAGE)
-          - name: IMAGES_MARIADB
-            value: $(IMAGES_MARIADB)
           - name: IMAGES_MLMDENVOY
             value: $(IMAGES_MLMDENVOY)
           - name: IMAGES_MLMDGRPC
             value: $(IMAGES_MLMDGRPC)
-          - name: IMAGES_MLMDWRITER
-            value: $(IMAGES_MLMDWRITER)
-          - name: IMAGESV2_ARGO_APISERVER
-            value: $(IMAGESV2_ARGO_APISERVER)
-          - name: IMAGESV2_ARGO_PERSISTENCEAGENT
-            value: $(IMAGESV2_ARGO_PERSISTENCEAGENT)
-          - name: IMAGESV2_ARGO_SCHEDULEDWORKFLOW
-            value: $(IMAGESV2_ARGO_SCHEDULEDWORKFLOW)
-          - name: IMAGESV2_ARGO_MLMDENVOY
-            value: $(IMAGESV2_ARGO_MLMDENVOY)
-          - name: IMAGESV2_ARGO_MLMDGRPC
-            value: $(IMAGESV2_ARGO_MLMDGRPC)
-          - name: IMAGESV2_ARGO_WORKFLOWCONTROLLER
-            value: $(IMAGESV2_ARGO_WORKFLOWCONTROLLER)
-          - name: IMAGESV2_ARGO_ARGOEXEC
-            value: $(IMAGESV2_ARGO_ARGOEXEC)
-          - name: V2_LAUNCHER_IMAGE
-            value: $(V2_LAUNCHER_IMAGE)
-          - name: V2_DRIVER_IMAGE
-            value: $(V2_DRIVER_IMAGE)
+          - name: IMAGES_ARGO_EXEC
+            value: $(IMAGES_ARGO_EXEC)
+          - name: IMAGES_ARGO_WORKFLOWCONTROLLER
+            value: $(IMAGES_ARGO_WORKFLOWCONTROLLER)
+          - name: IMAGES_LAUNCHER
+            value: $(IMAGES_LAUNCHER)
+          - name: IMAGES_DRIVER
+            value: $(IMAGES_DRIVER)
+          - name: IMAGES_OAUTHPROXY
+            value: $(IMAGES_OAUTHPROXY)
+          - name: IMAGES_MARIADB
+            value: $(IMAGES_MARIADB)
           - name: ZAP_LOG_LEVEL
             value: $(ZAP_LOG_LEVEL)
           - name: MAX_CONCURRENT_RECONCILES
diff --git a/config/rbac/role.yaml b/config/rbac/role.yaml
index 4c2f10c2..40e3bd72 100644
--- a/config/rbac/role.yaml
+++ b/config/rbac/role.yaml
@@ -118,12 +118,6 @@ rules:
   - services
   verbs:
  - '*'
-- apiGroups:
-  - custom.tekton.dev
-  resources:
-  - pipelineloops
-  verbs:
-  - '*'
 - apiGroups:
   - datasciencepipelinesapplications.opendatahub.io
   resources:
@@ -256,12 +250,6 @@ rules:
   - create
   - delete
   - get
-- apiGroups:
-  - tekton.dev
-  resources:
-  - '*'
-  verbs:
-  - '*'
 - apiGroups:
   - workload.codeflare.dev
   resources:
diff --git a/config/samples/v2/custom-configs/db-creds.yaml b/config/samples/custom-configs/db-creds.yaml
similarity index 100%
rename from config/samples/v2/custom-configs/db-creds.yaml
rename to config/samples/custom-configs/db-creds.yaml
diff --git a/config/samples/v2/custom-configs/dspa.yaml b/config/samples/custom-configs/dspa.yaml
similarity index 96%
rename from config/samples/v2/custom-configs/dspa.yaml
rename to config/samples/custom-configs/dspa.yaml
index bdc0ff1f..bfdc9ab2 100644
--- a/config/samples/v2/custom-configs/dspa.yaml
+++ b/config/samples/custom-configs/dspa.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v2/custom-configs/kustomization.yaml b/config/samples/custom-configs/kustomization.yaml
similarity index 100%
rename from config/samples/v2/custom-configs/kustomization.yaml
rename to config/samples/custom-configs/kustomization.yaml
diff --git a/config/samples/v2/custom-configs/storage-creds.yaml b/config/samples/custom-configs/storage-creds.yaml
similarity index 100%
rename from config/samples/v2/custom-configs/storage-creds.yaml
rename to config/samples/custom-configs/storage-creds.yaml
diff --git a/config/samples/v1/custom-configs/ui-configmap.yaml b/config/samples/custom-configs/ui-configmap.yaml
similarity index 100%
rename from config/samples/v1/custom-configs/ui-configmap.yaml
rename to config/samples/custom-configs/ui-configmap.yaml
diff --git a/config/samples/v2/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml b/config/samples/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml
similarity index 100%
rename from config/samples/v2/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml
rename to config/samples/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml
diff --git a/config/samples/v2/custom-workflow-controller-config/dspa.yaml b/config/samples/custom-workflow-controller-config/dspa.yaml
similarity index 86%
rename from config/samples/v2/custom-workflow-controller-config/dspa.yaml
rename to config/samples/custom-workflow-controller-config/dspa.yaml
index 4d3e50db..8e0a1f40 100644
--- a/config/samples/v2/custom-workflow-controller-config/dspa.yaml
+++ b/config/samples/custom-workflow-controller-config/dspa.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v2/custom-workflow-controller-config/kustomization.yaml b/config/samples/custom-workflow-controller-config/kustomization.yaml
similarity index 100%
rename from config/samples/v2/custom-workflow-controller-config/kustomization.yaml
rename to config/samples/custom-workflow-controller-config/kustomization.yaml
diff --git a/config/samples/v2/dspa-all-fields/dspa_all_fields.yaml b/config/samples/dspa-all-fields/dspa_all_fields.yaml
similarity index 91%
rename from config/samples/v2/dspa-all-fields/dspa_all_fields.yaml
rename to config/samples/dspa-all-fields/dspa_all_fields.yaml
index 87d538d7..48d02608 100644
--- a/config/samples/v2/dspa-all-fields/dspa_all_fields.yaml
+++ b/config/samples/dspa-all-fields/dspa_all_fields.yaml
@@ -3,7 +3,7 @@
 # Note that you cannot specify all fields, some are mutually exclusive
 # For example, you can only specify either a miniodb deployment or
 # externalstorage connection, but not both
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
@@ -24,8 +24,6 @@ spec:
     limits:
       cpu: 500m
       memory: 1Gi
-    CABundleFileMountPath: /your/certbundle/path.crt
-    CABundleFileName: certbundlefilename.crt
     # requires this configmap to be created beforehand,
     cABundle:
       configMapKey: keyname
@@ -34,19 +32,6 @@ spec:
     customServerConfigMap:
       name: configmapname
       key: keyname
-    # the following are v1 specific options in spec.apiServer.*
-    applyTektonCustomResource: true
-    archiveLogs: false
-    artifactImage: quay.io/opendatahub/ds-pipelines-artifact-manager:latest
-    cacheImage: registry.access.redhat.com/ubi8/ubi-minimal:8.8
-    moveResultsImage: busybox
-    injectDefaultScript: true
-    stripEOF: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 120
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
   persistenceAgent:
     deploy: true
     image: quay.io/modh/odh-ml-pipelines-persistenceagent-container:v1.18.0-8
diff --git a/config/samples/v2/dspa-simple/dspa_simple.yaml b/config/samples/dspa-simple/dspa_simple.yaml
similarity index 89%
rename from config/samples/v2/dspa-simple/dspa_simple.yaml
rename to config/samples/dspa-simple/dspa_simple.yaml
index 5d28ff0a..6a035e12 100644
--- a/config/samples/v2/dspa-simple/dspa_simple.yaml
+++ b/config/samples/dspa-simple/dspa_simple.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v1/dspa-simple/kustomization.yaml b/config/samples/dspa-simple/kustomization.yaml
similarity index 100%
rename from config/samples/v1/dspa-simple/kustomization.yaml
rename to config/samples/dspa-simple/kustomization.yaml
diff --git a/config/samples/dspa_healthcheck.yaml b/config/samples/dspa_healthcheck.yaml
index 3f3bced1..3ad30c78 100644
--- a/config/samples/dspa_healthcheck.yaml
+++ b/config/samples/dspa_healthcheck.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v2/external-object-storage/dspa.yaml b/config/samples/external-object-storage/dspa.yaml
similarity index 88%
rename from config/samples/v2/external-object-storage/dspa.yaml
rename to config/samples/external-object-storage/dspa.yaml
index 2b06aa00..c07769d7 100644
--- a/config/samples/v2/external-object-storage/dspa.yaml
+++ b/config/samples/external-object-storage/dspa.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v2/external-object-storage/kustomization.yaml b/config/samples/external-object-storage/kustomization.yaml
similarity index 100%
rename from config/samples/v2/external-object-storage/kustomization.yaml
rename to config/samples/external-object-storage/kustomization.yaml
diff --git a/config/samples/v2/local-dev/dspa.yaml b/config/samples/local-dev/dspa.yaml
similarity index 94%
rename from config/samples/v2/local-dev/dspa.yaml
rename to config/samples/local-dev/dspa.yaml
index eaefd5be..24a63739 100644
--- a/config/samples/v2/local-dev/dspa.yaml
+++ b/config/samples/local-dev/dspa.yaml
@@ -4,7 +4,7 @@
 # a locally-run DSPO that manages an external cluster (common development practice)
 # would not be able to run the pre-deploy health checks on these prerequisite components
 # and therefore the DSPA will never fully deploy without disabling them, as this DSPA sample does
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: sample
diff --git a/config/samples/v2/local-dev/kustomization.yaml b/config/samples/local-dev/kustomization.yaml
similarity index 100%
rename from config/samples/v2/local-dev/kustomization.yaml
rename to config/samples/local-dev/kustomization.yaml
diff --git a/config/samples/v2/local-dev/storage-creds.yaml b/config/samples/local-dev/storage-creds.yaml
similarity index 100%
rename from config/samples/v2/local-dev/storage-creds.yaml
rename to config/samples/local-dev/storage-creds.yaml
diff --git a/config/samples/v1/custom-configs/artifact_script.yaml b/config/samples/v1/custom-configs/artifact_script.yaml
deleted file mode 100644
index 890f301a..00000000
--- a/config/samples/v1/custom-configs/artifact_script.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-apiVersion: v1
-data:
-  somekey: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-      if [ -f "$2" ]; then
-        tar -cvzf $1.tgz $2
-        aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://$ARTIFACT_BUCKET/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-      else
-        echo "$2 file does not exist. Skip artifact tracking for $1"
-      fi
-    }
-    push_log() {
-      cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-      push_artifact main-log step-main.log
-    }
-    strip_eof() {
-      if [ -f "$2" ]; then
-        awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-      fi
-    }
-kind: ConfigMap
-metadata:
-  name: custom-artifact-script
diff --git a/config/samples/v1/custom-configs/db-creds.yaml b/config/samples/v1/custom-configs/db-creds.yaml
deleted file mode 100644
index d84d13c8..00000000
--- a/config/samples/v1/custom-configs/db-creds.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-kind: Secret
-apiVersion: v1
-metadata:
-  name: testdbsecret
-  labels:
-    app: mariadb-sample
-    component: data-science-pipelines
-stringData:
-  password: testingpassword
-type: Opaque
diff --git a/config/samples/v1/custom-configs/dspa.yaml b/config/samples/v1/custom-configs/dspa.yaml
deleted file mode 100644
index 3cb024cd..00000000
--- a/config/samples/v1/custom-configs/dspa.yaml
+++ /dev/null
@@ -1,96 +0,0 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: sample
-spec:
-  apiServer:
-    deploy: true
-    enableSamplePipeline: true
-    applyTektonCustomResource: true
-    archiveLogs: false
-    cacheImage: registry.access.redhat.com/ubi8/ubi-minimal
-    moveResultsImage: busybox
-    injectDefaultScript: true
-    stripEOF: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 120
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
-    artifactScriptConfigMap:
-      name: custom-artifact-script
-      key: "somekey"
-    resources:
-      requests:
-        cpu: 250m
-        memory: 500Mi
-      limits:
-        cpu: 500m
-        memory: 1Gi
-  persistenceAgent:
-    deploy: true
-    numWorkers: 2
-    resources:
-      requests:
-        cpu: 120m
-        memory: 500Mi
-      limits:
-        cpu: 250m
-        memory: 1Gi
-  scheduledWorkflow:
-    deploy: true
-    cronScheduleTimezone: UTC
-    resources:
-      requests:
-        cpu: 120m
-        memory: 100Mi
-      limits:
-        cpu: 250m
-        memory: 250Mi
-  mlpipelineUI:
-    deploy: true
-    image: quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui
-    resources:
-      limits:
-        cpu: 100m
-        memory: 256Mi
-      requests:
-        cpu: 100m
-        memory: 256Mi
-    configMap: custom-ui-configmap
-  database:
-    mariaDB:
-      deploy: true
-      image: registry.redhat.io/rhel8/mariadb-103:1-188
-      username: mlpipeline
-      pipelineDBName: randomDBName
-      pvcSize: 10Gi
-      resources:
-        requests:
-          cpu: 300m
-          memory: 800Mi
-        limits:
-          cpu: "1"
-          memory: 1Gi
-      passwordSecret:
-        name: testdbsecret
-        key: password
-      storageClassName: ""
-  objectStorage:
-    minio:
-      deploy: true
-      image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance
-      bucket: mlpipeline
-      pvcSize: 10Gi
-      resources:
-        requests:
-          cpu: 200m
-          memory: 100Mi
-        limits:
-          cpu: 250m
-          memory: 1Gi
-      storageClassName: ""
-      s3CredentialsSecret:
-        secretName: teststoragesecret
-        accessKey: AWS_ACCESS_KEY_ID
-        secretKey: AWS_SECRET_ACCESS_KEY
diff --git a/config/samples/v1/custom-configs/kustomization.yaml b/config/samples/v1/custom-configs/kustomization.yaml
deleted file mode 100644
index 5b7f5481..00000000
--- a/config/samples/v1/custom-configs/kustomization.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-apiVersion: kustomize.config.k8s.io/v1beta1
-kind: Kustomization
-resources:
-  - dspa.yaml
-  - db-creds.yaml
-  - artifact_script.yaml
-  - storage-creds.yaml
-  - ui-configmap.yaml
diff --git a/config/samples/v1/custom-configs/storage-creds.yaml b/config/samples/v1/custom-configs/storage-creds.yaml
deleted file mode 100644
index 40903bf6..00000000
--- a/config/samples/v1/custom-configs/storage-creds.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  name: teststoragesecret
-  labels:
-    opendatahub.io/dashboard: 'true'
-    opendatahub.io/managed: 'true'
-  annotations:
-    opendatahub.io/connection-type: s3
-    openshift.io/display-name: Minio Data Connection
-data:
-  AWS_ACCESS_KEY_ID: QUtJQUlPU0ZPRE5ON0VYQU1QTEU=
-  AWS_SECRET_ACCESS_KEY: d0phbHJYVXRuRkVNSS9LN01ERU5HL2JQeFJmaUNZRVhBTVBMRUtFWQ==
-type: Opaque
diff --git a/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml b/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml
deleted file mode 100644
index 8bfa58a1..00000000
--- a/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml
+++ /dev/null
@@ -1,214 +0,0 @@
-# This file should not be used to deploy a DataSciencePipelinesApplication
-# Its main purpose is to show all possible fields that can be configured
-# Note that you cannot specify all fields, some are mutually exclusive
-# For example, you can only specify either a miniodb deployment or
-# externalstorage connection, but not both
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: sample
-  namespace: data-science-project
-spec:
-  apiServer:
-    deploy: true
-    image: quay.io/modh/odh-ml-pipelines-api-server-container:v1.18.0-8
-    enableSamplePipeline: true
-    applyTektonCustomResource: true
-    archiveLogs: false
-    artifactImage: quay.io/modh/odh-ml-pipelines-artifact-manager-container:v1.18.0-8
-    cacheImage: registry.access.redhat.com/ubi8/ubi-minimal
-    moveResultsImage: busybox
-    injectDefaultScript: true
-    stripEOF: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 120
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
-    resources:
-      requests:
-        cpu: 250m
-        memory: 500Mi
-      limits:
-        cpu: 500m
-        memory: 1Gi
-    # optional (default is: ds-pipeline-artifact-script-${metadata.name})
-    # apiserver deployment will fail if the specified custom config does not exist
-    # if default name is used, the configmap will be overwritten by the operator:
-
-    # artifactScriptConfigMap:
-    #   name: YourConfigMapName
-    #   key: "artifact_script"
-  persistenceAgent:
-    deploy: true
-    image: quay.io/modh/odh-ml-pipelines-persistenceagent-container:v1.18.0-8
-    numWorkers: 2  # Number of workers for the sync job.
-    resources:
-      requests:
-        cpu: 120m
-        memory: 500Mi
-      limits:
-        cpu: 250m
-        memory: 1Gi
-  scheduledWorkflow:
-    deploy: true
-    image: quay.io/modh/odh-ml-pipelines-scheduledworkflow-container:v1.18.0-8
-    cronScheduleTimezone: UTC
-    resources:
-      requests:
-        cpu: 120m
-        memory: 100Mi
-      limits:
-        cpu: 250m
-        memory: 250Mi
-  mlpipelineUI:
-    deploy: true
-    image: quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui
-    resources:
-      limits:
-        cpu: 100m
-        memory: 256Mi
-      requests:
-        cpu: 100m
-        memory: 256Mi
-    # requires this configmap to be created beforehand,
-    # otherwise operator will not deploy DSPA
-    configMap: ds-pipeline-ui-configmap
-  database:
-    disableHealthCheck: false
-    mariaDB:  # mutually exclusive with externalDB
-      deploy: true
-      image: registry.redhat.io/rhel8/mariadb-103:1-188
-      username: mlpipeline
-      pipelineDBName: randomDBName
-      pvcSize: 20Gi
-      resources:
-        requests:
-          cpu: 300m
-          memory: 800Mi
-        limits:
-          cpu: "1"
-          memory: 1Gi
-      # requires this secret to be created beforehand,
-      # otherwise operator will not deploy DSPA
-      passwordSecret:
-        name: ds-pipelines-db-sample
-        key: password
-#    externalDB:
-#      host: mysql:3306
-#      port: "8888"
-#      username: root
-#      pipelineDBName: randomDBName
-#      passwordSecret:
-#        name: somesecret
-#        key: somekey
-  objectStorage:
-    disableHealthCheck: false
-    minio:  # mutually exclusive with externalStorage
-      deploy: true
-      image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance
-      bucket: mlpipeline
-      pvcSize: 10Gi
-      resources:
-        requests:
-          cpu: 200m
-          memory: 100Mi
-        limits:
-          cpu: 250m
-          memory: 1Gi
-      # requires this secret to be created beforehand,
-      # otherwise operator will not deploy DSPA
-      s3CredentialsSecret:
-        secretName: somesecret-sample
-        accessKey: AWS_ACCESS_KEY_ID
-        secretKey: AWS_SECRET_ACCESS_KEY
-#    externalStorage:
-#      host: minio.com
-#      port: "9092"
-#      bucket: mlpipeline
-#      scheme: https
-#      s3CredentialsSecret:
-#        secretName: somesecret-db-sample
-#        accessKey: somekey
-#        secretKey: somekey
-  mlmd:  # Deploys an optional ML-Metadata Component
-    deploy: true
-    envoy:
-      image: quay.io/opendatahub/ds-pipelines-metadata-envoy:1.7.0
-      resources:
-        limits:
-          cpu: 100m
-          memory: 256Mi
-        requests:
-          cpu: 100m
-          memory: 256Mi
-    grpc:
-      image: quay.io/opendatahub/ds-pipelines-metadata-grpc:1.0.0
-      port: "8080"
-      resources:
-        limits:
-          cpu: 100m
-          memory: 256Mi
-        requests:
-          cpu: 100m
-          memory: 256Mi
-    writer:
-      image: quay.io/opendatahub/ds-pipelines-metadata-writer:1.1.0
-      resources:
-        limits:
-          cpu: 100m
-          memory: 256Mi
-        requests:
-          cpu: 100m
-          memory: 256Mi
-status:
-  # Reports True iff:
-  # * ApiServerReady, PersistenceAgentReady, ScheduledWorkflowReady, DatabaseReady, ObjectStorageReady report True
-  # AND
-  # * MLPipelinesUIReady is (Ready: True) OR is (Ready: False && DeploymentDisabled)
-  conditions:
-    - type: Ready
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: MinimumReplicasAvailable
-      message: 'some message'
-    - type: ApiServerReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: MinimumReplicasAvailable
-      message: 'some message'
-    - type: UserInterfaceReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: MinimumReplicasAvailable
-      message: 'some message'
-    - type: PersistenceAgentReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: MinimumReplicasAvailable
-      message: 'some message'
-    - type: ScheduledWorkflowReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: MinimumReplicasAvailable
-      message: 'some message'
-    # Do we need to do this?? API Server application already
-    # checks for db/storage connectivity, and pod will fail to come up
-    # in such a case.
-    - type: DatabaseReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: DataBaseReady
-      message: ''
-    - type: ObjectStorageReady
-      status: "True"
-      observedGeneration: 4
-      lastTransitionTime: '2023-02-02T21:00:00Z'
-      reason: ObjectStorageReady
-      message: ''
diff --git a/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml b/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml
deleted file mode 100644
index 9f24771a..00000000
--- a/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-# A simple DSPA with the Database and ObjectStore Health Checks Disabled
-#
-# Since the default database and storage options leverage internal Services,
-# a locally-run DSPO that manages an external cluster (common development practice)
-# would not be able to run the pre-deploy health checks on these prerequisite components
-# and therefore the DSPA will never fully deploy without disabling them, as this DSPA sample does
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: sample
-spec:
-  apiServer:
-    enableSamplePipeline: true
-  database:
-    disableHealthCheck: true
-  objectStorage:
-    disableHealthCheck: true
-    minio:
-      image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance'
-  mlpipelineUI:
-    image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui'
diff --git a/config/samples/v1/dspa-simple/dspa_simple.yaml b/config/samples/v1/dspa-simple/dspa_simple.yaml
deleted file mode 100644
index e4d7798d..00000000
--- a/config/samples/v1/dspa-simple/dspa_simple.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: sample
-spec:
-  apiServer:
-    enableSamplePipeline: true
-  # One of minio or externalStorage must be specified for objectStorage
-  # This example illustrates minimal deployment with minio
-  # This is NOT supported and should be used for dev testing/experimentation only.
-  # See external-object-storage/dspa.yaml for an example with external connection.
-  objectStorage:
-    minio:
-      # Image field is required
-      image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance'
-  # Optional
-  mlpipelineUI:
-    # Image field is required
-    image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui'
diff --git a/config/samples/v1/external-object-storage/dspa.yaml b/config/samples/v1/external-object-storage/dspa.yaml
deleted file mode 100644
index b2daa701..00000000
--- a/config/samples/v1/external-object-storage/dspa.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: sample
-spec:
-  apiServer:
-    enableSamplePipeline: true
-  objectStorage:
-    externalStorage:
-      bucket: rhods-dsp-dev
-      host: s3.us-east-2.amazonaws.com
-      region: us-east-2
-      s3CredentialsSecret:
-        accessKey: k8saccesskey
-        secretKey: k8ssecretkey
-        secretName: aws-bucket-creds
-      scheme: https
-  # Optional
-  mlpipelineUI:
-    # Image field is required
-    image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui'
diff --git a/config/samples/v1/external-object-storage/kustomization.yaml b/config/samples/v1/external-object-storage/kustomization.yaml
deleted file mode 100644
index 4e4ae0d0..00000000
--- a/config/samples/v1/external-object-storage/kustomization.yaml
+++ /dev/null
@@ -1,5 +0,0 @@
-apiVersion: kustomize.config.k8s.io/v1beta1
-kind: Kustomization
-resources:
-  - dspa.yaml
-  - storage-creds.yaml
diff --git a/config/samples/v1/external-object-storage/storage-creds.yaml b/config/samples/v1/external-object-storage/storage-creds.yaml
deleted file mode 100644
index 6d33d53f..00000000
--- a/config/samples/v1/external-object-storage/storage-creds.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  name: aws-bucket-creds
-  labels:
-    opendatahub.io/dashboard: 'true'
-    opendatahub.io/managed: 'true'
-  annotations:
-    opendatahub.io/connection-type: s3
-    openshift.io/display-name: AWS S3 Connection
-stringData:
-  k8saccesskey: someaccesskey
-  k8ssecretkey: somesecretkey
-type: Opaque
diff --git a/config/samples/v2/custom-configs/ui-configmap.yaml b/config/samples/v2/custom-configs/ui-configmap.yaml
deleted file mode 100644
index 7e2e7eba..00000000
--- a/config/samples/v2/custom-configs/ui-configmap.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-apiVersion: v1
-data:
-  viewer-pod-template.json: |-
-    {
-      "spec": {
-        "serviceAccountName": "ds-pipelines-viewer-sample"
-      }
-    }
-kind: ConfigMap
-metadata:
-  name: custom-ui-configmap
diff --git a/config/samples/v2/dspa-simple/kustomization.yaml b/config/samples/v2/dspa-simple/kustomization.yaml
deleted file mode 100644
index d673cd99..00000000
--- a/config/samples/v2/dspa-simple/kustomization.yaml
+++ /dev/null
@@ -1,2 +0,0 @@
-resources:
-- dspa_simple.yaml
diff --git a/config/v2/cache/clusterrole.yaml b/config/v2/cache/clusterrole.yaml
deleted file mode 100644
index 4ecb868a..00000000
--- a/config/v2/cache/clusterrole.yaml
+++ /dev/null
@@ -1,34 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app: cache-deployer
-  name: cache-deployer
-rules:
-- apiGroups:
-  - certificates.k8s.io
-  resources:
-  - certificatesigningrequests
-  verbs:
-  - create
-  - delete
-  - get
-  - update
-- apiGroups:
-  - admissionregistration.k8s.io
-  resources:
-  - mutatingwebhookconfigurations
-  verbs:
-  - create
-  - delete
-  - get
-  - list
-  - patch
-- apiGroups:
-  - certificates.k8s.io
-  resourceNames:
-  - kubernetes.io/*
-  resources:
-  - signers
-  verbs:
-  - approve
diff --git a/config/v2/configmaps/configartifactbucket.yaml b/config/v2/configmaps/configartifactbucket.yaml
deleted file mode 100644
index 2df1c0ba..00000000
--- a/config/v2/configmaps/configartifactbucket.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-artifact-bucket
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/configartifactpvc.yaml b/config/v2/configmaps/configartifactpvc.yaml
deleted file mode 100644
index a5d869bb..00000000
--- a/config/v2/configmaps/configartifactpvc.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-artifact-pvc
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/configdefaults.yaml b/config/v2/configmaps/configdefaults.yaml
deleted file mode 100644
index dc48532e..00000000
--- a/config/v2/configmaps/configdefaults.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-defaults
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/configobservability.yaml b/config/v2/configmaps/configobservability.yaml
deleted file mode 100644
index 6a12cdb7..00000000
--- a/config/v2/configmaps/configobservability.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-observability
-  labels:
-    app.kubernetes.io/component: resolvers
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/configspire.yaml b/config/v2/configmaps/configspire.yaml
deleted file mode 100644
index c4dc80b4..00000000
--- a/config/v2/configmaps/configspire.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-spire
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/configtrustedsources.yaml b/config/v2/configmaps/configtrustedsources.yaml
deleted file mode 100644
index 9c1cd485..00000000
--- a/config/v2/configmaps/configtrustedsources.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: config-trusted-resources
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/featureflags.yaml b/config/v2/configmaps/featureflags.yaml
deleted file mode 100644
index 9218692c..00000000
--- a/config/v2/configmaps/featureflags.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-kind: ConfigMap
-apiVersion: v1
-metadata:
-  name: feature-flags
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipelines
-    operator.tekton.dev/operand-name: tektoncd-pipelines
diff --git a/config/v2/configmaps/kustomization.yaml b/config/v2/configmaps/kustomization.yaml
deleted file mode 100644
index df5f2f95..00000000
--- a/config/v2/configmaps/kustomization.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-resources:
-- configdefaults.yaml
-- configobservability.yaml
-- configspire.yaml
-- featureflags.yaml
-- configartifactbucket.yaml
-- configartifactpvc.yaml
-- configtrustedsources.yaml
diff --git a/config/v2/exithandler/clusterrole.leaderelection.yaml b/config/v2/exithandler/clusterrole.leaderelection.yaml
deleted file mode 100644
index 6a620995..00000000
--- a/config/v2/exithandler/clusterrole.leaderelection.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-  name: exithandler-leader-election
-rules:
-- apiGroups:
-  - coordination.k8s.io
-  resources:
-  - leases
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - watch
diff --git a/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml b/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml
deleted file mode 100644
index efcb8bdf..00000000
--- a/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml
+++ /dev/null
@@ -1,54 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/component: controller
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-  name: exithandler-controller-cluster-access
-rules:
-- apiGroups:
-  - tekton.dev
-  resources:
-  - runs
-  - customruns
-  - taskruns
-  - pipelineruns
-  - runs/status
-  - customruns/status
-  - taskruns/status
-  - pipelineruns/status
-  - runs/finalizers
-  - customruns/finalizers
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - patch
-  - watch
-- apiGroups:
-  - custom.tekton.dev
-  resources:
-  - exithandlers
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - patch
-  - watch
-- apiGroups:
-  - apps
-  resources:
-  - deployments
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - patch
-  - watch
diff --git a/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml b/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml
deleted file mode 100644
index f0090f30..00000000
--- a/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/component: controller
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-  name: exithandler-controller-tenant-access
-rules:
-- apiGroups:
-  - ""
-  resources:
-  - events
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - watch
diff --git a/config/v2/exithandler/webhook/kustomization.yaml b/config/v2/exithandler/webhook/kustomization.yaml
deleted file mode 100644
index 8202e77f..00000000
--- a/config/v2/exithandler/webhook/kustomization.yaml
+++ /dev/null
@@ -1,10 +0,0 @@
-resources:
-- clusterrole.clusteraccess.yaml
-- clusterrolebinding.clusteraccess.yaml
-- deployment.yaml
-- mutatingwebhookconfig.yaml
-- role.yaml
-- rolebinding.yaml
-- service.yaml
-- serviceaccount.yaml
-- validatingwebhookconfig.yaml
diff --git a/config/v2/kfptask/clusterrole.leaderelection.yaml b/config/v2/kfptask/clusterrole.leaderelection.yaml
deleted file mode 100644
index ef9689d6..00000000
--- a/config/v2/kfptask/clusterrole.leaderelection.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-  name: leader-election
-rules:
-- apiGroups:
-  - coordination.k8s.io
-  resources:
-  - leases
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - watch
diff --git a/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml b/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml
deleted file mode 100644
index 9d32b310..00000000
--- a/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml
+++ /dev/null
@@ -1,82 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/component: webhook
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-  name: webhook-cluster-access
-rules:
-- apiGroups:
-  - apiextensions.k8s.io
-  resources:
-  - customresourcedefinitions
-  - customresourcedefinitions/status
-  verbs:
-  - get
-  - list
-  - update
-- apiGroups:
-  - ""
-  resources:
-  - namespaces
-  verbs:
-  - get
-  - list
-  - update
-- apiGroups:
-  - admissionregistration.k8s.io
-  resources:
-  - mutatingwebhookconfigurations
-  - validatingwebhookconfigurations
-  verbs:
-  - list
-  - watch
-- apiGroups:
-  - admissionregistration.k8s.io
-  resourceNames:
-  - webhook.kfptask.custom.tekton.dev
-  resources:
-  - mutatingwebhookconfigurations
-  verbs:
-  - get
-  - update
-  - delete
-- apiGroups:
-  - apps
-  resources:
-  - deployments
-  - deployments/finalizers
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-- apiGroups:
-  - ""
-  resources:
-  - namespaces/finalizers
-  resourceNames:
-  - openshift-pipelines
-  verbs:
-  - update
-- apiGroups:
-  - admissionregistration.k8s.io
-  resourceNames:
-  - validation.webhook.kfptask.custom.tekton.dev
-  resources:
-  - validatingwebhookconfigurations
-  verbs:
-  - get
-  - update
-  - delete
-- apiGroups:
-  - policy
-  resourceNames:
-  - tekton-pipelines
-  - openshift-pipelines
-  resources:
-  - podsecuritypolicies
-  verbs:
-  - use
diff --git a/config/v2/kfptask/webhook/kustomization.yaml b/config/v2/kfptask/webhook/kustomization.yaml
deleted file mode 100644
index df691ded..00000000
--- a/config/v2/kfptask/webhook/kustomization.yaml
+++ /dev/null
@@ -1,11 +0,0 @@
-resources:
-- clusterrole.clusteraccess.yaml
-- clusterrolebinding.clusteraccess.yaml
-- clusterrolebinding.leaderelection.yaml
-- deployment.yaml
-- mutatingwebhookconfig.yaml
-- role.yaml
-- rolebinding.yaml
-- service.yaml
-- serviceaccount.yaml
-- validatingwebhookconfig.yaml
diff --git a/config/v2/pipelineloop/clusterrole.leaderelection.yaml b/config/v2/pipelineloop/clusterrole.leaderelection.yaml
deleted file mode 100644
index 341c80e1..00000000
--- a/config/v2/pipelineloop/clusterrole.leaderelection.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipeline-loops
-  name: pipelineloop-leader-election
-rules:
-- apiGroups:
-  - coordination.k8s.io
-  resources:
-  - leases
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - watch
diff --git a/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml b/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml
deleted file mode 100644
index e16c0e29..00000000
--- a/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-apiVersion: rbac.authorization.k8s.io/v1
-kind: ClusterRole
-metadata:
-  labels:
-    app.kubernetes.io/component: controller
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: tekton-pipeline-loops
-  name: pipelineloop-controller-tenant-access
-rules:
-- apiGroups:
-  - ""
-  resources:
-  - events
-  verbs:
-  - get
-  - list
-  - create
-  - update
-  - delete
-  - watch
diff --git a/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml b/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml
deleted file mode 100644
index ae60d20f..00000000
--- a/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  labels:
-    app.kubernetes.io/component: webhook
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-    pipeline.tekton.dev/release: devel
-  name: kfp-exithandler-webhook-certs
diff --git a/config/v2/secrets/kfptaskwebhookcertssecret.yaml b/config/v2/secrets/kfptaskwebhookcertssecret.yaml
deleted file mode 100644
index 6387033c..00000000
--- a/config/v2/secrets/kfptaskwebhookcertssecret.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  labels:
-    app.kubernetes.io/component: webhook
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-    pipeline.tekton.dev/release: devel
-  name: kfptask-webhook-certs
diff --git a/config/v2/secrets/kustomization.yaml b/config/v2/secrets/kustomization.yaml
deleted file mode 100644
index 2907d843..00000000
--- a/config/v2/secrets/kustomization.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-resources:
-- kfpexithandlerwebhookcertssecret.yaml
-- kfptaskwebhookcertssecret.yaml
-- tektonpipelineloopwebhookcertssecret.yaml
diff --git a/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml b/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml
deleted file mode 100644
index 262a53f5..00000000
--- a/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml
+++ /dev/null
@@ -1,9 +0,0 @@
-apiVersion: v1
-kind: Secret
-metadata:
-  labels:
-    app.kubernetes.io/component: webhook
-    app.kubernetes.io/instance: default
-    app.kubernetes.io/part-of: kfp-tekton
-    pipeline.tekton.dev/release: devel
-  name: tektonpipelineloop-webhook-certs
diff --git a/controllers/apiserver.go b/controllers/apiserver.go
index 33196d9e..929ede23 100644
--- a/controllers/apiserver.go
+++ b/controllers/apiserver.go
@@ -18,7 +18,7 @@ package controllers
 import (
 	"context"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	v1 "github.com/openshift/api/route/v1"
 	corev1 "k8s.io/api/core/v1"
 	"k8s.io/apimachinery/pkg/types"
@@ -39,7 +39,7 @@ var samplePipelineTemplates = map[string]string{
 	"sample-config": "apiserver/sample-pipeline/sample-config.yaml.tmpl",
 }
 
-func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) error {
+func (r *DSPAReconciler) ReconcileAPIServer(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication, params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
 
 	if !dsp.Spec.APIServer.Deploy {
diff --git a/controllers/apiserver_test.go b/controllers/apiserver_test.go
index 8d11138b..c757521f 100644
--- a/controllers/apiserver_test.go
+++ b/controllers/apiserver_test.go
@@ -22,7 +22,7 @@ import (
 
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	appsv1 "k8s.io/api/apps/v1"
 )
@@ -33,21 +33,24 @@ func TestDeployAPIServer(t *testing.T) {
 	expectedAPIServerName := apiServerDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with deployed APIServer
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			APIServer: &dspav1alpha1.APIServer{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			PodToPodTLS: boolPtr(false),
+			APIServer: &dspav1.APIServer{
 				Deploy: true,
 			},
-			MLMD: &dspav1alpha1.MLMD{},
-			Database: &dspav1alpha1.Database{
+			MLMD: &dspav1.MLMD{
+				Deploy: true,
+			},
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -92,9 +95,9 @@ func TestDontDeployAPIServer(t *testing.T) {
 	expectedAPIServerName := apiServerDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with non-deployed APIServer
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			APIServer: &dspav1alpha1.APIServer{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			APIServer: &dspav1.APIServer{
 				Deploy: false,
 			},
 		},
@@ -128,21 +131,24 @@ func TestApiServerEndpoints(t *testing.T) {
 	expectedAPIServerName := apiServerDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with deployed APIServer
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			APIServer: &dspav1alpha1.APIServer{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			PodToPodTLS: boolPtr(false),
+			APIServer: &dspav1.APIServer{
+				Deploy: true,
+			},
+			MLMD: &dspav1.MLMD{
 				Deploy: true,
 			},
-			MLMD: &dspav1alpha1.MLMD{},
-			Database: &dspav1alpha1.Database{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -169,7 +175,7 @@ func TestApiServerEndpoints(t *testing.T) {
 	err = reconciler.ReconcileAPIServer(ctx, dspa, params)
 	assert.Nil(t, err)
 
-	dspa_created := &dspav1alpha1.DataSciencePipelinesApplication{}
+	dspa_created := &dspav1.DataSciencePipelinesApplication{}
 	created, err = reconciler.IsResourceCreated(ctx, dspa, testDSPAName, testNamespace)
 	assert.NotNil(t, dspa_created.Status.Components.APIServer.Url)
 	assert.NotNil(t, dspa_created.Status.Components.APIServer.ExternalUrl)
diff --git a/controllers/common.go b/controllers/common.go
index 1e9ea675..86a07db2 100644
--- a/controllers/common.go
+++ b/controllers/common.go
@@ -16,16 +16,14 @@ limitations under the License.
 package controllers
 
 import (
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 )
 
 var commonTemplatesDir = "common/default"
-var argoOnlyCommonTemplatesDir = "common/argo"
-var tektonOnlyCommonTemplatesDir = "common/tekton"
 
 const commonCusterRolebindingTemplate = "common/no-owner/clusterrolebinding.yaml.tmpl"
 
-func (r *DSPAReconciler) ReconcileCommon(dsp *dspav1alpha1.DataSciencePipelinesApplication, params *DSPAParams) error {
+func (r *DSPAReconciler) ReconcileCommon(dsp *dspav1.DataSciencePipelinesApplication, params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
 
 	log.Info("Applying Common Resources")
@@ -33,17 +31,6 @@ func (r *DSPAReconciler) ReconcileCommon(dsp *dspav1alpha1.DataSciencePipelinesA
 	if err != nil {
 		return err
 	}
-
-	log.Info("Applying Engine-Specific Common Resources")
-	if params.UsingArgoEngineDriver(dsp) {
-		err = r.ApplyDir(dsp, params, argoOnlyCommonTemplatesDir)
-	} else if params.UsingTektonEngineDriver(dsp) {
-		err = r.ApplyDir(dsp, params, tektonOnlyCommonTemplatesDir)
-	}
-	if err != nil {
-		return err
-	}
-
 	err = r.ApplyWithoutOwner(params, commonCusterRolebindingTemplate)
 	if err != nil {
 		return err
diff --git a/controllers/common_test.go b/controllers/common_test.go
index c0b41166..1ba1ce7c 100644
--- a/controllers/common_test.go
+++ b/controllers/common_test.go
@@ -20,7 +20,7 @@ package controllers
 import (
 	"testing"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	networkingv1 "k8s.io/api/networking/v1"
 )
@@ -32,17 +32,17 @@ func TestDeployCommonPolicies(t *testing.T) {
 	expectedEnvoyNetworkPolicyName := "ds-pipelines-envoy-testdspa"
 
 	// Construct Basic DSPA Spec
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go
index b3b0c2b3..63c92bc5 100644
--- a/controllers/config/defaults.go
+++ b/controllers/config/defaults.go
@@ -22,11 +22,16 @@ import (
 	"time"
 
 	"github.com/go-logr/logr"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/spf13/viper"
 	"k8s.io/apimachinery/pkg/api/resource"
 )
 
+const DSPV2VersionString = "v2"
+const DSPVersionk8sLabel = "dsp-version"
+
+var SupportedDSPVersions = []string{DSPV2VersionString}
+
 const (
 	DefaultImageValue = "MustSetInConfig"
 
@@ -48,9 +53,7 @@ const (
 	DefaultSystemSSLCertFile     = "SSL_CERT_FILE"
 	DefaultSystemSSLCertFilePath = "/etc/pki/tls/certs/ca-bundle.crt" // Fedora/RHEL 6
 
-	MLPipelineUIConfigMapPrefix       = "ds-pipeline-ui-configmap-"
-	ArtifactScriptConfigMapNamePrefix = "ds-pipeline-artifact-script-"
-	ArtifactScriptConfigMapKey        = "artifact_script"
+	MLPipelineUIConfigMapPrefix = "ds-pipeline-ui-configmap-"
 	CustomServerConfigMapNamePrefix = "ds-pipeline-server-config-"
 	CustomServerConfigMapNameKey    = "config.json"
 
@@ -85,53 +88,26 @@ const (
 
 // DSPO Config File Paths
 const (
-	APIServerImagePath            = "Images.ApiServer"
-	APIServerArtifactImagePath    = "Images.Artifact"
-	PersistenceAgentImagePath     = "Images.PersistentAgent"
-	ScheduledWorkflowImagePath    = "Images.ScheduledWorkflow"
-	APIServerCacheImagePath       = "Images.Cache"
-	APIServerMoveResultsImagePath = "Images.MoveResultsImage"
-	MariaDBImagePath              = "Images.MariaDB"
-	OAuthProxyImagePath           = "Images.OAuthProxy"
-	MlmdEnvoyImagePath            = "Images.MlmdEnvoy"
-	MlmdGRPCImagePath             = "Images.MlmdGRPC"
-	MlmdWriterImagePath           = "Images.MlmdWriter"
+	// Images
+	APIServerImagePath              = "Images.ApiServer"
+	PersistenceAgentImagePath       = "Images.PersistentAgent"
+	ScheduledWorkflowImagePath      = "Images.ScheduledWorkflow"
+	MlmdEnvoyImagePath              = "Images.MlmdEnvoy"
+	MlmdGRPCImagePath               = "Images.MlmdGRPC"
+	LauncherImagePath               = "Images.LauncherImage"
+	DriverImagePath                 = "Images.DriverImage"
+	ArgoExecImagePath               = "Images.ArgoExecImage"
+	ArgoWorkflowControllerImagePath = "Images.ArgoWorkflowController"
+	MariaDBImagePath                = "Images.MariaDB"
+	OAuthProxyImagePath             = "Images.OAuthProxy"
 
+	// Other configs
 	ObjStoreConnectionTimeoutConfigName      = "DSPO.HealthCheck.ObjectStore.ConnectionTimeout"
 	DBConnectionTimeoutConfigName            = "DSPO.HealthCheck.Database.ConnectionTimeout"
 	RequeueTimeConfigName                    = "DSPO.RequeueTime"
 	ApiServerIncludeOwnerReferenceConfigName = "DSPO.ApiServer.IncludeOwnerReference"
 )
 
-// DSPV2-Argo Image Paths
-const (
-	APIServerImagePathV2Argo             = "ImagesV2.Argo.ApiServer"
-	APIServerArtifactImagePathV2Argo     = "ImagesV2.Argo.Artifact"
-	APIServerCacheImagePathV2Argo        = "ImagesV2.Argo.Cache"
-	APIServerMoveResultsImagePathV2Argo  = "ImagesV2.Argo.MoveResultsImage"
-	APIServerArgoLauncherImagePathV2Argo = "ImagesV2.Argo.ArgoLauncherImage"
-	APIServerArgoDriverImagePathV2Argo   = "ImagesV2.Argo.ArgoDriverImage"
-	PersistenceAgentImagePathV2Argo      = "ImagesV2.Argo.PersistentAgent"
-	ScheduledWorkflowImagePathV2Argo     = "ImagesV2.Argo.ScheduledWorkflow"
-	MlmdEnvoyImagePathV2Argo             = "ImagesV2.Argo.MlmdEnvoy"
-	MlmdGRPCImagePathV2Argo              = "ImagesV2.Argo.MlmdGRPC"
-	ArgoWorkflowControllerImagePath      = "ImagesV2.Argo.WorkflowController"
-	ArgoExecImagePath                    = "ImagesV2.Argo.ArgoExecImage"
-)
-
-// DSPV2-Tekton Image Paths
-// Note: These won't exist in config but aren't used, adding in case of future support
-// TODO: remove
-const (
-	APIServerImagePathV2Tekton            = "ImagesV2.Tekton.ApiServer"
-	APIServerArtifactImagePathV2Tekton    = "ImagesV2.Tekton.Artifact"
-	APIServerCacheImagePathV2Tekton       = "ImagesV2.Tekton.Cache"
-	APIServerMoveResultsImagePathV2Tekton = "ImagesV2.Tekton.MoveResultsImage"
-	PersistenceAgentImagePathV2Tekton     = "ImagesV2.Tekton.PersistentAgent"
-	ScheduledWorkflowImagePathV2Tekton    = "ImagesV2.Tekton.ScheduledWorkflow"
-	MlmdEnvoyImagePathV2Tekton            = "ImagesV2.Tekton.MlmdEnvoy"
-	MlmdGRPCImagePathV2Tekton             = "ImagesV2.Tekton.MlmdGRPC"
-)
-
 // DSPA Status Condition Types
 const (
 	DatabaseAvailable = "DatabaseAvailable"
@@ -153,6 +129,7 @@ const (
 	FailingToDeploy             = "FailingToDeploy"
 	Deploying                   = "Deploying"
 	ComponentDeploymentNotFound = "ComponentDeploymentNotFound"
+	UnsupportedVersion          = "UnsupportedVersion"
 )
 
 // Any required Configmap paths can be added here,
@@ -160,11 +137,8 @@ const (
 // validation check
 var requiredFields = []string{
 	APIServerImagePath,
-	APIServerArtifactImagePath,
 	PersistenceAgentImagePath,
 	ScheduledWorkflowImagePath,
-	APIServerCacheImagePath,
-	APIServerMoveResultsImagePath,
 	MariaDBImagePath,
 	OAuthProxyImagePath,
 }
@@ -196,18 +170,17 @@ var (
 	MlPipelineUIResourceRequirements = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi"))
 	MlmdEnvoyResourceRequirements    = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi"))
 	MlmdGRPCResourceRequirements     = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi"))
-	MlmdWriterResourceRequirements   = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi"))
 )
 
 type DBExtraParams map[string]string
 
-func createResourceRequirement(RequestsCPU resource.Quantity, RequestsMemory resource.Quantity, LimitsCPU resource.Quantity, LimitsMemory resource.Quantity) dspav1alpha1.ResourceRequirements {
-	return dspav1alpha1.ResourceRequirements{
-		Requests: &dspav1alpha1.Resources{
+func createResourceRequirement(RequestsCPU resource.Quantity, RequestsMemory resource.Quantity, LimitsCPU resource.Quantity, LimitsMemory resource.Quantity) dspav1.ResourceRequirements {
+	return dspav1.ResourceRequirements{
+		Requests: &dspav1.Resources{
 			CPU:    RequestsCPU,
 			Memory: RequestsMemory,
 		},
-		Limits: &dspav1alpha1.Resources{
+		Limits: &dspav1.Resources{
 			CPU:    LimitsCPU,
 			Memory: LimitsMemory,
 		},
@@ -252,3 +225,7 @@ func GetDefaultDBExtraParams(params DBExtraParams, log logr.Logger) (string, err
 	}
 	return string(extraParamsJson), nil
 }
+
+func GetSupportedDSPAVersions() []string {
+	return SupportedDSPVersions
+}
diff --git a/controllers/database.go b/controllers/database.go
index ce15f1c4..9243d176 100644
--- a/controllers/database.go
+++ b/controllers/database.go
@@ -30,7 +30,7 @@ import (
 	"github.com/go-logr/logr"
 	"github.com/go-sql-driver/mysql"
 	_ "github.com/go-sql-driver/mysql"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 	"k8s.io/apimachinery/pkg/util/json"
 	"os"
@@ -161,7 +161,7 @@ var ConnectAndQueryDatabase = func(
 	return true, nil
 }
 
-func (r *DSPAReconciler) isDatabaseAccessible(dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) isDatabaseAccessible(dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) (bool, error) {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
 
@@ -233,7 +233,7 @@ func (r *DSPAReconciler) isDatabaseAccessible(dsp *dspav1alpha1.DataSciencePipel
 	return dbHealthCheckPassed, err
 }
 
-func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
 
@@ -273,7 +273,7 @@ func (r *DSPAReconciler) ReconcileDatabase(ctx context.Context, dsp *dspav1alpha
 	// Update the CR with the state of mariaDB to accurately portray
 	// desired state.
if !databaseSpecified { - dsp.Spec.Database = &dspav1alpha1.Database{} + dsp.Spec.Database = &dspav1.Database{} } if !databaseSpecified || defaultDBRequired { dsp.Spec.Database.MariaDB = params.MariaDB.DeepCopy() diff --git a/controllers/database_test.go b/controllers/database_test.go index 9147f3c5..6f554014 100644 --- a/controllers/database_test.go +++ b/controllers/database_test.go @@ -20,7 +20,7 @@ package controllers import ( "testing" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/stretchr/testify/assert" appsv1 "k8s.io/api/apps/v1" ) @@ -31,17 +31,17 @@ func TestDeployDatabase(t *testing.T) { expectedDatabaseName := "mariadb-testdspa" // Construct DSPA Spec with deployed MariaDB Database - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - Database: &dspav1alpha1.Database{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -81,17 +81,17 @@ func TestDontDeployDatabase(t *testing.T) { expectedDatabaseName := "mariadb-testdspa" // Construct DSPA Spec with non-deployed MariaDB Database - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - Database: &dspav1alpha1.Database{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: false, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, diff --git a/controllers/dspastatus/dspa_status.go b/controllers/dspastatus/dspa_status.go index f7da0978..c8d09ecf 100644 --- a/controllers/dspastatus/dspa_status.go +++ b/controllers/dspastatus/dspa_status.go @@ -3,7 +3,7 @@ package dspastatus import ( "fmt" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) @@ -23,10 +23,12 @@ type DSPAStatus interface { SetMLMDProxyStatus(mlmdProxyReady metav1.Condition) + SetDSPANotReady(err error, reason string) + GetConditions() []metav1.Condition } -func NewDSPAStatus(dspa *dspav1alpha1.DataSciencePipelinesApplication) DSPAStatus { +func NewDSPAStatus(dspa *dspav1.DataSciencePipelinesApplication) DSPAStatus { databaseCondition := BuildUnknownCondition(config.DatabaseAvailable) objStoreCondition := BuildUnknownCondition(config.ObjectStoreAvailable) apiServerCondition := BuildUnknownCondition(config.APIServerReady) @@ -46,13 +48,14 @@ func NewDSPAStatus(dspa *dspav1alpha1.DataSciencePipelinesApplication) DSPAStatu } type dspaStatus struct { - dspa *dspav1alpha1.DataSciencePipelinesApplication + dspa *dspav1.DataSciencePipelinesApplication databaseAvailable *metav1.Condition objStoreAvailable *metav1.Condition apiServerReady *metav1.Condition persistenceAgentReady 
*metav1.Condition scheduledWorkflowReady *metav1.Condition mlmdProxyReady *metav1.Condition + dspaReady *metav1.Condition } func (s *dspaStatus) SetDatabaseNotReady(err error, reason string) { @@ -100,6 +103,20 @@ func (s *dspaStatus) SetMLMDProxyStatus(mlmdProxyReady metav1.Condition) { s.mlmdProxyReady = &mlmdProxyReady } +// SetDSPANotReady is an override option for reporting a custom +// overall DSP Ready state. This is the condition type that +// reports on the overall state of the DSPA. If this is never +// called, then the overall ready state is auto generated based +// on the conditions of the other components. +func (s *dspaStatus) SetDSPANotReady(err error, reason string) { + message := "" + if err != nil { + message = err.Error() + } + condition := BuildFalseCondition(config.CrReady, reason, message) + s.dspaReady = &condition +} + func (s *dspaStatus) GetConditions() []metav1.Condition { componentConditions := []metav1.Condition{ *s.getDatabaseAvailableCondition(), @@ -119,23 +136,28 @@ func (s *dspaStatus) GetConditions() []metav1.Condition { } } - var crReady metav1.Condition - - if allReady { - crReady = metav1.Condition{ - Type: config.CrReady, - Status: metav1.ConditionTrue, - Reason: config.MinimumReplicasAvailable, - Message: "All components are ready.", - LastTransitionTime: metav1.Now(), - } - } else { - crReady = metav1.Condition{ - Type: config.CrReady, - Status: metav1.ConditionFalse, - Reason: config.MinimumReplicasAvailable, - Message: failureMessages, - LastTransitionTime: metav1.Now(), + // Allow for dspa ready status to be overridden + // otherwise we auto generate the overall ready status + // based off of the other components + crReady := s.dspaReady + + if s.dspaReady == nil { + if allReady { + crReady = &metav1.Condition{ + Type: config.CrReady, + Status: metav1.ConditionTrue, + Reason: config.MinimumReplicasAvailable, + Message: "All components are ready.", + LastTransitionTime: metav1.Now(), + } + } else { + crReady = &metav1.Condition{ + Type: config.CrReady, + Status: metav1.ConditionFalse, + Reason: config.MinimumReplicasAvailable, + Message: failureMessages, + LastTransitionTime: metav1.Now(), + } } } @@ -146,7 +168,7 @@ func (s *dspaStatus) GetConditions() []metav1.Condition { *s.persistenceAgentReady, *s.scheduledWorkflowReady, *s.mlmdProxyReady, - crReady, + *crReady, } for i, condition := range s.dspa.Status.Conditions { diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index 8a6a644e..8f29d8da 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -19,14 +19,13 @@ package controllers import ( "context" "fmt" - "github.com/opendatahub-io/data-science-pipelines-operator/controllers/dspastatus" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "sigs.k8s.io/controller-runtime/pkg/controller" "github.com/go-logr/logr" mf "github.com/manifestival/manifestival" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/util" routev1 "github.com/openshift/api/route/v1" @@ -82,18 +81,25 @@ func (r *DSPAReconciler) Apply(owner mf.Owner, params *DSPAParams, template stri if err != nil { return fmt.Errorf("error loading template (%s) yaml: %w", template, err) } + + // Apply the owner injection transformation tmplManifest, err = 
tmplManifest.Transform( mf.InjectOwner(owner), + // Apply dsp-version= label on all resources managed by this dspo + util.AddLabelTransformer(config.DSPVersionk8sLabel, params.DSPVersion), + util.AddDeploymentPodLabelTransformer(config.DSPVersionk8sLabel, params.DSPVersion), ) if err != nil { return err } + // Apply dsp-version labels to all manifests tmplManifest, err = tmplManifest.Transform(fns...) if err != nil { return err } + // Apply the manifest return tmplManifest.Apply() } @@ -159,8 +165,6 @@ func (r *DSPAReconciler) DeleteResourceIfItExists(ctx context.Context, obj clien //+kubebuilder:rbac:groups=kubeflow.org,resources=*,verbs=* //+kubebuilder:rbac:groups=batch,resources=jobs,verbs=* //+kubebuilder:rbac:groups=machinelearning.seldon.io,resources=seldondeployments,verbs=* -//+kubebuilder:rbac:groups=tekton.dev,resources=*,verbs=* -//+kubebuilder:rbac:groups=custom.tekton.dev,resources=pipelineloops,verbs=* //+kubebuilder:rbac:groups=ray.io,resources=rayclusters;rayjobs;rayservices,verbs=create;get;list;patch;delete //+kubebuilder:rbac:groups=authorization.k8s.io,resources=subjectaccessreviews,verbs=create //+kubebuilder:rbac:groups=authentication.k8s.io,resources=tokenreviews,verbs=create @@ -176,7 +180,7 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. params := &DSPAParams{} - dspa := &dspav1alpha1.DataSciencePipelinesApplication{} + dspa := &dspav1.DataSciencePipelinesApplication{} err := r.Get(ctx, req.NamespacedName, dspa) if err != nil && apierrs.IsNotFound(err) { log.V(1).Info("DSPA resource was not found, assuming it was recently deleted, nothing to do here") @@ -190,12 +194,26 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. defer r.updateStatus(ctx, dspa, dspaStatus, log, req) + if !util.DSPAWithSupportedDSPVersion(dspa) { + err1 := fmt.Errorf("unsupported DSP version %s detected. 
Please manually remove "+ + "this DSP resource and re-apply with a supported version field set", dspa.Spec.DSPVersion) + dspaStatus.SetDatabaseNotReady(err1, config.UnsupportedVersion) + dspaStatus.SetObjStoreNotReady(err1, config.UnsupportedVersion) + r.setStatusAsUnsupported(config.APIServerReady, err1, dspaStatus.SetApiServerStatus) + r.setStatusAsUnsupported(config.PersistenceAgentReady, err1, dspaStatus.SetPersistenceAgentStatus) + r.setStatusAsUnsupported(config.ScheduledWorkflowReady, err1, dspaStatus.SetScheduledWorkflowStatus) + r.setStatusAsUnsupported(config.MLMDProxyReady, err1, dspaStatus.SetMLMDProxyStatus) + dspaStatus.SetDSPANotReady(err1, config.UnsupportedVersion) + log.Info(err1.Error()) + return ctrl.Result{}, nil + } + // FixMe: Hack for stubbing gvk during tests as these are not populated by test suite // https://github.com/opendatahub-io/data-science-pipelines-operator/pull/7#discussion_r1102887037 // In production we expect these to be populated if dspa.Kind == "" { dspa = dspa.DeepCopy() - gvk := dspav1alpha1.GroupVersion.WithKind("DataSciencePipelinesApplication") + gvk := dspav1.GroupVersion.WithKind("DataSciencePipelinesApplication") dspa.APIVersion, dspa.Kind = gvk.Version, gvk.Kind } @@ -351,8 +369,13 @@ func (r *DSPAReconciler) setStatusAsNotReady(conditionType string, err error, se setStatus(condition) } +func (r *DSPAReconciler) setStatusAsUnsupported(conditionType string, err error, setStatus func(metav1.Condition)) { + condition := dspastatus.BuildFalseCondition(conditionType, config.UnsupportedVersion, err.Error()) + setStatus(condition) +} + func (r *DSPAReconciler) setStatus(ctx context.Context, resourceName string, conditionType string, - dspa *dspav1alpha1.DataSciencePipelinesApplication, setStatus func(metav1.Condition), + dspa *dspav1.DataSciencePipelinesApplication, setStatus func(metav1.Condition), log logr.Logger) { condition, err := r.evaluateCondition(ctx, dspa, resourceName, conditionType) setStatus(condition) @@ -361,7 +384,7 @@ func (r *DSPAReconciler) setStatus(ctx context.Context, resourceName string, con } } -func (r *DSPAReconciler) updateStatus(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, +func (r *DSPAReconciler) updateStatus(ctx context.Context, dspa *dspav1.DataSciencePipelinesApplication, dspaStatus dspastatus.DSPAStatus, log logr.Logger, req ctrl.Request) { r.refreshDspa(ctx, dspa, req, log) @@ -380,7 +403,7 @@ func (r *DSPAReconciler) updateStatus(ctx context.Context, dspa *dspav1alpha1.Da // evaluateCondition evaluates if condition with "name" is in condition of type "conditionType". // this procedure is valid only for conditions with bool status type, for conditions of non bool type // results are undefined. 
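+// The resulting condition is derived from the component's Deployment: it starts as Unknown (via BuildUnknownCondition) and is resolved by inspecting the Deployment's state.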
-func (r *DSPAReconciler) evaluateCondition(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, component string, conditionType string) (metav1.Condition, error) { +func (r *DSPAReconciler) evaluateCondition(ctx context.Context, dspa *dspav1.DataSciencePipelinesApplication, component string, conditionType string) (metav1.Condition, error) { condition := dspastatus.BuildUnknownCondition(conditionType) deployment := &appsv1.Deployment{} @@ -493,14 +516,14 @@ func (r *DSPAReconciler) evaluateCondition(ctx context.Context, dspa *dspav1alph } -func (r *DSPAReconciler) refreshDspa(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication, req ctrl.Request, log logr.Logger) { +func (r *DSPAReconciler) refreshDspa(ctx context.Context, dspa *dspav1.DataSciencePipelinesApplication, req ctrl.Request, log logr.Logger) { err := r.Get(ctx, req.NamespacedName, dspa) if err != nil { log.Info(err.Error()) } } -func (r *DSPAReconciler) PublishMetrics(dspa *dspav1alpha1.DataSciencePipelinesApplication, metricsMap map[metav1.Condition]*prometheus.GaugeVec) { +func (r *DSPAReconciler) PublishMetrics(dspa *dspav1.DataSciencePipelinesApplication, metricsMap map[metav1.Condition]*prometheus.GaugeVec) { log := r.Log.WithValues("namespace", dspa.Namespace).WithValues("dspa_name", dspa.Name) log.Info("Publishing Ready Metrics") @@ -516,10 +539,8 @@ func (r *DSPAReconciler) PublishMetrics(dspa *dspav1alpha1.DataSciencePipelinesA } } -func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication) dspav1alpha1.ComponentStatus { +func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1.DataSciencePipelinesApplication) dspav1.ComponentStatus { log := r.Log.WithValues("namespace", dspa.Namespace).WithValues("dspa_name", dspa.Name) - log.Info("Updating components endpoints") - mlmdProxyResourceName := fmt.Sprintf("ds-pipeline-md-%s", dspa.Name) apiServerResourceName := fmt.Sprintf("ds-pipeline-%s", dspa.Name) @@ -543,7 +564,7 @@ func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.D log.Error(err, "Error retrieving API Server Route endpoint") } - mlmdProxyComponent := &dspav1alpha1.ComponentDetailStatus{} + mlmdProxyComponent := &dspav1.ComponentDetailStatus{} if mlmdProxyUrl != "" { mlmdProxyComponent.Url = mlmdProxyUrl } @@ -551,7 +572,7 @@ func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.D mlmdProxyComponent.ExternalUrl = mlmdProxyExternalUrl } - apiServerComponent := &dspav1alpha1.ComponentDetailStatus{} + apiServerComponent := &dspav1.ComponentDetailStatus{} if apiServerUrl != "" { apiServerComponent.Url = apiServerUrl } @@ -559,7 +580,7 @@ func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.D apiServerComponent.ExternalUrl = apiServerExternalUrl } - status := dspav1alpha1.ComponentStatus{} + status := dspav1.ComponentStatus{} if mlmdProxyComponent.Url != "" && mlmdProxyComponent.ExternalUrl != "" { status.MLMDProxy = *mlmdProxyComponent } @@ -572,7 +593,7 @@ func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.D // SetupWithManager sets up the controller with the Manager. func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). - For(&dspav1alpha1.DataSciencePipelinesApplication{}). + For(&dspav1.DataSciencePipelinesApplication{}). Owns(&appsv1.Deployment{}). Owns(&corev1.Secret{}). Owns(&corev1.ConfigMap{}). 
@@ -585,7 +606,6 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { // Watch for global ca bundle, if one is added to this namespace // we need to reconcile on all the dspa's in this namespace // so they may mount this cert in the appropriate containers - WatchesRawSource(source.Kind(mgr.GetCache(), &corev1.ConfigMap{}), handler.EnqueueRequestsFromMapFunc(func(ctx context.Context, o client.Object) []reconcile.Request { cm := o.(*corev1.ConfigMap) @@ -596,9 +616,7 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { return nil } - log.V(1).Info(fmt.Sprintf("Reconcile event triggered by change in event on Global CA Bundle: %s", cm.Name)) - - var dspaList dspav1alpha1.DataSciencePipelinesApplicationList + var dspaList dspav1.DataSciencePipelinesApplicationList if err := r.List(ctx, &dspaList, client.InNamespace(thisNamespace)); err != nil { log.Error(err, "unable to list DSPA's when attempting to handle Global CA Bundle event.") return nil @@ -606,11 +624,18 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { var reconcileRequests []reconcile.Request for _, dspa := range dspaList.Items { - namespacedName := types.NamespacedName{ - Name: dspa.Name, - Namespace: thisNamespace, + // Only enqueue DSPAs with a supported DSP version + if util.DSPAWithSupportedDSPVersion(&dspa) { + namespacedName := types.NamespacedName{ + Name: dspa.Name, + Namespace: thisNamespace, + } + reconcileRequests = append(reconcileRequests, reconcile.Request{NamespacedName: namespacedName}) } - reconcileRequests = append(reconcileRequests, reconcile.Request{NamespacedName: namespacedName}) + } + + if len(reconcileRequests) > 0 { + log.V(1).Info(fmt.Sprintf("Reconcile event triggered by change on Global CA Bundle: %s", cm.Name)) } return reconcileRequests @@ -626,6 +651,12 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { return nil } + // Silently skip reconcile on this pod if the resource is owned + // by an unsupported DSPA + if !util.HasSupportedDSPVersionLabel(pod.Labels) { + return nil + } + dspaName, hasDSPALabel := pod.Labels["dspa"] if !hasDSPALabel { msg := fmt.Sprintf("Pod with data-science-pipelines label encountered, but is missing dspa "+ @@ -650,9 +681,6 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { if secret.Annotations["openshift.io/owning-component"] != "service-ca" { return nil } - - log.V(1).Info(fmt.Sprintf("Reconcile event triggered by change on Secret owned by service-ca: %s", secret.Name)) - serviceName := secret.Annotations["service.beta.openshift.io/originating-service-name"] namespacedServiceName := types.NamespacedName{ @@ -666,17 +694,22 @@ func (r *DSPAReconciler) SetupWithManager(mgr ctrl.Manager) error { if err != nil { return nil } - dspaName, hasDSPALabel := service.Labels["dspa"] if !hasDSPALabel { return nil } - log.V(1).Info(fmt.Sprintf("Reconcile event triggered by [Service: %s] ", serviceName)) + // Silently skip reconcile on this service if the resource is owned + // by an unsupported DSPA + if !util.HasSupportedDSPVersionLabel(service.Labels) { + return nil + } + namespacedDspaName := types.NamespacedName{ Name: dspaName, Namespace: secret.Namespace, } + log.V(1).Info(fmt.Sprintf("Reconcile event triggered by change on Secret: %s owned by service-ca: %s", secret.Name, serviceName)) return []reconcile.Request{{NamespacedName: namespacedDspaName}} }), ). 
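The watch handlers above gate every enqueue on `util.DSPAWithSupportedDSPVersion` and `util.HasSupportedDSPVersionLabel`, whose implementations are not part of this diff. The following is a minimal sketch of what they might look like, assuming the supported-version set is just "v2" (v1 support is being removed here) and that `config.DSPVersionk8sLabel` holds the key of the dsp-version label applied by the transformers in `Apply`:

```go
package util

import (
	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
)

// supportedDSPVersions is an assumption for this sketch; with v1 support
// removed, "v2" is presumably the only version the operator still reconciles.
var supportedDSPVersions = map[string]bool{"v2": true}

// DSPAWithSupportedDSPVersion reports whether the DSPA's spec requests a
// DSP version this operator still reconciles.
func DSPAWithSupportedDSPVersion(dspa *dspav1.DataSciencePipelinesApplication) bool {
	return supportedDSPVersions[dspa.Spec.DSPVersion]
}

// HasSupportedDSPVersionLabel reports whether a resource carries the
// dsp-version label with a supported value, letting watch handlers drop
// events for resources owned by unsupported DSPAs.
func HasSupportedDSPVersionLabel(labels map[string]string) bool {
	return supportedDSPVersions[labels[config.DSPVersionk8sLabel]]
}
```

Gating at enqueue time keeps unsupported DSPAs visible in the cluster (their conditions report UnsupportedVersion) while ensuring the operator never mutates resources it no longer manages.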
diff --git a/controllers/dspipeline_fake_controller.go b/controllers/dspipeline_fake_controller.go index f431131c..09857b67 100644 --- a/controllers/dspipeline_fake_controller.go +++ b/controllers/dspipeline_fake_controller.go @@ -25,7 +25,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/client/fake" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" buildv1 "github.com/openshift/api/build/v1" imagev1 "github.com/openshift/api/image/v1" routev1 "github.com/openshift/api/route/v1" @@ -45,7 +45,7 @@ func NewFakeController() *DSPAReconciler { utilruntime.Must(buildv1.Install(FakeScheme)) utilruntime.Must(imagev1.Install(FakeScheme)) utilruntime.Must(routev1.Install(FakeScheme)) - utilruntime.Must(dspav1alpha1.AddToScheme(FakeScheme)) + utilruntime.Must(dspav1.AddToScheme(FakeScheme)) FakeBuilder.WithScheme(FakeScheme) // Build Fake Client diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 4bba6dc0..2908a01d 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -21,6 +21,7 @@ import ( "context" "encoding/base64" "encoding/json" + "errors" "fmt" "math/rand" "os" @@ -31,7 +32,7 @@ import ( "github.com/go-logr/logr" mf "github.com/manifestival/manifestival" - dspa "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspa "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/config" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/util" routev1 "github.com/openshift/api/route/v1" @@ -42,7 +43,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" ) -const MlmdIsRequiredInV2Msg = "MLMD explicitly disabled in DSPA, but is a required component for V2 Pipelines" +const MlmdIsRequired = "MLMD explicitly disabled in DSPA, but is a required component for DSP" type DSPAParams struct { IncludeOwnerReference bool @@ -121,35 +122,6 @@ type ObjectStorageConnection struct { ExternalRouteURL string } -func (p *DSPAParams) UsingV2Pipelines(dsp *dspa.DataSciencePipelinesApplication) bool { - return dsp.Spec.DSPVersion == "v2" -} - -func (p *DSPAParams) UsingV1Pipelines(dsp *dspa.DataSciencePipelinesApplication) bool { - return !p.UsingV2Pipelines(dsp) -} - -func (p *DSPAParams) UsingArgoEngineDriver(dsp *dspa.DataSciencePipelinesApplication) bool { - return p.UsingV2Pipelines(dsp) -} - -func (p *DSPAParams) UsingTektonEngineDriver(dsp *dspa.DataSciencePipelinesApplication) bool { - return !p.UsingV2Pipelines(dsp) -} - -// TODO: rework to dynamically retrieve image based soley on 'pipelinesVersion' and 'engineDriver' rather than -// explicitly set images -func (p *DSPAParams) GetImageForComponent(dsp *dspa.DataSciencePipelinesApplication, v1Image, v2ArgoImage, v2TektonImage string) string { - if p.UsingV2Pipelines(dsp) { - if p.UsingArgoEngineDriver(dsp) { - return v2ArgoImage - } else { - return v2TektonImage - } - } - return v1Image -} - // UsingExternalDB will return true if an external Database is specified in the CR, otherwise false. 
func (p *DSPAParams) UsingExternalDB(dsp *dspa.DataSciencePipelinesApplication) bool { if dsp.Spec.Database != nil && dsp.Spec.Database.ExternalDB != nil { @@ -371,8 +343,8 @@ func (p *DSPAParams) SetupDBParams(ctx context.Context, dsp *dspa.DataSciencePip } if p.DBConnection.Password == "" { - return fmt.Errorf(fmt.Sprintf("DB Password from secret [%s] for key [%s] was not successfully retrieved, "+ - "ensure that the secret with this key exist.", p.DBConnection.CredentialsSecret.Name, p.DBConnection.CredentialsSecret.Key)) + return fmt.Errorf("db password from secret [%s] for key [%s] was not successfully retrieved, ensure that the secret with this key exists", + p.DBConnection.CredentialsSecret.Name, p.DBConnection.CredentialsSecret.Key) } return nil } @@ -499,47 +471,43 @@ func (p *DSPAParams) SetupObjectParams(ctx context.Context, dsp *dspa.DataScienc p.ObjectStorageConnection.Endpoint = endpoint if p.ObjectStorageConnection.AccessKeyID == "" || p.ObjectStorageConnection.SecretAccessKey == "" { - return fmt.Errorf(fmt.Sprintf("Object Storage Password from secret [%s] for keys [%s, %s] was not "+ - "successfully retrieved, ensure that the secret with this key exist.", + return fmt.Errorf("object storage password from secret [%s] for keys [%s, %s] was not "+ + "successfully retrieved, ensure that the secret with these keys exists", p.ObjectStorageConnection.CredentialsSecret.SecretName, - p.ObjectStorageConnection.CredentialsSecret.AccessKey, p.ObjectStorageConnection.CredentialsSecret.SecretKey)) + p.ObjectStorageConnection.CredentialsSecret.AccessKey, p.ObjectStorageConnection.CredentialsSecret.SecretKey) } return nil } func (p *DSPAParams) SetupMLMD(dsp *dspa.DataSciencePipelinesApplication, log logr.Logger) error { - if p.UsingV2Pipelines(dsp) { - if p.MLMD == nil { - log.Info("MLMD not specified, but is a required component for V2 Pipelines. Including MLMD with default specs.") - p.MLMD = &dspa.MLMD{ - Deploy: true, - Envoy: &dspa.Envoy{ - DeployRoute: true, - }, - } - } else if !p.MLMD.Deploy { - return fmt.Errorf(MlmdIsRequiredInV2Msg) + if p.MLMD == nil { + log.Info("MLMD not specified, but is a required component for Pipelines. 
Including MLMD with default specs.") + p.MLMD = &dspa.MLMD{ + Deploy: true, + Envoy: &dspa.Envoy{ + DeployRoute: true, + }, } + } else if !p.MLMD.Deploy { + return errors.New(MlmdIsRequired) } - if p.MLMD != nil { - MlmdEnvoyImagePath := p.GetImageForComponent(dsp, config.MlmdEnvoyImagePath, config.MlmdEnvoyImagePathV2Argo, config.MlmdEnvoyImagePathV2Tekton) - MlmdGRPCImagePath := p.GetImageForComponent(dsp, config.MlmdGRPCImagePath, config.MlmdGRPCImagePathV2Argo, config.MlmdGRPCImagePathV2Tekton) + if p.MLMD != nil { if p.MLMD.Envoy == nil { p.MLMD.Envoy = &dspa.Envoy{ - Image: config.GetStringConfigWithDefault(MlmdEnvoyImagePath, config.DefaultImageValue), + Image: config.GetStringConfigWithDefault(config.MlmdEnvoyImagePath, config.DefaultImageValue), DeployRoute: true, } } if p.MLMD.GRPC == nil { p.MLMD.GRPC = &dspa.GRPC{ - Image: config.GetStringConfigWithDefault(MlmdGRPCImagePath, config.DefaultImageValue), + Image: config.GetStringConfigWithDefault(config.MlmdGRPCImagePath, config.DefaultImageValue), } } - mlmdEnvoyImageFromConfig := config.GetStringConfigWithDefault(MlmdEnvoyImagePath, config.DefaultImageValue) - mlmdGRPCImageFromConfig := config.GetStringConfigWithDefault(MlmdGRPCImagePath, config.DefaultImageValue) + mlmdEnvoyImageFromConfig := config.GetStringConfigWithDefault(config.MlmdEnvoyImagePath, config.DefaultImageValue) + mlmdGRPCImageFromConfig := config.GetStringConfigWithDefault(config.MlmdGRPCImagePath, config.DefaultImageValue) setStringDefault(mlmdEnvoyImageFromConfig, &p.MLMD.Envoy.Image) setStringDefault(mlmdGRPCImageFromConfig, &p.MLMD.GRPC.Image) @@ -548,20 +516,6 @@ func (p *DSPAParams) SetupMLMD(dsp *dspa.DataSciencePipelinesApplication, log lo setResourcesDefault(config.MlmdGRPCResourceRequirements, &p.MLMD.GRPC.Resources) setStringDefault(config.MlmdGrpcPort, &p.MLMD.GRPC.Port) - - if p.UsingV1Pipelines(dsp) { - MlmdWriterImagePath := config.MlmdWriterImagePath - - if p.MLMD.Writer == nil { - p.MLMD.Writer = &dspa.Writer{ - Image: config.GetStringConfigWithDefault(MlmdWriterImagePath, config.DefaultImageValue), - } - } - - mlmdWriterImageFromConfig := config.GetStringConfigWithDefault(MlmdWriterImagePath, config.DefaultImageValue) - setStringDefault(mlmdWriterImageFromConfig, &p.MLMD.Writer.Image) - setResourcesDefault(config.MlmdWriterResourceRequirements, &p.MLMD.Writer.Resources) - } } return nil } @@ -628,48 +582,25 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip p.PodToPodTLS = false dspTrustedCAConfigMapKey := config.CustomDSPTrustedCAConfigMapKey - // PodToPodTLS is only used in v2 dsp - if p.UsingV2Pipelines(dsp) { - // by default it's enabled when omitted - if dsp.Spec.PodToPodTLS == nil { - p.PodToPodTLS = true - } else { - p.PodToPodTLS = *dsp.Spec.PodToPodTLS - } + // by default it's enabled when omitted + if dsp.Spec.PodToPodTLS == nil { + p.PodToPodTLS = true + } else { + p.PodToPodTLS = *dsp.Spec.PodToPodTLS } log := loggr.WithValues("namespace", p.Namespace).WithValues("dspa_name", p.Name) if p.APIServer != nil { - APIServerImagePath := p.GetImageForComponent(dsp, config.APIServerImagePath, config.APIServerImagePathV2Argo, config.APIServerImagePathV2Tekton) - APIServerArtifactImagePath := config.APIServerArtifactImagePath - APIServerCacheImagePath := config.APIServerCacheImagePath - APIServerMoveResultsImagePath := config.APIServerMoveResultsImagePath - APIServerArgoLauncherImagePath := config.APIServerArgoLauncherImagePathV2Argo - APIServerArgoDriverImagePath := config.APIServerArgoDriverImagePathV2Argo 
- - serverImageFromConfig := config.GetStringConfigWithDefault(APIServerImagePath, config.DefaultImageValue) - artifactImageFromConfig := config.GetStringConfigWithDefault(APIServerArtifactImagePath, config.DefaultImageValue) - cacheImageFromConfig := config.GetStringConfigWithDefault(APIServerCacheImagePath, config.DefaultImageValue) - moveResultsImageFromConfig := config.GetStringConfigWithDefault(APIServerMoveResultsImagePath, config.DefaultImageValue) - argoLauncherImageFromConfig := config.GetStringConfigWithDefault(APIServerArgoLauncherImagePath, config.DefaultImageValue) - argoDriverImageFromConfig := config.GetStringConfigWithDefault(APIServerArgoDriverImagePath, config.DefaultImageValue) + serverImageFromConfig := config.GetStringConfigWithDefault(config.APIServerImagePath, config.DefaultImageValue) + argoLauncherImageFromConfig := config.GetStringConfigWithDefault(config.LauncherImagePath, config.DefaultImageValue) + argoDriverImageFromConfig := config.GetStringConfigWithDefault(config.DriverImagePath, config.DefaultImageValue) setStringDefault(serverImageFromConfig, &p.APIServer.Image) - setStringDefault(artifactImageFromConfig, &p.APIServer.ArtifactImage) - setStringDefault(cacheImageFromConfig, &p.APIServer.CacheImage) - setStringDefault(moveResultsImageFromConfig, &p.APIServer.MoveResultsImage) setStringDefault(argoLauncherImageFromConfig, &p.APIServer.ArgoLauncherImage) setStringDefault(argoDriverImageFromConfig, &p.APIServer.ArgoDriverImage) setResourcesDefault(config.APIServerResourceRequirements, &p.APIServer.Resources) - if p.APIServer.ArtifactScriptConfigMap == nil { - p.APIServer.ArtifactScriptConfigMap = &dspa.ScriptConfigMap{ - Name: config.ArtifactScriptConfigMapNamePrefix + dsp.Name, - Key: config.ArtifactScriptConfigMapKey, - } - } - if p.APIServer.CustomServerConfig == nil { p.APIServer.CustomServerConfig = &dspa.ScriptConfigMap{ Name: config.CustomServerConfigMapNamePrefix + dsp.Name, @@ -859,14 +790,12 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip } if p.PersistenceAgent != nil { - PersistenceAgentImagePath := p.GetImageForComponent(dsp, config.PersistenceAgentImagePath, config.PersistenceAgentImagePathV2Argo, config.PersistenceAgentImagePathV2Tekton) - persistenceAgentImageFromConfig := config.GetStringConfigWithDefault(PersistenceAgentImagePath, config.DefaultImageValue) + persistenceAgentImageFromConfig := config.GetStringConfigWithDefault(config.PersistenceAgentImagePath, config.DefaultImageValue) setStringDefault(persistenceAgentImageFromConfig, &p.PersistenceAgent.Image) setResourcesDefault(config.PersistenceAgentResourceRequirements, &p.PersistenceAgent.Resources) } if p.ScheduledWorkflow != nil { - ScheduledWorkflowImagePath := p.GetImageForComponent(dsp, config.ScheduledWorkflowImagePath, config.ScheduledWorkflowImagePathV2Argo, config.ScheduledWorkflowImagePathV2Tekton) - scheduledWorkflowImageFromConfig := config.GetStringConfigWithDefault(ScheduledWorkflowImagePath, config.DefaultImageValue) + scheduledWorkflowImageFromConfig := config.GetStringConfigWithDefault(config.ScheduledWorkflowImagePath, config.DefaultImageValue) setStringDefault(scheduledWorkflowImageFromConfig, &p.ScheduledWorkflow.Image) setResourcesDefault(config.ScheduledWorkflowResourceRequirements, &p.ScheduledWorkflow.Resources) } @@ -882,10 +811,7 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip // If user did not specify WorkflowController if dsp.Spec.WorkflowController == nil { dsp.Spec.WorkflowController = 
&dspa.WorkflowController{ - Deploy: false, - } - if p.UsingV2Pipelines(dsp) { - dsp.Spec.WorkflowController.Deploy = true + Deploy: true, } } p.WorkflowController = dsp.Spec.WorkflowController.DeepCopy() diff --git a/controllers/dspipeline_params_test.go b/controllers/dspipeline_params_test.go index 55609d89..d7907fb0 100644 --- a/controllers/dspipeline_params_test.go +++ b/controllers/dspipeline_params_test.go @@ -21,7 +21,7 @@ import ( "encoding/json" "testing" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/opendatahub-io/data-science-pipelines-operator/controllers/testutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -47,13 +47,13 @@ func TestExtractParams_CABundle(t *testing.T) { tt := []struct { msg string - dsp *dspav1alpha1.DataSciencePipelinesApplication + dsp *dspav1.DataSciencePipelinesApplication CustomCABundleRootMountPath string CustomSSLCertDir *string PiplinesCABundleMountPath string SSLCertFileEnv string APICustomPemCerts [][]byte - CustomCABundle *dspav1alpha1.CABundle + CustomCABundle *dspav1.CABundle ConfigMapPreReq []*v1.ConfigMap errorMsg string }{ @@ -84,7 +84,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("bundle-contents")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "testcaname", Namespace: "testnamespace"}, @@ -99,7 +99,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("odh-bundle-contents")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "odh-trusted-ca-bundle", Namespace: "testnamespace"}, @@ -114,7 +114,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("odh-bundle-contents-2")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "odh-trusted-ca-bundle", Namespace: "testnamespace"}, @@ -144,7 +144,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("odh-bundle-contents"), []byte("bundle-contents")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: 
"dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "odh-trusted-ca-bundle", Namespace: "testnamespace"}, @@ -163,7 +163,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("odh-bundle-contents"), []byte("bundle-contents"), []byte("dummycontent")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "odh-trusted-ca-bundle", Namespace: "testnamespace"}, @@ -184,7 +184,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("service-ca-contents")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "openshift-service-ca.crt", Namespace: "testnamespace"}, @@ -199,7 +199,7 @@ func TestExtractParams_CABundle(t *testing.T) { CustomSSLCertDir: strPtr("/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"), PiplinesCABundleMountPath: "/dsp-custom-certs/dsp-ca.crt", APICustomPemCerts: [][]byte{[]byte("service-ca-contents"), []byte("dummycontent")}, - CustomCABundle: &dspav1alpha1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, + CustomCABundle: &dspav1.CABundle{ConfigMapKey: "dsp-ca.crt", ConfigMapName: "dsp-trusted-ca-testdspa"}, ConfigMapPreReq: []*v1.ConfigMap{ { ObjectMeta: metav1.ObjectMeta{Name: "openshift-service-ca.crt", Namespace: "testnamespace"}, diff --git a/controllers/mlmd.go b/controllers/mlmd.go index c6c639c8..a70d272a 100644 --- a/controllers/mlmd.go +++ b/controllers/mlmd.go @@ -18,7 +18,7 @@ package controllers import ( "context" "errors" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" ) const ( @@ -28,7 +28,7 @@ const ( mlmdGrpcService = "grpc-service" ) -func (r *DSPAReconciler) ReconcileMLMD(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication, +func (r *DSPAReconciler) ReconcileMLMD(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication, params *DSPAParams) error { log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name) @@ -40,55 +40,34 @@ func (r *DSPAReconciler) ReconcileMLMD(ctx context.Context, dsp *dspav1alpha1.Da log.Info("Applying ML-Metadata (MLMD) Resources") - if params.UsingV1Pipelines(dsp) { - if dsp.Spec.MLMD != nil { - err := r.ApplyDir(dsp, params, mlmdTemplatesDir) - if err != nil { - return err - } - - if dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { - err = r.Apply(dsp, params, mlmdEnvoyRoute) - if err != nil { - return err - } - } - } + // We need to create the service first so OpenShift creates the certificate that we'll use later. 
+ err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/"+mlmdGrpcService) + if err != nil { + return err + } - err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/v1") - if err != nil { - return err - } - } else { - // We need to create the service first so OpenShift creates the certificate that we'll use later. - err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/"+mlmdGrpcService) + if params.PodToPodTLS { + var certificatesExist bool + certificatesExist, err = params.LoadMlmdCertificates(ctx, r.Client) if err != nil { return err } - if params.PodToPodTLS { - var certificatesExist bool - certificatesExist, err = params.LoadMlmdCertificates(ctx, r.Client) - if err != nil { - return err - } - - if !certificatesExist { - return errors.New("secret containing the certificate for MLMD gRPC Server was not created yet") - } + if !certificatesExist { + return errors.New("secret containing the certificate for MLMD gRPC Server was not created yet") } + } + + err = r.ApplyDir(dsp, params, mlmdTemplatesDir) + if err != nil { + return err + } - err = r.ApplyDir(dsp, params, mlmdTemplatesDir) + if dsp.Spec.MLMD == nil || dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { + err = r.Apply(dsp, params, mlmdEnvoyRoute) if err != nil { return err } - - if dsp.Spec.MLMD == nil || dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { - err = r.Apply(dsp, params, mlmdEnvoyRoute) - if err != nil { - return err - } - } } log.Info("Finished applying MLMD Resources") diff --git a/controllers/mlmd_test.go b/controllers/mlmd_test.go index fc96c990..b4260ab2 100644 --- a/controllers/mlmd_test.go +++ b/controllers/mlmd_test.go @@ -22,13 +22,13 @@ import ( v1 "github.com/openshift/api/route/v1" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" appsv1 "k8s.io/api/apps/v1" ) -func TestDeployMLMDV1(t *testing.T) { +func TestDeployMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -37,117 +37,23 @@ func TestDeployMLMDV1(t *testing.T) { expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - 
assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route now exists - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) -} - -func TestDeployMLMDV2(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ + APIServer: &dspav1.APIServer{}, + MLMD: &dspav1.MLMD{ Deploy: true, }, - Database: &dspav1alpha1.Database{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -217,122 +123,28 @@ func TestDeployMLMDV2(t *testing.T) { assert.Nil(t, err) } -func TestDontDeployMLMDV1(t *testing.T) { +func TestDontDeployMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" // Construct DSPA Spec with MLMD Not Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: false, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: 
&dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources stil doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func TestDontDeployMLMDV2(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - - // Construct DSPA Spec with MLMD Not Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ + APIServer: &dspav1.APIServer{}, + MLMD: &dspav1.MLMD{ Deploy: false, }, - Database: &dspav1alpha1.Database{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -347,10 +159,10 @@ func TestDontDeployMLMDV2(t *testing.T) { // Create Context, Fake Controller and Params ctx, params, reconciler := CreateNewTestObjects() err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) 
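+ // ExtractParams is expected to fail here: MLMD is now required and cannot be disabled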
- assert.EqualError(t, err, MlmdIsRequiredInV2Msg) + assert.EqualError(t, err, MlmdIsRequired) } -func TestDefaultDeployBehaviorMLMDV1(t *testing.T) { +func TestDefaultDeployBehaviorMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -359,111 +171,20 @@ func TestDefaultDeployBehaviorMLMDV1(t *testing.T) { expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" // Construct DSPA Spec with MLMD Spec not defined - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func TestDefaultDeployBehaviorMLMDV2(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - 
expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" - - // Construct DSPA Spec with MLMD Spec not defined - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: &dspav1alpha1.APIServer{}, - Database: &dspav1alpha1.Database{ + APIServer: &dspav1.APIServer{}, + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -533,104 +254,33 @@ func TestDefaultDeployBehaviorMLMDV2(t *testing.T) { assert.Nil(t, err) } -func TestDeployEnvoyRouteV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - Envoy: &dspav1alpha1.Envoy{ - DeployRoute: true, - }, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route now exists - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) -} - -func TestDeployEnvoyRouteV2(t *testing.T) { +func TestDeployEnvoyRoute(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: 
&dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ + APIServer: &dspav1.APIServer{}, + MLMD: &dspav1.MLMD{ Deploy: true, - Envoy: &dspav1alpha1.Envoy{ + Envoy: &dspav1.Envoy{ DeployRoute: true, }, }, - Database: &dspav1alpha1.Database{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -676,104 +326,33 @@ func TestDeployEnvoyRouteV2(t *testing.T) { assert.Nil(t, err) } -func TestDontDeployEnvoyRouteV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - Envoy: &dspav1alpha1.Envoy{ - DeployRoute: false, - }, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func TestDontDeployEnvoyRouteV2(t *testing.T) { +func TestDontDeployEnvoyRoute(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ + APIServer: &dspav1.APIServer{}, + MLMD: &dspav1.MLMD{ Deploy: true, - Envoy: &dspav1alpha1.Envoy{ + Envoy: &dspav1.Envoy{ DeployRoute: false, 
}, }, - Database: &dspav1alpha1.Database{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -823,34 +402,34 @@ func boolPtr(b bool) *bool { return &b } -func TestGetEndpointsMLMDV2(t *testing.T) { +func TestGetEndpointsMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ DSPVersion: "v2", PodToPodTLS: boolPtr(false), - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ + APIServer: &dspav1.APIServer{}, + MLMD: &dspav1.MLMD{ Deploy: true, - Envoy: &dspav1alpha1.Envoy{ + Envoy: &dspav1.Envoy{ Image: "someimage", DeployRoute: true, }, }, - Database: &dspav1alpha1.Database{ + Database: &dspav1.Database{ DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ + MariaDB: &dspav1.MariaDB{ Deploy: true, }, }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "someimage", }, @@ -895,7 +474,7 @@ func TestGetEndpointsMLMDV2(t *testing.T) { require.True(t, created) require.Nil(t, err) - dspa_created := &dspav1alpha1.DataSciencePipelinesApplication{} + dspa_created := &dspav1.DataSciencePipelinesApplication{} created, err = reconciler.IsResourceCreated(ctx, dspa, testDSPAName, testNamespace) require.NotNil(t, dspa_created.Status.Components.MLMDProxy.Url) require.NotNil(t, dspa_created.Status.Components.MLMDProxy.ExternalUrl) diff --git a/controllers/mlpipeline_ui.go b/controllers/mlpipeline_ui.go index 16a87a9c..4cec9dcb 100644 --- a/controllers/mlpipeline_ui.go +++ b/controllers/mlpipeline_ui.go @@ -17,12 +17,12 @@ limitations under the License. 
 package controllers
 
 import (
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 )
 
 var mlPipelineUITemplatesDir = "mlpipelines-ui"
 
-func (r *DSPAReconciler) ReconcileUI(dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcileUI(dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
diff --git a/controllers/mlpipeline_ui_test.go b/controllers/mlpipeline_ui_test.go
index aa1a6478..097351ba 100644
--- a/controllers/mlpipeline_ui_test.go
+++ b/controllers/mlpipeline_ui_test.go
@@ -20,7 +20,7 @@ package controllers
 import (
 	"testing"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	appsv1 "k8s.io/api/apps/v1"
 )
@@ -31,21 +31,21 @@ func TestDeployUI(t *testing.T) {
 	expectedUIName := "ds-pipeline-ui-testdspa"
 
 	// Construct DSPASpec with deployed UI
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			MlPipelineUI: &dspav1alpha1.MlPipelineUI{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			MlPipelineUI: &dspav1.MlPipelineUI{
 				Deploy: true,
 				Image:  "test-image:latest",
 			},
-			Database: &dspav1alpha1.Database{
-				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+			Database: &dspav1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -85,21 +85,21 @@ func TestDontDeployUI(t *testing.T) {
 	expectedUIName := "ds-pipeline-ui-testdspa"
 
 	// Construct DSPASpec with non-deployed UI
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			MlPipelineUI: &dspav1alpha1.MlPipelineUI{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			MlPipelineUI: &dspav1.MlPipelineUI{
 				Deploy: false,
 				Image:  "uiimage",
 			},
-			Database: &dspav1alpha1.Database{
-				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+			Database: &dspav1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -139,17 +139,17 @@ func TestDefaultDeployBehaviorUI(t *testing.T) {
 	expectedUIName := "ds-pipeline-ui-testdspa"
 
 	// Construct DSPASpec without UI defined
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
diff --git a/controllers/persistence_agent.go b/controllers/persistence_agent.go
index 13ac0e57..5d5d7de7 100644
--- a/controllers/persistence_agent.go
+++ b/controllers/persistence_agent.go
@@ -17,14 +17,14 @@ limitations under the License.
 
 package controllers
 
 import (
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 )
 
 var persistenceAgentTemplatesDir = "persistence-agent"
 
 const persistenceAgentDefaultResourceNamePrefix = "ds-pipeline-persistenceagent-"
 
-func (r *DSPAReconciler) ReconcilePersistenceAgent(dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcilePersistenceAgent(dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
diff --git a/controllers/persistence_agent_test.go b/controllers/persistence_agent_test.go
index 8854d233..9e8e839d 100644
--- a/controllers/persistence_agent_test.go
+++ b/controllers/persistence_agent_test.go
@@ -21,7 +21,7 @@ import (
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 	"testing"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	appsv1 "k8s.io/api/apps/v1"
 )
@@ -32,20 +32,20 @@ func TestDeployPersistenceAgent(t *testing.T) {
 	expectedPersistenceAgentName := persistenceAgentDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with deployed PersistenceAgent
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			PersistenceAgent: &dspav1alpha1.PersistenceAgent{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			PersistenceAgent: &dspav1.PersistenceAgent{
 				Deploy: true,
 			},
-			Database: &dspav1alpha1.Database{
-				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+			Database: &dspav1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -90,9 +90,9 @@ func TestDontDeployPersistenceAgent(t *testing.T) {
 	expectedPersistenceAgentName := persistenceAgentDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with non-deployed PersistenceAgent
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			PersistenceAgent: &dspav1alpha1.PersistenceAgent{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			PersistenceAgent: &dspav1.PersistenceAgent{
 				Deploy: false,
 			},
 		},
diff --git a/controllers/scheduled_workflow.go b/controllers/scheduled_workflow.go
index 6df7d238..ce0a06d1 100644
--- a/controllers/scheduled_workflow.go
+++ b/controllers/scheduled_workflow.go
@@ -17,14 +17,14 @@ limitations under the License.
 
 package controllers
 
 import (
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 )
 
 var scheduledWorkflowTemplatesDir = "scheduled-workflow"
 
 const scheduledWorkflowDefaultResourceNamePrefix = "ds-pipeline-scheduledworkflow-"
 
-func (r *DSPAReconciler) ReconcileScheduledWorkflow(dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcileScheduledWorkflow(dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
diff --git a/controllers/scheduled_workflow_test.go b/controllers/scheduled_workflow_test.go
index e12b48db..bffedf46 100644
--- a/controllers/scheduled_workflow_test.go
+++ b/controllers/scheduled_workflow_test.go
@@ -21,7 +21,7 @@ import (
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 	"testing"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	appsv1 "k8s.io/api/apps/v1"
 )
@@ -32,20 +32,20 @@ func TestDeployScheduledWorkflow(t *testing.T) {
 	expectedScheduledWorkflowName := scheduledWorkflowDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with deployed ScheduledWorkflow
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ScheduledWorkflow: &dspav1.ScheduledWorkflow{
 				Deploy: true,
 			},
-			Database: &dspav1alpha1.Database{
-				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+			Database: &dspav1.Database{
+				DisableHealthCheck: false,
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -90,9 +90,9 @@ func TestDontDeployScheduledWorkflow(t *testing.T) {
 	expectedScheduledWorkflowName := scheduledWorkflowDefaultResourceNamePrefix + testDSPAName
 
 	// Construct DSPASpec with non-deployed ScheduledWorkflow
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ScheduledWorkflow: &dspav1.ScheduledWorkflow{
 				Deploy: false,
 			},
 		},
diff --git a/controllers/storage.go b/controllers/storage.go
index 4ea6fa75..d7607a81 100644
--- a/controllers/storage.go
+++ b/controllers/storage.go
@@ -29,7 +29,7 @@ import (
 	"github.com/go-logr/logr"
 	"github.com/minio/minio-go/v7"
 	"github.com/minio/minio-go/v7/pkg/credentials"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/config"
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers/util"
 )
@@ -172,7 +172,7 @@ var ConnectAndQueryObjStore = func(
 	return true, nil
 }
 
-func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) (bool, error) {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
 
 	if params.ObjectStorageHealthCheckDisabled(dsp) {
@@ -220,7 +220,7 @@ func (r *DSPAReconciler) isObjectStorageAccessible(ctx context.Context, dsp *dsp
 }
 
 // ReconcileStorage will set up Storage Connection.
-func (r *DSPAReconciler) ReconcileStorage(ctx context.Context, dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcileStorage(ctx context.Context, dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
@@ -259,7 +259,7 @@ func (r *DSPAReconciler) ReconcileStorage(ctx context.Context, dsp *dspav1alpha1
 	// Update the CR with the state of minio to accurately portray
 	// desired state.
 	if !storageSpecified {
-		dsp.Spec.ObjectStorage = &dspav1alpha1.ObjectStorage{}
+		dsp.Spec.ObjectStorage = &dspav1.ObjectStorage{}
 		dsp.Spec.ObjectStorage.Minio = params.Minio.DeepCopy()
 		dsp.Spec.ObjectStorage.Minio.Deploy = true
 		if err := r.Update(ctx, dsp); err != nil {
diff --git a/controllers/storage_test.go b/controllers/storage_test.go
index 5b45bf4a..5e7a72a0 100644
--- a/controllers/storage_test.go
+++ b/controllers/storage_test.go
@@ -26,7 +26,7 @@ import (
 
 	"github.com/go-logr/logr"
 	"github.com/minio/minio-go/v7/pkg/credentials"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	routev1 "github.com/openshift/api/route/v1"
 
 	"github.com/stretchr/testify/assert"
@@ -40,25 +40,25 @@ func TestDeployStorage(t *testing.T) {
 	expectedStorageName := "minio-testdspa"
 
 	// Construct DSPA Spec with deployed Minio Object Storage
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: true,
 					Image:  "someimage",
-					Resources: &dspav1alpha1.ResourceRequirements{ //TODO: fails without this block. Why?
-						Requests: &dspav1alpha1.Resources{
+					Resources: &dspav1.ResourceRequirements{ //TODO: fails without this block. Why?
+						Requests: &dspav1.Resources{
 							CPU:    resource.MustParse("250m"),
 							Memory: resource.MustParse("500Mi"),
 						},
-						Limits: &dspav1alpha1.Resources{
+						Limits: &dspav1.Resources{
 							CPU:    resource.MustParse("500m"),
 							Memory: resource.MustParse("1Gi"),
 						},
@@ -106,26 +106,26 @@ func TestDeployStorageWithExternalRouteEnabled(t *testing.T) {
 	expectedStorageName := "minio-testdspa"
 
 	// Construct DSPA Spec with deployed Minio Object Storage
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 				EnableExternalRoute: true,
-				Minio: &dspav1alpha1.Minio{
+				Minio: &dspav1.Minio{
 					Deploy: true,
 					Image:  "someimage",
-					Resources: &dspav1alpha1.ResourceRequirements{ //TODO: fails without this block. Why?
-						Requests: &dspav1alpha1.Resources{
+					Resources: &dspav1.ResourceRequirements{ //TODO: fails without this block. Why?
+						Requests: &dspav1.Resources{
 							CPU:    resource.MustParse("250m"),
 							Memory: resource.MustParse("500Mi"),
 						},
-						Limits: &dspav1alpha1.Resources{
+						Limits: &dspav1.Resources{
 							CPU:    resource.MustParse("500m"),
 							Memory: resource.MustParse("1Gi"),
 						},
@@ -179,17 +179,17 @@ func TestDontDeployStorage(t *testing.T) {
 	expectedStorageName := "minio-testdspa"
 
 	// Construct DSPA Spec with non-deployed Minio Object Storage
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
-				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+			ObjectStorage: &dspav1.ObjectStorage{
+				DisableHealthCheck: false,
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -229,15 +229,15 @@ func TestDefaultDeployBehaviorStorage(t *testing.T) {
 	expectedStorageName := "minio-testdspa"
 
 	// Construct DSPA Spec with deployed Minio Object Storage
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			Database: &dspav1alpha1.Database{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 			},
 		},
@@ -279,9 +279,9 @@ func TestIsDatabaseAccessibleTrue(t *testing.T) {
 	testDSPAName := "testdspa"
 
 	// Minimal Inputs
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 			},
 		},
@@ -317,9 +317,9 @@ func TestIsDatabaseNotAccessibleFalse(t *testing.T) {
 	testDSPAName := "testdspa"
 
 	// Minimal Inputs
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 			},
 		},
@@ -355,9 +355,9 @@ func TestDisabledHealthCheckReturnsTrue(t *testing.T) {
 	testDSPAName := "testdspa"
 
 	// Minimal Inputs
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: true,
 			},
 		},
@@ -395,9 +395,9 @@ func TestIsDatabaseAccessibleBadAccessKey(t *testing.T) {
 	testDSPAName := "testdspa"
 
 	// Minimal Inputs
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 			},
 		},
@@ -433,9 +433,9 @@ func TestIsDatabaseAccessibleBadSecretKey(t *testing.T) {
 	testDSPAName := "testdspa"
 
 	// Minimal Inputs
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
 			},
 		},
diff --git a/controllers/suite_test.go b/controllers/suite_test.go
index 61359555..d1c83113 100644
--- a/controllers/suite_test.go
+++ b/controllers/suite_test.go
@@ -21,7 +21,7 @@ package controllers
 import (
 	"context"
 	"github.com/go-logr/logr"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	buildv1 "github.com/openshift/api/build/v1"
 	imagev1 "github.com/openshift/api/image/v1"
 	routev1 "github.com/openshift/api/route/v1"
@@ -107,7 +107,7 @@ func (s *ControllerSuite) SetupSuite() {
 	utilruntime.Must(buildv1.AddToScheme(scheme.Scheme))
 	utilruntime.Must(imagev1.AddToScheme(scheme.Scheme))
 	utilruntime.Must(routev1.AddToScheme(scheme.Scheme))
-	utilruntime.Must(dspav1alpha1.AddToScheme(scheme.Scheme))
+	utilruntime.Must(dspav1.AddToScheme(scheme.Scheme))
 
 	//+kubebuilder:scaffold:scheme
diff --git a/controllers/testdata/README.md b/controllers/testdata/README.md
index 41bbe9fb..c3ea391f 100644
--- a/controllers/testdata/README.md
+++ b/controllers/testdata/README.md
@@ -14,7 +14,7 @@ Let's say we want a new test case `case_4` that tests that this `DSPA`...
```yaml # dspa.yaml -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 +apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 kind: DataSciencePipelinesApplication metadata: name: testdsp4 #ma diff --git a/controllers/testdata/declarative/case_0/config.yaml b/controllers/testdata/declarative/case_0/config.yaml index f2f536c0..025fe51e 100644 --- a/controllers/testdata/declarative/case_0/config.yaml +++ b/controllers/testdata/declarative/case_0/config.yaml @@ -1,15 +1,18 @@ # When a minimal DSPA is deployed Images: ApiServer: api-server:test0 - Artifact: artifact-manager:test0 PersistentAgent: persistenceagent:test0 ScheduledWorkflow: scheduledworkflow:test0 - Cache: ubi-minimal:test0 - MoveResultsImage: busybox:test0 - MlPipelineUI: frontend:test0 + MlmdEnvoy: mlmdenvoy:test0 + MlmdGRPC: mlmdgrpc:test0 + ArgoExecImage: argoexec:test0 + ArgoWorkflowController: argowfcontroller:test0 + LauncherImage: launcherimage:test0 + DriverImage: driverimage:test0 + OAuthProxy: oauth-proxy:test0 MariaDB: mariadb:test0 + MlPipelineUI: frontend:test0 Minio: minio:test0 - OAuthProxy: oauth-proxy:test0 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_0/deploy/cr.yaml b/controllers/testdata/declarative/case_0/deploy/cr.yaml index 85322533..ce1985ba 100644 --- a/controllers/testdata/declarative/case_0/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_0/deploy/cr.yaml @@ -1,8 +1,9 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 +apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 kind: DataSciencePipelinesApplication metadata: name: testdsp0 spec: + podToPodTLS: false apiServer: enableSamplePipeline: true argoLauncherImage: argolauncherimage:test0 @@ -12,3 +13,5 @@ spec: image: minio:test0 mlpipelineUI: image: frontend:test0 + mlmd: + deploy: true diff --git a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml index 8aeb7c67..4a952983 100644 --- a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-testdsp0 namespace: default labels: + dsp-version: v2 app: ds-pipeline-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -16,6 +17,7 @@ spec: template: metadata: labels: + dsp-version: v2 app: ds-pipeline-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -37,10 +39,6 @@ spec: value: "mariadb-testdsp0.default.svc.cluster.local" - name: DBCONFIG_PORT value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "120" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -75,6 +73,10 @@ spec: value: "argolauncherimage:test0" - name: V2_DRIVER_IMAGE value: "argodriverimage:test0" + - name: METADATA_GRPC_SERVICE_SERVICE_HOST + value: "ds-pipeline-metadata-grpc-testdsp0.default.svc.cluster.local" + - name: METADATA_GRPC_SERVICE_SERVICE_PORT + value: "8080" - name: ML_PIPELINE_SERVICE_HOST value: ds-pipeline-testdsp0.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT_GRPC @@ -82,36 +84,22 @@ spec: - name: SIGNED_URL_EXPIRY_TIME_SECONDS value: "60" - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "ubi-minimal:test0" - - 
name: MOVERESULTS_IMAGE - value: "busybox:test0" - - name: ARTIFACT_IMAGE - value: "artifact-manager:test0" - - name: ARTIFACT_BUCKET + value: Workflow + - name: DB_DRIVER_NAME + value: mysql + - name: DBCONFIG_MYSQLCONFIG_USER value: "mlpipeline" - - name: ARTIFACT_ENDPOINT - value: "http://minio-testdsp0.default.svc.cluster.local:9000" - - name: ARTIFACT_SCRIPT + - name: DBCONFIG_MYSQLCONFIG_PASSWORD valueFrom: - configMapKeyRef: - key: "artifact_script" - name: "ds-pipeline-artifact-script-testdsp0" - - name: ARCHIVE_LOGS - value: "false" - - name: TRACK_ARTIFACTS - value: "true" - - name: STRIP_EOF - value: "true" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "true" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "true" - - name: TERMINATE_STATUS - value: "Cancelled" + secretKeyRef: + key: "password" + name: "ds-pipeline-db-testdsp0" + - name: DBCONFIG_MYSQLCONFIG_DBNAME + value: "mlpipeline" + - name: DBCONFIG_MYSQLCONFIG_HOST + value: "mariadb-testdsp0.default.svc.cluster.local" + - name: DBCONFIG_MYSQLCONFIG_PORT + value: "3306" image: api-server:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index cc7a02b1..00000000 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp0 - namespace: default - labels: - app: ds-pipeline-testdsp0 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml index 5f81ff7f..20b920a7 100644 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml @@ -4,19 +4,25 @@ metadata: name: ds-pipeline-server-config-testdsp0 namespace: default labels: + dsp-version: v2 app: ds-pipeline-testdsp0 component: data-science-pipelines data: config.json: | - - { - "DBConfig": { - "DriverName": "mysql", - "ConMaxLifeTime": "120s", - "ExtraParams": {"tls":"false"} - }, - "ObjectStoreConfig": { - "PipelinePath": "pipelines" - }, - "InitConnectionTimeout": "6m" - } + { + "DBConfig": { + "MySQLConfig": { + "ExtraParams": {"tls":"false"}, + "GroupConcatMaxLen": "4194304" + }, + "PostgreSQLConfig": {}, + "ConMaxLifeTime": "120s" + }, + "ObjectStoreConfig": { + "PipelinePath": "pipelines" + }, + "DBDriverName": "mysql", + "ARCHIVE_CONFIG_LOG_FILE_NAME": "main.log", + "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", + "InitConnectionTimeout": "6m" + } diff --git a/controllers/testdata/declarative/case_0/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/mariadb_deployment.yaml index 20aad1f7..3a2c4adf 100644 --- a/controllers/testdata/declarative/case_0/expected/created/mariadb_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/mariadb_deployment.yaml @@ -5,6 +5,7 @@ metadata: name: mariadb-testdsp0 namespace: default labels: + dsp-version: v2 app: mariadb-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -19,6 +20,7 @@ spec: template: metadata: labels: + dsp-version: v2 app: mariadb-testdsp0 component: data-science-pipelines dspa: testdsp0 diff --git a/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml index 30b58463..d0029e97 100644 --- a/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-persistenceagent-testdsp0 namespace: default labels: + dsp-version: v2 app: ds-pipeline-persistenceagent-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -18,6 +19,7 @@ spec: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: + dsp-version: v2 app: ds-pipeline-persistenceagent-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -35,7 +37,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - value: PipelineRun + value: Workflow image: persistenceagent:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent @@ -73,4 +75,17 @@ spec: limits: cpu: 250m memory: 1Gi + volumeMounts: + 
- mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token + name: persistenceagent-sa-token + subPath: ds-pipeline-persistenceagent-testdsp0-token serviceAccountName: ds-pipeline-persistenceagent-testdsp0 + volumes: + - name: persistenceagent-sa-token + projected: + defaultMode: 420 + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 3600 + path: ds-pipeline-persistenceagent-testdsp0-token diff --git a/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml index e25e252a..9c476cf6 100644 --- a/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-scheduledworkflow-testdsp0 namespace: default labels: + dsp-version: v2 app: ds-pipeline-scheduledworkflow-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -18,6 +19,7 @@ spec: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: + dsp-version: v2 app: ds-pipeline-scheduledworkflow-testdsp0 component: data-science-pipelines dspa: testdsp0 @@ -28,8 +30,6 @@ spec: value: "default" - name: CRON_SCHEDULE_TIMEZONE value: "UTC" - - name: EXECUTIONTYPE - value: PipelineRun image: scheduledworkflow:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-scheduledworkflow diff --git a/controllers/testdata/declarative/case_1/config.yaml b/controllers/testdata/declarative/case_1/config.yaml index 16d0e588..9700d9c5 100644 --- a/controllers/testdata/declarative/case_1/config.yaml +++ b/controllers/testdata/declarative/case_1/config.yaml @@ -1,10 +1,14 @@ -# When a DSPA is deployed with all deployments disabled Images: ApiServer: api-server:test1 - Artifact: artifact-manager:test1 PersistentAgent: persistenceagent:test1 ScheduledWorkflow: scheduledworkflow:test1 - Cache: ubi-minimal:test1 - MoveResultsImage: busybox:test1 - MariaDB: mariadb:test1 + MlmdEnvoy: mlmdenvoy:test1 + MlmdGRPC: mlmdgrpc:test1 + ArgoExecImage: argoexec:test1 + ArgoWorkflowController: argowfcontroller:test1 + LauncherImage: launcherimage:test1 + DriverImage: driverimage:test1 OAuthProxy: oauth-proxy:test1 + MariaDB: mariadb:test1 + MlPipelineUI: frontend:test1 + Minio: minio:test1 diff --git a/controllers/testdata/declarative/case_1/deploy/cr.yaml b/controllers/testdata/declarative/case_1/deploy/cr.yaml index d733e479..94bd8e7c 100644 --- a/controllers/testdata/declarative/case_1/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_1/deploy/cr.yaml @@ -1,8 +1,9 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 +apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 kind: DataSciencePipelinesApplication metadata: name: testdsp1 spec: + podToPodTLS: false apiServer: deploy: false persistenceAgent: @@ -19,3 +20,8 @@ spec: minio: deploy: false image: minio:test0 + mlmd: + # currently the logic requires mlmd + # probably should make this consistent + # with other components + deploy: true diff --git a/controllers/testdata/declarative/case_2/config.yaml b/controllers/testdata/declarative/case_2/config.yaml index 62adc5e2..6fd2da23 100644 --- a/controllers/testdata/declarative/case_2/config.yaml +++ b/controllers/testdata/declarative/case_2/config.yaml @@ -1,13 +1,18 @@ -# When a complete
DSPA is deployed with (defaults specified) Images: ApiServer: api-server:test2 - Artifact: artifact-manager:test2 PersistentAgent: persistenceagent:test2 ScheduledWorkflow: scheduledworkflow:test2 - Cache: ubi-minimal:test2 - MoveResultsImage: busybox:test2 - MariaDB: mariadb:test2 + MlmdEnvoy: mlmdenvoy:test2 + MlmdGRPC: mlmdgrpc:test2 + ArgoExecImage: argoexec:test2 + ArgoWorkflowController: argowfcontroller:test2 + LauncherImage: launcherimage:test2 + DriverImage: driverimage:test2 OAuthProxy: oauth-proxy:test2 + MariaDB: mariadb:test2 + MlPipelineUI: frontend:test2 + Minio: minio:test2 + DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_2/deploy/cr.yaml b/controllers/testdata/declarative/case_2/deploy/cr.yaml index 4db5793f..be4e900e 100644 --- a/controllers/testdata/declarative/case_2/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_2/deploy/cr.yaml @@ -1,29 +1,18 @@ # Test: # Various DSPA fields, resources, apiserver fields, custom script -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 +apiVersion: datasciencepipelinesapplications.opendatahub.io/v1 kind: DataSciencePipelinesApplication metadata: name: testdsp2 spec: + podToPodTLS: false apiServer: deploy: true image: api-server:test2 - applyTektonCustomResource: true - archiveLogs: false - artifactImage: artifact-manager:test2 - cacheImage: ubi-minimal:test2 - moveResultsImage: busybox:test2 argoLauncherImage: argolauncherimage:test2 argoDriverImage: argodriverimage:test2 - injectDefaultScript: true - stripEOF: true enableOauth: true enableSamplePipeline: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 125 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true customServerConfigMap: name: testserverconfigmapdspa2 key: testserverconfigmapkeydspa2 @@ -94,3 +83,25 @@ spec: limits: cpu: "2535m" memory: "5Gi" + mlmd: + deploy: true + grpc: + resources: + requests: + cpu: "1334m" + memory: "1Gi" + limits: + cpu: "2535m" + memory: "5Gi" + image: mlmdgrpc:test2 + port: "8080" + envoy: + resources: + requests: + cpu: "1334m" + memory: "1Gi" + limits: + cpu: "2535m" + memory: "5Gi" + image: mlmdenvoy:test2 + deployRoute: false diff --git a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml index ce8956c8..66c7e833 100644 --- a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-testdsp2 namespace: default labels: + dsp-version: v2 app: ds-pipeline-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -16,6 +17,7 @@ spec: template: metadata: labels: + dsp-version: v2 app: ds-pipeline-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -37,10 +39,6 @@ spec: value: "mariadb-testdsp2.default.svc.cluster.local" - name: DBCONFIG_PORT value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "125" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -75,6 +73,10 @@ spec: value: "argolauncherimage:test2" - name: V2_DRIVER_IMAGE value: "argodriverimage:test2" + - name: METADATA_GRPC_SERVICE_SERVICE_HOST + value: "ds-pipeline-metadata-grpc-testdsp2.default.svc.cluster.local" + - 
name: METADATA_GRPC_SERVICE_SERVICE_PORT + value: "8080" - name: ML_PIPELINE_SERVICE_HOST value: ds-pipeline-testdsp2.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT_GRPC @@ -82,36 +84,22 @@ spec: - name: SIGNED_URL_EXPIRY_TIME_SECONDS value: "60" - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "ubi-minimal:test2" - - name: MOVERESULTS_IMAGE - value: "busybox:test2" - - name: ARTIFACT_IMAGE - value: "artifact-manager:test2" - - name: ARTIFACT_BUCKET - value: "mlpipeline" - - name: ARTIFACT_ENDPOINT - value: "http://minio-testdsp2.default.svc.cluster.local:9000" - - name: ARTIFACT_SCRIPT + value: Workflow + - name: DB_DRIVER_NAME + value: mysql + - name: DBCONFIG_MYSQLCONFIG_USER + value: "testuser" + - name: DBCONFIG_MYSQLCONFIG_PASSWORD valueFrom: - configMapKeyRef: - key: "artifact_script" - name: "ds-pipeline-artifact-script-testdsp2" - - name: ARCHIVE_LOGS - value: "false" - - name: TRACK_ARTIFACTS - value: "true" - - name: STRIP_EOF - value: "true" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "true" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "true" - - name: TERMINATE_STATUS - value: "Cancelled" + secretKeyRef: + key: "password" + name: "ds-pipeline-db-testdsp2" + - name: DBCONFIG_MYSQLCONFIG_DBNAME + value: "randomDBName" + - name: DBCONFIG_MYSQLCONFIG_HOST + value: "mariadb-testdsp2.default.svc.cluster.local" + - name: DBCONFIG_MYSQLCONFIG_PORT + value: "3306" image: api-server:test2 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index ad0f15ce..00000000 --- a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp2 - namespace: default - labels: - app: ds-pipeline-testdsp2 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_2/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mariadb_deployment.yaml index d122f60d..9d64497c 100644 --- a/controllers/testdata/declarative/case_2/expected/created/mariadb_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/mariadb_deployment.yaml @@ -5,6 +5,7 @@ metadata: name: mariadb-testdsp2 namespace: default labels: + dsp-version: v2 app: mariadb-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -19,6 +20,7 @@ spec: template: metadata: labels: + dsp-version: v2 app: mariadb-testdsp2 component: data-science-pipelines dspa: testdsp2 diff --git a/controllers/testdata/declarative/case_2/expected/created/minio_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/minio_deployment.yaml index c3150158..7387e072 100644 --- a/controllers/testdata/declarative/case_2/expected/created/minio_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/minio_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: minio-testdsp2 namespace: default labels: + dsp-version: v2 app: minio-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -18,6 +19,7 @@ spec: template: metadata: labels: + dsp-version: v2 app: minio-testdsp2 component: data-science-pipelines dspa: testdsp2 diff --git a/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml new file mode 100644 index 00000000..91040713 --- /dev/null +++ b/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml @@ -0,0 +1,79 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ds-pipeline-metadata-envoy-testdsp2 + namespace: default + labels: + dsp-version: v2 + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 +spec: + replicas: 1 + selector: + matchLabels: + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 + template: + metadata: + annotations: + sidecar.istio.io/inject: "false" + labels: + dsp-version: v2 + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 + spec: + containers: + - image: mlmdenvoy:test2 + name: container + command: ["/usr/local/bin/envoy"] + args: [ + "-c", + "/etc/envoy.yaml" + ] + ports: + - containerPort: 9090 + name: md-envoy + protocol: TCP + - containerPort: 9901 + name: envoy-admin + protocol: TCP + livenessProbe: + initialDelaySeconds: 30 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 + readinessProbe: + initialDelaySeconds: 3 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 + resources: + requests: + cpu: 1334m + memory: 1Gi + limits: + cpu: 2535m + memory: 5Gi + volumeMounts: + - mountPath: /etc/envoy.yaml + name: envoy-config + subPath: envoy.yaml + serviceAccountName: ds-pipeline-metadata-envoy-testdsp2 + volumes: + - name: envoy-config + configMap: + name: 
ds-pipeline-metadata-envoy-config-testdsp2 + defaultMode: 420 + - name: proxy-tls + secret: + secretName: ds-pipelines-envoy-proxy-tls-testdsp2 + defaultMode: 420 + - name: proxy-tls-upstream + configMap: + name: dsp-trusted-ca-testdsp2 + defaultMode: 420 diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml similarity index 67% rename from controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml rename to controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml index 48b8e395..7b81e773 100644 --- a/controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml @@ -1,29 +1,31 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-metadata-grpc-testdsp5 + name: ds-pipeline-metadata-grpc-testdsp2 namespace: default labels: - app: ds-pipeline-metadata-grpc-testdsp5 + dsp-version: v2 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 spec: replicas: 1 selector: matchLabels: - app: ds-pipeline-metadata-grpc-testdsp5 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 template: metadata: labels: - app: ds-pipeline-metadata-grpc-testdsp5 + dsp-version: v2 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 spec: containers: - args: - - --grpc_port=1337 + - --grpc_port=8080 - --mysql_config_database=$(MYSQL_DATABASE) - --mysql_config_host=$(MYSQL_HOST) - --mysql_config_port=$(MYSQL_PORT) @@ -34,22 +36,22 @@ spec: - /bin/metadata_store_server env: - name: DBCONFIG_USER - value: "mlpipeline" + value: "testuser" - name: DBCONFIG_PASSWORD valueFrom: secretKeyRef: key: "password" - name: "ds-pipeline-db-testdsp5" + name: "ds-pipeline-db-testdsp2" - name: MYSQL_DATABASE - value: "mlpipeline" + value: "randomDBName" - name: MYSQL_HOST - value: mariadb-testdsp5.default.svc.cluster.local + value: "mariadb-testdsp2.default.svc.cluster.local" - name: MYSQL_PORT value: "3306" - image: metadata-grpc:test5 + image: mlmdgrpc:test2 name: container ports: - - containerPort: 1337 + - containerPort: 8080 name: grpc-api protocol: TCP livenessProbe: @@ -65,10 +67,10 @@ spec: port: grpc-api timeoutSeconds: 2 resources: - limits: - cpu: 100m - memory: 256Mi requests: - cpu: 100m - memory: 256Mi - serviceAccountName: ds-pipeline-metadata-grpc-testdsp5 + cpu: 1334m + memory: 1Gi + limits: + cpu: 2535m + memory: 5Gi + serviceAccountName: ds-pipeline-metadata-grpc-testdsp2 diff --git a/controllers/testdata/declarative/case_2/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mlpipelines-ui_deployment.yaml index 53b19793..37148477 100644 --- a/controllers/testdata/declarative/case_2/expected/created/mlpipelines-ui_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/mlpipelines-ui_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-ui-testdsp2 namespace: default labels: + dsp-version: v2 app: ds-pipeline-ui-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -18,6 +19,7 @@ spec: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: + dsp-version: v2 app: ds-pipeline-ui-testdsp2 component: data-science-pipelines dspa: testdsp2 
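For reference, the `dspav1alpha1` to `dspav1` rename above is mechanical, but it changes every typed client of the CRD. Below is a minimal, hypothetical sketch (not part of this patch) of what downstream Go code could look like against the promoted `v1` API, assuming a standard controller-runtime client; the `sample` and `default` names are illustrative only.

```go
package main

import (
	"context"

	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes/scheme"
	ctrl "sigs.k8s.io/controller-runtime"
	"sigs.k8s.io/controller-runtime/pkg/client"
)

func main() {
	// Register the v1 types with the scheme, mirroring what
	// controllers/suite_test.go now does with dspav1.AddToScheme.
	if err := dspav1.AddToScheme(scheme.Scheme); err != nil {
		panic(err)
	}

	// Build a controller-runtime client from the local kubeconfig.
	c, err := client.New(ctrl.GetConfigOrDie(), client.Options{Scheme: scheme.Scheme})
	if err != nil {
		panic(err)
	}

	// Same shape the updated unit tests construct, but created on a cluster.
	// Name, namespace, and image values here are placeholders.
	dspa := &dspav1.DataSciencePipelinesApplication{
		ObjectMeta: metav1.ObjectMeta{Name: "sample", Namespace: "default"},
		Spec: dspav1.DSPASpec{
			ObjectStorage: &dspav1.ObjectStorage{
				Minio: &dspav1.Minio{Deploy: true, Image: "someimage"},
			},
		},
	}
	if err := c.Create(context.Background(), dspa); err != nil {
		panic(err)
	}
}
```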
diff --git a/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml index 6db4d107..500a95d5 100644 --- a/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-persistenceagent-testdsp2 namespace: default labels: + dsp-version: v2 app: ds-pipeline-persistenceagent-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -18,6 +19,7 @@ spec: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: + dsp-version: v2 app: ds-pipeline-persistenceagent-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -35,7 +37,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - value: PipelineRun + value: Workflow image: persistenceagent:test2 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent @@ -73,4 +75,17 @@ spec: limits: cpu: 2524m memory: 5Gi + volumeMounts: + - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token + name: persistenceagent-sa-token + subPath: ds-pipeline-persistenceagent-testdsp2-token serviceAccountName: ds-pipeline-persistenceagent-testdsp2 + volumes: + - name: persistenceagent-sa-token + projected: + defaultMode: 420 + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 3600 + path: ds-pipeline-persistenceagent-testdsp2-token diff --git a/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl b/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl index 99fa4fd1..aca6d21e 100644 --- a/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl +++ b/controllers/testdata/declarative/case_2/expected/created/sample-config.yaml.tmpl @@ -4,6 +4,7 @@ metadata: name: sample-config-testdsp2 namespace: default labels: + dsp-version: v2 app: ds-pipeline-testdsp2 component: data-science-pipelines data: diff --git a/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl b/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl deleted file mode 100644 index 9d19fa58..00000000 --- a/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl +++ /dev/null @@ -1,554 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-pipeline-testdsp2 - namespace: default - labels: - app: ds-pipeline-testdsp2 - component: data-science-pipelines -data: - iris-pipeline-compiled.yaml: |- - apiVersion: tekton.dev/v1beta1 - kind: PipelineRun - metadata: - name: iris-pipeline - annotations: - tekton.dev/output_artifacts: '{"data-prep": [{"key": "artifacts/$PIPELINERUN/data-prep/X_test.tgz", - "name": "data-prep-X_test", "path": "/tmp/outputs/X_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/X_train.tgz", - "name": "data-prep-X_train", "path": "/tmp/outputs/X_train/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_test.tgz", - "name": "data-prep-y_test", "path": "/tmp/outputs/y_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_train.tgz", - "name": "data-prep-y_train", "path": "/tmp/outputs/y_train/data"}], "evaluate-model": - [{"key": "artifacts/$PIPELINERUN/evaluate-model/mlpipeline-metrics.tgz", "name": - "mlpipeline-metrics", "path": 
"/tmp/outputs/mlpipeline_metrics/data"}], "train-model": - [{"key": "artifacts/$PIPELINERUN/train-model/model.tgz", "name": "train-model-model", - "path": "/tmp/outputs/model/data"}]}' - tekton.dev/input_artifacts: '{"evaluate-model": [{"name": "data-prep-X_test", - "parent_task": "data-prep"}, {"name": "data-prep-y_test", "parent_task": "data-prep"}, - {"name": "train-model-model", "parent_task": "train-model"}], "train-model": - [{"name": "data-prep-X_train", "parent_task": "data-prep"}, {"name": "data-prep-y_train", - "parent_task": "data-prep"}], "validate-model": [{"name": "train-model-model", - "parent_task": "train-model"}]}' - tekton.dev/artifact_bucket: mlpipeline - tekton.dev/artifact_endpoint: ${MINIO_SERVICE_SERVICE_HOST}:${MINIO_SERVICE_SERVICE_PORT} - tekton.dev/artifact_endpoint_scheme: http:// - tekton.dev/artifact_items: '{"data-prep": [["X_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test"], - ["X_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train"], - ["y_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test"], - ["y_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train"]], - "evaluate-model": [["mlpipeline-metrics", "/tmp/outputs/mlpipeline_metrics/data"]], - "train-model": [["model", "$(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model"]], - "validate-model": []}' - sidecar.istio.io/inject: "false" - tekton.dev/template: '' - pipelines.kubeflow.org/big_data_passing_format: $(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME - pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "iris-model", "name": - "model_obc", "optional": true, "type": "String"}], "name": "Iris Pipeline"}' - labels: - pipelines.kubeflow.org/pipelinename: '' - pipelines.kubeflow.org/generation: '' - spec: - params: - - name: model_obc - value: iris-model - pipelineSpec: - params: - - name: model_obc - default: iris-model - tasks: - - name: data-prep - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train - - --X-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test - - --y-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train - - --y-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def data_prep( - X_train_file, - X_test_file, - y_train_file, - y_test_file, - ): - import pickle - - import pandas as pd - - from sklearn import datasets - from sklearn.model_selection import train_test_split - - def get_iris_data(): - iris = datasets.load_iris() - data = pd.DataFrame( - { - "sepalLength": iris.data[:, 0], - "sepalWidth": iris.data[:, 1], - "petalLength": iris.data[:, 2], - "petalWidth": iris.data[:, 3], - 
"species": iris.target, - } - ) - - print("Initial Dataset:") - print(data.head()) - - return data - - def create_training_set(dataset, test_size = 0.3): - # Features - X = dataset[["sepalLength", "sepalWidth", "petalLength", "petalWidth"]] - # Labels - y = dataset["species"] - - # Split dataset into training set and test set - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=test_size, random_state=11 - ) - - return X_train, X_test, y_train, y_test - - def save_pickle(object_file, target_object): - with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - dataset = get_iris_data() - X_train, X_test, y_train, y_test = create_training_set(dataset) - - save_pickle(X_train_file, X_train) - save_pickle(X_test_file, X_test) - save_pickle(y_train_file, y_train) - save_pickle(y_test_file, y_test) - - import argparse - _parser = argparse.ArgumentParser(prog='Data prep', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--X-test", dest="X_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = data_prep(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! 
awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train $(results.X-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test $(results.X-test.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train $(results.y-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test $(results.y-test.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - results: - - name: X-test - description: /tmp/outputs/X_test/data - - name: X-train - description: /tmp/outputs/X_train/data - - name: taskrun-name - - name: y-test - description: /tmp/outputs/y_test/data - - name: y-train - description: /tmp/outputs/y_train/data - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Data prep", "outputs": - [{"name": "X_train"}, {"name": "X_test"}, {"name": "y_train"}, {"name": - "y_test"}], "version": "Data prep@sha256=5aeb512900f57983c9f643ec30ddb4ccc66490a443269b51ce0a67d57cb373b0"}' - workspaces: - - name: data-prep - workspaces: - - name: data-prep - workspace: iris-pipeline - - name: train-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_train - - --y-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_train - - --model - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def train_model( - X_train_file, - y_train_file, - model_file, - ): - import pickle - - from sklearn.ensemble import RandomForestClassifier - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - def save_pickle(object_file, target_object): - with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - def train_iris(X_train, y_train): - model = RandomForestClassifier(n_estimators=100) - model.fit(X_train, y_train) - - return model - - X_train = load_pickle(X_train_file) - y_train = load_pickle(y_train_file) - - model = train_iris(X_train, y_train) - - save_pickle(model_file, model) - - import argparse - _parser = argparse.ArgumentParser(prog='Train model', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=_make_parent_dirs_and_return_path, required=True, 
default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = train_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model $(results.model.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - results: - - name: model - description: /tmp/outputs/model/data - - name: taskrun-name - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Train model", - "outputs": [{"name": "model"}], "version": "Train model@sha256=cb1fbd399ee5849dcdfaafced23a0496cae1d5861795062b22512b766ec418ce"}' - workspaces: - - name: train-model - workspaces: - - name: train-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - name: evaluate-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_test - - --y-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_test - - --model - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - - --mlpipeline-metrics - - /tmp/outputs/mlpipeline_metrics/data - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def evaluate_model( - X_test_file, - y_test_file, - model_file, - mlpipeline_metrics_file, - ): - import json - import pickle - - from sklearn.metrics import accuracy_score - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - X_test = load_pickle(X_test_file) - y_test = load_pickle(y_test_file) - model = load_pickle(model_file) - - y_pred = model.predict(X_test) - - accuracy_score_metric = accuracy_score(y_test, y_pred) - print(f"Accuracy: 
{accuracy_score_metric}") - - metrics = { - "metrics": [ - { - "name": "accuracy-score", - "numberValue": accuracy_score_metric, - "format": "PERCENTAGE", - }, - ] - } - - with open(mlpipeline_metrics_file, "w") as f: - json.dump(metrics, f) - - import argparse - _parser = argparse.ArgumentParser(prog='Evaluate model', description='') - _parser.add_argument("--X-test", dest="X_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--mlpipeline-metrics", dest="mlpipeline_metrics_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = evaluate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - - name: train-model-trname - stepTemplate: - volumeMounts: - - name: mlpipeline-metrics - mountPath: /tmp/outputs/mlpipeline_metrics - volumes: - - name: mlpipeline-metrics - emptyDir: {} - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Evaluate model", - "outputs": [{"name": "mlpipeline_metrics", "type": "Metrics"}], "version": - "Evaluate model@sha256=f398e65faecc6f5a4ba11a2c78d8a2274e3ede205a0e199c8bb615531a3abd4a"}' - workspaces: - - name: evaluate-model - workspaces: - - name: evaluate-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - train-model - - name: validate-model - params: - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --model - - $(workspaces.validate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def validate_model(model_file): - import pickle - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - model = load_pickle(model_file) - - input_values = [[5, 3, 1.6, 0.2]] - - print(f"Performing test prediction on {input_values}") - result = model.predict(input_values) - - print(f"Response: {result}") - - import argparse - _parser = argparse.ArgumentParser(prog='Validate model', description='') - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = validate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: train-model-trname - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Validate model", - "outputs": [], "version": 
"Validate model@sha256=53d18ff94fc8f164e7d8455f2c87fa7fdac17e7502502aaa52012e4247d089ee"}' - workspaces: - - name: validate-model - workspaces: - - name: validate-model - workspace: iris-pipeline - runAfter: - - train-model - workspaces: - - name: iris-pipeline - workspaces: - - name: iris-pipeline - volumeClaimTemplate: - spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 2Gi diff --git a/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml index f49e4341..b855c94d 100644 --- a/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml @@ -4,6 +4,7 @@ metadata: name: ds-pipeline-scheduledworkflow-testdsp2 namespace: default labels: + dsp-version: v2 app: ds-pipeline-scheduledworkflow-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -18,6 +19,7 @@ spec: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: + dsp-version: v2 app: ds-pipeline-scheduledworkflow-testdsp2 component: data-science-pipelines dspa: testdsp2 @@ -28,8 +30,6 @@ spec: value: "default" - name: CRON_SCHEDULE_TIMEZONE value: "EST" - - name: EXECUTIONTYPE - value: PipelineRun image: scheduledworkflow:test2 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-scheduledworkflow diff --git a/controllers/testdata/declarative/case_3/config.yaml b/controllers/testdata/declarative/case_3/config.yaml index 8da75da6..f5f9ce75 100644 --- a/controllers/testdata/declarative/case_3/config.yaml +++ b/controllers/testdata/declarative/case_3/config.yaml @@ -1,13 +1,17 @@ -# When a DSPA with a custom db/storage secret, and custom artifact script is deployed. 
 Images:
   ApiServer: api-server:test3
-  Artifact: artifact-manager:test3
   PersistentAgent: persistenceagent:test3
   ScheduledWorkflow: scheduledworkflow:test3
-  Cache: ubi-minimal:test3
-  MoveResultsImage: busybox:test3
-  MariaDB: mariadb:test3
+  MlmdEnvoy: mlmdenvoy:test3
+  MlmdGRPC: mlmdgrpc:test3
+  ArgoExecImage: argoexec:test3
+  ArgoWorkflowController: argowfcontroller:test3
+  LauncherImage: launcherimage:test3
+  DriverImage: driverimage:test3
   OAuthProxy: oauth-proxy:test3
+  MariaDB: mariadb:test3
+  MlPipelineUI: frontend:test3
+  Minio: minio:test3
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_3/deploy/02_cr.yaml b/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
index 0d7beca3..263372f9 100644
--- a/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
+++ b/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
@@ -1,19 +1,19 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp3
 spec:
+  podToPodTLS: false
   apiServer:
     enableOauth: true
     enableSamplePipeline: false
-    artifactScriptConfigMap:
-      name: doesnotexist
-      key: "somekey"
     deploy: true
     argoLauncherImage: argolauncherimage:test3
     argoDriverImage: argodriverimage:test3
   persistenceAgent: {}
   scheduledWorkflow: {}
+  mlmd:
+    deploy: true
   database:
     externalDB:
       host: testdbhost3
diff --git a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
index 875a79f7..8646f1ea 100644
--- a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-testdsp3
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-testdsp3
     component: data-science-pipelines
     dspa: testdsp3
@@ -16,6 +17,7 @@ spec:
   template:
     metadata:
       labels:
+        dsp-version: v2
         app: ds-pipeline-testdsp3
         component: data-science-pipelines
         dspa: testdsp3
@@ -37,10 +39,6 @@ spec:
             value: "testdbhost3"
           - name: DBCONFIG_PORT
             value: "test3"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "120"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -75,6 +73,10 @@ spec:
             value: "argolauncherimage:test3"
           - name: V2_DRIVER_IMAGE
             value: "argodriverimage:test3"
+          - name: METADATA_GRPC_SERVICE_SERVICE_HOST
+            value: "ds-pipeline-metadata-grpc-testdsp3.default.svc.cluster.local"
+          - name: METADATA_GRPC_SERVICE_SERVICE_PORT
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp3.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
@@ -82,36 +84,22 @@ spec:
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: ubi-minimal:test3
-          - name: MOVERESULTS_IMAGE
-            value: busybox:test3
-          - name: ARTIFACT_IMAGE
-            value: artifact-manager:test3
-          - name: ARTIFACT_BUCKET
-            value: "testbucket3"
-          - name: ARTIFACT_ENDPOINT
-            value: "https://teststoragehost3:80"
-          - name: ARTIFACT_SCRIPT
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
+            value: "testuser3"
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "somekey"
-                name: "doesnotexist"
"doesnotexist" - - name: ARCHIVE_LOGS - value: "false" - - name: TRACK_ARTIFACTS - value: "true" - - name: STRIP_EOF - value: "true" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "true" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "true" - - name: TERMINATE_STATUS - value: "Cancelled" + secretKeyRef: + key: "testpswkey3" + name: "testdbpswsecretname3" + - name: DBCONFIG_MYSQLCONFIG_DBNAME + value: "testdbname3" + - name: DBCONFIG_MYSQLCONFIG_HOST + value: "testdbhost3" + - name: DBCONFIG_MYSQLCONFIG_PORT + value: "test3" image: api-server:test3 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml deleted file mode 100644 index 3c41745d..00000000 --- a/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml +++ /dev/null @@ -1,39 +0,0 @@ -apiVersion: v1 -data: - somekey: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp3.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: doesnotexist - namespace: default diff --git a/controllers/testdata/declarative/case_4/config.yaml b/controllers/testdata/declarative/case_4/config.yaml index 52619024..43221eb4 100644 --- a/controllers/testdata/declarative/case_4/config.yaml +++ b/controllers/testdata/declarative/case_4/config.yaml @@ -1,13 +1,17 @@ -# When a DSPA with a custom db/storage secret, and custom artifact script is deployed. 
diff --git a/controllers/testdata/declarative/case_4/config.yaml b/controllers/testdata/declarative/case_4/config.yaml
index 52619024..43221eb4 100644
--- a/controllers/testdata/declarative/case_4/config.yaml
+++ b/controllers/testdata/declarative/case_4/config.yaml
@@ -1,13 +1,17 @@
-# When a DSPA with a custom db/storage secret, and custom artifact script is deployed.
 Images:
-  ApiServer: this-apiserver-image-from-config-should-not-be-used:test4
-  Artifact: this-artifact-manager-image-from-config-should-not-be-used:test4
-  PersistentAgent: this-persistenceagent-image-from-config-should-not-be-used:test4
-  ScheduledWorkflow: this-scheduledworkflow-image-from-config-should-not-be-used:test4
-  Cache: this-ubi-minimal-image-from-config-should-not-be-used:test4
-  MoveResultsImage: this-busybox-image-from-config-should-not-be-used:test4
-  MariaDB: this-mariadb-image-from-config-should-not-be-used:test4
+  ApiServer: api-server:test4
+  PersistentAgent: persistenceagent:test4
+  ScheduledWorkflow: scheduledworkflow:test4
+  MlmdEnvoy: mlmdenvoy:test4
+  MlmdGRPC: mlmdgrpc:test4
+  ArgoExecImage: argoexec:test4
+  ArgoWorkflowController: argowfcontroller:test4
+  LauncherImage: launcherimage:test4
+  DriverImage: driverimage:test4
   OAuthProxy: oauth-proxy:test4
+  MariaDB: mariadb:test4
+  MlPipelineUI: frontend:test4
+  Minio: minio:test4
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_4/deploy/00_cr.yaml b/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
index a2d7e9f4..7e47383f 100644
--- a/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
+++ b/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
@@ -1,27 +1,19 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+# Test:
+# image setting via DSPA
+# disabling sample config
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp4
 spec:
+  podToPodTLS: false
   apiServer:
     deploy: true
     image: this-apiserver-image-from-cr-should-be-used:test4
     enableSamplePipeline: false
-    applyTektonCustomResource: true
-    archiveLogs: false
-    artifactImage: this-artifact-manager-image-from-cr-should-be-used:test4
-    cacheImage: this-ubi-minimal-image-from-cr-should-be-used:test4
-    moveResultsImage: this-busybox-image-from-cr-should-be-used:test4
     argoLauncherImage: this-argolauncher-image-from-cr-should-be-used:test4
     argoDriverImage: this-argodriver-image-from-cr-should-be-used:test4
-    injectDefaultScript: true
-    stripEOF: true
     enableOauth: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 125
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
     resources:
       requests:
         cpu: "1231m"
@@ -51,6 +43,26 @@ spec:
       limits:
         cpu: "2526m"
         memory: "5Gi"
+  mlmd:
+    deploy: true
+    grpc:
+      image: this-grpc-image-from-cr-should-be-used:test4
+      resources:
+        requests:
+          cpu: "1235m"
+          memory: "1Gi"
+        limits:
+          cpu: "2526m"
+          memory: "5Gi"
+    envoy:
+      image: this-envoy-image-from-cr-should-be-used:test4
+      resources:
+        requests:
+          cpu: "1235m"
+          memory: "1Gi"
+        limits:
+          cpu: "2526m"
+          memory: "5Gi"
   mlpipelineUI:
     deploy: true
     image: this-frontend-image-from-cr-should-be-used:test4
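Note: case_4 is the image-precedence fixture: the DSPO-level config.yaml above now carries plain per-component defaults (api-server:test4, mlmdgrpc:test4, ...), while the CR overrides several of them with images literally named this-...-image-from-cr-should-be-used:test4. The expected deployments below assert that a CR-level image always wins over the operator default. An illustrative sketch of the two layers:

```yaml
# DSPO config.yaml (operator-wide fallback)
Images:
  ApiServer: api-server:test4
---
# DSPA CR (takes precedence when set)
spec:
  apiServer:
    image: this-apiserver-image-from-cr-should-be-used:test4
```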
diff --git a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
index e26220ec..b0474ff8 100644
--- a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -16,6 +17,7 @@ spec:
   template:
     metadata:
      labels:
+        dsp-version: v2
         app: ds-pipeline-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
@@ -37,10 +39,6 @@ spec:
             value: "mariadb-testdsp4.default.svc.cluster.local"
           - name: DBCONFIG_PORT
             value: "3306"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "125"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -75,6 +73,10 @@ spec:
             value: "this-argolauncher-image-from-cr-should-be-used:test4"
           - name: V2_DRIVER_IMAGE
             value: "this-argodriver-image-from-cr-should-be-used:test4"
+          - name: METADATA_GRPC_SERVICE_SERVICE_HOST
+            value: "ds-pipeline-metadata-grpc-testdsp4.default.svc.cluster.local"
+          - name: METADATA_GRPC_SERVICE_SERVICE_PORT
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp4.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
@@ -82,44 +84,26 @@ spec:
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: "this-ubi-minimal-image-from-cr-should-be-used:test4"
-          - name: MOVERESULTS_IMAGE
-            value: "this-busybox-image-from-cr-should-be-used:test4"
-          - name: ARTIFACT_IMAGE
-            value: "this-artifact-manager-image-from-cr-should-be-used:test4"
-          - name: ARTIFACT_BUCKET
-            value: "mlpipeline"
-          - name: ARTIFACT_ENDPOINT
-            value: "http://minio-testdsp4.default.svc.cluster.local:9000"
-          - name: ARTIFACT_SCRIPT
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
+            value: "testuser"
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "artifact_script"
-                name: "ds-pipeline-artifact-script-testdsp4"
-          - name: ARCHIVE_LOGS
-            value: "false"
-          - name: TRACK_ARTIFACTS
-            value: "true"
-          - name: STRIP_EOF
-            value: "true"
-          - name: PIPELINE_RUNTIME
-            value: "tekton"
-          - name: INJECT_DEFAULT_SCRIPT
-            value: "true"
-          - name: APPLY_TEKTON_CUSTOM_RESOURCE
-            value: "true"
-          - name: TERMINATE_STATUS
-            value: "Cancelled"
+              secretKeyRef:
+                key: "password"
+                name: "ds-pipeline-db-testdsp4"
+          - name: DBCONFIG_MYSQLCONFIG_DBNAME
+            value: "randomDBName"
+          - name: DBCONFIG_MYSQLCONFIG_HOST
+            value: "mariadb-testdsp4.default.svc.cluster.local"
+          - name: DBCONFIG_MYSQLCONFIG_PORT
+            value: "3306"
           image: this-apiserver-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
           command: ['/bin/apiserver']
-          volumeMounts:
-          - name: server-config
-            mountPath: /config/config.json
-            subPath: config.json
           args:
             - --config=/config
             - -logtostderr=true
@@ -161,6 +145,10 @@ spec:
             limits:
               cpu: 2522m
               memory: 5Gi
+          volumeMounts:
+          - mountPath: /config/config.json
+            name: server-config
+            subPath: config.json
         - name: oauth-proxy
           args:
           - --https-address=:8443
diff --git a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
deleted file mode 100644
index e0bddf31..00000000
--- a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-apiVersion: v1
-data:
-  artifact_script: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-        workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-        workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-        artifact_name=$(basename $2)
-
-        aws_cp() {
-
-            aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-
-        }
-
-        if [ -f "$workspace_dest/$artifact_name" ]; then
-            echo sending to: ${workspace_dest}/${artifact_name}
-            tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-            aws_cp $1
-        elif [ -f "$2" ]; then
-            tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-            aws_cp $1
-        else
-            echo "$2 file does not exist. Skip artifact tracking for $1"
-        fi
-    }
-    push_log() {
-        cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-        push_artifact main-log step-main.log
-    }
-    strip_eof() {
-        if [ -f "$2" ]; then
-            awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-        fi
-    }
-kind: ConfigMap
-metadata:
-  name: ds-pipeline-artifact-script-testdsp4
-  namespace: default
-  labels:
-    app: ds-pipeline-testdsp4
-    component: data-science-pipelines
diff --git a/controllers/testdata/declarative/case_4/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mariadb_deployment.yaml
index 3130c29a..a88cf751 100644
--- a/controllers/testdata/declarative/case_4/expected/created/mariadb_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/mariadb_deployment.yaml
@@ -5,6 +5,7 @@ metadata:
   name: mariadb-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: mariadb-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -19,6 +20,7 @@ spec:
   template:
     metadata:
       labels:
+        dsp-version: v2
         app: mariadb-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
diff --git a/controllers/testdata/declarative/case_4/expected/created/minio_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/minio_deployment.yaml
index 0ea9304b..dddd17f6 100644
--- a/controllers/testdata/declarative/case_4/expected/created/minio_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/minio_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: minio-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: minio-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -18,6 +19,7 @@ spec:
   template:
     metadata:
       labels:
+        dsp-version: v2
         app: minio-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
similarity index 78%
rename from controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml
rename to controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
index 04175020..0dc50500 100644
--- a/controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
@@ -1,30 +1,32 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: ds-pipeline-metadata-envoy-testdsp5
+  name: ds-pipeline-metadata-envoy-testdsp4
   namespace: default
   labels:
-    app: ds-pipeline-metadata-envoy-testdsp5
+    dsp-version: v2
+    app: ds-pipeline-metadata-envoy-testdsp4
     component: data-science-pipelines
-    dspa: testdsp5
+    dspa: testdsp4
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app: ds-pipeline-metadata-envoy-testdsp5
+      app: ds-pipeline-metadata-envoy-testdsp4
       component: data-science-pipelines
-      dspa: testdsp5
+      dspa: testdsp4
   template:
     metadata:
       annotations:
        sidecar.istio.io/inject: "false"
      labels:
-        app: ds-pipeline-metadata-envoy-testdsp5
+        dsp-version: v2
+        app: ds-pipeline-metadata-envoy-testdsp4
         component: data-science-pipelines
-        dspa: testdsp5
+        dspa: testdsp4
     spec:
       containers:
-      - image: metadata-envoy:test5
+      - image: this-envoy-image-from-cr-should-be-used:test4
         name: container
         command: ["/usr/local/bin/envoy"]
         args: [
@@ -51,12 +53,12 @@ spec:
             port: md-envoy
           timeoutSeconds: 2
         resources:
-          limits:
-            cpu: 100m
-            memory: 256Mi
           requests:
-            cpu: 100m
-            memory: 256Mi
+            cpu: 1235m
+            memory: 1Gi
+          limits:
+            cpu: 2526m
+            memory: 5Gi
         volumeMounts:
         - mountPath: /etc/envoy.yaml
           name: envoy-config
@@ -65,15 +67,15 @@ spec:
         args:
         - --https-address=:8443
         - --provider=openshift
-        - --openshift-service-account=ds-pipeline-metadata-envoy-testdsp5
+        - --openshift-service-account=ds-pipeline-metadata-envoy-testdsp4
         - --upstream=http://localhost:9090
         - --tls-cert=/etc/tls/private/tls.crt
        - --tls-key=/etc/tls/private/tls.key
         - --cookie-secret=SECRET
-        - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-metadata-envoy-testdsp5","namespace":"default"}}'
-        - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-metadata-envoy-testdsp5","verb":"get","resourceAPIGroup":"route.openshift.io"}'
+        - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-metadata-envoy-testdsp4","namespace":"default"}}'
+        - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-metadata-envoy-testdsp4","verb":"get","resourceAPIGroup":"route.openshift.io"}'
         - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)'
-        image: oauth-proxy:test5
+        image: oauth-proxy:test4
         ports:
         - containerPort: 8443
           name: oauth2-proxy
@@ -108,16 +110,17 @@ spec:
         volumeMounts:
         - mountPath: /etc/tls/private
           name: proxy-tls
+      serviceAccountName: ds-pipeline-metadata-envoy-testdsp4
       volumes:
       - name: envoy-config
         configMap:
-          name: ds-pipeline-metadata-envoy-config-testdsp5
+          name: ds-pipeline-metadata-envoy-config-testdsp4
           defaultMode: 420
       - name: proxy-tls
         secret:
-          secretName: ds-pipelines-envoy-proxy-tls-testdsp5
+          secretName: ds-pipelines-envoy-proxy-tls-testdsp4
           defaultMode: 420
       - name: proxy-tls-upstream
         configMap:
-          name: dsp-trusted-ca-testdsp5
+          name: dsp-trusted-ca-testdsp4
           defaultMode: 420
diff --git a/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml
new file mode 100644
index 00000000..f8c79bc8
--- /dev/null
+++ b/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml
@@ -0,0 +1,76 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: ds-pipeline-metadata-grpc-testdsp4
+  namespace: default
+  labels:
+    dsp-version: v2
+    app: ds-pipeline-metadata-grpc-testdsp4
+    component: data-science-pipelines
+    dspa: testdsp4
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: ds-pipeline-metadata-grpc-testdsp4
+      component: data-science-pipelines
+      dspa: testdsp4
+  template:
+    metadata:
+      labels:
+        dsp-version: v2
+        app: ds-pipeline-metadata-grpc-testdsp4
+        component: data-science-pipelines
+        dspa: testdsp4
+    spec:
+      containers:
+      - args:
+        - --grpc_port=8080
+        - --mysql_config_database=$(MYSQL_DATABASE)
+        - --mysql_config_host=$(MYSQL_HOST)
+        - --mysql_config_port=$(MYSQL_PORT)
+        - --mysql_config_user=$(DBCONFIG_USER)
+        - --mysql_config_password=$(DBCONFIG_PASSWORD)
+        - --enable_database_upgrade=true
+        command:
+        - /bin/metadata_store_server
+        env:
+        - name: DBCONFIG_USER
+          value: "testuser"
+        - name: DBCONFIG_PASSWORD
+          valueFrom:
+            secretKeyRef:
+              key: "password"
+              name: "ds-pipeline-db-testdsp4"
+        - name: MYSQL_DATABASE
+          value: "randomDBName"
+        - name: MYSQL_HOST
+          value: "mariadb-testdsp4.default.svc.cluster.local"
+        - name: MYSQL_PORT
+          value: "3306"
+        image: this-grpc-image-from-cr-should-be-used:test4
+        name: container
+        ports:
+        - containerPort: 8080
+          name: grpc-api
+          protocol: TCP
+        livenessProbe:
+          initialDelaySeconds: 30
+          periodSeconds: 5
+          tcpSocket:
+            port: grpc-api
+          timeoutSeconds: 2
+        readinessProbe:
+          initialDelaySeconds: 3
+          periodSeconds: 5
+          tcpSocket:
+            port: grpc-api
+          timeoutSeconds: 2
+        resources:
+          requests:
+            cpu: 1235m
+            memory: 1Gi
+          limits:
+            cpu: 2526m
+            memory: 5Gi
+      serviceAccountName: ds-pipeline-metadata-grpc-testdsp4
diff --git a/controllers/testdata/declarative/case_4/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mlpipelines-ui_deployment.yaml
index 62022305..fbeb5e5f 100644
--- a/controllers/testdata/declarative/case_4/expected/created/mlpipelines-ui_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/mlpipelines-ui_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-ui-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-ui-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -18,6 +19,7 @@ spec:
       annotations:
         cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
       labels:
+        dsp-version: v2
         app: ds-pipeline-ui-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
diff --git a/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
index b25c4471..f009d959 100644
--- a/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-persistenceagent-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-persistenceagent-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -18,6 +19,7 @@ spec:
       annotations:
         cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
       labels:
+        dsp-version: v2
         app: ds-pipeline-persistenceagent-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
@@ -35,7 +37,7 @@ spec:
           - name: KUBEFLOW_USERID_PREFIX
             value: ""
           - name: EXECUTIONTYPE
-            value: PipelineRun
+            value: Workflow
           image: this-persistenceagent-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-persistenceagent
@@ -73,4 +75,17 @@ spec:
             limits:
               cpu: 2524m
               memory: 5Gi
+          volumeMounts:
+          - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token
+            name: persistenceagent-sa-token
+            subPath: ds-pipeline-persistenceagent-testdsp4-token
       serviceAccountName: ds-pipeline-persistenceagent-testdsp4
+      volumes:
+      - name: persistenceagent-sa-token
+        projected:
+          defaultMode: 420
+          sources:
+          - serviceAccountToken:
+              audience: pipelines.kubeflow.org
+              expirationSeconds: 3600
+              path: ds-pipeline-persistenceagent-testdsp4-token
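Note: the persistence agent now authenticates to the API server with a projected service-account token scoped to the pipelines.kubeflow.org audience, mounted into the container at a well-known path. Distilled from the expected deployment above (an excerpt of the asserted manifest, not new behavior):

```yaml
volumes:
- name: persistenceagent-sa-token
  projected:
    defaultMode: 420
    sources:
    - serviceAccountToken:
        audience: pipelines.kubeflow.org   # token is only valid for this audience
        expirationSeconds: 3600            # kubelet rotates it hourly
        path: ds-pipeline-persistenceagent-testdsp4-token
```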
diff --git a/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
index c85f687e..5f15836e 100644
--- a/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-scheduledworkflow-testdsp4
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-scheduledworkflow-testdsp4
     component: data-science-pipelines
     dspa: testdsp4
@@ -18,6 +19,7 @@ spec:
       annotations:
         cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
       labels:
+        dsp-version: v2
         app: ds-pipeline-scheduledworkflow-testdsp4
         component: data-science-pipelines
         dspa: testdsp4
@@ -28,8 +30,6 @@ spec:
             value: "default"
           - name: CRON_SCHEDULE_TIMEZONE
             value: "EST"
-          - name: EXECUTIONTYPE
-            value: PipelineRun
           image: this-scheduledworkflow-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-scheduledworkflow
diff --git a/controllers/testdata/declarative/case_5/config.yaml b/controllers/testdata/declarative/case_5/config.yaml
index b7b47ce0..5742fc30 100644
--- a/controllers/testdata/declarative/case_5/config.yaml
+++ b/controllers/testdata/declarative/case_5/config.yaml
@@ -1,18 +1,17 @@
-# When a minimal DSPA is deployed
 Images:
   ApiServer: api-server:test5
-  Artifact: artifact-manager:test5
   PersistentAgent: persistenceagent:test5
   ScheduledWorkflow: scheduledworkflow:test5
-  Cache: ubi-minimal:test5
-  MoveResultsImage: busybox:test5
-  MlPipelineUI: frontend:test5
+  MlmdEnvoy: mlmdenvoy:test5
+  MlmdGRPC: mlmdgrpc:test5
+  ArgoExecImage: argoexec:test5
+  ArgoWorkflowController: argowfcontroller:test5
+  LauncherImage: launcherimage:test5
+  DriverImage: driverimage:test5
+  OAuthProxy: oauth-proxy:test5
   MariaDB: mariadb:test5
+  MlPipelineUI: frontend:test5
   Minio: minio:test5
-  OAuthProxy: oauth-proxy:test5
-  MlmdEnvoy: metadata-envoy:changeme
-  MlmdGrpc: metadata-grpc:changeme
-  MlmdWriter: metadata-grpc:changeme
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_8/deploy/00_configmap.yaml b/controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
similarity index 96%
rename from controllers/testdata/declarative/case_8/deploy/00_configmap.yaml
rename to controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
index 1dc6a89f..5edbbb42 100644
--- a/controllers/testdata/declarative/case_8/deploy/00_configmap.yaml
+++ b/controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
@@ -1,9 +1,9 @@
 kind: ConfigMap
 apiVersion: v1
 metadata:
-  name: testcabundleconfigmap8
+  name: testcabundleconfigmap5
 data:
-  testcabundleconfigmapkey8.crt: |
+  testcabundleconfigmapkey5.crt: |
     -----BEGIN CERTIFICATE-----
     MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL
     BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0
diff --git a/controllers/testdata/declarative/case_8/deploy/01_configmap.yaml b/controllers/testdata/declarative/case_5/deploy/01_configmap.yaml
similarity index 100%
rename from controllers/testdata/declarative/case_8/deploy/01_configmap.yaml
rename to controllers/testdata/declarative/case_5/deploy/01_configmap.yaml
diff --git a/controllers/testdata/declarative/case_8/deploy/02_cr.yaml b/controllers/testdata/declarative/case_5/deploy/02_cr.yaml
similarity index 57%
rename from controllers/testdata/declarative/case_8/deploy/02_cr.yaml
rename to controllers/testdata/declarative/case_5/deploy/02_cr.yaml
index b10aa421..58b214a9 100644
--- a/controllers/testdata/declarative/case_8/deploy/02_cr.yaml
+++ b/controllers/testdata/declarative/case_5/deploy/02_cr.yaml
@@ -1,22 +1,24 @@
 # Test:
 # DSPA CA bundle, ensure user provided CA Bundle results in dsp-trusted-ca config map creation and utilization in artifact config.
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
-  name: testdsp8
+  name: testdsp5
 spec:
   podToPodTLS: true
   dspVersion: v2
   objectStorage:
     minio:
-      image: minio:test8
+      image: minio:test5
   database:
     mariaDB:
       deploy: true
+  mlmd:
+    deploy: true
   apiServer:
     deploy: true
     enableSamplePipeline: false
-    caBundleFileName: testcabundleconfigmapkey8.crt
+    caBundleFileName: testcabundleconfigmapkey5.crt
     cABundle:
-      configMapName: testcabundleconfigmap8
-      configMapKey: testcabundleconfigmapkey8.crt
+      configMapName: testcabundleconfigmap5
+      configMapKey: testcabundleconfigmapkey5.crt
diff --git a/controllers/testdata/declarative/case_5/deploy/cr.yaml b/controllers/testdata/declarative/case_5/deploy/cr.yaml
deleted file mode 100644
index 2aba4287..00000000
--- a/controllers/testdata/declarative/case_5/deploy/cr.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: testdsp5
-spec:
-  apiServer:
-    argoLauncherImage: argolauncherimage:test5
-    argoDriverImage: argodriverimage:test5
-  objectStorage:
-    minio:
-      image: minio:test5
-  mlpipelineUI:
-    image: frontend:test5
-  mlmd:
-    deploy: true
-    envoy:
-      image: metadata-envoy:test5
-    grpc:
-      image: metadata-grpc:test5
-      port: "1337"
-    writer:
-      image: metadata-writer:test5
diff --git a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
index 2af6993a..03a74eeb 100644
--- a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
@@ -4,6 +4,7 @@ metadata:
   name: ds-pipeline-testdsp5
   namespace: default
   labels:
+    dsp-version: v2
     app: ds-pipeline-testdsp5
     component: data-science-pipelines
     dspa: testdsp5
@@ -16,6 +17,7 @@ spec:
   template:
     metadata:
       labels:
+        dsp-version: v2
         app: ds-pipeline-testdsp5
         component: data-science-pipelines
         dspa: testdsp5
@@ -37,10 +39,14 @@ spec:
             value: "mariadb-testdsp5.default.svc.cluster.local"
           - name: DBCONFIG_PORT
             value: "3306"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "120"
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME
+            value: dsp-trusted-ca-testdsp5
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY
+            value: testcabundleconfigmapkey5.crt
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH
+            value: /dsp-custom-certs
+          - name: SSL_CERT_DIR
+            value: "/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -72,50 +78,40 @@ spec:
           - name: MINIO_SERVICE_SERVICE_PORT
             value: "9000"
           - name: V2_LAUNCHER_IMAGE
-            value: "argolauncherimage:test5"
+            value: "launcherimage:test5"
           - name: V2_DRIVER_IMAGE
-            value: "argodriverimage:test5"
+            value: "driverimage:test5"
           - name: METADATA_GRPC_SERVICE_SERVICE_HOST
             value: "ds-pipeline-metadata-grpc-testdsp5.default.svc.cluster.local"
           - name: METADATA_GRPC_SERVICE_SERVICE_PORT
-            value: "1337"
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp5.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
             value: "8887"
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
+          - name: ML_PIPELINE_TLS_ENABLED
+            value: "true"
+          - name: METADATA_TLS_ENABLED
+            value: "true"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: "ubi-minimal:test5"
-          - name: MOVERESULTS_IMAGE
-            value: "busybox:test5"
-          - name: ARTIFACT_IMAGE
-            value: "artifact-manager:test5"
-          - name: ARTIFACT_BUCKET
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
             value: "mlpipeline"
-          - name: ARTIFACT_ENDPOINT
-            value: "http://minio-testdsp5.default.svc.cluster.local:9000"
-          - name: ARTIFACT_SCRIPT
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "artifact_script"
-                name: "ds-pipeline-artifact-script-testdsp5"
-          - name: ARCHIVE_LOGS
-            value: "false"
-          - name: TRACK_ARTIFACTS
-            value: "true"
-          - name: STRIP_EOF
-            value: "true"
-          - name: PIPELINE_RUNTIME
-            value: "tekton"
-          - name: INJECT_DEFAULT_SCRIPT
-            value: "true"
-          - name: APPLY_TEKTON_CUSTOM_RESOURCE
-            value: "true"
-          - name: TERMINATE_STATUS
-            value: "Cancelled"
+              secretKeyRef:
+                key: "password"
+                name: "ds-pipeline-db-testdsp5"
+          - name: DBCONFIG_MYSQLCONFIG_DBNAME
+            value: "mlpipeline"
+          - name: DBCONFIG_MYSQLCONFIG_HOST
+            value: "mariadb-testdsp5.default.svc.cluster.local"
+          - name: DBCONFIG_MYSQLCONFIG_PORT
+            value: "3306"
           image: api-server:test5
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
@@ -123,6 +119,8 @@ spec:
           args:
             - --config=/config
             - -logtostderr=true
+            - --tlsCertPath=/etc/tls/private/tls.crt
+            - --tlsCertKeyPath=/etc/tls/private/tls.key
           ports:
           - containerPort: 8888
             name: http
@@ -131,29 +129,15 @@ spec:
             name: grpc
             protocol: TCP
           livenessProbe:
-            exec:
-              command:
-              - wget
-              - -q
-              - -S
-              - -O
-              - '-'
-              - http://localhost:8888/apis/v1beta1/healthz
-            initialDelaySeconds: 3
-            periodSeconds: 5
-            timeoutSeconds: 2
+            httpGet:
+              path: /apis/v1beta1/healthz
+              port: http
+              scheme: HTTPS
           readinessProbe:
-            exec:
-              command:
-              - wget
-              - -q
-              - -S
-              - -O
-              - '-'
-              - http://localhost:8888/apis/v1beta1/healthz
-            initialDelaySeconds: 3
-            periodSeconds: 5
-            timeoutSeconds: 2
+            httpGet:
+              path: /apis/v1beta1/healthz
+              port: http
+              scheme: HTTPS
           resources:
             requests:
               cpu: 250m
@@ -165,12 +149,17 @@ spec:
           - name: server-config
             mountPath: /config/config.json
             subPath: config.json
+          - mountPath: /etc/tls/private
+            name: proxy-tls
+          - name: ca-bundle
+            mountPath: /dsp-custom-certs
         - name: oauth-proxy
           args:
           - --https-address=:8443
           - --provider=openshift
           - --openshift-service-account=ds-pipeline-testdsp5
-          - --upstream=http://localhost:8888
+          - --upstream=https://ds-pipeline-testdsp5.default.svc.cluster.local:8888
+          - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt
          - --tls-cert=/etc/tls/private/tls.crt
           - --tls-key=/etc/tls/private/tls.key
           - --cookie-secret=SECRET
@@ -217,8 +206,12 @@ spec:
         secret:
           secretName: ds-pipelines-proxy-tls-testdsp5
           defaultMode: 420
-      - configMap:
-          defaultMode: 420
+      - name: server-config
+        configMap:
           name: ds-pipeline-server-config-testdsp5
-        name: server-config
+          defaultMode: 420
+      - name: ca-bundle
+        configMap:
+          name: dsp-trusted-ca-testdsp5
+          defaultMode: 420
       serviceAccountName: ds-pipeline-testdsp5
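Note: case_5 (renamed from case_8) covers podToPodTLS: true together with a user-supplied CA bundle: the API server grows --tlsCertPath/--tlsCertKeyPath args, HTTPS health probes, ML_PIPELINE_TLS_ENABLED / METADATA_TLS_ENABLED env vars, and the oauth-proxy upstream switches to https with --upstream-ca. A minimal CR that should take this path (fixture values from the 02_cr.yaml above; a sketch, not the full test input):

```yaml
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: testdsp5
spec:
  podToPodTLS: true     # the default; spelled out here because it is what the case asserts
  apiServer:
    caBundleFileName: testcabundleconfigmapkey5.crt
    cABundle:
      configMapName: testcabundleconfigmap5
      configMapKey: testcabundleconfigmapkey5.crt
```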
diff --git a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
deleted file mode 100644
index 33aebad0..00000000
--- a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-apiVersion: v1
-data:
-  artifact_script: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-        workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-        workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-        artifact_name=$(basename $2)
-
-        aws_cp() {
-
-            aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-
-        }
-
-        if [ -f "$workspace_dest/$artifact_name" ]; then
-            echo sending to: ${workspace_dest}/${artifact_name}
-            tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-            aws_cp $1
-        elif [ -f "$2" ]; then
-            tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-            aws_cp $1
-        else
-            echo "$2 file does not exist. Skip artifact tracking for $1"
-        fi
-    }
-    push_log() {
-        cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-        push_artifact main-log step-main.log
-    }
-    strip_eof() {
-        if [ -f "$2" ]; then
-            awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-        fi
-    }
-kind: ConfigMap
-metadata:
-  name: ds-pipeline-artifact-script-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-testdsp5
-    component: data-science-pipelines
diff --git a/controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
similarity index 98%
rename from controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml
rename to controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
index 567b05bf..c136947a 100644
--- a/controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
@@ -1,9 +1,11 @@
 kind: ConfigMap
 apiVersion: v1
 metadata:
-  name: dsp-trusted-ca-testdsp8
+  name: dsp-trusted-ca-testdsp5
+  labels:
+    dsp-version: v2
 data:
-  testcabundleconfigmapkey8.crt: |-
+  testcabundleconfigmapkey5.crt: |-
     -----BEGIN CERTIFICATE-----
     MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL
     BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0
diff --git a/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
index 95276f7f..de101cf6 100644
--- a/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
@@ -5,6 +5,7 @@ metadata:
   name: mariadb-testdsp5
   namespace: default
   labels:
+    dsp-version: v2
     app: mariadb-testdsp5
     component: data-science-pipelines
     dspa: testdsp5
@@ -19,6 +20,7 @@ spec:
   template:
     metadata:
       labels:
+        dsp-version: v2
         app: mariadb-testdsp5
         component: data-science-pipelines
         dspa: testdsp5
@@ -73,7 +75,25 @@ spec:
           volumeMounts:
           - name: mariadb-persistent-storage
             mountPath: /var/lib/mysql
+          - name: mariadb-tls
+            mountPath: /.mariadb/certs
+          - name: mariadb-tls-config
+            mountPath: /etc/my.cnf.d/mariadb-tls-config.cnf
+            subPath: mariadb-tls-config.cnf
       volumes:
       - name: mariadb-persistent-storage
         persistentVolumeClaim:
           claimName: mariadb-testdsp5
+      - name: mariadb-tls
+        secret:
+          secretName: ds-pipelines-mariadb-tls-testdsp5
+          items:
+          - key: tls.crt
+            path: tls.crt
+          - key: tls.key
+            path: tls.key
+          defaultMode: 420
+      - name: mariadb-tls-config
+        configMap:
+          name: ds-pipelines-mariadb-tls-config-testdsp5
+          defaultMode: 420
diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml
deleted file mode 100644
index 908cf42c..00000000
--- a/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml
+++ /dev/null
@@ -1,63 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: ds-pipeline-metadata-writer-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-metadata-writer-testdsp5
-    component: data-science-pipelines
-    dspa: testdsp5
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      app: ds-pipeline-metadata-writer-testdsp5
-      component: data-science-pipelines
-      dspa: testdsp5
-  template:
-    metadata:
-      labels:
-        app: ds-pipeline-metadata-writer-testdsp5
-        component: data-science-pipelines
-        dspa: testdsp5
-    spec:
-      containers:
-      - env:
-        - name: NAMESPACE_TO_WATCH
-          valueFrom:
-            fieldRef:
-              apiVersion: v1
-              fieldPath: metadata.namespace
-        - name: PIPELINE_RUNTIME
-          value: tekton
-        - name: ARCHIVE_LOGS
-          value: "false"
-        - name: METADATA_GRPC_SERVICE_SERVICE_HOST
-          value: "ds-pipeline-metadata-grpc-testdsp5"
-        - name: METADATA_GRPC_SERVICE_SERVICE_PORT
-          value: "1337"
-        image: metadata-writer:test5
-        name: main
-        livenessProbe:
-          exec:
-            command:
-            - pidof
-            - python3
-          initialDelaySeconds: 30
-          periodSeconds: 5
-          timeoutSeconds: 2
-        readinessProbe:
-          exec:
-            command:
-            - pidof
-            - python3
-          initialDelaySeconds: 3
-          periodSeconds: 5
-          timeoutSeconds: 2
-        resources:
-          limits:
-            cpu: 100m
-            memory: 256Mi
-          requests:
-            cpu: 100m
-            memory: 256Mi
diff --git a/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml
deleted file mode 100644
index a549d1d5..00000000
--- a/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml
+++ /dev/null
@@ -1,171 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: ds-pipeline-ui-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-ui-testdsp5
-    component: data-science-pipelines
-    dspa: testdsp5
-spec:
-  selector:
-    matchLabels:
-      app: ds-pipeline-ui-testdsp5
-      component: data-science-pipelines
-      dspa: testdsp5
-  template:
-    metadata:
-      annotations:
-        cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
-      labels:
-        app: ds-pipeline-ui-testdsp5
-        component: data-science-pipelines
-        dspa: testdsp5
-    spec:
-      containers:
-      - env:
-        - name: VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH
-          value: /etc/config/viewer-pod-template.json
-        - name: MINIO_NAMESPACE
-          valueFrom:
-            fieldRef:
-              apiVersion: v1
-              fieldPath: metadata.namespace
-        - name: MINIO_ACCESS_KEY
-          valueFrom:
-            secretKeyRef:
-              key: "accesskey"
-              name: "ds-pipeline-s3-testdsp5"
-        - name: MINIO_SECRET_KEY
-          valueFrom:
-            secretKeyRef:
-              key: "secretkey"
-              name: "ds-pipeline-s3-testdsp5"
-        - name: ALLOW_CUSTOM_VISUALIZATIONS
-          value: "true"
-        - name: ARGO_ARCHIVE_LOGS
-          value: "true"
-        - name: ML_PIPELINE_SERVICE_HOST
-          value: ds-pipeline-testdsp5.default.svc.cluster.local
-        - name: ML_PIPELINE_SERVICE_PORT
-          value: '8888'
-        - name: METADATA_ENVOY_SERVICE_SERVICE_HOST
-          value: ds-pipeline-md-testdsp5
-        - name: METADATA_ENVOY_SERVICE_SERVICE_PORT
-          value: "9090"
-        - name: AWS_ACCESS_KEY_ID
-          valueFrom:
-            secretKeyRef:
-              key: "accesskey"
-              name: "ds-pipeline-s3-testdsp5"
-        - name: AWS_SECRET_ACCESS_KEY
-          valueFrom:
-            secretKeyRef:
-              key: "secretkey"
-              name: "ds-pipeline-s3-testdsp5"
-        - name: AWS_REGION
-          value: "minio"
-        - name: AWS_S3_ENDPOINT
-          value: "minio-testdsp5.default.svc.cluster.local"
-        - name: AWS_SSL
-          value: "false"
-        - name: DISABLE_GKE_METADATA
-          value: 'true'
-        image: frontend:test5
-        # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
-        livenessProbe:
-          exec:
-            command:
-            - wget
-            - -q
-            - -S
-            - -O
-            - '-'
-            - http://localhost:3000/apis/v1beta1/healthz
-          initialDelaySeconds: 3
-          periodSeconds: 5
-          timeoutSeconds: 2
-        name: ds-pipeline-ui
-        ports:
-        - containerPort: 3000
-          protocol: TCP
-        readinessProbe:
-          exec:
-            command:
-            - wget
-            - -q
-            - -S
-            - -O
-            - '-'
-            - http://localhost:3000/apis/v1beta1/healthz
-          initialDelaySeconds: 3
-          periodSeconds: 5
-          timeoutSeconds: 2
-        resources:
-          limits:
-            cpu: 100m
-            memory: 256Mi
-          requests:
-            cpu: 100m
-            memory: 256Mi
-        volumeMounts:
-        - mountPath: /etc/config
-          name: config-volume
-          readOnly: true
-      - name: oauth-proxy
-        args:
-        - --https-address=:8443
-        - --provider=openshift
-        - --openshift-service-account=ds-pipeline-ui-testdsp5
-        - --upstream=http://localhost:3000
-        - --tls-cert=/etc/tls/private/tls.crt
-        - --tls-key=/etc/tls/private/tls.key
-        - --cookie-secret=SECRET
-        - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp5","namespace":"default"}}'
-        - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp5","verb":"get","resourceAPIGroup":"route.openshift.io"}'
-        - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)'
-        image: oauth-proxy:test5
-        ports:
-        - containerPort: 8443
-          name: https
-          protocol: TCP
-        livenessProbe:
-          httpGet:
-            path: /oauth/healthz
-            port: 8443
-            scheme: HTTPS
-          initialDelaySeconds: 30
-          timeoutSeconds: 1
-          periodSeconds: 5
-          successThreshold: 1
-          failureThreshold: 3
-        readinessProbe:
-          httpGet:
-            path: /oauth/healthz
-            port: 8443
-            scheme: HTTPS
-          initialDelaySeconds: 5
-          timeoutSeconds: 1
-          periodSeconds: 5
-          successThreshold: 1
-          failureThreshold: 3
-        resources:
-          limits:
-            cpu: 100m
-            memory: 256Mi
-          requests:
-            cpu: 100m
-            memory: 256Mi
-        volumeMounts:
-        - mountPath: /etc/tls/private
-          name: proxy-tls
-      serviceAccountName: ds-pipeline-ui-testdsp5
-      volumes:
-      - configMap:
-          name: ds-pipeline-ui-configmap-testdsp5
-          defaultMode: 420
-        name: config-volume
-      - name: proxy-tls
-        secret:
-          secretName: ds-pipelines-ui-proxy-tls-testdsp5
-          defaultMode: 420
diff --git a/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml
deleted file mode 100644
index faad5b73..00000000
--- a/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml
+++ /dev/null
@@ -1,76 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: ds-pipeline-persistenceagent-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-persistenceagent-testdsp5
-    component: data-science-pipelines
-    dspa: testdsp5
-spec:
-  selector:
-    matchLabels:
-      app: ds-pipeline-persistenceagent-testdsp5
-      component: data-science-pipelines
-      dspa: testdsp5
-  template:
-    metadata:
-      annotations:
-        cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
-      labels:
-        app: ds-pipeline-persistenceagent-testdsp5
-        component: data-science-pipelines
-        dspa: testdsp5
-    spec:
-      containers:
-      - env:
-        - name: NAMESPACE
-          value: "default"
-        - name: TTL_SECONDS_AFTER_WORKFLOW_FINISH
-          value: "86400"
-        - name: NUM_WORKERS
-          value: "2"
-        - name: KUBEFLOW_USERID_HEADER
-          value: kubeflow-userid
-        - name: KUBEFLOW_USERID_PREFIX
-          value: ""
-        - name: EXECUTIONTYPE
-          value: PipelineRun
-        image: persistenceagent:test5
-        # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
-        name: ds-pipeline-persistenceagent
-        command:
-        - persistence_agent
-        - "--logtostderr=true"
-        - "--ttlSecondsAfterWorkflowFinish=86400"
-        - "--numWorker=2"
-        - "--mlPipelineAPIServerName=ds-pipeline-testdsp5"
-        - "--namespace=default"
-        - "--mlPipelineServiceHttpPort=8888"
-        - "--mlPipelineServiceGRPCPort=8887"
-        livenessProbe:
-          exec:
-            command:
-            - test
-            - -x
-            - persistence_agent
-          initialDelaySeconds: 30
-          periodSeconds: 5
-          timeoutSeconds: 2
-        readinessProbe:
-          exec:
-            command:
-            - test
-            - -x
-            - persistence_agent
-          initialDelaySeconds: 3
-          periodSeconds: 5
-          timeoutSeconds: 2
-        resources:
-          requests:
-            cpu: 120m
-            memory: 500Mi
-          limits:
-            cpu: 250m
-            memory: 1Gi
-      serviceAccountName: ds-pipeline-persistenceagent-testdsp5
diff --git a/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml
deleted file mode 100644
index f315e6cc..00000000
--- a/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: ds-pipeline-scheduledworkflow-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-scheduledworkflow-testdsp5
-    component: data-science-pipelines
-    dspa: testdsp5
-spec:
-  selector:
-    matchLabels:
-      app: ds-pipeline-scheduledworkflow-testdsp5
-      component: data-science-pipelines
-      dspa: testdsp5
-  template:
-    metadata:
-      annotations:
-        cluster-autoscaler.kubernetes.io/safe-to-evict: "true"
-      labels:
-        app: ds-pipeline-scheduledworkflow-testdsp5
-        component: data-science-pipelines
-        dspa: testdsp5
-    spec:
-      containers:
-      - env:
-        - name: NAMESPACE
-          value: "default"
-        - name: CRON_SCHEDULE_TIMEZONE
-          value: "UTC"
-        - name: EXECUTIONTYPE
-          value: PipelineRun
-        image: scheduledworkflow:test5
-        # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
-        name: ds-pipeline-scheduledworkflow
-        command:
-        - controller
-        - "--logtostderr=true"
-        - "--namespace=testdsp5"
-        livenessProbe:
-          exec:
-            command:
-            - test
-            - -x
-            - controller
-          initialDelaySeconds: 30
-          periodSeconds: 5
-          timeoutSeconds: 2
-        readinessProbe:
-          exec:
-            command:
-            - test
-            - -x
-            - controller
-          initialDelaySeconds: 3
-          periodSeconds: 5
-          timeoutSeconds: 2
-        resources:
-          requests:
-            cpu: 120m
-            memory: 100Mi
-          limits:
-            cpu: 250m
-            memory: 250Mi
-      serviceAccountName: ds-pipeline-scheduledworkflow-testdsp5
diff --git a/controllers/testdata/declarative/case_6/config.yaml b/controllers/testdata/declarative/case_6/config.yaml
new file mode 100644
index 00000000..78c93516
--- /dev/null
+++ b/controllers/testdata/declarative/case_6/config.yaml
@@ -0,0 +1,17 @@
+Images:
+  ApiServer: api-server:test6
+  PersistentAgent: persistenceagent:test6
+  ScheduledWorkflow: scheduledworkflow:test6
+  MlmdEnvoy: mlmdenvoy:test6
+  MlmdGRPC: mlmdgrpc:test6
+  ArgoExecImage: argoexec:test6
+  ArgoWorkflowController: argowfcontroller:test6
+  LauncherImage: launcherimage:test6
+  DriverImage: driverimage:test6
+  OAuthProxy: oauth-proxy:test6
+  MariaDB: mariadb:test6
+  MlPipelineUI: frontend:test6
+  Minio: minio:test6
+DSPO:
+  ApiServer:
+    IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_9/deploy/00_cr.yaml b/controllers/testdata/declarative/case_6/deploy/00_cr.yaml
similarity index 69%
rename from controllers/testdata/declarative/case_9/deploy/00_cr.yaml
rename to controllers/testdata/declarative/case_6/deploy/00_cr.yaml
index 612fb3dc..d070d5a1 100644
--- a/controllers/testdata/declarative/case_9/deploy/00_cr.yaml
+++ b/controllers/testdata/declarative/case_6/deploy/00_cr.yaml
@@ -1,21 +1,22 @@
 # Test:
 # podToPodTLS = false, should disable any tls configs for apiserver, pa, and kfp ui
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
-  name: testdsp9
+  name: testdsp6
 spec:
-  dspVersion: v2
   podToPodTLS: false
   objectStorage:
     minio:
-      image: minio:test9
+      image: minio:test6
   database:
     mariaDB:
       deploy: true
   mlpipelineUI:
     deploy: true
-    image: frontend:test9
+    image: frontend:test6
+  mlmd:
+    deploy: true
   apiServer:
     deploy: true
     enableOauth: true
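Note: case_6 (renamed from case_9) is the inverse of case_5: podToPodTLS: false keeps the plain-HTTP oauth-proxy upstream (--upstream=http://localhost:8888) seen in the expected deployment below and omits the *_TLS_ENABLED env vars and TLS cert args. The explicit dspVersion: v2 is also dropped from the CR; under the v1 API that field defaults to v2, so the sketch below should be equivalent:

```yaml
apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
kind: DataSciencePipelinesApplication
metadata:
  name: testdsp6
spec:
  podToPodTLS: false   # disables TLS between DSPA pods (apiserver, PA, UI)
  mlmd:
    deploy: true
```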
diff --git a/controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml
similarity index 79%
rename from controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml
rename to controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml
index 86a7c730..f4809db8 100644
--- a/controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml
@@ -1,24 +1,26 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: ds-pipeline-testdsp9
+  name: ds-pipeline-testdsp6
   namespace: default
   labels:
-    app: ds-pipeline-testdsp9
+    dsp-version: v2
+    app: ds-pipeline-testdsp6
     component: data-science-pipelines
-    dspa: testdsp9
+    dspa: testdsp6
 spec:
   selector:
     matchLabels:
-      app: ds-pipeline-testdsp9
+      app: ds-pipeline-testdsp6
       component: data-science-pipelines
-      dspa: testdsp9
+      dspa: testdsp6
   template:
     metadata:
       labels:
-        app: ds-pipeline-testdsp9
+        dsp-version: v2
+        app: ds-pipeline-testdsp6
         component: data-science-pipelines
-        dspa: testdsp9
+        dspa: testdsp6
     spec:
       containers:
       - env:
@@ -30,57 +32,53 @@ spec:
            valueFrom:
              secretKeyRef:
                key: "password"
-                name: "ds-pipeline-db-testdsp9"
+                name: "ds-pipeline-db-testdsp6"
           - name: DBCONFIG_DBNAME
             value: "mlpipeline"
           - name: DBCONFIG_HOST
-            value: "mariadb-testdsp9.default.svc.cluster.local"
+            value: "mariadb-testdsp6.default.svc.cluster.local"
           - name: DBCONFIG_PORT
             value: "3306"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "120"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
             value: "8888"
           - name: OBJECTSTORECONFIG_CREDENTIALSSECRET
-            value: "ds-pipeline-s3-testdsp9"
+            value: "ds-pipeline-s3-testdsp6"
           - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY
             value: "accesskey"
           - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY
             value: "secretkey"
           - name: DEFAULTPIPELINERUNNERSERVICEACCOUNT
-            value: "pipeline-runner-testdsp9"
+            value: "pipeline-runner-testdsp6"
           - name: OBJECTSTORECONFIG_BUCKETNAME
             value: "mlpipeline"
           - name: OBJECTSTORECONFIG_ACCESSKEY
             valueFrom:
               secretKeyRef:
                 key: "accesskey"
-                name: "ds-pipeline-s3-testdsp9"
+                name: "ds-pipeline-s3-testdsp6"
           - name: OBJECTSTORECONFIG_SECRETACCESSKEY
             valueFrom:
               secretKeyRef:
                 key: "secretkey"
-                name: "ds-pipeline-s3-testdsp9"
+                name: "ds-pipeline-s3-testdsp6"
           - name: OBJECTSTORECONFIG_SECURE
             value: "false"
           - name: MINIO_SERVICE_SERVICE_HOST
-            value: "minio-testdsp9.default.svc.cluster.local"
+            value: "minio-testdsp6.default.svc.cluster.local"
           - name: MINIO_SERVICE_SERVICE_PORT
             value: "9000"
           - name: V2_LAUNCHER_IMAGE
-            value: "argolauncherimage:test9"
+            value: "launcherimage:test6"
           - name: V2_DRIVER_IMAGE
-            value: "argodriverimage:test9"
+            value: "driverimage:test6"
           - name: METADATA_GRPC_SERVICE_SERVICE_HOST
-            value: "ds-pipeline-metadata-grpc-testdsp9.default.svc.cluster.local"
+            value: "ds-pipeline-metadata-grpc-testdsp6.default.svc.cluster.local"
           - name: METADATA_GRPC_SERVICE_SERVICE_PORT
             value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
-            value: ds-pipeline-testdsp9.default.svc.cluster.local
+            value: ds-pipeline-testdsp6.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
             value: "8887"
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
@@ -95,14 +93,14 @@ spec:
             valueFrom:
               secretKeyRef:
                 key: "password"
-                name: "ds-pipeline-db-testdsp9"
+                name: "ds-pipeline-db-testdsp6"
           - name: DBCONFIG_MYSQLCONFIG_DBNAME
             value: "mlpipeline"
           - name: DBCONFIG_MYSQLCONFIG_HOST
-            value: "mariadb-testdsp9.default.svc.cluster.local"
+            value: "mariadb-testdsp6.default.svc.cluster.local"
           - name: DBCONFIG_MYSQLCONFIG_PORT
             value: "3306"
-          image: api-server:test9
+          image: api-server:test6
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
           command: ['/bin/apiserver']
@@ -141,15 +139,15 @@ spec:
           args:
           - --https-address=:8443
           - --provider=openshift
-          - --openshift-service-account=ds-pipeline-testdsp9
+          - --openshift-service-account=ds-pipeline-testdsp6
           - --upstream=http://localhost:8888
           - --tls-cert=/etc/tls/private/tls.crt
           - --tls-key=/etc/tls/private/tls.key
           - --cookie-secret=SECRET
-          - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp9","namespace":"default"}}'
-          - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp9","verb":"get","resourceAPIGroup":"route.openshift.io"}'
+          - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp6","namespace":"default"}}'
+          - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp6","verb":"get","resourceAPIGroup":"route.openshift.io"}'
           - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)'
-          image: oauth-proxy:test9
+          image: oauth-proxy:test6
           ports:
           - containerPort: 8443
             name: oauth
@@ -187,10 +185,10 @@ spec:
       volumes:
       - name: proxy-tls
         secret:
-          secretName: ds-pipelines-proxy-tls-testdsp9
+          secretName: ds-pipelines-proxy-tls-testdsp6
           defaultMode: 420
       - name: server-config
         configMap:
-          name: ds-pipeline-server-config-testdsp9
+          name: ds-pipeline-server-config-testdsp6
           defaultMode: 420
-      serviceAccountName: ds-pipeline-testdsp9
serviceAccountName: ds-pipeline-testdsp6 diff --git a/controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml similarity index 83% rename from controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml rename to controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml index d8bf9c83..5d9d2f24 100644 --- a/controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml @@ -1,26 +1,28 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-ui-testdsp9 + name: ds-pipeline-ui-testdsp6 namespace: default labels: - app: ds-pipeline-ui-testdsp9 + dsp-version: v2 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: selector: matchLabels: - app: ds-pipeline-ui-testdsp9 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ds-pipeline-ui-testdsp9 + dsp-version: v2 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: containers: - env: @@ -35,43 +37,43 @@ spec: valueFrom: secretKeyRef: key: "accesskey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: MINIO_SECRET_KEY valueFrom: secretKeyRef: key: "secretkey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: ALLOW_CUSTOM_VISUALIZATIONS value: "true" - name: ARGO_ARCHIVE_LOGS value: "true" - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp9.default.svc.cluster.local + value: ds-pipeline-testdsp6.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT value: '8888' - name: METADATA_ENVOY_SERVICE_SERVICE_HOST - value: ds-pipeline-md-testdsp9 + value: ds-pipeline-md-testdsp6 - name: METADATA_ENVOY_SERVICE_SERVICE_PORT value: "9090" - name: AWS_ACCESS_KEY_ID valueFrom: secretKeyRef: key: "accesskey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: AWS_SECRET_ACCESS_KEY valueFrom: secretKeyRef: key: "secretkey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: AWS_REGION value: "minio" - name: AWS_S3_ENDPOINT - value: "minio-testdsp9.default.svc.cluster.local" + value: "minio-testdsp6.default.svc.cluster.local" - name: AWS_SSL value: "false" - name: DISABLE_GKE_METADATA value: 'true' - image: frontend:test9 + image: frontend:test6 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting livenessProbe: exec: @@ -116,15 +118,15 @@ spec: args: - --https-address=:8443 - --provider=openshift - - --openshift-service-account=ds-pipeline-ui-testdsp9 + - --openshift-service-account=ds-pipeline-ui-testdsp6 - --upstream=http://localhost:3000 - --tls-cert=/etc/tls/private/tls.crt - --tls-key=/etc/tls/private/tls.key - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp9","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp9","verb":"get","resourceAPIGroup":"route.openshift.io"}' + - '--openshift-delegate-urls={"/": 
{"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp6","namespace":"default"}}' + - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp6","verb":"get","resourceAPIGroup":"route.openshift.io"}' - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test9 + image: oauth-proxy:test6 ports: - containerPort: 8443 name: https @@ -159,13 +161,13 @@ spec: volumeMounts: - mountPath: /etc/tls/private name: proxy-tls - serviceAccountName: ds-pipeline-ui-testdsp9 + serviceAccountName: ds-pipeline-ui-testdsp6 volumes: - configMap: - name: ds-pipeline-ui-configmap-testdsp9 + name: ds-pipeline-ui-configmap-testdsp6 defaultMode: 420 name: config-volume - name: proxy-tls secret: - secretName: ds-pipelines-ui-proxy-tls-testdsp9 + secretName: ds-pipelines-ui-proxy-tls-testdsp6 defaultMode: 420 diff --git a/controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml similarity index 78% rename from controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml rename to controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml index 27fcf135..da50f82f 100644 --- a/controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml @@ -1,26 +1,28 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-persistenceagent-testdsp9 + name: ds-pipeline-persistenceagent-testdsp6 namespace: default labels: - app: ds-pipeline-persistenceagent-testdsp9 + dsp-version: v2 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: selector: matchLabels: - app: ds-pipeline-persistenceagent-testdsp9 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ds-pipeline-persistenceagent-testdsp9 + dsp-version: v2 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: containers: - env: @@ -36,7 +38,7 @@ spec: value: "" - name: EXECUTIONTYPE value: Workflow - image: persistenceagent:test9 + image: persistenceagent:test6 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent command: @@ -44,8 +46,8 @@ spec: - "--logtostderr=true" - "--ttlSecondsAfterWorkflowFinish=86400" - "--numWorker=5" - - "--mlPipelineAPIServerName=ds-pipeline-testdsp9" - - "--namespace=testdsp9" + - "--mlPipelineAPIServerName=ds-pipeline-testdsp6" + - "--namespace=testdsp6" - "--mlPipelineServiceHttpPort=8888" - "--mlPipelineServiceGRPCPort=8887" livenessProbe: @@ -76,8 +78,8 @@ spec: volumeMounts: - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token name: persistenceagent-sa-token - subPath: ds-pipeline-persistenceagent-testdsp9-token - serviceAccountName: ds-pipeline-persistenceagent-testdsp9 + subPath: ds-pipeline-persistenceagent-testdsp6-token + serviceAccountName: ds-pipeline-persistenceagent-testdsp6 volumes: - name: persistenceagent-sa-token projected: @@ -85,5 +87,5 @@ spec: - serviceAccountToken: audience: pipelines.kubeflow.org 
expirationSeconds: 3600 - path: ds-pipeline-persistenceagent-testdsp9-token + path: ds-pipeline-persistenceagent-testdsp6-token defaultMode: 420 diff --git a/controllers/testdata/declarative/case_7/config.yaml b/controllers/testdata/declarative/case_7/config.yaml deleted file mode 100644 index 2a5f895c..00000000 --- a/controllers/testdata/declarative/case_7/config.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# When a minimal DSPA is deployed -Images: - ApiServer: api-server:test7 - Artifact: artifact-manager:test7 - PersistentAgent: persistenceagent:test7 - ScheduledWorkflow: scheduledworkflow:test7 - Cache: ubi-minimal:test7 - MoveResultsImage: busybox:test7 - MlPipelineUI: frontend:test7 - MariaDB: mariadb:test7 - Minio: minio:test7 - OAuthProxy: oauth-proxy:test7 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_7/deploy/cr.yaml b/controllers/testdata/declarative/case_7/deploy/cr.yaml deleted file mode 100644 index ac4aa327..00000000 --- a/controllers/testdata/declarative/case_7/deploy/cr.yaml +++ /dev/null @@ -1,93 +0,0 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: testdsp7 -spec: - dspVersion: v2 - podToPodTLS: false - apiServer: - deploy: true - image: api-server:test7 - applyTektonCustomResource: true - archiveLogs: false - artifactImage: artifact-manager:test7 - cacheImage: ubi-minimal:test7 - moveResultsImage: busybox:test7 - argoLauncherImage: argolauncherimage:test7 - argoDriverImage: argodriverimage:test7 - injectDefaultScript: true - stripEOF: true - enableOauth: true - enableSamplePipeline: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 125 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true - resources: - requests: - cpu: "1231m" - memory: "1Gi" - limits: - cpu: "2522m" - memory: "5Gi" - persistenceAgent: - deploy: true - image: persistenceagent:test7 - numWorkers: 5 - resources: - requests: - cpu: "1233m" - memory: "1Gi" - limits: - cpu: "2524m" - memory: "5Gi" - scheduledWorkflow: - deploy: true - image: scheduledworkflow:test7 - cronScheduleTimezone: EST - resources: - requests: - cpu: "1235m" - memory: "1Gi" - limits: - cpu: "2526m" - memory: "5Gi" - mlpipelineUI: - deploy: true - image: frontend:test7 - configMap: some-test-configmap - resources: - requests: - cpu: "1239m" - memory: "1Gi" - limits: - cpu: "2530m" - memory: "5Gi" - database: - mariaDB: - deploy: true - image: mariadb:test7 - username: testuser - pipelineDBName: randomDBName - pvcSize: 32Gi - resources: - requests: - cpu: "1212m" - memory: "1Gi" - limits: - cpu: "2554m" - memory: "5Gi" - objectStorage: - minio: - deploy: true - image: minio:test7 - bucket: mlpipeline - pvcSize: 40Gi - resources: - requests: - cpu: "1334m" - memory: "1Gi" - limits: - cpu: "2535m" - memory: "5Gi" diff --git a/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml deleted file mode 100644 index 2458af03..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml +++ /dev/null @@ -1,210 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - 
template: - metadata: - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: POD_NAMESPACE - value: "default" - - name: DBCONFIG_USER - value: "testuser" - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: DBCONFIG_DBNAME - value: "randomDBName" - - name: DBCONFIG_HOST - value: "mariadb-testdsp7.default.svc.cluster.local" - - name: DBCONFIG_PORT - value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "125" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST - value: "ds-pipeline-visualizationserver" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT - value: "8888" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRET - value: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY - value: "accesskey" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY - value: "secretkey" - - name: DEFAULTPIPELINERUNNERSERVICEACCOUNT - value: "pipeline-runner-testdsp7" - - name: OBJECTSTORECONFIG_BUCKETNAME - value: "mlpipeline" - - name: OBJECTSTORECONFIG_ACCESSKEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_SECRETACCESSKEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_SECURE - value: "false" - - name: MINIO_SERVICE_SERVICE_HOST - value: "minio-testdsp7.default.svc.cluster.local" - - name: MINIO_SERVICE_SERVICE_PORT - value: "9000" - - name: V2_LAUNCHER_IMAGE - value: "argolauncherimage:test7" - - name: V2_DRIVER_IMAGE - value: "argodriverimage:test7" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp7.default.svc.cluster.local" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "8080" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp7.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - - name: SIGNED_URL_EXPIRY_TIME_SECONDS - value: "60" - - name: EXECUTIONTYPE - value: Workflow - - name: DB_DRIVER_NAME - value: mysql - - name: DBCONFIG_MYSQLCONFIG_USER - value: testuser - - name: DBCONFIG_MYSQLCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: DBCONFIG_MYSQLCONFIG_DBNAME - value: "randomDBName" - - name: DBCONFIG_MYSQLCONFIG_HOST - value: "mariadb-testdsp7.default.svc.cluster.local" - - name: DBCONFIG_MYSQLCONFIG_PORT - value: "3306" - image: api-server:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-api-server - command: ['/bin/apiserver'] - args: - - --config=/config - - -logtostderr=true - - --sampleconfig=/config/sample_config.json - ports: - - containerPort: 8888 - name: http - protocol: TCP - - containerPort: 8887 - name: grpc - protocol: TCP - livenessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - readinessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - resources: - requests: - cpu: 1231m - memory: 1Gi - limits: - cpu: 2522m - memory: 5Gi - volumeMounts: - - name: server-config - mountPath: /config/config.json - subPath: config.json - - mountPath: /config/sample_config.json - name: sample-config - subPath: sample_config.json - - mountPath: /samples/ - name: sample-pipeline - - name: oauth-proxy - args: - - --https-address=:8443 - - 
--provider=openshift - - --openshift-service-account=ds-pipeline-testdsp7 - - --upstream=http://localhost:8888 - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp7","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp7","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test7 - ports: - - containerPort: 8443 - name: oauth - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - volumes: - - name: proxy-tls - secret: - secretName: ds-pipelines-proxy-tls-testdsp7 - defaultMode: 420 - - name: server-config - configMap: - defaultMode: 420 - name: ds-pipeline-server-config-testdsp7 - - configMap: - defaultMode: 420 - name: sample-config-testdsp7 - name: sample-config - - configMap: - defaultMode: 420 - name: sample-pipeline-testdsp7 - name: sample-pipeline - serviceAccountName: ds-pipeline-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index a1550c01..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp7.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml deleted file mode 100644 index e982d3b3..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml +++ /dev/null @@ -1,79 +0,0 @@ ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: mariadb-testdsp7 - namespace: default - labels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - strategy: - type: Recreate # Need this since backing PVC is ReadWriteOnce, which creates resource lock condition in default Rolling strategy - selector: - matchLabels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - labels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - name: mariadb - image: mariadb:test7 - ports: - - containerPort: 3306 - protocol: TCP - readinessProbe: - exec: - command: - - /bin/sh - - "-i" - - "-c" - - >- - MYSQL_PWD=$MYSQL_PASSWORD mysql -h 127.0.0.1 -u $MYSQL_USER -D - $MYSQL_DATABASE -e 'SELECT 1' - failureThreshold: 3 - initialDelaySeconds: 5 - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 1 - livenessProbe: - failureThreshold: 3 - initialDelaySeconds: 30 - periodSeconds: 10 - successThreshold: 1 - tcpSocket: - port: 3306 - timeoutSeconds: 1 - env: - - name: MYSQL_USER - value: "testuser" - - name: MYSQL_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: MYSQL_DATABASE - value: "randomDBName" - - name: MYSQL_ALLOW_EMPTY_PASSWORD - value: "true" - resources: - requests: - cpu: 1212m - memory: 1Gi - limits: - cpu: 2554m - memory: 5Gi - volumeMounts: - - name: mariadb-persistent-storage - mountPath: /var/lib/mysql - volumes: - - name: mariadb-persistent-storage - persistentVolumeClaim: - claimName: mariadb-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml deleted file mode 100644 index da4a1627..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml +++ /dev/null @@ -1,75 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: minio-testdsp7 - namespace: default - labels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - strategy: - type: Recreate - template: - metadata: - labels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - args: - - server - - /data - env: - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - image: minio:test7 - name: minio - ports: - - containerPort: 9000 - protocol: TCP 
- livenessProbe: - tcpSocket: - port: 9000 - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - tcpSocket: - port: 9000 - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - requests: - cpu: 1334m - memory: 1Gi - limits: - cpu: 2535m - memory: 5Gi - volumeMounts: - - mountPath: /data - name: data - subPath: minio - volumes: - - name: data - persistentVolumeClaim: - claimName: minio-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml deleted file mode 100644 index abf2d040..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml +++ /dev/null @@ -1,171 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-ui-testdsp7 - namespace: default - labels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH - value: /etc/config/viewer-pod-template.json - - name: MINIO_NAMESPACE - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: ALLOW_CUSTOM_VISUALIZATIONS - value: "true" - - name: ARGO_ARCHIVE_LOGS - value: "true" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp7.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT - value: '8888' - - name: METADATA_ENVOY_SERVICE_SERVICE_HOST - value: ds-pipeline-md-testdsp7 - - name: METADATA_ENVOY_SERVICE_SERVICE_PORT - value: "9090" - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: AWS_REGION - value: "minio" - - name: AWS_S3_ENDPOINT - value: "minio-testdsp7.default.svc.cluster.local" - - name: AWS_SSL - value: "false" - - name: DISABLE_GKE_METADATA - value: 'true' - image: frontend:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - livenessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - name: ds-pipeline-ui - ports: - - containerPort: 3000 - protocol: TCP - readinessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1239m - memory: 1Gi - limits: - cpu: 2530m - memory: 5Gi - volumeMounts: - - mountPath: /etc/config - name: config-volume - readOnly: true - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-ui-testdsp7 - - 
--upstream=http://localhost:3000 - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp7","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp7","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test7 - ports: - - containerPort: 8443 - name: https - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - serviceAccountName: ds-pipeline-ui-testdsp7 - volumes: - - configMap: - name: some-test-configmap - defaultMode: 420 - name: config-volume - - name: proxy-tls - secret: - secretName: ds-pipelines-ui-proxy-tls-testdsp7 - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml deleted file mode 100644 index a5f4e31f..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml +++ /dev/null @@ -1,89 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-persistenceagent-testdsp7 - namespace: default - labels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: TTL_SECONDS_AFTER_WORKFLOW_FINISH - value: "86400" - - name: NUM_WORKERS - value: "2" - - name: KUBEFLOW_USERID_HEADER - value: kubeflow-userid - - name: KUBEFLOW_USERID_PREFIX - value: "" - - name: EXECUTIONTYPE - value: Workflow - image: persistenceagent:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-persistenceagent - command: - - persistence_agent - - "--logtostderr=true" - - "--ttlSecondsAfterWorkflowFinish=86400" - - "--numWorker=5" - - "--mlPipelineAPIServerName=ds-pipeline-testdsp7" - - "--namespace=testdsp7" - - "--mlPipelineServiceHttpPort=8888" - - "--mlPipelineServiceGRPCPort=8887" - livenessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1233m - memory: 1Gi - limits: - cpu: 2524m - memory: 5Gi - volumeMounts: - - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token - name: 
persistenceagent-sa-token - subPath: ds-pipeline-persistenceagent-testdsp7-token - serviceAccountName: ds-pipeline-persistenceagent-testdsp7 - volumes: - - name: persistenceagent-sa-token - projected: - sources: - - serviceAccountToken: - audience: pipelines.kubeflow.org - expirationSeconds: 3600 - path: ds-pipeline-persistenceagent-testdsp7-token - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl b/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl deleted file mode 100644 index c7bfcafe..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl +++ /dev/null @@ -1,17 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-config-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines -data: - sample_config.json: |- - [ - { - "name": "[Demo] iris-training", - "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", - "file": "/samples/iris-pipeline-compiled.yaml" - } - ] diff --git a/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl b/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl deleted file mode 100644 index 832cedfc..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl +++ /dev/null @@ -1,254 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-pipeline-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines -data: - iris-pipeline-compiled.yaml: |- - # PIPELINE DEFINITION - # Name: iris-training-pipeline - # Inputs: - # neighbors: int [Default: 3.0] - # standard_scaler: bool [Default: True] - # Outputs: - # train-model-metrics: system.ClassificationMetrics - components: - comp-create-dataset: - executorLabel: exec-create-dataset - outputDefinitions: - artifacts: - iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-normalize-dataset: - executorLabel: exec-normalize-dataset - inputDefinitions: - artifacts: - input_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - standard_scaler: - parameterType: BOOLEAN - outputDefinitions: - artifacts: - normalized_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-train-model: - executorLabel: exec-train-model - inputDefinitions: - artifacts: - normalized_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - n_neighbors: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - metrics: - artifactType: - schemaTitle: system.ClassificationMetrics - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - deploymentSpec: - executors: - exec-create-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - create_dataset - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef create_dataset(iris_dataset: Output[Dataset]):\n import pandas\ - \ as pd\n\n csv_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data'\n\ - \ col_names = [\n 'Sepal_Length', 'Sepal_Width', 'Petal_Length',\ - \ 'Petal_Width', 'Labels'\n ]\n df = pd.read_csv(csv_url, names=col_names)\n\ - \n with open(iris_dataset.path, 'w') as f:\n df.to_csv(f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - exec-normalize-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - normalize_dataset - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef normalize_dataset(\n input_iris_dataset: Input[Dataset],\n\ - \ normalized_iris_dataset: Output[Dataset],\n standard_scaler: bool,\n\ - ):\n\n import pandas as pd\n from sklearn.preprocessing import MinMaxScaler\n\ - \ from sklearn.preprocessing import StandardScaler\n\n with open(input_iris_dataset.path)\ - \ as f:\n df = pd.read_csv(f)\n labels = df.pop('Labels')\n\n\ - \ scaler = StandardScaler() if standard_scaler else MinMaxScaler()\n\n\ - \ df = pd.DataFrame(scaler.fit_transform(df))\n df['Labels'] = labels\n\ - \ normalized_iris_dataset.metadata['state'] = \"Normalized\"\n with\ - \ open(normalized_iris_dataset.path, 'w') as f:\n df.to_csv(f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - exec-train-model: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - train_model - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef train_model(\n normalized_iris_dataset: Input[Dataset],\n\ - \ model: Output[Model],\n metrics: Output[ClassificationMetrics],\n\ - \ n_neighbors: int,\n):\n import pickle\n\n import pandas as pd\n\ - \ from sklearn.model_selection import train_test_split\n from sklearn.neighbors\ - \ import KNeighborsClassifier\n\n from sklearn.metrics import roc_curve\n\ - \ from sklearn.model_selection import train_test_split, cross_val_predict\n\ - \ from sklearn.metrics import confusion_matrix\n\n\n with open(normalized_iris_dataset.path)\ - \ as f:\n df = pd.read_csv(f)\n\n y = df.pop('Labels')\n X\ - \ = df\n\n X_train, X_test, y_train, y_test = train_test_split(X, y,\ - \ random_state=0)\n\n clf = KNeighborsClassifier(n_neighbors=n_neighbors)\n\ - \ clf.fit(X_train, y_train)\n\n predictions = cross_val_predict(\n\ - \ clf, X_train, y_train, cv=3)\n metrics.log_confusion_matrix(\n\ - \ ['Iris-Setosa', 'Iris-Versicolour', 'Iris-Virginica'],\n \ - \ confusion_matrix(\n y_train,\n predictions).tolist()\ - \ # .tolist() to convert np array to list.\n )\n\n model.metadata['framework']\ - \ = 'scikit-learn'\n with open(model.path, 'wb') as f:\n pickle.dump(clf,\ - \ f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - pipelineInfo: - name: iris-training-pipeline - root: - dag: - outputs: - artifacts: - train-model-metrics: - artifactSelectors: - - outputArtifactKey: metrics - producerSubtask: train-model - tasks: - create-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-create-dataset - taskInfo: - name: create-dataset - normalize-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-normalize-dataset - dependentTasks: - - create-dataset - inputs: - artifacts: - input_iris_dataset: - taskOutputArtifact: - outputArtifactKey: iris_dataset - producerTask: create-dataset - parameters: - standard_scaler: - runtimeValue: - constant: true - taskInfo: - name: normalize-dataset - train-model: - cachingOptions: - enableCache: true - componentRef: - name: comp-train-model - dependentTasks: - - normalize-dataset - inputs: - artifacts: - normalized_iris_dataset: - taskOutputArtifact: - outputArtifactKey: normalized_iris_dataset - producerTask: normalize-dataset - parameters: - n_neighbors: - componentInputParameter: neighbors - taskInfo: - name: train-model - inputDefinitions: - parameters: - neighbors: - defaultValue: 3.0 - isOptional: true - parameterType: NUMBER_INTEGER - standard_scaler: - defaultValue: true - isOptional: true - parameterType: BOOLEAN - outputDefinitions: - artifacts: - train-model-metrics: - artifactType: - schemaTitle: system.ClassificationMetrics - schemaVersion: 0.0.1 - schemaVersion: 2.1.0 - sdkVersion: kfp-2.7.0 diff --git 
a/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml deleted file mode 100644 index 2a0d4fd0..00000000 --- a/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml +++ /dev/null @@ -1,65 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-scheduledworkflow-testdsp7 - namespace: default - labels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: CRON_SCHEDULE_TIMEZONE - value: "EST" - - name: EXECUTIONTYPE - value: PipelineRun - image: scheduledworkflow:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-scheduledworkflow - command: - - controller - - "--logtostderr=true" - - "--namespace=default" - livenessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1235m - memory: 1Gi - limits: - cpu: 2526m - memory: 5Gi - serviceAccountName: ds-pipeline-scheduledworkflow-testdsp7 diff --git a/controllers/testdata/declarative/case_8/config.yaml b/controllers/testdata/declarative/case_8/config.yaml deleted file mode 100644 index c868b402..00000000 --- a/controllers/testdata/declarative/case_8/config.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# When a minimal DSPA is deployed -Images: - ApiServer: api-server:test8 - Artifact: artifact-manager:test8 - PersistentAgent: persistenceagent:test8 - ScheduledWorkflow: scheduledworkflow:test8 - Cache: ubi-minimal:test8 - MoveResultsImage: busybox:test8 - MlPipelineUI: frontend:test8 - MariaDB: mariadb:test8 - Minio: minio:test8 - OAuthProxy: oauth-proxy:test8 -ImagesV2: - Argo: - ApiServer: api-server:test8 - ArgoLauncherImage: argolauncherimage:test8 - ArgoDriverImage: argodriverimage:test8 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml deleted file mode 100644 index 66a00206..00000000 --- a/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml +++ /dev/null @@ -1,219 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-testdsp8 - namespace: default - labels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 -spec: - selector: - matchLabels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - template: - metadata: - labels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - spec: - containers: - - env: - - name: POD_NAMESPACE - value: "default" - - name: DBCONFIG_USER - value: "mlpipeline" - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - 
name: "ds-pipeline-db-testdsp8" - - name: DBCONFIG_DBNAME - value: "mlpipeline" - - name: DBCONFIG_HOST - value: "mariadb-testdsp8.default.svc.cluster.local" - - name: DBCONFIG_PORT - value: "3306" - - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME - value: dsp-trusted-ca-testdsp8 - - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY - value: testcabundleconfigmapkey8.crt - - name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH - value: /dsp-custom-certs - - name: SSL_CERT_DIR - value: "/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "120" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST - value: "ds-pipeline-visualizationserver" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT - value: "8888" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRET - value: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY - value: "accesskey" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY - value: "secretkey" - - name: DEFAULTPIPELINERUNNERSERVICEACCOUNT - value: "pipeline-runner-testdsp8" - - name: OBJECTSTORECONFIG_BUCKETNAME - value: "mlpipeline" - - name: OBJECTSTORECONFIG_ACCESSKEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_SECRETACCESSKEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_SECURE - value: "false" - - name: MINIO_SERVICE_SERVICE_HOST - value: "minio-testdsp8.default.svc.cluster.local" - - name: MINIO_SERVICE_SERVICE_PORT - value: "9000" - - name: V2_LAUNCHER_IMAGE - value: "argolauncherimage:test8" - - name: V2_DRIVER_IMAGE - value: "argodriverimage:test8" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp8.default.svc.cluster.local" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "8080" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp8.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - - name: SIGNED_URL_EXPIRY_TIME_SECONDS - value: "60" - - name: ML_PIPELINE_TLS_ENABLED - value: "true" - - name: METADATA_TLS_ENABLED - value: "true" - - name: EXECUTIONTYPE - value: Workflow - - name: DB_DRIVER_NAME - value: mysql - - name: DBCONFIG_MYSQLCONFIG_USER - value: mlpipeline - - name: DBCONFIG_MYSQLCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp8" - - name: DBCONFIG_MYSQLCONFIG_DBNAME - value: "mlpipeline" - - name: DBCONFIG_MYSQLCONFIG_HOST - value: "mariadb-testdsp8.default.svc.cluster.local" - - name: DBCONFIG_MYSQLCONFIG_PORT - value: "3306" - image: api-server:test8 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-api-server - command: ['/bin/apiserver'] - args: - - --config=/config - - -logtostderr=true - - --tlsCertPath=/etc/tls/private/tls.crt - - --tlsCertKeyPath=/etc/tls/private/tls.key - ports: - - containerPort: 8888 - name: http - protocol: TCP - - containerPort: 8887 - name: grpc - protocol: TCP - livenessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - readinessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - resources: - requests: - cpu: 250m - memory: 500Mi - limits: - cpu: 500m - memory: 1Gi - volumeMounts: - - name: server-config - mountPath: /config/config.json - subPath: config.json - - mountPath: /etc/tls/private - name: proxy-tls 
- - name: ca-bundle - mountPath: /dsp-custom-certs - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-testdsp8 - - --upstream=https://ds-pipeline-testdsp8.default.svc.cluster.local:8888 - - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp8","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp8","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test8 - ports: - - containerPort: 8443 - name: oauth - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - volumes: - - name: proxy-tls - secret: - secretName: ds-pipelines-proxy-tls-testdsp8 - defaultMode: 420 - - name: server-config - configMap: - name: ds-pipeline-server-config-testdsp8 - defaultMode: 420 - - name: ca-bundle - configMap: - name: dsp-trusted-ca-testdsp8 - defaultMode: 420 - serviceAccountName: ds-pipeline-testdsp8 diff --git a/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index 7bfa6c0d..00000000 --- a/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp8.default.svc.cluster.local:9000 --ca-bundle /dsp-custom-certs/testcabundleconfigmapkey8.crt cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp8 - namespace: default - labels: - app: ds-pipeline-testdsp5 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml deleted file mode 100644 index 9a0b5a11..00000000 --- a/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml +++ /dev/null @@ -1,97 +0,0 @@ ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: mariadb-testdsp8 - namespace: default - labels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 -spec: - strategy: - type: Recreate # Need this since backing PVC is ReadWriteOnce, which creates resource lock condition in default Rolling strategy - selector: - matchLabels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - template: - metadata: - labels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - spec: - containers: - - name: mariadb - image: mariadb:test8 - ports: - - containerPort: 3306 - protocol: TCP - readinessProbe: - exec: - command: - - /bin/sh - - "-i" - - "-c" - - >- - MYSQL_PWD=$MYSQL_PASSWORD mysql -h 127.0.0.1 -u $MYSQL_USER -D - $MYSQL_DATABASE -e 'SELECT 1' - failureThreshold: 3 - initialDelaySeconds: 5 - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 1 - livenessProbe: - failureThreshold: 3 - initialDelaySeconds: 30 - periodSeconds: 10 - successThreshold: 1 - tcpSocket: - port: 3306 - timeoutSeconds: 1 - env: - - name: MYSQL_USER - value: "mlpipeline" - - name: MYSQL_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp8" - - name: MYSQL_DATABASE - value: "mlpipeline" - - name: MYSQL_ALLOW_EMPTY_PASSWORD - value: "true" - resources: - requests: - cpu: 300m - memory: 800Mi - limits: - cpu: "1" - memory: 1Gi - volumeMounts: - - name: mariadb-persistent-storage - mountPath: /var/lib/mysql - - name: mariadb-tls - mountPath: /.mariadb/certs - - name: mariadb-tls-config - mountPath: /etc/my.cnf.d/mariadb-tls-config.cnf - subPath: mariadb-tls-config.cnf - volumes: - - name: mariadb-persistent-storage - persistentVolumeClaim: - claimName: mariadb-testdsp8 - - name: mariadb-tls - secret: - secretName: ds-pipelines-mariadb-tls-testdsp8 - items: - - key: tls.crt - path: tls.crt - - key: tls.key - path: tls.key - defaultMode: 420 - - name: mariadb-tls-config - configMap: - name: ds-pipelines-mariadb-tls-config-testdsp8 - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_9/config.yaml b/controllers/testdata/declarative/case_9/config.yaml deleted file mode 100644 index dbcd4d0d..00000000 --- a/controllers/testdata/declarative/case_9/config.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Images: - MlPipelineUI: frontend:test9 - MariaDB: mariadb:test9 - Minio: minio:test9 - OAuthProxy: oauth-proxy:test9 -ImagesV2: - Argo: - ApiServer: api-server:test9 - ArgoLauncherImage: argolauncherimage:test9 - ArgoDriverImage: argodriverimage:test9 - PersistentAgent: persistenceagent:test9 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testutil/equalities.go 
b/controllers/testutil/equalities.go index 336922e3..bc0a9a55 100644 --- a/controllers/testutil/equalities.go +++ b/controllers/testutil/equalities.go @@ -17,6 +17,7 @@ limitations under the License. package testutil import ( + "errors" "fmt" "strings" @@ -168,7 +169,7 @@ func deploymentsAreEqual(expected, actual *unstructured.Unstructured) (bool, err } if len(expectedDep.Spec.Template.Spec.Containers) != len(actualDep.Spec.Template.Spec.Containers) { - return false, notEqualMsg("Containers") + return false, notEqualMsg("Container lengths") } for i := range expectedDep.Spec.Template.Spec.Containers { expectedContainer := expectedDep.Spec.Template.Spec.Containers[i] @@ -224,5 +225,5 @@ func notDeeplyEqualMsg(value string, diff []string) error { for _, d := range diff { errStr += fmt.Sprintln("\t" + d) } - return fmt.Errorf(errStr) + return errors.New(errStr) } diff --git a/controllers/testutil/util.go b/controllers/testutil/util.go index f5d1c962..d55998a3 100644 --- a/controllers/testutil/util.go +++ b/controllers/testutil/util.go @@ -19,7 +19,7 @@ package testutil import ( "context" "fmt" - dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "os" @@ -207,22 +207,23 @@ func GenerateDeclarativeTestCases(t *testing.T) []Case { return testcases } -func CreateEmptyDSPA() *dspav1alpha1.DataSciencePipelinesApplication { - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - APIServer: &dspav1alpha1.APIServer{Deploy: false}, - MLMD: &dspav1alpha1.MLMD{Deploy: false}, - PersistenceAgent: &dspav1alpha1.PersistenceAgent{Deploy: false}, - ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{Deploy: false}, - MlPipelineUI: &dspav1alpha1.MlPipelineUI{ +func CreateEmptyDSPA() *dspav1.DataSciencePipelinesApplication { + dspa := &dspav1.DataSciencePipelinesApplication{ + Spec: dspav1.DSPASpec{ + PodToPodTLS: boolPtr(false), + APIServer: &dspav1.APIServer{Deploy: false}, + MLMD: &dspav1.MLMD{Deploy: true}, // MLMD is required + PersistenceAgent: &dspav1.PersistenceAgent{Deploy: false}, + ScheduledWorkflow: &dspav1.ScheduledWorkflow{Deploy: false}, + MlPipelineUI: &dspav1.MlPipelineUI{ Deploy: false, Image: "testimage-MlPipelineUI:test", }, - WorkflowController: &dspav1alpha1.WorkflowController{Deploy: false}, - Database: &dspav1alpha1.Database{DisableHealthCheck: false, MariaDB: &dspav1alpha1.MariaDB{Deploy: false}}, - ObjectStorage: &dspav1alpha1.ObjectStorage{ + WorkflowController: &dspav1.WorkflowController{Deploy: false}, + Database: &dspav1.Database{DisableHealthCheck: false, MariaDB: &dspav1.MariaDB{Deploy: false}}, + ObjectStorage: &dspav1.ObjectStorage{ DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ + Minio: &dspav1.Minio{ Deploy: false, Image: "testimage-Minio:test", }, @@ -234,11 +235,11 @@ func CreateEmptyDSPA() *dspav1alpha1.DataSciencePipelinesApplication { return dspa } -func CreateDSPAWithAPIServerCABundle(key string, cfgmapName string) *dspav1alpha1.DataSciencePipelinesApplication { +func CreateDSPAWithAPIServerCABundle(key string, cfgmapName string) *dspav1.DataSciencePipelinesApplication { dspa := CreateEmptyDSPA() - dspa.Spec.APIServer = &dspav1alpha1.APIServer{ + dspa.Spec.APIServer = &dspav1.APIServer{ Deploy: true, - CABundle: &dspav1alpha1.CABundle{ + CABundle: &dspav1.CABundle{ ConfigMapKey: key, ConfigMapName: cfgmapName, }, @@ -246,10 +247,10 @@ func 
CreateDSPAWithAPIServerCABundle(key string, cfgmapName string) *dspav1alpha return dspa } -func CreateDSPAWithAPIServerPodtoPodTlsEnabled() *dspav1alpha1.DataSciencePipelinesApplication { +func CreateDSPAWithAPIServerPodtoPodTlsEnabled() *dspav1.DataSciencePipelinesApplication { dspa := CreateEmptyDSPA() dspa.Spec.DSPVersion = "v2" - dspa.Spec.APIServer = &dspav1alpha1.APIServer{ + dspa.Spec.APIServer = &dspav1.APIServer{ Deploy: true, } dspa.Spec.MLMD.Deploy = true @@ -262,14 +263,14 @@ func boolPtr(b bool) *bool { return &b } -func CreateDSPAWithCustomKfpLauncherConfigMap(configMapName string) *dspav1alpha1.DataSciencePipelinesApplication { +func CreateDSPAWithCustomKfpLauncherConfigMap(configMapName string) *dspav1.DataSciencePipelinesApplication { dspa := CreateEmptyDSPA() dspa.Spec.DSPVersion = "v2" // required, or we get an error because OCP certs aren't found dspa.Spec.PodToPodTLS = boolPtr(false) // required, or we get an error because this is required in v2 dspa.Spec.MLMD.Deploy = true - dspa.Spec.APIServer = &dspav1alpha1.APIServer{ + dspa.Spec.APIServer = &dspav1.APIServer{ Deploy: true, CustomKfpLauncherConfigMap: configMapName, } diff --git a/controllers/util/util.go b/controllers/util/util.go index 08276d52..469cbbc6 100644 --- a/controllers/util/util.go +++ b/controllers/util/util.go @@ -18,6 +18,9 @@ package util import ( "fmt" + mf "github.com/manifestival/manifestival" + dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "os" "path/filepath" @@ -211,3 +214,69 @@ func GetSecret(ctx context.Context, secretName, ns string, client client.Client) } return secret, nil } + +// DSPAWithSupportedDSPVersion returns true if the DSPA's DSP version is supported, and false otherwise. +// Note that this checks the DSPA's .spec.dspVersion field, not to be confused with the resource's apiVersion.
+func DSPAWithSupportedDSPVersion(dspa *dspav1.DataSciencePipelinesApplication) bool { + isSupported := false + for _, supportedVersion := range config.GetSupportedDSPAVersions() { + if dspa.Spec.DSPVersion == supportedVersion { + isSupported = true + } + } + return isSupported +} + +// HasSupportedDSPVersionLabel returns true if labels (the labels of a k8s resource) +// contain the DSPVersionk8sLabel label AND its value belongs to a supported DSP version. +func HasSupportedDSPVersionLabel(labels map[string]string) bool { + version, ok := labels[config.DSPVersionk8sLabel] + if !ok { + return false + } + for _, supportedVersion := range config.GetSupportedDSPAVersions() { + if version == supportedVersion { + return true + } + } + return false +} + +func AddLabelTransformer(labelKey, labelValue string) mf.Transformer { + return func(mfObj *unstructured.Unstructured) error { + // Get existing labels + labels := mfObj.GetLabels() + if labels == nil { + labels = make(map[string]string) + } + // Add or override the label + labels[labelKey] = labelValue + // Set the labels back on the object + mfObj.SetLabels(labels) + return nil + } +} + +func AddDeploymentPodLabelTransformer(labelKey, labelValue string) mf.Transformer { + return func(mfObj *unstructured.Unstructured) error { + // Check if the resource is a Deployment + if mfObj.GetKind() == "Deployment" { + // Get the spec.template.metadata.labels (which are the Pod labels) + podLabels, found, err := unstructured.NestedStringMap(mfObj.Object, "spec", "template", "metadata", "labels") + if err != nil { + return err + } + if !found { + podLabels = make(map[string]string) + } + // Add or override the pod label + podLabels[labelKey] = labelValue + // Set the updated labels back to spec.template.metadata.labels + err = unstructured.SetNestedStringMap(mfObj.Object, podLabels, "spec", "template", "metadata", "labels") + if err != nil { + return fmt.Errorf("failed to set pod labels: %w", err) + } + } + return nil + } +} diff --git a/controllers/workflow_controller.go b/controllers/workflow_controller.go index 6ff5227c..2d9a2c4b 100644 --- a/controllers/workflow_controller.go +++ b/controllers/workflow_controller.go @@ -17,12 +17,12 @@ limitations under the License.
diff --git a/controllers/workflow_controller.go b/controllers/workflow_controller.go
index 6ff5227c..2d9a2c4b 100644
--- a/controllers/workflow_controller.go
+++ b/controllers/workflow_controller.go
@@ -17,12 +17,12 @@ limitations under the License.
 package controllers
 
 import (
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 )
 
 var workflowControllerTemplatesDir = "workflow-controller"
 
-func (r *DSPAReconciler) ReconcileWorkflowController(dsp *dspav1alpha1.DataSciencePipelinesApplication,
+func (r *DSPAReconciler) ReconcileWorkflowController(dsp *dspav1.DataSciencePipelinesApplication,
 	params *DSPAParams) error {
 
 	log := r.Log.WithValues("namespace", dsp.Namespace).WithValues("dspa_name", dsp.Name)
diff --git a/controllers/workflow_controller_test.go b/controllers/workflow_controller_test.go
index f8b0abd5..d71f16ea 100644
--- a/controllers/workflow_controller_test.go
+++ b/controllers/workflow_controller_test.go
@@ -20,7 +20,7 @@ package controllers
 import (
 	"testing"
 
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/stretchr/testify/assert"
 	appsv1 "k8s.io/api/apps/v1"
 )
@@ -31,21 +31,23 @@ func TestDeployWorkflowController(t *testing.T) {
 	expectedWorkflowControllerName := "ds-pipeline-workflow-controller-testdspa"
 
 	// Construct DSPASpec with deployed WorkflowController
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			APIServer: &dspav1alpha1.APIServer{},
-			WorkflowController: &dspav1alpha1.WorkflowController{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			PodToPodTLS: boolPtr(false),
+			APIServer:   &dspav1.APIServer{},
+			WorkflowController: &dspav1.WorkflowController{
 				Deploy: true,
 			},
-			Database: &dspav1alpha1.Database{
+			Database: &dspav1.Database{
 				DisableHealthCheck: false,
-				MariaDB: &dspav1alpha1.MariaDB{
+				MariaDB: &dspav1.MariaDB{
 					Deploy: true,
 				},
 			},
-			ObjectStorage: &dspav1alpha1.ObjectStorage{
+			MLMD: &dspav1.MLMD{Deploy: true},
+			ObjectStorage: &dspav1.ObjectStorage{
 				DisableHealthCheck: false,
-				Minio: &dspav1alpha1.Minio{
+				Minio: &dspav1.Minio{
 					Deploy: false,
 					Image:  "someimage",
 				},
@@ -86,9 +88,9 @@ func TestDontDeployWorkflowController(t *testing.T) {
 	expectedWorkflowControllerName := "ds-pipeline-workflow-controller-testdspa"
 
 	// Construct DSPASpec with non-deployed WorkflowController
-	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
-		Spec: dspav1alpha1.DSPASpec{
-			WorkflowController: &dspav1alpha1.WorkflowController{
+	dspa := &dspav1.DataSciencePipelinesApplication{
+		Spec: dspav1.DSPASpec{
+			WorkflowController: &dspav1.WorkflowController{
 				Deploy: false,
 			},
 		},
diff --git a/main.go b/main.go
index 324b1225..d2a1098d 100644
--- a/main.go
+++ b/main.go
@@ -27,7 +27,7 @@ import (
 	"time"
 
 	"github.com/fsnotify/fsnotify"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	"github.com/opendatahub-io/data-science-pipelines-operator/controllers"
 	buildv1 "github.com/openshift/api/build/v1"
 	imagev1 "github.com/openshift/api/image/v1"
@@ -56,7 +56,7 @@ func init() {
 	utilruntime.Must(imagev1.AddToScheme(scheme))
 	utilruntime.Must(routev1.AddToScheme(scheme))
 
-	utilruntime.Must(dspav1alpha1.AddToScheme(scheme))
+	utilruntime.Must(dspav1.AddToScheme(scheme))
 	//+kubebuilder:scaffold:scheme
 
 	controllers.InitMetrics()
@@ -81,7 +81,7 @@ func initConfig(configPath string) error {
 
 	for _, c := range config.GetConfigRequiredFields() {
 		if !viper.IsSet(c) {
-			return fmt.Errorf(fmt.Sprintf("Missing required field in config: %s", c))
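+			// fmt.Errorf formats the message itself; wrapping it in fmt.Sprintf was
+			// redundant and trips printf-style vet checks (non-constant format string).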
+			return fmt.Errorf("missing required field in config: %s", c)
 		}
 	}
diff --git a/scripts/release/params.py b/scripts/release/params.py
index 82d83c5a..973a412e 100644
--- a/scripts/release/params.py
+++ b/scripts/release/params.py
@@ -2,50 +2,37 @@
 
 import requests
 
-V1_TAG = "v1.6.4"
 ODH_QUAY_ORG = "opendatahub"
 
-QUAY_REPOS_V1 = {
+QUAY_REPOS = {
+    "IMAGES_DSPO": "data-science-pipelines-operator",
     "IMAGES_APISERVER": "ds-pipelines-api-server",
-    "IMAGES_ARTIFACT": "ds-pipelines-artifact-manager",
-    "IMAGES_PERSISTENTAGENT": "ds-pipelines-persistenceagent",
+    "IMAGES_PERSISTENCEAGENT": "ds-pipelines-persistenceagent",
     "IMAGES_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow",
-    "IMAGES_MLMDENVOY": "ds-pipelines-metadata-envoy",
-    "IMAGES_MLMDGRPC": "ds-pipelines-metadata-grpc",
-    "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer",
-}
-
-QUAY_REPOS_V2 = {
-    "IMAGES_DSPO": "data-science-pipelines-operator",
-    "V2_LAUNCHER_IMAGE": "ds-pipelines-launcher",
-    "V2_DRIVER_IMAGE": "ds-pipelines-driver",
-    "IMAGESV2_ARGO_APISERVER": "ds-pipelines-api-server",
-    "IMAGESV2_ARGO_PERSISTENCEAGENT": "ds-pipelines-persistenceagent",
-    "IMAGESV2_ARGO_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow",
+    "IMAGES_LAUNCHER": "ds-pipelines-launcher",
+    "IMAGES_DRIVER": "ds-pipelines-driver",
 }
 
 TAGGED_REPOS = {
-    "IMAGESV2_ARGO_WORKFLOWCONTROLLER" : {
+    "IMAGES_ARGO_WORKFLOWCONTROLLER" : {
        "TAG": "3.3.10-upstream",
        "REPO": "ds-pipelines-argo-workflowcontroller"
     },
-    "IMAGESV2_ARGO_ARGOEXEC" : {
+    "IMAGES_ARGO_EXEC" : {
        "TAG": "3.3.10-upstream",
        "REPO": "ds-pipelines-argo-argoexec"
     },
-    "IMAGESV2_ARGO_MLMDGRPC": {
+    "IMAGES_MLMDGRPC": {
        "TAG": "main-94ae1e9",
        "REPO": "mlmd-grpc-server"
     },
 }
 
 STATIC_REPOS = {
-    "IMAGESV2_ARGO_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb",
+    "IMAGES_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb",
     "IMAGES_MARIADB": "registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1",
     "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33",
-    "IMAGES_CACHE": "registry.redhat.io/ubi8/ubi-minimal@sha256:5d2d4d4dbec470f8ffb679915e2a8ae25ad754cd9193fa966deee1ecb7b3ee00",
-    "IMAGES_MOVERESULTSIMAGE": "registry.redhat.io/ubi8/ubi-micro@sha256:396baed3d689157d96aa7d8988fdfea7eb36684c8335eb391cf1952573e689c1",
 }
 
 OTHER_OPTIONS = {
@@ -116,8 +103,7 @@ def generate_params(args):
 
     env_var_lines = []
 
-    fetch_images(QUAY_REPOS_V1, overrides, env_var_lines, quay_org, V1_TAG)
-    fetch_images(QUAY_REPOS_V2, overrides, env_var_lines, quay_org, tag)
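+    # The former v1/v2 split is gone: every repo in QUAY_REPOS is published
+    # under the same release tag, so one fetch_images pass covers them all.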
+    fetch_images(QUAY_REPOS, overrides, env_var_lines, quay_org, tag)
     for image in TAGGED_REPOS:
         target_repo = {image: TAGGED_REPOS[image]["REPO"]}
         target_tag = TAGGED_REPOS[image]["TAG"]
diff --git a/tests/resources/dspa-external-lite.yaml b/tests/resources/dspa-external-lite.yaml
index 6b064511..3a409e2d 100644
--- a/tests/resources/dspa-external-lite.yaml
+++ b/tests/resources/dspa-external-lite.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: dspa-ext
diff --git a/tests/resources/dspa-lite.yaml b/tests/resources/dspa-lite.yaml
index f09e480d..ff36007b 100644
--- a/tests/resources/dspa-lite.yaml
+++ b/tests/resources/dspa-lite.yaml
@@ -1,4 +1,4 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
+apiVersion: datasciencepipelinesapplications.opendatahub.io/v1
 kind: DataSciencePipelinesApplication
 metadata:
   name: test-dspa
diff --git a/tests/suite_test.go b/tests/suite_test.go
index a84b1eb6..1ca84b5c 100644
--- a/tests/suite_test.go
+++ b/tests/suite_test.go
@@ -35,7 +35,7 @@ import (
 	"github.com/go-logr/logr"
 	mfc "github.com/manifestival/controller-runtime-client"
 	mf "github.com/manifestival/manifestival"
-	dspav1alpha1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
+	dspav1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	testUtil "github.com/opendatahub-io/data-science-pipelines-operator/tests/util"
 	"github.com/stretchr/testify/suite"
 	"go.uber.org/zap/zapcore"
@@ -64,7 +64,7 @@ var (
 	skipDeploy           bool
 	skipCleanup          bool
 	PortforwardLocalPort int
-	DSPA                 *dspav1alpha1.DataSciencePipelinesApplication
+	DSPA                 *dspav1.DataSciencePipelinesApplication
 	forwarderResult      *forwarder.Result
 	endpointType         string
 )
@@ -102,7 +102,7 @@ type IntegrationTestSuite struct {
 	Clientmgr     ClientManager
 	Ctx           context.Context
 	DSPANamespace string
-	DSPA          *dspav1alpha1.DataSciencePipelinesApplication
+	DSPA          *dspav1.DataSciencePipelinesApplication
 }
 
 type testLogWriter struct {
@@ -161,7 +161,7 @@ func (suite *IntegrationTestSuite) SetupSuite() {
 
 	var err error
 
-	utilruntime.Must(dspav1alpha1.AddToScheme(scheme.Scheme))
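+	// Register the v1 API types so the suite's client can decode
+	// DataSciencePipelinesApplication objects.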
+	utilruntime.Must(dspav1.AddToScheme(scheme.Scheme))
 
 	clientmgr = ClientManager{}
 	cfg, err = clientcmd.BuildConfigFromFlags(k8sApiServerHost, kubeconfig)
diff --git a/tests/util/resources.go b/tests/util/resources.go
index 2edf46aa..c1899403 100644
--- a/tests/util/resources.go
+++ b/tests/util/resources.go
@@ -16,12 +16,12 @@ package testUtil
 import (
 	"context"
 	"fmt"
+	v1 "github.com/opendatahub-io/data-science-pipelines-operator/api/v1"
 	routev1 "github.com/openshift/api/route/v1"
 	"testing"
 	"time"
 
 	mf "github.com/manifestival/manifestival"
-	"github.com/opendatahub-io/data-science-pipelines-operator/api/v1alpha1"
 	"github.com/stretchr/testify/require"
 	appsv1 "k8s.io/api/apps/v1"
 	apierrs "k8s.io/apimachinery/pkg/api/errors"
@@ -32,7 +32,7 @@ import (
 )
 
 // DeployDSPA will deploy resource found in path by requesting
-func DeployDSPA(t *testing.T, ctx context.Context, client client.Client, deployDSPA *v1alpha1.DataSciencePipelinesApplication, dspaNS string, timeout, interval time.Duration) error {
+func DeployDSPA(t *testing.T, ctx context.Context, client client.Client, deployDSPA *v1.DataSciencePipelinesApplication, dspaNS string, timeout, interval time.Duration) error {
 	deployDSPA.ObjectMeta.Namespace = dspaNS
 	err := client.Create(ctx, deployDSPA)
 	require.NoError(t, err)
@@ -41,7 +41,7 @@ func DeployDSPA(t *testing.T, ctx context.Context, client client.Client, deployD
 		Name:      deployDSPA.ObjectMeta.Name,
 		Namespace: dspaNS,
 	}
-	fetchedDspa := &v1alpha1.DataSciencePipelinesApplication{}
+	fetchedDspa := &v1.DataSciencePipelinesApplication{}
 	return WaitFor(ctx, timeout, interval, func() (bool, error) {
 		err := client.Get(ctx, nsn, fetchedDspa)
 		if err != nil {
@@ -57,7 +57,7 @@ func WaitForDSPAReady(t *testing.T, ctx context.Context, client client.Client, d
 		Name:      dspaName,
 		Namespace: dspaNS,
 	}
-	dspa := &v1alpha1.DataSciencePipelinesApplication{}
+	dspa := &v1.DataSciencePipelinesApplication{}
 	err := WaitFor(ctx, timeout, interval, func() (bool, error) {
 		err := client.Get(ctx, nsn, dspa)
 		if err != nil {
@@ -106,7 +106,7 @@ func DeleteDSPA(t *testing.T, ctx context.Context, client client.Client, dspaNam
 		Name:      dspaName,
 		Namespace: dspaNS,
 	}
-	dspa := &v1alpha1.DataSciencePipelinesApplication{
+	dspa := &v1.DataSciencePipelinesApplication{
 		ObjectMeta: metav1.ObjectMeta{
 			Name:      dspaName,
 			Namespace: dspaNS,
@@ -141,8 +141,8 @@ func TestForSuccessfulDeployment(t *testing.T, ctx context.Context, namespace, d
 	require.True(t, deploymentAvailable)
 }
 
-func GetDSPAFromPath(t *testing.T, opts mf.Option, path string) *v1alpha1.DataSciencePipelinesApplication {
-	dspa := &v1alpha1.DataSciencePipelinesApplication{}
+func GetDSPAFromPath(t *testing.T, opts mf.Option, path string) *v1.DataSciencePipelinesApplication {
+	dspa := &v1.DataSciencePipelinesApplication{}
 	manifest, err := mf.NewManifest(path, opts)
 	require.NoError(t, err)
 	expected := &manifest.Resources()[0]
@@ -167,7 +167,7 @@ func WaitFor(ctx context.Context, timeout, interval time.Duration, conditionFunc
 	return fmt.Errorf("timed out waiting for condition")
 }
 
-func PrintConditions(ctx context.Context, dspa *v1alpha1.DataSciencePipelinesApplication, namespace string, client client.Client) string {
+func PrintConditions(ctx context.Context, dspa *v1.DataSciencePipelinesApplication, namespace string, client client.Client) string {
 	nsn := types.NamespacedName{
 		Name:      dspa.Name,
 		Namespace: namespace,