From ecf6ba52cf144030ece5b78d44fc11a29c4cf452 Mon Sep 17 00:00:00 2001
From: Humair Khan
Date: Mon, 30 Sep 2024 16:00:29 -0400
Subject: [PATCH] remove v1

Signed-off-by: Humair Khan
---
 api/v1alpha1/dspipeline_types.go | 57 +-
 api/v1alpha1/zz_generated.deepcopy.go | 10 -
 config/base/kustomization.yaml | 106 +---
 config/base/params.env | 27 +-
 config/configmaps/files/config.yaml | 25 +-
 ...b.io_datasciencepipelinesapplications.yaml | 122 +---
 .../default/artifact_script.yaml.tmpl | 44 --
 .../apiserver/default/deployment.yaml.tmpl | 43 +-
 .../default/role_ds-pipeline.yaml.tmpl | 28 -
 .../default/role_pipeline-runner.yaml.tmpl | 16 -
 .../apiserver/default/server-config.yaml.tmpl | 14 -
 .../sample-pipeline/sample-pipeline.yaml.tmpl | 557 ------------------
 .../common/{argo => default}/policy.yaml.tmpl | 0
 .../internal/common/tekton/policy.yaml.tmpl | 84 ---
 .../v1/metadata-writer.deployment.yaml.tmpl | 75 ---
 .../v1/metadata-writer.role.yaml.tmpl | 48 --
 .../v1/metadata-writer.rolebinding.yaml.tmpl | 15 -
 .../metadata-writer.serviceaccount.yaml.tmpl | 8 -
 config/internal/mlpipelines-ui/role.yaml.tmpl | 15 -
 .../persistence-agent/deployment.yaml.tmpl | 8 -
 .../internal/persistence-agent/role.yaml.tmpl | 14 -
 .../scheduled-workflow/deployment.yaml.tmpl | 2 -
 .../scheduled-workflow/role.yaml.tmpl | 28 -
 .../workflow-controller/configmap.yaml.tmpl | 2 +-
 config/manager/manager.yaml | 46 +-
 config/rbac/role.yaml | 12 -
 .../{v2 => }/custom-configs/db-creds.yaml | 0
 .../samples/{v2 => }/custom-configs/dspa.yaml | 0
 .../custom-configs/kustomization.yaml | 0
 .../custom-configs/storage-creds.yaml | 0
 .../{v1 => }/custom-configs/ui-configmap.yaml | 0
 .../custom-workflow-controller-configmap.yaml | 0
 .../dspa.yaml | 0
 .../kustomization.yaml | 0
 .../dspa-all-fields/dspa_all_fields.yaml | 15 -
 .../{v2 => }/dspa-simple/dspa_simple.yaml | 0
 .../{v1 => }/dspa-simple/kustomization.yaml | 0
 .../external-object-storage/dspa.yaml | 0
 .../kustomization.yaml | 0
 config/samples/{v2 => }/local-dev/dspa.yaml | 0
 .../{v2 => }/local-dev/kustomization.yaml | 0
 .../{v2 => }/local-dev/storage-creds.yaml | 0
 .../v1/custom-configs/artifact_script.yaml | 24 -
 .../samples/v1/custom-configs/db-creds.yaml | 10 -
 config/samples/v1/custom-configs/dspa.yaml | 96 ---
 .../v1/custom-configs/kustomization.yaml | 8 -
 .../v1/custom-configs/storage-creds.yaml | 14 -
 .../v1/dspa-all-fields/dspa_all_fields.yaml | 214 -------
 .../v1/dspa-local-dev/dspa_local_dev.yaml | 21 -
 .../samples/v1/dspa-simple/dspa_simple.yaml | 19 -
 .../v1/external-object-storage/dspa.yaml | 21 -
 .../kustomization.yaml | 5 -
 .../storage-creds.yaml | 14 -
 .../v2/custom-configs/ui-configmap.yaml | 11 -
 .../samples/v2/dspa-simple/kustomization.yaml | 2 -
 config/v2/cache/clusterrole.yaml | 34 --
 .../v2/configmaps/configartifactbucket.yaml | 8 -
 config/v2/configmaps/configartifactpvc.yaml | 8 -
 config/v2/configmaps/configdefaults.yaml | 8 -
 config/v2/configmaps/configobservability.yaml | 9 -
 config/v2/configmaps/configspire.yaml | 8 -
 .../v2/configmaps/configtrustedsources.yaml | 8 -
 config/v2/configmaps/featureflags.yaml | 8 -
 config/v2/configmaps/kustomization.yaml | 8 -
 .../clusterrole.leaderelection.yaml | 19 -
 .../controller/clusterrole.clusteraccess.yaml | 54 --
 .../controller/clusterrole.tenantaccess.yaml | 20 -
 .../v2/exithandler/webhook/kustomization.yaml | 10 -
 .../kfptask/clusterrole.leaderelection.yaml | 19 -
 .../webhook/clusterrole.clusteraccess.yaml | 82 ---
 config/v2/kfptask/webhook/kustomization.yaml | 11 -
 .../clusterrole.leaderelection.yaml | 19 -
 .../controller/clusterrole.tenantaccess.yaml | 20 -
 .../kfpexithandlerwebhookcertssecret.yaml | 9 -
 .../v2/secrets/kfptaskwebhookcertssecret.yaml | 9 -
 config/v2/secrets/kustomization.yaml | 4 -
 .../tektonpipelineloopwebhookcertssecret.yaml | 9 -
 controllers/apiserver_test.go | 10 +-
 controllers/common.go | 13 -
 controllers/config/defaults.go | 68 +--
 controllers/dspastatus/dspa_status.go | 58 +-
 controllers/dspipeline_controller.go | 23 +-
 controllers/dspipeline_params.go | 138 +----
 controllers/mlmd.go | 57 +-
 controllers/mlmd_test.go | 435 +-------------
 .../testdata/declarative/case_0/config.yaml | 13 +-
 .../declarative/case_0/deploy/cr.yaml | 3 +
 .../created/apiserver_deployment.yaml | 50 +-
 .../created/configmap_artifact_script.yaml | 42 --
 .../created/configmap_server_config.yaml | 29 +-
 .../created/persistence-agent_deployment.yaml | 15 +-
 .../scheduled-workflow_deployment.yaml | 2 -
 .../testdata/declarative/case_1/config.yaml | 14 +-
 .../declarative/case_1/deploy/cr.yaml | 6 +
 .../testdata/declarative/case_2/config.yaml | 15 +-
 .../declarative/case_2/deploy/cr.yaml | 35 +-
 .../created/apiserver_deployment.yaml | 52 +-
 .../created/configmap_artifact_script.yaml | 42 --
 .../created/mlmd_envoy_deployment.yaml | 77 +++
 .../created/mlmd_grpc_deployment.yaml} | 40 +-
 .../created/persistence-agent_deployment.yaml | 15 +-
 .../created/sample-pipeline.yaml.tmpl | 554 -----------------
 .../scheduled-workflow_deployment.yaml | 2 -
 .../testdata/declarative/case_3/config.yaml | 14 +-
 .../declarative/case_3/deploy/02_cr.yaml | 6 +-
 .../created/apiserver_deployment.yaml | 52 +-
 .../configmap_artifact_script.yaml | 39 --
 .../testdata/declarative/case_4/config.yaml | 20 +-
 .../declarative/case_4/deploy/00_cr.yaml | 36 +-
 .../created/apiserver_deployment.yaml | 60 +-
 .../created/configmap_artifact_script.yaml | 42 --
 .../created/mlmd_envoy_deployment.yaml} | 41 +-
 .../created/mlmd_grpc_deployment.yaml | 74 +++
 .../created/persistence-agent_deployment.yaml | 15 +-
 .../scheduled-workflow_deployment.yaml | 2 -
 .../testdata/declarative/case_5/config.yaml | 17 +-
 .../deploy/00_configmap.yaml | 4 +-
 .../deploy/01_configmap.yaml | 0
 .../{case_8 => case_5}/deploy/02_cr.yaml | 12 +-
 .../declarative/case_5/deploy/cr.yaml | 22 -
 .../created/apiserver_deployment.yaml | 113 ++--
 .../created/configmap_artifact_script.yaml | 42 --
 .../created/configmap_dspa_trusted_ca.yaml | 4 +-
 .../expected/created/mariadb_deployment.yaml | 18 +
 .../created/metadata-writer_deployment.yaml | 63 --
 .../created/mlpipelines-ui_deployment.yaml | 171 ------
 .../created/persistence-agent_deployment.yaml | 76 ---
 .../scheduled-workflow_deployment.yaml | 65 --
 .../testdata/declarative/case_6/config.yaml | 17 +
 .../{case_9 => case_6}/deploy/00_cr.yaml | 9 +-
 .../created/apiserver_deployment.yaml | 60 +-
 .../created/mlpipelines-ui_deployment.yaml | 44 +-
 .../created/persistence-agent_deployment.yaml | 26 +-
 .../testdata/declarative/case_7/config.yaml | 15 -
 .../declarative/case_7/deploy/cr.yaml | 93 ---
 .../created/apiserver_deployment.yaml | 210 -------
 .../created/configmap_artifact_script.yaml | 42 --
 .../expected/created/mariadb_deployment.yaml | 79 ---
 .../expected/created/minio_deployment.yaml | 75 ---
 .../created/mlpipelines-ui_deployment.yaml | 171 ------
 .../created/persistence-agent_deployment.yaml | 89 ---
 .../expected/created/sample-config.yaml.tmpl | 17 -
 .../created/sample-pipeline.yaml.tmpl | 254 --------
 .../scheduled-workflow_deployment.yaml | 65 --
 .../testdata/declarative/case_8/config.yaml | 20 -
 .../created/apiserver_deployment.yaml | 219 -------
 .../created/configmap_artifact_script.yaml | 42 --
 .../expected/created/mariadb_deployment.yaml | 97 ---
 .../testdata/declarative/case_9/config.yaml | 14 -
 controllers/testutil/equalities.go | 5 +-
 controllers/testutil/util.go | 3 +-
 controllers/workflow_controller_test.go | 4 +-
 main.go | 2 +-
 scripts/release/params.py | 34 +-
 154 files changed, 832 insertions(+), 5954 deletions(-)
 delete mode 100644 config/internal/apiserver/default/artifact_script.yaml.tmpl
 rename config/internal/common/{argo => default}/policy.yaml.tmpl (100%)
 delete mode 100644 config/internal/common/tekton/policy.yaml.tmpl
 delete mode 100644 config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl
 delete mode 100644 config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl
 delete mode 100644 config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl
 delete mode 100644 config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl
 rename config/samples/{v2 => }/custom-configs/db-creds.yaml (100%)
 rename config/samples/{v2 => }/custom-configs/dspa.yaml (100%)
 rename config/samples/{v2 => }/custom-configs/kustomization.yaml (100%)
 rename config/samples/{v2 => }/custom-configs/storage-creds.yaml (100%)
 rename config/samples/{v1 => }/custom-configs/ui-configmap.yaml (100%)
 rename config/samples/{v2 => }/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml (100%)
 rename config/samples/{v2 => }/custom-workflow-controller-config/dspa.yaml (100%)
 rename config/samples/{v2 => }/custom-workflow-controller-config/kustomization.yaml (100%)
 rename config/samples/{v2 => }/dspa-all-fields/dspa_all_fields.yaml (92%)
 rename config/samples/{v2 => }/dspa-simple/dspa_simple.yaml (100%)
 rename config/samples/{v1 => }/dspa-simple/kustomization.yaml (100%)
 rename config/samples/{v2 => }/external-object-storage/dspa.yaml (100%)
 rename config/samples/{v2 => }/external-object-storage/kustomization.yaml (100%)
 rename config/samples/{v2 => }/local-dev/dspa.yaml (100%)
 rename config/samples/{v2 => }/local-dev/kustomization.yaml (100%)
 rename config/samples/{v2 => }/local-dev/storage-creds.yaml (100%)
 delete mode 100644 config/samples/v1/custom-configs/artifact_script.yaml
 delete mode 100644 config/samples/v1/custom-configs/db-creds.yaml
 delete mode 100644 config/samples/v1/custom-configs/dspa.yaml
 delete mode 100644 config/samples/v1/custom-configs/kustomization.yaml
 delete mode 100644 config/samples/v1/custom-configs/storage-creds.yaml
 delete mode 100644 config/samples/v1/dspa-all-fields/dspa_all_fields.yaml
 delete mode 100644 config/samples/v1/dspa-local-dev/dspa_local_dev.yaml
 delete mode 100644 config/samples/v1/dspa-simple/dspa_simple.yaml
 delete mode 100644 config/samples/v1/external-object-storage/dspa.yaml
 delete mode 100644 config/samples/v1/external-object-storage/kustomization.yaml
 delete mode 100644 config/samples/v1/external-object-storage/storage-creds.yaml
 delete mode 100644 config/samples/v2/custom-configs/ui-configmap.yaml
 delete mode 100644 config/samples/v2/dspa-simple/kustomization.yaml
 delete mode 100644 config/v2/cache/clusterrole.yaml
 delete mode 100644 config/v2/configmaps/configartifactbucket.yaml
 delete mode 100644 config/v2/configmaps/configartifactpvc.yaml
 delete mode 100644 config/v2/configmaps/configdefaults.yaml
 delete mode 100644 config/v2/configmaps/configobservability.yaml
 delete mode 100644 config/v2/configmaps/configspire.yaml
 delete mode 100644 config/v2/configmaps/configtrustedsources.yaml
 delete mode 100644 config/v2/configmaps/featureflags.yaml
 delete mode 100644 config/v2/configmaps/kustomization.yaml
 delete mode 100644 config/v2/exithandler/clusterrole.leaderelection.yaml
 delete mode 100644 config/v2/exithandler/controller/clusterrole.clusteraccess.yaml
 delete mode 100644 config/v2/exithandler/controller/clusterrole.tenantaccess.yaml
 delete mode 100644 config/v2/exithandler/webhook/kustomization.yaml
 delete mode 100644 config/v2/kfptask/clusterrole.leaderelection.yaml
 delete mode 100644 config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml
 delete mode 100644 config/v2/kfptask/webhook/kustomization.yaml
 delete mode 100644 config/v2/pipelineloop/clusterrole.leaderelection.yaml
 delete mode 100644 config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml
 delete mode 100644 config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml
 delete mode 100644 config/v2/secrets/kfptaskwebhookcertssecret.yaml
 delete mode 100644 config/v2/secrets/kustomization.yaml
 delete mode 100644 config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml
 delete mode 100644 controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml
 delete mode 100644 controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml
 create mode 100644 controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml
 rename controllers/testdata/declarative/{case_5/expected/created/metadata-grpc_deployment.yaml => case_2/expected/created/mlmd_grpc_deployment.yaml} (68%)
 delete mode 100644 controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl
 delete mode 100644 controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml
 delete mode 100644 controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
 rename controllers/testdata/declarative/{case_5/expected/created/metadata-envoy_deployment.yaml => case_4/expected/created/mlmd_envoy_deployment.yaml} (79%)
 create mode 100644 controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml
 rename controllers/testdata/declarative/{case_8 => case_5}/deploy/00_configmap.yaml (96%)
 rename controllers/testdata/declarative/{case_8 => case_5}/deploy/01_configmap.yaml (100%)
 rename controllers/testdata/declarative/{case_8 => case_5}/deploy/02_cr.yaml (67%)
 delete mode 100644 controllers/testdata/declarative/case_5/deploy/cr.yaml
 delete mode 100644 controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
 rename controllers/testdata/declarative/{case_8 => case_5}/expected/created/configmap_dspa_trusted_ca.yaml (98%)
 delete mode 100644 controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml
 create mode 100644 controllers/testdata/declarative/case_6/config.yaml
 rename controllers/testdata/declarative/{case_9 => case_6}/deploy/00_cr.yaml (82%)
 rename controllers/testdata/declarative/{case_9 => case_6}/expected/created/apiserver_deployment.yaml (79%)
 rename controllers/testdata/declarative/{case_9 => case_6}/expected/created/mlpipelines-ui_deployment.yaml (84%)
 rename controllers/testdata/declarative/{case_9 => case_6}/expected/created/persistence-agent_deployment.yaml (79%)
 delete mode 100644 controllers/testdata/declarative/case_7/config.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/deploy/cr.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl
 delete mode 100644 controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_8/config.yaml
 delete mode 100644 controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml
 delete mode 100644 controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml
 delete mode 100644 controllers/testdata/declarative/case_9/config.yaml

diff --git a/api/v1alpha1/dspipeline_types.go b/api/v1alpha1/dspipeline_types.go
index 657e0a2ad..e532f9f6e 100644
--- a/api/v1alpha1/dspipeline_types.go
+++ b/api/v1alpha1/dspipeline_types.go
@@ -42,7 +42,7 @@ type DSPASpec struct {
 	*ObjectStorage `json:"objectStorage"`
 	*MLMD          `json:"mlmd,omitempty"`
 	// +kubebuilder:validation:Optional
-	// +kubebuilder:default:="v1"
+	// +kubebuilder:default:="v2"
 	DSPVersion string `json:"dspVersion,omitempty"`

 	// PodToPodTLS Set to "true" or "false" to enable or disable TLS communication between DSPA components (pods). Defaults to "true" to enable TLS between all pods. Only supported in DSP V2 on OpenShift.
@@ -99,59 +99,6 @@ type APIServer struct {
 	// +kubebuilder:validation:Optional
 	CustomKfpLauncherConfigMap string `json:"customKfpLauncherConfigMap,omitempty"`

-	// Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	ApplyTektonCustomResource bool `json:"applyTektonCustomResource"`
-	// Default: false
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=false
-	// +kubebuilder:validation:Optional
-	ArchiveLogs bool `json:"archiveLogs"`
-	// Deprecated: DSP V1 only, will be removed in the future.
-	ArtifactImage string `json:"artifactImage,omitempty"`
-	// Deprecated: DSP V1 only, will be removed in the future.
-	CacheImage string `json:"cacheImage,omitempty"`
-	// Image used for internal artifact passing handling within Tekton taskruns. This field specifies the image used in the 'move-all-results-to-tekton-home' step.
-	// Deprecated: DSP V1 only, will be removed in the future.
-	MoveResultsImage string `json:"moveResultsImage,omitempty"`
-	// Deprecated: DSP V1 only, will be removed in the future.
-	ArtifactScriptConfigMap *ScriptConfigMap `json:"artifactScriptConfigMap,omitempty"`
-	// Inject the archive step script. Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	InjectDefaultScript bool `json:"injectDefaultScript"`
-	// Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	StripEOF bool `json:"stripEOF"`
-	// Default: "Cancelled" - Allowed Values: "Cancelled", "StoppedRunFinally", "CancelledRunFinally"
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:validation:Enum=Cancelled;StoppedRunFinally;CancelledRunFinally
-	// +kubebuilder:default:=Cancelled
-	TerminateStatus string `json:"terminateStatus,omitempty"`
-	// Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	TrackArtifacts bool `json:"trackArtifacts"`
-	// Default: 120
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=120
-	DBConfigConMaxLifetimeSec int `json:"dbConfigConMaxLifetimeSec,omitempty"`
-	// Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	CollectMetrics bool `json:"collectMetrics"`
-	// Default: true
-	// Deprecated: DSP V1 only, will be removed in the future.
-	// +kubebuilder:default:=true
-	// +kubebuilder:validation:Optional
-	AutoUpdatePipelineDefaultVersion bool `json:"autoUpdatePipelineDefaultVersion"`
 	// This is the path where the ca bundle will be mounted in the
 	// pipeline server and user executor pods
 	// +kubebuilder:validation:Optional
@@ -325,8 +272,6 @@ type MLMD struct {
 	Deploy bool `json:"deploy"`
 	*Envoy `json:"envoy,omitempty"`
 	*GRPC  `json:"grpc,omitempty"`
-	// Deprecated: DSP V1 only, will be removed in the future.
-	*Writer `json:"writer,omitempty"`
 }

 type Envoy struct {
diff --git a/api/v1alpha1/zz_generated.deepcopy.go b/api/v1alpha1/zz_generated.deepcopy.go
index ac5fa0ceb..2269c9536 100644
--- a/api/v1alpha1/zz_generated.deepcopy.go
+++ b/api/v1alpha1/zz_generated.deepcopy.go
@@ -44,11 +44,6 @@ func (in *APIServer) DeepCopyInto(out *APIServer) {
 		*out = new(ScriptConfigMap)
 		**out = **in
 	}
-	if in.ArtifactScriptConfigMap != nil {
-		in, out := &in.ArtifactScriptConfigMap, &out.ArtifactScriptConfigMap
-		*out = new(ScriptConfigMap)
-		**out = **in
-	}
 	if in.ArtifactSignedURLExpirySeconds != nil {
 		in, out := &in.ArtifactSignedURLExpirySeconds, &out.ArtifactSignedURLExpirySeconds
 		*out = new(int)
@@ -383,11 +378,6 @@ func (in *MLMD) DeepCopyInto(out *MLMD) {
 		*out = new(GRPC)
 		(*in).DeepCopyInto(*out)
 	}
-	if in.Writer != nil {
-		in, out := &in.Writer, &out.Writer
-		*out = new(Writer)
-		(*in).DeepCopyInto(*out)
-	}
 }

 // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new MLMD.
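
With the deprecated v1 knobs removed from APIServer and MLMD, and with dspVersion now defaulting to "v2", a DSPA manifest no longer needs to opt into v2 explicitly. The following is a minimal sketch for illustration only: the group/version and kind follow the CRD and package names in this patch, while the objectStorage.minio block and its image tag are assumptions, not values taken from this diff.

apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
kind: DataSciencePipelinesApplication
metadata:
  name: sample
spec:
  # dspVersion can now be omitted; it defaults to "v2" (previously "v1").
  # Removed v1-only fields such as apiServer.artifactScriptConfigMap or
  # mlmd.writer are no longer part of the schema.
  objectStorage:
    minio:            # assumption: deploy the bundled object store
      deploy: true
      image: quay.io/opendatahub/minio:latest   # placeholder image tag

Manifests that still set the removed fields should not error out on their own; as with any structural schema change, unknown fields are pruned on admission, so stale v1 settings are silently dropped rather than honored.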
diff --git a/config/base/kustomization.yaml b/config/base/kustomization.yaml
index cc3a5ef50..4cfe5112b 100644
--- a/config/base/kustomization.yaml
+++ b/config/base/kustomization.yaml
@@ -17,20 +17,21 @@ configMapGenerator:
     envs:
       - params.env
 vars:
-  - name: IMAGES_APISERVER
+  # Images
+  - name: IMAGES_DSPO
     objref:
       kind: ConfigMap
      name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_APISERVER
-  - name: IMAGES_ARTIFACT
+      fieldpath: data.IMAGES_DSPO
+  - name: IMAGES_APISERVER
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_ARTIFACT
+      fieldpath: data.IMAGES_APISERVER
   - name: IMAGES_OAUTHPROXY
     objref:
       kind: ConfigMap
@@ -38,13 +39,13 @@ vars:
       apiVersion: v1
     fieldref:
       fieldpath: data.IMAGES_OAUTHPROXY
-  - name: IMAGES_PERSISTENTAGENT
+  - name: IMAGES_PERSISTENCEAGENT
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_PERSISTENTAGENT
+      fieldpath: data.IMAGES_PERSISTENCEAGENT
   - name: IMAGES_SCHEDULEDWORKFLOW
     objref:
       kind: ConfigMap
@@ -52,55 +53,57 @@ vars:
       apiVersion: v1
     fieldref:
       fieldpath: data.IMAGES_SCHEDULEDWORKFLOW
-  - name: IMAGES_CACHE
+  - name: IMAGES_MARIADB
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_CACHE
-  - name: IMAGES_MOVERESULTSIMAGE
+      fieldpath: data.IMAGES_MARIADB
+  - name: IMAGES_MLMDENVOY
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_MOVERESULTSIMAGE
-  - name: IMAGES_MARIADB
+      fieldpath: data.IMAGES_MLMDENVOY
+  - name: IMAGES_MLMDGRPC
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_MARIADB
-  - name: IMAGES_MLMDENVOY
+      fieldpath: data.IMAGES_MLMDGRPC
+  - name: IMAGES_LAUNCHER
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_MLMDENVOY
-  - name: IMAGES_MLMDGRPC
+      fieldpath: data.IMAGES_LAUNCHER
+  - name: IMAGES_DRIVER
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_MLMDGRPC
-  - name: IMAGES_MLMDWRITER
+      fieldpath: data.IMAGES_DRIVER
+  - name: IMAGES_ARGO_EXEC
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_MLMDWRITER
-  - name: IMAGES_DSPO
+      fieldpath: data.IMAGES_ARGO_EXEC
+  - name: IMAGES_ARGO_WORKFLOWCONTROLLER
     objref:
       kind: ConfigMap
       name: dspo-parameters
       apiVersion: v1
     fieldref:
-      fieldpath: data.IMAGES_DSPO
+      fieldpath: data.IMAGES_ARGO_WORKFLOWCONTROLLER
+
+  # DSPO level configs
   - name: ZAP_LOG_LEVEL
     objref:
       kind: ConfigMap
@@ -136,69 +139,6 @@ vars:
       apiVersion: v1
     fieldref:
       fieldpath: data.DSPO_HEALTHCHECK_OBJECTSTORE_CONNECTIONTIMEOUT
-  - name: IMAGESV2_ARGO_APISERVER
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_APISERVER
-  - name: IMAGESV2_ARGO_PERSISTENCEAGENT
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_PERSISTENCEAGENT
-  - name: IMAGESV2_ARGO_SCHEDULEDWORKFLOW
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_SCHEDULEDWORKFLOW
-  - name: IMAGESV2_ARGO_MLMDENVOY
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_MLMDENVOY
-  - name: IMAGESV2_ARGO_MLMDGRPC
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_MLMDGRPC
-  - name: IMAGESV2_ARGO_ARGOEXEC
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_ARGOEXEC
-  - name: IMAGESV2_ARGO_WORKFLOWCONTROLLER
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.IMAGESV2_ARGO_WORKFLOWCONTROLLER
-  - name: V2_LAUNCHER_IMAGE
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.V2_LAUNCHER_IMAGE
-  - name: V2_DRIVER_IMAGE
-    objref:
-      kind: ConfigMap
-      name: dspo-parameters
-      apiVersion: v1
-    fieldref:
-      fieldpath: data.V2_DRIVER_IMAGE
   - name: DSPO_APISERVER_INCLUDE_OWNERREFERENCE
     objref:
       kind: ConfigMap
diff --git a/config/base/params.env b/config/base/params.env
index f861cb30e..6cec827e6 100644
--- a/config/base/params.env
+++ b/config/base/params.env
@@ -1,24 +1,15 @@
-IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:v1.6.3
-IMAGES_ARTIFACT=quay.io/opendatahub/ds-pipelines-artifact-manager:v1.6.3
-IMAGES_PERSISTENTAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:v1.6.3
-IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:v1.6.3
-IMAGES_MLMDENVOY=quay.io/opendatahub/ds-pipelines-metadata-envoy:v1.6.3
-IMAGES_MLMDGRPC=quay.io/opendatahub/ds-pipelines-metadata-grpc:v1.6.3
-IMAGES_MLMDWRITER=quay.io/opendatahub/ds-pipelines-metadata-writer:v1.6.3
 IMAGES_DSPO=quay.io/opendatahub/data-science-pipelines-operator:latest
-V2_LAUNCHER_IMAGE=quay.io/opendatahub/ds-pipelines-launcher:latest
-V2_DRIVER_IMAGE=quay.io/opendatahub/ds-pipelines-driver:latest
-IMAGESV2_ARGO_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest
-IMAGESV2_ARGO_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest
-IMAGESV2_ARGO_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest
-IMAGESV2_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:3.3.10-upstream
-IMAGESV2_ARGO_ARGOEXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:3.3.10-upstream
-IMAGESV2_ARGO_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest
-IMAGESV2_ARGO_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb
+IMAGES_APISERVER=quay.io/opendatahub/ds-pipelines-api-server:latest
+IMAGES_PERSISTENCEAGENT=quay.io/opendatahub/ds-pipelines-persistenceagent:latest
+IMAGES_SCHEDULEDWORKFLOW=quay.io/opendatahub/ds-pipelines-scheduledworkflow:latest
+IMAGES_ARGO_EXEC=quay.io/opendatahub/ds-pipelines-argo-argoexec:3.3.10-upstream
+IMAGES_ARGO_WORKFLOWCONTROLLER=quay.io/opendatahub/ds-pipelines-argo-workflowcontroller:3.3.10-upstream
+IMAGES_LAUNCHER=quay.io/opendatahub/ds-pipelines-launcher:latest
+IMAGES_DRIVER=quay.io/opendatahub/ds-pipelines-driver:latest
+IMAGES_MLMDGRPC=quay.io/opendatahub/mlmd-grpc-server:latest
+IMAGES_MLMDENVOY=registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb
 IMAGES_MARIADB=registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1
 IMAGES_OAUTHPROXY=registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33
-IMAGES_CACHE=registry.redhat.io/ubi8/ubi-minimal@sha256:5d2d4d4dbec470f8ffb679915e2a8ae25ad754cd9193fa966deee1ecb7b3ee00
-IMAGES_MOVERESULTSIMAGE=registry.redhat.io/ubi8/ubi-micro@sha256:396baed3d689157d96aa7d8988fdfea7eb36684c8335eb391cf1952573e689c1
 ZAP_LOG_LEVEL=info
 MAX_CONCURRENT_RECONCILES=10
DSPO_HEALTHCHECK_DATABASE_CONNECTIONTIMEOUT=15s diff --git a/config/configmaps/files/config.yaml b/config/configmaps/files/config.yaml index b263ef3c6..bf6e01e30 100644 --- a/config/configmaps/files/config.yaml +++ b/config/configmaps/files/config.yaml @@ -1,26 +1,15 @@ Images: ApiServer: $(IMAGES_APISERVER) - Artifact: $(IMAGES_ARTIFACT) - OAuthProxy: $(IMAGES_OAUTHPROXY) - PersistentAgent: $(IMAGES_PERSISTENTAGENT) + PersistentAgent: $(IMAGES_PERSISTENCEAGENT) ScheduledWorkflow: $(IMAGES_SCHEDULEDWORKFLOW) - Cache: $(IMAGES_CACHE) - MoveResultsImage: $(IMAGES_MOVERESULTSIMAGE) - MariaDB: $(IMAGES_MARIADB) MlmdEnvoy: $(IMAGES_MLMDENVOY) MlmdGRPC: $(IMAGES_MLMDGRPC) - MlmdWriter: $(IMAGES_MLMDWRITER) -ImagesV2: - Argo: - ApiServer: $(IMAGESV2_ARGO_APISERVER) - PersistentAgent: $(IMAGESV2_ARGO_PERSISTENCEAGENT) - ScheduledWorkflow: $(IMAGESV2_ARGO_SCHEDULEDWORKFLOW) - MlmdEnvoy: $(IMAGESV2_ARGO_MLMDENVOY) - MlmdGRPC: $(IMAGESV2_ARGO_MLMDGRPC) - WorkflowController: $(IMAGESV2_ARGO_WORKFLOWCONTROLLER) - ArgoExecImage: $(IMAGESV2_ARGO_ARGOEXEC) - ArgoLauncherImage: $(V2_LAUNCHER_IMAGE) - ArgoDriverImage: $(V2_DRIVER_IMAGE) + ArgoExecImage: $(IMAGES_ARGO_EXEC) + ArgoWorkflowController: $(IMAGES_ARGO_WORKFLOWCONTROLLER) + LauncherImage: $(IMAGES_LAUNCHER) + DriverImage: $(IMAGES_DRIVER) + OAuthProxy: $(IMAGES_OAUTHPROXY) + MariaDB: $(IMAGES_MARIADB) DSPO: HealthCheck: Database: diff --git a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml index ac888ab93..da9679123 100644 --- a/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml +++ b/config/crd/bases/datasciencepipelinesapplications.opendatahub.io_datasciencepipelinesapplications.yaml @@ -40,44 +40,16 @@ spec: deploy: true description: DS Pipelines API Server configuration. properties: - applyTektonCustomResource: - default: true - description: 'Default: true Deprecated: DSP V1 only, will be removed - in the future.' - type: boolean - archiveLogs: - default: false - description: 'Default: false Deprecated: DSP V1 only, will be - removed in the future.' - type: boolean argoDriverImage: type: string argoLauncherImage: type: string - artifactImage: - description: 'Deprecated: DSP V1 only, will be removed in the - future.' - type: string - artifactScriptConfigMap: - description: 'Deprecated: DSP V1 only, will be removed in the - future.' - properties: - key: - type: string - name: - type: string - type: object artifactSignedURLExpirySeconds: default: 60 description: 'The expiry time (seconds) for artifact download links when querying the dsp server via /apis/v2beta1/artifacts/{id}?share_url=true Default: 60' type: integer - autoUpdatePipelineDefaultVersion: - default: true - description: 'Default: true Deprecated: DSP V1 only, will be removed - in the future.' - type: boolean cABundle: description: If the Object store/DB is behind a TLS secured connection that is unrecognized by the host OpenShift/K8s cluster, then @@ -103,15 +75,6 @@ spec: description: This is the filename of the ca bundle that will be created in the pipeline server and user executor pods type: string - cacheImage: - description: 'Deprecated: DSP V1 only, will be removed in the - future.' - type: string - collectMetrics: - default: true - description: 'Default: true Deprecated: DSP V1 only, will be removed - in the future.' 
- type: boolean customKfpLauncherConfigMap: description: When specified, the `data` contents of the `kfp-launcher` ConfigMap that DSPO writes will be fully replaced with the `data` @@ -136,11 +99,6 @@ spec: name: type: string type: object - dbConfigConMaxLifetimeSec: - default: 120 - description: 'Default: 120 Deprecated: DSP V1 only, will be removed - in the future.' - type: integer deploy: default: true description: 'Enable DS Pipelines Operator management of DSP API @@ -160,17 +118,6 @@ spec: image: description: Specify a custom image for DSP API Server. type: string - injectDefaultScript: - default: true - description: 'Inject the archive step script. Default: true Deprecated: - DSP V1 only, will be removed in the future.' - type: boolean - moveResultsImage: - description: 'Image used for internal artifact passing handling - within Tekton taskruns. This field specifies the image used - in the ''move-all-results-to-tekton-home'' step. Deprecated: - DSP V1 only, will be removed in the future.' - type: string resources: description: Specify custom Pod resource requirements for this component. @@ -206,26 +153,6 @@ spec: x-kubernetes-int-or-string: true type: object type: object - stripEOF: - default: true - description: 'Default: true Deprecated: DSP V1 only, will be removed - in the future.' - type: boolean - terminateStatus: - default: Cancelled - description: 'Default: "Cancelled" - Allowed Values: "Cancelled", - "StoppedRunFinally", "CancelledRunFinally" Deprecated: DSP V1 - only, will be removed in the future.' - enum: - - Cancelled - - StoppedRunFinally - - CancelledRunFinally - type: string - trackArtifacts: - default: true - description: 'Default: true Deprecated: DSP V1 only, will be removed - in the future.' - type: boolean type: object database: default: @@ -360,7 +287,7 @@ spec: type: object type: object dspVersion: - default: v1 + default: v2 type: string mlmd: properties: @@ -461,53 +388,6 @@ spec: type: object type: object type: object - writer: - description: 'Deprecated: DSP V1 only, will be removed in the - future.' - properties: - image: - type: string - resources: - description: ResourceRequirements structures compute resource - requirements. Replaces ResourceRequirements from corev1 - which also includes optional storage field. We handle storage - field separately, and should not include it as a subfield - for Resources. - properties: - limits: - properties: - cpu: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - memory: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - requests: - properties: - cpu: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - memory: - anyOf: - - type: integer - - type: string - pattern: ^(\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\+|-)?(([0-9]+(\.[0-9]*)?)|(\.[0-9]+))))?$ - x-kubernetes-int-or-string: true - type: object - type: object - required: - - image - type: object type: object mlpipelineUI: description: Deploy the KFP UI with DS Pipelines UI. 
This feature diff --git a/config/internal/apiserver/default/artifact_script.yaml.tmpl b/config/internal/apiserver/default/artifact_script.yaml.tmpl deleted file mode 100644 index 15320a760..000000000 --- a/config/internal/apiserver/default/artifact_script.yaml.tmpl +++ /dev/null @@ -1,44 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { -{{ if .CustomCABundle }} - aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} --ca-bundle {{ .PiplinesCABundleMountPath }} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz -{{ else }} - aws s3 --endpoint {{.ObjectStorageConnection.Endpoint}} cp $1.tgz s3://{{.ObjectStorageConnection.Bucket}}/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz -{{ end }} - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-{{ .Name }} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-{{.Name}} - component: data-science-pipelines diff --git a/config/internal/apiserver/default/deployment.yaml.tmpl b/config/internal/apiserver/default/deployment.yaml.tmpl index 8a8cdd76d..ee555d763 100644 --- a/config/internal/apiserver/default/deployment.yaml.tmpl +++ b/config/internal/apiserver/default/deployment.yaml.tmpl @@ -59,10 +59,9 @@ spec: - name: SSL_CERT_DIR value: {{.CustomSSLCertDir}} {{ end }} - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "{{.APIServer.AutoUpdatePipelineDefaultVersion}}" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "{{.APIServer.DBConfigConMaxLifetimeSec}}" + # Visualization server is something we deploy + # But this env is required in KFP, even though + # It is not used. 
- name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -124,8 +123,6 @@ spec: - name: METADATA_TLS_ENABLED value: "true" {{ end }} - {{ if (eq .DSPVersion "v2") }} - ## Argo-Specific Env Vars ## - name: EXECUTIONTYPE value: Workflow - name: DB_DRIVER_NAME @@ -143,40 +140,6 @@ spec: value: "{{.DBConnection.Host}}" - name: DBCONFIG_MYSQLCONFIG_PORT value: "{{.DBConnection.Port}}" - {{ else }} - ## Tekton-Specific Env Vars ## - - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "{{.APIServer.CacheImage}}" - - name: MOVERESULTS_IMAGE - value: "{{.APIServer.MoveResultsImage}}" - - name: ARTIFACT_IMAGE - value: "{{.APIServer.ArtifactImage}}" - - name: ARTIFACT_BUCKET - value: "{{.ObjectStorageConnection.Bucket}}" - - name: ARTIFACT_ENDPOINT - value: "{{.ObjectStorageConnection.Endpoint}}" - - name: ARTIFACT_SCRIPT - valueFrom: - configMapKeyRef: - key: "{{ .APIServer.ArtifactScriptConfigMap.Key }}" - name: "{{ .APIServer.ArtifactScriptConfigMap.Name }}" - - name: ARCHIVE_LOGS - value: "{{.APIServer.ArchiveLogs}}" - - name: TRACK_ARTIFACTS - value: "{{.APIServer.TrackArtifacts}}" - - name: STRIP_EOF - value: "{{.APIServer.StripEOF}}" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "{{.APIServer.InjectDefaultScript}}" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "{{.APIServer.ApplyTektonCustomResource}}" - - name: TERMINATE_STATUS - value: "{{.APIServer.TerminateStatus}}" - {{ end }} image: {{.APIServer.Image}} # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl b/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl index 0915e31dd..13396692b 100644 --- a/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl +++ b/config/internal/apiserver/default/role_ds-pipeline.yaml.tmpl @@ -35,22 +35,6 @@ rules: - update - patch - delete - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - runs - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - kubeflow.org resources: @@ -74,18 +58,6 @@ rules: - tokenreviews verbs: - create - - apiGroups: - - custom.tekton.dev - resources: - - pipelineloops - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - image.openshift.io resources: diff --git a/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl b/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl index 2079a183e..7df3a94f9 100644 --- a/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl +++ b/config/internal/apiserver/default/role_pipeline-runner.yaml.tmpl @@ -89,22 +89,6 @@ rules: - seldondeployments verbs: - '*' - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - runs - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - ray.io resources: diff --git a/config/internal/apiserver/default/server-config.yaml.tmpl b/config/internal/apiserver/default/server-config.yaml.tmpl index ce92a506e..d019a5232 100644 --- a/config/internal/apiserver/default/server-config.yaml.tmpl +++ b/config/internal/apiserver/default/server-config.yaml.tmpl @@ -8,7 +8,6 @@ metadata: component: data-science-pipelines data: config.json: | -{{ if eq .DSPVersion 
"v2" }} { "DBConfig": { "MySQLConfig": { @@ -26,16 +25,3 @@ data: "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", "InitConnectionTimeout": "6m" } -{{ else }} - { - "DBConfig": { - "DriverName": "mysql", - "ConMaxLifeTime": "120s", - "ExtraParams": {{ .DBConnection.ExtraParams }} - }, - "ObjectStoreConfig": { - "PipelinePath": "pipelines" - }, - "InitConnectionTimeout": "6m" - } -{{ end }} diff --git a/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl b/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl index 86281971f..05d07cdef 100644 --- a/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl +++ b/config/internal/apiserver/sample-pipeline/sample-pipeline.yaml.tmpl @@ -1,4 +1,3 @@ -{{ if (eq .DSPVersion "v2") }} apiVersion: v1 kind: ConfigMap metadata: @@ -253,559 +252,3 @@ data: schemaVersion: 0.0.1 schemaVersion: 2.1.0 sdkVersion: kfp-2.7.0 -{{ else }} -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-pipeline-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-{{.Name}} - component: data-science-pipelines -data: - iris-pipeline-compiled.yaml: |- - apiVersion: tekton.dev/v1beta1 - kind: PipelineRun - metadata: - name: iris-pipeline - annotations: - tekton.dev/output_artifacts: '{"data-prep": [{"key": "artifacts/$PIPELINERUN/data-prep/X_test.tgz", - "name": "data-prep-X_test", "path": "/tmp/outputs/X_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/X_train.tgz", - "name": "data-prep-X_train", "path": "/tmp/outputs/X_train/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_test.tgz", - "name": "data-prep-y_test", "path": "/tmp/outputs/y_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_train.tgz", - "name": "data-prep-y_train", "path": "/tmp/outputs/y_train/data"}], "evaluate-model": - [{"key": "artifacts/$PIPELINERUN/evaluate-model/mlpipeline-metrics.tgz", "name": - "mlpipeline-metrics", "path": "/tmp/outputs/mlpipeline_metrics/data"}], "train-model": - [{"key": "artifacts/$PIPELINERUN/train-model/model.tgz", "name": "train-model-model", - "path": "/tmp/outputs/model/data"}]}' - tekton.dev/input_artifacts: '{"evaluate-model": [{"name": "data-prep-X_test", - "parent_task": "data-prep"}, {"name": "data-prep-y_test", "parent_task": "data-prep"}, - {"name": "train-model-model", "parent_task": "train-model"}], "train-model": - [{"name": "data-prep-X_train", "parent_task": "data-prep"}, {"name": "data-prep-y_train", - "parent_task": "data-prep"}], "validate-model": [{"name": "train-model-model", - "parent_task": "train-model"}]}' - tekton.dev/artifact_bucket: mlpipeline - tekton.dev/artifact_endpoint: ${MINIO_SERVICE_SERVICE_HOST}:${MINIO_SERVICE_SERVICE_PORT} - tekton.dev/artifact_endpoint_scheme: http:// - tekton.dev/artifact_items: '{"data-prep": [["X_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test"], - ["X_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train"], - ["y_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test"], - ["y_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train"]], - "evaluate-model": [["mlpipeline-metrics", "/tmp/outputs/mlpipeline_metrics/data"]], - "train-model": [["model", "$(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model"]], - "validate-model": []}' - sidecar.istio.io/inject: "false" - tekton.dev/template: '' - pipelines.kubeflow.org/big_data_passing_format: 
$(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME - pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "iris-model", "name": - "model_obc", "optional": true, "type": "String"}], "name": "Iris Pipeline"}' - labels: - pipelines.kubeflow.org/pipelinename: '' - pipelines.kubeflow.org/generation: '' - spec: - params: - - name: model_obc - value: iris-model - pipelineSpec: - params: - - name: model_obc - default: iris-model - tasks: - - name: data-prep - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train - - --X-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test - - --y-train - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train - - --y-test - - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def data_prep( - X_train_file, - X_test_file, - y_train_file, - y_test_file, - ): - import pickle - - import pandas as pd - - from sklearn import datasets - from sklearn.model_selection import train_test_split - - def get_iris_data(): - iris = datasets.load_iris() - data = pd.DataFrame( - { - "sepalLength": iris.data[:, 0], - "sepalWidth": iris.data[:, 1], - "petalLength": iris.data[:, 2], - "petalWidth": iris.data[:, 3], - "species": iris.target, - } - ) - - print("Initial Dataset:") - print(data.head()) - - return data - - def create_training_set(dataset, test_size = 0.3): - # Features - X = dataset[["sepalLength", "sepalWidth", "petalLength", "petalWidth"]] - # Labels - y = dataset["species"] - - # Split dataset into training set and test set - X_train, X_test, y_train, y_test = train_test_split( - X, y, test_size=test_size, random_state=11 - ) - - return X_train, X_test, y_train, y_test - - def save_pickle(object_file, target_object): - with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - dataset = get_iris_data() - X_train, X_test, y_train, y_test = create_training_set(dataset) - - save_pickle(X_train_file, X_train) - save_pickle(X_test_file, X_test) - save_pickle(y_train_file, y_train) - save_pickle(y_test_file, y_test) - - import argparse - _parser = argparse.ArgumentParser(prog='Data prep', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--X-test", dest="X_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = data_prep(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - 
env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train $(results.X-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test $(results.X-test.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train $(results.y-train.path) - copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test $(results.y-test.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - results: - - name: X-test - description: /tmp/outputs/X_test/data - - name: X-train - description: /tmp/outputs/X_train/data - - name: taskrun-name - - name: y-test - description: /tmp/outputs/y_test/data - - name: y-train - description: /tmp/outputs/y_train/data - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Data prep", "outputs": - [{"name": "X_train"}, {"name": "X_test"}, {"name": "y_train"}, {"name": - "y_test"}], "version": "Data prep@sha256=5aeb512900f57983c9f643ec30ddb4ccc66490a443269b51ce0a67d57cb373b0"}' - workspaces: - - name: data-prep - workspaces: - - name: data-prep - workspace: iris-pipeline - - name: train-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_train - - --y-train - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_train - - --model - - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def train_model( - X_train_file, - y_train_file, - model_file, - ): - import pickle - - from sklearn.ensemble import RandomForestClassifier - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - def save_pickle(object_file, target_object): - 
with open(object_file, "wb") as f: - pickle.dump(target_object, f) - - def train_iris(X_train, y_train): - model = RandomForestClassifier(n_estimators=100) - model.fit(X_train, y_train) - - return model - - X_train = load_pickle(X_train_file) - y_train = load_pickle(y_train_file) - - model = train_iris(X_train, y_train) - - save_pickle(model_file, model) - - import argparse - _parser = argparse.ArgumentParser(prog='Train model', description='') - _parser.add_argument("--X-train", dest="X_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-train", dest="y_train_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = train_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: output-taskrun-name - command: - - sh - - -ec - - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)" - - image: registry.access.redhat.com/ubi8/ubi-minimal - name: copy-results-artifacts - command: - - sh - - -ec - - | - set -exo pipefail - TOTAL_SIZE=0 - copy_artifact() { - if [ -d "$1" ]; then - tar -czvf "$1".tar.gz "$1" - SUFFIX=".tar.gz" - fi - ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'` - TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE) - touch "$2" - if [[ $TOTAL_SIZE -lt 3072 ]]; then - if [ -d "$1" ]; then - tar -tzf "$1".tar.gz > "$2" - elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then - cp "$1" "$2" - fi - fi - } - copy_artifact $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model $(results.model.path) - onError: continue - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - results: - - name: model - description: /tmp/outputs/model/data - - name: taskrun-name - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Train model", - "outputs": [{"name": "model"}], "version": "Train model@sha256=cb1fbd399ee5849dcdfaafced23a0496cae1d5861795062b22512b766ec418ce"}' - workspaces: - - name: train-model - workspaces: - - name: train-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - name: evaluate-model - params: - - name: data-prep-trname - value: $(tasks.data-prep.results.taskrun-name) - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --X-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_test - - --y-test - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_test - - --model - - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - - --mlpipeline-metrics - - /tmp/outputs/mlpipeline_metrics/data - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - 
program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def _make_parent_dirs_and_return_path(file_path: str): - import os - os.makedirs(os.path.dirname(file_path), exist_ok=True) - return file_path - - def evaluate_model( - X_test_file, - y_test_file, - model_file, - mlpipeline_metrics_file, - ): - import json - import pickle - - from sklearn.metrics import accuracy_score - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - X_test = load_pickle(X_test_file) - y_test = load_pickle(y_test_file) - model = load_pickle(model_file) - - y_pred = model.predict(X_test) - - accuracy_score_metric = accuracy_score(y_test, y_pred) - print(f"Accuracy: {accuracy_score_metric}") - - metrics = { - "metrics": [ - { - "name": "accuracy-score", - "numberValue": accuracy_score_metric, - "format": "PERCENTAGE", - }, - ] - } - - with open(mlpipeline_metrics_file, "w") as f: - json.dump(metrics, f) - - import argparse - _parser = argparse.ArgumentParser(prog='Evaluate model', description='') - _parser.add_argument("--X-test", dest="X_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--y-test", dest="y_test_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parser.add_argument("--mlpipeline-metrics", dest="mlpipeline_metrics_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = evaluate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: data-prep-trname - - name: train-model-trname - stepTemplate: - volumeMounts: - - name: mlpipeline-metrics - mountPath: /tmp/outputs/mlpipeline_metrics - volumes: - - name: mlpipeline-metrics - emptyDir: {} - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Evaluate model", - "outputs": [{"name": "mlpipeline_metrics", "type": "Metrics"}], "version": - "Evaluate model@sha256=f398e65faecc6f5a4ba11a2c78d8a2274e3ede205a0e199c8bb615531a3abd4a"}' - workspaces: - - name: evaluate-model - workspaces: - - name: evaluate-model - workspace: iris-pipeline - runAfter: - - data-prep - - data-prep - - train-model - - name: validate-model - params: - - name: train-model-trname - value: $(tasks.train-model.results.taskrun-name) - taskSpec: - steps: - - name: main - args: - - --model - - $(workspaces.validate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model - command: - - sh - - -c - - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location - 'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m - pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn' - --user) && "$0" "$@" - - sh - - -ec - - | - program_path=$(mktemp) - printf "%s" "$0" > "$program_path" - python3 -u "$program_path" "$@" - - | - def validate_model(model_file): - import pickle - - def load_pickle(object_file): - with open(object_file, "rb") as f: - target_object = pickle.load(f) - - return target_object - - model = load_pickle(model_file) - - input_values = [[5, 3, 1.6, 0.2]] - - print(f"Performing test prediction on 
{input_values}") - result = model.predict(input_values) - - print(f"Response: {result}") - - import argparse - _parser = argparse.ArgumentParser(prog='Validate model', description='') - _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS) - _parsed_args = vars(_parser.parse_args()) - - _outputs = validate_model(**_parsed_args) - image: registry.access.redhat.com/ubi8/python-38 - env: - - name: ORIG_PR_NAME - valueFrom: - fieldRef: - fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun'] - params: - - name: train-model-trname - metadata: - labels: - pipelines.kubeflow.org/cache_enabled: "true" - annotations: - pipelines.kubeflow.org/component_spec_digest: '{"name": "Validate model", - "outputs": [], "version": "Validate model@sha256=53d18ff94fc8f164e7d8455f2c87fa7fdac17e7502502aaa52012e4247d089ee"}' - workspaces: - - name: validate-model - workspaces: - - name: validate-model - workspace: iris-pipeline - runAfter: - - train-model - workspaces: - - name: iris-pipeline - workspaces: - - name: iris-pipeline - volumeClaimTemplate: - spec: - accessModes: - - ReadWriteOnce - resources: - requests: - storage: 2Gi -{{ end }} diff --git a/config/internal/common/argo/policy.yaml.tmpl b/config/internal/common/default/policy.yaml.tmpl similarity index 100% rename from config/internal/common/argo/policy.yaml.tmpl rename to config/internal/common/default/policy.yaml.tmpl diff --git a/config/internal/common/tekton/policy.yaml.tmpl b/config/internal/common/tekton/policy.yaml.tmpl deleted file mode 100644 index b750639f3..000000000 --- a/config/internal/common/tekton/policy.yaml.tmpl +++ /dev/null @@ -1,84 +0,0 @@ -apiVersion: networking.k8s.io/v1 -kind: NetworkPolicy -metadata: - name: ds-pipelines-{{.Name}} - namespace: {{.Namespace}} -spec: - podSelector: - matchLabels: - app: {{.APIServerDefaultResourceName}} - component: data-science-pipelines - policyTypes: - - Ingress - ingress: - # Match all sources for oauth endpoint - - ports: - - protocol: TCP - port: 8443 - # We only allow DSPA components to communicate - # by bypassing oauth proxy, all external - # traffic should go through oauth proxy - - from: - - namespaceSelector: - matchLabels: - name: openshift-user-workload-monitoring - - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: redhat-ods-monitoring - - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: openshift-pipelines - - podSelector: - matchLabels: - app.kubernetes.io/managed-by: tekton-pipelines - pipelines.kubeflow.org/v2_component: 'true' - - podSelector: - matchLabels: - app: mariadb-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: minio-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-ui-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-persistenceagent-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-scheduledworkflow-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-envoy-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-grpc-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - - podSelector: - matchLabels: - opendatahub.io/workbenches: 'true' - ports: - - protocol: TCP - port: 8888 - - protocol: TCP 
- port: 8887 - - ports: - - protocol: TCP - port: 8080 - from: - - podSelector: - matchLabels: - app.kubernetes.io/name: data-science-pipelines-operator-driver - namespaceSelector: - matchLabels: - kubernetes.io/metadata.name: openshift-pipelines diff --git a/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl deleted file mode 100644 index 39068eaf2..000000000 --- a/config/internal/ml-metadata/v1/metadata-writer.deployment.yaml.tmpl +++ /dev/null @@ -1,75 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} -spec: - replicas: 1 - selector: - matchLabels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} - template: - metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - dspa: {{.Name}} - spec: - containers: - - env: - - name: NAMESPACE_TO_WATCH - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: PIPELINE_RUNTIME - value: tekton - - name: ARCHIVE_LOGS - value: "{{.APIServer.ArchiveLogs}}" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-{{.Name}}" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "{{.MLMD.GRPC.Port}}" - image: "{{.MLMD.Writer.Image}}" - name: main - livenessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - {{ if .MLMD.Writer.Resources.Requests }} - requests: - {{ if .MLMD.Writer.Resources.Requests.CPU }} - cpu: {{.MLMD.Writer.Resources.Requests.CPU}} - {{ end }} - {{ if .MLMD.Writer.Resources.Requests.Memory }} - memory: {{.MLMD.Writer.Resources.Requests.Memory}} - {{ end }} - {{ end }} - {{ if .MLMD.Writer.Resources.Limits }} - limits: - {{ if .MLMD.Writer.Resources.Limits.CPU }} - cpu: {{.MLMD.Writer.Resources.Limits.CPU}} - {{ end }} - {{ if .MLMD.Writer.Resources.Limits.Memory }} - memory: {{.MLMD.Writer.Resources.Limits.Memory}} - {{ end }} - {{ end }} - serviceAccountName: ds-pipeline-metadata-writer-{{.Name}} diff --git a/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl deleted file mode 100644 index 05becbf37..000000000 --- a/config/internal/ml-metadata/v1/metadata-writer.role.yaml.tmpl +++ /dev/null @@ -1,48 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - -rules: - - apiGroups: - - "" - resources: - - pods - verbs: - - get - - list - - watch - - update - - patch - - apiGroups: - - "" - resources: - - configmaps - verbs: - - get - - apiGroups: - - argoproj.io - resources: - - workflows - verbs: - - get - - list - - watch - - update - - patch - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - verbs: - - get - - list - - watch - - update - - patch diff --git a/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl deleted file mode 100644 index 1a96fd356..000000000 --- 
a/config/internal/ml-metadata/v1/metadata-writer.rolebinding.yaml.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: ds-pipeline-metadata-writer-{{.Name}} -subjects: - - kind: ServiceAccount - name: ds-pipeline-metadata-writer-{{.Name}} diff --git a/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl b/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl deleted file mode 100644 index f46131828..000000000 --- a/config/internal/ml-metadata/v1/metadata-writer.serviceaccount.yaml.tmpl +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: v1 -kind: ServiceAccount -metadata: - name: ds-pipeline-metadata-writer-{{.Name}} - namespace: {{.Namespace}} - labels: - app: ds-pipeline-metadata-writer-{{.Name}} - component: data-science-pipelines diff --git a/config/internal/mlpipelines-ui/role.yaml.tmpl b/config/internal/mlpipelines-ui/role.yaml.tmpl index f2cfe5911..6838676c5 100644 --- a/config/internal/mlpipelines-ui/role.yaml.tmpl +++ b/config/internal/mlpipelines-ui/role.yaml.tmpl @@ -34,21 +34,6 @@ rules: verbs: - get - list - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - apiGroups: - route.openshift.io verbs: diff --git a/config/internal/persistence-agent/deployment.yaml.tmpl b/config/internal/persistence-agent/deployment.yaml.tmpl index 9c91bc8d6..ac1711a81 100644 --- a/config/internal/persistence-agent/deployment.yaml.tmpl +++ b/config/internal/persistence-agent/deployment.yaml.tmpl @@ -35,11 +35,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - {{ if eq .DSPVersion "v2" }} value: Workflow - {{ else }} - value: PipelineRun - {{ end }} {{ if .PodToPodTLS }} - name: SSL_CERT_DIR value: "/etc/pki/tls/certs:/var/run/secrets/kubernetes.io/serviceaccount/" @@ -96,14 +92,11 @@ spec: memory: {{.PersistenceAgent.Resources.Limits.Memory}} {{ end }} {{ end }} - {{ if eq .DSPVersion "v2" }} volumeMounts: - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token name: persistenceagent-sa-token subPath: ds-pipeline-persistenceagent-{{.Name}}-token - {{ end }} serviceAccountName: ds-pipeline-persistenceagent-{{.Name}} - {{ if eq .DSPVersion "v2" }} volumes: - name: persistenceagent-sa-token projected: @@ -112,4 +105,3 @@ spec: audience: pipelines.kubeflow.org expirationSeconds: 3600 path: ds-pipeline-persistenceagent-{{.Name}}-token - {{ end }} diff --git a/config/internal/persistence-agent/role.yaml.tmpl b/config/internal/persistence-agent/role.yaml.tmpl index 454a32bf8..174a2c39a 100644 --- a/config/internal/persistence-agent/role.yaml.tmpl +++ b/config/internal/persistence-agent/role.yaml.tmpl @@ -23,17 +23,3 @@ rules: - get - list - watch - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - verbs: - - create - - get - - list - - watch - - update - - patch - - delete diff --git a/config/internal/scheduled-workflow/deployment.yaml.tmpl b/config/internal/scheduled-workflow/deployment.yaml.tmpl index 241b3e40d..a46e55239 100644 --- a/config/internal/scheduled-workflow/deployment.yaml.tmpl +++ b/config/internal/scheduled-workflow/deployment.yaml.tmpl @@ -28,8 +28,6 @@ spec: value: 
"{{.Namespace}}" - name: CRON_SCHEDULE_TIMEZONE value: "{{.ScheduledWorkflow.CronScheduleTimezone}}" - - name: EXECUTIONTYPE - value: PipelineRun image: "{{.ScheduledWorkflow.Image}}" # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-scheduledworkflow diff --git a/config/internal/scheduled-workflow/role.yaml.tmpl b/config/internal/scheduled-workflow/role.yaml.tmpl index a4785ae84..748de5025 100644 --- a/config/internal/scheduled-workflow/role.yaml.tmpl +++ b/config/internal/scheduled-workflow/role.yaml.tmpl @@ -39,31 +39,3 @@ rules: verbs: - create - patch - - apiGroups: - - tekton.dev - resources: - - pipelineruns - - taskruns - - conditions - - runs - - tasks - verbs: - - create - - get - - list - - watch - - update - - patch - - delete - - apiGroups: - - custom.tekton.dev - resources: - - pipelineloops - verbs: - - create - - get - - list - - watch - - update - - patch - - delete diff --git a/config/internal/workflow-controller/configmap.yaml.tmpl b/config/internal/workflow-controller/configmap.yaml.tmpl index 49aab8a23..50d25a4d5 100644 --- a/config/internal/workflow-controller/configmap.yaml.tmpl +++ b/config/internal/workflow-controller/configmap.yaml.tmpl @@ -12,7 +12,7 @@ metadata: namespace: {{.Namespace}} data: artifactRepository: | - archiveLogs: {{.APIServer.ArchiveLogs}} + archiveLogs: false s3: endpoint: "{{.ObjectStorageConnection.Endpoint}}" bucket: "{{.ObjectStorageConnection.Bucket}}" diff --git a/config/manager/manager.yaml b/config/manager/manager.yaml index f028ff04a..484d6284d 100644 --- a/config/manager/manager.yaml +++ b/config/manager/manager.yaml @@ -39,44 +39,26 @@ spec: # Env vars are prioritized over --config - name: IMAGES_APISERVER value: $(IMAGES_APISERVER) - - name: IMAGES_ARTIFACT - value: $(IMAGES_ARTIFACT) - - name: IMAGES_OAUTHPROXY - value: $(IMAGES_OAUTHPROXY) - - name: IMAGES_PERSISTENTAGENT - value: $(IMAGES_PERSISTENTAGENT) + - name: IMAGES_PERSISTENCEAGENT + value: $(IMAGES_PERSISTENCEAGENT) - name: IMAGES_SCHEDULEDWORKFLOW value: $(IMAGES_SCHEDULEDWORKFLOW) - - name: IMAGES_CACHE - value: $(IMAGES_CACHE) - - name: IMAGES_MOVERESULTSIMAGE - value: $(IMAGES_MOVERESULTSIMAGE) - - name: IMAGES_MARIADB - value: $(IMAGES_MARIADB) - name: IMAGES_MLMDENVOY value: $(IMAGES_MLMDENVOY) - name: IMAGES_MLMDGRPC value: $(IMAGES_MLMDGRPC) - - name: IMAGES_MLMDWRITER - value: $(IMAGES_MLMDWRITER) - - name: IMAGESV2_ARGO_APISERVER - value: $(IMAGESV2_ARGO_APISERVER) - - name: IMAGESV2_ARGO_PERSISTENCEAGENT - value: $(IMAGESV2_ARGO_PERSISTENCEAGENT) - - name: IMAGESV2_ARGO_SCHEDULEDWORKFLOW - value: $(IMAGESV2_ARGO_SCHEDULEDWORKFLOW) - - name: IMAGESV2_ARGO_MLMDENVOY - value: $(IMAGESV2_ARGO_MLMDENVOY) - - name: IMAGESV2_ARGO_MLMDGRPC - value: $(IMAGESV2_ARGO_MLMDGRPC) - - name: IMAGESV2_ARGO_WORKFLOWCONTROLLER - value: $(IMAGESV2_ARGO_WORKFLOWCONTROLLER) - - name: IMAGESV2_ARGO_ARGOEXEC - value: $(IMAGESV2_ARGO_ARGOEXEC) - - name: V2_LAUNCHER_IMAGE - value: $(V2_LAUNCHER_IMAGE) - - name: V2_DRIVER_IMAGE - value: $(V2_DRIVER_IMAGE) + - name: IMAGES_ARGO_EXEC + value: $(IMAGES_ARGO_EXEC) + - name: IMAGES_ARGO_WORKFLOWCONTROLLER + value: $(IMAGES_ARGO_WORKFLOWCONTROLLER) + - name: IMAGES_LAUNCHER + value: $(IMAGES_LAUNCHER) + - name: IMAGES_DRIVER + value: $(IMAGES_DRIVER) + - name: IMAGES_OAUTHPROXY + value: $(IMAGES_OAUTHPROXY) + - name: IMAGES_MARIADB + value: $(IMAGES_MARIADB) - name: ZAP_LOG_LEVEL value: $(ZAP_LOG_LEVEL) - name: MAX_CONCURRENT_RECONCILES diff --git 
a/config/rbac/role.yaml b/config/rbac/role.yaml index 90aecbba7..678554188 100644 --- a/config/rbac/role.yaml +++ b/config/rbac/role.yaml @@ -111,12 +111,6 @@ rules: - services verbs: - '*' -- apiGroups: - - custom.tekton.dev - resources: - - pipelineloops - verbs: - - '*' - apiGroups: - datasciencepipelinesapplications.opendatahub.io resources: @@ -249,12 +243,6 @@ rules: - create - delete - get -- apiGroups: - - tekton.dev - resources: - - '*' - verbs: - - '*' - apiGroups: - workload.codeflare.dev resources: diff --git a/config/samples/v2/custom-configs/db-creds.yaml b/config/samples/custom-configs/db-creds.yaml similarity index 100% rename from config/samples/v2/custom-configs/db-creds.yaml rename to config/samples/custom-configs/db-creds.yaml diff --git a/config/samples/v2/custom-configs/dspa.yaml b/config/samples/custom-configs/dspa.yaml similarity index 100% rename from config/samples/v2/custom-configs/dspa.yaml rename to config/samples/custom-configs/dspa.yaml diff --git a/config/samples/v2/custom-configs/kustomization.yaml b/config/samples/custom-configs/kustomization.yaml similarity index 100% rename from config/samples/v2/custom-configs/kustomization.yaml rename to config/samples/custom-configs/kustomization.yaml diff --git a/config/samples/v2/custom-configs/storage-creds.yaml b/config/samples/custom-configs/storage-creds.yaml similarity index 100% rename from config/samples/v2/custom-configs/storage-creds.yaml rename to config/samples/custom-configs/storage-creds.yaml diff --git a/config/samples/v1/custom-configs/ui-configmap.yaml b/config/samples/custom-configs/ui-configmap.yaml similarity index 100% rename from config/samples/v1/custom-configs/ui-configmap.yaml rename to config/samples/custom-configs/ui-configmap.yaml diff --git a/config/samples/v2/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml b/config/samples/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml similarity index 100% rename from config/samples/v2/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml rename to config/samples/custom-workflow-controller-config/custom-workflow-controller-configmap.yaml diff --git a/config/samples/v2/custom-workflow-controller-config/dspa.yaml b/config/samples/custom-workflow-controller-config/dspa.yaml similarity index 100% rename from config/samples/v2/custom-workflow-controller-config/dspa.yaml rename to config/samples/custom-workflow-controller-config/dspa.yaml diff --git a/config/samples/v2/custom-workflow-controller-config/kustomization.yaml b/config/samples/custom-workflow-controller-config/kustomization.yaml similarity index 100% rename from config/samples/v2/custom-workflow-controller-config/kustomization.yaml rename to config/samples/custom-workflow-controller-config/kustomization.yaml diff --git a/config/samples/v2/dspa-all-fields/dspa_all_fields.yaml b/config/samples/dspa-all-fields/dspa_all_fields.yaml similarity index 92% rename from config/samples/v2/dspa-all-fields/dspa_all_fields.yaml rename to config/samples/dspa-all-fields/dspa_all_fields.yaml index 87d538d70..27d2dfaec 100644 --- a/config/samples/v2/dspa-all-fields/dspa_all_fields.yaml +++ b/config/samples/dspa-all-fields/dspa_all_fields.yaml @@ -24,8 +24,6 @@ spec: limits: cpu: 500m memory: 1Gi - CABundleFileMountPath: /your/certbundle/path.crt - CABundleFileName: certbundlefilename.crt # requires this configmap to be created beforehand, cABundle: configMapKey: keyname @@ -34,19 +32,6 @@ spec: customServerConfigMap: name: configmapname 
key: keyname - # the following are v1 specific options in spec.apiServer.* - applyTektonCustomResource: true - archiveLogs: false - artifactImage: quay.io/opendatahub/ds-pipelines-artifact-manager:latest - cacheImage: registry.access.redhat.com/ubi8/ubi-minimal:8.8 - moveResultsImage: busybox - injectDefaultScript: true - stripEOF: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 120 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true persistenceAgent: deploy: true image: quay.io/modh/odh-ml-pipelines-persistenceagent-container:v1.18.0-8 diff --git a/config/samples/v2/dspa-simple/dspa_simple.yaml b/config/samples/dspa-simple/dspa_simple.yaml similarity index 100% rename from config/samples/v2/dspa-simple/dspa_simple.yaml rename to config/samples/dspa-simple/dspa_simple.yaml diff --git a/config/samples/v1/dspa-simple/kustomization.yaml b/config/samples/dspa-simple/kustomization.yaml similarity index 100% rename from config/samples/v1/dspa-simple/kustomization.yaml rename to config/samples/dspa-simple/kustomization.yaml diff --git a/config/samples/v2/external-object-storage/dspa.yaml b/config/samples/external-object-storage/dspa.yaml similarity index 100% rename from config/samples/v2/external-object-storage/dspa.yaml rename to config/samples/external-object-storage/dspa.yaml diff --git a/config/samples/v2/external-object-storage/kustomization.yaml b/config/samples/external-object-storage/kustomization.yaml similarity index 100% rename from config/samples/v2/external-object-storage/kustomization.yaml rename to config/samples/external-object-storage/kustomization.yaml diff --git a/config/samples/v2/local-dev/dspa.yaml b/config/samples/local-dev/dspa.yaml similarity index 100% rename from config/samples/v2/local-dev/dspa.yaml rename to config/samples/local-dev/dspa.yaml diff --git a/config/samples/v2/local-dev/kustomization.yaml b/config/samples/local-dev/kustomization.yaml similarity index 100% rename from config/samples/v2/local-dev/kustomization.yaml rename to config/samples/local-dev/kustomization.yaml diff --git a/config/samples/v2/local-dev/storage-creds.yaml b/config/samples/local-dev/storage-creds.yaml similarity index 100% rename from config/samples/v2/local-dev/storage-creds.yaml rename to config/samples/local-dev/storage-creds.yaml diff --git a/config/samples/v1/custom-configs/artifact_script.yaml b/config/samples/v1/custom-configs/artifact_script.yaml deleted file mode 100644 index 890f301aa..000000000 --- a/config/samples/v1/custom-configs/artifact_script.yaml +++ /dev/null @@ -1,24 +0,0 @@ -apiVersion: v1 -data: - somekey: |- - #!/usr/bin/env sh - push_artifact() { - if [ -f "$2" ]; then - tar -cvzf $1.tgz $2 - aws s3 --endpoint ${ARTIFACT_ENDPOINT} cp $1.tgz s3://$ARTIFACT_BUCKET/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: custom-artifact-script diff --git a/config/samples/v1/custom-configs/db-creds.yaml b/config/samples/v1/custom-configs/db-creds.yaml deleted file mode 100644 index d84d13c8e..000000000 --- a/config/samples/v1/custom-configs/db-creds.yaml +++ /dev/null @@ -1,10 +0,0 @@ -kind: Secret -apiVersion: v1 -metadata: - name: testdbsecret - labels: - app: mariadb-sample - component: data-science-pipelines -stringData: - password: testingpassword -type: Opaque diff --git a/config/samples/v1/custom-configs/dspa.yaml b/config/samples/v1/custom-configs/dspa.yaml deleted file mode 100644 index 3cb024cdb..000000000 --- a/config/samples/v1/custom-configs/dspa.yaml +++ /dev/null @@ -1,96 +0,0 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: sample -spec: - apiServer: - deploy: true - enableSamplePipeline: true - applyTektonCustomResource: true - archiveLogs: false - cacheImage: registry.access.redhat.com/ubi8/ubi-minimal - moveResultsImage: busybox - injectDefaultScript: true - stripEOF: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 120 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true - artifactScriptConfigMap: - name: custom-artifact-script - key: "somekey" - resources: - requests: - cpu: 250m - memory: 500Mi - limits: - cpu: 500m - memory: 1Gi - persistenceAgent: - deploy: true - numWorkers: 2 - resources: - requests: - cpu: 120m - memory: 500Mi - limits: - cpu: 250m - memory: 1Gi - scheduledWorkflow: - deploy: true - cronScheduleTimezone: UTC - resources: - requests: - cpu: 120m - memory: 100Mi - limits: - cpu: 250m - memory: 250Mi - mlpipelineUI: - deploy: true - image: quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - configMap: custom-ui-configmap - database: - mariaDB: - deploy: true - image: registry.redhat.io/rhel8/mariadb-103:1-188 - username: mlpipeline - pipelineDBName: randomDBName - pvcSize: 10Gi - resources: - requests: - cpu: 300m - memory: 800Mi - limits: - cpu: "1" - memory: 1Gi - passwordSecret: - name: testdbsecret - key: password - storageClassName: "" - objectStorage: - minio: - deploy: true - image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance - bucket: mlpipeline - pvcSize: 10Gi - resources: - requests: - cpu: 200m - memory: 100Mi - limits: - cpu: 250m - memory: 1Gi - storageClassName: "" - s3CredentialsSecret: - secretName: teststoragesecret - accessKey: AWS_ACCESS_KEY_ID - secretKey: AWS_SECRET_ACCESS_KEY diff --git a/config/samples/v1/custom-configs/kustomization.yaml b/config/samples/v1/custom-configs/kustomization.yaml deleted file mode 100644 index 5b7f5481b..000000000 --- a/config/samples/v1/custom-configs/kustomization.yaml +++ /dev/null @@ -1,8 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: - - dspa.yaml - - db-creds.yaml - - artifact_script.yaml - - storage-creds.yaml - - ui-configmap.yaml diff --git a/config/samples/v1/custom-configs/storage-creds.yaml b/config/samples/v1/custom-configs/storage-creds.yaml deleted file mode 100644 index 
40903bf6f..000000000 --- a/config/samples/v1/custom-configs/storage-creds.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: teststoragesecret - labels: - opendatahub.io/dashboard: 'true' - opendatahub.io/managed: 'true' - annotations: - opendatahub.io/connection-type: s3 - openshift.io/display-name: Minio Data Connection -data: - AWS_ACCESS_KEY_ID: QUtJQUlPU0ZPRE5ON0VYQU1QTEU= - AWS_SECRET_ACCESS_KEY: d0phbHJYVXRuRkVNSS9LN01ERU5HL2JQeFJmaUNZRVhBTVBMRUtFWQ== -type: Opaque diff --git a/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml b/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml deleted file mode 100644 index 8bfa58a1a..000000000 --- a/config/samples/v1/dspa-all-fields/dspa_all_fields.yaml +++ /dev/null @@ -1,214 +0,0 @@ -# This file should not be used to deploy a DataSciencePipelinesApplication -# Its main purpose is to show all possible fields that can be configured -# Note that you cannot specify all fields; some are mutually exclusive -# For example, you can only specify either a minio deployment or an -# externalStorage connection, but not both -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: sample - namespace: data-science-project -spec: - apiServer: - deploy: true - image: quay.io/modh/odh-ml-pipelines-api-server-container:v1.18.0-8 - enableSamplePipeline: true - applyTektonCustomResource: true - archiveLogs: false - artifactImage: quay.io/modh/odh-ml-pipelines-artifact-manager-container:v1.18.0-8 - cacheImage: registry.access.redhat.com/ubi8/ubi-minimal - moveResultsImage: busybox - injectDefaultScript: true - stripEOF: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 120 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true - resources: - requests: - cpu: 250m - memory: 500Mi - limits: - cpu: 500m - memory: 1Gi - # optional (default is: ds-pipeline-artifact-script-${metadata.name}) - # apiserver deployment will fail if the specified custom config does not exist - # if the default name is used, the configmap will be overwritten by the operator: - - # artifactScriptConfigMap: - # name: YourConfigMapName - # key: "artifact_script" - persistenceAgent: - deploy: true - image: quay.io/modh/odh-ml-pipelines-persistenceagent-container:v1.18.0-8 - numWorkers: 2 # Number of workers for the sync job.
- resources: - requests: - cpu: 120m - memory: 500Mi - limits: - cpu: 250m - memory: 1Gi - scheduledWorkflow: - deploy: true - image: quay.io/modh/odh-ml-pipelines-scheduledworkflow-container:v1.18.0-8 - cronScheduleTimezone: UTC - resources: - requests: - cpu: 120m - memory: 100Mi - limits: - cpu: 250m - memory: 250Mi - mlpipelineUI: - deploy: true - image: quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - # requires this configmap to be created beforehand, - # otherwise the operator will not deploy the DSPA - configMap: ds-pipeline-ui-configmap - database: - disableHealthCheck: false - mariaDB: # mutually exclusive with externalDB - deploy: true - image: registry.redhat.io/rhel8/mariadb-103:1-188 - username: mlpipeline - pipelineDBName: randomDBName - pvcSize: 20Gi - resources: - requests: - cpu: 300m - memory: 800Mi - limits: - cpu: "1" - memory: 1Gi - # requires this secret to be created beforehand, - # otherwise the operator will not deploy the DSPA - passwordSecret: - name: ds-pipelines-db-sample - key: password -# externalDB: -# host: mysql:3306 -# port: "8888" -# username: root -# pipelineDBName: randomDBName -# passwordSecret: -# name: somesecret -# key: somekey - objectStorage: - disableHealthCheck: false - minio: # mutually exclusive with externalStorage - deploy: true - image: quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance - bucket: mlpipeline - pvcSize: 10Gi - resources: - requests: - cpu: 200m - memory: 100Mi - limits: - cpu: 250m - memory: 1Gi - # requires this secret to be created beforehand, - # otherwise the operator will not deploy the DSPA - s3CredentialsSecret: - secretName: somesecret-sample - accessKey: AWS_ACCESS_KEY_ID - secretKey: AWS_SECRET_ACCESS_KEY -# externalStorage: -# host: minio.com -# port: "9092" -# bucket: mlpipeline -# scheme: https -# s3CredentialsSecret: -# secretName: somesecret-db-sample -# accessKey: somekey -# secretKey: somekey - mlmd: # Deploys an optional ML-Metadata Component - deploy: true - envoy: - image: quay.io/opendatahub/ds-pipelines-metadata-envoy:1.7.0 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - grpc: - image: quay.io/opendatahub/ds-pipelines-metadata-grpc:1.0.0 - port: "8080" - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - writer: - image: quay.io/opendatahub/ds-pipelines-metadata-writer:1.1.0 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi -status: - # Reports True iff: - # * ApiServerReady, PersistenceAgentReady, ScheduledWorkflowReady, DatabaseReady, ObjectStorageReady report True - # AND - # * MLPipelinesUIReady is (Ready: True) OR is (Ready: False && DeploymentDisabled) - conditions: - - type: Ready - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: MinimumReplicasAvailable - message: 'some message' - - type: ApiServerReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: MinimumReplicasAvailable - message: 'some message' - - type: UserInterfaceReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: MinimumReplicasAvailable - message: 'some message' - - type: PersistenceAgentReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: MinimumReplicasAvailable - message: 'some message' - - type:
ScheduledWorkflowReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: MinimumReplicasAvailable - message: 'some message' - # Do we need to do this?? API Server application already - # checks for db/storage connectivity, and pod will fail to come up - # in such a case. - - type: DatabaseReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: DataBaseReady - message: '' - - type: ObjectStorageReady - status: "True" - observedGeneration: 4 - lastTransitionTime: '2023-02-02T21:00:00Z' - reason: ObjectStorageReady - message: '' diff --git a/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml b/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml deleted file mode 100644 index 9f24771ad..000000000 --- a/config/samples/v1/dspa-local-dev/dspa_local_dev.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# A simple DSPA with the Database and ObjectStore Health Checks Disabled -# -# Since the default database and storage options leverage internal Services, -# a locally-run DSPO that manages an external cluster (common development practice) -# would not be able to run the pre-deploy health checks on these prerequisite components -# and therefore the DSPA will never fully deploy without disabling them, as this DSPA sample does -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: sample -spec: - apiServer: - enableSamplePipeline: true - database: - disableHealthCheck: true - objectStorage: - disableHealthCheck: true - minio: - image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' - mlpipelineUI: - image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui' diff --git a/config/samples/v1/dspa-simple/dspa_simple.yaml b/config/samples/v1/dspa-simple/dspa_simple.yaml deleted file mode 100644 index e4d7798d8..000000000 --- a/config/samples/v1/dspa-simple/dspa_simple.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: sample -spec: - apiServer: - enableSamplePipeline: true - # One of minio or externalStorage must be specified for objectStorage - # This example illustrates minimal deployment with minio - # This is NOT supported and should be used for dev testing/experimentation only. - # See external-object-storage/dspa.yaml for an example with external connection. 
- objectStorage: - minio: - # Image field is required - image: 'quay.io/opendatahub/minio:RELEASE.2019-08-14T20-37-41Z-license-compliance' - # Optional - mlpipelineUI: - # Image field is required - image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui' diff --git a/config/samples/v1/external-object-storage/dspa.yaml b/config/samples/v1/external-object-storage/dspa.yaml deleted file mode 100644 index b2daa7019..000000000 --- a/config/samples/v1/external-object-storage/dspa.yaml +++ /dev/null @@ -1,21 +0,0 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: sample -spec: - apiServer: - enableSamplePipeline: true - objectStorage: - externalStorage: - bucket: rhods-dsp-dev - host: s3.us-east-2.amazonaws.com - region: us-east-2 - s3CredentialsSecret: - accessKey: k8saccesskey - secretKey: k8ssecretkey - secretName: aws-bucket-creds - scheme: https - # Optional - mlpipelineUI: - # Image field is required - image: 'quay.io/opendatahub/odh-ml-pipelines-frontend-container:beta-ui' diff --git a/config/samples/v1/external-object-storage/kustomization.yaml b/config/samples/v1/external-object-storage/kustomization.yaml deleted file mode 100644 index 4e4ae0d01..000000000 --- a/config/samples/v1/external-object-storage/kustomization.yaml +++ /dev/null @@ -1,5 +0,0 @@ -apiVersion: kustomize.config.k8s.io/v1beta1 -kind: Kustomization -resources: - - dspa.yaml - - storage-creds.yaml diff --git a/config/samples/v1/external-object-storage/storage-creds.yaml b/config/samples/v1/external-object-storage/storage-creds.yaml deleted file mode 100644 index 6d33d53f8..000000000 --- a/config/samples/v1/external-object-storage/storage-creds.yaml +++ /dev/null @@ -1,14 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - name: aws-bucket-creds - labels: - opendatahub.io/dashboard: 'true' - opendatahub.io/managed: 'true' - annotations: - opendatahub.io/connection-type: s3 - openshift.io/display-name: AWS S3 Connection -stringData: - k8saccesskey: someaccesskey - k8ssecretkey: somesecretkey -type: Opaque diff --git a/config/samples/v2/custom-configs/ui-configmap.yaml b/config/samples/v2/custom-configs/ui-configmap.yaml deleted file mode 100644 index 7e2e7ebaf..000000000 --- a/config/samples/v2/custom-configs/ui-configmap.yaml +++ /dev/null @@ -1,11 +0,0 @@ -apiVersion: v1 -data: - viewer-pod-template.json: |- - { - "spec": { - "serviceAccountName": "ds-pipelines-viewer-sample" - } - } -kind: ConfigMap -metadata: - name: custom-ui-configmap diff --git a/config/samples/v2/dspa-simple/kustomization.yaml b/config/samples/v2/dspa-simple/kustomization.yaml deleted file mode 100644 index d673cd998..000000000 --- a/config/samples/v2/dspa-simple/kustomization.yaml +++ /dev/null @@ -1,2 +0,0 @@ -resources: -- dspa_simple.yaml diff --git a/config/v2/cache/clusterrole.yaml b/config/v2/cache/clusterrole.yaml deleted file mode 100644 index 4ecb868a7..000000000 --- a/config/v2/cache/clusterrole.yaml +++ /dev/null @@ -1,34 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app: cache-deployer - name: cache-deployer -rules: -- apiGroups: - - certificates.k8s.io - resources: - - certificatesigningrequests - verbs: - - create - - delete - - get - - update -- apiGroups: - - admissionregistration.k8s.io - resources: - - mutatingwebhookconfigurations - verbs: - - create - - delete - - get - - list - - patch -- apiGroups: - - certificates.k8s.io - resourceNames: - - kubernetes.io/* - resources: - - signers - 
verbs: - - approve diff --git a/config/v2/configmaps/configartifactbucket.yaml b/config/v2/configmaps/configartifactbucket.yaml deleted file mode 100644 index 2df1c0bad..000000000 --- a/config/v2/configmaps/configartifactbucket.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-artifact-bucket - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/configartifactpvc.yaml b/config/v2/configmaps/configartifactpvc.yaml deleted file mode 100644 index a5d869bbb..000000000 --- a/config/v2/configmaps/configartifactpvc.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-artifact-pvc - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/configdefaults.yaml b/config/v2/configmaps/configdefaults.yaml deleted file mode 100644 index dc48532e7..000000000 --- a/config/v2/configmaps/configdefaults.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-defaults - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/configobservability.yaml b/config/v2/configmaps/configobservability.yaml deleted file mode 100644 index 6a12cdb76..000000000 --- a/config/v2/configmaps/configobservability.yaml +++ /dev/null @@ -1,9 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-observability - labels: - app.kubernetes.io/component: resolvers - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/configspire.yaml b/config/v2/configmaps/configspire.yaml deleted file mode 100644 index c4dc80b44..000000000 --- a/config/v2/configmaps/configspire.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-spire - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/configtrustedsources.yaml b/config/v2/configmaps/configtrustedsources.yaml deleted file mode 100644 index 9c1cd485c..000000000 --- a/config/v2/configmaps/configtrustedsources.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: config-trusted-resources - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/featureflags.yaml b/config/v2/configmaps/featureflags.yaml deleted file mode 100644 index 9218692c7..000000000 --- a/config/v2/configmaps/featureflags.yaml +++ /dev/null @@ -1,8 +0,0 @@ -kind: ConfigMap -apiVersion: v1 -metadata: - name: feature-flags - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipelines - operator.tekton.dev/operand-name: tektoncd-pipelines diff --git a/config/v2/configmaps/kustomization.yaml b/config/v2/configmaps/kustomization.yaml deleted file mode 100644 index df5f2f957..000000000 --- a/config/v2/configmaps/kustomization.yaml +++ /dev/null @@ -1,8 +0,0 @@ -resources: -- configdefaults.yaml -- configobservability.yaml -- configspire.yaml -- featureflags.yaml 
-- configartifactbucket.yaml -- configartifactpvc.yaml -- configtrustedsources.yaml diff --git a/config/v2/exithandler/clusterrole.leaderelection.yaml b/config/v2/exithandler/clusterrole.leaderelection.yaml deleted file mode 100644 index 6a6209952..000000000 --- a/config/v2/exithandler/clusterrole.leaderelection.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - name: exithandler-leader-election -rules: -- apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - get - - list - - create - - update - - delete - - watch diff --git a/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml b/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml deleted file mode 100644 index efcb8bdfa..000000000 --- a/config/v2/exithandler/controller/clusterrole.clusteraccess.yaml +++ /dev/null @@ -1,54 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - name: exithandler-controller-cluster-access -rules: -- apiGroups: - - tekton.dev - resources: - - runs - - customruns - - taskruns - - pipelineruns - - runs/status - - customruns/status - - taskruns/status - - pipelineruns/status - - runs/finalizers - - customruns/finalizers - verbs: - - get - - list - - create - - update - - delete - - patch - - watch -- apiGroups: - - custom.tekton.dev - resources: - - exithandlers - verbs: - - get - - list - - create - - update - - delete - - patch - - watch -- apiGroups: - - apps - resources: - - deployments - verbs: - - get - - list - - create - - update - - delete - - patch - - watch diff --git a/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml b/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml deleted file mode 100644 index f0090f30e..000000000 --- a/config/v2/exithandler/controller/clusterrole.tenantaccess.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - name: exithandler-controller-tenant-access -rules: -- apiGroups: - - "" - resources: - - events - verbs: - - get - - list - - create - - update - - delete - - watch diff --git a/config/v2/exithandler/webhook/kustomization.yaml b/config/v2/exithandler/webhook/kustomization.yaml deleted file mode 100644 index 8202e77ff..000000000 --- a/config/v2/exithandler/webhook/kustomization.yaml +++ /dev/null @@ -1,10 +0,0 @@ -resources: -- clusterrole.clusteraccess.yaml -- clusterrolebinding.clusteraccess.yaml -- deployment.yaml -- mutatingwebhookconfig.yaml -- role.yaml -- rolebinding.yaml -- service.yaml -- serviceaccount.yaml -- validatingwebhookconfig.yaml diff --git a/config/v2/kfptask/clusterrole.leaderelection.yaml b/config/v2/kfptask/clusterrole.leaderelection.yaml deleted file mode 100644 index ef9689d61..000000000 --- a/config/v2/kfptask/clusterrole.leaderelection.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - name: leader-election -rules: -- apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - get - - list - - create - - update - - delete - - watch diff 
--git a/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml b/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml deleted file mode 100644 index 9d32b310b..000000000 --- a/config/v2/kfptask/webhook/clusterrole.clusteraccess.yaml +++ /dev/null @@ -1,82 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - name: webhook-cluster-access -rules: -- apiGroups: - - apiextensions.k8s.io - resources: - - customresourcedefinitions - - customresourcedefinitions/status - verbs: - - get - - list - - update -- apiGroups: - - "" - resources: - - namespaces - verbs: - - get - - list - - update -- apiGroups: - - admissionregistration.k8s.io - resources: - - mutatingwebhookconfigurations - - validatingwebhookconfigurations - verbs: - - list - - watch -- apiGroups: - - admissionregistration.k8s.io - resourceNames: - - webhook.kfptask.custom.tekton.dev - resources: - - mutatingwebhookconfigurations - verbs: - - get - - update - - delete -- apiGroups: - - apps - resources: - - deployments - - deployments/finalizers - verbs: - - get - - list - - create - - update - - delete -- apiGroups: - - "" - resources: - - namespaces/finalizers - resourceNames: - - openshift-pipelines - verbs: - - update -- apiGroups: - - admissionregistration.k8s.io - resourceNames: - - validation.webhook.kfptask.custom.tekton.dev - resources: - - validatingwebhookconfigurations - verbs: - - get - - update - - delete -- apiGroups: - - policy - resourceNames: - - tekton-pipelines - - openshift-pipelines - resources: - - podsecuritypolicies - verbs: - - use diff --git a/config/v2/kfptask/webhook/kustomization.yaml b/config/v2/kfptask/webhook/kustomization.yaml deleted file mode 100644 index df691ded5..000000000 --- a/config/v2/kfptask/webhook/kustomization.yaml +++ /dev/null @@ -1,11 +0,0 @@ -resources: -- clusterrole.clusteraccess.yaml -- clusterrolebinding.clusteraccess.yaml -- clusterrolebinding.leaderelection.yaml -- deployment.yaml -- mutatingwebhookconfig.yaml -- role.yaml -- rolebinding.yaml -- service.yaml -- serviceaccount.yaml -- validatingwebhookconfig.yaml diff --git a/config/v2/pipelineloop/clusterrole.leaderelection.yaml b/config/v2/pipelineloop/clusterrole.leaderelection.yaml deleted file mode 100644 index 341c80e11..000000000 --- a/config/v2/pipelineloop/clusterrole.leaderelection.yaml +++ /dev/null @@ -1,19 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - name: pipelineloop-leader-election -rules: -- apiGroups: - - coordination.k8s.io - resources: - - leases - verbs: - - get - - list - - create - - update - - delete - - watch diff --git a/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml b/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml deleted file mode 100644 index e16c0e296..000000000 --- a/config/v2/pipelineloop/controller/clusterrole.tenantaccess.yaml +++ /dev/null @@ -1,20 +0,0 @@ -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - app.kubernetes.io/component: controller - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: tekton-pipeline-loops - name: pipelineloop-controller-tenant-access -rules: -- apiGroups: - - "" - resources: - - events - verbs: - - get - - list - - create - - update - - delete - - watch diff --git 
a/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml b/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml deleted file mode 100644 index ae60d20fa..000000000 --- a/config/v2/secrets/kfpexithandlerwebhookcertssecret.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - pipeline.tekton.dev/release: devel - name: kfp-exithandler-webhook-certs diff --git a/config/v2/secrets/kfptaskwebhookcertssecret.yaml b/config/v2/secrets/kfptaskwebhookcertssecret.yaml deleted file mode 100644 index 6387033ce..000000000 --- a/config/v2/secrets/kfptaskwebhookcertssecret.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - pipeline.tekton.dev/release: devel - name: kfptask-webhook-certs diff --git a/config/v2/secrets/kustomization.yaml b/config/v2/secrets/kustomization.yaml deleted file mode 100644 index 2907d843f..000000000 --- a/config/v2/secrets/kustomization.yaml +++ /dev/null @@ -1,4 +0,0 @@ -resources: -- kfpexithandlerwebhookcertssecret.yaml -- kfptaskwebhookcertssecret.yaml -- tektonpipelineloopwebhookcertssecret.yaml diff --git a/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml b/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml deleted file mode 100644 index 262a53f52..000000000 --- a/config/v2/secrets/tektonpipelineloopwebhookcertssecret.yaml +++ /dev/null @@ -1,9 +0,0 @@ -apiVersion: v1 -kind: Secret -metadata: - labels: - app.kubernetes.io/component: webhook - app.kubernetes.io/instance: default - app.kubernetes.io/part-of: kfp-tekton - pipeline.tekton.dev/release: devel - name: tektonpipelineloop-webhook-certs diff --git a/controllers/apiserver_test.go b/controllers/apiserver_test.go index 8d11138b3..df00779ac 100644 --- a/controllers/apiserver_test.go +++ b/controllers/apiserver_test.go @@ -35,10 +35,13 @@ func TestDeployAPIServer(t *testing.T) { // Construct DSPASpec with deployed APIServer dspa := &dspav1alpha1.DataSciencePipelinesApplication{ Spec: dspav1alpha1.DSPASpec{ + PodToPodTLS: boolPtr(false), APIServer: &dspav1alpha1.APIServer{ Deploy: true, }, - MLMD: &dspav1alpha1.MLMD{}, + MLMD: &dspav1alpha1.MLMD{ + Deploy: true, + }, Database: &dspav1alpha1.Database{ DisableHealthCheck: false, MariaDB: &dspav1alpha1.MariaDB{ @@ -130,10 +133,13 @@ func TestApiServerEndpoints(t *testing.T) { // Construct DSPASpec with deployed APIServer dspa := &dspav1alpha1.DataSciencePipelinesApplication{ Spec: dspav1alpha1.DSPASpec{ + PodToPodTLS: boolPtr(false), APIServer: &dspav1alpha1.APIServer{ Deploy: true, }, - MLMD: &dspav1alpha1.MLMD{}, + MLMD: &dspav1alpha1.MLMD{ + Deploy: true, + }, Database: &dspav1alpha1.Database{ DisableHealthCheck: false, MariaDB: &dspav1alpha1.MariaDB{ diff --git a/controllers/common.go b/controllers/common.go index 1e9ea6753..9e247107d 100644 --- a/controllers/common.go +++ b/controllers/common.go @@ -20,8 +20,6 @@ import ( ) var commonTemplatesDir = "common/default" -var argoOnlyCommonTemplatesDir = "common/argo" -var tektonOnlyCommonTemplatesDir = "common/tekton" const commonCusterRolebindingTemplate = "common/no-owner/clusterrolebinding.yaml.tmpl" @@ -33,17 +31,6 @@ func (r *DSPAReconciler) ReconcileCommon(dsp *dspav1alpha1.DataSciencePipelinesA if err != nil { return err } - - log.Info("Applying Engine-Specific Common Resources") - if 
params.UsingArgoEngineDriver(dsp) { - err = r.ApplyDir(dsp, params, argoOnlyCommonTemplatesDir) - } else if params.UsingTektonEngineDriver(dsp) { - err = r.ApplyDir(dsp, params, tektonOnlyCommonTemplatesDir) - } - if err != nil { - return err - } - err = r.ApplyWithoutOwner(params, commonCusterRolebindingTemplate) if err != nil { return err diff --git a/controllers/config/defaults.go b/controllers/config/defaults.go index b3b0c2b3d..a1d306a72 100644 --- a/controllers/config/defaults.go +++ b/controllers/config/defaults.go @@ -28,7 +28,9 @@ import ( ) const ( - DefaultImageValue = "MustSetInConfig" + DSPDefaultVersion = "v2" + DSPV2VersionString = DSPDefaultVersion + DefaultImageValue = "MustSetInConfig" CustomCABundleRootMountPath = "/dsp-custom-certs" @@ -48,9 +50,7 @@ const ( DefaultSystemSSLCertFile = "SSL_CERT_FILE" DefaultSystemSSLCertFilePath = "/etc/pki/tls/certs/ca-bundle.crt" // Fedora/RHEL 6 - MLPipelineUIConfigMapPrefix = "ds-pipeline-ui-configmap-" - ArtifactScriptConfigMapNamePrefix = "ds-pipeline-artifact-script-" - ArtifactScriptConfigMapKey = "artifact_script" + MLPipelineUIConfigMapPrefix = "ds-pipeline-ui-configmap-" CustomServerConfigMapNamePrefix = "ds-pipeline-server-config-" CustomServerConfigMapNameKey = "config.json" @@ -85,53 +85,26 @@ const ( // DSPO Config File Paths const ( - APIServerImagePath = "Images.ApiServer" - APIServerArtifactImagePath = "Images.Artifact" - PersistenceAgentImagePath = "Images.PersistentAgent" - ScheduledWorkflowImagePath = "Images.ScheduledWorkflow" - APIServerCacheImagePath = "Images.Cache" - APIServerMoveResultsImagePath = "Images.MoveResultsImage" - MariaDBImagePath = "Images.MariaDB" - OAuthProxyImagePath = "Images.OAuthProxy" - MlmdEnvoyImagePath = "Images.MlmdEnvoy" - MlmdGRPCImagePath = "Images.MlmdGRPC" - MlmdWriterImagePath = "Images.MlmdWriter" + // Images + APIServerImagePath = "Images.ApiServer" + PersistenceAgentImagePath = "Images.PersistentAgent" + ScheduledWorkflowImagePath = "Images.ScheduledWorkflow" + MlmdEnvoyImagePath = "Images.MlmdEnvoy" + MlmdGRPCImagePath = "Images.MlmdGRPC" + LauncherImagePath = "Images.LauncherImage" + DriverImagePath = "Images.DriverImage" + ArgoExecImagePath = "Images.ArgoExecImage" + ArgoWorkflowControllerImagePath = "Images.ArgoWorkflowController" + MariaDBImagePath = "Images.MariaDB" + OAuthProxyImagePath = "Images.OAuthProxy" + + // Other configs ObjStoreConnectionTimeoutConfigName = "DSPO.HealthCheck.ObjectStore.ConnectionTimeout" DBConnectionTimeoutConfigName = "DSPO.HealthCheck.Database.ConnectionTimeout" RequeueTimeConfigName = "DSPO.RequeueTime" ApiServerIncludeOwnerReferenceConfigName = "DSPO.ApiServer.IncludeOwnerReference" ) -// DSPV2-Argo Image Paths -const ( - APIServerImagePathV2Argo = "ImagesV2.Argo.ApiServer" - APIServerArtifactImagePathV2Argo = "ImagesV2.Argo.Artifact" - APIServerCacheImagePathV2Argo = "ImagesV2.Argo.Cache" - APIServerMoveResultsImagePathV2Argo = "ImagesV2.Argo.MoveResultsImage" - APIServerArgoLauncherImagePathV2Argo = "ImagesV2.Argo.ArgoLauncherImage" - APIServerArgoDriverImagePathV2Argo = "ImagesV2.Argo.ArgoDriverImage" - PersistenceAgentImagePathV2Argo = "ImagesV2.Argo.PersistentAgent" - ScheduledWorkflowImagePathV2Argo = "ImagesV2.Argo.ScheduledWorkflow" - MlmdEnvoyImagePathV2Argo = "ImagesV2.Argo.MlmdEnvoy" - MlmdGRPCImagePathV2Argo = "ImagesV2.Argo.MlmdGRPC" - ArgoWorkflowControllerImagePath = "ImagesV2.Argo.WorkflowController" - ArgoExecImagePath = "ImagesV2.Argo.ArgoExecImage" -) - -// DSPV2-Tekton Image Paths -// Note: These won't exist in config but 
aren't used, adding in case of future support -// TODO: remove -const ( - APIServerImagePathV2Tekton = "ImagesV2.Tekton.ApiServer" - APIServerArtifactImagePathV2Tekton = "ImagesV2.Tekton.Artifact" - APIServerCacheImagePathV2Tekton = "ImagesV2.Tekton.Cache" - APIServerMoveResultsImagePathV2Tekton = "ImagesV2.Tekton.MoveResultsImage" - PersistenceAgentImagePathV2Tekton = "ImagesV2.Tekton.PersistentAgent" - ScheduledWorkflowImagePathV2Tekton = "ImagesV2.Tekton.ScheduledWorkflow" - MlmdEnvoyImagePathV2Tekton = "ImagesV2.Tekton.MlmdEnvoy" - MlmdGRPCImagePathV2Tekton = "ImagesV2.Tekton.MlmdGRPC" -) - // DSPA Status Condition Types const ( DatabaseAvailable = "DatabaseAvailable" @@ -153,6 +126,7 @@ const ( FailingToDeploy = "FailingToDeploy" Deploying = "Deploying" ComponentDeploymentNotFound = "ComponentDeploymentNotFound" + UnsupportedVersion = "UnsupportedVersion" ) // Any required Configmap paths can be added here, @@ -160,11 +134,8 @@ const ( // validation check var requiredFields = []string{ APIServerImagePath, - APIServerArtifactImagePath, PersistenceAgentImagePath, ScheduledWorkflowImagePath, - APIServerCacheImagePath, - APIServerMoveResultsImagePath, MariaDBImagePath, OAuthProxyImagePath, } @@ -196,7 +167,6 @@ var ( MlPipelineUIResourceRequirements = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi")) MlmdEnvoyResourceRequirements = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi")) MlmdGRPCResourceRequirements = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi")) - MlmdWriterResourceRequirements = createResourceRequirement(resource.MustParse("100m"), resource.MustParse("256Mi"), resource.MustParse("100m"), resource.MustParse("256Mi")) ) type DBExtraParams map[string]string diff --git a/controllers/dspastatus/dspa_status.go b/controllers/dspastatus/dspa_status.go index f7da09781..c638d45e3 100644 --- a/controllers/dspastatus/dspa_status.go +++ b/controllers/dspastatus/dspa_status.go @@ -23,6 +23,8 @@ type DSPAStatus interface { SetMLMDProxyStatus(mlmdProxyReady metav1.Condition) + SetDSPANotReady(err error, reason string) + GetConditions() []metav1.Condition } @@ -53,6 +55,7 @@ type dspaStatus struct { persistenceAgentReady *metav1.Condition scheduledWorkflowReady *metav1.Condition mlmdProxyReady *metav1.Condition + dspaReady *metav1.Condition } func (s *dspaStatus) SetDatabaseNotReady(err error, reason string) { @@ -100,6 +103,20 @@ func (s *dspaStatus) SetMLMDProxyStatus(mlmdProxyReady metav1.Condition) { s.mlmdProxyReady = &mlmdProxyReady } +// SetDSPANotReady is an override option for reporting a custom +// overall DSP Ready state. It sets the condition type that +// reports on the overall state of the DSPA. If it is never +// called, the overall ready state is auto-generated based +// on the conditions of the other components.
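+// +// Illustrative example (a hypothetical call site; it mirrors the +// Reconcile usage added later in this patch): +// +// err := fmt.Errorf("unsupported DSP version %s", dspa.Spec.DSPVersion) +// dspaStatus.SetDSPANotReady(err, config.UnsupportedVersion)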
+func (s *dspaStatus) SetDSPANotReady(err error, reason string) { + message := "" + if err != nil { + message = err.Error() + } + condition := BuildFalseCondition(config.CrReady, reason, message) + s.dspaReady = &condition +} + func (s *dspaStatus) GetConditions() []metav1.Condition { componentConditions := []metav1.Condition{ *s.getDatabaseAvailableCondition(), @@ -119,23 +136,28 @@ func (s *dspaStatus) GetConditions() []metav1.Condition { } } - var crReady metav1.Condition - - if allReady { - crReady = metav1.Condition{ - Type: config.CrReady, - Status: metav1.ConditionTrue, - Reason: config.MinimumReplicasAvailable, - Message: "All components are ready.", - LastTransitionTime: metav1.Now(), - } - } else { - crReady = metav1.Condition{ - Type: config.CrReady, - Status: metav1.ConditionFalse, - Reason: config.MinimumReplicasAvailable, - Message: failureMessages, - LastTransitionTime: metav1.Now(), + // Allow for dspa ready status to be overridden + // otherwise we auto generate the overall ready status + // based off of the other components + crReady := s.dspaReady + + if s.dspaReady == nil { + if allReady { + crReady = &metav1.Condition{ + Type: config.CrReady, + Status: metav1.ConditionTrue, + Reason: config.MinimumReplicasAvailable, + Message: "All components are ready.", + LastTransitionTime: metav1.Now(), + } + } else { + crReady = &metav1.Condition{ + Type: config.CrReady, + Status: metav1.ConditionFalse, + Reason: config.MinimumReplicasAvailable, + Message: failureMessages, + LastTransitionTime: metav1.Now(), + } } } @@ -146,7 +168,7 @@ func (s *dspaStatus) GetConditions() []metav1.Condition { *s.persistenceAgentReady, *s.scheduledWorkflowReady, *s.mlmdProxyReady, - crReady, + *crReady, } for i, condition := range s.dspa.Status.Conditions { diff --git a/controllers/dspipeline_controller.go b/controllers/dspipeline_controller.go index d6df1f27e..dec9f0393 100644 --- a/controllers/dspipeline_controller.go +++ b/controllers/dspipeline_controller.go @@ -158,8 +158,6 @@ func (r *DSPAReconciler) DeleteResourceIfItExists(ctx context.Context, obj clien //+kubebuilder:rbac:groups=kubeflow.org,resources=*,verbs=* //+kubebuilder:rbac:groups=batch,resources=jobs,verbs=* //+kubebuilder:rbac:groups=machinelearning.seldon.io,resources=seldondeployments,verbs=* -//+kubebuilder:rbac:groups=tekton.dev,resources=*,verbs=* -//+kubebuilder:rbac:groups=custom.tekton.dev,resources=pipelineloops,verbs=* //+kubebuilder:rbac:groups=ray.io,resources=rayclusters;rayjobs;rayservices,verbs=create;get;list;patch;delete //+kubebuilder:rbac:groups=authorization.k8s.io,resources=subjectaccessreviews,verbs=create //+kubebuilder:rbac:groups=authentication.k8s.io,resources=tokenreviews,verbs=create @@ -190,6 +188,20 @@ func (r *DSPAReconciler) Reconcile(ctx context.Context, req ctrl.Request) (ctrl. defer r.updateStatus(ctx, dspa, dspaStatus, log, req) + if dspa.Spec.DSPVersion != config.DSPV2VersionString { + err1 := fmt.Errorf("unsupported DSP version %s detected. 
Please manually remove "+ + "this DSP resource and re-apply with a supported version field set", dspa.Spec.DSPVersion) + dspaStatus.SetDatabaseNotReady(err1, config.UnsupportedVersion) + dspaStatus.SetObjStoreNotReady(err1, config.UnsupportedVersion) + r.setStatusAsUnsupported(config.APIServerReady, err1, dspaStatus.SetApiServerStatus) + r.setStatusAsUnsupported(config.PersistenceAgentReady, err1, dspaStatus.SetPersistenceAgentStatus) + r.setStatusAsUnsupported(config.ScheduledWorkflowReady, err1, dspaStatus.SetScheduledWorkflowStatus) + r.setStatusAsUnsupported(config.MLMDProxyReady, err1, dspaStatus.SetMLMDProxyStatus) + dspaStatus.SetDSPANotReady(err1, config.UnsupportedVersion) + log.Info(err1.Error()) + return ctrl.Result{}, nil + } + // FixMe: Hack for stubbing gvk during tests as these are not populated by test suite // https://github.com/opendatahub-io/data-science-pipelines-operator/pull/7#discussion_r1102887037 // In production we expect these to be populated @@ -350,6 +362,11 @@ func (r *DSPAReconciler) setStatusAsNotReady(conditionType string, err error, se setStatus(condition) } +func (r *DSPAReconciler) setStatusAsUnsupported(conditionType string, err error, setStatus func(metav1.Condition)) { + condition := dspastatus.BuildFalseCondition(conditionType, config.UnsupportedVersion, err.Error()) + setStatus(condition) +} + func (r *DSPAReconciler) setStatus(ctx context.Context, resourceName string, conditionType string, dspa *dspav1alpha1.DataSciencePipelinesApplication, setStatus func(metav1.Condition), log logr.Logger) { @@ -512,8 +529,6 @@ func (r *DSPAReconciler) PublishMetrics(dspa *dspav1alpha1.DataSciencePipelinesA func (r *DSPAReconciler) GetComponents(ctx context.Context, dspa *dspav1alpha1.DataSciencePipelinesApplication) dspav1alpha1.ComponentStatus { log := r.Log.WithValues("namespace", dspa.Namespace).WithValues("dspa_name", dspa.Name) - log.Info("Updating components endpoints") - mlmdProxyResourceName := fmt.Sprintf("ds-pipeline-md-%s", dspa.Name) apiServerResourceName := fmt.Sprintf("ds-pipeline-%s", dspa.Name) diff --git a/controllers/dspipeline_params.go b/controllers/dspipeline_params.go index 4bba6dc0e..badd0e97d 100644 --- a/controllers/dspipeline_params.go +++ b/controllers/dspipeline_params.go @@ -21,6 +21,7 @@ import ( "context" "encoding/base64" "encoding/json" + "errors" "fmt" "math/rand" "os" @@ -42,7 +43,7 @@ import ( "sigs.k8s.io/controller-runtime/pkg/client" ) -const MlmdIsRequiredInV2Msg = "MLMD explicitly disabled in DSPA, but is a required component for V2 Pipelines" +const MlmdIsRequired = "MLMD explicitly disabled in DSPA, but is a required component for DSP" type DSPAParams struct { IncludeOwnerReference bool @@ -121,35 +122,6 @@ type ObjectStorageConnection struct { ExternalRouteURL string } -func (p *DSPAParams) UsingV2Pipelines(dsp *dspa.DataSciencePipelinesApplication) bool { - return dsp.Spec.DSPVersion == "v2" -} - -func (p *DSPAParams) UsingV1Pipelines(dsp *dspa.DataSciencePipelinesApplication) bool { - return !p.UsingV2Pipelines(dsp) -} - -func (p *DSPAParams) UsingArgoEngineDriver(dsp *dspa.DataSciencePipelinesApplication) bool { - return p.UsingV2Pipelines(dsp) -} - -func (p *DSPAParams) UsingTektonEngineDriver(dsp *dspa.DataSciencePipelinesApplication) bool { - return !p.UsingV2Pipelines(dsp) -} - -// TODO: rework to dynamically retrieve image based soley on 'pipelinesVersion' and 'engineDriver' rather than -// explicitly set images -func (p *DSPAParams) GetImageForComponent(dsp *dspa.DataSciencePipelinesApplication, v1Image, 
v2ArgoImage, v2TektonImage string) string { - if p.UsingV2Pipelines(dsp) { - if p.UsingArgoEngineDriver(dsp) { - return v2ArgoImage - } else { - return v2TektonImage - } - } - return v1Image -} - // UsingExternalDB will return true if an external Database is specified in the CR, otherwise false. func (p *DSPAParams) UsingExternalDB(dsp *dspa.DataSciencePipelinesApplication) bool { if dsp.Spec.Database != nil && dsp.Spec.Database.ExternalDB != nil { @@ -371,8 +343,8 @@ func (p *DSPAParams) SetupDBParams(ctx context.Context, dsp *dspa.DataSciencePip } if p.DBConnection.Password == "" { - return fmt.Errorf(fmt.Sprintf("DB Password from secret [%s] for key [%s] was not successfully retrieved, "+ - "ensure that the secret with this key exist.", p.DBConnection.CredentialsSecret.Name, p.DBConnection.CredentialsSecret.Key)) + return fmt.Errorf("db password from secret [%s] for key [%s] was not successfully retrieved, ensure that the secret with this key exist", + p.DBConnection.CredentialsSecret.Name, p.DBConnection.CredentialsSecret.Key) } return nil } @@ -499,47 +471,43 @@ func (p *DSPAParams) SetupObjectParams(ctx context.Context, dsp *dspa.DataScienc p.ObjectStorageConnection.Endpoint = endpoint if p.ObjectStorageConnection.AccessKeyID == "" || p.ObjectStorageConnection.SecretAccessKey == "" { - return fmt.Errorf(fmt.Sprintf("Object Storage Password from secret [%s] for keys [%s, %s] was not "+ - "successfully retrieved, ensure that the secret with this key exist.", + return fmt.Errorf("object storage password from secret [%s] for keys [%s, %s] was not "+ + "successfully retrieved, ensure that the secret with this key exist", p.ObjectStorageConnection.CredentialsSecret.SecretName, - p.ObjectStorageConnection.CredentialsSecret.AccessKey, p.ObjectStorageConnection.CredentialsSecret.SecretKey)) + p.ObjectStorageConnection.CredentialsSecret.AccessKey, p.ObjectStorageConnection.CredentialsSecret.SecretKey) } return nil } func (p *DSPAParams) SetupMLMD(dsp *dspa.DataSciencePipelinesApplication, log logr.Logger) error { - if p.UsingV2Pipelines(dsp) { - if p.MLMD == nil { - log.Info("MLMD not specified, but is a required component for V2 Pipelines. Including MLMD with default specs.") - p.MLMD = &dspa.MLMD{ - Deploy: true, - Envoy: &dspa.Envoy{ - DeployRoute: true, - }, - } - } else if !p.MLMD.Deploy { - return fmt.Errorf(MlmdIsRequiredInV2Msg) + if p.MLMD == nil { + log.Info("MLMD not specified, but is a required component for Pipelines. 
Including MLMD with default specs.") + p.MLMD = &dspa.MLMD{ + Deploy: true, + Envoy: &dspa.Envoy{ + DeployRoute: true, + }, } + } else if !p.MLMD.Deploy { + return errors.New(MlmdIsRequired) } - if p.MLMD != nil { - MlmdEnvoyImagePath := p.GetImageForComponent(dsp, config.MlmdEnvoyImagePath, config.MlmdEnvoyImagePathV2Argo, config.MlmdEnvoyImagePathV2Tekton) - MlmdGRPCImagePath := p.GetImageForComponent(dsp, config.MlmdGRPCImagePath, config.MlmdGRPCImagePathV2Argo, config.MlmdGRPCImagePathV2Tekton) + if p.MLMD != nil { if p.MLMD.Envoy == nil { p.MLMD.Envoy = &dspa.Envoy{ - Image: config.GetStringConfigWithDefault(MlmdEnvoyImagePath, config.DefaultImageValue), + Image: config.GetStringConfigWithDefault(config.MlmdEnvoyImagePath, config.DefaultImageValue), DeployRoute: true, } } if p.MLMD.GRPC == nil { p.MLMD.GRPC = &dspa.GRPC{ - Image: config.GetStringConfigWithDefault(MlmdGRPCImagePath, config.DefaultImageValue), + Image: config.GetStringConfigWithDefault(config.MlmdGRPCImagePath, config.DefaultImageValue), } } - mlmdEnvoyImageFromConfig := config.GetStringConfigWithDefault(MlmdEnvoyImagePath, config.DefaultImageValue) - mlmdGRPCImageFromConfig := config.GetStringConfigWithDefault(MlmdGRPCImagePath, config.DefaultImageValue) + mlmdEnvoyImageFromConfig := config.GetStringConfigWithDefault(config.MlmdEnvoyImagePath, config.DefaultImageValue) + mlmdGRPCImageFromConfig := config.GetStringConfigWithDefault(config.MlmdGRPCImagePath, config.DefaultImageValue) setStringDefault(mlmdEnvoyImageFromConfig, &p.MLMD.Envoy.Image) setStringDefault(mlmdGRPCImageFromConfig, &p.MLMD.GRPC.Image) @@ -548,20 +516,6 @@ func (p *DSPAParams) SetupMLMD(dsp *dspa.DataSciencePipelinesApplication, log lo setResourcesDefault(config.MlmdGRPCResourceRequirements, &p.MLMD.GRPC.Resources) setStringDefault(config.MlmdGrpcPort, &p.MLMD.GRPC.Port) - - if p.UsingV1Pipelines(dsp) { - MlmdWriterImagePath := config.MlmdWriterImagePath - - if p.MLMD.Writer == nil { - p.MLMD.Writer = &dspa.Writer{ - Image: config.GetStringConfigWithDefault(MlmdWriterImagePath, config.DefaultImageValue), - } - } - - mlmdWriterImageFromConfig := config.GetStringConfigWithDefault(MlmdWriterImagePath, config.DefaultImageValue) - setStringDefault(mlmdWriterImageFromConfig, &p.MLMD.Writer.Image) - setResourcesDefault(config.MlmdWriterResourceRequirements, &p.MLMD.Writer.Resources) - } } return nil } @@ -628,48 +582,25 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip p.PodToPodTLS = false dspTrustedCAConfigMapKey := config.CustomDSPTrustedCAConfigMapKey - // PodToPodTLS is only used in v2 dsp - if p.UsingV2Pipelines(dsp) { - // by default it's enabled when omitted - if dsp.Spec.PodToPodTLS == nil { - p.PodToPodTLS = true - } else { - p.PodToPodTLS = *dsp.Spec.PodToPodTLS - } + // by default it's enabled when omitted + if dsp.Spec.PodToPodTLS == nil { + p.PodToPodTLS = true + } else { + p.PodToPodTLS = *dsp.Spec.PodToPodTLS } log := loggr.WithValues("namespace", p.Namespace).WithValues("dspa_name", p.Name) if p.APIServer != nil { - APIServerImagePath := p.GetImageForComponent(dsp, config.APIServerImagePath, config.APIServerImagePathV2Argo, config.APIServerImagePathV2Tekton) - APIServerArtifactImagePath := config.APIServerArtifactImagePath - APIServerCacheImagePath := config.APIServerCacheImagePath - APIServerMoveResultsImagePath := config.APIServerMoveResultsImagePath - APIServerArgoLauncherImagePath := config.APIServerArgoLauncherImagePathV2Argo - APIServerArgoDriverImagePath := config.APIServerArgoDriverImagePathV2Argo 
- - serverImageFromConfig := config.GetStringConfigWithDefault(APIServerImagePath, config.DefaultImageValue) - artifactImageFromConfig := config.GetStringConfigWithDefault(APIServerArtifactImagePath, config.DefaultImageValue) - cacheImageFromConfig := config.GetStringConfigWithDefault(APIServerCacheImagePath, config.DefaultImageValue) - moveResultsImageFromConfig := config.GetStringConfigWithDefault(APIServerMoveResultsImagePath, config.DefaultImageValue) - argoLauncherImageFromConfig := config.GetStringConfigWithDefault(APIServerArgoLauncherImagePath, config.DefaultImageValue) - argoDriverImageFromConfig := config.GetStringConfigWithDefault(APIServerArgoDriverImagePath, config.DefaultImageValue) + serverImageFromConfig := config.GetStringConfigWithDefault(config.APIServerImagePath, config.DefaultImageValue) + argoLauncherImageFromConfig := config.GetStringConfigWithDefault(config.LauncherImagePath, config.DefaultImageValue) + argoDriverImageFromConfig := config.GetStringConfigWithDefault(config.DriverImagePath, config.DefaultImageValue) setStringDefault(serverImageFromConfig, &p.APIServer.Image) - setStringDefault(artifactImageFromConfig, &p.APIServer.ArtifactImage) - setStringDefault(cacheImageFromConfig, &p.APIServer.CacheImage) - setStringDefault(moveResultsImageFromConfig, &p.APIServer.MoveResultsImage) setStringDefault(argoLauncherImageFromConfig, &p.APIServer.ArgoLauncherImage) setStringDefault(argoDriverImageFromConfig, &p.APIServer.ArgoDriverImage) setResourcesDefault(config.APIServerResourceRequirements, &p.APIServer.Resources) - if p.APIServer.ArtifactScriptConfigMap == nil { - p.APIServer.ArtifactScriptConfigMap = &dspa.ScriptConfigMap{ - Name: config.ArtifactScriptConfigMapNamePrefix + dsp.Name, - Key: config.ArtifactScriptConfigMapKey, - } - } - if p.APIServer.CustomServerConfig == nil { p.APIServer.CustomServerConfig = &dspa.ScriptConfigMap{ Name: config.CustomServerConfigMapNamePrefix + dsp.Name, @@ -859,14 +790,12 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip } if p.PersistenceAgent != nil { - PersistenceAgentImagePath := p.GetImageForComponent(dsp, config.PersistenceAgentImagePath, config.PersistenceAgentImagePathV2Argo, config.PersistenceAgentImagePathV2Tekton) - persistenceAgentImageFromConfig := config.GetStringConfigWithDefault(PersistenceAgentImagePath, config.DefaultImageValue) + persistenceAgentImageFromConfig := config.GetStringConfigWithDefault(config.PersistenceAgentImagePath, config.DefaultImageValue) setStringDefault(persistenceAgentImageFromConfig, &p.PersistenceAgent.Image) setResourcesDefault(config.PersistenceAgentResourceRequirements, &p.PersistenceAgent.Resources) } if p.ScheduledWorkflow != nil { - ScheduledWorkflowImagePath := p.GetImageForComponent(dsp, config.ScheduledWorkflowImagePath, config.ScheduledWorkflowImagePathV2Argo, config.ScheduledWorkflowImagePathV2Tekton) - scheduledWorkflowImageFromConfig := config.GetStringConfigWithDefault(ScheduledWorkflowImagePath, config.DefaultImageValue) + scheduledWorkflowImageFromConfig := config.GetStringConfigWithDefault(config.ScheduledWorkflowImagePath, config.DefaultImageValue) setStringDefault(scheduledWorkflowImageFromConfig, &p.ScheduledWorkflow.Image) setResourcesDefault(config.ScheduledWorkflowResourceRequirements, &p.ScheduledWorkflow.Resources) } @@ -882,10 +811,7 @@ func (p *DSPAParams) ExtractParams(ctx context.Context, dsp *dspa.DataSciencePip // If user did not specify WorkflowController if dsp.Spec.WorkflowController == nil { dsp.Spec.WorkflowController = 
&dspa.WorkflowController{ - Deploy: false, - } - if p.UsingV2Pipelines(dsp) { - dsp.Spec.WorkflowController.Deploy = true + Deploy: true, } } p.WorkflowController = dsp.Spec.WorkflowController.DeepCopy() diff --git a/controllers/mlmd.go b/controllers/mlmd.go index c6c639c84..356d64be8 100644 --- a/controllers/mlmd.go +++ b/controllers/mlmd.go @@ -40,55 +40,34 @@ func (r *DSPAReconciler) ReconcileMLMD(ctx context.Context, dsp *dspav1alpha1.Da log.Info("Applying ML-Metadata (MLMD) Resources") - if params.UsingV1Pipelines(dsp) { - if dsp.Spec.MLMD != nil { - err := r.ApplyDir(dsp, params, mlmdTemplatesDir) - if err != nil { - return err - } - - if dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { - err = r.Apply(dsp, params, mlmdEnvoyRoute) - if err != nil { - return err - } - } - } + // We need to create the service first so OpenShift creates the certificate that we'll use later. + err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/"+mlmdGrpcService) + if err != nil { + return err + } - err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/v1") - if err != nil { - return err - } - } else { - // We need to create the service first so OpenShift creates the certificate that we'll use later. - err := r.ApplyDir(dsp, params, mlmdTemplatesDir+"/"+mlmdGrpcService) + if params.PodToPodTLS { + var certificatesExist bool + certificatesExist, err = params.LoadMlmdCertificates(ctx, r.Client) if err != nil { return err } - if params.PodToPodTLS { - var certificatesExist bool - certificatesExist, err = params.LoadMlmdCertificates(ctx, r.Client) - if err != nil { - return err - } - - if !certificatesExist { - return errors.New("secret containing the certificate for MLMD gRPC Server was not created yet") - } + if !certificatesExist { + return errors.New("secret containing the certificate for MLMD gRPC Server was not created yet") } + } + + err = r.ApplyDir(dsp, params, mlmdTemplatesDir) + if err != nil { + return err + } - err = r.ApplyDir(dsp, params, mlmdTemplatesDir) + if dsp.Spec.MLMD == nil || dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { + err = r.Apply(dsp, params, mlmdEnvoyRoute) if err != nil { return err } - - if dsp.Spec.MLMD == nil || dsp.Spec.MLMD.Envoy == nil || dsp.Spec.MLMD.Envoy.DeployRoute { - err = r.Apply(dsp, params, mlmdEnvoyRoute) - if err != nil { - return err - } - } } log.Info("Finished applying MLMD Resources") diff --git a/controllers/mlmd_test.go b/controllers/mlmd_test.go index fc96c9909..33cb2d4b1 100644 --- a/controllers/mlmd_test.go +++ b/controllers/mlmd_test.go @@ -28,101 +28,7 @@ import ( appsv1 "k8s.io/api/apps/v1" ) -func TestDeployMLMDV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = 
testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route now exists - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) -} - -func TestDeployMLMDV2(t *testing.T) { +func TestDeployMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -217,101 +123,7 @@ func TestDeployMLMDV2(t *testing.T) { assert.Nil(t, err) } -func TestDontDeployMLMDV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" - - // Construct DSPA Spec with MLMD Not Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: false, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := 
CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources stil doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func TestDontDeployMLMDV2(t *testing.T) { +func TestDontDeployMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" @@ -347,101 +159,10 @@ func TestDontDeployMLMDV2(t *testing.T) { // Create Context, Fake Controller and Params ctx, params, reconciler := CreateNewTestObjects() err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.EqualError(t, err, MlmdIsRequiredInV2Msg) + assert.EqualError(t, err, MlmdIsRequired) } -func TestDefaultDeployBehaviorMLMDV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - expectedMLMDGRPCName := "ds-pipeline-metadata-grpc-testdspa" - expectedMLMDWriterName := "ds-pipeline-metadata-writer-testdspa" - - // Construct DSPA Spec with MLMD Spec not defined - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // 
Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources doesn't yet exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-GRPC resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDGRPCName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Writer resources still doesn't exist - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDWriterName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func TestDefaultDeployBehaviorMLMDV2(t *testing.T) { +func TestDefaultDeployBehaviorMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -533,78 +254,7 @@ func TestDefaultDeployBehaviorMLMDV2(t *testing.T) { assert.Nil(t, err) } -func TestDeployEnvoyRouteV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - Envoy: &dspav1alpha1.Envoy{ - DeployRoute: true, - }, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := 
params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route now exists - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) -} - -func TestDeployEnvoyRouteV2(t *testing.T) { +func TestDeployEnvoyRoute(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -676,78 +326,7 @@ func TestDeployEnvoyRouteV2(t *testing.T) { assert.Nil(t, err) } -func TestDontDeployEnvoyRouteV1(t *testing.T) { - testNamespace := "testnamespace" - testDSPAName := "testdspa" - expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" - expectedMLMDEnvoyRouteName := "ds-pipeline-md-testdspa" - - // Construct DSPA Spec with MLMD Enabled - dspa := &dspav1alpha1.DataSciencePipelinesApplication{ - Spec: dspav1alpha1.DSPASpec{ - DSPVersion: "v1", - APIServer: &dspav1alpha1.APIServer{}, - MLMD: &dspav1alpha1.MLMD{ - Deploy: true, - Envoy: &dspav1alpha1.Envoy{ - DeployRoute: false, - }, - }, - Database: &dspav1alpha1.Database{ - DisableHealthCheck: false, - MariaDB: &dspav1alpha1.MariaDB{ - Deploy: true, - }, - }, - ObjectStorage: &dspav1alpha1.ObjectStorage{ - DisableHealthCheck: false, - Minio: &dspav1alpha1.Minio{ - Deploy: false, - Image: "someimage", - }, - }, - }, - } - - // Enrich DSPA with name+namespace - dspa.Namespace = testNamespace - dspa.Name = testDSPAName - - // Create Context, Fake Controller and Params - ctx, params, reconciler := CreateNewTestObjects() - err := params.ExtractParams(ctx, dspa, reconciler.Client, reconciler.Log) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources doesn't yet exist - deployment := &appsv1.Deployment{} - created, err := reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route doesn't yet exist - route := &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) - - // Run test reconciliation - err = reconciler.ReconcileMLMD(ctx, dspa, params) - assert.Nil(t, err) - - // Ensure MLMD-Envoy resources now exists - deployment = &appsv1.Deployment{} - created, err = reconciler.IsResourceCreated(ctx, deployment, expectedMLMDEnvoyName, testNamespace) - assert.True(t, created) - assert.Nil(t, err) - - // Ensure MLMD-Envoy route still doesn't exist - route = &v1.Route{} - created, err = reconciler.IsResourceCreated(ctx, route, expectedMLMDEnvoyRouteName, testNamespace) - assert.False(t, created) - assert.Nil(t, err) -} - -func 
TestDontDeployEnvoyRouteV2(t *testing.T) { +func TestDontDeployEnvoyRoute(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" @@ -823,7 +402,7 @@ func boolPtr(b bool) *bool { return &b } -func TestGetEndpointsMLMDV2(t *testing.T) { +func TestGetEndpointsMLMD(t *testing.T) { testNamespace := "testnamespace" testDSPAName := "testdspa" expectedMLMDEnvoyName := "ds-pipeline-metadata-envoy-testdspa" diff --git a/controllers/testdata/declarative/case_0/config.yaml b/controllers/testdata/declarative/case_0/config.yaml index f2f536c00..025fe51e7 100644 --- a/controllers/testdata/declarative/case_0/config.yaml +++ b/controllers/testdata/declarative/case_0/config.yaml @@ -1,15 +1,18 @@ # When a minimal DSPA is deployed Images: ApiServer: api-server:test0 - Artifact: artifact-manager:test0 PersistentAgent: persistenceagent:test0 ScheduledWorkflow: scheduledworkflow:test0 - Cache: ubi-minimal:test0 - MoveResultsImage: busybox:test0 - MlPipelineUI: frontend:test0 + MlmdEnvoy: mlmdenvoy:test0 + MlmdGRPC: mlmdgrpc:test0 + ArgoExecImage: argoexec:test0 + ArgoWorkflowController: argowfcontroller:test0 + LauncherImage: launcherimage:test0 + DriverImage: driverimage:test0 + OAuthProxy: oauth-proxy:test0 MariaDB: mariadb:test0 + MlPipelineUI: frontend:test0 Minio: minio:test0 - OAuthProxy: oauth-proxy:test0 DSPO: ApiServer: IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_0/deploy/cr.yaml b/controllers/testdata/declarative/case_0/deploy/cr.yaml index 853225336..1b13ab74e 100644 --- a/controllers/testdata/declarative/case_0/deploy/cr.yaml +++ b/controllers/testdata/declarative/case_0/deploy/cr.yaml @@ -3,6 +3,7 @@ kind: DataSciencePipelinesApplication metadata: name: testdsp0 spec: + podToPodTLS: false apiServer: enableSamplePipeline: true argoLauncherImage: argolauncherimage:test0 @@ -12,3 +13,5 @@ spec: image: minio:test0 mlpipelineUI: image: frontend:test0 + mlmd: + deploy: true diff --git a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml index 8aeb7c67b..c6a01bf52 100644 --- a/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/apiserver_deployment.yaml @@ -37,10 +37,6 @@ spec: value: "mariadb-testdsp0.default.svc.cluster.local" - name: DBCONFIG_PORT value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "120" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -75,6 +71,10 @@ spec: value: "argolauncherimage:test0" - name: V2_DRIVER_IMAGE value: "argodriverimage:test0" + - name: METADATA_GRPC_SERVICE_SERVICE_HOST + value: "ds-pipeline-metadata-grpc-testdsp0.default.svc.cluster.local" + - name: METADATA_GRPC_SERVICE_SERVICE_PORT + value: "8080" - name: ML_PIPELINE_SERVICE_HOST value: ds-pipeline-testdsp0.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT_GRPC @@ -82,36 +82,22 @@ spec: - name: SIGNED_URL_EXPIRY_TIME_SECONDS value: "60" - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "ubi-minimal:test0" - - name: MOVERESULTS_IMAGE - value: "busybox:test0" - - name: ARTIFACT_IMAGE - value: "artifact-manager:test0" - - name: ARTIFACT_BUCKET + value: Workflow + - name: DB_DRIVER_NAME + 
value: mysql + - name: DBCONFIG_MYSQLCONFIG_USER value: "mlpipeline" - - name: ARTIFACT_ENDPOINT - value: "http://minio-testdsp0.default.svc.cluster.local:9000" - - name: ARTIFACT_SCRIPT + - name: DBCONFIG_MYSQLCONFIG_PASSWORD valueFrom: - configMapKeyRef: - key: "artifact_script" - name: "ds-pipeline-artifact-script-testdsp0" - - name: ARCHIVE_LOGS - value: "false" - - name: TRACK_ARTIFACTS - value: "true" - - name: STRIP_EOF - value: "true" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "true" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "true" - - name: TERMINATE_STATUS - value: "Cancelled" + secretKeyRef: + key: "password" + name: "ds-pipeline-db-testdsp0" + - name: DBCONFIG_MYSQLCONFIG_DBNAME + value: "mlpipeline" + - name: DBCONFIG_MYSQLCONFIG_HOST + value: "mariadb-testdsp0.default.svc.cluster.local" + - name: DBCONFIG_MYSQLCONFIG_PORT + value: "3306" image: api-server:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index cc7a02b1a..000000000 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp0.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp0 - namespace: default - labels: - app: ds-pipeline-testdsp0 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml b/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml index 5f81ff7fe..a8d8ad457 100644 --- a/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/configmap_server_config.yaml @@ -8,15 +8,20 @@ metadata: component: data-science-pipelines data: config.json: | - - { - "DBConfig": { - "DriverName": "mysql", - "ConMaxLifeTime": "120s", - "ExtraParams": {"tls":"false"} - }, - "ObjectStoreConfig": { - "PipelinePath": "pipelines" - }, - "InitConnectionTimeout": "6m" - } + { + "DBConfig": { + "MySQLConfig": { + "ExtraParams": {"tls":"false"}, + "GroupConcatMaxLen": "4194304" + }, + "PostgreSQLConfig": {}, + "ConMaxLifeTime": "120s" + }, + "ObjectStoreConfig": { + "PipelinePath": "pipelines" + }, + "DBDriverName": "mysql", + "ARCHIVE_CONFIG_LOG_FILE_NAME": "main.log", + "ARCHIVE_CONFIG_LOG_PATH_PREFIX": "/artifacts", + "InitConnectionTimeout": "6m" + } diff --git a/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml index 30b58463f..ed2a18971 100644 --- a/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/persistence-agent_deployment.yaml @@ -35,7 +35,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - value: PipelineRun + value: Workflow image: persistenceagent:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent @@ -73,4 +73,17 @@ spec: limits: cpu: 250m memory: 1Gi + volumeMounts: + - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token + name: persistenceagent-sa-token + subPath: ds-pipeline-persistenceagent-testdsp0-token serviceAccountName: ds-pipeline-persistenceagent-testdsp0 + volumes: + - name: persistenceagent-sa-token + projected: + defaultMode: 420 + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 3600 + path: ds-pipeline-persistenceagent-testdsp0-token diff --git a/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml index e25e252a6..7ba917a40 100644 --- a/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml +++ b/controllers/testdata/declarative/case_0/expected/created/scheduled-workflow_deployment.yaml @@ -28,8 +28,6 @@ spec: value: "default" - name: CRON_SCHEDULE_TIMEZONE value: "UTC" - - name: EXECUTIONTYPE - value: PipelineRun image: scheduledworkflow:test0 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-scheduledworkflow diff --git 
a/controllers/testdata/declarative/case_1/config.yaml b/controllers/testdata/declarative/case_1/config.yaml
index 16d0e588a..9700d9c57 100644
--- a/controllers/testdata/declarative/case_1/config.yaml
+++ b/controllers/testdata/declarative/case_1/config.yaml
@@ -1,10 +1,14 @@
-# When a DSPA is deployed with all deployments disabled
 Images:
   ApiServer: api-server:test1
-  Artifact: artifact-manager:test1
   PersistentAgent: persistenceagent:test1
   ScheduledWorkflow: scheduledworkflow:test1
-  Cache: ubi-minimal:test1
-  MoveResultsImage: busybox:test1
-  MariaDB: mariadb:test1
+  MlmdEnvoy: mlmdenvoy:test1
+  MlmdGRPC: mlmdgrpc:test1
+  ArgoExecImage: argoexec:test1
+  ArgoWorkflowController: argowfcontroller:test1
+  LauncherImage: launcherimage:test1
+  DriverImage: driverimage:test1
   OAuthProxy: oauth-proxy:test1
+  MariaDB: mariadb:test1
+  MlPipelineUI: frontend:test1
+  Minio: minio:test1
diff --git a/controllers/testdata/declarative/case_1/deploy/cr.yaml b/controllers/testdata/declarative/case_1/deploy/cr.yaml
index d733e4792..512a980af 100644
--- a/controllers/testdata/declarative/case_1/deploy/cr.yaml
+++ b/controllers/testdata/declarative/case_1/deploy/cr.yaml
@@ -3,6 +3,7 @@ kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp1
 spec:
+  podToPodTLS: false
   apiServer:
     deploy: false
   persistenceAgent:
@@ -19,3 +20,8 @@ spec:
     minio:
       deploy: false
       image: minio:test0
+  mlmd:
+    # currently the logic requires mlmd;
+    # probably should make this consistent
+    # with other components
+    deploy: true
diff --git a/controllers/testdata/declarative/case_2/config.yaml b/controllers/testdata/declarative/case_2/config.yaml
index 62adc5e2a..6fd2da23a 100644
--- a/controllers/testdata/declarative/case_2/config.yaml
+++ b/controllers/testdata/declarative/case_2/config.yaml
@@ -1,13 +1,18 @@
-# When a complete DSPA is deployed with (defaults specified)
 Images:
   ApiServer: api-server:test2
-  Artifact: artifact-manager:test2
   PersistentAgent: persistenceagent:test2
   ScheduledWorkflow: scheduledworkflow:test2
-  Cache: ubi-minimal:test2
-  MoveResultsImage: busybox:test2
-  MariaDB: mariadb:test2
+  MlmdEnvoy: mlmdenvoy:test2
+  MlmdGRPC: mlmdgrpc:test2
+  ArgoExecImage: argoexec:test2
+  ArgoWorkflowController: argowfcontroller:test2
+  LauncherImage: launcherimage:test2
+  DriverImage: driverimage:test2
   OAuthProxy: oauth-proxy:test2
+  MariaDB: mariadb:test2
+  MlPipelineUI: frontend:test2
+  Minio: minio:test2
+
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_2/deploy/cr.yaml b/controllers/testdata/declarative/case_2/deploy/cr.yaml
index 4db5793f5..f15c8e1b3 100644
--- a/controllers/testdata/declarative/case_2/deploy/cr.yaml
+++ b/controllers/testdata/declarative/case_2/deploy/cr.yaml
@@ -5,25 +5,14 @@ kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp2
 spec:
+  podToPodTLS: false
   apiServer:
     deploy: true
     image: api-server:test2
-    applyTektonCustomResource: true
-    archiveLogs: false
-    artifactImage: artifact-manager:test2
-    cacheImage: ubi-minimal:test2
-    moveResultsImage: busybox:test2
     argoLauncherImage: argolauncherimage:test2
     argoDriverImage: argodriverimage:test2
-    injectDefaultScript: true
-    stripEOF: true
     enableOauth: true
     enableSamplePipeline: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 125
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
     customServerConfigMap:
       name: testserverconfigmapdspa2
       key: testserverconfigmapkeydspa2
@@ -94,3 +83,25 @@ spec:
         limits:
           cpu: "2535m"
           memory: "5Gi"
+  mlmd:
+    deploy: true
+    grpc:
+      
resources: + requests: + cpu: "1334m" + memory: "1Gi" + limits: + cpu: "2535m" + memory: "5Gi" + image: mlmdgrpc:test2 + port: "8080" + envoy: + resources: + requests: + cpu: "1334m" + memory: "1Gi" + limits: + cpu: "2535m" + memory: "5Gi" + image: mlmdenvoy:test2 + deployRoute: false diff --git a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml index ce8956c80..4a19db478 100644 --- a/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/apiserver_deployment.yaml @@ -37,10 +37,6 @@ spec: value: "mariadb-testdsp2.default.svc.cluster.local" - name: DBCONFIG_PORT value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "125" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT @@ -75,6 +71,10 @@ spec: value: "argolauncherimage:test2" - name: V2_DRIVER_IMAGE value: "argodriverimage:test2" + - name: METADATA_GRPC_SERVICE_SERVICE_HOST + value: "ds-pipeline-metadata-grpc-testdsp2.default.svc.cluster.local" + - name: METADATA_GRPC_SERVICE_SERVICE_PORT + value: "8080" - name: ML_PIPELINE_SERVICE_HOST value: ds-pipeline-testdsp2.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT_GRPC @@ -82,36 +82,22 @@ spec: - name: SIGNED_URL_EXPIRY_TIME_SECONDS value: "60" - name: EXECUTIONTYPE - value: PipelineRun - - name: CACHE_IMAGE - value: "ubi-minimal:test2" - - name: MOVERESULTS_IMAGE - value: "busybox:test2" - - name: ARTIFACT_IMAGE - value: "artifact-manager:test2" - - name: ARTIFACT_BUCKET - value: "mlpipeline" - - name: ARTIFACT_ENDPOINT - value: "http://minio-testdsp2.default.svc.cluster.local:9000" - - name: ARTIFACT_SCRIPT + value: Workflow + - name: DB_DRIVER_NAME + value: mysql + - name: DBCONFIG_MYSQLCONFIG_USER + value: "testuser" + - name: DBCONFIG_MYSQLCONFIG_PASSWORD valueFrom: - configMapKeyRef: - key: "artifact_script" - name: "ds-pipeline-artifact-script-testdsp2" - - name: ARCHIVE_LOGS - value: "false" - - name: TRACK_ARTIFACTS - value: "true" - - name: STRIP_EOF - value: "true" - - name: PIPELINE_RUNTIME - value: "tekton" - - name: INJECT_DEFAULT_SCRIPT - value: "true" - - name: APPLY_TEKTON_CUSTOM_RESOURCE - value: "true" - - name: TERMINATE_STATUS - value: "Cancelled" + secretKeyRef: + key: "password" + name: "ds-pipeline-db-testdsp2" + - name: DBCONFIG_MYSQLCONFIG_DBNAME + value: "randomDBName" + - name: DBCONFIG_MYSQLCONFIG_HOST + value: "mariadb-testdsp2.default.svc.cluster.local" + - name: DBCONFIG_MYSQLCONFIG_PORT + value: "3306" image: api-server:test2 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server diff --git a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index ad0f15ce8..000000000 --- a/controllers/testdata/declarative/case_2/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - 
workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp2.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp2 - namespace: default - labels: - app: ds-pipeline-testdsp2 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml new file mode 100644 index 000000000..da1263501 --- /dev/null +++ b/controllers/testdata/declarative/case_2/expected/created/mlmd_envoy_deployment.yaml @@ -0,0 +1,77 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: ds-pipeline-metadata-envoy-testdsp2 + namespace: default + labels: + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 +spec: + replicas: 1 + selector: + matchLabels: + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 + template: + metadata: + annotations: + sidecar.istio.io/inject: "false" + labels: + app: ds-pipeline-metadata-envoy-testdsp2 + component: data-science-pipelines + dspa: testdsp2 + spec: + containers: + - image: mlmdenvoy:test2 + name: container + command: ["/usr/local/bin/envoy"] + args: [ + "-c", + "/etc/envoy.yaml" + ] + ports: + - containerPort: 9090 + name: md-envoy + protocol: TCP + - containerPort: 9901 + name: envoy-admin + protocol: TCP + livenessProbe: + initialDelaySeconds: 30 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 + readinessProbe: + initialDelaySeconds: 3 + periodSeconds: 5 + tcpSocket: + port: md-envoy + timeoutSeconds: 2 + resources: + requests: + cpu: 1334m + memory: 1Gi + limits: + cpu: 2535m + memory: 5Gi + volumeMounts: + - mountPath: /etc/envoy.yaml + name: envoy-config + subPath: envoy.yaml + serviceAccountName: ds-pipeline-metadata-envoy-testdsp2 + volumes: + - name: envoy-config + configMap: + name: ds-pipeline-metadata-envoy-config-testdsp2 + defaultMode: 420 + - name: proxy-tls + secret: + secretName: ds-pipelines-envoy-proxy-tls-testdsp2 + defaultMode: 420 + - name: proxy-tls-upstream + configMap: + name: dsp-trusted-ca-testdsp2 + defaultMode: 420 diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml similarity index 68% rename from controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml rename to controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml index 48b8e3959..c7f1e9030 100644 --- a/controllers/testdata/declarative/case_5/expected/created/metadata-grpc_deployment.yaml +++ 
b/controllers/testdata/declarative/case_2/expected/created/mlmd_grpc_deployment.yaml @@ -1,29 +1,29 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-metadata-grpc-testdsp5 + name: ds-pipeline-metadata-grpc-testdsp2 namespace: default labels: - app: ds-pipeline-metadata-grpc-testdsp5 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 spec: replicas: 1 selector: matchLabels: - app: ds-pipeline-metadata-grpc-testdsp5 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 template: metadata: labels: - app: ds-pipeline-metadata-grpc-testdsp5 + app: ds-pipeline-metadata-grpc-testdsp2 component: data-science-pipelines - dspa: testdsp5 + dspa: testdsp2 spec: containers: - args: - - --grpc_port=1337 + - --grpc_port=8080 - --mysql_config_database=$(MYSQL_DATABASE) - --mysql_config_host=$(MYSQL_HOST) - --mysql_config_port=$(MYSQL_PORT) @@ -34,22 +34,22 @@ spec: - /bin/metadata_store_server env: - name: DBCONFIG_USER - value: "mlpipeline" + value: "testuser" - name: DBCONFIG_PASSWORD valueFrom: secretKeyRef: key: "password" - name: "ds-pipeline-db-testdsp5" + name: "ds-pipeline-db-testdsp2" - name: MYSQL_DATABASE - value: "mlpipeline" + value: "randomDBName" - name: MYSQL_HOST - value: mariadb-testdsp5.default.svc.cluster.local + value: "mariadb-testdsp2.default.svc.cluster.local" - name: MYSQL_PORT value: "3306" - image: metadata-grpc:test5 + image: mlmdgrpc:test2 name: container ports: - - containerPort: 1337 + - containerPort: 8080 name: grpc-api protocol: TCP livenessProbe: @@ -65,10 +65,10 @@ spec: port: grpc-api timeoutSeconds: 2 resources: - limits: - cpu: 100m - memory: 256Mi requests: - cpu: 100m - memory: 256Mi - serviceAccountName: ds-pipeline-metadata-grpc-testdsp5 + cpu: 1334m + memory: 1Gi + limits: + cpu: 2535m + memory: 5Gi + serviceAccountName: ds-pipeline-metadata-grpc-testdsp2 diff --git a/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml index 6db4d107e..67c750f31 100644 --- a/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_2/expected/created/persistence-agent_deployment.yaml @@ -35,7 +35,7 @@ spec: - name: KUBEFLOW_USERID_PREFIX value: "" - name: EXECUTIONTYPE - value: PipelineRun + value: Workflow image: persistenceagent:test2 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent @@ -73,4 +73,17 @@ spec: limits: cpu: 2524m memory: 5Gi + volumeMounts: + - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token + name: persistenceagent-sa-token + subPath: ds-pipeline-persistenceagent-testdsp2-token serviceAccountName: ds-pipeline-persistenceagent-testdsp2 + volumes: + - name: persistenceagent-sa-token + projected: + defaultMode: 420 + sources: + - serviceAccountToken: + audience: pipelines.kubeflow.org + expirationSeconds: 3600 + path: ds-pipeline-persistenceagent-testdsp2-token diff --git a/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl b/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl deleted file mode 100644 index 9d19fa58f..000000000 --- a/controllers/testdata/declarative/case_2/expected/created/sample-pipeline.yaml.tmpl +++ /dev/null @@ -1,554 +0,0 @@ 
-apiVersion: v1
-kind: ConfigMap
-metadata:
-  name: sample-pipeline-testdsp2
-  namespace: default
-  labels:
-    app: ds-pipeline-testdsp2
-    component: data-science-pipelines
-data:
-  iris-pipeline-compiled.yaml: |-
-    apiVersion: tekton.dev/v1beta1
-    kind: PipelineRun
-    metadata:
-      name: iris-pipeline
-      annotations:
-        tekton.dev/output_artifacts: '{"data-prep": [{"key": "artifacts/$PIPELINERUN/data-prep/X_test.tgz",
-          "name": "data-prep-X_test", "path": "/tmp/outputs/X_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/X_train.tgz",
-          "name": "data-prep-X_train", "path": "/tmp/outputs/X_train/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_test.tgz",
-          "name": "data-prep-y_test", "path": "/tmp/outputs/y_test/data"}, {"key": "artifacts/$PIPELINERUN/data-prep/y_train.tgz",
-          "name": "data-prep-y_train", "path": "/tmp/outputs/y_train/data"}], "evaluate-model":
-          [{"key": "artifacts/$PIPELINERUN/evaluate-model/mlpipeline-metrics.tgz", "name":
-          "mlpipeline-metrics", "path": "/tmp/outputs/mlpipeline_metrics/data"}], "train-model":
-          [{"key": "artifacts/$PIPELINERUN/train-model/model.tgz", "name": "train-model-model",
-          "path": "/tmp/outputs/model/data"}]}'
-        tekton.dev/input_artifacts: '{"evaluate-model": [{"name": "data-prep-X_test",
-          "parent_task": "data-prep"}, {"name": "data-prep-y_test", "parent_task": "data-prep"},
-          {"name": "train-model-model", "parent_task": "train-model"}], "train-model":
-          [{"name": "data-prep-X_train", "parent_task": "data-prep"}, {"name": "data-prep-y_train",
-          "parent_task": "data-prep"}], "validate-model": [{"name": "train-model-model",
-          "parent_task": "train-model"}]}'
-        tekton.dev/artifact_bucket: mlpipeline
-        tekton.dev/artifact_endpoint: ${MINIO_SERVICE_SERVICE_HOST}:${MINIO_SERVICE_SERVICE_PORT}
-        tekton.dev/artifact_endpoint_scheme: http://
-        tekton.dev/artifact_items: '{"data-prep": [["X_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test"],
-          ["X_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train"],
-          ["y_test", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test"],
-          ["y_train", "$(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train"]],
-          "evaluate-model": [["mlpipeline-metrics", "/tmp/outputs/mlpipeline_metrics/data"]],
-          "train-model": [["model", "$(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model"]],
-          "validate-model": []}'
-        sidecar.istio.io/inject: "false"
-        tekton.dev/template: ''
-        pipelines.kubeflow.org/big_data_passing_format: $(workspaces.$TASK_NAME.path)/artifacts/$ORIG_PR_NAME/$TASKRUN_NAME/$TASK_PARAM_NAME
-        pipelines.kubeflow.org/pipeline_spec: '{"inputs": [{"default": "iris-model", "name":
-          "model_obc", "optional": true, "type": "String"}], "name": "Iris Pipeline"}'
-      labels:
-        pipelines.kubeflow.org/pipelinename: ''
-        pipelines.kubeflow.org/generation: ''
-    spec:
-      params:
-      - name: model_obc
-        value: iris-model
-      pipelineSpec:
-        params:
-        - name: model_obc
-          default: iris-model
-        tasks:
-        - name: data-prep
-          taskSpec:
-            steps:
-            - name: main
-              args:
-              - --X-train
-              - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train
-              - --X-test
-              - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test
-              - --y-train
-              - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train
-              - --y-test
-              - $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test
-              command:
-              - sh
-              - -c
-              - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
-                'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
-                pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn'
-                --user) && "$0" "$@"
-              - sh
-              - -ec
-              - |
-                program_path=$(mktemp)
-                printf "%s" "$0" > "$program_path"
-                python3 -u "$program_path" "$@"
-              - |
-                def _make_parent_dirs_and_return_path(file_path: str):
-                    import os
-                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
-                    return file_path
-
-                def data_prep(
-                    X_train_file,
-                    X_test_file,
-                    y_train_file,
-                    y_test_file,
-                ):
-                    import pickle
-
-                    import pandas as pd
-
-                    from sklearn import datasets
-                    from sklearn.model_selection import train_test_split
-
-                    def get_iris_data():
-                        iris = datasets.load_iris()
-                        data = pd.DataFrame(
-                            {
-                                "sepalLength": iris.data[:, 0],
-                                "sepalWidth": iris.data[:, 1],
-                                "petalLength": iris.data[:, 2],
-                                "petalWidth": iris.data[:, 3],
-                                "species": iris.target,
-                            }
-                        )
-
-                        print("Initial Dataset:")
-                        print(data.head())
-
-                        return data
-
-                    def create_training_set(dataset, test_size = 0.3):
-                        # Features
-                        X = dataset[["sepalLength", "sepalWidth", "petalLength", "petalWidth"]]
-                        # Labels
-                        y = dataset["species"]
-
-                        # Split dataset into training set and test set
-                        X_train, X_test, y_train, y_test = train_test_split(
-                            X, y, test_size=test_size, random_state=11
-                        )
-
-                        return X_train, X_test, y_train, y_test
-
-                    def save_pickle(object_file, target_object):
-                        with open(object_file, "wb") as f:
-                            pickle.dump(target_object, f)
-
-                    dataset = get_iris_data()
-                    X_train, X_test, y_train, y_test = create_training_set(dataset)
-
-                    save_pickle(X_train_file, X_train)
-                    save_pickle(X_test_file, X_test)
-                    save_pickle(y_train_file, y_train)
-                    save_pickle(y_test_file, y_test)
-
-                import argparse
-                _parser = argparse.ArgumentParser(prog='Data prep', description='')
-                _parser.add_argument("--X-train", dest="X_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--X-test", dest="X_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--y-train", dest="y_train_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--y-test", dest="y_test_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parsed_args = vars(_parser.parse_args())
-
-                _outputs = data_prep(**_parsed_args)
-              image: registry.access.redhat.com/ubi8/python-38
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            - image: registry.access.redhat.com/ubi8/ubi-minimal
-              name: output-taskrun-name
-              command:
-              - sh
-              - -ec
-              - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)"
-            - image: registry.access.redhat.com/ubi8/ubi-minimal
-              name: copy-results-artifacts
-              command:
-              - sh
-              - -ec
-              - |
-                set -exo pipefail
-                TOTAL_SIZE=0
-                copy_artifact() {
-                  if [ -d "$1" ]; then
-                    tar -czvf "$1".tar.gz "$1"
-                    SUFFIX=".tar.gz"
-                  fi
-                  ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'`
-                  TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE)
-                  touch "$2"
-                  if [[ $TOTAL_SIZE -lt 3072 ]]; then
-                    if [ -d "$1" ]; then
-                      tar -tzf "$1".tar.gz > "$2"
-                    elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then
-                      cp "$1" "$2"
-                    fi
-                  fi
-                }
-                copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_train $(results.X-train.path)
-                copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/X_test $(results.X-test.path)
-                copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_train $(results.y-train.path)
-                copy_artifact $(workspaces.data-prep.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/y_test $(results.y-test.path)
-              onError: continue
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            results:
-            - name: X-test
-              description: /tmp/outputs/X_test/data
-            - name: X-train
-              description: /tmp/outputs/X_train/data
-            - name: taskrun-name
-            - name: y-test
-              description: /tmp/outputs/y_test/data
-            - name: y-train
-              description: /tmp/outputs/y_train/data
-            metadata:
-              labels:
-                pipelines.kubeflow.org/cache_enabled: "true"
-              annotations:
-                pipelines.kubeflow.org/component_spec_digest: '{"name": "Data prep", "outputs":
-                  [{"name": "X_train"}, {"name": "X_test"}, {"name": "y_train"}, {"name":
-                  "y_test"}], "version": "Data prep@sha256=5aeb512900f57983c9f643ec30ddb4ccc66490a443269b51ce0a67d57cb373b0"}'
-            workspaces:
-            - name: data-prep
-          workspaces:
-          - name: data-prep
-            workspace: iris-pipeline
-        - name: train-model
-          params:
-          - name: data-prep-trname
-            value: $(tasks.data-prep.results.taskrun-name)
-          taskSpec:
-            steps:
-            - name: main
-              args:
-              - --X-train
-              - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_train
-              - --y-train
-              - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_train
-              - --model
-              - $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model
-              command:
-              - sh
-              - -c
-              - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
-                'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
-                pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn'
-                --user) && "$0" "$@"
-              - sh
-              - -ec
-              - |
-                program_path=$(mktemp)
-                printf "%s" "$0" > "$program_path"
-                python3 -u "$program_path" "$@"
-              - |
-                def _make_parent_dirs_and_return_path(file_path: str):
-                    import os
-                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
-                    return file_path
-
-                def train_model(
-                    X_train_file,
-                    y_train_file,
-                    model_file,
-                ):
-                    import pickle
-
-                    from sklearn.ensemble import RandomForestClassifier
-
-                    def load_pickle(object_file):
-                        with open(object_file, "rb") as f:
-                            target_object = pickle.load(f)
-
-                        return target_object
-
-                    def save_pickle(object_file, target_object):
-                        with open(object_file, "wb") as f:
-                            pickle.dump(target_object, f)
-
-                    def train_iris(X_train, y_train):
-                        model = RandomForestClassifier(n_estimators=100)
-                        model.fit(X_train, y_train)
-
-                        return model
-
-                    X_train = load_pickle(X_train_file)
-                    y_train = load_pickle(y_train_file)
-
-                    model = train_iris(X_train, y_train)
-
-                    save_pickle(model_file, model)
-
-                import argparse
-                _parser = argparse.ArgumentParser(prog='Train model', description='')
-                _parser.add_argument("--X-train", dest="X_train_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--y-train", dest="y_train_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--model", dest="model_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parsed_args = vars(_parser.parse_args())
-
-                _outputs = train_model(**_parsed_args)
-              image: registry.access.redhat.com/ubi8/python-38
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            - image: registry.access.redhat.com/ubi8/ubi-minimal
-              name: output-taskrun-name
-              command:
-              - sh
-              - -ec
-              - echo -n "$(context.taskRun.name)" > "$(results.taskrun-name.path)"
-            - image: registry.access.redhat.com/ubi8/ubi-minimal
-              name: copy-results-artifacts
-              command:
-              - sh
-              - -ec
-              - |
-                set -exo pipefail
-                TOTAL_SIZE=0
-                copy_artifact() {
-                  if [ -d "$1" ]; then
-                    tar -czvf "$1".tar.gz "$1"
-                    SUFFIX=".tar.gz"
-                  fi
-                  ARTIFACT_SIZE=`wc -c "$1"${SUFFIX} | awk '{print $1}'`
-                  TOTAL_SIZE=$( expr $TOTAL_SIZE + $ARTIFACT_SIZE)
-                  touch "$2"
-                  if [[ $TOTAL_SIZE -lt 3072 ]]; then
-                    if [ -d "$1" ]; then
-                      tar -tzf "$1".tar.gz > "$2"
-                    elif ! awk "/[^[:print:]]/{f=1} END{exit !f}" "$1"; then
-                      cp "$1" "$2"
-                    fi
-                  fi
-                }
-                copy_artifact $(workspaces.train-model.path)/artifacts/$ORIG_PR_NAME/$(context.taskRun.name)/model $(results.model.path)
-              onError: continue
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            params:
-            - name: data-prep-trname
-            results:
-            - name: model
-              description: /tmp/outputs/model/data
-            - name: taskrun-name
-            metadata:
-              labels:
-                pipelines.kubeflow.org/cache_enabled: "true"
-              annotations:
-                pipelines.kubeflow.org/component_spec_digest: '{"name": "Train model",
-                  "outputs": [{"name": "model"}], "version": "Train model@sha256=cb1fbd399ee5849dcdfaafced23a0496cae1d5861795062b22512b766ec418ce"}'
-            workspaces:
-            - name: train-model
-          workspaces:
-          - name: train-model
-            workspace: iris-pipeline
-          runAfter:
-          - data-prep
-          - data-prep
-        - name: evaluate-model
-          params:
-          - name: data-prep-trname
-            value: $(tasks.data-prep.results.taskrun-name)
-          - name: train-model-trname
-            value: $(tasks.train-model.results.taskrun-name)
-          taskSpec:
-            steps:
-            - name: main
-              args:
-              - --X-test
-              - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/X_test
-              - --y-test
-              - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.data-prep-trname)/y_test
-              - --model
-              - $(workspaces.evaluate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model
-              - --mlpipeline-metrics
-              - /tmp/outputs/mlpipeline_metrics/data
-              command:
-              - sh
-              - -c
-              - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
-                'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
-                pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn'
-                --user) && "$0" "$@"
-              - sh
-              - -ec
-              - |
-                program_path=$(mktemp)
-                printf "%s" "$0" > "$program_path"
-                python3 -u "$program_path" "$@"
-              - |
-                def _make_parent_dirs_and_return_path(file_path: str):
-                    import os
-                    os.makedirs(os.path.dirname(file_path), exist_ok=True)
-                    return file_path
-
-                def evaluate_model(
-                    X_test_file,
-                    y_test_file,
-                    model_file,
-                    mlpipeline_metrics_file,
-                ):
-                    import json
-                    import pickle
-
-                    from sklearn.metrics import accuracy_score
-
-                    def load_pickle(object_file):
-                        with open(object_file, "rb") as f:
-                            target_object = pickle.load(f)
-
-                        return target_object
-
-                    X_test = load_pickle(X_test_file)
-                    y_test = load_pickle(y_test_file)
-                    model = load_pickle(model_file)
-
-                    y_pred = model.predict(X_test)
-
-                    accuracy_score_metric = accuracy_score(y_test, y_pred)
-                    print(f"Accuracy: {accuracy_score_metric}")
-
-                    metrics = {
-                        "metrics": [
-                            {
-                                "name": "accuracy-score",
-                                "numberValue": accuracy_score_metric,
-                                "format": "PERCENTAGE",
-                            },
-                        ]
-                    }
-
-                    with open(mlpipeline_metrics_file, "w") as f:
-                        json.dump(metrics, f)
-
-                import argparse
-                _parser = argparse.ArgumentParser(prog='Evaluate model', description='')
-                _parser.add_argument("--X-test", dest="X_test_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--y-test", dest="y_test_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parser.add_argument("--mlpipeline-metrics", dest="mlpipeline_metrics_file", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS)
-                _parsed_args = vars(_parser.parse_args())
-
-                _outputs = evaluate_model(**_parsed_args)
-              image: registry.access.redhat.com/ubi8/python-38
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            params:
-            - name: data-prep-trname
-            - name: train-model-trname
-            stepTemplate:
-              volumeMounts:
-              - name: mlpipeline-metrics
-                mountPath: /tmp/outputs/mlpipeline_metrics
-            volumes:
-            - name: mlpipeline-metrics
-              emptyDir: {}
-            metadata:
-              labels:
-                pipelines.kubeflow.org/cache_enabled: "true"
-              annotations:
-                pipelines.kubeflow.org/component_spec_digest: '{"name": "Evaluate model",
-                  "outputs": [{"name": "mlpipeline_metrics", "type": "Metrics"}], "version":
-                  "Evaluate model@sha256=f398e65faecc6f5a4ba11a2c78d8a2274e3ede205a0e199c8bb615531a3abd4a"}'
-            workspaces:
-            - name: evaluate-model
-          workspaces:
-          - name: evaluate-model
-            workspace: iris-pipeline
-          runAfter:
-          - data-prep
-          - data-prep
-          - train-model
-        - name: validate-model
-          params:
-          - name: train-model-trname
-            value: $(tasks.train-model.results.taskrun-name)
-          taskSpec:
-            steps:
-            - name: main
-              args:
-              - --model
-              - $(workspaces.validate-model.path)/artifacts/$ORIG_PR_NAME/$(params.train-model-trname)/model
-              command:
-              - sh
-              - -c
-              - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location
-                'pandas' 'scikit-learn' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m
-                pip install --quiet --no-warn-script-location 'pandas' 'scikit-learn'
-                --user) && "$0" "$@"
-              - sh
-              - -ec
-              - |
-                program_path=$(mktemp)
-                printf "%s" "$0" > "$program_path"
-                python3 -u "$program_path" "$@"
-              - |
-                def validate_model(model_file):
-                    import pickle
-
-                    def load_pickle(object_file):
-                        with open(object_file, "rb") as f:
-                            target_object = pickle.load(f)
-
-                        return target_object
-
-                    model = load_pickle(model_file)
-
-                    input_values = [[5, 3, 1.6, 0.2]]
-
-                    print(f"Performing test prediction on {input_values}")
-                    result = model.predict(input_values)
-
-                    print(f"Response: {result}")
-
-                import argparse
-                _parser = argparse.ArgumentParser(prog='Validate model', description='')
-                _parser.add_argument("--model", dest="model_file", type=str, required=True, default=argparse.SUPPRESS)
-                _parsed_args = vars(_parser.parse_args())
-
-                _outputs = validate_model(**_parsed_args)
-              image: registry.access.redhat.com/ubi8/python-38
-              env:
-              - name: ORIG_PR_NAME
-                valueFrom:
-                  fieldRef:
-                    fieldPath: metadata.labels['custom.tekton.dev/originalPipelineRun']
-            params:
-            - name: train-model-trname
-            metadata:
-              labels:
-                pipelines.kubeflow.org/cache_enabled: "true"
-              annotations:
-                pipelines.kubeflow.org/component_spec_digest: '{"name": "Validate model",
-                  "outputs": [], "version": "Validate model@sha256=53d18ff94fc8f164e7d8455f2c87fa7fdac17e7502502aaa52012e4247d089ee"}'
-            workspaces:
-            - name: validate-model
-          workspaces:
-          - name: validate-model
-            workspace: iris-pipeline
-          runAfter:
-          - train-model
-        workspaces:
-        - name: iris-pipeline
-      workspaces:
-      - name: iris-pipeline
-        volumeClaimTemplate:
-          spec:
-            accessModes:
-            - ReadWriteOnce
-            resources:
-              requests:
-                storage: 2Gi
diff --git a/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml
index f49e43414..16d314888 100644
--- a/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml
+++ b/controllers/testdata/declarative/case_2/expected/created/scheduled-workflow_deployment.yaml
@@ -28,8 +28,6 @@ spec:
             value: "default"
           - name: CRON_SCHEDULE_TIMEZONE
             value: "EST"
-          - name: EXECUTIONTYPE
-            value: PipelineRun
           image: scheduledworkflow:test2
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-scheduledworkflow
diff --git a/controllers/testdata/declarative/case_3/config.yaml b/controllers/testdata/declarative/case_3/config.yaml
index 8da75da69..f5f9ce75c 100644
--- a/controllers/testdata/declarative/case_3/config.yaml
+++ b/controllers/testdata/declarative/case_3/config.yaml
@@ -1,13 +1,17 @@
-# When a DSPA with a custom db/storage secret, and custom artifact script is deployed.
 Images:
   ApiServer: api-server:test3
-  Artifact: artifact-manager:test3
   PersistentAgent: persistenceagent:test3
   ScheduledWorkflow: scheduledworkflow:test3
-  Cache: ubi-minimal:test3
-  MoveResultsImage: busybox:test3
-  MariaDB: mariadb:test3
+  MlmdEnvoy: mlmdenvoy:test3
+  MlmdGRPC: mlmdgrpc:test3
+  ArgoExecImage: argoexec:test3
+  ArgoWorkflowController: argowfcontroller:test3
+  LauncherImage: launcherimage:test3
+  DriverImage: driverimage:test3
   OAuthProxy: oauth-proxy:test3
+  MariaDB: mariadb:test3
+  MlPipelineUI: frontend:test3
+  Minio: minio:test3
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_3/deploy/02_cr.yaml b/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
index 0d7beca3f..ba09102f1 100644
--- a/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
+++ b/controllers/testdata/declarative/case_3/deploy/02_cr.yaml
@@ -3,17 +3,17 @@ kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp3
 spec:
+  podToPodTLS: false
   apiServer:
     enableOauth: true
     enableSamplePipeline: false
-    artifactScriptConfigMap:
-      name: doesnotexist
-      key: "somekey"
     deploy: true
     argoLauncherImage: argolauncherimage:test3
     argoDriverImage: argodriverimage:test3
   persistenceAgent: {}
   scheduledWorkflow: {}
+  mlmd:
+    deploy: true
   database:
     externalDB:
       host: testdbhost3
diff --git a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
index 875a79f75..ab05ca095 100644
--- a/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_3/expected/created/apiserver_deployment.yaml
@@ -37,10 +37,6 @@ spec:
             value: "testdbhost3"
           - name: DBCONFIG_PORT
             value: "test3"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "120"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -75,6 +71,10 @@ spec:
             value: "argolauncherimage:test3"
           - name: V2_DRIVER_IMAGE
             value: "argodriverimage:test3"
+          - name: METADATA_GRPC_SERVICE_SERVICE_HOST
+            value: "ds-pipeline-metadata-grpc-testdsp3.default.svc.cluster.local"
+          - name: METADATA_GRPC_SERVICE_SERVICE_PORT
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp3.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
@@ -82,36 +82,22 @@ spec:
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: ubi-minimal:test3
-          - name: MOVERESULTS_IMAGE
-            value: busybox:test3
-          - name: ARTIFACT_IMAGE
-            value: artifact-manager:test3
-          - name: ARTIFACT_BUCKET
-            value: "testbucket3"
-          - name: ARTIFACT_ENDPOINT
-            value: "https://teststoragehost3:80"
-          - name: ARTIFACT_SCRIPT
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
+            value: "testuser3"
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "somekey"
-                name: "doesnotexist"
-          - name: ARCHIVE_LOGS
-            value: "false"
-          - name: TRACK_ARTIFACTS
-            value: "true"
-          - name: STRIP_EOF
-            value: "true"
-          - name: PIPELINE_RUNTIME
-            value: "tekton"
-          - name: INJECT_DEFAULT_SCRIPT
-            value: "true"
-          - name: APPLY_TEKTON_CUSTOM_RESOURCE
-            value: "true"
-          - name: TERMINATE_STATUS
-            value: "Cancelled"
+              secretKeyRef:
+                key: "testpswkey3"
+                name: "testdbpswsecretname3"
+          - name: DBCONFIG_MYSQLCONFIG_DBNAME
+            value: "testdbname3"
+          - name: DBCONFIG_MYSQLCONFIG_HOST
+            value: "testdbhost3"
+          - name: DBCONFIG_MYSQLCONFIG_PORT
+            value: "test3"
           image: api-server:test3
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
diff --git a/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml
deleted file mode 100644
index 3c41745dc..000000000
--- a/controllers/testdata/declarative/case_3/expected/not_created/configmap_artifact_script.yaml
+++ /dev/null
@@ -1,39 +0,0 @@
-apiVersion: v1
-data:
-  somekey: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-      workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-      workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-      artifact_name=$(basename $2)
-
-      aws_cp() {
-
-        aws s3 --endpoint http://minio-testdsp3.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-
-      }
-
-      if [ -f "$workspace_dest/$artifact_name" ]; then
-        echo sending to: ${workspace_dest}/${artifact_name}
-        tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws_cp $1
-      elif [ -f "$2" ]; then
-        tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws_cp $1
-      else
-        echo "$2 file does not exist. Skip artifact tracking for $1"
-      fi
-    }
-    push_log() {
-      cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-      push_artifact main-log step-main.log
-    }
-    strip_eof() {
-      if [ -f "$2" ]; then
-        awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-      fi
-    }
-kind: ConfigMap
-metadata:
-  name: doesnotexist
-  namespace: default
diff --git a/controllers/testdata/declarative/case_4/config.yaml b/controllers/testdata/declarative/case_4/config.yaml
index 526190245..43221eb43 100644
--- a/controllers/testdata/declarative/case_4/config.yaml
+++ b/controllers/testdata/declarative/case_4/config.yaml
@@ -1,13 +1,17 @@
-# When a DSPA with a custom db/storage secret, and custom artifact script is deployed.
 Images:
-  ApiServer: this-apiserver-image-from-config-should-not-be-used:test4
-  Artifact: this-artifact-manager-image-from-config-should-not-be-used:test4
-  PersistentAgent: this-persistenceagent-image-from-config-should-not-be-used:test4
-  ScheduledWorkflow: this-scheduledworkflow-image-from-config-should-not-be-used:test4
-  Cache: this-ubi-minimal-image-from-config-should-not-be-used:test4
-  MoveResultsImage: this-busybox-image-from-config-should-not-be-used:test4
-  MariaDB: this-mariadb-image-from-config-should-not-be-used:test4
+  ApiServer: api-server:test4
+  PersistentAgent: persistenceagent:test4
+  ScheduledWorkflow: scheduledworkflow:test4
+  MlmdEnvoy: mlmdenvoy:test4
+  MlmdGRPC: mlmdgrpc:test4
+  ArgoExecImage: argoexec:test4
+  ArgoWorkflowController: argowfcontroller:test4
+  LauncherImage: launcherimage:test4
+  DriverImage: driverimage:test4
   OAuthProxy: oauth-proxy:test4
+  MariaDB: mariadb:test4
+  MlPipelineUI: frontend:test4
+  Minio: minio:test4
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_4/deploy/00_cr.yaml b/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
index a2d7e9f4c..bcdab9f3e 100644
--- a/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
+++ b/controllers/testdata/declarative/case_4/deploy/00_cr.yaml
@@ -1,27 +1,19 @@
+# Test:
+# image setting via DSPA
+# disabling sample config
 apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
 kind: DataSciencePipelinesApplication
 metadata:
   name: testdsp4
 spec:
+  podToPodTLS: false
   apiServer:
     deploy: true
     image: this-apiserver-image-from-cr-should-be-used:test4
     enableSamplePipeline: false
-    applyTektonCustomResource: true
-    archiveLogs: false
-    artifactImage: this-artifact-manager-image-from-cr-should-be-used:test4
-    cacheImage: this-ubi-minimal-image-from-cr-should-be-used:test4
-    moveResultsImage: this-busybox-image-from-cr-should-be-used:test4
     argoLauncherImage: this-argolauncher-image-from-cr-should-be-used:test4
     argoDriverImage: this-argodriver-image-from-cr-should-be-used:test4
-    injectDefaultScript: true
-    stripEOF: true
     enableOauth: true
-    terminateStatus: Cancelled
-    trackArtifacts: true
-    dbConfigConMaxLifetimeSec: 125
-    collectMetrics: true
-    autoUpdatePipelineDefaultVersion: true
     resources:
       requests:
         cpu: "1231m"
@@ -51,6 +43,26 @@ spec:
       limits:
         cpu: "2526m"
         memory: "5Gi"
+  mlmd:
+    deploy: true
+    grpc:
+      image: this-grpc-image-from-cr-should-be-used:test4
+      resources:
+        requests:
+          cpu: "1235m"
+          memory: "1Gi"
+        limits:
+          cpu: "2526m"
+          memory: "5Gi"
+    envoy:
+      image: this-envoy-image-from-cr-should-be-used:test4
+      resources:
+        requests:
+          cpu: "1235m"
+          memory: "1Gi"
+        limits:
+          cpu: "2526m"
+          memory: "5Gi"
   mlpipelineUI:
     deploy: true
     image: this-frontend-image-from-cr-should-be-used:test4
diff --git a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
index e26220ec7..a8dea3141 100644
--- a/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/apiserver_deployment.yaml
@@ -37,10 +37,6 @@ spec:
             value: "mariadb-testdsp4.default.svc.cluster.local"
           - name: DBCONFIG_PORT
             value: "3306"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "125"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -75,6 +71,10 @@ spec:
             value: "this-argolauncher-image-from-cr-should-be-used:test4"
           - name: V2_DRIVER_IMAGE
             value: "this-argodriver-image-from-cr-should-be-used:test4"
+          - name: METADATA_GRPC_SERVICE_SERVICE_HOST
+            value: "ds-pipeline-metadata-grpc-testdsp4.default.svc.cluster.local"
+          - name: METADATA_GRPC_SERVICE_SERVICE_PORT
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp4.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
@@ -82,44 +82,26 @@ spec:
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: "this-ubi-minimal-image-from-cr-should-be-used:test4"
-          - name: MOVERESULTS_IMAGE
-            value: "this-busybox-image-from-cr-should-be-used:test4"
-          - name: ARTIFACT_IMAGE
-            value: "this-artifact-manager-image-from-cr-should-be-used:test4"
-          - name: ARTIFACT_BUCKET
-            value: "mlpipeline"
-          - name: ARTIFACT_ENDPOINT
-            value: "http://minio-testdsp4.default.svc.cluster.local:9000"
-          - name: ARTIFACT_SCRIPT
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
+            value: "testuser"
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "artifact_script"
-                name: "ds-pipeline-artifact-script-testdsp4"
-          - name: ARCHIVE_LOGS
-            value: "false"
-          - name: TRACK_ARTIFACTS
-            value: "true"
-          - name: STRIP_EOF
-            value: "true"
-          - name: PIPELINE_RUNTIME
-            value: "tekton"
-          - name: INJECT_DEFAULT_SCRIPT
-            value: "true"
-          - name: APPLY_TEKTON_CUSTOM_RESOURCE
-            value: "true"
-          - name: TERMINATE_STATUS
-            value: "Cancelled"
+              secretKeyRef:
+                key: "password"
+                name: "ds-pipeline-db-testdsp4"
+          - name: DBCONFIG_MYSQLCONFIG_DBNAME
+            value: "randomDBName"
+          - name: DBCONFIG_MYSQLCONFIG_HOST
+            value: "mariadb-testdsp4.default.svc.cluster.local"
+          - name: DBCONFIG_MYSQLCONFIG_PORT
+            value: "3306"
           image: this-apiserver-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
           command: ['/bin/apiserver']
-          volumeMounts:
-            - name: server-config
-              mountPath: /config/config.json
-              subPath: config.json
           args:
             - --config=/config
             - -logtostderr=true
@@ -161,6 +143,10 @@ spec:
             limits:
               cpu: 2522m
               memory: 5Gi
+          volumeMounts:
+            - mountPath: /config/config.json
+              name: server-config
+              subPath: config.json
         - name: oauth-proxy
           args:
             - --https-address=:8443
diff --git a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
deleted file mode 100644
index e0bddf319..000000000
--- a/controllers/testdata/declarative/case_4/expected/created/configmap_artifact_script.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-apiVersion: v1
-data:
-  artifact_script: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-      workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-      workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-      artifact_name=$(basename $2)
-
-      aws_cp() {
-
-        aws s3 --endpoint http://minio-testdsp4.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-
-      }
-
-      if [ -f "$workspace_dest/$artifact_name" ]; then
-        echo sending to: ${workspace_dest}/${artifact_name}
-        tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws_cp $1
-      elif [ -f "$2" ]; then
-        tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws_cp $1
-      else
-        echo "$2 file does not exist. Skip artifact tracking for $1"
-      fi
-    }
-    push_log() {
-      cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-      push_artifact main-log step-main.log
-    }
-    strip_eof() {
-      if [ -f "$2" ]; then
-        awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-      fi
-    }
-kind: ConfigMap
-metadata:
-  name: ds-pipeline-artifact-script-testdsp4
-  namespace: default
-  labels:
-    app: ds-pipeline-testdsp4
-    component: data-science-pipelines
diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
similarity index 79%
rename from controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml
rename to controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
index 04175020a..cdf98087c 100644
--- a/controllers/testdata/declarative/case_5/expected/created/metadata-envoy_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/mlmd_envoy_deployment.yaml
@@ -1,30 +1,30 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: ds-pipeline-metadata-envoy-testdsp5
+  name: ds-pipeline-metadata-envoy-testdsp4
   namespace: default
   labels:
-    app: ds-pipeline-metadata-envoy-testdsp5
+    app: ds-pipeline-metadata-envoy-testdsp4
     component: data-science-pipelines
-    dspa: testdsp5
+    dspa: testdsp4
 spec:
   replicas: 1
   selector:
     matchLabels:
-      app: ds-pipeline-metadata-envoy-testdsp5
+      app: ds-pipeline-metadata-envoy-testdsp4
       component: data-science-pipelines
-      dspa: testdsp5
+      dspa: testdsp4
   template:
     metadata:
       annotations:
        sidecar.istio.io/inject: "false"
      labels:
-        app: ds-pipeline-metadata-envoy-testdsp5
+        app: ds-pipeline-metadata-envoy-testdsp4
         component: data-science-pipelines
-        dspa: testdsp5
+        dspa: testdsp4
     spec:
       containers:
-        - image: metadata-envoy:test5
+        - image: this-envoy-image-from-cr-should-be-used:test4
           name: container
           command: ["/usr/local/bin/envoy"]
           args: [
@@ -51,12 +51,12 @@ spec:
             port: md-envoy
             timeoutSeconds: 2
           resources:
-            limits:
-              cpu: 100m
-              memory: 256Mi
             requests:
-              cpu: 100m
-              memory: 256Mi
+              cpu: 1235m
+              memory: 1Gi
+            limits:
+              cpu: 2526m
+              memory: 5Gi
           volumeMounts:
             - mountPath: /etc/envoy.yaml
               name: envoy-config
@@ -65,15 +65,15 @@ spec:
           args:
             - --https-address=:8443
             - --provider=openshift
-            - --openshift-service-account=ds-pipeline-metadata-envoy-testdsp5
+            - --openshift-service-account=ds-pipeline-metadata-envoy-testdsp4
             - --upstream=http://localhost:9090
             - --tls-cert=/etc/tls/private/tls.crt
             - --tls-key=/etc/tls/private/tls.key
             - --cookie-secret=SECRET
-            - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-metadata-envoy-testdsp5","namespace":"default"}}'
-            - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-metadata-envoy-testdsp5","verb":"get","resourceAPIGroup":"route.openshift.io"}'
+            - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-metadata-envoy-testdsp4","namespace":"default"}}'
+            - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-metadata-envoy-testdsp4","verb":"get","resourceAPIGroup":"route.openshift.io"}'
             - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)'
-          image: oauth-proxy:test5
+          image: oauth-proxy:test4
           ports:
             - containerPort: 8443
               name: oauth2-proxy
@@ -108,16 +108,17 @@ spec:
           volumeMounts:
             - mountPath: /etc/tls/private
               name: proxy-tls
+      serviceAccountName: ds-pipeline-metadata-envoy-testdsp4
       volumes:
         - name: envoy-config
           configMap:
-            name: ds-pipeline-metadata-envoy-config-testdsp5
+            name: ds-pipeline-metadata-envoy-config-testdsp4
             defaultMode: 420
         - name: proxy-tls
           secret:
-            secretName: ds-pipelines-envoy-proxy-tls-testdsp5
+            secretName: ds-pipelines-envoy-proxy-tls-testdsp4
             defaultMode: 420
         - name: proxy-tls-upstream
           configMap:
-            name: dsp-trusted-ca-testdsp5
+            name: dsp-trusted-ca-testdsp4
             defaultMode: 420
diff --git a/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml
new file mode 100644
index 000000000..136aa5541
--- /dev/null
+++ b/controllers/testdata/declarative/case_4/expected/created/mlmd_grpc_deployment.yaml
@@ -0,0 +1,74 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: ds-pipeline-metadata-grpc-testdsp4
+  namespace: default
+  labels:
+    app: ds-pipeline-metadata-grpc-testdsp4
+    component: data-science-pipelines
+    dspa: testdsp4
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: ds-pipeline-metadata-grpc-testdsp4
+      component: data-science-pipelines
+      dspa: testdsp4
+  template:
+    metadata:
+      labels:
+        app: ds-pipeline-metadata-grpc-testdsp4
+        component: data-science-pipelines
+        dspa: testdsp4
+    spec:
+      containers:
+        - args:
+            - --grpc_port=8080
+            - --mysql_config_database=$(MYSQL_DATABASE)
+            - --mysql_config_host=$(MYSQL_HOST)
+            - --mysql_config_port=$(MYSQL_PORT)
+            - --mysql_config_user=$(DBCONFIG_USER)
+            - --mysql_config_password=$(DBCONFIG_PASSWORD)
+            - --enable_database_upgrade=true
+          command:
+            - /bin/metadata_store_server
+          env:
+            - name: DBCONFIG_USER
+              value: "testuser"
+            - name: DBCONFIG_PASSWORD
+              valueFrom:
+                secretKeyRef:
+                  key: "password"
+                  name: "ds-pipeline-db-testdsp4"
+            - name: MYSQL_DATABASE
+              value: "randomDBName"
+            - name: MYSQL_HOST
+              value: "mariadb-testdsp4.default.svc.cluster.local"
+            - name: MYSQL_PORT
+              value: "3306"
+          image: this-grpc-image-from-cr-should-be-used:test4
+          name: container
+          ports:
+            - containerPort: 8080
+              name: grpc-api
+              protocol: TCP
+          livenessProbe:
+            initialDelaySeconds: 30
+            periodSeconds: 5
+            tcpSocket:
+              port: grpc-api
+            timeoutSeconds: 2
+          readinessProbe:
+            initialDelaySeconds: 3
+            periodSeconds: 5
+            tcpSocket:
+              port: grpc-api
+            timeoutSeconds: 2
+          resources:
+            requests:
+              cpu: 1235m
+              memory: 1Gi
+            limits:
+              cpu: 2526m
+              memory: 5Gi
+      serviceAccountName: ds-pipeline-metadata-grpc-testdsp4
diff --git a/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
index b25c44713..3818ab387 100644
--- a/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/persistence-agent_deployment.yaml
@@ -35,7 +35,7 @@ spec:
           - name: KUBEFLOW_USERID_PREFIX
             value: ""
           - name: EXECUTIONTYPE
-            value: PipelineRun
+            value: Workflow
           image: this-persistenceagent-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-persistenceagent
@@ -73,4 +73,17 @@ spec:
             limits:
               cpu: 2524m
               memory: 5Gi
+          volumeMounts:
+            - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token
+              name: persistenceagent-sa-token
+              subPath: ds-pipeline-persistenceagent-testdsp4-token
       serviceAccountName: ds-pipeline-persistenceagent-testdsp4
+      volumes:
+        - name: persistenceagent-sa-token
+          projected:
+            defaultMode: 420
+            sources:
+              - serviceAccountToken:
+                  audience: pipelines.kubeflow.org
+                  expirationSeconds: 3600
+                  path: ds-pipeline-persistenceagent-testdsp4-token
diff --git a/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
index c85f687e2..e36acc69b 100644
--- a/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
+++ b/controllers/testdata/declarative/case_4/expected/created/scheduled-workflow_deployment.yaml
@@ -28,8 +28,6 @@ spec:
             value: "default"
           - name: CRON_SCHEDULE_TIMEZONE
             value: "EST"
-          - name: EXECUTIONTYPE
-            value: PipelineRun
           image: this-scheduledworkflow-image-from-cr-should-be-used:test4
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-scheduledworkflow
diff --git a/controllers/testdata/declarative/case_5/config.yaml b/controllers/testdata/declarative/case_5/config.yaml
index b7b47ce06..5742fc308 100644
--- a/controllers/testdata/declarative/case_5/config.yaml
+++ b/controllers/testdata/declarative/case_5/config.yaml
@@ -1,18 +1,17 @@
-# When a minimal DSPA is deployed
 Images:
   ApiServer: api-server:test5
-  Artifact: artifact-manager:test5
   PersistentAgent: persistenceagent:test5
   ScheduledWorkflow: scheduledworkflow:test5
-  Cache: ubi-minimal:test5
-  MoveResultsImage: busybox:test5
-  MlPipelineUI: frontend:test5
+  MlmdEnvoy: mlmdenvoy:test5
+  MlmdGRPC: mlmdgrpc:test5
+  ArgoExecImage: argoexec:test5
+  ArgoWorkflowController: argowfcontroller:test5
+  LauncherImage: launcherimage:test5
+  DriverImage: driverimage:test5
+  OAuthProxy: oauth-proxy:test5
   MariaDB: mariadb:test5
+  MlPipelineUI: frontend:test5
   Minio: minio:test5
-  OAuthProxy: oauth-proxy:test5
-  MlmdEnvoy: metadata-envoy:changeme
-  MlmdGrpc: metadata-grpc:changeme
-  MlmdWriter: metadata-grpc:changeme
 DSPO:
   ApiServer:
     IncludeOwnerReference: false
diff --git a/controllers/testdata/declarative/case_8/deploy/00_configmap.yaml b/controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
similarity index 96%
rename from controllers/testdata/declarative/case_8/deploy/00_configmap.yaml
rename to controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
index 1dc6a89fc..5edbbb427 100644
--- a/controllers/testdata/declarative/case_8/deploy/00_configmap.yaml
+++ b/controllers/testdata/declarative/case_5/deploy/00_configmap.yaml
@@ -1,9 +1,9 @@
 kind: ConfigMap
 apiVersion: v1
 metadata:
-  name: testcabundleconfigmap8
+  name: testcabundleconfigmap5
 data:
-  testcabundleconfigmapkey8.crt: |
+  testcabundleconfigmapkey5.crt: |
     -----BEGIN CERTIFICATE-----
     MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL
     BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0
diff --git a/controllers/testdata/declarative/case_8/deploy/01_configmap.yaml b/controllers/testdata/declarative/case_5/deploy/01_configmap.yaml
similarity index 100%
rename from controllers/testdata/declarative/case_8/deploy/01_configmap.yaml
rename to controllers/testdata/declarative/case_5/deploy/01_configmap.yaml
diff --git a/controllers/testdata/declarative/case_8/deploy/02_cr.yaml b/controllers/testdata/declarative/case_5/deploy/02_cr.yaml
similarity index 67%
rename from controllers/testdata/declarative/case_8/deploy/02_cr.yaml
rename to controllers/testdata/declarative/case_5/deploy/02_cr.yaml
index b10aa4210..20b25de78 100644
--- a/controllers/testdata/declarative/case_8/deploy/02_cr.yaml
+++ b/controllers/testdata/declarative/case_5/deploy/02_cr.yaml
@@ -3,20 +3,22 @@ apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
 kind: DataSciencePipelinesApplication
 metadata:
-  name: testdsp8
+  name: testdsp5
 spec:
   podToPodTLS: true
   dspVersion: v2
   objectStorage:
     minio:
-      image: minio:test8
+      image: minio:test5
   database:
     mariaDB:
       deploy: true
+  mlmd:
+    deploy: true
   apiServer:
     deploy: true
     enableSamplePipeline: false
-    caBundleFileName: testcabundleconfigmapkey8.crt
+    caBundleFileName: testcabundleconfigmapkey5.crt
     cABundle:
-      configMapName: testcabundleconfigmap8
-      configMapKey: testcabundleconfigmapkey8.crt
+      configMapName: testcabundleconfigmap5
+      configMapKey: testcabundleconfigmapkey5.crt
diff --git a/controllers/testdata/declarative/case_5/deploy/cr.yaml b/controllers/testdata/declarative/case_5/deploy/cr.yaml
deleted file mode 100644
index 2aba42877..000000000
--- a/controllers/testdata/declarative/case_5/deploy/cr.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1
-kind: DataSciencePipelinesApplication
-metadata:
-  name: testdsp5
-spec:
-  apiServer:
-    argoLauncherImage: argolauncherimage:test5
-    argoDriverImage: argodriverimage:test5
-  objectStorage:
-    minio:
-      image: minio:test5
-  mlpipelineUI:
-    image: frontend:test5
-  mlmd:
-    deploy: true
-    envoy:
-      image: metadata-envoy:test5
-    grpc:
-      image: metadata-grpc:test5
-      port: "1337"
-    writer:
-      image: metadata-writer:test5
diff --git a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
index 2af6993ab..c0b04ebbe 100644
--- a/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/apiserver_deployment.yaml
@@ -37,10 +37,14 @@ spec:
             value: "mariadb-testdsp5.default.svc.cluster.local"
           - name: DBCONFIG_PORT
             value: "3306"
-          - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION
-            value: "true"
-          - name: DBCONFIG_CONMAXLIFETIMESEC
-            value: "120"
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME
+            value: dsp-trusted-ca-testdsp5
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY
+            value: testcabundleconfigmapkey5.crt
+          - name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH
+            value: /dsp-custom-certs
+          - name: SSL_CERT_DIR
+            value: "/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST
             value: "ds-pipeline-visualizationserver"
           - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT
@@ -72,50 +76,40 @@ spec:
           - name: MINIO_SERVICE_SERVICE_PORT
             value: "9000"
           - name: V2_LAUNCHER_IMAGE
-            value: "argolauncherimage:test5"
+            value: "launcherimage:test5"
           - name: V2_DRIVER_IMAGE
-            value: "argodriverimage:test5"
+            value: "driverimage:test5"
           - name: METADATA_GRPC_SERVICE_SERVICE_HOST
             value: "ds-pipeline-metadata-grpc-testdsp5.default.svc.cluster.local"
           - name: METADATA_GRPC_SERVICE_SERVICE_PORT
-            value: "1337"
+            value: "8080"
           - name: ML_PIPELINE_SERVICE_HOST
             value: ds-pipeline-testdsp5.default.svc.cluster.local
           - name: ML_PIPELINE_SERVICE_PORT_GRPC
             value: "8887"
           - name: SIGNED_URL_EXPIRY_TIME_SECONDS
             value: "60"
+          - name: ML_PIPELINE_TLS_ENABLED
+            value: "true"
+          - name: METADATA_TLS_ENABLED
+            value: "true"
           - name: EXECUTIONTYPE
-            value: PipelineRun
-          - name: CACHE_IMAGE
-            value: "ubi-minimal:test5"
-          - name: MOVERESULTS_IMAGE
-            value: "busybox:test5"
-          - name: ARTIFACT_IMAGE
-            value: "artifact-manager:test5"
-          - name: ARTIFACT_BUCKET
+            value: Workflow
+          - name: DB_DRIVER_NAME
+            value: mysql
+          - name: DBCONFIG_MYSQLCONFIG_USER
             value: "mlpipeline"
-          - name: ARTIFACT_ENDPOINT
-            value: "http://minio-testdsp5.default.svc.cluster.local:9000"
-          - name: ARTIFACT_SCRIPT
+          - name: DBCONFIG_MYSQLCONFIG_PASSWORD
             valueFrom:
-              configMapKeyRef:
-                key: "artifact_script"
-                name: "ds-pipeline-artifact-script-testdsp5"
-          - name: ARCHIVE_LOGS
-            value: "false"
-          - name: TRACK_ARTIFACTS
-            value: "true"
-          - name: STRIP_EOF
-            value: "true"
-          - name: PIPELINE_RUNTIME
-            value: "tekton"
-          - name: INJECT_DEFAULT_SCRIPT
-            value: "true"
-          - name: APPLY_TEKTON_CUSTOM_RESOURCE
-            value: "true"
-          - name: TERMINATE_STATUS
-            value: "Cancelled"
+              secretKeyRef:
+                key: "password"
+                name: "ds-pipeline-db-testdsp5"
+          - name: DBCONFIG_MYSQLCONFIG_DBNAME
+            value: "mlpipeline"
+          - name: DBCONFIG_MYSQLCONFIG_HOST
+            value: "mariadb-testdsp5.default.svc.cluster.local"
+          - name: DBCONFIG_MYSQLCONFIG_PORT
+            value: "3306"
           image: api-server:test5
           # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting
           name: ds-pipeline-api-server
@@ -123,6 +117,8 @@ spec:
           args:
             - --config=/config
            - -logtostderr=true
+            - --tlsCertPath=/etc/tls/private/tls.crt
+            - --tlsCertKeyPath=/etc/tls/private/tls.key
           ports:
            - containerPort: 8888
              name: http
@@ -131,29 +127,15 @@ spec:
              name: grpc
              protocol: TCP
           livenessProbe:
-            exec:
-              command:
-                - wget
-                - -q
-                - -S
-                - -O
-                - '-'
-                - http://localhost:8888/apis/v1beta1/healthz
-            initialDelaySeconds: 3
-            periodSeconds: 5
-            timeoutSeconds: 2
+            httpGet:
+              path: /apis/v1beta1/healthz
+              port: http
+              scheme: HTTPS
           readinessProbe:
-            exec:
-              command:
-                - wget
-                - -q
-                - -S
-                - -O
-                - '-'
-                - http://localhost:8888/apis/v1beta1/healthz
-            initialDelaySeconds: 3
-            periodSeconds: 5
-            timeoutSeconds: 2
+            httpGet:
+              path: /apis/v1beta1/healthz
+              port: http
+              scheme: HTTPS
           resources:
             requests:
               cpu: 250m
@@ -165,12 +147,17 @@ spec:
             - name: server-config
               mountPath: /config/config.json
               subPath: config.json
+            - mountPath: /etc/tls/private
+              name: proxy-tls
+            - name: ca-bundle
+              mountPath: /dsp-custom-certs
         - name: oauth-proxy
           args:
             - --https-address=:8443
             - --provider=openshift
             - --openshift-service-account=ds-pipeline-testdsp5
-            - --upstream=http://localhost:8888
+            - --upstream=https://ds-pipeline-testdsp5.default.svc.cluster.local:8888
+            - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt
             - --tls-cert=/etc/tls/private/tls.crt
             - --tls-key=/etc/tls/private/tls.key
             - --cookie-secret=SECRET
@@ -217,8 +204,12 @@ spec:
           secret:
             secretName: ds-pipelines-proxy-tls-testdsp5
             defaultMode: 420
-        - configMap:
-            defaultMode: 420
+        - name: server-config
+          configMap:
             name: ds-pipeline-server-config-testdsp5
-          name: server-config
+            defaultMode: 420
+        - name: ca-bundle
+          configMap:
+            name: dsp-trusted-ca-testdsp5
+            defaultMode: 420
       serviceAccountName: ds-pipeline-testdsp5
diff --git a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
deleted file mode 100644
index 33aebad09..000000000
--- a/controllers/testdata/declarative/case_5/expected/created/configmap_artifact_script.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-apiVersion: v1
-data:
-  artifact_script: |-
-    #!/usr/bin/env sh
-    push_artifact() {
-      workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g")
-      workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name)
-      artifact_name=$(basename $2)
-
-      aws_cp() {
-
-        aws s3 --endpoint http://minio-testdsp5.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz
-
-      }
-
-      if [ -f "$workspace_dest/$artifact_name" ]; then
-        echo sending to: ${workspace_dest}/${artifact_name}
-        tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name}
-        aws_cp $1
-      elif [ -f "$2" ]; then
-        tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name}
-        aws_cp $1
-      else
-        echo "$2 file does not exist. Skip artifact tracking for $1"
-      fi
-    }
-    push_log() {
-      cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log
-      push_artifact main-log step-main.log
-    }
-    strip_eof() {
-      if [ -f "$2" ]; then
-        awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2
-      fi
-    }
-kind: ConfigMap
-metadata:
-  name: ds-pipeline-artifact-script-testdsp5
-  namespace: default
-  labels:
-    app: ds-pipeline-testdsp5
-    component: data-science-pipelines
diff --git a/controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml b/controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
similarity index 98%
rename from controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml
rename to controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
index 567b05bfa..134b47fcf 100644
--- a/controllers/testdata/declarative/case_8/expected/created/configmap_dspa_trusted_ca.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/configmap_dspa_trusted_ca.yaml
@@ -1,9 +1,9 @@
 kind: ConfigMap
 apiVersion: v1
 metadata:
-  name: dsp-trusted-ca-testdsp8
+  name: dsp-trusted-ca-testdsp5
 data:
-  testcabundleconfigmapkey8.crt: |-
+  testcabundleconfigmapkey5.crt: |-
     -----BEGIN CERTIFICATE-----
     MIIFLTCCAxWgAwIBAgIUIvY4jV0212P/ddjuCZhcUyJfoocwDQYJKoZIhvcNAQEL
     BQAwJjELMAkGA1UEBhMCWFgxFzAVBgNVBAMMDnJoLWRzcC1kZXZzLmlvMB4XDTI0
diff --git a/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
index 95276f7f3..06777e778 100644
--- a/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
+++ b/controllers/testdata/declarative/case_5/expected/created/mariadb_deployment.yaml
@@ -73,7 +73,25 @@ spec:
           volumeMounts:
             - name: mariadb-persistent-storage
               mountPath: /var/lib/mysql
+            - name: mariadb-tls
+              mountPath: /.mariadb/certs
+            - name: mariadb-tls-config
+              mountPath: /etc/my.cnf.d/mariadb-tls-config.cnf
/etc/my.cnf.d/mariadb-tls-config.cnf + subPath: mariadb-tls-config.cnf volumes: - name: mariadb-persistent-storage persistentVolumeClaim: claimName: mariadb-testdsp5 + - name: mariadb-tls + secret: + secretName: ds-pipelines-mariadb-tls-testdsp5 + items: + - key: tls.crt + path: tls.crt + - key: tls.key + path: tls.key + defaultMode: 420 + - name: mariadb-tls-config + configMap: + name: ds-pipelines-mariadb-tls-config-testdsp5 + defaultMode: 420 diff --git a/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml deleted file mode 100644 index 908cf42cb..000000000 --- a/controllers/testdata/declarative/case_5/expected/created/metadata-writer_deployment.yaml +++ /dev/null @@ -1,63 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-metadata-writer-testdsp5 - namespace: default - labels: - app: ds-pipeline-metadata-writer-testdsp5 - component: data-science-pipelines - dspa: testdsp5 -spec: - replicas: 1 - selector: - matchLabels: - app: ds-pipeline-metadata-writer-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - template: - metadata: - labels: - app: ds-pipeline-metadata-writer-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - spec: - containers: - - env: - - name: NAMESPACE_TO_WATCH - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: PIPELINE_RUNTIME - value: tekton - - name: ARCHIVE_LOGS - value: "false" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp5" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "1337" - image: metadata-writer:test5 - name: main - livenessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - pidof - - python3 - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi diff --git a/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml deleted file mode 100644 index a549d1d5b..000000000 --- a/controllers/testdata/declarative/case_5/expected/created/mlpipelines-ui_deployment.yaml +++ /dev/null @@ -1,171 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-ui-testdsp5 - namespace: default - labels: - app: ds-pipeline-ui-testdsp5 - component: data-science-pipelines - dspa: testdsp5 -spec: - selector: - matchLabels: - app: ds-pipeline-ui-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-ui-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - spec: - containers: - - env: - - name: VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH - value: /etc/config/viewer-pod-template.json - - name: MINIO_NAMESPACE - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp5" - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp5" - - name: ALLOW_CUSTOM_VISUALIZATIONS - value: "true" - - name: ARGO_ARCHIVE_LOGS - value: "true" - - name: ML_PIPELINE_SERVICE_HOST - value: 
ds-pipeline-testdsp5.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT - value: '8888' - - name: METADATA_ENVOY_SERVICE_SERVICE_HOST - value: ds-pipeline-md-testdsp5 - - name: METADATA_ENVOY_SERVICE_SERVICE_PORT - value: "9090" - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp5" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp5" - - name: AWS_REGION - value: "minio" - - name: AWS_S3_ENDPOINT - value: "minio-testdsp5.default.svc.cluster.local" - - name: AWS_SSL - value: "false" - - name: DISABLE_GKE_METADATA - value: 'true' - image: frontend:test5 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - livenessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - name: ds-pipeline-ui - ports: - - containerPort: 3000 - protocol: TCP - readinessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/config - name: config-volume - readOnly: true - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-ui-testdsp5 - - --upstream=http://localhost:3000 - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp5","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp5","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test5 - ports: - - containerPort: 8443 - name: https - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - serviceAccountName: ds-pipeline-ui-testdsp5 - volumes: - - configMap: - name: ds-pipeline-ui-configmap-testdsp5 - defaultMode: 420 - name: config-volume - - name: proxy-tls - secret: - secretName: ds-pipelines-ui-proxy-tls-testdsp5 - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml deleted file mode 100644 index faad5b733..000000000 --- a/controllers/testdata/declarative/case_5/expected/created/persistence-agent_deployment.yaml +++ /dev/null @@ -1,76 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-persistenceagent-testdsp5 - namespace: default - labels: - app: ds-pipeline-persistenceagent-testdsp5 - component: data-science-pipelines - dspa: testdsp5 
-spec: - selector: - matchLabels: - app: ds-pipeline-persistenceagent-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-persistenceagent-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: TTL_SECONDS_AFTER_WORKFLOW_FINISH - value: "86400" - - name: NUM_WORKERS - value: "2" - - name: KUBEFLOW_USERID_HEADER - value: kubeflow-userid - - name: KUBEFLOW_USERID_PREFIX - value: "" - - name: EXECUTIONTYPE - value: PipelineRun - image: persistenceagent:test5 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-persistenceagent - command: - - persistence_agent - - "--logtostderr=true" - - "--ttlSecondsAfterWorkflowFinish=86400" - - "--numWorker=2" - - "--mlPipelineAPIServerName=ds-pipeline-testdsp5" - - "--namespace=default" - - "--mlPipelineServiceHttpPort=8888" - - "--mlPipelineServiceGRPCPort=8887" - livenessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 120m - memory: 500Mi - limits: - cpu: 250m - memory: 1Gi - serviceAccountName: ds-pipeline-persistenceagent-testdsp5 diff --git a/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml deleted file mode 100644 index f315e6ccc..000000000 --- a/controllers/testdata/declarative/case_5/expected/created/scheduled-workflow_deployment.yaml +++ /dev/null @@ -1,65 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-scheduledworkflow-testdsp5 - namespace: default - labels: - app: ds-pipeline-scheduledworkflow-testdsp5 - component: data-science-pipelines - dspa: testdsp5 -spec: - selector: - matchLabels: - app: ds-pipeline-scheduledworkflow-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-scheduledworkflow-testdsp5 - component: data-science-pipelines - dspa: testdsp5 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: CRON_SCHEDULE_TIMEZONE - value: "UTC" - - name: EXECUTIONTYPE - value: PipelineRun - image: scheduledworkflow:test5 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-scheduledworkflow - command: - - controller - - "--logtostderr=true" - - "--namespace=testdsp5" - livenessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 120m - memory: 100Mi - limits: - cpu: 250m - memory: 250Mi - serviceAccountName: ds-pipeline-scheduledworkflow-testdsp5 diff --git a/controllers/testdata/declarative/case_6/config.yaml b/controllers/testdata/declarative/case_6/config.yaml new file mode 100644 index 000000000..78c935163 --- /dev/null +++ 
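[Reviewer sketch, not part of the patch] The new case_6/config.yaml that follows drops the v1-only image fields (Artifact, Cache, MoveResultsImage) and flattens the former ImagesV2.Argo entries (launcher, driver, workflow controller) into the single Images map the operator reads. A minimal sketch of how such a config might be consumed, assuming a viper-based loader like the one visible in the main.go hunk later in this patch; the struct below mirrors the test config keys and is not necessarily the operator's real config type:

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

// Images mirrors the keys under "Images:" in case_6/config.yaml.
// Hypothetical struct for illustration only.
type Images struct {
	ApiServer         string
	PersistentAgent   string
	ScheduledWorkflow string
	MlmdEnvoy         string
	MlmdGRPC          string
	LauncherImage     string
	DriverImage       string
}

func loadImages(path string) (*Images, error) {
	viper.SetConfigFile(path)
	if err := viper.ReadInConfig(); err != nil {
		return nil, err
	}
	var imgs Images
	// viper keys are case-insensitive, so "images" matches "Images:".
	if err := viper.UnmarshalKey("images", &imgs); err != nil {
		return nil, err
	}
	return &imgs, nil
}

func main() {
	imgs, err := loadImages("controllers/testdata/declarative/case_6/config.yaml")
	if err != nil {
		panic(err)
	}
	fmt.Println(imgs.ApiServer) // api-server:test6
}
```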
b/controllers/testdata/declarative/case_6/config.yaml @@ -0,0 +1,17 @@ +Images: + ApiServer: api-server:test6 + PersistentAgent: persistenceagent:test6 + ScheduledWorkflow: scheduledworkflow:test6 + MlmdEnvoy: mlmdenvoy:test6 + MlmdGRPC: mlmdgrpc:test6 + ArgoExecImage: argoexec:test6 + ArgoWorkflowController: argowfcontroller:test6 + LauncherImage: launcherimage:test6 + DriverImage: driverimage:test6 + OAuthProxy: oauth-proxy:test6 + MariaDB: mariadb:test6 + MlPipelineUI: frontend:test6 + Minio: minio:test6 +DSPO: + ApiServer: + IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_9/deploy/00_cr.yaml b/controllers/testdata/declarative/case_6/deploy/00_cr.yaml similarity index 82% rename from controllers/testdata/declarative/case_9/deploy/00_cr.yaml rename to controllers/testdata/declarative/case_6/deploy/00_cr.yaml index 612fb3dc4..6c3beb38c 100644 --- a/controllers/testdata/declarative/case_9/deploy/00_cr.yaml +++ b/controllers/testdata/declarative/case_6/deploy/00_cr.yaml @@ -3,19 +3,20 @@ apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 kind: DataSciencePipelinesApplication metadata: - name: testdsp9 + name: testdsp6 spec: - dspVersion: v2 podToPodTLS: false objectStorage: minio: - image: minio:test9 + image: minio:test6 database: mariaDB: deploy: true mlpipelineUI: deploy: true - image: frontend:test9 + image: frontend:test6 + mlmd: + deploy: true apiServer: deploy: true enableOauth: true diff --git a/controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml similarity index 79% rename from controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml rename to controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml index 86a7c7302..e72cdfe17 100644 --- a/controllers/testdata/declarative/case_9/expected/created/apiserver_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/apiserver_deployment.yaml @@ -1,24 +1,24 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-testdsp9 + name: ds-pipeline-testdsp6 namespace: default labels: - app: ds-pipeline-testdsp9 + app: ds-pipeline-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: selector: matchLabels: - app: ds-pipeline-testdsp9 + app: ds-pipeline-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 template: metadata: labels: - app: ds-pipeline-testdsp9 + app: ds-pipeline-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: containers: - env: @@ -30,57 +30,53 @@ spec: valueFrom: secretKeyRef: key: "password" - name: "ds-pipeline-db-testdsp9" + name: "ds-pipeline-db-testdsp6" - name: DBCONFIG_DBNAME value: "mlpipeline" - name: DBCONFIG_HOST - value: "mariadb-testdsp9.default.svc.cluster.local" + value: "mariadb-testdsp6.default.svc.cluster.local" - name: DBCONFIG_PORT value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "120" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST value: "ds-pipeline-visualizationserver" - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT value: "8888" - name: OBJECTSTORECONFIG_CREDENTIALSSECRET - value: "ds-pipeline-s3-testdsp9" + value: "ds-pipeline-s3-testdsp6" - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY value: "accesskey" - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY value: "secretkey" - name: 
DEFAULTPIPELINERUNNERSERVICEACCOUNT - value: "pipeline-runner-testdsp9" + value: "pipeline-runner-testdsp6" - name: OBJECTSTORECONFIG_BUCKETNAME value: "mlpipeline" - name: OBJECTSTORECONFIG_ACCESSKEY valueFrom: secretKeyRef: key: "accesskey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: OBJECTSTORECONFIG_SECRETACCESSKEY valueFrom: secretKeyRef: key: "secretkey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: OBJECTSTORECONFIG_SECURE value: "false" - name: MINIO_SERVICE_SERVICE_HOST - value: "minio-testdsp9.default.svc.cluster.local" + value: "minio-testdsp6.default.svc.cluster.local" - name: MINIO_SERVICE_SERVICE_PORT value: "9000" - name: V2_LAUNCHER_IMAGE - value: "argolauncherimage:test9" + value: "launcherimage:test6" - name: V2_DRIVER_IMAGE - value: "argodriverimage:test9" + value: "driverimage:test6" - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp9.default.svc.cluster.local" + value: "ds-pipeline-metadata-grpc-testdsp6.default.svc.cluster.local" - name: METADATA_GRPC_SERVICE_SERVICE_PORT value: "8080" - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp9.default.svc.cluster.local + value: ds-pipeline-testdsp6.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT_GRPC value: "8887" - name: SIGNED_URL_EXPIRY_TIME_SECONDS @@ -95,14 +91,14 @@ spec: valueFrom: secretKeyRef: key: "password" - name: "ds-pipeline-db-testdsp9" + name: "ds-pipeline-db-testdsp6" - name: DBCONFIG_MYSQLCONFIG_DBNAME value: "mlpipeline" - name: DBCONFIG_MYSQLCONFIG_HOST - value: "mariadb-testdsp9.default.svc.cluster.local" + value: "mariadb-testdsp6.default.svc.cluster.local" - name: DBCONFIG_MYSQLCONFIG_PORT value: "3306" - image: api-server:test9 + image: api-server:test6 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-api-server command: ['/bin/apiserver'] @@ -141,15 +137,15 @@ spec: args: - --https-address=:8443 - --provider=openshift - - --openshift-service-account=ds-pipeline-testdsp9 + - --openshift-service-account=ds-pipeline-testdsp6 - --upstream=http://localhost:8888 - --tls-cert=/etc/tls/private/tls.crt - --tls-key=/etc/tls/private/tls.key - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp9","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp9","verb":"get","resourceAPIGroup":"route.openshift.io"}' + - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp6","namespace":"default"}}' + - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp6","verb":"get","resourceAPIGroup":"route.openshift.io"}' - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test9 + image: oauth-proxy:test6 ports: - containerPort: 8443 name: oauth @@ -187,10 +183,10 @@ spec: volumes: - name: proxy-tls secret: - secretName: ds-pipelines-proxy-tls-testdsp9 + secretName: ds-pipelines-proxy-tls-testdsp6 defaultMode: 420 - name: server-config configMap: - name: ds-pipeline-server-config-testdsp9 + name: ds-pipeline-server-config-testdsp6 defaultMode: 420 - serviceAccountName: ds-pipeline-testdsp9 + serviceAccountName: ds-pipeline-testdsp6 diff --git a/controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml 
b/controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml similarity index 84% rename from controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml rename to controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml index d8bf9c83c..cd41b1b67 100644 --- a/controllers/testdata/declarative/case_9/expected/created/mlpipelines-ui_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/mlpipelines-ui_deployment.yaml @@ -1,26 +1,26 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-ui-testdsp9 + name: ds-pipeline-ui-testdsp6 namespace: default labels: - app: ds-pipeline-ui-testdsp9 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: selector: matchLabels: - app: ds-pipeline-ui-testdsp9 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ds-pipeline-ui-testdsp9 + app: ds-pipeline-ui-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: containers: - env: @@ -35,43 +35,43 @@ spec: valueFrom: secretKeyRef: key: "accesskey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: MINIO_SECRET_KEY valueFrom: secretKeyRef: key: "secretkey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: ALLOW_CUSTOM_VISUALIZATIONS value: "true" - name: ARGO_ARCHIVE_LOGS value: "true" - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp9.default.svc.cluster.local + value: ds-pipeline-testdsp6.default.svc.cluster.local - name: ML_PIPELINE_SERVICE_PORT value: '8888' - name: METADATA_ENVOY_SERVICE_SERVICE_HOST - value: ds-pipeline-md-testdsp9 + value: ds-pipeline-md-testdsp6 - name: METADATA_ENVOY_SERVICE_SERVICE_PORT value: "9090" - name: AWS_ACCESS_KEY_ID valueFrom: secretKeyRef: key: "accesskey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: AWS_SECRET_ACCESS_KEY valueFrom: secretKeyRef: key: "secretkey" - name: "ds-pipeline-s3-testdsp9" + name: "ds-pipeline-s3-testdsp6" - name: AWS_REGION value: "minio" - name: AWS_S3_ENDPOINT - value: "minio-testdsp9.default.svc.cluster.local" + value: "minio-testdsp6.default.svc.cluster.local" - name: AWS_SSL value: "false" - name: DISABLE_GKE_METADATA value: 'true' - image: frontend:test9 + image: frontend:test6 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting livenessProbe: exec: @@ -116,15 +116,15 @@ spec: args: - --https-address=:8443 - --provider=openshift - - --openshift-service-account=ds-pipeline-ui-testdsp9 + - --openshift-service-account=ds-pipeline-ui-testdsp6 - --upstream=http://localhost:3000 - --tls-cert=/etc/tls/private/tls.crt - --tls-key=/etc/tls/private/tls.key - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp9","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp9","verb":"get","resourceAPIGroup":"route.openshift.io"}' + - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp6","namespace":"default"}}' + - 
'--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp6","verb":"get","resourceAPIGroup":"route.openshift.io"}' - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test9 + image: oauth-proxy:test6 ports: - containerPort: 8443 name: https @@ -159,13 +159,13 @@ spec: volumeMounts: - mountPath: /etc/tls/private name: proxy-tls - serviceAccountName: ds-pipeline-ui-testdsp9 + serviceAccountName: ds-pipeline-ui-testdsp6 volumes: - configMap: - name: ds-pipeline-ui-configmap-testdsp9 + name: ds-pipeline-ui-configmap-testdsp6 defaultMode: 420 name: config-volume - name: proxy-tls secret: - secretName: ds-pipelines-ui-proxy-tls-testdsp9 + secretName: ds-pipelines-ui-proxy-tls-testdsp6 defaultMode: 420 diff --git a/controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml similarity index 79% rename from controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml rename to controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml index 27fcf1353..f0d592c80 100644 --- a/controllers/testdata/declarative/case_9/expected/created/persistence-agent_deployment.yaml +++ b/controllers/testdata/declarative/case_6/expected/created/persistence-agent_deployment.yaml @@ -1,26 +1,26 @@ apiVersion: apps/v1 kind: Deployment metadata: - name: ds-pipeline-persistenceagent-testdsp9 + name: ds-pipeline-persistenceagent-testdsp6 namespace: default labels: - app: ds-pipeline-persistenceagent-testdsp9 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: selector: matchLabels: - app: ds-pipeline-persistenceagent-testdsp9 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 template: metadata: annotations: cluster-autoscaler.kubernetes.io/safe-to-evict: "true" labels: - app: ds-pipeline-persistenceagent-testdsp9 + app: ds-pipeline-persistenceagent-testdsp6 component: data-science-pipelines - dspa: testdsp9 + dspa: testdsp6 spec: containers: - env: @@ -36,7 +36,7 @@ spec: value: "" - name: EXECUTIONTYPE value: Workflow - image: persistenceagent:test9 + image: persistenceagent:test6 # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting name: ds-pipeline-persistenceagent command: @@ -44,8 +44,8 @@ spec: - "--logtostderr=true" - "--ttlSecondsAfterWorkflowFinish=86400" - "--numWorker=5" - - "--mlPipelineAPIServerName=ds-pipeline-testdsp9" - - "--namespace=testdsp9" + - "--mlPipelineAPIServerName=ds-pipeline-testdsp6" + - "--namespace=testdsp6" - "--mlPipelineServiceHttpPort=8888" - "--mlPipelineServiceGRPCPort=8887" livenessProbe: @@ -76,8 +76,8 @@ spec: volumeMounts: - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token name: persistenceagent-sa-token - subPath: ds-pipeline-persistenceagent-testdsp9-token - serviceAccountName: ds-pipeline-persistenceagent-testdsp9 + subPath: ds-pipeline-persistenceagent-testdsp6-token + serviceAccountName: ds-pipeline-persistenceagent-testdsp6 volumes: - name: persistenceagent-sa-token projected: @@ -85,5 +85,5 @@ spec: - serviceAccountToken: audience: pipelines.kubeflow.org expirationSeconds: 3600 - path: ds-pipeline-persistenceagent-testdsp9-token + path: ds-pipeline-persistenceagent-testdsp6-token defaultMode: 420 diff --git 
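[Reviewer sketch, not part of the patch] The persistence-agent hunk above keeps the projected serviceAccountToken volume: a token with audience `pipelines.kubeflow.org` and a 3600s expiry, mounted at a fixed path via subPath. A minimal sketch of how a consumer might read it; re-reading the file on each use is an assumption, but it is the usual pattern for projected tokens, since the kubelet rewrites the file before expiry:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// Matches the volumeMount in the persistence-agent deployment above.
const tokenPath = "/var/run/secrets/kubeflow/tokens/persistenceagent-sa-token"

// bearerToken re-reads the projected token so rotations are picked up.
func bearerToken() (string, error) {
	b, err := os.ReadFile(tokenPath)
	if err != nil {
		return "", err
	}
	return strings.TrimSpace(string(b)), nil
}

func main() {
	tok, err := bearerToken()
	if err != nil {
		fmt.Fprintln(os.Stderr, "token not mounted:", err)
		os.Exit(1)
	}
	fmt.Printf("read %d-byte token with audience pipelines.kubeflow.org\n", len(tok))
}
```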
a/controllers/testdata/declarative/case_7/config.yaml b/controllers/testdata/declarative/case_7/config.yaml deleted file mode 100644 index 2a5f895cc..000000000 --- a/controllers/testdata/declarative/case_7/config.yaml +++ /dev/null @@ -1,15 +0,0 @@ -# When a minimal DSPA is deployed -Images: - ApiServer: api-server:test7 - Artifact: artifact-manager:test7 - PersistentAgent: persistenceagent:test7 - ScheduledWorkflow: scheduledworkflow:test7 - Cache: ubi-minimal:test7 - MoveResultsImage: busybox:test7 - MlPipelineUI: frontend:test7 - MariaDB: mariadb:test7 - Minio: minio:test7 - OAuthProxy: oauth-proxy:test7 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_7/deploy/cr.yaml b/controllers/testdata/declarative/case_7/deploy/cr.yaml deleted file mode 100644 index ac4aa3279..000000000 --- a/controllers/testdata/declarative/case_7/deploy/cr.yaml +++ /dev/null @@ -1,93 +0,0 @@ -apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 -kind: DataSciencePipelinesApplication -metadata: - name: testdsp7 -spec: - dspVersion: v2 - podToPodTLS: false - apiServer: - deploy: true - image: api-server:test7 - applyTektonCustomResource: true - archiveLogs: false - artifactImage: artifact-manager:test7 - cacheImage: ubi-minimal:test7 - moveResultsImage: busybox:test7 - argoLauncherImage: argolauncherimage:test7 - argoDriverImage: argodriverimage:test7 - injectDefaultScript: true - stripEOF: true - enableOauth: true - enableSamplePipeline: true - terminateStatus: Cancelled - trackArtifacts: true - dbConfigConMaxLifetimeSec: 125 - collectMetrics: true - autoUpdatePipelineDefaultVersion: true - resources: - requests: - cpu: "1231m" - memory: "1Gi" - limits: - cpu: "2522m" - memory: "5Gi" - persistenceAgent: - deploy: true - image: persistenceagent:test7 - numWorkers: 5 - resources: - requests: - cpu: "1233m" - memory: "1Gi" - limits: - cpu: "2524m" - memory: "5Gi" - scheduledWorkflow: - deploy: true - image: scheduledworkflow:test7 - cronScheduleTimezone: EST - resources: - requests: - cpu: "1235m" - memory: "1Gi" - limits: - cpu: "2526m" - memory: "5Gi" - mlpipelineUI: - deploy: true - image: frontend:test7 - configMap: some-test-configmap - resources: - requests: - cpu: "1239m" - memory: "1Gi" - limits: - cpu: "2530m" - memory: "5Gi" - database: - mariaDB: - deploy: true - image: mariadb:test7 - username: testuser - pipelineDBName: randomDBName - pvcSize: 32Gi - resources: - requests: - cpu: "1212m" - memory: "1Gi" - limits: - cpu: "2554m" - memory: "5Gi" - objectStorage: - minio: - deploy: true - image: minio:test7 - bucket: mlpipeline - pvcSize: 40Gi - resources: - requests: - cpu: "1334m" - memory: "1Gi" - limits: - cpu: "2535m" - memory: "5Gi" diff --git a/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml deleted file mode 100644 index 2458af03d..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/apiserver_deployment.yaml +++ /dev/null @@ -1,210 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: 
POD_NAMESPACE - value: "default" - - name: DBCONFIG_USER - value: "testuser" - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: DBCONFIG_DBNAME - value: "randomDBName" - - name: DBCONFIG_HOST - value: "mariadb-testdsp7.default.svc.cluster.local" - - name: DBCONFIG_PORT - value: "3306" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "125" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST - value: "ds-pipeline-visualizationserver" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT - value: "8888" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRET - value: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY - value: "accesskey" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY - value: "secretkey" - - name: DEFAULTPIPELINERUNNERSERVICEACCOUNT - value: "pipeline-runner-testdsp7" - - name: OBJECTSTORECONFIG_BUCKETNAME - value: "mlpipeline" - - name: OBJECTSTORECONFIG_ACCESSKEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_SECRETACCESSKEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: OBJECTSTORECONFIG_SECURE - value: "false" - - name: MINIO_SERVICE_SERVICE_HOST - value: "minio-testdsp7.default.svc.cluster.local" - - name: MINIO_SERVICE_SERVICE_PORT - value: "9000" - - name: V2_LAUNCHER_IMAGE - value: "argolauncherimage:test7" - - name: V2_DRIVER_IMAGE - value: "argodriverimage:test7" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp7.default.svc.cluster.local" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "8080" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp7.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - - name: SIGNED_URL_EXPIRY_TIME_SECONDS - value: "60" - - name: EXECUTIONTYPE - value: Workflow - - name: DB_DRIVER_NAME - value: mysql - - name: DBCONFIG_MYSQLCONFIG_USER - value: testuser - - name: DBCONFIG_MYSQLCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: DBCONFIG_MYSQLCONFIG_DBNAME - value: "randomDBName" - - name: DBCONFIG_MYSQLCONFIG_HOST - value: "mariadb-testdsp7.default.svc.cluster.local" - - name: DBCONFIG_MYSQLCONFIG_PORT - value: "3306" - image: api-server:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-api-server - command: ['/bin/apiserver'] - args: - - --config=/config - - -logtostderr=true - - --sampleconfig=/config/sample_config.json - ports: - - containerPort: 8888 - name: http - protocol: TCP - - containerPort: 8887 - name: grpc - protocol: TCP - livenessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - readinessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - resources: - requests: - cpu: 1231m - memory: 1Gi - limits: - cpu: 2522m - memory: 5Gi - volumeMounts: - - name: server-config - mountPath: /config/config.json - subPath: config.json - - mountPath: /config/sample_config.json - name: sample-config - subPath: sample_config.json - - mountPath: /samples/ - name: sample-pipeline - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-testdsp7 - - --upstream=http://localhost:8888 - - --tls-cert=/etc/tls/private/tls.crt - - 
--tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp7","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp7","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test7 - ports: - - containerPort: 8443 - name: oauth - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - volumes: - - name: proxy-tls - secret: - secretName: ds-pipelines-proxy-tls-testdsp7 - defaultMode: 420 - - name: server-config - configMap: - defaultMode: 420 - name: ds-pipeline-server-config-testdsp7 - - configMap: - defaultMode: 420 - name: sample-config-testdsp7 - name: sample-config - - configMap: - defaultMode: 420 - name: sample-pipeline-testdsp7 - name: sample-pipeline - serviceAccountName: ds-pipeline-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index a1550c013..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp7.default.svc.cluster.local:9000 cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml deleted file mode 100644 index e982d3b31..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/mariadb_deployment.yaml +++ /dev/null @@ -1,79 +0,0 @@ ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: mariadb-testdsp7 - namespace: default - labels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - strategy: - type: Recreate # Need this since backing PVC is ReadWriteOnce, which creates resource lock condition in default Rolling strategy - selector: - matchLabels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - labels: - app: mariadb-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - name: mariadb - image: mariadb:test7 - ports: - - containerPort: 3306 - protocol: TCP - readinessProbe: - exec: - command: - - /bin/sh - - "-i" - - "-c" - - >- - MYSQL_PWD=$MYSQL_PASSWORD mysql -h 127.0.0.1 -u $MYSQL_USER -D - $MYSQL_DATABASE -e 'SELECT 1' - failureThreshold: 3 - initialDelaySeconds: 5 - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 1 - livenessProbe: - failureThreshold: 3 - initialDelaySeconds: 30 - periodSeconds: 10 - successThreshold: 1 - tcpSocket: - port: 3306 - timeoutSeconds: 1 - env: - - name: MYSQL_USER - value: "testuser" - - name: MYSQL_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp7" - - name: MYSQL_DATABASE - value: "randomDBName" - - name: MYSQL_ALLOW_EMPTY_PASSWORD - value: "true" - resources: - requests: - cpu: 1212m - memory: 1Gi - limits: - cpu: 2554m - memory: 5Gi - volumeMounts: - - name: mariadb-persistent-storage - mountPath: /var/lib/mysql - volumes: - - name: mariadb-persistent-storage - persistentVolumeClaim: - claimName: mariadb-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml deleted file mode 100644 index da4a1627b..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/minio_deployment.yaml +++ /dev/null @@ -1,75 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: minio-testdsp7 - namespace: default - labels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - strategy: - type: Recreate - template: - metadata: - labels: - app: minio-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - args: - - server - - /data - env: - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - image: minio:test7 - name: minio - ports: - - containerPort: 9000 - protocol: 
TCP - livenessProbe: - tcpSocket: - port: 9000 - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - tcpSocket: - port: 9000 - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - requests: - cpu: 1334m - memory: 1Gi - limits: - cpu: 2535m - memory: 5Gi - volumeMounts: - - mountPath: /data - name: data - subPath: minio - volumes: - - name: data - persistentVolumeClaim: - claimName: minio-testdsp7 diff --git a/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml deleted file mode 100644 index abf2d040d..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/mlpipelines-ui_deployment.yaml +++ /dev/null @@ -1,171 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-ui-testdsp7 - namespace: default - labels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-ui-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: VIEWER_TENSORBOARD_POD_TEMPLATE_SPEC_PATH - value: /etc/config/viewer-pod-template.json - - name: MINIO_NAMESPACE - valueFrom: - fieldRef: - apiVersion: v1 - fieldPath: metadata.namespace - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: ALLOW_CUSTOM_VISUALIZATIONS - value: "true" - - name: ARGO_ARCHIVE_LOGS - value: "true" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp7.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT - value: '8888' - - name: METADATA_ENVOY_SERVICE_SERVICE_HOST - value: ds-pipeline-md-testdsp7 - - name: METADATA_ENVOY_SERVICE_SERVICE_PORT - value: "9090" - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp7" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp7" - - name: AWS_REGION - value: "minio" - - name: AWS_S3_ENDPOINT - value: "minio-testdsp7.default.svc.cluster.local" - - name: AWS_SSL - value: "false" - - name: DISABLE_GKE_METADATA - value: 'true' - image: frontend:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - livenessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - name: ds-pipeline-ui - ports: - - containerPort: 3000 - protocol: TCP - readinessProbe: - exec: - command: - - wget - - -q - - -S - - -O - - '-' - - http://localhost:3000/apis/v1beta1/healthz - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1239m - memory: 1Gi - limits: - cpu: 2530m - memory: 5Gi - volumeMounts: - - mountPath: /etc/config - name: config-volume - readOnly: true - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-ui-testdsp7 
- - --upstream=http://localhost:3000 - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-ui-testdsp7","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-ui-testdsp7","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test7 - ports: - - containerPort: 8443 - name: https - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: 8443 - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - serviceAccountName: ds-pipeline-ui-testdsp7 - volumes: - - configMap: - name: some-test-configmap - defaultMode: 420 - name: config-volume - - name: proxy-tls - secret: - secretName: ds-pipelines-ui-proxy-tls-testdsp7 - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml deleted file mode 100644 index a5f4e31fb..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/persistence-agent_deployment.yaml +++ /dev/null @@ -1,89 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-persistenceagent-testdsp7 - namespace: default - labels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-persistenceagent-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: TTL_SECONDS_AFTER_WORKFLOW_FINISH - value: "86400" - - name: NUM_WORKERS - value: "2" - - name: KUBEFLOW_USERID_HEADER - value: kubeflow-userid - - name: KUBEFLOW_USERID_PREFIX - value: "" - - name: EXECUTIONTYPE - value: Workflow - image: persistenceagent:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-persistenceagent - command: - - persistence_agent - - "--logtostderr=true" - - "--ttlSecondsAfterWorkflowFinish=86400" - - "--numWorker=5" - - "--mlPipelineAPIServerName=ds-pipeline-testdsp7" - - "--namespace=testdsp7" - - "--mlPipelineServiceHttpPort=8888" - - "--mlPipelineServiceGRPCPort=8887" - livenessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - persistence_agent - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1233m - memory: 1Gi - limits: - cpu: 2524m - memory: 5Gi - volumeMounts: - - mountPath: /var/run/secrets/kubeflow/tokens/persistenceagent-sa-token - 
name: persistenceagent-sa-token - subPath: ds-pipeline-persistenceagent-testdsp7-token - serviceAccountName: ds-pipeline-persistenceagent-testdsp7 - volumes: - - name: persistenceagent-sa-token - projected: - sources: - - serviceAccountToken: - audience: pipelines.kubeflow.org - expirationSeconds: 3600 - path: ds-pipeline-persistenceagent-testdsp7-token - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl b/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl deleted file mode 100644 index c7bfcafe5..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/sample-config.yaml.tmpl +++ /dev/null @@ -1,17 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-config-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines -data: - sample_config.json: |- - [ - { - "name": "[Demo] iris-training", - "description": "[source code](https://github.com/opendatahub-io/data-science-pipelines/tree/master/samples/iris-sklearn) A simple pipeline to demonstrate a basic ML Training workflow", - "file": "/samples/iris-pipeline-compiled.yaml" - } - ] diff --git a/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl b/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl deleted file mode 100644 index 832cedfcc..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/sample-pipeline.yaml.tmpl +++ /dev/null @@ -1,254 +0,0 @@ -apiVersion: v1 -kind: ConfigMap -metadata: - name: sample-pipeline-testdsp7 - namespace: default - labels: - app: ds-pipeline-testdsp7 - component: data-science-pipelines -data: - iris-pipeline-compiled.yaml: |- - # PIPELINE DEFINITION - # Name: iris-training-pipeline - # Inputs: - # neighbors: int [Default: 3.0] - # standard_scaler: bool [Default: True] - # Outputs: - # train-model-metrics: system.ClassificationMetrics - components: - comp-create-dataset: - executorLabel: exec-create-dataset - outputDefinitions: - artifacts: - iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-normalize-dataset: - executorLabel: exec-normalize-dataset - inputDefinitions: - artifacts: - input_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - standard_scaler: - parameterType: BOOLEAN - outputDefinitions: - artifacts: - normalized_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - comp-train-model: - executorLabel: exec-train-model - inputDefinitions: - artifacts: - normalized_iris_dataset: - artifactType: - schemaTitle: system.Dataset - schemaVersion: 0.0.1 - parameters: - n_neighbors: - parameterType: NUMBER_INTEGER - outputDefinitions: - artifacts: - metrics: - artifactType: - schemaTitle: system.ClassificationMetrics - schemaVersion: 0.0.1 - model: - artifactType: - schemaTitle: system.Model - schemaVersion: 0.0.1 - deploymentSpec: - executors: - exec-create-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - create_dataset - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef create_dataset(iris_dataset: Output[Dataset]):\n import pandas\ - \ as pd\n\n csv_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data'\n\ - \ col_names = [\n 'Sepal_Length', 'Sepal_Width', 'Petal_Length',\ - \ 'Petal_Width', 'Labels'\n ]\n df = pd.read_csv(csv_url, names=col_names)\n\ - \n with open(iris_dataset.path, 'w') as f:\n df.to_csv(f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - exec-normalize-dataset: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - normalize_dataset - command: - - sh - - -c - - "\nif ! [ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef normalize_dataset(\n input_iris_dataset: Input[Dataset],\n\ - \ normalized_iris_dataset: Output[Dataset],\n standard_scaler: bool,\n\ - ):\n\n import pandas as pd\n from sklearn.preprocessing import MinMaxScaler\n\ - \ from sklearn.preprocessing import StandardScaler\n\n with open(input_iris_dataset.path)\ - \ as f:\n df = pd.read_csv(f)\n labels = df.pop('Labels')\n\n\ - \ scaler = StandardScaler() if standard_scaler else MinMaxScaler()\n\n\ - \ df = pd.DataFrame(scaler.fit_transform(df))\n df['Labels'] = labels\n\ - \ normalized_iris_dataset.metadata['state'] = \"Normalized\"\n with\ - \ open(normalized_iris_dataset.path, 'w') as f:\n df.to_csv(f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - exec-train-model: - container: - args: - - --executor_input - - '{{$}}' - - --function_to_execute - - train_model - command: - - sh - - -c - - "\nif ! 
[ -x \"$(command -v pip)\" ]; then\n python3 -m ensurepip ||\ - \ python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1\ - \ python3 -m pip install --quiet --no-warn-script-location 'kfp==2.7.0'\ - \ '--no-deps' 'typing-extensions>=3.7.4,<5; python_version<\"3.9\"' &&\ - \ python3 -m pip install --quiet --no-warn-script-location 'pandas==2.2.0'\ - \ 'scikit-learn==1.4.0' && \"$0\" \"$@\"\n" - - sh - - -ec - - 'program_path=$(mktemp -d) - - - printf "%s" "$0" > "$program_path/ephemeral_component.py" - - _KFP_RUNTIME=true python3 -m kfp.dsl.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@" - - ' - - "\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import\ - \ *\n\ndef train_model(\n normalized_iris_dataset: Input[Dataset],\n\ - \ model: Output[Model],\n metrics: Output[ClassificationMetrics],\n\ - \ n_neighbors: int,\n):\n import pickle\n\n import pandas as pd\n\ - \ from sklearn.model_selection import train_test_split\n from sklearn.neighbors\ - \ import KNeighborsClassifier\n\n from sklearn.metrics import roc_curve\n\ - \ from sklearn.model_selection import train_test_split, cross_val_predict\n\ - \ from sklearn.metrics import confusion_matrix\n\n\n with open(normalized_iris_dataset.path)\ - \ as f:\n df = pd.read_csv(f)\n\n y = df.pop('Labels')\n X\ - \ = df\n\n X_train, X_test, y_train, y_test = train_test_split(X, y,\ - \ random_state=0)\n\n clf = KNeighborsClassifier(n_neighbors=n_neighbors)\n\ - \ clf.fit(X_train, y_train)\n\n predictions = cross_val_predict(\n\ - \ clf, X_train, y_train, cv=3)\n metrics.log_confusion_matrix(\n\ - \ ['Iris-Setosa', 'Iris-Versicolour', 'Iris-Virginica'],\n \ - \ confusion_matrix(\n y_train,\n predictions).tolist()\ - \ # .tolist() to convert np array to list.\n )\n\n model.metadata['framework']\ - \ = 'scikit-learn'\n with open(model.path, 'wb') as f:\n pickle.dump(clf,\ - \ f)\n\n" - image: quay.io/opendatahub/ds-pipelines-sample-base:v1.0 - pipelineInfo: - name: iris-training-pipeline - root: - dag: - outputs: - artifacts: - train-model-metrics: - artifactSelectors: - - outputArtifactKey: metrics - producerSubtask: train-model - tasks: - create-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-create-dataset - taskInfo: - name: create-dataset - normalize-dataset: - cachingOptions: - enableCache: true - componentRef: - name: comp-normalize-dataset - dependentTasks: - - create-dataset - inputs: - artifacts: - input_iris_dataset: - taskOutputArtifact: - outputArtifactKey: iris_dataset - producerTask: create-dataset - parameters: - standard_scaler: - runtimeValue: - constant: true - taskInfo: - name: normalize-dataset - train-model: - cachingOptions: - enableCache: true - componentRef: - name: comp-train-model - dependentTasks: - - normalize-dataset - inputs: - artifacts: - normalized_iris_dataset: - taskOutputArtifact: - outputArtifactKey: normalized_iris_dataset - producerTask: normalize-dataset - parameters: - n_neighbors: - componentInputParameter: neighbors - taskInfo: - name: train-model - inputDefinitions: - parameters: - neighbors: - defaultValue: 3.0 - isOptional: true - parameterType: NUMBER_INTEGER - standard_scaler: - defaultValue: true - isOptional: true - parameterType: BOOLEAN - outputDefinitions: - artifacts: - train-model-metrics: - artifactType: - schemaTitle: system.ClassificationMetrics - schemaVersion: 0.0.1 - schemaVersion: 2.1.0 - sdkVersion: kfp-2.7.0 diff --git 
a/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml b/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml deleted file mode 100644 index 2a0d4fd06..000000000 --- a/controllers/testdata/declarative/case_7/expected/created/scheduled-workflow_deployment.yaml +++ /dev/null @@ -1,65 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-scheduledworkflow-testdsp7 - namespace: default - labels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 -spec: - selector: - matchLabels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - template: - metadata: - annotations: - cluster-autoscaler.kubernetes.io/safe-to-evict: "true" - labels: - app: ds-pipeline-scheduledworkflow-testdsp7 - component: data-science-pipelines - dspa: testdsp7 - spec: - containers: - - env: - - name: NAMESPACE - value: "default" - - name: CRON_SCHEDULE_TIMEZONE - value: "EST" - - name: EXECUTIONTYPE - value: PipelineRun - image: scheduledworkflow:test7 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-scheduledworkflow - command: - - controller - - "--logtostderr=true" - - "--namespace=default" - livenessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 30 - periodSeconds: 5 - timeoutSeconds: 2 - readinessProbe: - exec: - command: - - test - - -x - - controller - initialDelaySeconds: 3 - periodSeconds: 5 - timeoutSeconds: 2 - resources: - requests: - cpu: 1235m - memory: 1Gi - limits: - cpu: 2526m - memory: 5Gi - serviceAccountName: ds-pipeline-scheduledworkflow-testdsp7 diff --git a/controllers/testdata/declarative/case_8/config.yaml b/controllers/testdata/declarative/case_8/config.yaml deleted file mode 100644 index c868b4025..000000000 --- a/controllers/testdata/declarative/case_8/config.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# When a minimal DSPA is deployed -Images: - ApiServer: api-server:test8 - Artifact: artifact-manager:test8 - PersistentAgent: persistenceagent:test8 - ScheduledWorkflow: scheduledworkflow:test8 - Cache: ubi-minimal:test8 - MoveResultsImage: busybox:test8 - MlPipelineUI: frontend:test8 - MariaDB: mariadb:test8 - Minio: minio:test8 - OAuthProxy: oauth-proxy:test8 -ImagesV2: - Argo: - ApiServer: api-server:test8 - ArgoLauncherImage: argolauncherimage:test8 - ArgoDriverImage: argodriverimage:test8 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml b/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml deleted file mode 100644 index 66a002060..000000000 --- a/controllers/testdata/declarative/case_8/expected/created/apiserver_deployment.yaml +++ /dev/null @@ -1,219 +0,0 @@ -apiVersion: apps/v1 -kind: Deployment -metadata: - name: ds-pipeline-testdsp8 - namespace: default - labels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 -spec: - selector: - matchLabels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - template: - metadata: - labels: - app: ds-pipeline-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - spec: - containers: - - env: - - name: POD_NAMESPACE - value: "default" - - name: DBCONFIG_USER - value: "mlpipeline" - - name: DBCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" 
- name: "ds-pipeline-db-testdsp8" - - name: DBCONFIG_DBNAME - value: "mlpipeline" - - name: DBCONFIG_HOST - value: "mariadb-testdsp8.default.svc.cluster.local" - - name: DBCONFIG_PORT - value: "3306" - - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_NAME - value: dsp-trusted-ca-testdsp8 - - name: ARTIFACT_COPY_STEP_CABUNDLE_CONFIGMAP_KEY - value: testcabundleconfigmapkey8.crt - - name: ARTIFACT_COPY_STEP_CABUNDLE_MOUNTPATH - value: /dsp-custom-certs - - name: SSL_CERT_DIR - value: "/dsp-custom-certs:/etc/ssl/certs:/etc/pki/tls/certs" - - name: AUTO_UPDATE_PIPELINE_DEFAULT_VERSION - value: "true" - - name: DBCONFIG_CONMAXLIFETIMESEC - value: "120" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST - value: "ds-pipeline-visualizationserver" - - name: ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT - value: "8888" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRET - value: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_CREDENTIALSACCESSKEYKEY - value: "accesskey" - - name: OBJECTSTORECONFIG_CREDENTIALSSECRETKEYKEY - value: "secretkey" - - name: DEFAULTPIPELINERUNNERSERVICEACCOUNT - value: "pipeline-runner-testdsp8" - - name: OBJECTSTORECONFIG_BUCKETNAME - value: "mlpipeline" - - name: OBJECTSTORECONFIG_ACCESSKEY - valueFrom: - secretKeyRef: - key: "accesskey" - name: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_SECRETACCESSKEY - valueFrom: - secretKeyRef: - key: "secretkey" - name: "ds-pipeline-s3-testdsp8" - - name: OBJECTSTORECONFIG_SECURE - value: "false" - - name: MINIO_SERVICE_SERVICE_HOST - value: "minio-testdsp8.default.svc.cluster.local" - - name: MINIO_SERVICE_SERVICE_PORT - value: "9000" - - name: V2_LAUNCHER_IMAGE - value: "argolauncherimage:test8" - - name: V2_DRIVER_IMAGE - value: "argodriverimage:test8" - - name: METADATA_GRPC_SERVICE_SERVICE_HOST - value: "ds-pipeline-metadata-grpc-testdsp8.default.svc.cluster.local" - - name: METADATA_GRPC_SERVICE_SERVICE_PORT - value: "8080" - - name: ML_PIPELINE_SERVICE_HOST - value: ds-pipeline-testdsp8.default.svc.cluster.local - - name: ML_PIPELINE_SERVICE_PORT_GRPC - value: "8887" - - name: SIGNED_URL_EXPIRY_TIME_SECONDS - value: "60" - - name: ML_PIPELINE_TLS_ENABLED - value: "true" - - name: METADATA_TLS_ENABLED - value: "true" - - name: EXECUTIONTYPE - value: Workflow - - name: DB_DRIVER_NAME - value: mysql - - name: DBCONFIG_MYSQLCONFIG_USER - value: mlpipeline - - name: DBCONFIG_MYSQLCONFIG_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp8" - - name: DBCONFIG_MYSQLCONFIG_DBNAME - value: "mlpipeline" - - name: DBCONFIG_MYSQLCONFIG_HOST - value: "mariadb-testdsp8.default.svc.cluster.local" - - name: DBCONFIG_MYSQLCONFIG_PORT - value: "3306" - image: api-server:test8 - # imagePullPolicy: default - https://kubernetes.io/docs/concepts/containers/images/#imagepullpolicy-defaulting - name: ds-pipeline-api-server - command: ['/bin/apiserver'] - args: - - --config=/config - - -logtostderr=true - - --tlsCertPath=/etc/tls/private/tls.crt - - --tlsCertKeyPath=/etc/tls/private/tls.key - ports: - - containerPort: 8888 - name: http - protocol: TCP - - containerPort: 8887 - name: grpc - protocol: TCP - livenessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - readinessProbe: - httpGet: - path: /apis/v1beta1/healthz - port: http - scheme: HTTPS - resources: - requests: - cpu: 250m - memory: 500Mi - limits: - cpu: 500m - memory: 1Gi - volumeMounts: - - name: server-config - mountPath: /config/config.json - subPath: config.json - - mountPath: /etc/tls/private - name: 
proxy-tls - - name: ca-bundle - mountPath: /dsp-custom-certs - - name: oauth-proxy - args: - - --https-address=:8443 - - --provider=openshift - - --openshift-service-account=ds-pipeline-testdsp8 - - --upstream=https://ds-pipeline-testdsp8.default.svc.cluster.local:8888 - - --upstream-ca=/var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt - - --tls-cert=/etc/tls/private/tls.crt - - --tls-key=/etc/tls/private/tls.key - - --cookie-secret=SECRET - - '--openshift-delegate-urls={"/": {"group":"route.openshift.io","resource":"routes","verb":"get","name":"ds-pipeline-testdsp8","namespace":"default"}}' - - '--openshift-sar={"namespace":"default","resource":"routes","resourceName":"ds-pipeline-testdsp8","verb":"get","resourceAPIGroup":"route.openshift.io"}' - - --skip-auth-regex='(^/metrics|^/apis/v1beta1/healthz)' - image: oauth-proxy:test8 - ports: - - containerPort: 8443 - name: oauth - protocol: TCP - livenessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 30 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /oauth/healthz - port: oauth - scheme: HTTPS - initialDelaySeconds: 5 - timeoutSeconds: 1 - periodSeconds: 5 - successThreshold: 1 - failureThreshold: 3 - resources: - limits: - cpu: 100m - memory: 256Mi - requests: - cpu: 100m - memory: 256Mi - volumeMounts: - - mountPath: /etc/tls/private - name: proxy-tls - volumes: - - name: proxy-tls - secret: - secretName: ds-pipelines-proxy-tls-testdsp8 - defaultMode: 420 - - name: server-config - configMap: - name: ds-pipeline-server-config-testdsp8 - defaultMode: 420 - - name: ca-bundle - configMap: - name: dsp-trusted-ca-testdsp8 - defaultMode: 420 - serviceAccountName: ds-pipeline-testdsp8 diff --git a/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml b/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml deleted file mode 100644 index 7bfa6c0d3..000000000 --- a/controllers/testdata/declarative/case_8/expected/created/configmap_artifact_script.yaml +++ /dev/null @@ -1,42 +0,0 @@ -apiVersion: v1 -data: - artifact_script: |- - #!/usr/bin/env sh - push_artifact() { - workspace_dir=$(echo $(context.taskRun.name) | sed -e "s/$(context.pipeline.name)-//g") - workspace_dest=/workspace/${workspace_dir}/artifacts/$(context.pipelineRun.name)/$(context.taskRun.name) - artifact_name=$(basename $2) - - aws_cp() { - - aws s3 --endpoint http://minio-testdsp8.default.svc.cluster.local:9000 --ca-bundle /dsp-custom-certs/testcabundleconfigmapkey8.crt cp $1.tgz s3://mlpipeline/artifacts/$PIPELINERUN/$PIPELINETASK/$1.tgz - - } - - if [ -f "$workspace_dest/$artifact_name" ]; then - echo sending to: ${workspace_dest}/${artifact_name} - tar -cvzf $1.tgz -C ${workspace_dest} ${artifact_name} - aws_cp $1 - elif [ -f "$2" ]; then - tar -cvzf $1.tgz -C $(dirname $2) ${artifact_name} - aws_cp $1 - else - echo "$2 file does not exist. 
Skip artifact tracking for $1" - fi - } - push_log() { - cat /var/log/containers/$PODNAME*$NAMESPACE*step-main*.log > step-main.log - push_artifact main-log step-main.log - } - strip_eof() { - if [ -f "$2" ]; then - awk 'NF' $2 | head -c -1 > $1_temp_save && cp $1_temp_save $2 - fi - } -kind: ConfigMap -metadata: - name: ds-pipeline-artifact-script-testdsp8 - namespace: default - labels: - app: ds-pipeline-testdsp5 - component: data-science-pipelines diff --git a/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml b/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml deleted file mode 100644 index 9a0b5a11b..000000000 --- a/controllers/testdata/declarative/case_8/expected/created/mariadb_deployment.yaml +++ /dev/null @@ -1,97 +0,0 @@ ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: mariadb-testdsp8 - namespace: default - labels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 -spec: - strategy: - type: Recreate # Need this since backing PVC is ReadWriteOnce, which creates resource lock condition in default Rolling strategy - selector: - matchLabels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - template: - metadata: - labels: - app: mariadb-testdsp8 - component: data-science-pipelines - dspa: testdsp8 - spec: - containers: - - name: mariadb - image: mariadb:test8 - ports: - - containerPort: 3306 - protocol: TCP - readinessProbe: - exec: - command: - - /bin/sh - - "-i" - - "-c" - - >- - MYSQL_PWD=$MYSQL_PASSWORD mysql -h 127.0.0.1 -u $MYSQL_USER -D - $MYSQL_DATABASE -e 'SELECT 1' - failureThreshold: 3 - initialDelaySeconds: 5 - periodSeconds: 10 - successThreshold: 1 - timeoutSeconds: 1 - livenessProbe: - failureThreshold: 3 - initialDelaySeconds: 30 - periodSeconds: 10 - successThreshold: 1 - tcpSocket: - port: 3306 - timeoutSeconds: 1 - env: - - name: MYSQL_USER - value: "mlpipeline" - - name: MYSQL_PASSWORD - valueFrom: - secretKeyRef: - key: "password" - name: "ds-pipeline-db-testdsp8" - - name: MYSQL_DATABASE - value: "mlpipeline" - - name: MYSQL_ALLOW_EMPTY_PASSWORD - value: "true" - resources: - requests: - cpu: 300m - memory: 800Mi - limits: - cpu: "1" - memory: 1Gi - volumeMounts: - - name: mariadb-persistent-storage - mountPath: /var/lib/mysql - - name: mariadb-tls - mountPath: /.mariadb/certs - - name: mariadb-tls-config - mountPath: /etc/my.cnf.d/mariadb-tls-config.cnf - subPath: mariadb-tls-config.cnf - volumes: - - name: mariadb-persistent-storage - persistentVolumeClaim: - claimName: mariadb-testdsp8 - - name: mariadb-tls - secret: - secretName: ds-pipelines-mariadb-tls-testdsp8 - items: - - key: tls.crt - path: tls.crt - - key: tls.key - path: tls.key - defaultMode: 420 - - name: mariadb-tls-config - configMap: - name: ds-pipelines-mariadb-tls-config-testdsp8 - defaultMode: 420 diff --git a/controllers/testdata/declarative/case_9/config.yaml b/controllers/testdata/declarative/case_9/config.yaml deleted file mode 100644 index dbcd4d0d9..000000000 --- a/controllers/testdata/declarative/case_9/config.yaml +++ /dev/null @@ -1,14 +0,0 @@ -Images: - MlPipelineUI: frontend:test9 - MariaDB: mariadb:test9 - Minio: minio:test9 - OAuthProxy: oauth-proxy:test9 -ImagesV2: - Argo: - ApiServer: api-server:test9 - ArgoLauncherImage: argolauncherimage:test9 - ArgoDriverImage: argodriverimage:test9 - PersistentAgent: persistenceagent:test9 -DSPO: - ApiServer: - IncludeOwnerReference: false diff --git a/controllers/testutil/equalities.go 
diff --git a/controllers/testutil/equalities.go b/controllers/testutil/equalities.go
index 336922e39..bc0a9a557 100644
--- a/controllers/testutil/equalities.go
+++ b/controllers/testutil/equalities.go
@@ -17,6 +17,7 @@ limitations under the License.
 package testutil
 
 import (
+	"errors"
 	"fmt"
 	"strings"
 
@@ -168,7 +169,7 @@ func deploymentsAreEqual(expected, actual *unstructured.Unstructured) (bool, err
 	}
 
 	if len(expectedDep.Spec.Template.Spec.Containers) != len(actualDep.Spec.Template.Spec.Containers) {
-		return false, notEqualMsg("Containers")
+		return false, notEqualMsg("Container lengths")
 	}
 	for i := range expectedDep.Spec.Template.Spec.Containers {
 		expectedContainer := expectedDep.Spec.Template.Spec.Containers[i]
@@ -224,5 +225,5 @@ func notDeeplyEqualMsg(value string, diff []string) error {
 	for _, d := range diff {
 		errStr += fmt.Sprintln("\t" + d)
 	}
-	return fmt.Errorf(errStr)
+	return errors.New(errStr)
 }
diff --git a/controllers/testutil/util.go b/controllers/testutil/util.go
index f5d1c962c..7bfd74841 100644
--- a/controllers/testutil/util.go
+++ b/controllers/testutil/util.go
@@ -210,8 +210,9 @@ func GenerateDeclarativeTestCases(t *testing.T) []Case {
 func CreateEmptyDSPA() *dspav1alpha1.DataSciencePipelinesApplication {
 	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
 		Spec: dspav1alpha1.DSPASpec{
+			PodToPodTLS:       boolPtr(false),
 			APIServer:         &dspav1alpha1.APIServer{Deploy: false},
-			MLMD:              &dspav1alpha1.MLMD{Deploy: false},
+			MLMD:              &dspav1alpha1.MLMD{Deploy: true}, // MLMD is required
 			PersistenceAgent:  &dspav1alpha1.PersistenceAgent{Deploy: false},
 			ScheduledWorkflow: &dspav1alpha1.ScheduledWorkflow{Deploy: false},
 			MlPipelineUI: &dspav1alpha1.MlPipelineUI{
diff --git a/controllers/workflow_controller_test.go b/controllers/workflow_controller_test.go
index f8b0abd56..957f115d7 100644
--- a/controllers/workflow_controller_test.go
+++ b/controllers/workflow_controller_test.go
@@ -33,7 +33,8 @@ func TestDeployWorkflowController(t *testing.T) {
 	// Construct DSPASpec with deployed WorkflowController
 	dspa := &dspav1alpha1.DataSciencePipelinesApplication{
 		Spec: dspav1alpha1.DSPASpec{
-			APIServer: &dspav1alpha1.APIServer{},
+			PodToPodTLS: boolPtr(false),
+			APIServer:   &dspav1alpha1.APIServer{},
 			WorkflowController: &dspav1alpha1.WorkflowController{
 				Deploy: true,
 			},
@@ -43,6 +44,7 @@ func TestDeployWorkflowController(t *testing.T) {
 					Deploy: true,
 				},
 			},
+			MLMD: &dspav1alpha1.MLMD{Deploy: true},
 			ObjectStorage: &dspav1alpha1.ObjectStorage{
 				DisableHealthCheck: false,
 				Minio: &dspav1alpha1.Minio{
diff --git a/main.go b/main.go
index 324b1225e..b7622e8f6 100644
--- a/main.go
+++ b/main.go
@@ -81,7 +81,7 @@ func initConfig(configPath string) error {
 
 	for _, c := range config.GetConfigRequiredFields() {
 		if !viper.IsSet(c) {
-			return fmt.Errorf(fmt.Sprintf("Missing required field in config: %s", c))
+			return fmt.Errorf("missing required field in config: %s", c)
 		}
 	}
 
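A note on the two error-construction fixes above (in notDeeplyEqualMsg and initConfig): passing a runtime-built string to fmt.Errorf as the format argument makes any '%' in it get parsed as a formatting verb, and newer go vet flags the non-constant format string; errors.New treats the message as opaque text. A minimal standalone sketch, not part of this patch (the diff message value is made up for illustration):

package main

import (
	"errors"
	"fmt"
)

func main() {
	// A dynamically built message that happens to contain '%' (hypothetical value).
	diff := "Container lengths not equal: expected cpu limit 1, got 50%"

	// Before: the dynamic string is used as a format string, so the trailing
	// '%' is parsed as a malformed verb and the message is mangled; go vet
	// also warns about a non-constant format string here.
	bad := fmt.Errorf(diff)
	fmt.Println(bad) // ...expected cpu limit 1, got 50%!(NOVERB)

	// After: errors.New leaves the message untouched.
	good := errors.New(diff)
	fmt.Println(good) // ...expected cpu limit 1, got 50%

	// fmt.Errorf remains the right tool when the format string is a constant,
	// as in the initConfig change above.
	fmt.Println(fmt.Errorf("missing required field in config: %s", "Images"))
}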
"ds-pipelines-metadata-grpc", - "IMAGES_MLMDWRITER": "ds-pipelines-metadata-writer", -} - -QUAY_REPOS_V2 = { - "IMAGES_DSPO": "data-science-pipelines-operator", - "V2_LAUNCHER_IMAGE": "ds-pipelines-launcher", - "V2_DRIVER_IMAGE": "ds-pipelines-driver", - "IMAGESV2_ARGO_APISERVER": "ds-pipelines-api-server", - "IMAGESV2_ARGO_PERSISTENCEAGENT": "ds-pipelines-persistenceagent", - "IMAGESV2_ARGO_SCHEDULEDWORKFLOW": "ds-pipelines-scheduledworkflow", + "IMAGES_LAUNCHER": "ds-pipelines-launcher", + "IMAGES_DRIVER": "ds-pipelines-driver", } TAGGED_REPOS = { - "IMAGESV2_ARGO_WORKFLOWCONTROLLER" : { + "IMAGES_ARGO_WORKFLOWCONTROLLER" : { "TAG": "3.3.10-upstream", "REPO": "ds-pipelines-argo-workflowcontroller" }, - "IMAGESV2_ARGO_ARGOEXEC" : { + "IMAGES_ARGO_EXEC" : { "TAG": "3.3.10-upstream", "REPO": "ds-pipelines-argo-argoexec" }, - "IMAGESV2_ARGO_MLMDGRPC": { + "IMAGES_MLMDGRPC": { "TAG": "main-94ae1e9", "REPO": "mlmd-grpc-server" }, } STATIC_REPOS = { - "IMAGESV2_ARGO_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb", + "IMAGES_MLMDENVOY": "registry.redhat.io/openshift-service-mesh/proxyv2-rhel8@sha256:a744c1b386fd5e4f94e43543e829df1bfdd1b564137917372a11da06872f4bcb", "IMAGES_MARIADB": "registry.redhat.io/rhel8/mariadb-103@sha256:3d30992e60774f887c4e7959c81b0c41b0d82d042250b3b56f05ab67fd4cdee1", "IMAGES_OAUTHPROXY": "registry.redhat.io/openshift4/ose-oauth-proxy@sha256:ab112105ac37352a2a4916a39d6736f5db6ab4c29bad4467de8d613e80e9bb33", - "IMAGES_CACHE": "registry.redhat.io/ubi8/ubi-minimal@sha256:5d2d4d4dbec470f8ffb679915e2a8ae25ad754cd9193fa966deee1ecb7b3ee00", - "IMAGES_MOVERESULTSIMAGE": "registry.redhat.io/ubi8/ubi-micro@sha256:396baed3d689157d96aa7d8988fdfea7eb36684c8335eb391cf1952573e689c1", } OTHER_OPTIONS = { @@ -116,8 +103,7 @@ def generate_params(args): env_var_lines = [] - fetch_images(QUAY_REPOS_V1, overrides, env_var_lines, quay_org, V1_TAG) - fetch_images(QUAY_REPOS_V2, overrides, env_var_lines, quay_org, tag) + fetch_images(QUAY_REPOS, overrides, env_var_lines, quay_org, tag) for image in TAGGED_REPOS: target_repo = {image: TAGGED_REPOS[image]["REPO"]} target_tag = TAGGED_REPOS[image]["TAG"]