diff --git a/.travis.yml b/.travis.yml
index 1c646db8227..d86e882c566 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -66,13 +66,15 @@ matrix:
env: TOXENV=py35
script: &1
# Additional dependencies
- - pip3 install jsonschema==3.0.1
+ - pip3 install coverage coveralls jsonschema==3.0.1
# DSL tests
- cd $TRAVIS_BUILD_DIR/sdk/python
- - python3 setup.py install
- - python3 tests/dsl/main.py
- - python3 tests/compiler/main.py
- - $TRAVIS_BUILD_DIR/sdk/python/tests/run_tests.sh
+ - python3 setup.py develop
 + - cd $TRAVIS_BUILD_DIR # Change to the repo root so coveralls resolves source paths correctly
+ - coverage run --source=kfp --append sdk/python/tests/dsl/main.py
+ - coverage run --source=kfp --append sdk/python/tests/compiler/main.py
+ - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python/tests --top-level-directory=sdk/python
+ - coveralls
# Visualization test
- cd $TRAVIS_BUILD_DIR/backend/src/apiserver/visualization
diff --git a/backend/src/apiserver/client_manager.go b/backend/src/apiserver/client_manager.go
index c56459e6515..5c241a90691 100644
--- a/backend/src/apiserver/client_manager.go
+++ b/backend/src/apiserver/client_manager.go
@@ -46,6 +46,9 @@ const (
podNamespace = "POD_NAMESPACE"
initConnectionTimeout = "InitConnectionTimeout"
+
+ visualizationServiceHost = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST"
+ visualizationServicePort = "ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT"
)
// Container for all service clients
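The two constants added above match the environment variables Kubernetes injects automatically for a Service named `ml-pipeline-visualizationserver`: the service name is upper-cased, dashes become underscores, and `_SERVICE_HOST`/`_SERVICE_PORT` are appended. A minimal sketch of that lookup, assuming plain `os.LookupEnv` in place of the API server's `getStringConfig` helper (the fallback values here are hypothetical, for running outside a cluster):

```go
package main

import (
	"fmt"
	"os"
)

// lookupEnv is a hypothetical stand-in for getStringConfig: read a
// Kubernetes-injected service discovery variable, falling back to a
// default for local development.
func lookupEnv(key, fallback string) string {
	if v, ok := os.LookupEnv(key); ok {
		return v
	}
	return fallback
}

func main() {
	host := lookupEnv("ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_HOST", "localhost")
	port := lookupEnv("ML_PIPELINE_VISUALIZATIONSERVER_SERVICE_PORT", "8888")
	// NewVisualizationServer (below) builds the same URL shape.
	fmt.Printf("http://%s:%s\n", host, port)
}
```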
diff --git a/backend/src/apiserver/main.go b/backend/src/apiserver/main.go
index 45da3d9a70d..e7bac0ed9e3 100644
--- a/backend/src/apiserver/main.go
+++ b/backend/src/apiserver/main.go
@@ -79,7 +79,14 @@ func startRpcServer(resourceManager *resource.ResourceManager) {
api.RegisterRunServiceServer(s, server.NewRunServer(resourceManager))
api.RegisterJobServiceServer(s, server.NewJobServer(resourceManager))
api.RegisterReportServiceServer(s, server.NewReportServer(resourceManager))
- api.RegisterVisualizationServiceServer(s, server.NewVisualizationServer(resourceManager))
+ api.RegisterVisualizationServiceServer(
+ s,
+ server.NewVisualizationServer(
+ resourceManager,
+ getStringConfig(visualizationServiceHost),
+ getStringConfig(visualizationServicePort),
+ getDurationConfig(initConnectionTimeout),
+ ))
// Register reflection service on gRPC server.
reflection.Register(s)
diff --git a/backend/src/apiserver/server/BUILD.bazel b/backend/src/apiserver/server/BUILD.bazel
index 83c307b0b8a..3ffdcea7483 100644
--- a/backend/src/apiserver/server/BUILD.bazel
+++ b/backend/src/apiserver/server/BUILD.bazel
@@ -27,6 +27,7 @@ go_library(
"//backend/src/common/util:go_default_library",
"//backend/src/crd/pkg/apis/scheduledworkflow/v1beta1:go_default_library",
"@com_github_argoproj_argo//pkg/apis/workflow/v1alpha1:go_default_library",
+ "@com_github_cenkalti_backoff//:go_default_library",
"@com_github_golang_glog//:go_default_library",
"@com_github_golang_protobuf//jsonpb:go_default_library_gen",
"@com_github_robfig_cron//:go_default_library",
diff --git a/backend/src/apiserver/server/visualization_server.go b/backend/src/apiserver/server/visualization_server.go
index bcaf519f95a..9c4006dcbc3 100644
--- a/backend/src/apiserver/server/visualization_server.go
+++ b/backend/src/apiserver/server/visualization_server.go
@@ -4,6 +4,8 @@ import (
"context"
"encoding/json"
"fmt"
+ "github.com/cenkalti/backoff"
+ "github.com/golang/glog"
"github.com/kubeflow/pipelines/backend/api/go_client"
"github.com/kubeflow/pipelines/backend/src/apiserver/resource"
"github.com/kubeflow/pipelines/backend/src/common/util"
@@ -11,11 +13,13 @@ import (
"net/http"
"net/url"
"strings"
+ "time"
)
type VisualizationServer struct {
- resourceManager *resource.ResourceManager
- serviceURL string
+ resourceManager *resource.ResourceManager
+ serviceURL string
+ isServiceAvailable bool
}
func (s *VisualizationServer) CreateVisualization(ctx context.Context, request *go_client.CreateVisualizationRequest) (*go_client.Visualization, error) {
@@ -56,6 +60,12 @@ func (s *VisualizationServer) validateCreateVisualizationRequest(request *go_cli
// service to generate HTML visualizations from a request.
// It returns the generated HTML as a string and any error that is encountered.
func (s *VisualizationServer) generateVisualizationFromRequest(request *go_client.CreateVisualizationRequest) ([]byte, error) {
+ if !s.isServiceAvailable {
+ return nil, util.NewInternalServerError(
+ fmt.Errorf("service not available"),
+ "Service not available",
+ )
+ }
visualizationType := strings.ToLower(go_client.Visualization_Type_name[int32(request.Visualization.Type)])
arguments := fmt.Sprintf("--type %s --source %s --arguments '%s'", visualizationType, request.Visualization.Source, request.Visualization.Arguments)
resp, err := http.PostForm(s.serviceURL, url.Values{"arguments": {arguments}})
@@ -73,6 +83,33 @@ func (s *VisualizationServer) generateVisualizationFromRequest(request *go_clien
return body, nil
}
-func NewVisualizationServer(resourceManager *resource.ResourceManager) *VisualizationServer {
- return &VisualizationServer{resourceManager: resourceManager, serviceURL: "http://visualization-service.kubeflow"}
+func isVisualizationServiceAlive(serviceURL string, initConnectionTimeout time.Duration) bool {
+ var operation = func() error {
 + resp, err := http.Get(serviceURL)
 + if err != nil {
 + glog.Error("Unable to verify visualization service is alive!", err)
 + return err
 + }
 + // Close the probe response to avoid leaking the connection.
 + resp.Body.Close()
 + return nil
+ }
+ b := backoff.NewExponentialBackOff()
+ b.MaxElapsedTime = initConnectionTimeout
+ err := backoff.Retry(operation, b)
+ return err == nil
+}
+
+func NewVisualizationServer(resourceManager *resource.ResourceManager, serviceHost string, servicePort string, initConnectionTimeout time.Duration) *VisualizationServer {
+ serviceURL := fmt.Sprintf("http://%s:%s", serviceHost, servicePort)
+ isServiceAvailable := isVisualizationServiceAlive(serviceURL, initConnectionTimeout)
+ return &VisualizationServer{
+ resourceManager: resourceManager,
+ serviceURL: serviceURL,
 + // TODO: isServiceAvailable records whether the visualization service
 + // responded during startup. When it is false, requests fail fast
 + // instead of timing out against an unreachable service, which works
 + // around current instabilities with the service. Remove this flag
 + // once the visualization service is deemed stable.
+ isServiceAvailable: isServiceAvailable,
+ }
}
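`isVisualizationServiceAlive` above wraps a single `http.Get` probe in `backoff.Retry`, which re-runs the operation with exponentially growing waits and returns nil on the first success, or the last error once `MaxElapsedTime` is spent. A self-contained sketch of the same pattern with a hypothetical endpoint and timeout (with cenkalti/backoff's defaults, the interval starts around 500ms and grows by a factor of 1.5, with jitter):

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/cenkalti/backoff"
)

func main() {
	serviceURL := "http://localhost:8888" // hypothetical service endpoint
	operation := func() error {
		resp, err := http.Get(serviceURL)
		if err != nil {
			return err // a non-nil error triggers another, delayed attempt
		}
		resp.Body.Close()
		return nil // nil stops the retry loop
	}
	b := backoff.NewExponentialBackOff()
	// Give up once the total time spent retrying exceeds this budget.
	b.MaxElapsedTime = 30 * time.Second
	if err := backoff.Retry(operation, b); err != nil {
		fmt.Println("service unavailable:", err)
		return
	}
	fmt.Println("service is alive")
}
```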
diff --git a/backend/src/apiserver/server/visualization_server_test.go b/backend/src/apiserver/server/visualization_server_test.go
index 5edfd664841..82c39e27012 100644
--- a/backend/src/apiserver/server/visualization_server_test.go
+++ b/backend/src/apiserver/server/visualization_server_test.go
@@ -11,7 +11,10 @@ import (
func TestValidateCreateVisualizationRequest(t *testing.T) {
clients, manager, _ := initWithExperiment(t)
defer clients.Close()
- server := NewVisualizationServer(manager)
+ server := &VisualizationServer{
+ resourceManager: manager,
+ isServiceAvailable: false,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "gs://ml-pipeline/roc/data.csv",
@@ -27,7 +30,10 @@ func TestValidateCreateVisualizationRequest(t *testing.T) {
func TestValidateCreateVisualizationRequest_ArgumentsAreEmpty(t *testing.T) {
clients, manager, _ := initWithExperiment(t)
defer clients.Close()
- server := NewVisualizationServer(manager)
+ server := &VisualizationServer{
+ resourceManager: manager,
+ isServiceAvailable: false,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "gs://ml-pipeline/roc/data.csv",
@@ -43,7 +49,10 @@ func TestValidateCreateVisualizationRequest_ArgumentsAreEmpty(t *testing.T) {
func TestValidateCreateVisualizationRequest_SourceIsEmpty(t *testing.T) {
clients, manager, _ := initWithExperiment(t)
defer clients.Close()
- server := NewVisualizationServer(manager)
+ server := &VisualizationServer{
+ resourceManager: manager,
+ isServiceAvailable: false,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "",
@@ -59,7 +68,10 @@ func TestValidateCreateVisualizationRequest_SourceIsEmpty(t *testing.T) {
func TestValidateCreateVisualizationRequest_ArgumentsNotValidJSON(t *testing.T) {
clients, manager, _ := initWithExperiment(t)
defer clients.Close()
- server := NewVisualizationServer(manager)
+ server := &VisualizationServer{
+ resourceManager: manager,
+ isServiceAvailable: false,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "gs://ml-pipeline/roc/data.csv",
@@ -80,7 +92,11 @@ func TestGenerateVisualization(t *testing.T) {
rw.Write([]byte("roc_curve"))
}))
defer httpServer.Close()
- server := &VisualizationServer{resourceManager: manager, serviceURL: httpServer.URL}
+ server := &VisualizationServer{
+ resourceManager: manager,
+ serviceURL: httpServer.URL,
+ isServiceAvailable: true,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "gs://ml-pipeline/roc/data.csv",
@@ -90,8 +106,34 @@ func TestGenerateVisualization(t *testing.T) {
Visualization: visualization,
}
body, err := server.generateVisualizationFromRequest(request)
- assert.Equal(t, []byte("roc_curve"), body)
assert.Nil(t, err)
+ assert.Equal(t, []byte("roc_curve"), body)
+}
+
+func TestGenerateVisualization_ServiceNotAvailableError(t *testing.T) {
+ clients, manager, _ := initWithExperiment(t)
+ defer clients.Close()
+ httpServer := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
+ assert.Equal(t, "/", req.URL.String())
+ rw.WriteHeader(500)
+ }))
+ defer httpServer.Close()
+ server := &VisualizationServer{
+ resourceManager: manager,
+ serviceURL: httpServer.URL,
+ isServiceAvailable: false,
+ }
+ visualization := &go_client.Visualization{
+ Type: go_client.Visualization_ROC_CURVE,
+ Source: "gs://ml-pipeline/roc/data.csv",
+ Arguments: "{}",
+ }
+ request := &go_client.CreateVisualizationRequest{
+ Visualization: visualization,
+ }
+ body, err := server.generateVisualizationFromRequest(request)
+ assert.Nil(t, body)
+ assert.Equal(t, "InternalServerError: Service not available: service not available", err.Error())
}
func TestGenerateVisualization_ServerError(t *testing.T) {
@@ -102,7 +144,11 @@ func TestGenerateVisualization_ServerError(t *testing.T) {
rw.WriteHeader(500)
}))
defer httpServer.Close()
- server := &VisualizationServer{resourceManager: manager, serviceURL: httpServer.URL}
+ server := &VisualizationServer{
+ resourceManager: manager,
+ serviceURL: httpServer.URL,
+ isServiceAvailable: true,
+ }
visualization := &go_client.Visualization{
Type: go_client.Visualization_ROC_CURVE,
Source: "gs://ml-pipeline/roc/data.csv",
diff --git a/components/dataflow/predict/component.yaml b/components/dataflow/predict/component.yaml
index 30f2098b97a..6fae9083fb3 100644
--- a/components/dataflow/predict/component.yaml
+++ b/components/dataflow/predict/component.yaml
@@ -15,7 +15,7 @@ outputs:
- {name: Predictions dir, type: GCSPath, description: 'GCS or local directory.'} #Will contain prediction_results-* and schema.json files; TODO: Split outputs and replace dir with single file # type: {GCSPath: {path_type: Directory}}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tf-predict:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/predict.py]
args: [
--data, {inputValue: Data file pattern},
diff --git a/components/dataflow/tfdv/component.yaml b/components/dataflow/tfdv/component.yaml
index f5fffcffb08..b72a358fa73 100644
--- a/components/dataflow/tfdv/component.yaml
+++ b/components/dataflow/tfdv/component.yaml
@@ -18,7 +18,7 @@ outputs:
- {name: Validation result, type: String, description: Indicates whether anomalies were detected or not.}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfdv:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/validate.py]
args: [
--csv-data-for-inference, {inputValue: Inference data},
diff --git a/components/dataflow/tfma/component.yaml b/components/dataflow/tfma/component.yaml
index 975fa6108b9..bc865d6d4c0 100644
--- a/components/dataflow/tfma/component.yaml
+++ b/components/dataflow/tfma/component.yaml
@@ -17,7 +17,7 @@ outputs:
- {name: Analysis results dir, type: GCSPath, description: GCS or local directory where the analysis results should be written.} # type: {GCSPath: {path_type: Directory}}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tfma:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/model_analysis.py]
args: [
--model, {inputValue: Model},
diff --git a/components/dataflow/tft/component.yaml b/components/dataflow/tft/component.yaml
index bffa00681ca..c77ea6886ba 100644
--- a/components/dataflow/tft/component.yaml
+++ b/components/dataflow/tft/component.yaml
@@ -12,7 +12,7 @@ outputs:
- {name: Transformed data dir, type: GCSPath} # type: {GCSPath: {path_type: Directory}}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-dataflow-tft:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/transform.py]
args: [
--train, {inputValue: Training data file pattern},
diff --git a/components/gcp/bigquery/query/README.md b/components/gcp/bigquery/query/README.md
index 9eee74dd4b9..8e9331f013a 100644
--- a/components/gcp/bigquery/query/README.md
+++ b/components/gcp/bigquery/query/README.md
@@ -89,7 +89,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
bigquery_query_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/bigquery/query/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')
help(bigquery_query_op)
```
diff --git a/components/gcp/bigquery/query/component.yaml b/components/gcp/bigquery/query/component.yaml
index 0d8842e3efd..07f29549b40 100644
--- a/components/gcp/bigquery/query/component.yaml
+++ b/components/gcp/bigquery/query/component.yaml
@@ -57,7 +57,7 @@ outputs:
type: GCSPath
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.bigquery, query,
--query, {inputValue: query},
diff --git a/components/gcp/bigquery/query/sample.ipynb b/components/gcp/bigquery/query/sample.ipynb
index 1c13c105792..98b6e75ad07 100644
--- a/components/gcp/bigquery/query/sample.ipynb
+++ b/components/gcp/bigquery/query/sample.ipynb
@@ -108,7 +108,7 @@
"import kfp.components as comp\n",
"\n",
"bigquery_query_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/bigquery/query/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')\n",
"help(bigquery_query_op)"
]
},
diff --git a/components/gcp/container/component_sdk/python/setup.py b/components/gcp/container/component_sdk/python/setup.py
index c79a908620f..32947a265b5 100644
--- a/components/gcp/container/component_sdk/python/setup.py
+++ b/components/gcp/container/component_sdk/python/setup.py
@@ -15,7 +15,7 @@
from setuptools import setup
PACKAGE_NAME = 'kfp-component'
-VERSION = '0.1.26'
+VERSION = '0.1.27'
setup(
name=PACKAGE_NAME,
diff --git a/components/gcp/dataflow/launch_python/README.md b/components/gcp/dataflow/launch_python/README.md
index a66c46cd2e8..8279bd18ed7 100644
--- a/components/gcp/dataflow/launch_python/README.md
+++ b/components/gcp/dataflow/launch_python/README.md
@@ -77,7 +77,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataflow_python_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_python/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_python/component.yaml')
help(dataflow_python_op)
```
diff --git a/components/gcp/dataflow/launch_python/component.yaml b/components/gcp/dataflow/launch_python/component.yaml
index 55756e88a3f..920408aa48d 100644
--- a/components/gcp/dataflow/launch_python/component.yaml
+++ b/components/gcp/dataflow/launch_python/component.yaml
@@ -51,7 +51,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataflow, launch_python,
--python_file_path, {inputValue: python_file_path},
diff --git a/components/gcp/dataflow/launch_python/sample.ipynb b/components/gcp/dataflow/launch_python/sample.ipynb
index b2e12b732b0..db277bae853 100644
--- a/components/gcp/dataflow/launch_python/sample.ipynb
+++ b/components/gcp/dataflow/launch_python/sample.ipynb
@@ -95,7 +95,7 @@
"import kfp.components as comp\n",
"\n",
"dataflow_python_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_python/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_python/component.yaml')\n",
"help(dataflow_python_op)"
]
},
diff --git a/components/gcp/dataflow/launch_template/README.md b/components/gcp/dataflow/launch_template/README.md
index 09416415497..92a0f9771f1 100644
--- a/components/gcp/dataflow/launch_template/README.md
+++ b/components/gcp/dataflow/launch_template/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataflow_template_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_template/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_template/component.yaml')
help(dataflow_template_op)
```
diff --git a/components/gcp/dataflow/launch_template/component.yaml b/components/gcp/dataflow/launch_template/component.yaml
index 3817bac9401..09f29d0fab9 100644
--- a/components/gcp/dataflow/launch_template/component.yaml
+++ b/components/gcp/dataflow/launch_template/component.yaml
@@ -61,7 +61,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataflow, launch_template,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataflow/launch_template/sample.ipynb b/components/gcp/dataflow/launch_template/sample.ipynb
index e14ce393054..e5dd3eb1c0c 100644
--- a/components/gcp/dataflow/launch_template/sample.ipynb
+++ b/components/gcp/dataflow/launch_template/sample.ipynb
@@ -85,7 +85,7 @@
"import kfp.components as comp\n",
"\n",
"dataflow_template_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_template/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_template/component.yaml')\n",
"help(dataflow_template_op)"
]
},
diff --git a/components/gcp/dataproc/create_cluster/README.md b/components/gcp/dataproc/create_cluster/README.md
index 05a5cfeacc3..94ceb6d7d85 100644
--- a/components/gcp/dataproc/create_cluster/README.md
+++ b/components/gcp/dataproc/create_cluster/README.md
@@ -74,7 +74,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_create_cluster_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/create_cluster/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/create_cluster/component.yaml')
help(dataproc_create_cluster_op)
```
diff --git a/components/gcp/dataproc/create_cluster/component.yaml b/components/gcp/dataproc/create_cluster/component.yaml
index f517d1631e1..c5cad047204 100644
--- a/components/gcp/dataproc/create_cluster/component.yaml
+++ b/components/gcp/dataproc/create_cluster/component.yaml
@@ -68,7 +68,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, create_cluster,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/create_cluster/sample.ipynb b/components/gcp/dataproc/create_cluster/sample.ipynb
index bacbb130e5c..5c22e8731b8 100644
--- a/components/gcp/dataproc/create_cluster/sample.ipynb
+++ b/components/gcp/dataproc/create_cluster/sample.ipynb
@@ -92,7 +92,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_create_cluster_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/create_cluster/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/create_cluster/component.yaml')\n",
"help(dataproc_create_cluster_op)"
]
},
diff --git a/components/gcp/dataproc/delete_cluster/README.md b/components/gcp/dataproc/delete_cluster/README.md
index 88876748544..c9167e81ea9 100644
--- a/components/gcp/dataproc/delete_cluster/README.md
+++ b/components/gcp/dataproc/delete_cluster/README.md
@@ -56,7 +56,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_delete_cluster_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/delete_cluster/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/delete_cluster/component.yaml')
help(dataproc_delete_cluster_op)
```
diff --git a/components/gcp/dataproc/delete_cluster/component.yaml b/components/gcp/dataproc/delete_cluster/component.yaml
index d16d2a58a80..15654a8f24e 100644
--- a/components/gcp/dataproc/delete_cluster/component.yaml
+++ b/components/gcp/dataproc/delete_cluster/component.yaml
@@ -36,7 +36,7 @@ inputs:
type: Integer
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, delete_cluster,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/delete_cluster/sample.ipynb b/components/gcp/dataproc/delete_cluster/sample.ipynb
index 57c28ae1db8..db162b43eb4 100644
--- a/components/gcp/dataproc/delete_cluster/sample.ipynb
+++ b/components/gcp/dataproc/delete_cluster/sample.ipynb
@@ -75,7 +75,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_delete_cluster_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/delete_cluster/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/delete_cluster/component.yaml')\n",
"help(dataproc_delete_cluster_op)"
]
},
diff --git a/components/gcp/dataproc/submit_hadoop_job/README.md b/components/gcp/dataproc/submit_hadoop_job/README.md
index 8f5679ccca9..23487186c16 100644
--- a/components/gcp/dataproc/submit_hadoop_job/README.md
+++ b/components/gcp/dataproc/submit_hadoop_job/README.md
@@ -72,7 +72,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_hadoop_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hadoop_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hadoop_job/component.yaml')
help(dataproc_submit_hadoop_job_op)
```
diff --git a/components/gcp/dataproc/submit_hadoop_job/component.yaml b/components/gcp/dataproc/submit_hadoop_job/component.yaml
index d62609a1bea..8e73c7a4d7a 100644
--- a/components/gcp/dataproc/submit_hadoop_job/component.yaml
+++ b/components/gcp/dataproc/submit_hadoop_job/component.yaml
@@ -78,7 +78,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_hadoop_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
index e559182f9cd..7f3bdf98e2d 100644
--- a/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_hadoop_job/sample.ipynb
@@ -90,7 +90,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_hadoop_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hadoop_job/component.yaml')\n",
"help(dataproc_submit_hadoop_job_op)"
]
},
diff --git a/components/gcp/dataproc/submit_hive_job/README.md b/components/gcp/dataproc/submit_hive_job/README.md
index e7b85bfe39a..5d55816e3b0 100644
--- a/components/gcp/dataproc/submit_hive_job/README.md
+++ b/components/gcp/dataproc/submit_hive_job/README.md
@@ -63,7 +63,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_hive_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hive_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hive_job/component.yaml')
help(dataproc_submit_hive_job_op)
```
diff --git a/components/gcp/dataproc/submit_hive_job/component.yaml b/components/gcp/dataproc/submit_hive_job/component.yaml
index 54250a22e5a..3140de61005 100644
--- a/components/gcp/dataproc/submit_hive_job/component.yaml
+++ b/components/gcp/dataproc/submit_hive_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_hive_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_hive_job/sample.ipynb b/components/gcp/dataproc/submit_hive_job/sample.ipynb
index 5cc72ab39d9..81c8f46225e 100644
--- a/components/gcp/dataproc/submit_hive_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_hive_job/sample.ipynb
@@ -81,7 +81,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_hive_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_hive_job/component.yaml')\n",
"help(dataproc_submit_hive_job_op)"
]
},
diff --git a/components/gcp/dataproc/submit_pig_job/README.md b/components/gcp/dataproc/submit_pig_job/README.md
index d5816ccfea1..10800068599 100644
--- a/components/gcp/dataproc/submit_pig_job/README.md
+++ b/components/gcp/dataproc/submit_pig_job/README.md
@@ -66,7 +66,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_pig_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pig_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pig_job/component.yaml')
help(dataproc_submit_pig_job_op)
```
diff --git a/components/gcp/dataproc/submit_pig_job/component.yaml b/components/gcp/dataproc/submit_pig_job/component.yaml
index 26a4fb9f998..52e314afe1c 100644
--- a/components/gcp/dataproc/submit_pig_job/component.yaml
+++ b/components/gcp/dataproc/submit_pig_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_pig_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pig_job/sample.ipynb b/components/gcp/dataproc/submit_pig_job/sample.ipynb
index 225a5a0785a..000c798d5d8 100644
--- a/components/gcp/dataproc/submit_pig_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_pig_job/sample.ipynb
@@ -84,7 +84,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_pig_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pig_job/component.yaml')\n",
"help(dataproc_submit_pig_job_op)"
]
},
diff --git a/components/gcp/dataproc/submit_pyspark_job/README.md b/components/gcp/dataproc/submit_pyspark_job/README.md
index c868fa211ed..ab550f7a7c1 100644
--- a/components/gcp/dataproc/submit_pyspark_job/README.md
+++ b/components/gcp/dataproc/submit_pyspark_job/README.md
@@ -67,7 +67,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_pyspark_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pyspark_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pyspark_job/component.yaml')
help(dataproc_submit_pyspark_job_op)
```
diff --git a/components/gcp/dataproc/submit_pyspark_job/component.yaml b/components/gcp/dataproc/submit_pyspark_job/component.yaml
index d3ab4f617ec..30036ea18c0 100644
--- a/components/gcp/dataproc/submit_pyspark_job/component.yaml
+++ b/components/gcp/dataproc/submit_pyspark_job/component.yaml
@@ -67,7 +67,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_pyspark_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
index 9396fd73723..0500ee12f9b 100644
--- a/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_pyspark_job/sample.ipynb
@@ -86,7 +86,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_pyspark_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_pyspark_job/component.yaml')\n",
"help(dataproc_submit_pyspark_job_op)"
]
},
diff --git a/components/gcp/dataproc/submit_spark_job/README.md b/components/gcp/dataproc/submit_spark_job/README.md
index bfef2e4c770..742384c807a 100644
--- a/components/gcp/dataproc/submit_spark_job/README.md
+++ b/components/gcp/dataproc/submit_spark_job/README.md
@@ -80,7 +80,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_spark_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_spark_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_spark_job/component.yaml')
help(dataproc_submit_spark_job_op)
```
diff --git a/components/gcp/dataproc/submit_spark_job/component.yaml b/components/gcp/dataproc/submit_spark_job/component.yaml
index 825e6769952..d585b97e0af 100644
--- a/components/gcp/dataproc/submit_spark_job/component.yaml
+++ b/components/gcp/dataproc/submit_spark_job/component.yaml
@@ -74,7 +74,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_spark_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_spark_job/sample.ipynb b/components/gcp/dataproc/submit_spark_job/sample.ipynb
index c9c74130ef3..b4344e3fe16 100644
--- a/components/gcp/dataproc/submit_spark_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_spark_job/sample.ipynb
@@ -99,7 +99,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_spark_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_spark_job/component.yaml')\n",
"help(dataproc_submit_spark_job_op)"
]
},
diff --git a/components/gcp/dataproc/submit_sparksql_job/README.md b/components/gcp/dataproc/submit_sparksql_job/README.md
index 2e89031b42d..f8ad799e700 100644
--- a/components/gcp/dataproc/submit_sparksql_job/README.md
+++ b/components/gcp/dataproc/submit_sparksql_job/README.md
@@ -62,7 +62,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
dataproc_submit_sparksql_job_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_sparksql_job/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_sparksql_job/component.yaml')
help(dataproc_submit_sparksql_job_op)
```
diff --git a/components/gcp/dataproc/submit_sparksql_job/component.yaml b/components/gcp/dataproc/submit_sparksql_job/component.yaml
index 1a9b30535a7..119e8592d35 100644
--- a/components/gcp/dataproc/submit_sparksql_job/component.yaml
+++ b/components/gcp/dataproc/submit_sparksql_job/component.yaml
@@ -73,7 +73,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.dataproc, submit_sparksql_job,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
index d70ac4d8019..bfdd7e83a08 100644
--- a/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
+++ b/components/gcp/dataproc/submit_sparksql_job/sample.ipynb
@@ -81,7 +81,7 @@
"import kfp.components as comp\n",
"\n",
"dataproc_submit_sparksql_job_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataproc/submit_sparksql_job/component.yaml')\n",
"help(dataproc_submit_sparksql_job_op)"
]
},
diff --git a/components/gcp/ml_engine/batch_predict/README.md b/components/gcp/ml_engine/batch_predict/README.md
index f98c6250b4f..6020191e4fd 100644
--- a/components/gcp/ml_engine/batch_predict/README.md
+++ b/components/gcp/ml_engine/batch_predict/README.md
@@ -94,7 +94,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
mlengine_batch_predict_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/batch_predict/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/batch_predict/component.yaml')
help(mlengine_batch_predict_op)
```
diff --git a/components/gcp/ml_engine/batch_predict/component.yaml b/components/gcp/ml_engine/batch_predict/component.yaml
index 8b4a4c7bfdf..0a3bbf1fb36 100644
--- a/components/gcp/ml_engine/batch_predict/component.yaml
+++ b/components/gcp/ml_engine/batch_predict/component.yaml
@@ -67,7 +67,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.ml_engine, batch_predict,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/batch_predict/sample.ipynb b/components/gcp/ml_engine/batch_predict/sample.ipynb
index ab20f707fec..e8de2f10656 100644
--- a/components/gcp/ml_engine/batch_predict/sample.ipynb
+++ b/components/gcp/ml_engine/batch_predict/sample.ipynb
@@ -112,7 +112,7 @@
"import kfp.components as comp\n",
"\n",
"mlengine_batch_predict_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/batch_predict/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/batch_predict/component.yaml')\n",
"help(mlengine_batch_predict_op)"
]
},
diff --git a/components/gcp/ml_engine/deploy/README.md b/components/gcp/ml_engine/deploy/README.md
index 22db172dafa..c41fabe8bd8 100644
--- a/components/gcp/ml_engine/deploy/README.md
+++ b/components/gcp/ml_engine/deploy/README.md
@@ -110,7 +110,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
mlengine_deploy_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/deploy/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')
help(mlengine_deploy_op)
```
diff --git a/components/gcp/ml_engine/deploy/component.yaml b/components/gcp/ml_engine/deploy/component.yaml
index b0a9d86ff65..5b4280cefdb 100644
--- a/components/gcp/ml_engine/deploy/component.yaml
+++ b/components/gcp/ml_engine/deploy/component.yaml
@@ -93,7 +93,7 @@ outputs:
type: String
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.ml_engine, deploy,
--model_uri, {inputValue: model_uri},
diff --git a/components/gcp/ml_engine/deploy/sample.ipynb b/components/gcp/ml_engine/deploy/sample.ipynb
index df6799c041f..2037bd2416b 100644
--- a/components/gcp/ml_engine/deploy/sample.ipynb
+++ b/components/gcp/ml_engine/deploy/sample.ipynb
@@ -128,7 +128,7 @@
"import kfp.components as comp\n",
"\n",
"mlengine_deploy_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/deploy/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')\n",
"help(mlengine_deploy_op)"
]
},
diff --git a/components/gcp/ml_engine/train/README.md b/components/gcp/ml_engine/train/README.md
index 3e148b7b98c..2e843f38f0a 100644
--- a/components/gcp/ml_engine/train/README.md
+++ b/components/gcp/ml_engine/train/README.md
@@ -86,7 +86,7 @@ KFP_PACKAGE = 'https://storage.googleapis.com/ml-pipeline/release/0.1.14/kfp.tar
import kfp.components as comp
mlengine_train_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/train/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')
help(mlengine_train_op)
```
diff --git a/components/gcp/ml_engine/train/component.yaml b/components/gcp/ml_engine/train/component.yaml
index 5102db4e96d..0f6dd63adcc 100644
--- a/components/gcp/ml_engine/train/component.yaml
+++ b/components/gcp/ml_engine/train/component.yaml
@@ -101,7 +101,7 @@ outputs:
type: GCSPath
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-gcp:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-gcp:151c5349f13bea9d626c988563c04c0a86210c21
args: [
kfp_component.google.ml_engine, train,
--project_id, {inputValue: project_id},
diff --git a/components/gcp/ml_engine/train/sample.ipynb b/components/gcp/ml_engine/train/sample.ipynb
index c9ed4a1cace..c36b84602aa 100644
--- a/components/gcp/ml_engine/train/sample.ipynb
+++ b/components/gcp/ml_engine/train/sample.ipynb
@@ -104,7 +104,7 @@
"import kfp.components as comp\n",
"\n",
"mlengine_train_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/train/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')\n",
"help(mlengine_train_op)"
]
},
diff --git a/components/kubeflow/deployer/component.yaml b/components/kubeflow/deployer/component.yaml
index 50916b1ced5..583615f702f 100644
--- a/components/kubeflow/deployer/component.yaml
+++ b/components/kubeflow/deployer/component.yaml
@@ -11,7 +11,7 @@ inputs:
# - {name: Endpoint URI, type: Serving URI, description: 'URI of the deployed prediction service.'}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21
command: [/bin/deploy.sh]
args: [
--model-export-path, {inputValue: Model dir},
diff --git a/components/kubeflow/dnntrainer/component.yaml b/components/kubeflow/dnntrainer/component.yaml
index a1ff245bd8c..70b5a25a9ee 100644
--- a/components/kubeflow/dnntrainer/component.yaml
+++ b/components/kubeflow/dnntrainer/component.yaml
@@ -15,7 +15,7 @@ outputs:
- {name: Training output dir, type: GCSPath, description: 'GCS or local directory.'} # type: {GCSPath: {path_type: Directory}}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, -m, trainer.task]
args: [
--transformed-data-dir, {inputValue: Transformed data dir},
diff --git a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
index 4d5b7febbbf..be10cdc8115 100644
--- a/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
+++ b/components/kubeflow/launcher/kubeflow_tfjob_launcher_op.py
@@ -17,7 +17,7 @@
def kubeflow_tfjob_launcher_op(container_image, command, number_of_workers: int, number_of_parameter_servers: int, tfjob_timeout_minutes: int, output_dir=None, step_name='TFJob-launcher'):
return dsl.ContainerOp(
name = step_name,
- image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf:151c5349f13bea9d626c988563c04c0a86210c21',
arguments = [
'--workers', number_of_workers,
'--pss', number_of_parameter_servers,
diff --git a/components/kubeflow/launcher/src/train.template.yaml b/components/kubeflow/launcher/src/train.template.yaml
index fd851389080..4e8d660e72b 100644
--- a/components/kubeflow/launcher/src/train.template.yaml
+++ b/components/kubeflow/launcher/src/train.template.yaml
@@ -26,7 +26,7 @@ spec:
spec:
containers:
- name: tensorflow
- image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
command:
- python
- -m
@@ -49,7 +49,7 @@ spec:
spec:
containers:
- name: tensorflow
- image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
command:
- python
- -m
@@ -72,7 +72,7 @@ spec:
spec:
containers:
- name: tensorflow
- image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-kubeflow-tf-trainer:151c5349f13bea9d626c988563c04c0a86210c21
command:
- python
- -m
diff --git a/components/local/confusion_matrix/component.yaml b/components/local/confusion_matrix/component.yaml
index 4e9a098a7ac..12d21d15e10 100644
--- a/components/local/confusion_matrix/component.yaml
+++ b/components/local/confusion_matrix/component.yaml
@@ -9,7 +9,7 @@ inputs:
# - {name: Metrics, type: Metrics}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/confusion_matrix.py]
args: [
--predictions, {inputValue: Predictions},
diff --git a/components/local/roc/component.yaml b/components/local/roc/component.yaml
index 54d09b81ca3..4110ca34ee0 100644
--- a/components/local/roc/component.yaml
+++ b/components/local/roc/component.yaml
@@ -11,7 +11,7 @@ inputs:
# - {name: Metrics, type: Metrics}
implementation:
container:
- image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:0517114dc2b365a4a6d95424af6157ead774eff3
+ image: gcr.io/ml-pipeline/ml-pipeline-local-confusion-matrix:151c5349f13bea9d626c988563c04c0a86210c21
command: [python2, /ml/roc.py]
args: [
--predictions, {inputValue: Predictions dir},
diff --git a/frontend/mock-backend/fixed-data.ts b/frontend/mock-backend/fixed-data.ts
index 3cf00842b75..8f5e273091d 100644
--- a/frontend/mock-backend/fixed-data.ts
+++ b/frontend/mock-backend/fixed-data.ts
@@ -17,6 +17,7 @@ import helloWorldWithStepsRun from './hello-world-with-steps-runtime';
import coinflipRun from './mock-coinflip-runtime';
import errorRun from './mock-error-runtime';
import xgboostRun from './mock-xgboost-runtime';
+import jsonRun from './json-runtime';
import { ApiExperiment } from '../src/apis/experiment';
import { ApiJob } from '../src/apis/job';
import { ApiPipeline } from '../src/apis/pipeline';
@@ -347,6 +348,40 @@ const runs: ApiRunDetail[] = [
status: 'Error',
},
},
+ {
+ pipeline_runtime: {
+ workflow_manifest: JSON.stringify(jsonRun),
+ },
+ run: {
+ created_at: new Date('2018-05-17T21:58:23.000Z'),
+ description: 'A simple run with json input',
+ id: '183ac01f-dc26-4ebf-b817-7b3f96fdc3ac',
+ metrics: [{
+ format: RunMetricFormat.PERCENTAGE,
+ name: 'accuracy',
+ node_id: 'json-12abc',
+ number_value: 0.5423,
+ }],
+ name: 'json-12abc',
+ pipeline_spec: {
+ parameters: [
+ { name: 'paramName1', value: 'paramVal1' },
+ { name: 'paramName2', value: 'paramVal2' },
+ ],
+ pipeline_id: pipelines[2].id,
+ pipeline_name: pipelines[2].name,
+ },
+ resource_references: [{
+ key: {
+ id: 'a4d4f8c6-ce9c-4200-a92e-c48ec759b733',
+ type: ApiResourceType.EXPERIMENT,
+ },
+ relationship: ApiRelationship.OWNER,
+ }],
+ scheduled_at: new Date('2018-05-17T21:58:23.000Z'),
+ status: 'Running',
+ }
+ },
{
pipeline_runtime: {
workflow_manifest: JSON.stringify(helloWorldRun),
diff --git a/frontend/mock-backend/json-runtime.ts b/frontend/mock-backend/json-runtime.ts
new file mode 100644
index 00000000000..f4f8351e107
--- /dev/null
+++ b/frontend/mock-backend/json-runtime.ts
@@ -0,0 +1,99 @@
+// Copyright 2019 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// tslint:disable:object-literal-sort-keys
+export default {
+ metadata: {
+ name: 'json-12abc',
+ generateName: 'json-',
+ namespace: 'default',
+ selfLink: '/apis/argoproj.io/v1alpha1/namespaces/default/workflows/json-7sm94',
+ uid: 'dfc82af5-c5cb-43b1-822b-52487cb872d2',
+ resourceVersion: '1322',
+ creationTimestamp: '2018-06-06T00:04:49Z',
+ labels: {
+ 'workflows.argoproj.io/completed': 'true',
+ 'workflows.argoproj.io/phase': 'Succeeded'
+ }
+ },
+ spec: {
+ templates: [
+ {
+ name: 'whalesay1',
+ inputs: {},
+ outputs: {},
+ metadata: {},
+ container: {
+ name: '',
+ image: 'docker/whalesay:latest',
+ command: [
+ 'cowsay'
+ ],
+ args: [
+ '{{workflow.parameters.message}}'
+ ],
+ resources: {}
+ }
+ }
+ ],
+ entrypoint: 'whalesay1',
+ arguments: {
+ parameters: [
+ {
+ name: 'message',
+ value: 'hello world'
+ }
+ ]
+ }
+ },
+ status: {
+ phase: 'Succeeded',
+ startedAt: '2018-06-06T00:04:49Z',
+ finishedAt: '2018-06-06T00:05:23Z',
+ nodes: {
+ 'json-12abc': {
+ id: 'json-12abc',
+ name: 'json-12abc',
+ displayName: 'json-12abc',
+ type: 'Pod',
+ templateName: 'whalesay1',
+ phase: 'Succeeded',
+ startedAt: '2018-06-06T00:04:49Z',
+ finishedAt: '2018-06-06T00:05:23Z',
+ inputs: {
+ parameters: [
+ {
+ name: 'JSON Data',
+ value: JSON.stringify({
+ 'string1': 'a',
+ 'string2': 'b',
+ 'number1': 1,
+ 'number2': 2.2,
+ 'object': {
+ 'string': 'a',
+ 'number': 2
+ },
+ 'array': [
+ 'a',
+ 'b',
+ 'c'
+ ]
+ })
+ }
+ ]
+ }
+ }
+ }
+ }
+};
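This mock mirrors the shape of an Argo Workflow manifest, which the backend hands to the frontend as the JSON string in `pipeline_runtime.workflow_manifest`. A hedged Go sketch of reading node input parameters out of such a manifest with ad-hoc structs (only the field subset used by the mock above, not the real Argo types):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// workflow declares just enough of the Argo manifest to reach
// status.nodes[*].inputs.parameters.
type workflow struct {
	Metadata struct {
		Name string `json:"name"`
	} `json:"metadata"`
	Status struct {
		Phase string `json:"phase"`
		Nodes map[string]struct {
			DisplayName string `json:"displayName"`
			Inputs      struct {
				Parameters []struct {
					Name  string `json:"name"`
					Value string `json:"value"`
				} `json:"parameters"`
			} `json:"inputs"`
		} `json:"nodes"`
	} `json:"status"`
}

func main() {
	manifest := `{"metadata":{"name":"json-12abc"},"status":{"phase":"Succeeded",
		"nodes":{"json-12abc":{"displayName":"json-12abc","inputs":{"parameters":
		[{"name":"JSON Data","value":"{\"string1\":\"a\"}"}]}}}}}`
	var wf workflow
	if err := json.Unmarshal([]byte(manifest), &wf); err != nil {
		panic(err)
	}
	for id, node := range wf.Status.Nodes {
		for _, p := range node.Inputs.Parameters {
			fmt.Printf("%s: %s = %s\n", id, p.Name, p.Value)
		}
	}
}
```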
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index ae282897763..d6eec28a170 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -160,12 +160,6 @@
"integrity": "sha512-fOM/Jhv51iyugY7KOBZz2ThfT1gwvsGCfWxpLpZDgkGjpEO4Le9cld07OdskikLjDUQJ43dzDaVRSFwQlpdqVg==",
"dev": true
},
- "@types/codemirror": {
- "version": "0.0.60",
- "resolved": "https://registry.npmjs.org/@types/codemirror/-/codemirror-0.0.60.tgz",
- "integrity": "sha512-fhuwXmN81SeDz/33kFruCcLgEOAz+asgWNkuszUlm5W1YIjn/qEVU+dwV1qC12rJ6duLBTWWumlDaxtckZpFXw==",
- "dev": true
- },
"@types/connect": {
"version": "3.4.32",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.32.tgz",
@@ -3024,11 +3018,6 @@
"resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
"integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c="
},
- "codemirror": {
- "version": "5.42.2",
- "resolved": "https://registry.npmjs.org/codemirror/-/codemirror-5.42.2.tgz",
- "integrity": "sha512-Tkv6im39VuhduFMsDA3MlXcC/kKas3Z0PI1/8N88QvFQbtOeiiwnfFJE4juGyC8/a4sb1BSxQlzsil8XLQdxRw=="
- },
"collection-visit": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz",
@@ -11264,11 +11253,6 @@
}
}
},
- "react-codemirror2": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/react-codemirror2/-/react-codemirror2-5.1.0.tgz",
- "integrity": "sha512-Cksbgbviuf2mJfMyrKmcu7ycK6zX/ukuQO8dvRZdFWqATf5joalhjFc6etnBdGCcPA2LbhIwz+OPnQxLN/j1Fw=="
- },
"react-dev-utils": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-5.0.3.tgz",
diff --git a/frontend/package.json b/frontend/package.json
index 168405881c7..4c35fedc052 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -7,7 +7,6 @@
"@material-ui/icons": "^3.0.1",
"@types/js-yaml": "^3.11.2",
"brace": "^0.11.1",
- "codemirror": "^5.40.2",
"d3": "^5.7.0",
"d3-dsv": "^1.0.10",
"dagre": "^0.8.2",
@@ -20,7 +19,6 @@
"re-resizable": "^4.9.0",
"react": "^16.7.0",
"react-ace": "^7.0.2",
- "react-codemirror2": "^5.1.0",
"react-dom": "^16.5.2",
"react-dropzone": "^5.1.0",
"react-router-dom": "^4.3.1",
@@ -53,7 +51,6 @@
"vr-test": "ts-node -O '{\"module\": \"commonjs\"}' backstop.ts"
},
"devDependencies": {
- "@types/codemirror": "0.0.60",
"@types/d3": "^5.0.0",
"@types/d3-dsv": "^1.0.33",
"@types/dagre": "^0.7.40",
diff --git a/frontend/src/components/DetailsTable.tsx b/frontend/src/components/DetailsTable.tsx
index 4d0ce73b8f4..3f43b955c5b 100644
--- a/frontend/src/components/DetailsTable.tsx
+++ b/frontend/src/components/DetailsTable.tsx
@@ -14,17 +14,16 @@
* limitations under the License.
*/
-import 'codemirror/lib/codemirror.css';
-import 'codemirror/mode/javascript/javascript.js';
import * as React from 'react';
import { stylesheet } from 'typestyle';
import { color, spacing, commonCss } from '../Css';
-import { UnControlled as CodeMirror } from 'react-codemirror2';
+import Editor from './Editor';
+import 'brace';
+import 'brace/ext/language_tools';
+import 'brace/mode/json';
+import 'brace/theme/github';
export const css = stylesheet({
- codeMirrorGutter: {
- width: 6,
- },
key: {
color: color.strong,
flex: '0 0 50%',
@@ -57,25 +56,16 @@ export default (props: DetailsTableProps) => {
try {
const parsedJson = JSON.parse(f[1]);
// Nulls, booleans, strings, and numbers can all be parsed as JSON, but we don't care
- // about rendering those using CodeMirror. Note that `typeof null` returns 'object'
+ // about rendering those. Note that `typeof null` returns 'object'
if (parsedJson === null || typeof parsedJson !== 'object') {
throw new Error('Parsed JSON was neither an array nor an object. Using default renderer');
}
return (
              <span className={css.key}>{f[0]}</span>
-              <CodeMirror
-                value={JSON.stringify(parsedJson, null, 2)}
-                editorDidMount={editor => editor.refresh()}
-                options={{
-                  gutters: [css.codeMirrorGutter],
-                  lineWrapping: true,
-                  mode: 'application/json',
-                  readOnly: true,
-                  theme: 'default',
-                }}
-              />
+              <Editor width='100%' minLines={3} maxLines={20} mode='json'
+                theme='github' highlightActiveLine={true} readOnly={true}
+                showGutter={false} value={JSON.stringify(parsedJson, null, 2)} />
);
} catch (err) {
diff --git a/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap b/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap
index 6ba6b42d71f..fd6e8640512 100644
--- a/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap
+++ b/frontend/src/components/__snapshots__/DetailsTable.test.tsx.snap
@@ -115,21 +115,44 @@ exports[`DetailsTable does render arrays as JSON 1`] = `
>
key
-
@@ -148,21 +171,44 @@ exports[`DetailsTable does render arrays as JSON 2`] = `
>
key
-
@@ -208,25 +254,48 @@ exports[`DetailsTable shows key and JSON value in row 1`] = `
>
key
-
@@ -312,25 +381,48 @@ exports[`DetailsTable shows keys and values for multiple rows 1`] = `
>
key2
-
key5
-
diff --git a/frontend/src/pages/PipelineDetails.tsx b/frontend/src/pages/PipelineDetails.tsx
--- a/frontend/src/pages/PipelineDetails.tsx
+++ b/frontend/src/pages/PipelineDetails.tsx
{
}
{selectedTab === 1 && !!templateString &&
- editor.refresh()}
- options={{
- lineNumbers: true,
- lineWrapping: true,
- mode: 'text/yaml',
- readOnly: true,
- theme: 'default',
- }}
+ height='100%' width='100%' mode='yaml' theme='github'
+ editorProps={{ $blockScrolling: true }}
+ readOnly={true} highlightActiveLine={true} showGutter={true}
/>
}
diff --git a/frontend/src/pages/__snapshots__/PipelineDetails.test.tsx.snap b/frontend/src/pages/__snapshots__/PipelineDetails.test.tsx.snap
index f086dcaa941..f31dbf4e947 100644
--- a/frontend/src/pages/__snapshots__/PipelineDetails.test.tsx.snap
+++ b/frontend/src/pages/__snapshots__/PipelineDetails.test.tsx.snap
@@ -780,18 +780,48 @@ exports[`PipelineDetails shows pipeline source code when config tab is clicked 1
-
diff --git a/manifests/kustomize/README.md b/manifests/kustomize/README.md
index 2db18c1a68f..72cb24f31fd 100644
--- a/manifests/kustomize/README.md
+++ b/manifests/kustomize/README.md
@@ -5,7 +5,7 @@ This folder contains Kubeflow Pipelines Kustomize manifests for a light weight d
If you want to skip any customization, you can deploy Kubeflow Pipelines by running
```
-export PIPELINE_VERSION=master
+export PIPELINE_VERSION=0.1.26
kubectl apply -f https://raw.githubusercontent.com/kubeflow/pipelines/$PIPELINE_VERSION/manifests/kustomize/namespaced-install.yaml
```
@@ -29,15 +29,12 @@ To get latest kubectl, visit [here](https://kubernetes.io/docs/tasks/tools/insta
## Change deploy namespace
To deploy Kubeflow Pipelines in namespace FOO,
-- Edit [kustomization.yaml](namespaced-install/kustomization.yaml) namespace section to FOO
+- Edit the namespace section of [kustomization.yaml](env/dev/kustomization.yaml) to FOO
- Then run
```
-kubectl kustomize . | kubectl apply -f -
+kubectl kustomize env/dev | kubectl apply -f -
```
-## Reinstall with existing data
-TODO
-
## Disable the public endpoint
By default, the deployment installs an [inverting proxy agent](https://github.com/google/inverting-proxy) that exposes a public URL. If you want to skip installing it,
- Comment out the proxy component in the [kustomization.yaml](base/kustomization.yaml).
@@ -62,7 +59,7 @@ kubectl delete -f https://raw.githubusercontent.com/kubeflow/pipelines/$PIPELINE
Or if you deploy through kustomize
```
-kubectl kustomize . | kubectl delete -f -
+kubectl kustomize env/dev | kubectl delete -f -
```
# FAQ
If sample code requires a "user-gcp-sa" secret, you could create one by
diff --git a/manifests/kustomize/base/argo/kustomization.yaml b/manifests/kustomize/base/argo/kustomization.yaml
index 890cb4cb396..1a00e577709 100644
--- a/manifests/kustomize/base/argo/kustomization.yaml
+++ b/manifests/kustomize/base/argo/kustomization.yaml
@@ -2,6 +2,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
+- minio-artifact-secret.yaml
- workflow-controller-configmap.yaml
- workflow-controller-deployment.yaml
- workflow-controller-role.yaml
diff --git a/manifests/kustomize/base/minio/minio-artifact-secret.yaml b/manifests/kustomize/base/argo/minio-artifact-secret.yaml
similarity index 100%
rename from manifests/kustomize/base/minio/minio-artifact-secret.yaml
rename to manifests/kustomize/base/argo/minio-artifact-secret.yaml
diff --git a/manifests/kustomize/base/kustomization.yaml b/manifests/kustomize/base/kustomization.yaml
index adae06469f0..99fe9419597 100644
--- a/manifests/kustomize/base/kustomization.yaml
+++ b/manifests/kustomize/base/kustomization.yaml
@@ -4,18 +4,12 @@ kind: Kustomization
bases:
- argo
- crds
-- minio
-- mysql
- pipeline
- proxy
images:
- name: argoproj/workflow-controller
newTag: v2.3.0
-- name: minio/minio
- newTag: RELEASE.2018-02-09T22-40-05Z
-- name: mysql
- newTag: "5.6"
- name: gcr.io/ml-pipeline/api-server
newTag: 0.1.26
- name: gcr.io/ml-pipeline/persistenceagent
@@ -28,3 +22,5 @@ images:
newTag: 0.1.26
- name: gcr.io/ml-pipeline/inverse-proxy-agent
newTag: 0.1.26
+- name: gcr.io/ml-pipeline/visualization-server
+ newTag: 0.1.26
diff --git a/manifests/kustomize/base/pipeline/kustomization.yaml b/manifests/kustomize/base/pipeline/kustomization.yaml
index 35d5e3c1afb..0f2b82b68db 100644
--- a/manifests/kustomize/base/pipeline/kustomization.yaml
+++ b/manifests/kustomize/base/pipeline/kustomization.yaml
@@ -24,6 +24,8 @@ resources:
- ml-pipeline-viewer-crd-rolebinding.yaml
- ml-pipeline-viewer-crd-deployment.yaml
- ml-pipeline-viewer-crd-sa.yaml
+- ml-pipeline-visualization-deployment.yaml
+- ml-pipeline-visualization-service.yaml
- pipeline-runner-role.yaml
- pipeline-runner-rolebinding.yaml
- pipeline-runner-sa.yaml
\ No newline at end of file
diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml
new file mode 100644
index 00000000000..901b5db1263
--- /dev/null
+++ b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-deployment.yaml
@@ -0,0 +1,21 @@
+apiVersion: apps/v1beta2
+kind: Deployment
+metadata:
+ labels:
+ app: ml-pipeline-visualizationserver
+ name: ml-pipeline-visualizationserver
+spec:
+ selector:
+ matchLabels:
+ app: ml-pipeline-visualizationserver
+ template:
+ metadata:
+ labels:
+ app: ml-pipeline-visualizationserver
+ spec:
+ containers:
+ - image: gcr.io/ml-pipeline/visualization-server:0.1.26
+ imagePullPolicy: IfNotPresent
+ name: ml-pipeline-visualizationserver
+ ports:
+ - containerPort: 8888
\ No newline at end of file
diff --git a/manifests/kustomize/base/pipeline/ml-pipeline-visualization-service.yaml b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-service.yaml
new file mode 100644
index 00000000000..83c7dd67504
--- /dev/null
+++ b/manifests/kustomize/base/pipeline/ml-pipeline-visualization-service.yaml
@@ -0,0 +1,12 @@
+apiVersion: v1
+kind: Service
+metadata:
+ name: ml-pipeline-visualizationserver
+spec:
+ ports:
+ - name: http
+ port: 8888
+ protocol: TCP
+ targetPort: 8888
+ selector:
+ app: ml-pipeline-visualizationserver
\ No newline at end of file
diff --git a/manifests/kustomize/env/dev/kustomization.yaml b/manifests/kustomize/env/dev/kustomization.yaml
new file mode 100644
index 00000000000..339b4a6c8f8
--- /dev/null
+++ b/manifests/kustomize/env/dev/kustomization.yaml
@@ -0,0 +1,16 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+bases:
+ - ../../namespaced
+ - minio
+ - mysql
+
+# Replace with your namespace
+namespace: kubeflow
+
+images:
+ - name: mysql
+ newTag: "5.6"
+ - name: minio/minio
+ newTag: RELEASE.2018-02-09T22-40-05Z
\ No newline at end of file
diff --git a/manifests/kustomize/base/minio/kustomization.yaml b/manifests/kustomize/env/dev/minio/kustomization.yaml
similarity index 82%
rename from manifests/kustomize/base/minio/kustomization.yaml
rename to manifests/kustomize/env/dev/minio/kustomization.yaml
index 731cc557d4c..8ed66b30347 100644
--- a/manifests/kustomize/base/minio/kustomization.yaml
+++ b/manifests/kustomize/env/dev/minio/kustomization.yaml
@@ -2,7 +2,6 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
-- minio-artifact-secret.yaml
- minio-deployment.yaml
- minio-pvc.yaml
- minio-service.yaml
diff --git a/manifests/kustomize/base/minio/minio-deployment.yaml b/manifests/kustomize/env/dev/minio/minio-deployment.yaml
similarity index 90%
rename from manifests/kustomize/base/minio/minio-deployment.yaml
rename to manifests/kustomize/env/dev/minio/minio-deployment.yaml
index f7e93c22f36..a7a33eaff47 100644
--- a/manifests/kustomize/base/minio/minio-deployment.yaml
+++ b/manifests/kustomize/env/dev/minio/minio-deployment.yaml
@@ -22,7 +22,7 @@ spec:
value: minio
- name: MINIO_SECRET_KEY
value: minio123
- image: gcr.io/ml-pipeline/minio:RELEASE.2018-02-09T22-40-05Z
+ image: gcr.io/ml-pipeline/minio:RELEASE.2019-08-14T20-37-41Z
name: minio
ports:
- containerPort: 9000
diff --git a/manifests/kustomize/base/minio/minio-pvc.yaml b/manifests/kustomize/env/dev/minio/minio-pvc.yaml
similarity index 100%
rename from manifests/kustomize/base/minio/minio-pvc.yaml
rename to manifests/kustomize/env/dev/minio/minio-pvc.yaml
diff --git a/manifests/kustomize/base/minio/minio-service.yaml b/manifests/kustomize/env/dev/minio/minio-service.yaml
similarity index 100%
rename from manifests/kustomize/base/minio/minio-service.yaml
rename to manifests/kustomize/env/dev/minio/minio-service.yaml
diff --git a/manifests/kustomize/base/mysql/kustomization.yaml b/manifests/kustomize/env/dev/mysql/kustomization.yaml
similarity index 100%
rename from manifests/kustomize/base/mysql/kustomization.yaml
rename to manifests/kustomize/env/dev/mysql/kustomization.yaml
diff --git a/manifests/kustomize/base/mysql/mysql-deployment.yaml b/manifests/kustomize/env/dev/mysql/mysql-deployment.yaml
similarity index 100%
rename from manifests/kustomize/base/mysql/mysql-deployment.yaml
rename to manifests/kustomize/env/dev/mysql/mysql-deployment.yaml
diff --git a/manifests/kustomize/base/mysql/mysql-pv-claim.yaml b/manifests/kustomize/env/dev/mysql/mysql-pv-claim.yaml
similarity index 100%
rename from manifests/kustomize/base/mysql/mysql-pv-claim.yaml
rename to manifests/kustomize/env/dev/mysql/mysql-pv-claim.yaml
diff --git a/manifests/kustomize/base/mysql/mysql-service.yaml b/manifests/kustomize/env/dev/mysql/mysql-service.yaml
similarity index 100%
rename from manifests/kustomize/base/mysql/mysql-service.yaml
rename to manifests/kustomize/env/dev/mysql/mysql-service.yaml
diff --git a/manifests/kustomize/env/gcp/.gitignore b/manifests/kustomize/env/gcp/.gitignore
new file mode 100644
index 00000000000..44acf8f2d0f
--- /dev/null
+++ b/manifests/kustomize/env/gcp/.gitignore
@@ -0,0 +1,2 @@
+# Ignore the GCP service account ADC file
+application_default_credentials.json
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/kustomization.yaml b/manifests/kustomize/env/gcp/kustomization.yaml
new file mode 100644
index 00000000000..5345ff2852f
--- /dev/null
+++ b/manifests/kustomize/env/gcp/kustomization.yaml
@@ -0,0 +1,26 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+bases:
+ - ../../namespaced
+ - minio
+ - mysql
+
+# Replace with your namespace
+namespace: kubeflow
+
+patchesStrategicMerge:
+ - ml-pipeline-apiserver-deployment-patch.yaml
+
+images:
+ - name: gcr.io/cloudsql-docker/gce-proxy
+ newTag: "1.14"
+ - name: minio/minio
+ newTag: RELEASE.2019-08-14T20-37-41Z
+
+secretGenerator:
+ - name: user-gcp-sa
+ files:
+ # Create a service account key and store it as application_default_credentials.json in this folder.
+ # https://cloud.google.com/iam/docs/creating-managing-service-account-keys#creating_service_account_keys
+ - application_default_credentials.json
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/minio/kustomization.yaml b/manifests/kustomize/env/gcp/minio/kustomization.yaml
new file mode 100644
index 00000000000..db573c477db
--- /dev/null
+++ b/manifests/kustomize/env/gcp/minio/kustomization.yaml
@@ -0,0 +1,6 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+resources:
+- minio-gcs-gateway-deployment.yaml
+- minio-gcs-gateway-service.yaml
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-deployment.yaml b/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-deployment.yaml
new file mode 100644
index 00000000000..67711e43f3f
--- /dev/null
+++ b/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-deployment.yaml
@@ -0,0 +1,37 @@
+apiVersion: extensions/v1beta1
+kind: Deployment
+metadata:
+ name: minio-deployment
+spec:
+ strategy:
+ type: Recreate
+ template:
+ metadata:
+ labels:
+ app: minio
+ spec:
+ containers:
+ - name: minio
+ image: minio/minio:RELEASE.2019-08-14T20-37-41Z
+ args:
+ - gateway
+ - gcs
+ # Replace this with your own GCP project
+ - yang-experiment-6
+ env:
+ - name: MINIO_ACCESS_KEY
+ value: "minio"
+ - name: MINIO_SECRET_KEY
+ value: "minio123"
+ - name: GOOGLE_APPLICATION_CREDENTIALS
+ value: "/etc/credentials/application_default_credentials.json"
+ ports:
+ - containerPort: 9000
+ volumeMounts:
+ - name: gcp-sa-token
+ mountPath: "/etc/credentials"
+ readOnly: true
+ volumes:
+ - name: gcp-sa-token
+ secret:
+ secretName: user-gcp-sa
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-service.yaml b/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-service.yaml
new file mode 100644
index 00000000000..7dd18174965
--- /dev/null
+++ b/manifests/kustomize/env/gcp/minio/minio-gcs-gateway-service.yaml
@@ -0,0 +1,11 @@
+apiVersion: v1
+kind: Service
+metadata:
+ name: minio-service
+spec:
+ ports:
+ - port: 9000
+ targetPort: 9000
+ protocol: TCP
+ selector:
+ app: minio
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/ml-pipeline-apiserver-deployment-patch.yaml b/manifests/kustomize/env/gcp/ml-pipeline-apiserver-deployment-patch.yaml
new file mode 100644
index 00000000000..1400971e350
--- /dev/null
+++ b/manifests/kustomize/env/gcp/ml-pipeline-apiserver-deployment-patch.yaml
@@ -0,0 +1,16 @@
+apiVersion: apps/v1beta2
+kind: Deployment
+metadata:
+ name: ml-pipeline
+spec:
+ template:
+ spec:
+ containers:
+ - name: ml-pipeline-api-server
+ env:
+ - name: OBJECTSTORECONFIG_BUCKETNAME
+ # Replace with your own bucket name
+ value: 'yang-experiment-6-mlpipeline'
+ - name: DBCONFIG_PASSWORD
+ # Replace with your own CloudSQL password
+ value: '123'
diff --git a/manifests/kustomize/env/gcp/mysql/cloudsql-proxy-deployment.yaml b/manifests/kustomize/env/gcp/mysql/cloudsql-proxy-deployment.yaml
new file mode 100644
index 00000000000..d0a5bec0ccd
--- /dev/null
+++ b/manifests/kustomize/env/gcp/mysql/cloudsql-proxy-deployment.yaml
@@ -0,0 +1,42 @@
+apiVersion: extensions/v1beta1
+kind: Deployment
+metadata:
+ name: cloudsqlproxy
+spec:
+ replicas: 1
+ template:
+ metadata:
+ labels:
+ app: cloudsqlproxy
+ spec:
+ containers:
+ - image: gcr.io/cloudsql-docker/gce-proxy:1.14
+ name: cloudsqlproxy
+ command: ["/cloud_sql_proxy",
+ "-dir=/cloudsql",
+ # Replace with your own CloudSQL instance ID
+ "-instances=yang-experiment-6:us-central1:kfp-test=tcp:0.0.0.0:3306",
+ "-credential_file=/credentials/application_default_credentials.json",
+ "-term_timeout=10s"]
+ # set term_timeout if graceful handling of shutdown is required
+ # NOTE: the proxy stops accepting new connections and only waits on existing ones
+ lifecycle:
+ preStop:
+ exec:
+ # (optional) add a preStop hook so that termination is delayed
+ # this is required if your server still requires new connections (e.g., connection pools)
+ command: ['sleep', '10']
+ ports:
+ - name: port-database1
+ containerPort: 3306
+ volumeMounts:
+ - mountPath: /cloudsql
+ name: cloudsql
+ - mountPath: /credentials
+ name: gcp-sa-token
+ volumes:
+ - name: cloudsql
+ emptyDir: {}
+ - name: gcp-sa-token
+ secret:
+ secretName: user-gcp-sa
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/mysql/kustomization.yaml b/manifests/kustomize/env/gcp/mysql/kustomization.yaml
new file mode 100644
index 00000000000..704e59c336c
--- /dev/null
+++ b/manifests/kustomize/env/gcp/mysql/kustomization.yaml
@@ -0,0 +1,6 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+resources:
+- cloudsql-proxy-deployment.yaml
+- mysql-service.yaml
\ No newline at end of file
diff --git a/manifests/kustomize/env/gcp/mysql/mysql-service.yaml b/manifests/kustomize/env/gcp/mysql/mysql-service.yaml
new file mode 100644
index 00000000000..79f6e46116d
--- /dev/null
+++ b/manifests/kustomize/env/gcp/mysql/mysql-service.yaml
@@ -0,0 +1,10 @@
+apiVersion: v1
+kind: Service
+metadata:
+ name: mysql
+spec:
+ ports:
+ - port: 3306
+ targetPort: port-database1
+ selector:
+ app: cloudsqlproxy
diff --git a/manifests/kustomize/namespaced-install/README.md b/manifests/kustomize/namespaced-install/README.md
deleted file mode 100644
index 87e84672f24..00000000000
--- a/manifests/kustomize/namespaced-install/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-This directory shows how to deploy to a non-default namespace.
-To deploy to your own namespace, replace `kubeflow`.
\ No newline at end of file
diff --git a/manifests/kustomize/namespaced-install/kustomization.yaml b/manifests/kustomize/namespaced/kustomization.yaml
similarity index 94%
rename from manifests/kustomize/namespaced-install/kustomization.yaml
rename to manifests/kustomize/namespaced/kustomization.yaml
index db6f2a8c408..7f04d68c127 100644
--- a/manifests/kustomize/namespaced-install/kustomization.yaml
+++ b/manifests/kustomize/namespaced/kustomization.yaml
@@ -2,13 +2,11 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
bases:
-- ../base
+ - ../base
resources:
- namespace.yaml
-namespace: kubeflow
-
patchesStrategicMerge:
- workflow-controller-configmap.yaml
- ml-pipeline-persistenceagent-deployment-patch.yaml
diff --git a/manifests/kustomize/namespaced-install/kustomizeconfig/namespace.yaml b/manifests/kustomize/namespaced/kustomizeconfig/namespace.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/kustomizeconfig/namespace.yaml
rename to manifests/kustomize/namespaced/kustomizeconfig/namespace.yaml
diff --git a/manifests/kustomize/namespaced-install/ml-pipeline-persistenceagent-deployment-patch.yaml b/manifests/kustomize/namespaced/ml-pipeline-persistenceagent-deployment-patch.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/ml-pipeline-persistenceagent-deployment-patch.yaml
rename to manifests/kustomize/namespaced/ml-pipeline-persistenceagent-deployment-patch.yaml
diff --git a/manifests/kustomize/namespaced-install/ml-pipeline-scheduledworkflow-deployment-patch.yaml b/manifests/kustomize/namespaced/ml-pipeline-scheduledworkflow-deployment-patch.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/ml-pipeline-scheduledworkflow-deployment-patch.yaml
rename to manifests/kustomize/namespaced/ml-pipeline-scheduledworkflow-deployment-patch.yaml
diff --git a/manifests/kustomize/namespaced-install/ml-pipeline-ui-deployment-patch.yaml b/manifests/kustomize/namespaced/ml-pipeline-ui-deployment-patch.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/ml-pipeline-ui-deployment-patch.yaml
rename to manifests/kustomize/namespaced/ml-pipeline-ui-deployment-patch.yaml
diff --git a/manifests/kustomize/namespaced-install/ml-pipeline-viewer-crd-deployment-patch.yaml b/manifests/kustomize/namespaced/ml-pipeline-viewer-crd-deployment-patch.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/ml-pipeline-viewer-crd-deployment-patch.yaml
rename to manifests/kustomize/namespaced/ml-pipeline-viewer-crd-deployment-patch.yaml
diff --git a/manifests/kustomize/namespaced-install/namespace.yaml b/manifests/kustomize/namespaced/namespace.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/namespace.yaml
rename to manifests/kustomize/namespaced/namespace.yaml
diff --git a/manifests/kustomize/namespaced-install/workflow-controller-configmap.yaml b/manifests/kustomize/namespaced/workflow-controller-configmap.yaml
similarity index 100%
rename from manifests/kustomize/namespaced-install/workflow-controller-configmap.yaml
rename to manifests/kustomize/namespaced/workflow-controller-configmap.yaml
diff --git a/samples/contrib/resnet-cmle/resnet-train-pipeline.py b/samples/contrib/resnet-cmle/resnet-train-pipeline.py
index 2e01e878e10..648ee83b038 100644
--- a/samples/contrib/resnet-cmle/resnet-train-pipeline.py
+++ b/samples/contrib/resnet-cmle/resnet-train-pipeline.py
@@ -23,11 +23,11 @@
import os
dataflow_python_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/dataflow/launch_python/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/dataflow/launch_python/component.yaml')
cloudml_train_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/train/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')
cloudml_deploy_op = comp.load_component_from_url(
- 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/deploy/component.yaml')
+ 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')
def resnet_preprocess_op(project_id: 'GcpProject', output: 'GcsUri', staging_dir: 'GcsUri', train_csv: 'GcsUri[text/csv]',
diff --git a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
index 0d700cf5886..c335ca48c01 100644
--- a/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
+++ b/samples/core/ai-platform/Chicago Crime Pipeline.ipynb
@@ -112,7 +112,7 @@
"outputs": [],
"source": [
"bigquery_query_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/bigquery/query/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/bigquery/query/component.yaml')\n",
"\n",
"QUERY = \"\"\"\n",
" SELECT count(*) as count, TIMESTAMP_TRUNC(date, DAY) as day\n",
@@ -148,7 +148,7 @@
"outputs": [],
"source": [
"mlengine_train_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/train/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/train/component.yaml')\n",
"\n",
"def train(project_id,\n",
" trainer_args,\n",
@@ -186,7 +186,7 @@
"outputs": [],
"source": [
"mlengine_deploy_op = comp.load_component_from_url(\n",
- " 'https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/gcp/ml_engine/deploy/component.yaml')\n",
+ " 'https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/gcp/ml_engine/deploy/component.yaml')\n",
"\n",
"def deploy(\n",
" project_id,\n",
diff --git a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
index 16215b0df15..25e98ce2233 100644
--- a/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
+++ b/samples/core/kubeflow_tf_serving/kubeflow_tf_serving.ipynb
@@ -71,7 +71,7 @@
"EXPERIMENT_NAME = 'serving_component'\n",
"MODEL_VERSION = '1' # A number representing the version model \n",
"OUTPUT_BUCKET = 'gs://%s-serving-component' % PROJECT_NAME # A GCS bucket for asset outputs\n",
- "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:fe639f41661d8e17fcda64ff8242127620b80ba0'\n",
+ "KUBEFLOW_DEPLOYER_IMAGE = 'gcr.io/ml-pipeline/ml-pipeline-kubeflow-deployer:151c5349f13bea9d626c988563c04c0a86210c21'\n",
"MODEL_PATH = '%s/%s' % (OUTPUT_BUCKET,MODEL_NAME) \n",
"MODEL_VERSION_PATH = '%s/%s/%s' % (OUTPUT_BUCKET,MODEL_NAME,MODEL_VERSION)"
]
diff --git a/samples/core/tfx_cab_classification/tfx_cab_classification.py b/samples/core/tfx_cab_classification/tfx_cab_classification.py
index fabfc21ad65..db47320b5fb 100755
--- a/samples/core/tfx_cab_classification/tfx_cab_classification.py
+++ b/samples/core/tfx_cab_classification/tfx_cab_classification.py
@@ -22,16 +22,16 @@
platform = 'GCP'
-dataflow_tf_data_validation_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/dataflow/tfdv/component.yaml')
-dataflow_tf_transform_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/dataflow/tft/component.yaml')
-tf_train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/kubeflow/dnntrainer/component.yaml')
-dataflow_tf_model_analyze_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/dataflow/tfma/component.yaml')
-dataflow_tf_predict_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/dataflow/predict/component.yaml')
+dataflow_tf_data_validation_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/tfdv/component.yaml')
+dataflow_tf_transform_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/tft/component.yaml')
+tf_train_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/kubeflow/dnntrainer/component.yaml')
+dataflow_tf_model_analyze_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/tfma/component.yaml')
+dataflow_tf_predict_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/dataflow/predict/component.yaml')
-confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/local/confusion_matrix/component.yaml')
-roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/local/roc/component.yaml')
+confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/local/confusion_matrix/component.yaml')
+roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/local/roc/component.yaml')
-kubeflow_deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/kubeflow/deployer/component.yaml')
+kubeflow_deploy_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/kubeflow/deployer/component.yaml')
@dsl.pipeline(
name='TFX Taxi Cab Classification Pipeline Example',
diff --git a/samples/core/xgboost_training_cm/xgboost_training_cm.py b/samples/core/xgboost_training_cm/xgboost_training_cm.py
index f5a6d0575fb..f581eae7623 100755
--- a/samples/core/xgboost_training_cm/xgboost_training_cm.py
+++ b/samples/core/xgboost_training_cm/xgboost_training_cm.py
@@ -20,8 +20,8 @@
from kfp import dsl
from kfp import gcp
-confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/local/confusion_matrix/component.yaml')
-roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/48dd338c8ab328084633c51704cda77db79ac8c2/components/local/roc/component.yaml')
+confusion_matrix_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/local/confusion_matrix/component.yaml')
+roc_op = components.load_component_from_url('https://raw.githubusercontent.com/kubeflow/pipelines/a97f1d0ad0e7b92203f35c5b0b9af3a314952e05/components/local/roc/component.yaml')
# ! Please do not forget to enable the Dataproc API in your cluster https://console.developers.google.com/apis/api/dataproc.googleapis.com/overview
@@ -36,7 +36,7 @@ def dataproc_create_cluster_op(
):
return dsl.ContainerOp(
name='Dataproc - Create cluster',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-create-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
@@ -56,7 +56,7 @@ def dataproc_delete_cluster_op(
):
return dsl.ContainerOp(
name='Dataproc - Delete cluster',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-delete-cluster:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
@@ -76,7 +76,7 @@ def dataproc_analyze_op(
):
return dsl.ContainerOp(
name='Dataproc - Analyze',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-analyze:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
@@ -103,7 +103,7 @@ def dataproc_transform_op(
):
return dsl.ContainerOp(
name='Dataproc - Transform',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-transform:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
@@ -141,7 +141,7 @@ def dataproc_train_op(
return dsl.ContainerOp(
name='Dataproc - Train XGBoost model',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-train:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
@@ -174,7 +174,7 @@ def dataproc_predict_op(
):
return dsl.ContainerOp(
name='Dataproc - Predict with XGBoost model',
- image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:0517114dc2b365a4a6d95424af6157ead774eff3',
+ image='gcr.io/ml-pipeline/ml-pipeline-dataproc-predict:151c5349f13bea9d626c988563c04c0a86210c21',
arguments=[
'--project', project,
'--region', region,
diff --git a/sdk/python/kfp/components/_airflow_op.py b/sdk/python/kfp/components/_airflow_op.py
index 5b1fdf9e047..92244e60e73 100644
--- a/sdk/python/kfp/components/_airflow_op.py
+++ b/sdk/python/kfp/components/_airflow_op.py
@@ -70,6 +70,7 @@ def _create_component_spec_from_airflow_op(
variables_output_names = variables_to_output or []
xcoms_output_names = xcoms_to_output or []
modules_to_capture = modules_to_capture or [op_class.__module__]
+ modules_to_capture.append(_run_airflow_op.__module__)
output_names = []
if result_output_name is not None:
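For intuition about the one-line change above: `_run_airflow_op` is the helper that actually executes the Airflow op inside the component container, so its module must be captured (serialized by value) alongside the user's op module. The sketch below illustrates by-value vs. by-reference pickling using the `cloudpickle` package as an analogy only; it is not the code pickler KFP uses internally.

```python
import cloudpickle

def _run_airflow_op_stub():
    # Stand-in for the real helper. Functions defined in __main__ are
    # serialized by value, i.e. their code ships inside the payload.
    return 'executed inside the component container'

payload = cloudpickle.dumps(_run_airflow_op_stub)
restored = cloudpickle.loads(payload)
assert restored() == 'executed inside the component container'

# By contrast, a function imported from an installed module is pickled by
# reference (module name + attribute), so that module must be importable at
# load time. Appending _run_airflow_op.__module__ to modules_to_capture
# forces by-value capture so the generated component stays self-contained.
```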
diff --git a/sdk/python/kfp/components/_python_op.py b/sdk/python/kfp/components/_python_op.py
index e73bbb52fa1..2771501d1bf 100644
--- a/sdk/python/kfp/components/_python_op.py
+++ b/sdk/python/kfp/components/_python_op.py
@@ -23,6 +23,7 @@
import inspect
from pathlib import Path
+import typing
from typing import TypeVar, Generic, List
T = TypeVar('T')
@@ -146,8 +147,9 @@ def annotation_to_type_struct(annotation):
return None
if isinstance(annotation, type):
return str(annotation.__name__)
- else:
- return str(annotation)
+ if hasattr(annotation, '__forward_arg__'): # Handles typing.ForwardRef('Type_name') (the class was named _ForwardRef in Python 3.5-3.6)
+ return str(annotation.__forward_arg__) # The forward-referenced name can only be a string
+ return str(annotation)
for parameter in parameters:
type_struct = annotation_to_type_struct(parameter.annotation)
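The new branch in `annotation_to_type_struct` above makes string-based forward references usable as component types. A minimal standalone sketch, condensed from the hunk (the early `None` check is simplified here, and `typing.ForwardRef` is public only on Python 3.7+):

```python
import typing

def annotation_to_type_struct(annotation):
    # Condensed sketch: map a parameter annotation to a type-name string.
    if annotation is None:
        return None
    if isinstance(annotation, type):
        return str(annotation.__name__)
    if hasattr(annotation, '__forward_arg__'):
        # typing.ForwardRef('CustomType') stores the referenced name, which
        # can only be a string (the class was _ForwardRef in Python 3.5-3.6).
        return str(annotation.__forward_arg__)
    return str(annotation)

assert annotation_to_type_struct(int) == 'int'
assert annotation_to_type_struct(typing.ForwardRef('CustomType')) == 'CustomType'
```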
diff --git a/sdk/python/kfp/components/modelbase.py b/sdk/python/kfp/components/modelbase.py
index a3b2e3c92e9..7748bdc9936 100644
--- a/sdk/python/kfp/components/modelbase.py
+++ b/sdk/python/kfp/components/modelbase.py
@@ -282,7 +282,7 @@ def __repr__(self):
return self.__class__.__name__ + '(' + ', '.join(param + '=' + repr(getattr(self, param)) for param in self._get_field_names()) + ')'
def __eq__(self, other):
- return self.__class__ == other.__class__ and {k: getattr(self, k) for k in self._get_field_names()} == {k: getattr(self, k) for k in other._get_field_names()}
+ return self.__class__ == other.__class__ and {k: getattr(self, k) for k in self._get_field_names()} == {k: getattr(other, k) for k in other._get_field_names()}
def __ne__(self, other):
return not self == other
\ No newline at end of file
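The one-character fix above is easy to miss but significant: previously both dict comprehensions read from `self`, so `__eq__` effectively compared an instance against itself and any two instances of the same model class were reported equal. A stand-in class (not the real `ModelBase`) demonstrates the corrected behavior:

```python
class FakeModel:
    # Minimal stand-in for ModelBase, just enough to exercise __eq__.
    def __init__(self, a, b):
        self.a, self.b = a, b

    def _get_field_names(self):
        return ['a', 'b']

    def __eq__(self, other):
        # Fixed version: the second comprehension reads from `other`.
        return self.__class__ == other.__class__ and \
            {k: getattr(self, k) for k in self._get_field_names()} == \
            {k: getattr(other, k) for k in other._get_field_names()}

assert FakeModel(1, 2) == FakeModel(1, 2)
assert FakeModel(1, 2) != FakeModel(1, 3)  # compared equal before the fix
```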
diff --git a/sdk/python/kfp/dsl/_resource_op.py b/sdk/python/kfp/dsl/_resource_op.py
index b07207662bf..2e69fd3f8f3 100644
--- a/sdk/python/kfp/dsl/_resource_op.py
+++ b/sdk/python/kfp/dsl/_resource_op.py
@@ -105,6 +105,10 @@ def __init__(self,
if merge_strategy and action != "apply":
raise ValueError("You can't set merge_strategy when action != 'apply'")
+
+ # if action is delete, there should not be any outputs, success_condition, or failure_condition
+ if action == "delete" and (success_condition or failure_condition or attribute_outputs):
+ raise ValueError("You can't set success_condition, failure_condition, or attribute_outputs when action == 'delete'")
init_resource = {
"action": action,
@@ -117,6 +121,13 @@ def __init__(self,
self.k8s_resource = k8s_resource
+ # if action is delete, there should not be any outputs, success_condition, or failure_condition
+ if action == "delete":
+ self.attribute_outputs = {}
+ self.outputs = {}
+ self.output = None
+ return
+
# Set attribute_outputs
extra_attribute_outputs = \
attribute_outputs if attribute_outputs else {}
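Taken together, the two new guards mean a `ResourceOp` with `action='delete'` rejects output-related arguments and exposes no outputs at all. A usage sketch mirroring the compiler test added further down (`_compile` is the private helper that test also calls):

```python
import kfp.compiler
import kfp.dsl as dsl
from kubernetes import client as k8s

@dsl.pipeline(name='delete-cm-example')
def delete_pipeline():
    config_map = k8s.V1ConfigMap(
        api_version='v1', kind='ConfigMap',
        metadata=k8s.V1ObjectMeta(name='foo-bar-cm', namespace='default'),
        data={'foo': 'bar'})
    # With action='delete', the op carries no outputs, success_condition,
    # or failure_condition.
    dsl.ResourceOp(name='delete-config-map', action='delete',
                   k8s_resource=config_map)

workflow = kfp.compiler.Compiler()._compile(delete_pipeline)
```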
diff --git a/sdk/python/setup.py b/sdk/python/setup.py
index 690ca76746e..f2cc3f65f95 100644
--- a/sdk/python/setup.py
+++ b/sdk/python/setup.py
@@ -15,7 +15,7 @@
from setuptools import setup
NAME = 'kfp'
-VERSION = '0.1.26'
+VERSION = '0.1.27'
REQUIRES = [
'urllib3>=1.15,<1.25', #Fixing the version conflict with the "requests" package
diff --git a/sdk/python/tests/compiler/compiler_tests.py b/sdk/python/tests/compiler/compiler_tests.py
index be543f351ef..b1afb93e177 100644
--- a/sdk/python/tests/compiler/compiler_tests.py
+++ b/sdk/python/tests/compiler/compiler_tests.py
@@ -633,3 +633,35 @@ def init_container_pipeline():
init_container = init_containers[0]
self.assertEqual(init_container, {'image':'alpine:latest', 'command': ['echo', 'bye'], 'name': 'echo'})
+
+ def test_delete_resource_op(self):
+ """Test a pipeline with a delete resource operation."""
+ from kubernetes import client as k8s
+
+ @dsl.pipeline()
+ def some_pipeline():
+ # create config map object with k6 load test script
+ config_map = k8s.V1ConfigMap(
+ api_version="v1",
+ data={"foo": "bar"},
+ kind="ConfigMap",
+ metadata=k8s.V1ObjectMeta(
+ name="foo-bar-cm",
+ namespace="default"
+ )
+ )
+ # delete the config map in k8s
+ dsl.ResourceOp(
+ name="delete-config-map",
+ action="delete",
+ k8s_resource=config_map
+ )
+
+ workflow_dict = kfp.compiler.Compiler()._compile(some_pipeline)
+ delete_op_template = [template for template in workflow_dict['spec']['templates'] if template['name'] == 'delete-config-map'][0]
+
+ # delete resource operation should not have success condition, failure condition or output parameters.
+ # See https://github.com/argoproj/argo/blob/5331fc02e257266a4a5887dfe6277e5a0b42e7fc/cmd/argoexec/commands/resource.go#L30
+ self.assertIsNone(delete_op_template.get("successCondition"))
+ self.assertIsNone(delete_op_template.get("failureCondition"))
+ self.assertDictEqual(delete_op_template.get("outputs"), {})
diff --git a/sdk/python/tests/components/test_components.py b/sdk/python/tests/components/test_components.py
index 047741e199a..2c621a000a5 100644
--- a/sdk/python/tests/components/test_components.py
+++ b/sdk/python/tests/components/test_components.py
@@ -17,7 +17,6 @@
import unittest
from pathlib import Path
-sys.path.insert(0, __file__ + '/../../../')
import kfp
import kfp.components as comp
diff --git a/sdk/python/tests/components/test_graph_components.py b/sdk/python/tests/components/test_graph_components.py
index dcee65d2096..b9d295546d2 100644
--- a/sdk/python/tests/components/test_graph_components.py
+++ b/sdk/python/tests/components/test_graph_components.py
@@ -17,7 +17,6 @@
import unittest
from pathlib import Path
-sys.path.insert(0, __file__ + '/../../../')
import kfp.components as comp
from kfp.components._structures import ComponentReference, ComponentSpec, ContainerSpec, GraphInputArgument, GraphSpec, InputSpec, InputValuePlaceholder, GraphImplementation, OutputPathPlaceholder, OutputSpec, TaskOutputArgument, TaskSpec
diff --git a/sdk/python/tests/components/test_python_op.py b/sdk/python/tests/components/test_python_op.py
index 5a90df81663..4e495484432 100644
--- a/sdk/python/tests/components/test_python_op.py
+++ b/sdk/python/tests/components/test_python_op.py
@@ -184,6 +184,81 @@ def add_multiply_two_numbers(a: float, b: float) -> NamedTuple('DummyName', [('s
self.helper_test_2_in_2_out_component_using_local_call(func, op, output_names=['sum', 'product'])
+ def test_extract_component_interface(self):
+ from typing import NamedTuple
+ def my_func(
+ required_param,
+ int_param: int = 42,
+ float_param : float = 3.14,
+ str_param : str = 'string',
+ bool_param : bool = True,
+ none_param = None,
+ custom_type_param: 'Custom type' = None,
+ ) -> NamedTuple('DummyName', [
+ #('required_param',), # All typing.NamedTuple fields must have types
+ ('int_param', int),
+ ('float_param', float),
+ ('str_param', str),
+ ('bool_param', bool),
+ #('custom_type_param', 'Custom type'), #SyntaxError: Forward reference must be an expression -- got 'Custom type'
+ ('custom_type_param', 'CustomType'),
+ ]
+ ):
+ '''Function docstring'''
+ pass
+
+ component_spec = comp._python_op._extract_component_interface(my_func)
+
+ from kfp.components._structures import InputSpec, OutputSpec
+ self.assertEqual(
+ component_spec.inputs,
+ [
+ InputSpec(name='required_param'),
+ InputSpec(name='int_param', type='int', default='42', optional=True),
+ InputSpec(name='float_param', type='float', default='3.14', optional=True),
+ InputSpec(name='str_param', type='str', default='string', optional=True),
+ InputSpec(name='bool_param', type='bool', default='True', optional=True),
+ InputSpec(name='none_param', optional=True), # No default='None'
+ InputSpec(name='custom_type_param', type='Custom type', optional=True),
+ ]
+ )
+ self.assertEqual(
+ component_spec.outputs,
+ [
+ OutputSpec(name='int_param', type='int'),
+ OutputSpec(name='float_param', type='float'),
+ OutputSpec(name='str_param', type='str'),
+ OutputSpec(name='bool_param', type='bool'),
+ #OutputSpec(name='custom_type_param', type='Custom type', default='None'),
+ OutputSpec(name='custom_type_param', type='CustomType'),
+ ]
+ )
+
+ self.maxDiff = None
+ self.assertDictEqual(
+ component_spec.to_dict(),
+ {
+ 'name': 'My func',
+ 'description': 'Function docstring\n',
+ 'inputs': [
+ {'name': 'required_param'},
+ {'name': 'int_param', 'type': 'int', 'default': '42', 'optional': True},
+ {'name': 'float_param', 'type': 'float', 'default': '3.14', 'optional': True},
+ {'name': 'str_param', 'type': 'str', 'default': 'string', 'optional': True},
+ {'name': 'bool_param', 'type': 'bool', 'default': 'True', 'optional': True},
+ {'name': 'none_param', 'optional': True}, # No default='None'
+ {'name': 'custom_type_param', 'type': 'Custom type', 'optional': True},
+ ],
+ 'outputs': [
+ {'name': 'int_param', 'type': 'int'},
+ {'name': 'float_param', 'type': 'float'},
+ {'name': 'str_param', 'type': 'str'},
+ {'name': 'bool_param', 'type': 'bool'},
+ {'name': 'custom_type_param', 'type': 'CustomType'},
+ ]
+ }
+ )
+
@unittest.skip #TODO: #Simplified multi-output syntax is not implemented yet
def test_func_to_container_op_multiple_named_typed_outputs_using_list_syntax(self):
def add_multiply_two_numbers(a: float, b: float) -> [('sum', float), ('product', float)]:
diff --git a/sdk/python/tests/components/test_structure_model_base.py b/sdk/python/tests/components/test_structure_model_base.py
index 5077d68e83e..95bd8bad0df 100644
--- a/sdk/python/tests/components/test_structure_model_base.py
+++ b/sdk/python/tests/components/test_structure_model_base.py
@@ -230,5 +230,20 @@ def test_handle_from_to_dict_for_union_dict_class(self):
TestModel1.from_dict({'prop_0': '', 'prop_5': [val5.to_dict(), None]})
+ def test_handle_comparisons(self):
+ class A(ModelBase):
+ def __init__(self, a, b):
+ super().__init__(locals())
+
+ self.assertEqual(A(1, 2), A(1, 2))
+ self.assertNotEqual(A(1, 2), A(1, 3))
+
+ class B(ModelBase):
+ def __init__(self, a, b):
+ super().__init__(locals())
+
+ self.assertNotEqual(A(1, 2), B(1, 2))
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/deploy-kubeflow.sh b/test/deploy-kubeflow.sh
deleted file mode 100755
index e81fc26ed8f..00000000000
--- a/test/deploy-kubeflow.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-TEST_CLUSTER_PREFIX=${WORKFLOW_FILE%.*}
-TEST_CLUSTER=$(echo $TEST_CLUSTER_PREFIX | cut -d _ -f 1)-${PULL_PULL_SHA:0:7}-${RANDOM}
-
-# Install ksonnet
-KS_VERSION="0.13.0"
-
-curl -LO https://github.com/ksonnet/ksonnet/releases/download/v${KS_VERSION}/ks_${KS_VERSION}_linux_amd64.tar.gz
-tar -xzf ks_${KS_VERSION}_linux_amd64.tar.gz
-chmod +x ./ks_${KS_VERSION}_linux_amd64/ks
-# Add ks to PATH
-PATH=$PATH:`pwd`/ks_${KS_VERSION}_linux_amd64
-
-## Download latest kubeflow release source code
-KUBEFLOW_SRC=${DIR}/kubeflow_latest_release
-mkdir ${KUBEFLOW_SRC}
-cd ${KUBEFLOW_SRC}
-export KUBEFLOW_TAG=pipelines
-curl https://raw.githubusercontent.com/kubeflow/kubeflow/${KUBEFLOW_TAG}/scripts/download.sh | bash
-
-export CLIENT_ID=${RANDOM}
-export CLIENT_SECRET=${RANDOM}
-KFAPP=${TEST_CLUSTER}
-
-function clean_up {
- echo "Clean up..."
- cd ${DIR}/${KFAPP}
- ${KUBEFLOW_SRC}/scripts/kfctl.sh delete all
- # delete the storage
- gcloud deployment-manager --project=${PROJECT} deployments delete ${KFAPP}-storage --quiet
-}
-trap clean_up EXIT SIGINT SIGTERM
-
-cd ${DIR}
-${KUBEFLOW_SRC}/scripts/kfctl.sh init ${KFAPP} --platform ${PLATFORM} --project ${PROJECT} --skipInitProject
-
-cd ${KFAPP}
-${KUBEFLOW_SRC}/scripts/kfctl.sh generate platform
-${KUBEFLOW_SRC}/scripts/kfctl.sh apply platform
-${KUBEFLOW_SRC}/scripts/kfctl.sh generate k8s
-${KUBEFLOW_SRC}/scripts/kfctl.sh apply k8s
-
-gcloud container clusters get-credentials ${TEST_CLUSTER}
diff --git a/test/deploy-pipeline.sh b/test/deploy-pipeline.sh
deleted file mode 100755
index 45063c41c53..00000000000
--- a/test/deploy-pipeline.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-
-
-usage()
-{
- echo "usage: deploy.sh
- [--gcr_image_base_dir the gcr image base directory including images such as apiImage and persistenceAgentImage]
- [--gcr_image_tag the tags for images such as apiImage and persistenceAgentImage]
- [-h help]"
-}
-GCR_IMAGE_TAG=latest
-
-while [ "$1" != "" ]; do
- case $1 in
- --gcr_image_base_dir ) shift
- GCR_IMAGE_BASE_DIR=$1
- ;;
- --gcr_image_tag ) shift
- GCR_IMAGE_TAG=$1
- ;;
- -h | --help ) usage
- exit
- ;;
- * ) usage
- exit 1
- esac
- shift
-done
-
-cd ${DIR}/${KFAPP}
-
-## Update pipeline component image
-pushd ks_app
-# Delete pipeline component first before applying so we guarantee the pipeline component is new.
-ks delete default -c pipeline
-sleep 60s
-
-ks param set pipeline apiImage ${GCR_IMAGE_BASE_DIR}/api-server:${GCR_IMAGE_TAG}
-ks param set pipeline persistenceAgentImage ${GCR_IMAGE_BASE_DIR}/persistenceagent:${GCR_IMAGE_TAG}
-ks param set pipeline scheduledWorkflowImage ${GCR_IMAGE_BASE_DIR}/scheduledworkflow:${GCR_IMAGE_TAG}
-ks param set pipeline uiImage ${GCR_IMAGE_BASE_DIR}/frontend:${GCR_IMAGE_TAG}
-# Swap the metadata/artifact storage PD to avoid reusing the old data.
-# We should remove this hack when we deprecate ksonnet.
-# See https://github.com/kubeflow/pipelines/pull/1805#issuecomment-520204987 for context
-ks param set pipeline minioPd ${KFAPP}-storage-metadata-store
-ks param set pipeline mysqlPd ${KFAPP}-storage-artifact-store
-ks apply default -c pipeline
-popd
diff --git a/test/manifests/kustomization.yaml b/test/manifests/kustomization.yaml
index d785bf95edf..bc55ef3361c 100644
--- a/test/manifests/kustomization.yaml
+++ b/test/manifests/kustomization.yaml
@@ -4,4 +4,4 @@ kind: Kustomization
# Actual image overrides will be added in test scripts.
images: []
resources:
-- ../../manifests/kustomize/namespaced-install
+- ../../manifests/kustomize/env/dev
diff --git a/test/minikube/OWNERS b/test/minikube/OWNERS
deleted file mode 100644
index 23d368bc30f..00000000000
--- a/test/minikube/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-approvers:
- - vicaire
- - IronPan
-reviewers:
- - vicaire
- - IronPan
\ No newline at end of file
diff --git a/test/minikube/install_and_start_minikube_without_vm.sh b/test/minikube/install_and_start_minikube_without_vm.sh
deleted file mode 100755
index 88736744ad4..00000000000
--- a/test/minikube/install_and_start_minikube_without_vm.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash -ex
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#This scripts installs Minikube, configures it to run without root access and starts it without VM
-#See https://github.com/kubernetes/minikube#linux-continuous-integration-without-vm-support
-
-MINIKUBE_VERSION=${MINIKUBE_VERSION:-latest}
-KUBECTL_VERSION=${KUBECTL_VERSION:-$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)}
-KUBERNETES_VERSION=${KUBERNETES_VERSION:-v1.12.2}
-
-curl -Lo minikube https://storage.googleapis.com/minikube/releases/${MINIKUBE_VERSION}/minikube-linux-amd64 && chmod +x minikube && sudo cp minikube /usr/local/bin/ && rm minikube
-curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl && chmod +x kubectl && sudo cp kubectl /usr/local/bin/ && rm kubectl
-
-export MINIKUBE_WANTUPDATENOTIFICATION=false
-export MINIKUBE_WANTREPORTERRORPROMPT=false
-export MINIKUBE_HOME=$HOME
-export CHANGE_MINIKUBE_NONE_USER=true
-mkdir -p $HOME/.kube
-touch $HOME/.kube/config
-
-export KUBECONFIG=$HOME/.kube/config
-sudo -E /usr/local/bin/minikube start --vm-driver=none --kubernetes-version=$KUBERNETES_VERSION
-
-# this for loop waits until kubectl can access the api server that Minikube has created
-for i in {1..150}; do # timeout for 5 minutes
- kubectl get po &> /dev/null
- if [ $? -ne 1 ]; then
- break
- fi
- sleep 2
-done
-
-# kubectl commands are now able to interact with Minikube cluster
diff --git a/test/minikube/install_argo_client.sh b/test/minikube/install_argo_client.sh
deleted file mode 100755
index d55772aa536..00000000000
--- a/test/minikube/install_argo_client.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash -ex
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-ARGO_VERSION=${ARGO_VERSION:-v2.3.0}
-
-sudo curl -sSL -o /usr/local/bin/argo https://github.com/argoproj/argo/releases/download/${ARGO_VERSION}/argo-linux-amd64
-sudo chmod +x /usr/local/bin/argo
diff --git a/test/minikube/install_docker.sh b/test/minikube/install_docker.sh
deleted file mode 100755
index 7f2cb0110ac..00000000000
--- a/test/minikube/install_docker.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash -ex
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DOCKER_VERSION=18.06.1~ce~3-0~$(. /etc/os-release; echo "$ID")
-
-sudo apt-get update -y
-sudo apt-get install -y \
- apt-transport-https \
- ca-certificates \
- curl \
- software-properties-common
-
-curl -fsSL https://download.docker.com/linux/$(. /etc/os-release; echo "$ID")/gpg | sudo apt-key add -
-
-sudo add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/$(. /etc/os-release; echo "$ID") \
- $(lsb_release -cs) \
- stable"
-
-sudo apt-get update -y
-sudo apt-get install docker-ce="${DOCKER_VERSION}" -y
diff --git a/test/minikube/install_docker_minikube_argo.sh b/test/minikube/install_docker_minikube_argo.sh
deleted file mode 100755
index 3b88e188861..00000000000
--- a/test/minikube/install_docker_minikube_argo.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash -ex
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-repo_test_dir=$(dirname $0)
-
-DOCKER_VERSION=18.06.1~ce~3-0~$(. /etc/os-release; echo "$ID") "${repo_test_dir}/install_docker.sh"
-MINIKUBE_VERSION=v0.30.0 KUBECTL_VERSION=v1.12.2 KUBERNETES_VERSION=v1.12.2 "${repo_test_dir}/install_and_start_minikube_without_vm.sh"
-ARGO_VERSION=v2.3.0 "${repo_test_dir}/install_argo_client.sh"
-sudo apt-get install socat #needed for port forwarding
diff --git a/test/presubmit-tests-gce-minikube.sh b/test/presubmit-tests-gce-minikube.sh
deleted file mode 100755
index 1c683b0470f..00000000000
--- a/test/presubmit-tests-gce-minikube.sh
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/bash -ex
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#set -o nounset
-
-#This script runs the presubmit tests on a Minikube cluster.
-#The script creates a new GCE VM, sets up Minikube, copies the SSH keys and repo and and then runs the tests.
-#This script is usually run from the PROW cluster, but it can be run locally:
-#ssh_key_file=~/.ssh/id_rsa WORKSPACE=/var/tmp PULL_PULL_SHA=master test/presubmit-tests-gce-minikube.sh --workflow_file integration_test_gke.yaml --test_result_folder api_integration_test
-
-WORKSPACE=$WORKSPACE
-PULL_PULL_SHA=$PULL_PULL_SHA
-ARTIFACT_DIR=$WORKSPACE/_artifacts
-PROJECT_ID=${PROJECT_ID:-ml-pipeline-test}
-ZONE=${ZONE:-us-west1-a}
-
-repo_test_dir=$(dirname $0)
-
-instance_name=${instance_name:-test-minikube-${PULL_PULL_SHA:0:6}-$(date +%s)-$(echo "$@" | md5sum | cut -c 1-6)}
-
-firewall_rule_name=allow-prow-ssh-$instance_name
-
-# activating the service account
-gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}"
-
-#Function to delete VM
-function delete_vm {
- if [ "$keep_created_vm" != true ]; then
- echo "Deleting VM $instance_name"
- gcloud compute instances delete $instance_name --zone=$ZONE --quiet
- fi
- echo "Deleting the SSH firewall rule $firewall_rule_name"
- gcloud compute firewall-rules delete $firewall_rule_name
-}
-
-#Setting the exit handler to delete VM. The VM will be deleted when the script exists (either completes or fails)
-#TODO: Find a more resilent way to clean up VMs. Right now the VM is not deleted if the machine running this script fails. (See https://github.com/kubeflow/pipelines/issues/1064)
-trap delete_vm EXIT
-
-#Creating the VM
-gcloud config set project $PROJECT_ID
-gcloud config set compute/zone $ZONE
-
-machine_type=n1-standard-16
-boot_disk_size=200GB
-
-gcloud compute instances create $instance_name --zone=$ZONE --machine-type=$machine_type --boot-disk-size=$boot_disk_size --scopes=storage-rw --tags=presubmit-test-vm
-
-#Adding firewall entry that allows the current instance to access the newly created VM using SSH.
-#This is needed for cases when the current instance is in different project (e.g. in the Prow cluster project)
-self_external_ip=$(curl -sSL "http://metadata/computeMetadata/v1/instance/network-interfaces/0/access-configs/0/external-ip" -H "Metadata-Flavor: Google")
-gcloud compute firewall-rules create $firewall_rule_name --allow tcp:22 --source-ranges=${self_external_ip}/32 --target-tags=presubmit-test-vm
-
-#Workaround the problems with prow cluster and GCE SSH access.
-#Prow tests run as root. GCE instances do not allow SSH access for root.
-if [ "$(whoami)" == root ]; then
- export USER=not-root
-fi
-
-#Copy service account keys
-gcloud compute scp --zone=$ZONE --verbosity=error "$GOOGLE_APPLICATION_CREDENTIALS" $instance_name:"~/service-account.json"
-
-#Copy repo
-tar --directory=.. -cz pipelines | gcloud compute ssh --zone=$ZONE $instance_name -- tar -xz #Assumes that the current directory on target VM is ~
-
-#Installing software on VM
-gcloud compute ssh --zone=$ZONE $instance_name -- "~/pipelines/test/minikube/install_docker_minikube_argo.sh"
-
-#Running the presubmit tests
-gcloud compute ssh --zone=$ZONE $instance_name -- PULL_PULL_SHA="$PULL_PULL_SHA" PULL_BASE_SHA="$PULL_BASE_SHA" WORKSPACE="~/${WORKSPACE}" GOOGLE_APPLICATION_CREDENTIALS="~/service-account.json" "~/pipelines/test/presubmit-tests.sh" --cluster-type none "$@"
-
-#Copy back the artifacts
-mkdir -p "${ARTIFACT_DIR}"
-gcloud compute scp --zone=$ZONE --verbosity=error --recurse $instance_name:"~/${ARTIFACT_DIR}/*" "${ARTIFACT_DIR}/"