From d737c448723b9f541a3543012b4414c17b2eab5c Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 28 May 2020 20:17:15 -0700 Subject: [PATCH 01/11] Components - Apache Parquet converters (#3834) * Components - Apache Parquet converters Added components that convert to and from Apache Parquet data format * Added sample pipeline --- .../ApacheParquet/_samples/sample_pipeline.py | 37 +++++++++ .../from_ApacheArrowFeather/component.py | 27 +++++++ .../from_ApacheArrowFeather/component.yaml | 74 ++++++++++++++++++ .../ApacheParquet/from_CSV/component.py | 26 +++++++ .../ApacheParquet/from_CSV/component.yaml | 72 ++++++++++++++++++ .../ApacheParquet/from_TSV/component.py | 26 +++++++ .../ApacheParquet/from_TSV/component.yaml | 72 ++++++++++++++++++ .../to_ApacheArrowFeather/component.py | 27 +++++++ .../to_ApacheArrowFeather/component.yaml | 75 +++++++++++++++++++ 9 files changed, 436 insertions(+) create mode 100644 components/_converters/ApacheParquet/_samples/sample_pipeline.py create mode 100644 components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py create mode 100644 components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml create mode 100644 components/_converters/ApacheParquet/from_CSV/component.py create mode 100644 components/_converters/ApacheParquet/from_CSV/component.yaml create mode 100644 components/_converters/ApacheParquet/from_TSV/component.py create mode 100644 components/_converters/ApacheParquet/from_TSV/component.yaml create mode 100644 components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py create mode 100644 components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml diff --git a/components/_converters/ApacheParquet/_samples/sample_pipeline.py b/components/_converters/ApacheParquet/_samples/sample_pipeline.py new file mode 100644 index 00000000000..6aa283ae1c4 --- /dev/null +++ b/components/_converters/ApacheParquet/_samples/sample_pipeline.py @@ -0,0 +1,37 @@ +import kfp +from kfp import components + +component_store = components.ComponentStore(url_search_prefixes=['https://raw.githubusercontent.com/kubeflow/pipelines/0d7d6f41c92bdc05c2825232afe2b47e5cb6c4b3/components/']) + +chicago_taxi_dataset_op = component_store.load_component(name='datasets/Chicago_Taxi_Trips') +convert_csv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_CSV') +convert_tsv_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_TSV') +convert_apache_parquet_to_apache_arrow_feather_op = component_store.load_component(name='_converters/ApacheParquet/to_ApacheArrowFeather') +convert_apache_arrow_feather_to_apache_parquet_op = component_store.load_component(name='_converters/ApacheParquet/from_ApacheArrowFeather') + + +def parquet_pipeline(): + csv = chicago_taxi_dataset_op( + where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', + select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', + limit=10000, + ).output + + tsv = chicago_taxi_dataset_op( + where='trip_start_timestamp >= "2019-01-01" AND trip_start_timestamp < "2019-02-01"', + select='tips,trip_seconds,trip_miles,pickup_community_area,dropoff_community_area,fare,tolls,extras,trip_total', + limit=10000, + format='tsv', + ).output + + csv_parquet = convert_csv_to_apache_parquet_op(csv).output + csv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(csv_parquet).output + csv_parquet_feather_parquet 
= convert_apache_arrow_feather_to_apache_parquet_op(csv_parquet_feather).output + + tsv_parquet = convert_tsv_to_apache_parquet_op(tsv).output + tsv_parquet_feather = convert_apache_parquet_to_apache_arrow_feather_op(tsv_parquet).output + tsv_parquet_feather_parquet = convert_apache_arrow_feather_to_apache_parquet_op(tsv_parquet_feather).output + +if __name__ == '__main__': + kfp_endpoint = None + kfp.Client(host=kfp_endpoint).create_run_from_pipeline_func(parquet_pipeline, arguments={}) diff --git a/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py new file mode 100644 index 00000000000..a6949b7ce3d --- /dev/null +++ b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.py @@ -0,0 +1,27 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_apache_arrow_feather_to_apache_parquet( + data_path: InputPath('ApacheArrowFeather'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts Apache Arrow Feather to Apache Parquet. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + table = feather.read_table(data_path) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_apache_arrow_feather_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml new file mode 100644 index 00000000000..5c14ed7772d --- /dev/null +++ b/components/_converters/ApacheParquet/from_ApacheArrowFeather/component.yaml @@ -0,0 +1,74 @@ +name: Convert apache arrow feather to apache parquet +description: |- + Converts Apache Arrow Feather to Apache Parquet. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: ApacheArrowFeather} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_apache_arrow_feather_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts Apache Arrow Feather to Apache Parquet. 
+ + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + table = feather.read_table(data_path) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert apache arrow feather to apache parquet', description='Converts Apache Arrow Feather to Apache Parquet.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_apache_arrow_feather_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/from_CSV/component.py b/components/_converters/ApacheParquet/from_CSV/component.py new file mode 100644 index 00000000000..101aa78311e --- /dev/null +++ b/components/_converters/ApacheParquet/from_CSV/component.py @@ -0,0 +1,26 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_csv_to_apache_parquet( + data_path: InputPath('CSV'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts CSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_csv_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_CSV/component.yaml b/components/_converters/ApacheParquet/from_CSV/component.yaml new file mode 100644 index 00000000000..bb3ac32e2f6 --- /dev/null +++ b/components/_converters/ApacheParquet/from_CSV/component.yaml @@ -0,0 +1,72 @@ +name: Convert csv to apache parquet +description: |- + Converts CSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: CSV} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_csv_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts CSV table to Apache Parquet. 
+ + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert csv to apache parquet', description='Converts CSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_csv_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/from_TSV/component.py b/components/_converters/ApacheParquet/from_TSV/component.py new file mode 100644 index 00000000000..d297171a93a --- /dev/null +++ b/components/_converters/ApacheParquet/from_TSV/component.py @@ -0,0 +1,26 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_tsv_to_apache_parquet( + data_path: InputPath('TSV'), + output_data_path: OutputPath('ApacheParquet'), +): + '''Converts TSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) + parquet.write_table(table, output_data_path) + + +if __name__ == '__main__': + create_component_from_func( + convert_tsv_to_apache_parquet, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1'] + ) diff --git a/components/_converters/ApacheParquet/from_TSV/component.yaml b/components/_converters/ApacheParquet/from_TSV/component.yaml new file mode 100644 index 00000000000..499370281c3 --- /dev/null +++ b/components/_converters/ApacheParquet/from_TSV/component.yaml @@ -0,0 +1,72 @@ +name: Convert tsv to apache parquet +description: |- + Converts TSV table to Apache Parquet. + + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: TSV} +outputs: +- {name: output_data, type: ApacheParquet} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install + --quiet --no-warn-script-location 'pyarrow==0.17.1' --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_tsv_to_apache_parquet( + data_path, + output_data_path, + ): + '''Converts TSV table to Apache Parquet. 
+ + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import csv, parquet + + table = csv.read_csv(data_path, parse_options=csv.ParseOptions(delimiter='\t')) + parquet.write_table(table, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert tsv to apache parquet', description='Converts TSV table to Apache Parquet.\n\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_tsv_to_apache_parquet(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} diff --git a/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py new file mode 100644 index 00000000000..0129334ba7f --- /dev/null +++ b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.py @@ -0,0 +1,27 @@ +from kfp.components import InputPath, OutputPath, create_component_from_func + +def convert_apache_parquet_to_apache_arrow_feather( + data_path: InputPath('ApacheParquet'), + output_data_path: OutputPath('ApacheArrowFeather'), +): + '''Converts Apache Parquet to Apache Arrow Feather. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + data_frame = parquet.read_pandas(data_path).to_pandas() + feather.write_feather(data_frame, output_data_path) + + +if __name__ == '__main__': + convert_apache_parquet_to_apache_arrow_feather_op = create_component_from_func( + convert_apache_parquet_to_apache_arrow_feather, + output_component_file='component.yaml', + base_image='python:3.7', + packages_to_install=['pyarrow==0.17.1', 'pandas==1.0.3'] + ) diff --git a/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml new file mode 100644 index 00000000000..28f64056da3 --- /dev/null +++ b/components/_converters/ApacheParquet/to_ApacheArrowFeather/component.yaml @@ -0,0 +1,75 @@ +name: Convert apache parquet to apache arrow feather +description: |- + Converts Apache Parquet to Apache Arrow Feather. 
+ + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov +inputs: +- {name: data, type: ApacheParquet} +outputs: +- {name: output_data, type: ApacheArrowFeather} +implementation: + container: + image: python:3.7 + command: + - sh + - -c + - (PIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location + 'pyarrow==0.17.1' 'pandas==1.0.3' || PIP_DISABLE_PIP_VERSION_CHECK=1 python3 + -m pip install --quiet --no-warn-script-location 'pyarrow==0.17.1' 'pandas==1.0.3' + --user) && "$0" "$@" + - python3 + - -u + - -c + - | + def _make_parent_dirs_and_return_path(file_path: str): + import os + os.makedirs(os.path.dirname(file_path), exist_ok=True) + return file_path + + def convert_apache_parquet_to_apache_arrow_feather( + data_path, + output_data_path, + ): + '''Converts Apache Parquet to Apache Arrow Feather. + + [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html) + [Apache Parquet](https://parquet.apache.org/) + + Annotations: + author: Alexey Volkov + ''' + from pyarrow import feather, parquet + + data_frame = parquet.read_pandas(data_path).to_pandas() + feather.write_feather(data_frame, output_data_path) + + import argparse + _parser = argparse.ArgumentParser(prog='Convert apache parquet to apache arrow feather', description='Converts Apache Parquet to Apache Arrow Feather.\n\n [Apache Arrow Feather](https://arrow.apache.org/docs/python/feather.html)\n [Apache Parquet](https://parquet.apache.org/)\n\n Annotations:\n author: Alexey Volkov ') + _parser.add_argument("--data", dest="data_path", type=str, required=True, default=argparse.SUPPRESS) + _parser.add_argument("--output-data", dest="output_data_path", type=_make_parent_dirs_and_return_path, required=True, default=argparse.SUPPRESS) + _parsed_args = vars(_parser.parse_args()) + _output_files = _parsed_args.pop("_output_paths", []) + + _outputs = convert_apache_parquet_to_apache_arrow_feather(**_parsed_args) + + _output_serializers = [ + + ] + + import os + for idx, output_file in enumerate(_output_files): + try: + os.makedirs(os.path.dirname(output_file)) + except OSError: + pass + with open(output_file, 'w') as f: + f.write(_output_serializers[idx](_outputs[idx])) + args: + - --data + - {inputPath: data} + - --output-data + - {outputPath: output_data} From 0e59b68775a7cb75c6d636227f0c63d87a543b34 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 28 May 2020 21:07:14 -0700 Subject: [PATCH 02/11] Testing - Fixed SDK Travis tests (#3838) --- .travis.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 039fb0bfb44..0ab34211aac 100644 --- a/.travis.yml +++ b/.travis.yml @@ -48,9 +48,7 @@ matrix: - cd $TRAVIS_BUILD_DIR/sdk/python - python3 -m pip install -e . 
- cd $TRAVIS_BUILD_DIR # Changing the current directory to the repo root for correct coverall paths - - coverage run --source=kfp --append sdk/python/tests/dsl/main.py - - coverage run --source=kfp --append sdk/python/tests/compiler/main.py - - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python/tests --top-level-directory=sdk/python + - coverage run --source=kfp --append -m unittest discover --verbose --start-dir sdk/python --pattern '*test*.py' #- coveralls # Test against TFX From 699ce937daf584ffb55bc6d9c673ccdf26260b66 Mon Sep 17 00:00:00 2001 From: Shotaro Kohama Date: Thu, 28 May 2020 21:07:22 -0700 Subject: [PATCH 03/11] Modify docstrings to replace 'InitContainer' to 'UserContainer' (#3863) --- sdk/python/kfp/dsl/_container_op.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/python/kfp/dsl/_container_op.py b/sdk/python/kfp/dsl/_container_op.py index 0c81a6c1a58..3fb7e6a0d63 100644 --- a/sdk/python/kfp/dsl/_container_op.py +++ b/sdk/python/kfp/dsl/_container_op.py @@ -700,7 +700,7 @@ def __init__(self, Args: name: the name of the op. It does not have to be unique within a pipeline because the pipeline will generates a unique new name in case of conflicts. - init_containers: the list of `InitContainer` objects describing the InitContainer + init_containers: the list of `UserContainer` objects describing the InitContainer to deploy before the `main` container. sidecars: the list of `Sidecar` objects describing the sidecar containers to deploy together with the `main` container. @@ -886,7 +886,7 @@ def add_init_container(self, init_container: UserContainer): """Add a init container to the Op. Args: - init_container: InitContainer object. + init_container: UserContainer object. """ self.init_containers.append(init_container) @@ -939,7 +939,7 @@ def foo_pipeline(tag: str, pull_image_policy: str): op = dsl.ContainerOp(name='foo', image='busybox:%s' % tag, # pass in init_container list - init_containers=[dsl.InitContainer('print', 'busybox:latest', command='echo "hello"')], + init_containers=[dsl.UserContainer('print', 'busybox:latest', command='echo "hello"')], # pass in sidecars list sidecars=[dsl.Sidecar('print', 'busybox:latest', command='echo "hello"')], # pass in k8s container kwargs @@ -986,7 +986,7 @@ def __init__( arguments: the arguments of the command. The command can include "%s" and supply a PipelineParam as the string replacement. For example, ('echo %s' % input_param). At container run time the argument will be 'echo param_value'. - init_containers: the list of `InitContainer` objects describing the InitContainer + init_containers: the list of `UserContainer` objects describing the InitContainer to deploy before the `main` container. sidecars: the list of `Sidecar` objects describing the sidecar containers to deploy together with the `main` container. 
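The docstring fixes above track the SDK's rename of `InitContainer` to `UserContainer`. A minimal sketch of the usage these docstrings describe, assuming the `kfp.dsl` API shown in the patch (the op and image names are illustrative, not taken from the patch):

import kfp.dsl as dsl

@dsl.pipeline(name='user-container-demo')
def demo_pipeline():
    # A UserContainer (formerly documented as InitContainer) runs to completion
    # before the main container starts; a Sidecar runs alongside it.
    dsl.ContainerOp(
        name='main',
        image='busybox:latest',
        command=['echo', 'hello from main'],
        init_containers=[
            dsl.UserContainer('print', 'busybox:latest', command=['echo', 'hello from init']),
        ],
        sidecars=[
            dsl.Sidecar('print', 'busybox:latest', command=['echo', 'hello from sidecar']),
        ],
    )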
From 58ff65f330c84388c63a4aeda6331f3e2f99d3e9 Mon Sep 17 00:00:00 2001 From: Gautam Kumar Date: Thu, 28 May 2020 22:05:14 -0700 Subject: [PATCH 04/11] fixing case when status is None (#3865) * fixing case when status is None * Fixing status update --- .../tests/integration_tests/utils/kfp_client_utils.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py index 2949f7b3c0a..a3ef367e2fb 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/kfp_client_utils.py @@ -26,7 +26,9 @@ def compile_and_run_pipeline( def wait_for_job_completion(client, run_id, timeout, status_to_check): response = client.wait_for_run_completion(run_id, timeout) - status = response.run.status.lower() == status_to_check + status = None + if response.run.status: + status = response.run.status.lower() == status_to_check return status @@ -36,8 +38,9 @@ def wait_for_job_status(client, run_id, timeout, status_to_check="succeeded"): else: time.sleep(timeout) response = client.get_run(run_id) - status = response.run.status.lower() == status_to_check - + status = None + if response.run.status: + status = response.run.status.lower() == status_to_check return status From da1bc6978d137b2db9d3c530d7c368f24be5a4e4 Mon Sep 17 00:00:00 2001 From: jingzhang36 Date: Fri, 29 May 2020 13:40:10 +0800 Subject: [PATCH 05/11] re-enable upgrade test since 0.5.2 is cut (#3696) * re-enable upgrade test since 0.5.0 is cut * renable upgrade test; and in upgrade test, sort resource references field before comparison * those types have been defined elsewhere * expect service account --- backend/test/integration/upgrade_test.go | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/backend/test/integration/upgrade_test.go b/backend/test/integration/upgrade_test.go index 4245f229c70..a732f09c994 100644 --- a/backend/test/integration/upgrade_test.go +++ b/backend/test/integration/upgrade_test.go @@ -2,6 +2,7 @@ package integration import ( "io/ioutil" + "sort" "testing" "time" @@ -60,12 +61,8 @@ func (s *UpgradeTests) TestPrepare() { func (s *UpgradeTests) TestVerify() { s.VerifyExperiments() s.VerifyPipelines() - // TODO(jingzhang36): temporarily comment out the verification of runs and - // jobs since this PR changes the API response and hence a diff between the - // response from previous release and that from this PR is expected. - // Will put them back after the next release is cut. 
- // s.VerifyRuns() - // s.VerifyJobs() + s.VerifyRuns() + s.VerifyJobs() } // Check the namespace have ML job installed and ready @@ -341,6 +338,7 @@ func (s *UpgradeTests) VerifyJobs() { Name: "hello-world.yaml", Relationship: job_model.APIRelationshipCREATOR, }, }, + ServiceAccount: "pipeline-runner", MaxConcurrency: 10, NoCatchup: true, Enabled: true, @@ -350,6 +348,8 @@ func (s *UpgradeTests) VerifyJobs() { Trigger: &job_model.APITrigger{}, } + sort.Sort(JobResourceReferenceSorter(job.ResourceReferences)) + sort.Sort(JobResourceReferenceSorter(expectedJob.ResourceReferences)) assert.Equal(t, expectedJob, job) } @@ -377,10 +377,13 @@ func checkHelloWorldRunDetail(t *testing.T, runDetail *run_model.APIRunDetail) { Name: "hello-world.yaml", Relationship: run_model.APIRelationshipCREATOR, }, }, - CreatedAt: runDetail.Run.CreatedAt, - ScheduledAt: runDetail.Run.ScheduledAt, - FinishedAt: runDetail.Run.FinishedAt, + ServiceAccount: "pipeline-runner", + CreatedAt: runDetail.Run.CreatedAt, + ScheduledAt: runDetail.Run.ScheduledAt, + FinishedAt: runDetail.Run.FinishedAt, } + sort.Sort(RunResourceReferenceSorter(expectedRun.ResourceReferences)) + sort.Sort(RunResourceReferenceSorter(runDetail.Run.ResourceReferences)) assert.Equal(t, expectedRun, runDetail.Run) } From da4acbbd73faaf60708ba10a988fecfd2f794535 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Thu, 28 May 2020 23:47:15 -0700 Subject: [PATCH 06/11] SDK - Python Components - Stop generating output saving code if no outputs (#3836) Removed dead code from the generated python command-line wrapper. --- sdk/python/kfp/components/_python_op.py | 42 +++++--- .../parallelfor_item_argument_resolving.yaml | 98 ------------------- 2 files changed, 26 insertions(+), 114 deletions(-) diff --git a/sdk/python/kfp/components/_python_op.py b/sdk/python/kfp/components/_python_op.py index 3d964840c0d..75c78ba427a 100644 --- a/sdk/python/kfp/components/_python_op.py +++ b/sdk/python/kfp/components/_python_op.py @@ -555,10 +555,13 @@ def get_serializer_and_register_definitions(type_name) -> str: arg_parse_code_lines = list(definitions) + arg_parse_code_lines - arg_parse_code_lines.extend([ + arg_parse_code_lines.append( '_parsed_args = vars(_parser.parse_args())', - '_output_files = _parsed_args.pop("_output_paths", [])', - ]) + ) + if outputs_passed_through_func_return_tuple: + arg_parse_code_lines.append( + '_output_files = _parsed_args.pop("_output_paths", [])', + ) # Putting singular return values in a list to be "zipped" with the serializers and output paths outputs_to_list_code = '' @@ -573,17 +576,7 @@ def get_serializer_and_register_definitions(type_name) -> str: output_serialization_code = ''.join(' {},\n'.format(s) for s in output_serialization_expression_strings) - full_source = \ -'''\ -{pre_func_code} - -{extra_code} - -{func_code} - -{arg_parse_code} - -_outputs = {func_name}(**_parsed_args) + full_output_handling_code = ''' {outputs_to_list_code} @@ -599,16 +592,33 @@ def get_serializer_and_register_definitions(type_name) -> str: pass with open(output_file, 'w') as f: f.write(_output_serializers[idx](_outputs[idx])) +'''.format( + output_serialization_code=output_serialization_code, + outputs_to_list_code=outputs_to_list_code, + ) + + full_source = \ +'''\ +{pre_func_code} + +{extra_code} + +{func_code} + +{arg_parse_code} + +_outputs = {func_name}(**_parsed_args) '''.format( func_name=func.__name__, func_code=func_code, pre_func_code=pre_func_code, extra_code=extra_code, arg_parse_code='\n'.join(arg_parse_code_lines), - 
output_serialization_code=output_serialization_code, - outputs_to_list_code=outputs_to_list_code, ) + if outputs_passed_through_func_return_tuple: + full_source += full_output_handling_code + #Removing consecutive blank lines import re full_source = re.sub('\n\n\n+', '\n\n', full_source).strip('\n') + '\n' diff --git a/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml b/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml index 3b270e17b12..0eec5e5ceaa 100644 --- a/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml +++ b/sdk/python/tests/compiler/testdata/parallelfor_item_argument_resolving.yaml @@ -27,22 +27,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -71,22 +57,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -115,22 +87,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -159,22 +117,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -203,22 +147,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = 
_parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -247,22 +177,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: @@ -291,22 +207,8 @@ spec: _parser = argparse.ArgumentParser(prog='Consume', description='') _parser.add_argument("--param1", dest="param1", type=str, required=True, default=argparse.SUPPRESS) _parsed_args = vars(_parser.parse_args()) - _output_files = _parsed_args.pop("_output_paths", []) _outputs = consume(**_parsed_args) - - _output_serializers = [ - - ] - - import os - for idx, output_file in enumerate(_output_files): - try: - os.makedirs(os.path.dirname(output_file)) - except OSError: - pass - with open(output_file, 'w') as f: - f.write(_output_serializers[idx](_outputs[idx])) image: "tensorflow/tensorflow:1.13.2-py3" inputs: parameters: From 1e2b9d4e7e22ba3337fd4f0690de46ca21ac8343 Mon Sep 17 00:00:00 2001 From: Jiaxiao Zheng Date: Fri, 29 May 2020 08:55:16 -0700 Subject: [PATCH 07/11] [SDK] Add first party component label (#3861) * add OOB component dict and utility function * add test * add a transformer, which appends the component name label * add transformer function, compiler and test * move telemetry test * fix none uri * applies comments * revert dependency on frozendict * fixes some tests * resolve comments --- .../kfp/compiler/_default_transformers.py | 49 +++++++++++++++++++ sdk/python/kfp/compiler/compiler.py | 3 +- sdk/python/tests/compiler/compiler_tests.py | 28 +++++++++++ 3 files changed, 79 insertions(+), 1 deletion(-) diff --git a/sdk/python/kfp/compiler/_default_transformers.py b/sdk/python/kfp/compiler/_default_transformers.py index bb33d212668..4c59555cfcf 100644 --- a/sdk/python/kfp/compiler/_default_transformers.py +++ b/sdk/python/kfp/compiler/_default_transformers.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import re from typing import Callable, Dict, Optional, Text + from ..dsl._container_op import BaseOp, ContainerOp # Pod label indicating the SDK type from which the pipeline is @@ -20,6 +22,16 @@ _SDK_ENV_LABEL = 'pipelines.kubeflow.org/pipeline-sdk-type' _SDK_ENV_DEFAULT = 'kfp' +# Common prefix of KFP OOB components url paths. +_OOB_COMPONENT_PATH_PREFIX = 'https://raw.githubusercontent.com/kubeflow/'\ + 'pipelines' + +# Key for component origin path pod label. +COMPONENT_PATH_LABEL_KEY = 'pipelines.kubeflow.org/component_origin_path' + +# Key for component spec digest pod label. 
+COMPONENT_DIGEST_LABEL_KEY = 'pipelines.kubeflow.org/component_digest' + def get_default_telemetry_labels() -> Dict[Text, Text]: """Returns the default pod labels for telemetry purpose.""" @@ -68,3 +80,40 @@ def _add_pod_labels(task): return task return _add_pod_labels + + +def _remove_suffix(string: Text, suffix: Text) -> Text: + """Removes the suffix from a string.""" + if suffix and string.endswith(suffix): + return string[:-len(suffix)] + else: + return string + + +def add_name_for_oob_components() -> Callable: + """Adds the OOB component name if applicable.""" + + def _add_name_for_oob_components(task): + # Detect the component origin uri in component_ref if exists, and + # attach the OOB component name as a pod label. + component_ref = getattr(task, '_component_ref', None) + if component_ref: + if component_ref.url: + origin_path = _remove_suffix( + component_ref.url, 'component.yaml').rstrip('/') + # Only include KFP OOB components. + if origin_path.startswith(_OOB_COMPONENT_PATH_PREFIX): + origin_path = origin_path.split('/', 7)[-1] + else: + return task + # Clean the label to comply with the k8s label convention. + origin_path = re.sub('[^-a-z0-9A-Z_.]', '.', origin_path) + origin_path_label = origin_path[-63:].strip('-_.') + task.add_pod_label(COMPONENT_PATH_LABEL_KEY, origin_path_label) + if component_ref.digest: + task.add_pod_label( + COMPONENT_DIGEST_LABEL_KEY, component_ref.digest) + + return task + + return _add_name_for_oob_components \ No newline at end of file diff --git a/sdk/python/kfp/compiler/compiler.py b/sdk/python/kfp/compiler/compiler.py index d6cbddb203f..bb70b45d488 100644 --- a/sdk/python/kfp/compiler/compiler.py +++ b/sdk/python/kfp/compiler/compiler.py @@ -27,7 +27,7 @@ from .. import dsl from ._k8s_helper import convert_k8s_obj_to_json, sanitize_k8s_name from ._op_to_template import _op_to_template -from ._default_transformers import add_pod_env, add_pod_labels, get_default_telemetry_labels +from ._default_transformers import add_pod_env, add_pod_labels, add_name_for_oob_components, get_default_telemetry_labels from ..components.structures import InputSpec from ..components._yaml_utils import dump_yaml @@ -836,6 +836,7 @@ def _create_workflow(self, if allow_telemetry: pod_labels = get_default_telemetry_labels() op_transformers.append(add_pod_labels(pod_labels)) + op_transformers.append(add_name_for_oob_components()) op_transformers.extend(pipeline_conf.op_transformers) diff --git a/sdk/python/tests/compiler/compiler_tests.py b/sdk/python/tests/compiler/compiler_tests.py index 593e5053622..3a175ed15f6 100644 --- a/sdk/python/tests/compiler/compiler_tests.py +++ b/sdk/python/tests/compiler/compiler_tests.py @@ -26,6 +26,8 @@ import unittest import yaml +from kfp import components +from kfp.compiler._default_transformers import COMPONENT_DIGEST_LABEL_KEY, COMPONENT_PATH_LABEL_KEY from kfp.dsl._component import component from kfp.dsl import ContainerOp, pipeline from kfp.dsl.types import Integer, InconsistentTypeException @@ -40,6 +42,11 @@ def some_op(): command=['sleep 1'], ) +_TEST_GCS_DOWNLOAD_COMPONENT_URL = 'https://raw.githubusercontent.com/kubeflow/'\ + 'pipelines/2dac60c400ad8767b452649d08f328df'\ + 'af230f96/components/google-cloud/storage/'\ + 'download/component.yaml' + class TestCompiler(unittest.TestCase): # Define the places of samples covered by unit tests. 
@@ -711,6 +718,27 @@ def some_pipeline(): container = template.get('container', None) if container: self.assertEqual(template['retryStrategy']['limit'], 5) + + def test_oob_component_label(self): + gcs_download_op = components.load_component_from_url( + _TEST_GCS_DOWNLOAD_COMPONENT_URL) + + @dsl.pipeline(name='some_pipeline') + def some_pipeline(): + _download_task = gcs_download_op('gs://some_bucket/some_dir/some_file') + + workflow_dict = compiler.Compiler()._compile(some_pipeline) + + found_download_task = False + for template in workflow_dict['spec']['templates']: + if template.get('container', None): + found_download_task = True + self.assertEqual( + template['metadata']['labels'][COMPONENT_PATH_LABEL_KEY], + 'google-cloud.storage.download') + self.assertIsNotNone( + template['metadata']['labels'].get(COMPONENT_DIGEST_LABEL_KEY)) + self.assertTrue(found_download_task, 'download task not found in workflow.') def test_image_pull_policy(self): def some_op(): From 37a63638c7afa3ac876588984ceb41d8e9ae3831 Mon Sep 17 00:00:00 2001 From: Nicholas Thomson Date: Fri, 29 May 2020 11:01:15 -0700 Subject: [PATCH 08/11] [AWS SageMaker] Add working FSx setup and test (#3831) * Add working FSx setup and test * Removed duplicate test function * Replaced failure return with exit * Update parallel methods to export * Update EKS cluster name outside parallel task * Add SKIP_FSX_TEST in buildspec * Add revoke security group ingress * Add default pytest FSx values --- .../codebuild/integration-test.buildspec.yml | 2 +- .../tests/integration_tests/.env.example | 5 +- .../component_tests/test_train_component.py | 3 +- .../tests/integration_tests/conftest.py | 36 +++++++++ .../tests/integration_tests/pytest.ini | 4 +- .../config/fsx-mnist-training/config.yaml | 36 +++++++++ .../resources/definition/training_pipeline.py | 4 + .../tests/integration_tests/scripts/fsx_setup | 79 +++++++++++++++++++ .../scripts/run_integration_tests | 44 ++++++++++- .../tests/integration_tests/utils/__init__.py | 17 ++++ 10 files changed, 223 insertions(+), 7 deletions(-) create mode 100644 components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml create mode 100755 components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup diff --git a/components/aws/sagemaker/codebuild/integration-test.buildspec.yml b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml index 09dafe53cd4..d40afba2a13 100644 --- a/components/aws/sagemaker/codebuild/integration-test.buildspec.yml +++ b/components/aws/sagemaker/codebuild/integration-test.buildspec.yml @@ -2,7 +2,7 @@ version: 0.2 env: variables: - CONTAINER_VARIABLES: "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI EKS_PRIVATE_SUBNETS EKS_PUBLIC_SUBNETS PYTEST_ADDOPTS S3_DATA_BUCKET EKS_EXISTING_CLUSTER SAGEMAKER_EXECUTION_ROLE_ARN REGION" + CONTAINER_VARIABLES: "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI EKS_PRIVATE_SUBNETS EKS_PUBLIC_SUBNETS PYTEST_ADDOPTS S3_DATA_BUCKET EKS_EXISTING_CLUSTER SAGEMAKER_EXECUTION_ROLE_ARN REGION SKIP_FSX_TESTS" phases: build: diff --git a/components/aws/sagemaker/tests/integration_tests/.env.example b/components/aws/sagemaker/tests/integration_tests/.env.example index 33c04cd60f8..b4c0aa92b68 100644 --- a/components/aws/sagemaker/tests/integration_tests/.env.example +++ b/components/aws/sagemaker/tests/integration_tests/.env.example @@ -9,4 +9,7 @@ SAGEMAKER_EXECUTION_ROLE_ARN=arn:aws:iam::123456789012:role/service-role/AmazonS S3_DATA_BUCKET=my-data-bucket # If you hope to use an existing EKS cluster, rather than 
creating a new one. -# EKS_EXISTING_CLUSTER=my-eks-cluster \ No newline at end of file +# EKS_EXISTING_CLUSTER=my-eks-cluster + +# If you would like to skip the FSx set-up and tests +# SKIP_FSX_TESTS=true \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py index 037350f0b3f..b4402d37125 100644 --- a/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py +++ b/components/aws/sagemaker/tests/integration_tests/component_tests/test_train_component.py @@ -12,7 +12,8 @@ [ pytest.param( "resources/config/simple-mnist-training", marks=pytest.mark.canary_test - ) + ), + pytest.param("resources/config/fsx-mnist-training", marks=pytest.mark.fsx_test), ], ) def test_trainingjob( diff --git a/components/aws/sagemaker/tests/integration_tests/conftest.py b/components/aws/sagemaker/tests/integration_tests/conftest.py index c022cadd2c0..475d2edd4bd 100644 --- a/components/aws/sagemaker/tests/integration_tests/conftest.py +++ b/components/aws/sagemaker/tests/integration_tests/conftest.py @@ -35,6 +35,24 @@ def pytest_addoption(parser): required=False, help="Cluster namespace where kubeflow pipelines is installed", ) + parser.addoption( + "--fsx-subnet", + required=False, + help="The subnet in which FSx is installed", + default="", + ) + parser.addoption( + "--fsx-security-group", + required=False, + help="The security group SageMaker should use when running the FSx test", + default="", + ) + parser.addoption( + "--fsx-id", + required=False, + help="The file system ID of the FSx instance", + default="", + ) @pytest.fixture(scope="session", autouse=True) @@ -67,6 +85,24 @@ def kfp_namespace(request): return request.config.getoption("--kfp-namespace") +@pytest.fixture(scope="session", autouse=True) +def fsx_subnet(request): + os.environ["FSX_SUBNET"] = request.config.getoption("--fsx-subnet") + return request.config.getoption("--fsx-subnet") + + +@pytest.fixture(scope="session", autouse=True) +def fsx_security_group(request): + os.environ["FSX_SECURITY_GROUP"] = request.config.getoption("--fsx-security-group") + return request.config.getoption("--fsx-security-group") + + +@pytest.fixture(scope="session", autouse=True) +def fsx_id(request): + os.environ["FSX_ID"] = request.config.getoption("--fsx-id") + return request.config.getoption("--fsx-id") + + @pytest.fixture(scope="session") def boto3_session(region): return boto3.Session(region_name=region) diff --git a/components/aws/sagemaker/tests/integration_tests/pytest.ini b/components/aws/sagemaker/tests/integration_tests/pytest.ini index b8b25ae727d..1aeed4a6a7f 100644 --- a/components/aws/sagemaker/tests/integration_tests/pytest.ini +++ b/components/aws/sagemaker/tests/integration_tests/pytest.ini @@ -1,4 +1,6 @@ [pytest] +junit_family = xunit2 addopts = -rA markers = - canary_test: test to be run as part of canaries. \ No newline at end of file + canary_test: test to be run as part of canaries. 
+ fsx_test: tests for FSx features \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml b/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml new file mode 100644 index 00000000000..48dafc6c50a --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/resources/config/fsx-mnist-training/config.yaml @@ -0,0 +1,36 @@ +PipelineDefinition: resources/definition/training_pipeline.py +TestName: fsx-mnist-training +Timeout: 3600 +ExpectedTrainingImage: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 +Arguments: + region: ((REGION)) + image: ((KMEANS_REGISTRY)).dkr.ecr.((REGION)).amazonaws.com/kmeans:1 + training_input_mode: File + hyperparameters: + k: "10" + feature_dim: "784" + channels: + - ChannelName: train + DataSource: + FileSystemDataSource: + FileSystemType: FSxLustre + FileSystemAccessMode: ro + FileSystemId: ((FSX_ID)) + DirectoryPath: /fsx/mnist_kmeans_example/input + CompressionType: None + ContentType: text/csv;label_size=0 + RecordWrapperType: None + InputMode: File + vpc_security_group_ids: ((FSX_SECURITY_GROUP)) + vpc_subnets: ((FSX_SUBNET)) + instance_type: ml.m5.xlarge + instance_count: 1 + volume_size: 50 + max_run_time: 3600 + model_artifact_path: s3://((DATA_BUCKET))/mnist_kmeans_example/output + network_isolation: "True" + traffic_encryption: "False" + spot_instance: "False" + max_wait_time: 3600 + checkpoint_config: "{}" + role: ((ROLE_ARN)) diff --git a/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py index ad8eab23bff..16e490f789c 100644 --- a/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py +++ b/components/aws/sagemaker/tests/integration_tests/resources/definition/training_pipeline.py @@ -25,6 +25,8 @@ def training_pipeline( spot_instance="", max_wait_time="", checkpoint_config="{}", + vpc_security_group_ids="", + vpc_subnets="", role="", ): sagemaker_train_op( @@ -45,6 +47,8 @@ def training_pipeline( spot_instance=spot_instance, max_wait_time=max_wait_time, checkpoint_config=checkpoint_config, + vpc_security_group_ids=vpc_security_group_ids, + vpc_subnets=vpc_subnets, role=role, ) diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup b/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup new file mode 100755 index 00000000000..3319b423534 --- /dev/null +++ b/components/aws/sagemaker/tests/integration_tests/scripts/fsx_setup @@ -0,0 +1,79 @@ +#!/usr/bin/env bash + +# Helper script that provides a set of methods to configure VPC, EFS and FSx +# ready for the full suite of integration tests. 
+ +function create_fsx_security_group() { + echo "[Creating FSx Security Group] Creating security group" + + IFS=',' read -r -a subnets_list <<< "$EKS_PRIVATE_SUBNETS" + local vpc_id="$(aws ec2 describe-subnets --subnet-ids "${subnets_list[0]}" \ + --output text --query "Subnets[0].VpcId" --region ${REGION})" + + local fsx_security_group="${DEPLOY_NAME}-fsx-sg" + FSX_SECURITY_GROUP_ID="$(aws ec2 create-security-group --region "${REGION}" \ + --vpc-id ${vpc_id} \ + --description "Security group for FSx in ${DEPLOY_NAME}" \ + --group-name "${fsx_security_group}" --output text --query "GroupId")" + + # Open FSx port to internal security group + aws ec2 authorize-security-group-ingress \ + --region "${REGION}" --group-id "${FSX_SECURITY_GROUP_ID}" \ + --protocol tcp --port 988 --source-group "${FSX_SECURITY_GROUP_ID}" + + echo "[Creating FSx Security Group] Created security group ${FSX_SECURITY_GROUP_ID}" +} + +function cleanup_fsx_security_group() { + if [ ! -z "${FSX_SECURITY_GROUP_ID}" ]; then + # You must remove any self-referencing ingress rules before deleting a SG + aws ec2 revoke-security-group-ingress --region "${REGION}" \ + --group-id "${FSX_SECURITY_GROUP_ID}" --protocol tcp --port 988 \ + --source-group "${FSX_SECURITY_GROUP_ID}" + + aws ec2 delete-security-group --group-id "${FSX_SECURITY_GROUP_ID}" --region "${REGION}" + fi +} + +# Creates a new FSX LUSTRE instance and automatically imports the data set from S3. +function create_fsx_instance() { + echo "[Creating FSx] Creating file system" + IFS=',' read -r -a subnets_list <<< "$EKS_PRIVATE_SUBNETS" + + local fs_id=$(aws fsx create-file-system \ + --file-system-type LUSTRE \ + --lustre-configuration ImportPath=s3://${S3_DATA_BUCKET}/mnist_kmeans_example \ + --storage-capacity 1200 \ + --subnet-ids "${subnets_list[0]}" \ + --security-group-ids "${FSX_SECURITY_GROUP_ID}" \ + --tags Key="Name",Value=fsx-integ-lustre \ + --region "${REGION}" \ + --output text \ + --query "FileSystem.FileSystemId") + + echo "[Creating FSx] Waiting for file system to be in state AVAILABLE" + + local fs_status="CREATING" + until [[ "${fs_status}" != "CREATING" ]]; do + fs_status="$(aws fsx describe-file-systems --region "${REGION}" --file-system-id ${fs_id} --output text --query "FileSystems[0].Lifecycle")" + sleep 10 + done + aws fsx --region "${REGION}" describe-file-systems --file-system-id ${fs_id} + + if [[ "${fs_status}" != "AVAILABLE" ]]; then + echo "[Creating FSx] FSx cluster never reached state 'Available'" + exit 1 + fi + + FSX_ID="${fs_id}" + + echo "[Creating FSx] File system now available as ${FSX_ID}" + + return 0 +} + +function delete_fsx_instance() { + if [ ! 
-z "${FSX_ID}" ]; then + aws fsx delete-file-system --file-system-id "${FSX_ID}" --region "${REGION}" + fi +} \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests index 6ad3fb9db28..3dfbabb8888 100755 --- a/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests +++ b/components/aws/sagemaker/tests/integration_tests/scripts/run_integration_tests @@ -9,6 +9,7 @@ usage(){ } cwd=$(dirname $(realpath $0)) +source "$cwd"/fsx_setup ### Input parameters DEPLOY_NAME="sagemaker-kfp-"$(date '+%Y-%m-%d-%H-%M-%S')"" # The name given to the entire deployment (tagging all resources) @@ -30,6 +31,8 @@ PYTEST_MARKER=${PYTEST_MARKER:-""} S3_DATA_BUCKET=${S3_DATA_BUCKET:-""} SAGEMAKER_EXECUTION_ROLE_ARN=${SAGEMAKER_EXECUTION_ROLE_ARN:-""} +SKIP_FSX_TESTS=${SKIP_FSX_TESTS:-"false"} + while getopts ":n:r:s:" opt; do case $opt in n) @@ -65,12 +68,25 @@ if [ "$S3_DATA_BUCKET" == "" ]; then exit 1 fi +if [[ "$SKIP_FSX_TESTS" == "false" && "$EKS_PRIVATE_SUBNETS" == "" ]]; then + echo "Missing EKS private subnets" + usage + exit 1 +fi + function cleanup() { set +e cleanup_kfp delete_generated_role + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + delete_fsx_instance + # Sleep in order for the security group to detach before attempting to delete it + sleep 15s + cleanup_fsx_security_group + fi + if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then delete_eks fi @@ -81,8 +97,6 @@ trap cleanup EXIT set -e function launch_eks() { - EKS_CLUSTER_NAME="${DEPLOY_NAME}-eks-cluster" - echo "[Creating EKS] Launching EKS cluster $EKS_CLUSTER_NAME" eksctl_args=( --managed --nodes "${EKS_NODE_COUNT}" --node-type=c5.xlarge --timeout=30m --region "${REGION}" --auto-kubeconfig --version "${EKS_CLUSTER_VERSION}" ) @@ -150,11 +164,26 @@ function cleanup_kfp() { } if [[ -z "${EKS_EXISTING_CLUSTER}" ]]; then - launch_eks + # Launch all of these in parallel to reduce start-up time + EKS_CLUSTER_NAME="${DEPLOY_NAME}-eks-cluster" + launch_eks & + + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + create_fsx_security_group + create_fsx_instance + fi + + wait else aws eks update-kubeconfig --name "${EKS_EXISTING_CLUSTER}" --region "$REGION" EKS_CLUSTER_NAME="${EKS_EXISTING_CLUSTER}" DEPLOY_NAME="${EKS_EXISTING_CLUSTER}" + + if [[ "${SKIP_FSX_TESTS}" == "false" ]]; then + create_fsx_security_group + create_fsx_instance + fi + wait fi generate_iam_role_name @@ -163,6 +192,15 @@ install_kfp install_generated_role pytest_args=( --region "${REGION}" --role-arn "${SAGEMAKER_EXECUTION_ROLE_ARN}" --s3-data-bucket "${S3_DATA_BUCKET}" --minio-service-port "${MINIO_LOCAL_PORT}" --kfp-namespace "${KFP_NAMESPACE}" ) + +if [[ "${SKIP_FSX_TESTS}" == "true" ]]; then + pytest_args+=( -m "not fsx_test" ) +else + # Get the VPC arguments for the FSx test + IFS=',' read -r -a private_subnets <<< "$EKS_PRIVATE_SUBNETS" + pytest_args+=( --fsx-subnet "${private_subnets[0]}" --fsx-security-group "${FSX_SECURITY_GROUP_ID}" --fsx-id "${FSX_ID}" ) +fi + [ ! 
-z "${PYTEST_MARKER}" ] && pytest_args+=( -m "${PYTEST_MARKER}" ) cd tests/integration_tests && python -m pytest "${pytest_args[@]}" --junitxml ./integration_tests.log -n $(nproc) \ No newline at end of file diff --git a/components/aws/sagemaker/tests/integration_tests/utils/__init__.py b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py index 7b3be9448d0..7e3e71c658e 100644 --- a/components/aws/sagemaker/tests/integration_tests/utils/__init__.py +++ b/components/aws/sagemaker/tests/integration_tests/utils/__init__.py @@ -30,6 +30,18 @@ def get_kfp_namespace(): return os.environ.get("NAMESPACE") +def get_fsx_subnet(): + return os.environ.get("FSX_SUBNET") + + +def get_fsx_security_group(): + return os.environ.get("FSX_SECURITY_GROUP") + + +def get_fsx_id(): + return os.environ.get("FSX_ID") + + def get_algorithm_image_registry(region, algorithm): return get_image_uri(region, algorithm).split(".")[0] @@ -61,12 +73,17 @@ def replace_placeholders(input_filename, output_filename): "((ROLE_ARN))": get_role_arn(), "((DATA_BUCKET))": get_s3_data_bucket(), "((KMEANS_REGISTRY))": get_algorithm_image_registry(region, "kmeans"), + "((FSX_ID))": get_fsx_id(), + "((FSX_SUBNET))": get_fsx_subnet(), + "((FSX_SECURITY_GROUP))": get_fsx_security_group(), } filedata = "" with open(input_filename, "r") as f: filedata = f.read() for replace_key, replace_value in variables_to_replace.items(): + if replace_value is None: + continue filedata = filedata.replace(replace_key, replace_value) with open(output_filename, "w") as f: From 3010e85bbedacd35bc3035fcbb3fb0f234107748 Mon Sep 17 00:00:00 2001 From: Alexey Volkov Date: Fri, 29 May 2020 12:05:34 -0700 Subject: [PATCH 09/11] SDK - Tests - Fixed the test_func_to_container_op_with_imported_func2 test case (#3837) --- sdk/python/kfp/components_tests/test_python_op.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/sdk/python/kfp/components_tests/test_python_op.py b/sdk/python/kfp/components_tests/test_python_op.py index 252335e936d..6d06c53a83b 100644 --- a/sdk/python/kfp/components_tests/test_python_op.py +++ b/sdk/python/kfp/components_tests/test_python_op.py @@ -257,11 +257,12 @@ def test_func_to_container_op_with_imported_func(self): self.helper_test_2_in_1_out_component_using_local_call(func, op) def test_func_to_container_op_with_imported_func2(self): - from .test_data.module2_which_depends_on_module1 import module2_func_with_deps as module2_func_with_deps - func = module2_func_with_deps + from .test_data import module1 + from .test_data import module2_which_depends_on_module1 + func = module2_which_depends_on_module1.module2_func_with_deps op = comp.func_to_container_op(func, use_code_pickling=True, modules_to_capture=[ - 'tests.components.test_data.module1', - 'tests.components.test_data.module2_which_depends_on_module1' + module1.__name__, # '*.components_tests.test_data.module1' + func.__module__, # '*.components_tests.test_data.module2_which_depends_on_module1' ]) self.helper_test_2_in_1_out_component_using_local_call(func, op) From 3cae116992ac87d3dbf7621fd12a07978640ee19 Mon Sep 17 00:00:00 2001 From: Jiaxin Shan Date: Sat, 30 May 2020 21:11:53 -0700 Subject: [PATCH 10/11] Generate clickable artifact url for s3 URI (#3531) * Generate clickable artifact url for s3 URI Signed-off-by: Jiaxin Shan * Format code using prettier@1.19.1 * Fix unit test failure * Use encoded string in bucket url --- frontend/src/components/ArtifactLink.tsx | 9 ++++++++- frontend/src/lib/Utils.test.ts | 9 +++++++++ 
frontend/src/lib/Utils.tsx | 20 ++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) diff --git a/frontend/src/components/ArtifactLink.tsx b/frontend/src/components/ArtifactLink.tsx index 6f3ab9b557a..e3e9be2c2fd 100644 --- a/frontend/src/components/ArtifactLink.tsx +++ b/frontend/src/components/ArtifactLink.tsx @@ -1,5 +1,9 @@ import * as React from 'react'; -import { generateGcsConsoleUri, generateMinioArtifactUrl } from '../lib/Utils'; +import { + generateGcsConsoleUri, + generateS3ArtifactUrl, + generateMinioArtifactUrl, +} from '../lib/Utils'; /** * A component that renders an artifact URL as clickable link if URL is correct @@ -12,6 +16,9 @@ export const ArtifactLink: React.FC<{ artifactUri?: string }> = ({ artifactUri } if (gcsConsoleUrl) { clickableUrl = gcsConsoleUrl; } + } + if (artifactUri.startsWith('s3:')) { + clickableUrl = generateS3ArtifactUrl(artifactUri); } else if (artifactUri.startsWith('http:') || artifactUri.startsWith('https:')) { clickableUrl = artifactUri; } else if (artifactUri.startsWith('minio:')) { diff --git a/frontend/src/lib/Utils.test.ts b/frontend/src/lib/Utils.test.ts index 5de4a894284..8da86efbe0e 100644 --- a/frontend/src/lib/Utils.test.ts +++ b/frontend/src/lib/Utils.test.ts @@ -19,6 +19,7 @@ import { enabledDisplayString, formatDateString, generateMinioArtifactUrl, + generateS3ArtifactUrl, getRunDuration, getRunDurationFromWorkflow, logger, @@ -253,4 +254,12 @@ describe('Utils', () => { expect(generateMinioArtifactUrl('ZZZ://my-bucket/a/b/c')).toBe(undefined); }); }); + + describe('generateS3ArtifactUrl', () => { + it('handles s3:// URIs', () => { + expect(generateS3ArtifactUrl('s3://my-bucket/a/b/c')).toBe( + 'artifacts/get?source=s3&bucket=my-bucket&key=a%2Fb%2Fc', + ); + }); + }); }); diff --git a/frontend/src/lib/Utils.tsx b/frontend/src/lib/Utils.tsx index a0ad3f4d2f5..451b55ae0eb 100644 --- a/frontend/src/lib/Utils.tsx +++ b/frontend/src/lib/Utils.tsx @@ -328,6 +328,26 @@ export function generateMinioArtifactUrl(minioUri: string, peek?: number): strin return generateArtifactUrl('minio', matches[1], matches[2], peek); } +const S3_URI_PREFIX = 's3://'; +/** + * Generates an HTTPS API URL from s3:// uri + * + * @param s3Uri S3 uri that starts with s3://, like s3://ml-pipeline/path/file + * @returns A URL that leads to the artifact data. Returns undefined when s3Uri is not valid. 
+ */ +export function generateS3ArtifactUrl(s3Uri: string): string | undefined { + if (!s3Uri.startsWith(S3_URI_PREFIX)) { + return undefined; + } + + // eslint-disable-next-line no-useless-escape + const matches = s3Uri.match(/^s3:\/\/([^\/]+)\/(.+)$/); + if (matches == null) { + return undefined; + } + return generateArtifactUrl('s3', matches[1], matches[2]); +} + export function buildQuery(queriesMap: { [key: string]: string | number | undefined }): string { const queryContent = Object.entries(queriesMap) .filter((entry): entry is [string, string | number] => entry[1] != null) From 286492591b41ae6c8b74d71363547ef827278d6d Mon Sep 17 00:00:00 2001 From: jingzhang36 Date: Mon, 1 Jun 2020 12:26:25 +0800 Subject: [PATCH 11/11] More documentation on backend API methods (#3758) * list experiment desc * changes should be made in proto * add comments and descriptions * comments/descriptions in run.proto * comments in job.proto and pipeline.proto * try starting a new line * newline doesnt help * add swagger gen'ed file * address comments * regenerate json and client via swagger * address comments * regenerate go_http_client and swagger from proto * two periods * re-generate --- backend/api/experiment.proto | 25 +++- backend/api/generate_api.sh | 2 +- .../archive_experiment_parameters.go | 5 +- .../experiment_service_client.go | 8 +- .../list_experiment_parameters.go | 14 ++- .../unarchive_experiment_parameters.go | 5 +- .../experiment_model/api_experiment.go | 2 +- .../job_service/list_jobs_parameters.go | 14 ++- .../job_model/api_list_jobs_response.go | 4 +- .../delete_pipeline_parameters.go | 5 +- .../delete_pipeline_version_parameters.go | 5 +- .../get_pipeline_parameters.go | 5 +- .../get_pipeline_version_parameters.go | 5 +- ...et_pipeline_version_template_parameters.go | 5 +- .../get_template_parameters.go | 5 +- .../list_pipeline_versions_parameters.go | 14 ++- .../list_pipelines_parameters.go | 14 ++- .../pipeline_service_client.go | 14 +-- .../api_get_template_response.go | 3 +- .../api_list_pipeline_versions_response.go | 4 +- .../api_list_pipelines_response.go | 4 +- .../go_http_client/pipeline_model/api_url.go | 2 +- .../run_service/archive_run_parameters.go | 5 +- .../run_service/delete_run_parameters.go | 5 +- .../run_service/get_run_parameters.go | 5 +- .../run_service/list_runs_parameters.go | 14 ++- .../run_service/retry_run_parameters.go | 5 +- .../run_service/run_service_client.go | 2 +- .../run_service/terminate_run_parameters.go | 5 +- .../run_service/unarchive_run_parameters.go | 5 +- .../run_model/api_list_runs_response.go | 4 +- .../api/go_http_client/run_model/api_run.go | 4 +- backend/api/job.proto | 25 ++-- backend/api/pipeline.proto | 53 +++++++- backend/api/run.proto | 39 ++++-- backend/api/swagger/experiment.swagger.json | 19 +-- backend/api/swagger/job.swagger.json | 16 ++- .../swagger/kfp_api_single_file.swagger.json | 115 ++++++++++++------ backend/api/swagger/pipeline.swagger.json | 41 +++++-- backend/api/swagger/run.swagger.json | 37 ++++-- 40 files changed, 414 insertions(+), 149 deletions(-) diff --git a/backend/api/experiment.proto b/backend/api/experiment.proto index 0ca43bbbc89..4c1429c6ca7 100644 --- a/backend/api/experiment.proto +++ b/backend/api/experiment.proto @@ -59,7 +59,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { }; service ExperimentService { - //Create a new experiment. + // Creates a new experiment. 
rpc CreateExperiment(CreateExperimentRequest) returns (Experiment) { option (google.api.http) = { post: "/apis/v1beta1/experiments" @@ -67,35 +67,38 @@ service ExperimentService { }; } - //Find a specific experiment by ID. + // Finds a specific experiment by ID. rpc GetExperiment(GetExperimentRequest) returns (Experiment) { option (google.api.http) = { get: "/apis/v1beta1/experiments/{id}" }; } - //Find all experiments. + // Finds all experiments. Supports pagination, and sorting on certain fields. rpc ListExperiment(ListExperimentsRequest) returns (ListExperimentsResponse) { option (google.api.http) = { get: "/apis/v1beta1/experiments" }; } - //Delete an experiment. + // Deletes an experiment without deleting the experiment's runs and jobs. To + // avoid unexpected behaviors, delete an experiment's runs and jobs before + // deleting the experiment. rpc DeleteExperiment(DeleteExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/experiments/{id}" }; } - //Archive an experiment. + // Archives an experiment and the experiment's runs and jobs. rpc ArchiveExperiment(ArchiveExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/experiments/{id}:archive" }; } - //Restore an archived experiment. + // Restores an archived experiment. The experiment's archived runs and jobs + // will stay archived. rpc UnarchiveExperiment(UnarchiveExperimentRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/experiments/{id}:unarchive" @@ -114,7 +117,14 @@ message GetExperimentRequest { } message ListExperimentsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListExperiment call or can be omitted when fetching the first page. string page_token = 1; + + // The number of experiments to be listed per page. If there are more + // experiments than this number, the response message will contain a + // nextPageToken field you can use to fetch the next page. int32 page_size = 2; // Can be format of "field_name", "field_name asc" or "field_name des" @@ -171,13 +181,16 @@ message Experiment { STORAGESTATE_ARCHIVED = 2; } + // Output. Specifies whether this experiment is in archived or available state. StorageState storage_state = 6; } message ArchiveExperimentRequest { + // The ID of the experiment to be archived. string id = 1; } message UnarchiveExperimentRequest { + // The ID of the experiment to be restored. string id = 1; } diff --git a/backend/api/generate_api.sh b/backend/api/generate_api.sh index 8937e235aa5..ab131af6866 100755 --- a/backend/api/generate_api.sh +++ b/backend/api/generate_api.sh @@ -57,7 +57,7 @@ jq -s ' reduce .[] as $item ({}; . * $item) | .info.title = "Kubeflow Pipelines API" | .info.description = "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition." | - .info.version = "0.1.38" + .info.version = "0.5.1" ' ${DIR}/swagger/{run,job,pipeline,experiment,pipeline.upload}.swagger.json > "${DIR}/swagger/kfp_api_single_file.swagger.json" # Generate Go HTTP client from the swagger files. 
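[Editor's note] The pagination contract documented in the proto changes above (a page_token acquired from the previous response's nextPageToken, a page_size bounding each page) is the same one the KFP Python SDK exposes. A minimal client-side sketch of that loop, assuming the `kfp` SDK is installed and that `http://ml-pipeline.kubeflow:8888` is a reachable API endpoint (both assumptions for illustration, not part of this patch):

    import kfp

    # Hypothetical endpoint; substitute your own KFP API host.
    client = kfp.Client(host='http://ml-pipeline.kubeflow:8888')

    page_token = ''  # an empty/omitted token fetches the first page
    while True:
        # list_experiments mirrors ListExperiment: page_token + page_size in,
        # experiments + next_page_token + total_size out.
        response = client.list_experiments(page_token=page_token, page_size=25)
        for experiment in response.experiments or []:
            print(experiment.id, experiment.name)
        page_token = response.next_page_token
        if not page_token:  # an empty nextPageToken marks the last page
            break
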
diff --git a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go index 471e6a5bfc6..276ca5340c6 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/archive_experiment_parameters.go @@ -75,7 +75,10 @@ for the archive experiment operation typically these are written to a http.Reque */ type ArchiveExperimentParams struct { - /*ID*/ + /*ID + The ID of the experiment to be archived. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go index 8544a203002..423ddcffe6e 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/experiment_service_client.go @@ -39,7 +39,7 @@ type Client struct { } /* -ArchiveExperiment archives an experiment +ArchiveExperiment archives an experiment and the experiment s runs and jobs */ func (a *Client) ArchiveExperiment(params *ArchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ArchiveExperimentOK, error) { // TODO: Validate the params before sending @@ -97,7 +97,7 @@ func (a *Client) CreateExperiment(params *CreateExperimentParams, authInfo runti } /* -DeleteExperiment deletes an experiment +DeleteExperiment deletes an experiment without deleting the experiment s runs and jobs to avoid unexpected behaviors delete an experiment s runs and jobs before deleting the experiment */ func (a *Client) DeleteExperiment(params *DeleteExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*DeleteExperimentOK, error) { // TODO: Validate the params before sending @@ -155,7 +155,7 @@ func (a *Client) GetExperiment(params *GetExperimentParams, authInfo runtime.Cli } /* -ListExperiment finds all experiments +ListExperiment finds all experiments supports pagination and sorting on certain fields */ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*ListExperimentOK, error) { // TODO: Validate the params before sending @@ -184,7 +184,7 @@ func (a *Client) ListExperiment(params *ListExperimentParams, authInfo runtime.C } /* -UnarchiveExperiment restores an archived experiment +UnarchiveExperiment restores an archived experiment the experiment s archived runs and jobs will stay archived */ func (a *Client) UnarchiveExperiment(params *UnarchiveExperimentParams, authInfo runtime.ClientAuthInfoWriter) (*UnarchiveExperimentOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go index e724148ca81..95f33123716 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/list_experiment_parameters.go @@ -95,9 +95,19 @@ type ListExperimentParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of experiments to be listed per page. 
If there are more + experiments than this number, the response message will contain a + nextPageToken field you can use to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListExperiment call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. diff --git a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go index 297a2959eb7..b8260fa4639 100644 --- a/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go +++ b/backend/api/go_http_client/experiment_client/experiment_service/unarchive_experiment_parameters.go @@ -75,7 +75,10 @@ for the unarchive experiment operation typically these are written to a http.Req */ type UnarchiveExperimentParams struct { - /*ID*/ + /*ID + The ID of the experiment to be restored. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/experiment_model/api_experiment.go b/backend/api/go_http_client/experiment_model/api_experiment.go index 3f7e7520537..cf4cdb627c9 100644 --- a/backend/api/go_http_client/experiment_model/api_experiment.go +++ b/backend/api/go_http_client/experiment_model/api_experiment.go @@ -50,7 +50,7 @@ type APIExperiment struct { // For Experiment, the only valid resource reference is a single Namespace. ResourceReferences []*APIResourceReference `json:"resource_references"` - // storage state + // Output. Specifies whether this experiment is in archived or available state. StorageState ExperimentStorageState `json:"storage_state,omitempty"` } diff --git a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go index 41f4d691118..e3737432438 100644 --- a/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go +++ b/backend/api/go_http_client/job_client/job_service/list_jobs_parameters.go @@ -95,9 +95,19 @@ type ListJobsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of jobs to be listed per page. If there are more jobs than this + number, the response message will contain a nextPageToken field you can use + to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListJobs call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. diff --git a/backend/api/go_http_client/job_model/api_list_jobs_response.go b/backend/api/go_http_client/job_model/api_list_jobs_response.go index 4af64499ce2..64464d46b42 100644 --- a/backend/api/go_http_client/job_model/api_list_jobs_response.go +++ b/backend/api/go_http_client/job_model/api_list_jobs_response.go @@ -35,10 +35,10 @@ type APIListJobsResponse struct { // A list of jobs returned. Jobs []*APIJob `json:"jobs"` - // next page token + // The token to list the next page of jobs. NextPageToken string `json:"next_page_token,omitempty"` - // total size + // The total number of jobs for the given query. 
TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go index 1c919efca4d..7da5fa3f8b2 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_parameters.go @@ -75,7 +75,10 @@ for the delete pipeline operation typically these are written to a http.Request */ type DeletePipelineParams struct { - /*ID*/ + /*ID + The ID of the pipeline to be deleted. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go index 6fbb9b4a235..5b38996ac31 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/delete_pipeline_version_parameters.go @@ -75,7 +75,10 @@ for the delete pipeline version operation typically these are written to a http. */ type DeletePipelineVersionParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version to be deleted. + + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go index b58421251b5..b48781282c0 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_parameters.go @@ -75,7 +75,10 @@ for the get pipeline operation typically these are written to a http.Request */ type GetPipelineParams struct { - /*ID*/ + /*ID + The ID of the pipeline to be retrieved. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go index 3ac3c316fa2..4c450131b7a 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_parameters.go @@ -75,7 +75,10 @@ for the get pipeline version operation typically these are written to a http.Req */ type GetPipelineVersionParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version to be retrieved. + + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go index 818718fd829..e925887db06 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_pipeline_version_template_parameters.go @@ -75,7 +75,10 @@ for the get pipeline version template operation typically these are written to a */ type GetPipelineVersionTemplateParams struct { - /*VersionID*/ + /*VersionID + The ID of the pipeline version whose template is to be retrieved. 
+ + */ VersionID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go index aa9ca0f3493..2f130281bd4 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/get_template_parameters.go @@ -75,7 +75,10 @@ for the get template operation typically these are written to a http.Request */ type GetTemplateParams struct { - /*ID*/ + /*ID + The ID of the pipeline whose template is to be retrieved. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go index 840d9ad0131..ca3e9b40a80 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipeline_versions_parameters.go @@ -94,9 +94,19 @@ type ListPipelineVersionsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of pipeline versions to be listed per page. If there are more + pipeline versions than this number, the response message will contain a + nextPageToken field you can use to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListPipelineVersions call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceKeyID The ID of the resource that referred to. diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go index d8ad913f541..0c004782af3 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/list_pipelines_parameters.go @@ -83,9 +83,19 @@ type ListPipelinesParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of pipelines to be listed per page. If there are more pipelines + than this number, the response message will contain a valid value in the + nextPageToken field. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListPipelines call. 
+ + */ PageToken *string /*SortBy Can be format of "field_name", "field_name asc" or "field_name des" diff --git a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go index 3f041cfd4bd..a516dcfa6f4 100644 --- a/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go +++ b/backend/api/go_http_client/pipeline_client/pipeline_service/pipeline_service_client.go @@ -39,7 +39,7 @@ type Client struct { } /* -CreatePipeline adds a pipeline +CreatePipeline creates a pipeline */ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineOK, error) { // TODO: Validate the params before sending @@ -68,7 +68,7 @@ func (a *Client) CreatePipeline(params *CreatePipelineParams, authInfo runtime.C } /* -CreatePipelineVersion create pipeline version API +CreatePipelineVersion adds a pipeline version to the specified pipeline */ func (a *Client) CreatePipelineVersion(params *CreatePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*CreatePipelineVersionOK, error) { // TODO: Validate the params before sending @@ -97,7 +97,7 @@ func (a *Client) CreatePipelineVersion(params *CreatePipelineVersionParams, auth } /* -DeletePipeline deletes a pipeline +DeletePipeline deletes a pipeline and its pipeline versions */ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineOK, error) { // TODO: Validate the params before sending @@ -126,7 +126,7 @@ func (a *Client) DeletePipeline(params *DeletePipelineParams, authInfo runtime.C } /* -DeletePipelineVersion delete pipeline version API +DeletePipelineVersion deletes a pipeline version by pipeline version ID if the deleted pipeline version is the default pipeline version the pipeline s default version changes to the pipeline s most recent pipeline version if there are no remaining pipeline versions the pipeline will have no default version examines the run service api ipynb notebook to learn more about creating a run using a pipeline version https github com kubeflow pipelines blob master tools benchmarks run service api ipynb */ func (a *Client) DeletePipelineVersion(params *DeletePipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*DeletePipelineVersionOK, error) { // TODO: Validate the params before sending @@ -184,7 +184,7 @@ func (a *Client) GetPipeline(params *GetPipelineParams, authInfo runtime.ClientA } /* -GetPipelineVersion get pipeline version API +GetPipelineVersion gets a pipeline version by pipeline version ID */ func (a *Client) GetPipelineVersion(params *GetPipelineVersionParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionOK, error) { // TODO: Validate the params before sending @@ -213,7 +213,7 @@ func (a *Client) GetPipelineVersion(params *GetPipelineVersionParams, authInfo r } /* -GetPipelineVersionTemplate get pipeline version template API +GetPipelineVersionTemplate returns a y a m l template that contains the specified pipeline version s description parameters and metadata */ func (a *Client) GetPipelineVersionTemplate(params *GetPipelineVersionTemplateParams, authInfo runtime.ClientAuthInfoWriter) (*GetPipelineVersionTemplateOK, error) { // TODO: Validate the params before sending @@ -271,7 +271,7 @@ func (a *Client) GetTemplate(params *GetTemplateParams, authInfo runtime.ClientA } /* -ListPipelineVersions list pipeline versions API 
+ListPipelineVersions lists all pipeline versions of a given pipeline */ func (a *Client) ListPipelineVersions(params *ListPipelineVersionsParams, authInfo runtime.ClientAuthInfoWriter) (*ListPipelineVersionsOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/pipeline_model/api_get_template_response.go b/backend/api/go_http_client/pipeline_model/api_get_template_response.go index b1ddc6420f2..4a1eeca8671 100644 --- a/backend/api/go_http_client/pipeline_model/api_get_template_response.go +++ b/backend/api/go_http_client/pipeline_model/api_get_template_response.go @@ -29,7 +29,8 @@ import ( // swagger:model apiGetTemplateResponse type APIGetTemplateResponse struct { - // template + // The template of the pipeline specified in a GetTemplate request, or of a + // pipeline version specified in a GetPipelinesVersionTemplate request. Template string `json:"template,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go index a741068ead5..bbec0593820 100644 --- a/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go +++ b/backend/api/go_http_client/pipeline_model/api_list_pipeline_versions_response.go @@ -32,10 +32,10 @@ import ( // swagger:model apiListPipelineVersionsResponse type APIListPipelineVersionsResponse struct { - // next page token + // The token to list the next page of pipeline versions. NextPageToken string `json:"next_page_token,omitempty"` - // total size + // The total number of pipeline versions for the given query. TotalSize int32 `json:"total_size,omitempty"` // versions diff --git a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go index a0b430061ee..89176d21dbc 100644 --- a/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go +++ b/backend/api/go_http_client/pipeline_model/api_list_pipelines_response.go @@ -32,13 +32,13 @@ import ( // swagger:model apiListPipelinesResponse type APIListPipelinesResponse struct { - // next page token + // The token to list the next page of pipelines. NextPageToken string `json:"next_page_token,omitempty"` // pipelines Pipelines []*APIPipeline `json:"pipelines"` - // total size + // The total number of pipelines for the given query. TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/pipeline_model/api_url.go b/backend/api/go_http_client/pipeline_model/api_url.go index f2b59f4cf56..d48a8ab071f 100644 --- a/backend/api/go_http_client/pipeline_model/api_url.go +++ b/backend/api/go_http_client/pipeline_model/api_url.go @@ -29,7 +29,7 @@ import ( // swagger:model apiUrl type APIURL struct { - // pipeline url + // URL of the pipeline definition or the pipeline version definition. PipelineURL string `json:"pipeline_url,omitempty"` } diff --git a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go index 6199fd7e4f4..4b7220bf0bb 100644 --- a/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/archive_run_parameters.go @@ -75,7 +75,10 @@ for the archive run operation typically these are written to a http.Request */ type ArchiveRunParams struct { - /*ID*/ + /*ID + The ID of the run to be archived. 
+ + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go index a69d318fc13..2083d995f1e 100644 --- a/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/delete_run_parameters.go @@ -75,7 +75,10 @@ for the delete run operation typically these are written to a http.Request */ type DeleteRunParams struct { - /*ID*/ + /*ID + The ID of the run to be deleted. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go index d25095d8a20..2deb3990f44 100644 --- a/backend/api/go_http_client/run_client/run_service/get_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/get_run_parameters.go @@ -75,7 +75,10 @@ for the get run operation typically these are written to a http.Request */ type GetRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be retrieved. + + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go index 6494854ae46..71821a2e376 100644 --- a/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/list_runs_parameters.go @@ -95,9 +95,19 @@ type ListRunsParams struct { */ Filter *string - /*PageSize*/ + /*PageSize + The number of runs to be listed per page. If there are more runs than this + number, the response message will contain a nextPageToken field you can use + to fetch the next page. + + */ PageSize *int32 - /*PageToken*/ + /*PageToken + A page token to request the next page of results. The token is acquried + from the nextPageToken field of the response from the previous + ListRuns call or can be omitted when fetching the first page. + + */ PageToken *string /*ResourceReferenceKeyID The ID of the resource that referred to. diff --git a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go index ebe257f8b14..c3b9da1c142 100644 --- a/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/retry_run_parameters.go @@ -75,7 +75,10 @@ for the retry run operation typically these are written to a http.Request */ type RetryRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be retried. 
+ + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/run_service_client.go b/backend/api/go_http_client/run_client/run_service/run_service_client.go index 215317ce101..f482d72d973 100644 --- a/backend/api/go_http_client/run_client/run_service/run_service_client.go +++ b/backend/api/go_http_client/run_client/run_service/run_service_client.go @@ -242,7 +242,7 @@ func (a *Client) ReportRunMetrics(params *ReportRunMetricsParams, authInfo runti } /* -RetryRun res initiate a failed or terminated run +RetryRun res initiates a failed or terminated run */ func (a *Client) RetryRun(params *RetryRunParams, authInfo runtime.ClientAuthInfoWriter) (*RetryRunOK, error) { // TODO: Validate the params before sending diff --git a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go index 8ed4662e9c6..ff67c325695 100644 --- a/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/terminate_run_parameters.go @@ -75,7 +75,10 @@ for the terminate run operation typically these are written to a http.Request */ type TerminateRunParams struct { - /*RunID*/ + /*RunID + The ID of the run to be terminated. + + */ RunID string timeout time.Duration diff --git a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go index f97c8bae979..88bc3e2bf7b 100644 --- a/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go +++ b/backend/api/go_http_client/run_client/run_service/unarchive_run_parameters.go @@ -75,7 +75,10 @@ for the unarchive run operation typically these are written to a http.Request */ type UnarchiveRunParams struct { - /*ID*/ + /*ID + The ID of the run to be restored. + + */ ID string timeout time.Duration diff --git a/backend/api/go_http_client/run_model/api_list_runs_response.go b/backend/api/go_http_client/run_model/api_list_runs_response.go index 7b929b8d8e0..a122cbf5f43 100644 --- a/backend/api/go_http_client/run_model/api_list_runs_response.go +++ b/backend/api/go_http_client/run_model/api_list_runs_response.go @@ -32,13 +32,13 @@ import ( // swagger:model apiListRunsResponse type APIListRunsResponse struct { - // next page token + // The token to list the next page of runs. NextPageToken string `json:"next_page_token,omitempty"` // runs Runs []*APIRun `json:"runs"` - // total size + // The total number of runs for the given query. TotalSize int32 `json:"total_size,omitempty"` } diff --git a/backend/api/go_http_client/run_model/api_run.go b/backend/api/go_http_client/run_model/api_run.go index 069cbdb845c..cc6402ce8d4 100644 --- a/backend/api/go_http_client/run_model/api_run.go +++ b/backend/api/go_http_client/run_model/api_run.go @@ -65,6 +65,8 @@ type APIRun struct { PipelineSpec *APIPipelineSpec `json:"pipeline_spec,omitempty"` // Optional input field. Specify which resource this run belongs to. + // When creating a run from a particular pipeline version, the pipeline + // version can be specified here. ResourceReferences []*APIResourceReference `json:"resource_references"` // Output. When this run is scheduled to run. This could be different from @@ -81,7 +83,7 @@ type APIRun struct { // One of [Pending, Running, Succeeded, Skipped, Failed, Error] Status string `json:"status,omitempty"` - // storage state + // Output. 
Specify whether this run is in archived or available mode. StorageState RunStorageState `json:"storage_state,omitempty"` } diff --git a/backend/api/job.proto b/backend/api/job.proto index 7910740a1fa..1678c23c8d7 100644 --- a/backend/api/job.proto +++ b/backend/api/job.proto @@ -63,7 +63,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { service JobService { - //Create a new job. + // Creates a new job. rpc CreateJob(CreateJobRequest) returns (Job) { option (google.api.http) = { post: "/apis/v1beta1/jobs" @@ -71,35 +71,35 @@ service JobService { }; } - //Find a specific job by ID. + // Finds a specific job by ID. rpc GetJob(GetJobRequest) returns (Job) { option (google.api.http) = { get: "/apis/v1beta1/jobs/{id}" }; } - //Find all jobs. + // Finds all jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/apis/v1beta1/jobs" }; } - //Restarts a job that was previously stopped. All runs associated with the job will continue. + // Restarts a job that was previously stopped. All runs associated with the job will continue. rpc EnableJob(EnableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/enable" }; } - //Stops a job and all its associated runs. The job is not deleted. + // Stops a job and all its associated runs. The job is not deleted. rpc DisableJob(DisableJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/jobs/{id}/disable" }; } - //Delete a job. + // Deletes a job. rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/jobs/{id}" @@ -118,7 +118,14 @@ message GetJobRequest { } message ListJobsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListJobs call or can be omitted when fetching the first page. string page_token = 1; + + // The number of jobs to be listed per page. If there are more jobs than this + // number, the response message will contain a nextPageToken field you can use + // to fetch the next page. int32 page_size = 2; // Can be format of "field_name", "field_name asc" or "field_name des". @@ -139,7 +146,11 @@ message ListJobsRequest { message ListJobsResponse { // A list of jobs returned. repeated Job jobs = 1; + + // The total number of jobs for the given query. int32 total_size = 3; + + // The token to list the next page of jobs. string next_page_token = 2; } @@ -252,4 +263,4 @@ message Job { // If false, the job will catch up on each past interval. bool no_catchup = 17; } -// Next field number of Job will be 19 \ No newline at end of file +// Next field number of Job will be 19 diff --git a/backend/api/pipeline.proto b/backend/api/pipeline.proto index e00133202fb..33cd20963d1 100644 --- a/backend/api/pipeline.proto +++ b/backend/api/pipeline.proto @@ -61,7 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { }; service PipelineService { - //Add a pipeline. + // Creates a pipeline. rpc CreatePipeline(CreatePipelineRequest) returns (Pipeline) { option (google.api.http) = { post: "/apis/v1beta1/pipelines" @@ -69,34 +69,35 @@ service PipelineService { }; } - //Find a specific pipeline by ID. + // Finds a specific pipeline by ID. rpc GetPipeline(GetPipelineRequest) returns (Pipeline) { option (google.api.http) = { get: "/apis/v1beta1/pipelines/{id}" }; } - //Find all pipelines. + // Finds all pipelines. 
rpc ListPipelines(ListPipelinesRequest) returns (ListPipelinesResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipelines" }; } - //Delete a pipeline. + // Deletes a pipeline and its pipeline versions. rpc DeletePipeline(DeletePipelineRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/pipelines/{id}" }; } - //Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. + // Returns a single YAML template that contains the description, parameters, and metadata associated with the pipeline provided. rpc GetTemplate(GetTemplateRequest) returns (GetTemplateResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipelines/{id}/templates" }; } + // Adds a pipeline version to the specified pipeline. rpc CreatePipelineVersion(CreatePipelineVersionRequest) returns (PipelineVersion) { option (google.api.http) = { @@ -105,12 +106,14 @@ service PipelineService { }; } + // Gets a pipeline version by pipeline version ID. rpc GetPipelineVersion(GetPipelineVersionRequest) returns (PipelineVersion) { option (google.api.http) = { get: "/apis/v1beta1/pipeline_versions/{version_id}" }; } + // Lists all pipeline versions of a given pipeline. rpc ListPipelineVersions(ListPipelineVersionsRequest) returns (ListPipelineVersionsResponse) { option (google.api.http) = { @@ -118,6 +121,12 @@ service PipelineService { }; } + // Deletes a pipeline version by pipeline version ID. If the deleted pipeline + // version is the default pipeline version, the pipeline's default version + // changes to the pipeline's most recent pipeline version. If there are no + // remaining pipeline versions, the pipeline will have no default version. + // Examines the run_service_api.ipynb notebook to learn more about creating a + // run using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb). rpc DeletePipelineVersion(DeletePipelineVersionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { @@ -125,6 +134,7 @@ service PipelineService { }; } + // Returns a YAML template that contains the specified pipeline version's description, parameters and metadata. rpc GetPipelineVersionTemplate(GetPipelineVersionTemplateRequest) returns (GetTemplateResponse) { option (google.api.http) = { get: "/apis/v1beta1/pipeline_versions/{version_id}/templates" @@ -133,6 +143,7 @@ service PipelineService { } message Url { + // URL of the pipeline definition or the pipeline version definition. string pipeline_url = 1; } @@ -144,12 +155,21 @@ message CreatePipelineRequest { } message GetPipelineRequest { + // The ID of the pipeline to be retrieved. string id = 1; } message ListPipelinesRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListPipelines call. string page_token = 1; + + // The number of pipelines to be listed per page. If there are more pipelines + // than this number, the response message will contain a valid value in the + // nextPageToken field. int32 page_size = 2; + // Can be format of "field_name", "field_name asc" or "field_name des" // Ascending by default. string sort_by = 3; @@ -162,23 +182,32 @@ message ListPipelinesRequest { message ListPipelinesResponse { repeated Pipeline pipelines = 1; + + // The total number of pipelines for the given query. int32 total_size = 3; + + // The token to list the next page of pipelines. 
string next_page_token = 2; } message DeletePipelineRequest { + // The ID of the pipeline to be deleted. string id = 1; } message GetTemplateRequest { + // The ID of the pipeline whose template is to be retrieved. string id = 1; } message GetTemplateResponse { + // The template of the pipeline specified in a GetTemplate request, or of a + // pipeline version specified in a GetPipelinesVersionTemplate request. string template = 1; } message GetPipelineVersionTemplateRequest { + // The ID of the pipeline version whose template is to be retrieved. string version_id = 1; } @@ -189,13 +218,22 @@ message CreatePipelineVersionRequest { } message GetPipelineVersionRequest { + // The ID of the pipeline version to be retrieved. string version_id = 1; } message ListPipelineVersionsRequest { // ResourceKey specifies the pipeline whose versions are to be listed. ResourceKey resource_key = 1; + + // The number of pipeline versions to be listed per page. If there are more + // pipeline versions than this number, the response message will contain a + // nextPageToken field you can use to fetch the next page. int32 page_size = 2; + + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListPipelineVersions call or can be omitted when fetching the first page. string page_token = 3; // Can be format of "field_name", "field_name asc" or "field_name des" @@ -208,11 +246,16 @@ message ListPipelineVersionsRequest { message ListPipelineVersionsResponse { repeated PipelineVersion versions = 1; + + // The token to list the next page of pipeline versions. string next_page_token = 2; + + // The total number of pipeline versions for the given query. int32 total_size = 3; } message DeletePipelineVersionRequest { + // The ID of the pipeline version to be deleted. string version_id = 1; } diff --git a/backend/api/run.proto b/backend/api/run.proto index 78698a3682e..44cfc35c265 100644 --- a/backend/api/run.proto +++ b/backend/api/run.proto @@ -61,7 +61,7 @@ option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = { service RunService { - //Create a new run. + // Creates a new run. rpc CreateRun(CreateRunRequest) returns (RunDetail) { option (google.api.http) = { post: "/apis/v1beta1/runs" @@ -69,35 +69,35 @@ service RunService { }; } - //Find a specific run by ID. + // Finds a specific run by ID. rpc GetRun(GetRunRequest) returns (RunDetail) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}" }; } - //Find all runs. + // Finds all runs. rpc ListRuns(ListRunsRequest) returns (ListRunsResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs" }; } - //Archive a run. + // Archives a run. rpc ArchiveRun(ArchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:archive" }; } - //Restore an archived run. + // Restores an archived run. rpc UnarchiveRun(UnarchiveRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{id}:unarchive" }; } - //Delete a run. + // Deletes a run. rpc DeleteRun(DeleteRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/apis/v1beta1/runs/{id}" @@ -116,21 +116,21 @@ service RunService { }; } - //Find a run's artifact data. + // Finds a run's artifact data. 
rpc ReadArtifact(ReadArtifactRequest) returns (ReadArtifactResponse) { option (google.api.http) = { get: "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read" }; } - //Terminate an active run. + // Terminates an active run. rpc TerminateRun(TerminateRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/terminate" }; } - //Re-initiate a failed or terminated run. + // Re-initiates a failed or terminated run. rpc RetryRun(RetryRunRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/apis/v1beta1/runs/{run_id}/retry" @@ -143,12 +143,21 @@ message CreateRunRequest { } message GetRunRequest { + // The ID of the run to be retrieved. string run_id = 1; } message ListRunsRequest { + // A page token to request the next page of results. The token is acquried + // from the nextPageToken field of the response from the previous + // ListRuns call or can be omitted when fetching the first page. string page_token = 1; + + // The number of runs to be listed per page. If there are more runs than this + // number, the response message will contain a nextPageToken field you can use + // to fetch the next page. int32 page_size = 2; + // Can be format of "field_name", "field_name asc" or "field_name des" // (Example, "name asc" or "id des"). Ascending by default. string sort_by = 3; @@ -165,28 +174,37 @@ message ListRunsRequest { } message TerminateRunRequest { + // The ID of the run to be terminated. string run_id = 1; } message RetryRunRequest { + // The ID of the run to be retried. string run_id = 1; } message ListRunsResponse { repeated Run runs = 1; + + // The total number of runs for the given query. int32 total_size = 3; + + // The token to list the next page of runs. string next_page_token = 2; } message ArchiveRunRequest { + // The ID of the run to be archived. string id = 1; } message UnarchiveRunRequest { + // The ID of the run to be restored. string id = 1; } message DeleteRunRequest { + // The ID of the run to be deleted. string id = 1; } @@ -203,6 +221,7 @@ message Run { STORAGESTATE_ARCHIVED = 1; } + // Output. Specify whether this run is in archived or available mode. StorageState storage_state = 10; // Optional input field. Describing the purpose of the run @@ -213,6 +232,8 @@ message Run { PipelineSpec pipeline_spec = 4; // Optional input field. Specify which resource this run belongs to. + // When creating a run from a particular pipeline version, the pipeline + // version can be specified here. repeated ResourceReference resource_references = 5; // Optional input field. Specify which Kubernetes service account this run uses. diff --git a/backend/api/swagger/experiment.swagger.json b/backend/api/swagger/experiment.swagger.json index 8c5976150a1..3da02768f32 100644 --- a/backend/api/swagger/experiment.swagger.json +++ b/backend/api/swagger/experiment.swagger.json @@ -17,7 +17,7 @@ "paths": { "/apis/v1beta1/experiments": { "get": { - "summary": "Find all experiments.", + "summary": "Finds all experiments. Supports pagination, and sorting on certain fields.", "operationId": "ListExperiment", "responses": { "200": { @@ -36,12 +36,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. 
The token is acquried\nfrom the nextPageToken field of the response from the previous\nListExperiment call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of experiments to be listed per page. If there are more\nexperiments than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -90,7 +92,7 @@ ] }, "post": { - "summary": "Create a new experiment.", + "summary": "Creates a new experiment.", "operationId": "CreateExperiment", "responses": { "200": { @@ -124,7 +126,7 @@ }, "/apis/v1beta1/experiments/{id}": { "get": { - "summary": "Find a specific experiment by ID.", + "summary": "Finds a specific experiment by ID.", "operationId": "GetExperiment", "responses": { "200": { @@ -154,7 +156,7 @@ ] }, "delete": { - "summary": "Delete an experiment.", + "summary": "Deletes an experiment without deleting the experiment's runs and jobs. To\navoid unexpected behaviors, delete an experiment's runs and jobs before\ndeleting the experiment.", "operationId": "DeleteExperiment", "responses": { "200": { @@ -186,7 +188,7 @@ }, "/apis/v1beta1/experiments/{id}:archive": { "post": { - "summary": "Archive an experiment.", + "summary": "Archives an experiment and the experiment's runs and jobs.", "operationId": "ArchiveExperiment", "responses": { "200": { @@ -205,6 +207,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be archived.", "in": "path", "required": true, "type": "string" @@ -217,7 +220,7 @@ }, "/apis/v1beta1/experiments/{id}:unarchive": { "post": { - "summary": "Restore an archived experiment.", + "summary": "Restores an archived experiment. The experiment's archived runs and jobs\nwill stay archived.", "operationId": "UnarchiveExperiment", "responses": { "200": { @@ -236,6 +239,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the experiment to be restored.", "in": "path", "required": true, "type": "string" @@ -285,7 +289,8 @@ "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace." }, "storage_state": { - "$ref": "#/definitions/ExperimentStorageState" + "$ref": "#/definitions/ExperimentStorageState", + "description": "Output. Specifies whether this experiment is in archived or available state." } } }, diff --git a/backend/api/swagger/job.swagger.json b/backend/api/swagger/job.swagger.json index e71c7ffd941..3efa3a3e2e3 100644 --- a/backend/api/swagger/job.swagger.json +++ b/backend/api/swagger/job.swagger.json @@ -17,7 +17,7 @@ "paths": { "/apis/v1beta1/jobs": { "get": { - "summary": "Find all jobs.", + "summary": "Finds all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -36,12 +36,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListJobs call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of jobs to be listed per page. 
If there are more jobs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -90,7 +92,7 @@ ] }, "post": { - "summary": "Create a new job.", + "summary": "Creates a new job.", "operationId": "CreateJob", "responses": { "200": { @@ -124,7 +126,7 @@ }, "/apis/v1beta1/jobs/{id}": { "get": { - "summary": "Find a specific job by ID.", + "summary": "Finds a specific job by ID.", "operationId": "GetJob", "responses": { "200": { @@ -154,7 +156,7 @@ ] }, "delete": { - "summary": "Delete a job.", + "summary": "Deletes a job.", "operationId": "DeleteJob", "responses": { "200": { @@ -364,10 +366,12 @@ }, "total_size": { "type": "integer", - "format": "int32" + "format": "int32", + "description": "The total number of jobs for the given query." }, "next_page_token": { - "type": "string" + "type": "string", + "description": "The token to list the next page of jobs." } } }, diff --git a/backend/api/swagger/kfp_api_single_file.swagger.json b/backend/api/swagger/kfp_api_single_file.swagger.json index 86f1df96fbf..38c1ed68b7c 100644 --- a/backend/api/swagger/kfp_api_single_file.swagger.json +++ b/backend/api/swagger/kfp_api_single_file.swagger.json @@ -2,7 +2,7 @@ "swagger": "2.0", "info": { "title": "Kubeflow Pipelines API", - "version": "0.1.38", + "version": "0.5.1", "description": "This file contains REST API specification for Kubeflow Pipelines. The file is autogenerated from the swagger definition." }, "schemes": [ @@ -18,7 +18,7 @@ "paths": { "/apis/v1beta1/runs": { "get": { - "summary": "Find all runs.", + "summary": "Finds all runs.", "operationId": "ListRuns", "responses": { "200": { @@ -37,12 +37,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListRuns call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of runs to be listed per page. 
If there are more runs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.", "in": "query", "required": false, "type": "integer", @@ -91,7 +93,7 @@ ] }, "post": { - "summary": "Create a new run.", + "summary": "Creates a new run.", "operationId": "CreateRun", "responses": { "200": { @@ -124,7 +126,7 @@ }, "/apis/v1beta1/runs/{id}": { "delete": { - "summary": "Delete a run.", + "summary": "Deletes a run.", "operationId": "DeleteRun", "responses": { "200": { @@ -143,6 +145,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be deleted.", "in": "path", "required": true, "type": "string" @@ -155,7 +158,7 @@ }, "/apis/v1beta1/runs/{id}:archive": { "post": { - "summary": "Archive a run.", + "summary": "Archives a run.", "operationId": "ArchiveRun", "responses": { "200": { @@ -174,6 +177,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be archived.", "in": "path", "required": true, "type": "string" @@ -186,7 +190,7 @@ }, "/apis/v1beta1/runs/{id}:unarchive": { "post": { - "summary": "Restore an archived run.", + "summary": "Restores an archived run.", "operationId": "UnarchiveRun", "responses": { "200": { @@ -205,6 +209,7 @@ "parameters": [ { "name": "id", + "description": "The ID of the run to be restored.", "in": "path", "required": true, "type": "string" @@ -217,7 +222,7 @@ }, "/apis/v1beta1/runs/{run_id}": { "get": { - "summary": "Find a specific run by ID.", + "summary": "Finds a specific run by ID.", "operationId": "GetRun", "responses": { "200": { @@ -236,6 +241,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retrieved.", "in": "path", "required": true, "type": "string" @@ -248,7 +254,7 @@ }, "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": { "get": { - "summary": "Find a run's artifact data.", + "summary": "Finds a run's artifact data.", "operationId": "ReadArtifact", "responses": { "200": { @@ -294,7 +300,7 @@ }, "/apis/v1beta1/runs/{run_id}/retry": { "post": { - "summary": "Re-initiate a failed or terminated run.", + "summary": "Re-initiates a failed or terminated run.", "operationId": "RetryRun", "responses": { "200": { @@ -313,6 +319,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be retried.", "in": "path", "required": true, "type": "string" @@ -325,7 +332,7 @@ }, "/apis/v1beta1/runs/{run_id}/terminate": { "post": { - "summary": "Terminate an active run.", + "summary": "Terminates an active run.", "operationId": "TerminateRun", "responses": { "200": { @@ -344,6 +351,7 @@ "parameters": [ { "name": "run_id", + "description": "The ID of the run to be terminated.", "in": "path", "required": true, "type": "string" @@ -396,7 +404,7 @@ }, "/apis/v1beta1/jobs": { "get": { - "summary": "Find all jobs.", + "summary": "Finds all jobs.", "operationId": "ListJobs", "responses": { "200": { @@ -415,12 +423,14 @@ "parameters": [ { "name": "page_token", + "description": "A page token to request the next page of results. The token is acquried\nfrom the nextPageToken field of the response from the previous\nListJobs call or can be omitted when fetching the first page.", "in": "query", "required": false, "type": "string" }, { "name": "page_size", + "description": "The number of jobs to be listed per page. 
@@ -396,7 +404,7 @@
     },
     "/apis/v1beta1/jobs": {
       "get": {
-        "summary": "Find all jobs.",
+        "summary": "Finds all jobs.",
         "operationId": "ListJobs",
         "responses": {
           "200": {
@@ -415,12 +423,14 @@
         "parameters": [
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListJobs call or can be omitted when fetching the first page.",
             "in": "query",
             "required": false,
             "type": "string"
           },
           {
             "name": "page_size",
+            "description": "The number of jobs to be listed per page. If there are more jobs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -469,7 +479,7 @@
         ]
       },
       "post": {
-        "summary": "Create a new job.",
+        "summary": "Creates a new job.",
         "operationId": "CreateJob",
         "responses": {
           "200": {
@@ -503,7 +513,7 @@
     },
     "/apis/v1beta1/jobs/{id}": {
       "get": {
-        "summary": "Find a specific job by ID.",
+        "summary": "Finds a specific job by ID.",
         "operationId": "GetJob",
         "responses": {
           "200": {
@@ -533,7 +543,7 @@
         ]
       },
       "delete": {
-        "summary": "Delete a job.",
+        "summary": "Deletes a job.",
         "operationId": "DeleteJob",
         "responses": {
           "200": {
@@ -629,6 +639,7 @@
     },
     "/apis/v1beta1/pipeline_versions": {
       "get": {
+        "summary": "Lists all pipeline versions of a given pipeline.",
         "operationId": "ListPipelineVersions",
         "responses": {
           "200": {
@@ -670,6 +681,7 @@
           },
           {
             "name": "page_size",
+            "description": "The number of pipeline versions to be listed per page. If there are more\npipeline versions than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -677,6 +689,7 @@
           },
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListPipelineVersions call or can be omitted when fetching the first page.",
             "in": "query",
             "required": false,
             "type": "string"
@@ -701,6 +714,7 @@
         ]
       },
       "post": {
+        "summary": "Adds a pipeline version to the specified pipeline.",
         "operationId": "CreatePipelineVersion",
         "responses": {
           "200": {
@@ -734,6 +748,7 @@
     },
     "/apis/v1beta1/pipeline_versions/{version_id}": {
       "get": {
+        "summary": "Gets a pipeline version by pipeline version ID.",
         "operationId": "GetPipelineVersion",
         "responses": {
           "200": {
@@ -752,6 +767,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
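GetPipelineVersion above is a plain GET keyed by the pipeline version ID. A minimal sketch; API_HOST is a placeholder.

import requests

API_HOST = 'http://localhost:8888'  # Placeholder KFP API server address.

def get_pipeline_version(version_id: str) -> dict:
    """Fetches one pipeline version by ID via the endpoint documented above."""
    response = requests.get(API_HOST + '/apis/v1beta1/pipeline_versions/' + version_id)
    response.raise_for_status()
    return response.json()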
@@ -762,6 +778,7 @@
         ]
       },
       "delete": {
+        "summary": "Deletes a pipeline version by pipeline version ID. If the deleted pipeline\nversion is the default pipeline version, the pipeline's default version\nchanges to the pipeline's most recent pipeline version. If there are no\nremaining pipeline versions, the pipeline will have no default version.\nExamine the run_service_api.ipynb notebook to learn more about creating a\nrun using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb).",
         "operationId": "DeletePipelineVersion",
         "responses": {
           "200": {
@@ -780,6 +797,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version to be deleted.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -792,6 +810,7 @@
     },
     "/apis/v1beta1/pipeline_versions/{version_id}/templates": {
       "get": {
+        "summary": "Returns a YAML template that contains the specified pipeline version's description, parameters and metadata.",
         "operationId": "GetPipelineVersionTemplate",
         "responses": {
           "200": {
@@ -810,6 +829,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version whose template is to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -822,7 +842,7 @@
     },
     "/apis/v1beta1/pipelines": {
       "get": {
-        "summary": "Find all pipelines.",
+        "summary": "Finds all pipelines.",
         "operationId": "ListPipelines",
         "responses": {
           "200": {
@@ -841,12 +861,14 @@
         "parameters": [
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListPipelines call.",
             "in": "query",
             "required": false,
             "type": "string"
           },
           {
             "name": "page_size",
+            "description": "The number of pipelines to be listed per page. If there are more pipelines\nthan this number, the response message will contain a valid value in the\nnextPageToken field.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -872,7 +894,7 @@
         ]
       },
       "post": {
-        "summary": "Add a pipeline.",
+        "summary": "Creates a pipeline.",
         "operationId": "CreatePipeline",
         "responses": {
           "200": {
@@ -905,7 +927,7 @@
     },
     "/apis/v1beta1/pipelines/{id}": {
       "get": {
-        "summary": "Find a specific pipeline by ID.",
+        "summary": "Finds a specific pipeline by ID.",
         "operationId": "GetPipeline",
         "responses": {
           "200": {
@@ -924,6 +946,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -934,7 +957,7 @@
         ]
       },
       "delete": {
-        "summary": "Delete a pipeline.",
+        "summary": "Deletes a pipeline and its pipeline versions.",
         "operationId": "DeletePipeline",
         "responses": {
           "200": {
@@ -953,6 +976,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline to be deleted.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -984,6 +1008,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline whose template is to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
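Both template endpoints above (.../pipelines/{id}/templates and .../pipeline_versions/{version_id}/templates) return the workflow YAML inside a single template string field, per the GetTemplateResponse definition later in this file. A sketch for the pipeline-version variant; API_HOST is a placeholder.

import requests

API_HOST = 'http://localhost:8888'  # Placeholder KFP API server address.

def get_pipeline_version_template(version_id: str) -> str:
    """Returns the pipeline version's workflow template as one YAML string."""
    response = requests.get(
        API_HOST + '/apis/v1beta1/pipeline_versions/' + version_id + '/templates'
    )
    response.raise_for_status()
    return response.json()['template']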
@@ -996,7 +1021,7 @@
     },
     "/apis/v1beta1/experiments": {
       "get": {
-        "summary": "Find all experiments.",
+        "summary": "Finds all experiments. Supports pagination, and sorting on certain fields.",
         "operationId": "ListExperiment",
         "responses": {
           "200": {
@@ -1015,12 +1040,14 @@
         "parameters": [
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListExperiment call or can be omitted when fetching the first page.",
             "in": "query",
             "required": false,
             "type": "string"
           },
           {
             "name": "page_size",
+            "description": "The number of experiments to be listed per page. If there are more\nexperiments than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -1069,7 +1096,7 @@
         ]
       },
       "post": {
-        "summary": "Create a new experiment.",
+        "summary": "Creates a new experiment.",
         "operationId": "CreateExperiment",
         "responses": {
           "200": {
@@ -1103,7 +1130,7 @@
     },
     "/apis/v1beta1/experiments/{id}": {
       "get": {
-        "summary": "Find a specific experiment by ID.",
+        "summary": "Finds a specific experiment by ID.",
         "operationId": "GetExperiment",
         "responses": {
           "200": {
@@ -1133,7 +1160,7 @@
         ]
       },
       "delete": {
-        "summary": "Delete an experiment.",
+        "summary": "Deletes an experiment without deleting the experiment's runs and jobs. To\navoid unexpected behaviors, delete an experiment's runs and jobs before\ndeleting the experiment.",
         "operationId": "DeleteExperiment",
         "responses": {
           "200": {
@@ -1165,7 +1192,7 @@
     },
     "/apis/v1beta1/experiments/{id}:archive": {
       "post": {
-        "summary": "Archive an experiment.",
+        "summary": "Archives an experiment and the experiment's runs and jobs.",
         "operationId": "ArchiveExperiment",
         "responses": {
           "200": {
@@ -1184,6 +1211,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the experiment to be archived.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -1196,7 +1224,7 @@
     },
     "/apis/v1beta1/experiments/{id}:unarchive": {
       "post": {
-        "summary": "Restore an archived experiment.",
+        "summary": "Restores an archived experiment. The experiment's archived runs and jobs\nwill stay archived.",
         "operationId": "UnarchiveExperiment",
         "responses": {
           "200": {
@@ -1215,6 +1243,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the experiment to be restored.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -1387,10 +1416,12 @@
       },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of runs for the given query."
       },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of runs."
      }
    }
  },
@@ -1545,7 +1576,8 @@
         "description": "Required input field. Name provided by user,\nor auto generated if run is created by scheduled job. Not unique."
       },
       "storage_state": {
-        "$ref": "#/definitions/RunStorageState"
+        "$ref": "#/definitions/RunStorageState",
+        "description": "Output. Specifies whether this run is in archived or available state."
       },
       "description": {
         "type": "string",
@@ -1560,7 +1592,7 @@
         "items": {
           "$ref": "#/definitions/apiResourceReference"
         },
-        "description": "Optional input field. Specify which resource this run belongs to."
+        "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here."
       },
       "service_account": {
         "type": "string",
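The resource_references description just above allows a run to name the pipeline version it should be created from. A hedged sketch of such a CreateRun request; the exact reference shape (the key type and relationship values) follows the apiResourceReference definition and is an assumption here, not something this hunk spells out. API_HOST and VERSION_ID are placeholders.

import requests

API_HOST = 'http://localhost:8888'  # Placeholder KFP API server address.
VERSION_ID = 'your-version-id'      # Placeholder pipeline version ID.

run_payload = {
    'name': 'run-from-pipeline-version',
    'resource_references': [
        {
            # Assumed shape: point the run at the pipeline version that
            # defines its workflow.
            'key': {'type': 'PIPELINE_VERSION', 'id': VERSION_ID},
            'relationship': 'CREATOR',
        }
    ],
}
response = requests.post(API_HOST + '/apis/v1beta1/runs', json=run_payload)
response.raise_for_status()
print(response.json())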
@@ -1778,10 +1810,12 @@
       },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of jobs for the given query."
       },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of jobs."
      }
    }
  },
@@ -1822,7 +1856,8 @@
     "type": "object",
     "properties": {
       "template": {
-        "type": "string"
+        "type": "string",
+        "description": "The template of the pipeline specified in a GetTemplate request, or of a\npipeline version specified in a GetPipelineVersionTemplate request."
      }
    }
  },
@@ -1836,11 +1871,13 @@
        }
      },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of pipeline versions."
       },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of pipeline versions for the given query."
      }
    }
  },
@@ -1855,10 +1892,12 @@
      },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of pipelines for the given query."
       },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of pipelines."
      }
    }
  },
@@ -1948,7 +1987,8 @@
     "type": "object",
     "properties": {
       "pipeline_url": {
-        "type": "string"
+        "type": "string",
+        "description": "URL of the pipeline definition or the pipeline version definition."
      }
    }
  },
@@ -1989,7 +2029,8 @@
         "description": "Optional input field. Specify which resource this run belongs to.\nFor Experiment, the only valid resource reference is a single Namespace."
       },
       "storage_state": {
-        "$ref": "#/definitions/ExperimentStorageState"
+        "$ref": "#/definitions/ExperimentStorageState",
+        "description": "Output. Specifies whether this experiment is in archived or available state."
      }
    }
  },
diff --git a/backend/api/swagger/pipeline.swagger.json b/backend/api/swagger/pipeline.swagger.json
index f83c5f06def..9aa7f7e0d29 100644
--- a/backend/api/swagger/pipeline.swagger.json
+++ b/backend/api/swagger/pipeline.swagger.json
@@ -17,6 +17,7 @@
   "paths": {
     "/apis/v1beta1/pipeline_versions": {
       "get": {
+        "summary": "Lists all pipeline versions of a given pipeline.",
         "operationId": "ListPipelineVersions",
         "responses": {
           "200": {
@@ -58,6 +59,7 @@
           },
           {
             "name": "page_size",
+            "description": "The number of pipeline versions to be listed per page. If there are more\npipeline versions than this number, the response message will contain a\nnextPageToken field you can use to fetch the next page.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -65,6 +67,7 @@
           },
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListPipelineVersions call or can be omitted when fetching the first page.",
             "in": "query",
             "required": false,
             "type": "string"
@@ -89,6 +92,7 @@
         ]
       },
       "post": {
+        "summary": "Adds a pipeline version to the specified pipeline.",
         "operationId": "CreatePipelineVersion",
         "responses": {
           "200": {
@@ -122,6 +126,7 @@
     },
     "/apis/v1beta1/pipeline_versions/{version_id}": {
       "get": {
+        "summary": "Gets a pipeline version by pipeline version ID.",
         "operationId": "GetPipelineVersion",
         "responses": {
           "200": {
@@ -140,6 +145,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -150,6 +156,7 @@
         ]
       },
       "delete": {
+        "summary": "Deletes a pipeline version by pipeline version ID. If the deleted pipeline\nversion is the default pipeline version, the pipeline's default version\nchanges to the pipeline's most recent pipeline version. If there are no\nremaining pipeline versions, the pipeline will have no default version.\nExamine the run_service_api.ipynb notebook to learn more about creating a\nrun using a pipeline version (https://github.com/kubeflow/pipelines/blob/master/tools/benchmarks/run_service_api.ipynb).",
         "operationId": "DeletePipelineVersion",
         "responses": {
           "200": {
@@ -168,6 +175,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version to be deleted.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -180,6 +188,7 @@
     },
     "/apis/v1beta1/pipeline_versions/{version_id}/templates": {
       "get": {
+        "summary": "Returns a YAML template that contains the specified pipeline version's description, parameters and metadata.",
         "operationId": "GetPipelineVersionTemplate",
         "responses": {
           "200": {
@@ -198,6 +207,7 @@
         "parameters": [
           {
             "name": "version_id",
+            "description": "The ID of the pipeline version whose template is to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -210,7 +220,7 @@
     },
     "/apis/v1beta1/pipelines": {
       "get": {
-        "summary": "Find all pipelines.",
+        "summary": "Finds all pipelines.",
         "operationId": "ListPipelines",
         "responses": {
           "200": {
@@ -229,12 +239,14 @@
         "parameters": [
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListPipelines call.",
             "in": "query",
             "required": false,
             "type": "string"
           },
           {
             "name": "page_size",
+            "description": "The number of pipelines to be listed per page. If there are more pipelines\nthan this number, the response message will contain a valid value in the\nnextPageToken field.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -260,7 +272,7 @@
         ]
       },
       "post": {
-        "summary": "Add a pipeline.",
+        "summary": "Creates a pipeline.",
         "operationId": "CreatePipeline",
         "responses": {
           "200": {
@@ -293,7 +305,7 @@
     },
     "/apis/v1beta1/pipelines/{id}": {
       "get": {
-        "summary": "Find a specific pipeline by ID.",
+        "summary": "Finds a specific pipeline by ID.",
         "operationId": "GetPipeline",
         "responses": {
           "200": {
@@ -312,6 +324,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -322,7 +335,7 @@
         ]
       },
       "delete": {
-        "summary": "Delete a pipeline.",
+        "summary": "Deletes a pipeline and its pipeline versions.",
         "operationId": "DeletePipeline",
         "responses": {
           "200": {
@@ -341,6 +354,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline to be deleted.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -372,6 +386,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the pipeline whose template is to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -388,7 +403,8 @@
     "type": "object",
     "properties": {
       "template": {
-        "type": "string"
+        "type": "string",
+        "description": "The template of the pipeline specified in a GetTemplate request, or of a\npipeline version specified in a GetPipelineVersionTemplate request."
      }
    }
  },
@@ -402,11 +418,13 @@
        }
      },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of pipeline versions."
       },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of pipeline versions for the given query."
      }
    }
  },
@@ -421,10 +439,12 @@
      },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of pipelines for the given query."
       },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of pipelines."
      }
    }
  },
@@ -593,7 +613,8 @@
     "type": "object",
     "properties": {
       "pipeline_url": {
-        "type": "string"
+        "type": "string",
+        "description": "URL of the pipeline definition or the pipeline version definition."
      }
    }
  },
diff --git a/backend/api/swagger/run.swagger.json b/backend/api/swagger/run.swagger.json
index f05ed108ea3..6121b43f68e 100644
--- a/backend/api/swagger/run.swagger.json
+++ b/backend/api/swagger/run.swagger.json
@@ -17,7 +17,7 @@
   "paths": {
     "/apis/v1beta1/runs": {
       "get": {
-        "summary": "Find all runs.",
+        "summary": "Finds all runs.",
         "operationId": "ListRuns",
         "responses": {
           "200": {
@@ -36,12 +36,14 @@
         "parameters": [
           {
             "name": "page_token",
+            "description": "A page token to request the next page of results. The token is acquired\nfrom the nextPageToken field of the response from the previous\nListRuns call or can be omitted when fetching the first page.",
             "in": "query",
             "required": false,
             "type": "string"
           },
           {
             "name": "page_size",
+            "description": "The number of runs to be listed per page. If there are more runs than this\nnumber, the response message will contain a nextPageToken field you can use\nto fetch the next page.",
             "in": "query",
             "required": false,
             "type": "integer",
@@ -90,7 +92,7 @@
         ]
       },
       "post": {
-        "summary": "Create a new run.",
+        "summary": "Creates a new run.",
         "operationId": "CreateRun",
         "responses": {
           "200": {
@@ -123,7 +125,7 @@
     },
     "/apis/v1beta1/runs/{id}": {
       "delete": {
-        "summary": "Delete a run.",
+        "summary": "Deletes a run.",
         "operationId": "DeleteRun",
         "responses": {
           "200": {
@@ -142,6 +144,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the run to be deleted.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -154,7 +157,7 @@
     },
     "/apis/v1beta1/runs/{id}:archive": {
       "post": {
-        "summary": "Archive a run.",
+        "summary": "Archives a run.",
         "operationId": "ArchiveRun",
         "responses": {
           "200": {
@@ -173,6 +176,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the run to be archived.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -185,7 +189,7 @@
     },
     "/apis/v1beta1/runs/{id}:unarchive": {
       "post": {
-        "summary": "Restore an archived run.",
+        "summary": "Restores an archived run.",
         "operationId": "UnarchiveRun",
         "responses": {
           "200": {
@@ -204,6 +208,7 @@
         "parameters": [
           {
             "name": "id",
+            "description": "The ID of the run to be restored.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -216,7 +221,7 @@
     },
     "/apis/v1beta1/runs/{run_id}": {
       "get": {
-        "summary": "Find a specific run by ID.",
+        "summary": "Finds a specific run by ID.",
         "operationId": "GetRun",
         "responses": {
           "200": {
@@ -235,6 +240,7 @@
         "parameters": [
           {
             "name": "run_id",
+            "description": "The ID of the run to be retrieved.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -247,7 +253,7 @@
     },
     "/apis/v1beta1/runs/{run_id}/nodes/{node_id}/artifacts/{artifact_name}:read": {
       "get": {
-        "summary": "Find a run's artifact data.",
+        "summary": "Finds a run's artifact data.",
         "operationId": "ReadArtifact",
         "responses": {
           "200": {
@@ -293,7 +299,7 @@
     },
     "/apis/v1beta1/runs/{run_id}/retry": {
       "post": {
-        "summary": "Re-initiate a failed or terminated run.",
+        "summary": "Re-initiates a failed or terminated run.",
         "operationId": "RetryRun",
         "responses": {
           "200": {
@@ -312,6 +318,7 @@
         "parameters": [
           {
             "name": "run_id",
+            "description": "The ID of the run to be retried.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -324,7 +331,7 @@
     },
     "/apis/v1beta1/runs/{run_id}/terminate": {
       "post": {
-        "summary": "Terminate an active run.",
+        "summary": "Terminates an active run.",
         "operationId": "TerminateRun",
         "responses": {
           "200": {
@@ -343,6 +350,7 @@
         "parameters": [
           {
             "name": "run_id",
+            "description": "The ID of the run to be terminated.",
             "in": "path",
             "required": true,
             "type": "string"
@@ -457,10 +465,12 @@
       },
       "total_size": {
         "type": "integer",
-        "format": "int32"
+        "format": "int32",
+        "description": "The total number of runs for the given query."
       },
       "next_page_token": {
-        "type": "string"
+        "type": "string",
+        "description": "The token to list the next page of runs."
      }
    }
  },
@@ -615,7 +625,8 @@
         "description": "Required input field. Name provided by user,\nor auto generated if run is created by scheduled job. Not unique."
       },
       "storage_state": {
-        "$ref": "#/definitions/RunStorageState"
+        "$ref": "#/definitions/RunStorageState",
+        "description": "Output. Specifies whether this run is in archived or available state."
       },
       "description": {
         "type": "string",
@@ -630,7 +641,7 @@
         "items": {
           "$ref": "#/definitions/apiResourceReference"
         },
-        "description": "Optional input field. Specify which resource this run belongs to."
+        "description": "Optional input field. Specify which resource this run belongs to.\nWhen creating a run from a particular pipeline version, the pipeline\nversion can be specified here."
       },
       "service_account": {
         "type": "string",