use click for dsl-compile command
connor-mccarthy committed Apr 13, 2022
1 parent 5aaf6a4 commit c30d994
Showing 4 changed files with 71 additions and 77 deletions.
samples/test/utils/kfp/samples/test/utils.py (4 changes: 2 additions, 2 deletions)
@@ -396,8 +396,8 @@ def run_v2_pipeline(
if file.endswith(".ipynb"):
pyfile = tempfile.mktemp(suffix='.py', prefix="pipeline_py_code")
_nb_sample_to_py(file, pyfile)
from kfp.compiler.main import compile_pyfile
compile_pyfile(pyfile=pyfile, package_path=original_pipeline_spec)
from kfp.cli.compile import dsl_compile
dsl_compile(py=pyfile, output=original_pipeline_spec)

# remove following overriding logic once we use create_run_from_job_spec to trigger kfp pipeline run
with open(original_pipeline_spec) as f:
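The updated test utility imports the click command object `dsl_compile` and calls it directly with keyword arguments. An alternative for test code is click's `CliRunner`, which exercises the command through its real option parsing. The snippet below is a minimal sketch of that approach; `pipeline.py` and `pipeline_spec.json` are placeholder paths, not values from this repository.

```python
# Minimal sketch (not part of this commit): driving the new click-based
# compiler command from test code with click's CliRunner.
from click.testing import CliRunner

from kfp.cli.compile import dsl_compile

pyfile = 'pipeline.py'              # placeholder: path to a pipeline .py file
output_path = 'pipeline_spec.json'  # placeholder: compiled output path

runner = CliRunner()
result = runner.invoke(dsl_compile, ['--py', pyfile, '--output', output_path])
assert result.exit_code == 0, result.output
```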
sdk/python/kfp/compiler/main.py → sdk/python/kfp/cli/compile.py (117 changes: 52 additions, 65 deletions)
@@ -1,4 +1,4 @@
# Copyright 2020 The Kubeflow Authors
# Copyright 2020-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,50 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""KFP SDK compiler CLI tool."""

import argparse
import json
import logging
import os
import sys
from typing import Any, Callable, List, Mapping, Optional

import click
from kfp import compiler
from kfp.components import pipeline_context


def parse_arguments() -> argparse.Namespace:
"""Parse command line arguments."""

parser = argparse.ArgumentParser()
parser.add_argument(
'--py',
type=str,
required=True,
help='local absolute path to a py file.')
parser.add_argument(
'--function',
type=str,
help='The name of the function to compile if there are multiple.')
parser.add_argument(
'--pipeline-parameters',
type=json.loads,
help='The pipeline parameters in JSON dict format.')
parser.add_argument(
'--namespace', type=str, help='The namespace for the pipeline function')
parser.add_argument(
'--output',
type=str,
required=True,
help='local path to the output PipelineJob json file.')
parser.add_argument(
'--disable-type-check',
action='store_true',
help='disable the type check, default is enabled.')

args = parser.parse_args()
return args


def _compile_pipeline_function(
pipeline_funcs: List[Callable],
function_name: Optional[str],
@@ -119,46 +86,66 @@ def __exit__(self, *args):
pipeline_context.pipeline_decorator_handler = self.old_handler
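For orientation, the `PipelineCollectorContext.__exit__` shown above restores a module-level decorator handler after the user's pipeline file has been imported. The sketch below is a hedged, self-contained illustration of that collect-and-restore pattern; apart from the handler name, which the diff itself references via `kfp.components.pipeline_context`, none of it is the kfp implementation.

```python
# Illustrative sketch of the collector pattern: swap a module-level decorator
# handler on __enter__ so decorated functions are appended to a list, then
# restore the previous handler on __exit__.
from typing import Callable, List, Optional

# Stand-in for pipeline_context.pipeline_decorator_handler.
pipeline_decorator_handler: Optional[Callable] = None


class CollectorContext:

    def __enter__(self) -> List[Callable]:
        global pipeline_decorator_handler
        collected: List[Callable] = []

        def handler(func: Callable) -> Callable:
            collected.append(func)
            return func

        self.old_handler = pipeline_decorator_handler
        pipeline_decorator_handler = handler
        return collected

    def __exit__(self, *args) -> None:
        global pipeline_decorator_handler
        pipeline_decorator_handler = self.old_handler


# Usage sketch: any decorator routed through pipeline_decorator_handler while
# the block is active ends up in `funcs`.
# with CollectorContext() as funcs:
#     __import__('my_pipeline_module')
```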


def compile_pyfile(
pyfile: str,
package_path: str,
@click.command()
@click.option(
'--py', type=str, required=True, help='Local absolute path to a py file.')
@click.option(
'--output',
type=str,
required=True,
help='Local path to the output PipelineJob JSON file.')
@click.option(
'--function',
'function_name',
type=str,
default=None,
help='The name of the function to compile if there are multiple.')
@click.option(
'--pipeline-parameters',
type=str,
default=None,
help='The pipeline parameters in JSON dict format.')
@click.option(
'--disable-type-check',
is_flag=True,
default=False,
help='Disable the type check. Default: type check is not disabled.')
def dsl_compile(
py: str,
output: str,
function_name: Optional[str] = None,
pipeline_parameters: Optional[Mapping[str, Any]] = None,
type_check: bool = True,
pipeline_parameters: str = None,
disable_type_check: bool = True,
) -> None:
"""Compiles a pipeline written in a .py file.
Args:
pyfile: The path to the .py file that contains the pipeline definition.
function_name: The name of the pipeline function.
pipeline_parameters: The pipeline parameters as a dict of {name: value}.
package_path: The output path of the compiled result.
type_check: Whether to enable the type checking.
"""
sys.path.insert(0, os.path.dirname(pyfile))
"""Compiles a pipeline written in a .py file."""
sys.path.insert(0, os.path.dirname(py))
try:
filename = os.path.basename(pyfile)
filename = os.path.basename(py)
with PipelineCollectorContext() as pipeline_funcs:
__import__(os.path.splitext(filename)[0])
try:
parsed_parameters = json.loads(
pipeline_parameters) if pipeline_parameters is not None else {}
except json.JSONDecodeError as e:
logging.error(
f"Failed to parse --pipeline-parameters argument: {pipeline_parameters}"
)
raise e
_compile_pipeline_function(
pipeline_funcs=pipeline_funcs,
function_name=function_name,
pipeline_parameters=pipeline_parameters,
package_path=package_path,
type_check=type_check,
pipeline_parameters=parsed_parameters,
package_path=output,
type_check=not disable_type_check,
)
finally:
del sys.path[0]


def main():
args = parse_arguments()
if args.py is None:
raise ValueError('The --py option must be specified.')
compile_pyfile(
pyfile=args.py,
function_name=args.function,
pipeline_parameters=args.pipeline_parameters,
package_path=args.output,
type_check=not args.disable_type_check,
)
logging.basicConfig(format='%(message)s', level=logging.INFO)
try:
dsl_compile(obj={}, auto_envvar_prefix='KFP')
except Exception as e:
click.echo(str(e), err=True)
sys.exit(1)
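As a usage reference (not part of the commit), the new command can also be driven from Python without the installed console script; `pipeline.py` and `pipeline_spec.json` are placeholder paths.

```python
# Minimal sketch: invoking the click command programmatically.
from kfp.cli.compile import dsl_compile

# standalone_mode=False keeps click from calling sys.exit() and lets any
# exception (for example a bad --pipeline-parameters value) propagate.
dsl_compile.main(
    args=['--py', 'pipeline.py', '--output', 'pipeline_spec.json'],
    standalone_mode=False)

# Shell equivalent once the console script is installed:
#   dsl-compile --py pipeline.py --output pipeline_spec.json
```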
sdk/python/kfp/compiler_cli_tests/compiler_cli_tests.py (25 changes: 16 additions, 9 deletions)
@@ -54,29 +54,31 @@ def _test_compile_py_to_yaml(
):
test_data_dir = os.path.join(os.path.dirname(__file__), 'test_data')
py_file = os.path.join(test_data_dir, '{}.py'.format(file_base_name))
tmpdir = tempfile.mkdtemp()
golden_compiled_file = os.path.join(test_data_dir,
file_base_name + '.yaml')

if additional_arguments is None:
additional_arguments = []

def _compile(target_output_file: str):
subprocess.check_call([
'dsl-compile', '--py', py_file, '--output',
target_output_file
] + additional_arguments)
try:
subprocess.check_output(
['dsl-compile', '--py', py_file, '--output', compiled_file]
+ additional_arguments,
stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
raise Exception(exc.output) from exc

def _load_compiled_file(filename: str):
with open(filename, 'r') as f:
contents = yaml.load(f)
contents = yaml.safe_load(f)
# Correct the sdkVersion
pipeline_spec = contents[
'pipelineSpec'] if 'pipelineSpec' in contents else contents
del pipeline_spec['sdkVersion']
return _ignore_kfp_version_helper(contents)

try:
with tempfile.TemporaryDirectory() as tmpdir:
compiled_file = os.path.join(tmpdir,
file_base_name + '-pipeline.yaml')
_compile(target_output_file=compiled_file)
@@ -94,14 +96,19 @@ def _load_compiled_file(filename: str):
golden = _load_compiled_file(golden_compiled_file)

self.assertEqual(golden, compiled)
finally:
shutil.rmtree(tmpdir)

def test_two_step_pipeline(self):
self._test_compile_py_to_yaml(
'two_step_pipeline',
['--pipeline-parameters', '{"text":"Hello KFP!"}'])

def test_two_step_pipeline_failure_parameter_parse(self):
with self.assertRaisesRegex(
Exception, r"Failed to parse --pipeline-parameters argument:"):
self._test_compile_py_to_yaml(
'two_step_pipeline',
['--pipeline-parameters', '{"text":"Hello KFP!}'])

def test_pipeline_with_importer(self):
self._test_compile_py_to_yaml('pipeline_with_importer')

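The new failure test depends on `dsl_compile` logging a parse error and re-raising when `--pipeline-parameters` is not valid JSON. A minimal sketch of that failure path, reusing the same malformed value as the test (the closing quote is missing):

```python
# The malformed parameter string makes json.loads raise JSONDecodeError,
# which dsl_compile logs as "Failed to parse --pipeline-parameters argument:"
# before re-raising.
import json

try:
    json.loads('{"text":"Hello KFP!}')  # same invalid JSON as the test case
except json.JSONDecodeError as e:
    print(f'Failed to parse --pipeline-parameters argument: {e}')
```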
sdk/python/setup.py (2 changes: 1 addition, 1 deletion)
@@ -89,7 +89,7 @@ def find_version(*file_path_parts: str) -> str:
include_package_data=True,
entry_points={
'console_scripts': [
'dsl-compile = kfp.compiler.main:main',
'dsl-compile = kfp.cli.compile:main',
'dsl-compile-deprecated = kfp.deprecated.compiler.main:main',
'kfp=kfp.cli.__main__:main',
]
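For context on the entry-point change, a setuptools `console_scripts` entry generates a small wrapper script; the sketch below is an approximation of what `dsl-compile` resolves to, not code from the commit.

```python
# Rough equivalent of the generated dsl-compile console script (assumption
# about setuptools behavior, not part of this commit).
import sys

from kfp.cli.compile import main

if __name__ == '__main__':
    sys.exit(main())
```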
