diff --git a/CHANGELOG.D/506.feature b/CHANGELOG.D/506.feature new file mode 100644 index 00000000..b356ff77 --- /dev/null +++ b/CHANGELOG.D/506.feature @@ -0,0 +1,2 @@ +Added new sections `defaults`, `images`, `volumes` to the `project.yml` file. They work the same as they do +in `live`/`batch`, except they are global -- everything defined in `project.yml` applies to all workflows. diff --git a/neuro_flow/ast.py b/neuro_flow/ast.py index f892d310..9cd55806 100644 --- a/neuro_flow/ast.py +++ b/neuro_flow/ast.py @@ -61,6 +61,10 @@ class Project(Base): owner: SimpleOptStrExpr # user name can contain "-" role: SimpleOptStrExpr + images: Optional[Mapping[str, "Image"]] = field(metadata={"allow_none": True}) + volumes: Optional[Mapping[str, "Volume"]] = field(metadata={"allow_none": True}) + defaults: Optional["BatchFlowDefaults"] = field(metadata={"allow_none": True}) + # There are 'batch' for pipelined mode and 'live' for interactive one # (while 'batches' are technically just non-interactive jobs. @@ -265,7 +269,7 @@ class TaskModuleCall(BaseModuleCall, TaskBase): @dataclass(frozen=True) -class FlowDefaults(Base): +class FlowDefaults(WithSpecifiedFields, Base): tags: Optional[BaseExpr[SequenceT]] = field(metadata={"allow_none": True}) env: Optional[BaseExpr[MappingT]] = field(metadata={"allow_none": True}) diff --git a/neuro_flow/config_loader.py b/neuro_flow/config_loader.py index 47c94ef1..b001c0b1 100644 --- a/neuro_flow/config_loader.py +++ b/neuro_flow/config_loader.py @@ -2,6 +2,7 @@ import abc import aiohttp +import logging import secrets import sys import tarfile @@ -45,6 +46,9 @@ from async_generator import asynccontextmanager +log = logging.getLogger(__name__) + + @dataclasses.dataclass(frozen=True) class ActionSpec: scheme: str @@ -203,13 +207,20 @@ def workspace(self) -> LocalPath: @asynccontextmanager async def project_stream(self) -> AsyncIterator[Optional[TextIO]]: - for ext in (".yml", ".yaml"): - path = self._workspace / "project" - path = path.with_suffix(ext) - if path.exists(): - with path.open() as f: - yield f - return + for dir in (self._config_dir, self._workspace): + for ext in (".yml", ".yaml"): + path = dir / "project" + path = path.with_suffix(ext) + if path.exists(): + with path.open() as f: + if dir == self._workspace: + log.warning( + f"Using project yaml file from the workspace instead" + f" of the config directory {self._config_dir}. Please move " "it there; reading from the workspace will be removed soon."
+ ) + yield f + return yield None def flow_path(self, name: str) -> LocalPath: diff --git a/neuro_flow/context.py b/neuro_flow/context.py index 841bcd68..e6387a5f 100644 --- a/neuro_flow/context.py +++ b/neuro_flow/context.py @@ -713,7 +713,12 @@ async def setup_batch_flow_ctx( async def setup_defaults_env_tags_ctx( ctx: WithFlowContext, ast_defaults: Optional[ast.FlowDefaults], + ast_global_defaults: Optional[ast.FlowDefaults], ) -> Tuple[DefaultsConf, EnvCtx, TagsCtx]: + if ast_defaults is not None and ast_global_defaults is not None: + ast_defaults = await merge_asts(ast_defaults, ast_global_defaults) + elif ast_global_defaults: + ast_defaults = ast_global_defaults env: EnvCtx tags: TagsCtx volumes: List[str] @@ -1028,7 +1033,12 @@ async def setup_params_ctx( async def setup_strategy_ctx( ctx: RootABC, ast_defaults: Optional[ast.BatchFlowDefaults], + ast_global_defaults: Optional[ast.BatchFlowDefaults], ) -> StrategyCtx: + if ast_defaults is not None and ast_global_defaults is not None: + ast_defaults = await merge_asts(ast_defaults, ast_global_defaults) + elif ast_global_defaults: + ast_defaults = ast_global_defaults if ast_defaults is None: return StrategyCtx() fail_fast = await ast_defaults.fail_fast.eval(ctx) @@ -1127,12 +1137,45 @@ def check_module_call_is_local(action_name: str, call_ast: ast.BaseModuleCall) - ) -class MixinProtocol(Protocol): +class SupportsAstMerge(Protocol): @property def _specified_fields(self) -> AbstractSet[str]: ... +_MergeTarget = TypeVar("_MergeTarget", bound=SupportsAstMerge) + + +async def merge_asts(child: _MergeTarget, parent: SupportsAstMerge) -> _MergeTarget: + child_fields = {f.name for f in dataclasses.fields(child)} + for field in parent._specified_fields: + if field == "inherits" or field not in child_fields: + continue + field_present = field in child._specified_fields + child_value = getattr(child, field) + parent_value = getattr(parent, field) + merge_supported = isinstance(parent_value, BaseSequenceExpr) or isinstance( + parent_value, BaseMappingExpr + ) + if not field_present or (child_value is None and merge_supported): + child = replace( + child, + **{field: parent_value}, + _specified_fields=child._specified_fields | {field}, + ) + elif isinstance(parent_value, BaseSequenceExpr): + assert isinstance(child_value, BaseSequenceExpr) + child = replace( + child, **{field: ConcatSequenceExpr(child_value, parent_value)} + ) + elif isinstance(parent_value, BaseMappingExpr): + assert isinstance(child_value, BaseMappingExpr) + child = replace( + child, **{field: MergeMappingsExpr(child_value, parent_value)} + ) + return child + + class MixinApplyTarget(Protocol): @property def inherits(self) -> Optional[Sequence[StrExpr]]: @@ -1147,7 +1190,7 @@ def _specified_fields(self) -> AbstractSet[str]: async def apply_mixins( - base: _MixinApplyTarget, mixins: Mapping[str, MixinProtocol] + base: _MixinApplyTarget, mixins: Mapping[str, SupportsAstMerge] ) -> _MixinApplyTarget: if base.inherits is None: return base @@ -1161,31 +1204,7 @@ async def apply_mixins( start=mixin_expr.start, end=mixin_expr.end, ) - for field in mixin._specified_fields: - if field == "inherits": - continue # Do not inherit 'inherits' field - field_present = field in base._specified_fields - base_value = getattr(base, field) - mixin_value = getattr(mixin, field) - merge_supported = isinstance(mixin_value, BaseSequenceExpr) or isinstance( - mixin_value, BaseMappingExpr - ) - if not field_present or (base_value is None and merge_supported): - base = replace( - base, - **{field: 
mixin_value}, - _specified_fields=base._specified_fields | {field}, - ) - elif isinstance(mixin_value, BaseSequenceExpr): - assert isinstance(base_value, BaseSequenceExpr) - base = replace( - base, **{field: ConcatSequenceExpr(base_value, mixin_value)} - ) - elif isinstance(mixin_value, BaseMappingExpr): - assert isinstance(base_value, BaseMappingExpr) - base = replace( - base, **{field: MergeMappingsExpr(base_value, mixin_value)} - ) + base = await merge_asts(base, mixin) return base @@ -1438,6 +1457,7 @@ async def create( cls, config_loader: ConfigLoader, config_name: str = "live" ) -> "RunningLiveFlow": ast_flow = await config_loader.fetch_flow(config_name) + ast_project = await config_loader.fetch_project() assert isinstance(ast_flow, ast.LiveFlow) @@ -1451,14 +1471,24 @@ async def create( ) defaults, env, tags = await setup_defaults_env_tags_ctx( - step_1_ctx, ast_flow.defaults + step_1_ctx, ast_flow.defaults, ast_project.defaults ) + volumes = { + **(await setup_volumes_ctx(step_1_ctx, ast_project.volumes)), + **(await setup_volumes_ctx(step_1_ctx, ast_flow.volumes)), + } + + images = { + **(await setup_images_ctx(step_1_ctx, step_1_ctx, ast_project.images)), + **(await setup_images_ctx(step_1_ctx, step_1_ctx, ast_flow.images)), + } + live_ctx = step_1_ctx.to_live_ctx( env=env, tags=tags, - volumes=await setup_volumes_ctx(step_1_ctx, ast_flow.volumes), - images=await setup_images_ctx(step_1_ctx, step_1_ctx, ast_flow.images), + volumes=volumes, + images=images, ) return cls(ast_flow, live_ctx, config_loader, defaults) @@ -1963,6 +1993,7 @@ async def create( local_info: Optional[LocallyPreparedInfo] = None, ) -> "RunningBatchFlow": ast_flow = await config_loader.fetch_flow(batch) + ast_project = await config_loader.fetch_project() assert isinstance(ast_flow, ast.BatchFlow) @@ -1979,35 +2010,66 @@ async def create( _client=config_loader.client, ) if local_info is None: - early_images = await setup_images_early( - step_1_ctx, step_1_ctx, ast_flow.images - ) + early_images: Mapping[str, EarlyImageCtx] = { + **( + await setup_images_early(step_1_ctx, step_1_ctx, ast_project.images) + ), + **(await setup_images_early(step_1_ctx, step_1_ctx, ast_flow.images)), + } else: early_images = local_info.early_images defaults, env, tags = await setup_defaults_env_tags_ctx( - step_1_ctx, ast_flow.defaults + step_1_ctx, ast_flow.defaults, ast_project.defaults ) + volumes = { + **(await setup_volumes_ctx(step_1_ctx, ast_project.volumes)), + **(await setup_volumes_ctx(step_1_ctx, ast_flow.volumes)), + } + + images = { + **( + await setup_images_ctx( + step_1_ctx, step_1_ctx, ast_project.images, early_images + ) + ), + **( + await setup_images_ctx( + step_1_ctx, step_1_ctx, ast_flow.images, early_images + ) + ), + } + step_2_ctx = step_1_ctx.to_step_2( env=env, tags=tags, - volumes=await setup_volumes_ctx(step_1_ctx, ast_flow.volumes), - images=await setup_images_ctx( - step_1_ctx, step_1_ctx, ast_flow.images, early_images - ), + volumes=volumes, + images=images, ) + if ast_project.defaults: + base_cache = await setup_cache( + step_2_ctx, + CacheConf(), + ast_project.defaults.cache, + ast.CacheStrategy.INHERIT, + ) + else: + base_cache = CacheConf() + if ast_flow.defaults: ast_cache = ast_flow.defaults.cache else: ast_cache = None cache_conf = await setup_cache( - step_2_ctx, CacheConf(), ast_cache, ast.CacheStrategy.INHERIT + step_2_ctx, base_cache, ast_cache, ast.CacheStrategy.INHERIT ) batch_ctx = step_2_ctx.to_batch_ctx( - strategy=await setup_strategy_ctx(step_2_ctx, ast_flow.defaults), + 
strategy=await setup_strategy_ctx( + step_2_ctx, ast_flow.defaults, ast_project.defaults + ), ) mixins = await setup_mixins(ast_flow.mixins) diff --git a/neuro_flow/parser.py b/neuro_flow/parser.py index 71dc76c5..5b0e54d5 100644 --- a/neuro_flow/parser.py +++ b/neuro_flow/parser.py @@ -427,79 +427,6 @@ def parse_cache(ctor: BaseConstructor, node: yaml.MappingNode) -> ast.Cache: ) -# #### Project parser #### - - -class ProjectLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - def __init__(self, stream: TextIO) -> None: - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - - -PROJECT = {"id": SimpleIdExpr, "owner": SimpleOptStrExpr, "role": SimpleOptStrExpr} - - -def parse_project_main(ctor: BaseConstructor, node: yaml.MappingNode) -> ast.Project: - ret = parse_dict( - ctor, - node, - PROJECT, - ast.Project, - ) - return ret - - -ProjectLoader.add_path_resolver("project:main", []) # type: ignore -ProjectLoader.add_constructor("project:main", parse_project_main) # type: ignore - - -def parse_project_stream(stream: TextIO) -> ast.Project: - ret: ast.Project - loader = ProjectLoader(stream) - try: - ret = loader.get_single_data() - assert isinstance(ret, ast.Project) - return ret - finally: - loader.dispose() # type: ignore[no-untyped-call] - - -def make_default_project(workspace_stem: str) -> ast.Project: - project_id = workspace_stem.replace("-", "_") - if not project_id.isidentifier(): - raise ValueError( - f'Workspace directory name "{workspace_stem}" is invalid identifier' - ) - if project_id == project_id.upper(): - raise ValueError( - f'Workspace directory name "{workspace_stem}" is invalid ' - "identifier, uppercase names are reserved for internal usage" - ) - return ast.Project( - _start=Pos(0, 0, LocalPath("")), - _end=Pos(0, 0, LocalPath("")), - id=SimpleIdExpr( - Pos(0, 0, LocalPath("")), - Pos(0, 0, LocalPath("")), - project_id, - ), - owner=SimpleOptStrExpr( - Pos(0, 0, LocalPath("")), - Pos(0, 0, LocalPath("")), - None, - ), - role=SimpleOptStrExpr( - Pos(0, 0, LocalPath("")), - Pos(0, 0, LocalPath("")), - None, - ), - ) - - # #### Flow parser #### @@ -1526,3 +1453,126 @@ def parse_bake_meta(meta_file: LocalPath) -> Mapping[str, str]: result = BakeMetaLoader(f).get_single_data() assert isinstance(result, dict) return result + + +# #### Project parser #### + + +class ProjectLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): + def __init__(self, stream: TextIO) -> None: + Reader.__init__(self, stream) + Scanner.__init__(self) + Parser.__init__(self) + Composer.__init__(self) + BaseConstructor.__init__(self) + BaseResolver.__init__(self) + + +PROJECT = { + "id": SimpleIdExpr, + "owner": SimpleOptStrExpr, + "role": SimpleOptStrExpr, + "images": None, + "volumes": None, + "defaults": None, +} + + +def parse_project_main(ctor: BaseConstructor, node: yaml.MappingNode) -> ast.Project: + ret = parse_dict( + ctor, + node, + PROJECT, + ast.Project, + ) + return ret + + +def parse_project_defaults( + ctor: FlowLoader, node: yaml.MappingNode +) -> ast.BatchFlowDefaults: + return parse_dict( + ctor, + node, + { + "tags": ExprOrSeq(StrExpr, type2str), + "env": ExprOrMapping(StrExpr, type2str), + "volumes": ExprOrSeq(OptStrExpr, type2str), + "workdir": OptRemotePathExpr, + "life_span": OptTimeDeltaExpr, + "preset": OptStrExpr, + "schedule_timeout": OptTimeDeltaExpr, + "fail_fast": OptBoolExpr, + "max_parallel": OptIntExpr, + 
"cache": None, + }, + ast.BatchFlowDefaults, + ) + + +ProjectLoader.add_path_resolver("project:main", []) # type: ignore +ProjectLoader.add_constructor("project:main", parse_project_main) # type: ignore + +ProjectLoader.add_path_resolver( # type: ignore + "project:cache", [(dict, "defaults"), (dict, "cache")] +) +ProjectLoader.add_constructor("project:cache", parse_cache) # type: ignore + +ProjectLoader.add_path_resolver( # type: ignore + "project:defaults", [(dict, "defaults")] +) +ProjectLoader.add_constructor( # type: ignore + "project:defaults", parse_project_defaults +) + +ProjectLoader.add_path_resolver("project:images", [(dict, "images")]) # type: ignore +ProjectLoader.add_constructor("project:images", parse_images) # type: ignore + +ProjectLoader.add_path_resolver("project:volumes", [(dict, "volumes")]) # type: ignore +ProjectLoader.add_constructor("project:volumes", parse_volumes) # type: ignore + + +def parse_project_stream(stream: TextIO) -> ast.Project: + ret: ast.Project + loader = ProjectLoader(stream) + try: + ret = loader.get_single_data() + assert isinstance(ret, ast.Project) + return ret + finally: + loader.dispose() # type: ignore[no-untyped-call] + + +def make_default_project(workspace_stem: str) -> ast.Project: + project_id = workspace_stem.replace("-", "_") + if not project_id.isidentifier(): + raise ValueError( + f'Workspace directory name "{workspace_stem}" is invalid identifier' + ) + if project_id == project_id.upper(): + raise ValueError( + f'Workspace directory name "{workspace_stem}" is invalid ' + "identifier, uppercase names are reserved for internal usage" + ) + return ast.Project( + _start=Pos(0, 0, LocalPath("")), + _end=Pos(0, 0, LocalPath("")), + id=SimpleIdExpr( + Pos(0, 0, LocalPath("")), + Pos(0, 0, LocalPath("")), + project_id, + ), + owner=SimpleOptStrExpr( + Pos(0, 0, LocalPath("")), + Pos(0, 0, LocalPath("")), + None, + ), + role=SimpleOptStrExpr( + Pos(0, 0, LocalPath("")), + Pos(0, 0, LocalPath("")), + None, + ), + defaults=None, + images=None, + volumes=None, + ) diff --git a/tests/e2e/assets/ws/project.yml b/tests/e2e/assets/ws/.neuro/project.yml similarity index 100% rename from tests/e2e/assets/ws/project.yml rename to tests/e2e/assets/ws/.neuro/project.yml diff --git a/tests/unit/test_batch_parser.py b/tests/unit/test_batch_parser.py index 3cd3183f..e4b01997 100644 --- a/tests/unit/test_batch_parser.py +++ b/tests/unit/test_batch_parser.py @@ -122,6 +122,17 @@ def test_parse_minimal(assets: pathlib.Path) -> None: defaults=ast.BatchFlowDefaults( _start=Pos(22, 2, config_file), _end=Pos(34, 0, config_file), + _specified_fields={ + "env", + "fail_fast", + "volumes", + "life_span", + "schedule_timeout", + "max_parallel", + "preset", + "workdir", + "tags", + }, tags=SequenceItemsExpr( [ StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "tag-a"), @@ -843,6 +854,7 @@ def test_parse_matrix_with_strategy(assets: pathlib.Path) -> None: defaults=ast.BatchFlowDefaults( Pos(2, 2, config_file), Pos(7, 0, config_file), + _specified_fields={"fail_fast", "cache", "max_parallel"}, tags=None, env=None, volumes=None, @@ -1095,6 +1107,7 @@ def test_parse_args(assets: pathlib.Path) -> None: defaults=ast.BatchFlowDefaults( _start=Pos(7, 2, config_file), _end=Pos(10, 0, config_file), + _specified_fields={"tags"}, tags=SequenceItemsExpr( [ StrExpr( diff --git a/tests/unit/test_context.py b/tests/unit/test_context.py index 9b90e490..10e6a11f 100644 --- a/tests/unit/test_context.py +++ b/tests/unit/test_context.py @@ -8,6 +8,7 @@ from yarl import URL from 
neuro_flow import ast +from neuro_flow.ast import CacheStrategy from neuro_flow.config_loader import BatchLocalCL, ConfigLoader, LiveLocalCL from neuro_flow.context import ( EMPTY_ROOT, @@ -134,6 +135,35 @@ async def test_images(live_config_loader: ConfigLoader) -> None: assert ctx.images["image_a"].build_preset == "gpu-small" +async def test_project_level_defaults_live( + assets: pathlib.Path, client: Client +) -> None: + ws = assets / "with_project_yaml" + config_dir = ConfigDir( + workspace=ws, + config_dir=ws, + ) + cl = LiveLocalCL(config_dir, client) + try: + flow = await RunningLiveFlow.create(cl, "live") + job = await flow.get_job("test", {}) + assert "tag-a" in job.tags + assert "tag-b" in job.tags + assert job.env["global_a"] == "val-a" + assert job.env["global_b"] == "val-b" + assert job.volumes == [ + "storage:common:/mnt/common:rw", + "storage:dir:/var/dir:ro", + ] + assert job.workdir == RemotePath("/global/dir") + assert job.life_span == 100800.0 + assert job.preset == "cpu-large" + assert job.schedule_timeout == 2157741.0 + assert job.image == "image:banana" + finally: + await cl.close() + + async def test_local_remote_path_images( client: Client, live_config_loader: ConfigLoader ) -> None: @@ -1085,3 +1115,59 @@ async def test_batch_task_with_no_image(assets: pathlib.Path, client: Client) -> finally: await cl.close() + + +async def test_early_images_include_globals( + assets: pathlib.Path, client: Client +) -> None: + ws = assets / "with_project_yaml" + config_dir = ConfigDir( + workspace=ws, + config_dir=ws, + ) + cl = BatchLocalCL(config_dir, client) + try: + flow = await RunningBatchFlow.create(cl, "batch", "bake-id") + assert flow.early_images["image_a"].ref == "image:banana" + assert flow.early_images["image_a"].context == ws / "dir" + assert flow.early_images["image_a"].dockerfile == ws / "dir/Dockerfile" + + assert flow.early_images["image_b"].ref == "image:main" + assert flow.early_images["image_b"].context == ws / "dir" + assert flow.early_images["image_b"].dockerfile == ws / "dir/Dockerfile" + + finally: + await cl.close() + + +async def test_batch_with_project_globals(assets: pathlib.Path, client: Client) -> None: + ws = assets / "with_project_yaml" + config_dir = ConfigDir( + workspace=ws, + config_dir=ws, + ) + cl = BatchLocalCL(config_dir, client) + try: + flow = await RunningBatchFlow.create(cl, "batch", "bake-id") + task = await flow.get_task((), "task", needs={}, state={}) + assert "tag-a" in task.tags + assert "tag-b" in task.tags + assert task.env["global_a"] == "val-a" + assert task.env["global_b"] == "val-b" + assert task.volumes == [ + "storage:common:/mnt/common:rw", + "storage:dir:/var/dir:ro", + ] + assert task.workdir == RemotePath("/global/dir") + assert task.life_span == 100800.0 + assert task.preset == "cpu-large" + assert task.schedule_timeout == 2157741.0 + assert task.image == "image:main" + + assert not task.strategy.fail_fast + assert task.strategy.max_parallel == 20 + assert task.cache.strategy == CacheStrategy.NONE + assert task.cache.life_span == 9000.0 + + finally: + await cl.close() diff --git a/tests/unit/test_live_parser.py b/tests/unit/test_live_parser.py index a8306755..63a97748 100644 --- a/tests/unit/test_live_parser.py +++ b/tests/unit/test_live_parser.py @@ -326,6 +326,15 @@ def test_parse_full(assets: pathlib.Path) -> None: defaults=ast.FlowDefaults( Pos(26, 2, config_file), Pos(36, 0, config_file), + _specified_fields={ + "env", + "volumes", + "life_span", + "schedule_timeout",
"preset", + "workdir", + "tags", + }, tags=SequenceItemsExpr( [ StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "tag-a"), @@ -655,6 +664,15 @@ def test_parse_full_exprs(assets: pathlib.Path) -> None: defaults=ast.FlowDefaults( Pos(21, 2, config_file), Pos(28, 0, config_file), + _specified_fields={ + "env", + "volumes", + "life_span", + "schedule_timeout", + "preset", + "workdir", + "tags", + }, tags=SequenceExpr( Pos(0, 0, config_file), Pos(0, 0, config_file), diff --git a/tests/unit/test_project_parser.py b/tests/unit/test_project_parser.py new file mode 100644 index 00000000..d4654ef9 --- /dev/null +++ b/tests/unit/test_project_parser.py @@ -0,0 +1,194 @@ +import pathlib + +from neuro_flow import ast +from neuro_flow.expr import ( + MappingItemsExpr, + OptBoolExpr, + OptIntExpr, + OptLocalPathExpr, + OptRemotePathExpr, + OptStrExpr, + OptTimeDeltaExpr, + RemotePathExpr, + SequenceItemsExpr, + SimpleIdExpr, + SimpleOptStrExpr, + StrExpr, + URIExpr, +) +from neuro_flow.parser import parse_project_stream +from neuro_flow.tokenizer import Pos + + +def test_parse_full(assets: pathlib.Path) -> None: + config_file = assets / "with_project_yaml" / "project.yml" + with config_file.open() as stream: + project = parse_project_stream(stream) + assert project == ast.Project( + Pos(0, 0, config_file), + Pos(42, 0, config_file), + id=SimpleIdExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "test_project", + ), + owner=SimpleOptStrExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "test-owner", + ), + role=SimpleOptStrExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "test-owner/roles/test-role", + ), + images={ + "image_a": ast.Image( + Pos(5, 4, config_file), + Pos(17, 0, config_file), + ref=StrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "image:banana" + ), + context=OptStrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "dir" + ), + dockerfile=OptStrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "dir/Dockerfile" + ), + build_args=SequenceItemsExpr( + [ + StrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "--arg1" + ), + StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "val1"), + StrExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "--arg2=val2", + ), + ] + ), + env=MappingItemsExpr( + { + "SECRET_ENV": StrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "secret:key" + ), + } + ), + volumes=SequenceItemsExpr( + [ + OptStrExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "secret:key:/var/secret/key.txt", + ), + ] + ), + build_preset=OptStrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "gpu-small" + ), + force_rebuild=OptBoolExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), None + ), + ) + }, + volumes={ + "volume_a": ast.Volume( + Pos(19, 4, config_file), + Pos(23, 2, config_file), + remote=URIExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "storage:dir" + ), + mount=RemotePathExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "/var/dir" + ), + read_only=OptBoolExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), True + ), + local=OptLocalPathExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "dir" + ), + ), + "volume_b": ast.Volume( + Pos(24, 4, config_file), + Pos(26, 0, config_file), + remote=URIExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "storage:other" + ), + mount=RemotePathExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "/var/other" + ), + read_only=OptBoolExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), None + ), + 
local=OptLocalPathExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), None + ), + ), + }, + defaults=ast.BatchFlowDefaults( + Pos(27, 2, config_file), + Pos(42, 0, config_file), + _specified_fields={ + "fail_fast", + "tags", + "cache", + "env", + "volumes", + "life_span", + "schedule_timeout", + "workdir", + "max_parallel", + "preset", + }, + tags=SequenceItemsExpr( + [ + StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "tag-a"), + StrExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), "tag-b"), + ] + ), + env=MappingItemsExpr( + { + "global_a": StrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "val-a" + ), + "global_b": StrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "val-b" + ), + } + ), + volumes=SequenceItemsExpr( + [ + OptStrExpr( + Pos(0, 0, config_file), + Pos(0, 0, config_file), + "storage:common:/mnt/common:rw", + ), + ] + ), + workdir=OptRemotePathExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "/global/dir" + ), + life_span=OptTimeDeltaExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "1d4h" + ), + preset=OptStrExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "cpu-large" + ), + schedule_timeout=OptTimeDeltaExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "24d23h22m21s" + ), + cache=ast.Cache( + Pos(40, 4, config_file), + Pos(42, 0, config_file), + strategy=ast.CacheStrategy.NONE, + life_span=OptTimeDeltaExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), "2h30m" + ), + ), + fail_fast=OptBoolExpr( + Pos(0, 0, config_file), Pos(0, 0, config_file), False + ), + max_parallel=OptIntExpr(Pos(0, 0, config_file), Pos(0, 0, config_file), 20), + ), + ) diff --git a/tests/unit/with_project_yaml/batch.yml b/tests/unit/with_project_yaml/batch.yml new file mode 100644 index 00000000..de7e1106 --- /dev/null +++ b/tests/unit/with_project_yaml/batch.yml @@ -0,0 +1,12 @@ +kind: batch +images: + image_b: + ref: image:main + context: dir + dockerfile: dir/Dockerfile +tasks: +- id: task + image: image:main + bash: echo OK + volumes: + - ${{ volumes.volume_a.ref }} diff --git a/tests/unit/with_project_yaml/live.yml b/tests/unit/with_project_yaml/live.yml new file mode 100644 index 00000000..8c78e6c9 --- /dev/null +++ b/tests/unit/with_project_yaml/live.yml @@ -0,0 +1,6 @@ +kind: live +jobs: + test: + image: ${{ images.image_a.ref }} + volumes: + - ${{ volumes.volume_a.ref }} diff --git a/tests/unit/with_project_yaml/project.yml b/tests/unit/with_project_yaml/project.yml new file mode 100644 index 00000000..9a72e256 --- /dev/null +++ b/tests/unit/with_project_yaml/project.yml @@ -0,0 +1,42 @@ +id: test_project +owner: test-owner +role: test-owner/roles/test-role +images: + image_a: + ref: image:banana + context: dir + dockerfile: dir/Dockerfile + build_args: + - --arg1 + - val1 + - --arg2=val2 + env: + SECRET_ENV: secret:key + volumes: + - secret:key:/var/secret/key.txt + build_preset: gpu-small +volumes: + volume_a: + remote: storage:dir + mount: /var/dir + read_only: true + local: dir + volume_b: + remote: storage:other + mount: /var/other +defaults: + tags: [tag-a, tag-b] + env: + global_a: val-a + global_b: val-b + volumes: + - storage:common:/mnt/common:rw + workdir: /global/dir + life_span: 1d4h + preset: cpu-large + schedule_timeout: 24d23h22m21s + fail_fast: false + max_parallel: 20 + cache: + strategy: none + life_span: 2h30m
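A minimal usage sketch of the new behavior (hypothetical file contents; merge semantics as implemented by `merge_asts` and covered by the unit tests above): flow-level scalar settings override project-level ones, sequences such as `tags` and `volumes` are concatenated, mappings such as `env` are merged, and a flow-level entry in the `images`/`volumes` sections shadows a project-level entry with the same name. Note that `project.yml` is now looked up in the config directory (e.g. `.neuro/`) first, with the workspace root kept as a deprecated fallback.

# .neuro/project.yml -- hypothetical global config
defaults:
  preset: cpu-large        # scalar: applies unless a flow overrides it
  tags: [tag-a]            # sequence: concatenated with flow-level tags
  env:
    global_a: val-a        # mapping: merged with flow-level env

# live.yml -- hypothetical flow that overrides and extends the globals
kind: live
defaults:
  preset: gpu-small        # scalar override: wins over cpu-large
  tags: [tag-b]            # effective tags include tag-a and tag-b
  env:
    local_b: val-b         # effective env has both global_a and local_b
jobs:
  main:
    image: ubuntu
    bash: echo OK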