small things
Signed-off-by: Ayush Kamat <[email protected]>
ayushkamat committed Feb 8, 2024
1 parent 872eb72 commit 6e4292d
Showing 5 changed files with 243 additions and 108 deletions.
8 changes: 6 additions & 2 deletions latch_cli/centromere/ctx.py
@@ -1,5 +1,7 @@
import os
import re
import sys
import time
import traceback
from dataclasses import dataclass
from pathlib import Path
@@ -220,8 +222,6 @@ def __init__(
elif system == "Darwin":
res = subprocess.run(["open", new_meta]).returncode
elif system == "Windows":
import os

res = os.system(str(new_meta.resolve()))
else:
res = None
@@ -283,6 +283,10 @@ def __init__(
if not self.disable_auto_version:
hash = hash_directory(self.pkg_root)
self.version = f"{self.version}-{hash[:6]}"

if os.environ.get("LATCH_NEW_VERSION_ALWAYS") is not None:
self.version = f"{self.version}-{int(time.monotonic())}"

click.echo(f" {self.version}\n")

if self.nucleus_check_version(self.version, self.workflow_name):
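The new LATCH_NEW_VERSION_ALWAYS check appends an int(time.monotonic())-derived suffix on top of the auto-version hash, so the registered workflow version changes on every run even when the package contents (and therefore the directory hash) do not. A minimal standalone sketch of that logic, assuming a pre-computed directory hash; the function name and parameters are illustrative, not the real ctx.py API:

import os
import time

def build_version(base_version: str, dir_hash: str, disable_auto_version: bool = False) -> str:
    # illustrative restatement of the versioning logic in centromere/ctx.py
    version = base_version
    if not disable_auto_version:
        # auto-version: tie the version string to the package contents
        version = f"{version}-{dir_hash[:6]}"
    if os.environ.get("LATCH_NEW_VERSION_ALWAYS") is not None:
        # opt-in: force a fresh version string on every invocation
        version = f"{version}-{int(time.monotonic())}"
    return version

print(build_version("1.0.0", "deadbeefcafe"))  # e.g. 1.0.0-deadbe-12345

Note that time.monotonic() has an undefined reference point per the Python docs, so the integer suffix is best treated as an opaque per-run value rather than a wall-clock timestamp.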
4 changes: 4 additions & 0 deletions latch_cli/extras/common/utils.py
@@ -1,4 +1,5 @@
import textwrap
import types
from typing import Type, Union, get_args, get_origin

from typing_extensions import TypeGuard
@@ -25,6 +26,9 @@ def is_primitive_value(val: object) -> TypeGuard[Union[None, str, bool, int, flo


def type_repr(t: Type, *, add_namespace: bool = False) -> str:
if getattr(t, "__name__", None) == "NoneType":
return "None"

if is_primitive_type(t) or t is LatchFile or t is LatchDir:
return t.__name__

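With the NoneType special case, type_repr renders the None member of Optional/Union types as None rather than NoneType, matching how it is written in annotations. A simplified illustration of the idea, assuming a stripped-down helper (the real type_repr also handles LatchFile, LatchDir, and the add_namespace option):

from typing import Optional, Type, Union, get_args, get_origin

def simple_type_repr(t: Type) -> str:
    # NoneType prints as None, the way it appears in user-facing annotations
    if getattr(t, "__name__", None) == "NoneType":
        return "None"
    if get_origin(t) is Union:
        return f"Union[{', '.join(simple_type_repr(a) for a in get_args(t))}]"
    return getattr(t, "__name__", repr(t))

print(simple_type_repr(Optional[int]))  # Union[int, None], not Union[int, NoneType]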
53 changes: 29 additions & 24 deletions latch_cli/extras/nextflow/dag.py
@@ -4,7 +4,8 @@
from functools import lru_cache as cache

import json
from dataclasses import dataclass, field
import sys
from dataclasses import asdict, dataclass, field
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional, Set, Tuple, TypedDict
@@ -28,6 +29,11 @@ class Vertex:
id: str
label: str
type: VertexType
statement: str
ret: List[str] = field(hash=False)
outputNames: List[str] = field(hash=False)
module: str
unaliased: str


@dataclass(frozen=True)
@@ -42,7 +48,11 @@ class _VertexContentJson(TypedDict):
id: str
label: str
type: VertexType
processMeta: Optional[Dict]
statement: str
ret: List[str]
outputNames: List[str]
module: str
unaliased: str


class _VertexJson(TypedDict):
@@ -80,13 +90,8 @@ def from_path(cls, p: Path) -> Self:
vertices: List[Vertex] = []
for v in payload["vertices"]:
c = v["content"]
vertices.append(
Vertex(
id=c["id"],
label=identifier_from_str(c["label"])[:128],
type=c["type"],
)
)
c["label"] = identifier_from_str(c["label"])[:128]
vertices.append(Vertex(**c))

edges: List[Edge] = []
edge_set: Set[Tuple[str, str]] = set()
@@ -98,7 +103,8 @@ def from_path(cls, p: Path) -> Self:
# disallow multiple edges
continue

edges.append(Edge(**c))
edge = Edge(**c)
edges.append(edge)
edge_set.add(t)

return cls(vertices, edges)
@@ -120,14 +126,14 @@ def dest(self, e: Edge) -> Vertex:
return self._vertices_by_id()[e.dest]

@cache
def ancestors(self) -> Dict[Vertex, List[Vertex]]:
res: Dict[Vertex, List[Vertex]] = {}
def ancestors(self) -> Dict[Vertex, List[Tuple[Vertex, Edge]]]:
res: Dict[Vertex, List[Tuple[Vertex, Edge]]] = {}
for v in self.vertices:
res[v] = []

by_id = self._vertices_by_id()
for edge in self.edges:
res[by_id[edge.dest]].append(by_id[edge.src])
res[by_id[edge.dest]].append((by_id[edge.src], edge))

return res

@@ -144,14 +150,14 @@ def inbound_edges(self) -> Dict[Vertex, List[Edge]]:
return res

@cache
def descendants(self) -> Dict[Vertex, List[Vertex]]:
res: Dict[Vertex, List[Vertex]] = {}
def descendants(self) -> Dict[Vertex, List[Tuple[Vertex, Edge]]]:
res: Dict[Vertex, List[Tuple[Vertex, Edge]]] = {}
for v in self.vertices:
res[v] = []

by_id = self._vertices_by_id()
for edge in self.edges:
res[by_id[edge.src]].append(by_id[edge.dest])
res[by_id[edge.src]].append((by_id[edge.dest], edge))

return res
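ancestors() and descendants() now pair each neighbouring Vertex with the Edge that connects it, so graph walks can read edge metadata (for example the branch flag) without a second lookup. A minimal sketch of the new return shape, assuming trimmed-down Vertex and Edge dataclasses; the real ones carry more fields:

from dataclasses import dataclass
from typing import Dict, List, Tuple

@dataclass(frozen=True)
class Vertex:
    id: str
    label: str

@dataclass(frozen=True)
class Edge:
    label: str
    src: str
    dest: str

def descendants(vertices: List[Vertex], edges: List[Edge]) -> Dict[Vertex, List[Tuple[Vertex, Edge]]]:
    by_id = {v.id: v for v in vertices}
    res: Dict[Vertex, List[Tuple[Vertex, Edge]]] = {v: [] for v in vertices}
    for e in edges:
        # keep the connecting edge next to the neighbour instead of the bare vertex
        res[by_id[e.src]].append((by_id[e.dest], e))
    return res

a, b = Vertex("a", "A"), Vertex("b", "B")
out = Edge("out_ch", "a", "b")
for child, edge in descendants([a, b], [out])[a]:
    print(child.label, edge.label)  # B out_ch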

@@ -214,20 +220,18 @@ def _resolve_subworkflows_helper(

sub_dag = dags[v.label]
for sub_v in sub_dag.vertices:
new_vertices.append(
Vertex(
id="_".join([v.id, sub_v.id]),
label=sub_v.label,
type=sub_v.type,
)
)
args = asdict(sub_v)
args["id"] = "_".join([v.id, sub_v.id])

new_vertices.append(Vertex(**args))

for sub_e in sub_dag.edges:
new_edges.append(
Edge(
label=sub_e.label,
src="_".join([v.id, sub_e.src]),
dest="_".join([v.id, sub_e.dest]),
branch=sub_e.branch,
)
)

@@ -254,6 +258,7 @@ def _resolve_subworkflows_helper(
label=e.label,
src=src,
dest=dest,
branch=e.branch,
)
)

@@ -285,7 +290,7 @@ def resolve_subworkflows(cls, dags: Dict[str, Self]) -> Dict[str, Self]:
return res

def _toposort_helper(self, cur: Vertex, res: List[Vertex]):
for x in self.ancestors()[cur]:
for x, _ in self.ancestors()[cur]:
self._toposort_helper(x, res)

res.append(cur)
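When a subworkflow is inlined, _resolve_subworkflows_helper now copies vertices by round-tripping through dataclasses.asdict instead of naming each field, so the newly added Vertex fields (statement, ret, outputNames, module, unaliased) survive the copy automatically and only the id needs rewriting. A small sketch of that copy-and-prefix pattern, using a reduced Vertex for illustration:

from dataclasses import asdict, dataclass

@dataclass(frozen=True)
class Vertex:
    id: str
    label: str
    module: str = ""

def prefix_vertex(parent_id: str, v: Vertex) -> Vertex:
    # copy every field, then namespace the id under the parent subworkflow call
    args = asdict(v)
    args["id"] = "_".join([parent_id, v.id])
    return Vertex(**args)

print(prefix_vertex("wf1", Vertex("p2", "FASTQC", "modules/fastqc")))

The asdict round-trip keeps working as Vertex grows, at the cost of building an intermediate dict per copied vertex.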
2 changes: 1 addition & 1 deletion latch_cli/extras/nextflow/serialize.py
@@ -8,4 +8,4 @@ def serialize_nf(
image_name: str,
dkr_repo: str,
):
serialize(nf_wf, output_dir, image_name, dkr_repo)
serialize(nf_wf, output_dir, image_name, dkr_repo, write_spec=True)