Skip to content

Commit

Permalink
prep for version support
Browse files Browse the repository at this point in the history
  • Loading branch information
wpbonelli committed Nov 5, 2024
1 parent 2fc3107 commit 5984f18
Show file tree
Hide file tree
Showing 4 changed files with 153 additions and 143 deletions.
4 changes: 2 additions & 2 deletions autotest/test_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def test_dfn_load(dfn_name):
open(DFN_PATH / f"{dfn_name}.dfn", "r") as dfn_file,
):
name = Dfn.Name.parse(dfn_name)
common, _ = Dfn._load(common_file)
common, _ = Dfn._load_v1_flat(common_file)
Dfn.load(dfn_file, name=name, common=common)


Expand All @@ -34,7 +34,7 @@ def test_make_targets(dfn_name, function_tmpdir):
open(DFN_PATH / f"{dfn_name}.dfn", "r") as dfn_file,
):
name = Dfn.Name.parse(dfn_name)
common, _ = Dfn._load(common_file)
common, _ = Dfn._load_v1_flat(common_file)
dfn = Dfn.load(dfn_file, name=name, common=common)

make_targets(dfn, function_tmpdir, verbose=True)
Expand Down
233 changes: 123 additions & 110 deletions flopy/mf6/utils/codegen/dfn.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
Any,
Dict,
List,
Literal,
NamedTuple,
Optional,
Tuple,
Expand All @@ -14,6 +15,8 @@

from boltons.dictutils import OMD

from flopy.utils.utl_import import import_optional_dependency

_SCALARS = {
"keyword",
"integer",
Expand Down Expand Up @@ -172,6 +175,8 @@ def parse(cls, v: str) -> "Dfn.Name":
def __str__(self) -> str:
    # Render the name as its components joined by hyphens
    # (the inverse of `Name.parse`, which splits on "-").
    return "-".join(self)

Version = Literal[1]

name: Optional[Name]
meta: Optional[Dict[str, Any]]

Expand All @@ -186,14 +191,9 @@ def __init__(
self.meta = meta

@staticmethod
def _load(f, common: Optional[dict] = None) -> Tuple[OMD, List[str]]:
"""
Internal use only. Loads the DFN as a flat multi-dictionary* with a
list of string metadata, which are then parsed into structured form.
*The point of this is to losslessly handle duplicate variable names.
"""
def _load_v1_flat(
f, common: Optional[dict] = None, **kwargs
) -> Tuple[OMD, List[str]]:
var = dict()
flat = list()
meta = list()
Expand Down Expand Up @@ -270,33 +270,22 @@ def _load(f, common: Optional[dict] = None) -> Tuple[OMD, List[str]]:
if any(var):
flat.append((var["name"], var))

# the point of the OMD is to losslessly handle duplicate variable names
return OMD(flat), meta

@classmethod
def load(
cls,
f,
name: Optional[Name] = None,
refs: Optional[Dfns] = None,
**kwargs,
) -> "Dfn":
"""
Load an input definition from a DFN file.
Notes
-----
Loads the DFN as a flat multidict with `_load()`
then walks composite variables and builds a tree.
"""

flat, meta = Dfn._load(f, **kwargs)
refs = refs or dict()
def _load_v1(cls, f, name, **kwargs) -> "Dfn":
flat, meta = Dfn._load_v1_flat(f, **kwargs)
refs = kwargs.pop("refs", dict())
fkeys = dict()

def _map(spec: Dict[str, Any]) -> Var:
"""
Convert a variable specification from its representation
in an input definition file to a Pythonic form.
Convert an input variable specification from its shape
in a classic definition file to a Python-friendly form.
This involves trimming unneeded attributes and setting
some others.
Notes
-----
Expand All @@ -316,50 +305,30 @@ def _map(spec: Dict[str, Any]) -> Var:

_name = spec["name"]
_type = spec.get("type", None)
block = spec.get("block", None)
shape = spec.get("shape", None)
shape = None if shape == "" else shape
block = spec.get("block", None)
children = dict()
default = spec.get("default", None)
default = (
_try_literal_eval(default) if _type != "string" else default
)
description = spec.get("description", "")
children = dict()
fkey = refs.get(_name, None)

# if var is a foreign key, register it
fkey = refs.get(_name, None)
if fkey:
fkeys[_name] = fkey

def _choices() -> Vars:
"""Load a union's children (choices)."""
names = _type.split()[1:]
return {
v["name"]: _map(v)
for v in flat.values(multi=True)
if v["name"] in names and v.get("in_record", False)
}

def _fields() -> Vars:
"""Load a record's scalar children (fields)."""
names = _type.split()[1:]
return {
v["name"]: _map(v)
for v in flat.values(multi=True)
if v["name"] in names
and v.get("in_record", False)
and not v["type"].startswith("record")
}
def _items() -> Vars:
"""Load a list's children (items: record or union of records)."""

# list, child is the item type
if _type.startswith("recarray"):
names = _type.split()[1:]
types = [
v["type"]
for v in flat.values(multi=True)
if v["name"] in names and v.get("in_record", False)
]

n_names = len(names)
if n_names < 1:
raise ValueError(f"Missing recarray definition: {_type}")
Expand All @@ -371,19 +340,18 @@ def _fields() -> Vars:
# be defined with a nested record (explicit) or with a
# set of fields directly in the recarray (implicit). an
# irregular list is always defined with a nested union.
is_explicit_composite = n_names == 1 and (
is_explicit = n_names == 1 and (
types[0].startswith("record")
or types[0].startswith("keystring")
)
is_implicit_scalar_record = all(t in _SCALARS for t in types)

if is_explicit_composite:
if is_explicit:
child = next(iter(flat.getlist(names[0])))
children = {names[0]: _map(child)}
_type = "list"
elif is_implicit_scalar_record:
return {names[0]: _map(child)}
elif all(t in _SCALARS for t in types):
# implicit simple record (all fields are scalars)
fields = _fields()
children = {
return {
_name: Var(
name=_name,
type="record",
Expand All @@ -394,9 +362,8 @@ def _fields() -> Vars:
),
)
}
_type = "list"
else:
# implicit complex record (i.e. some fields are records or unions)
# implicit complex record (some fields are records or unions)
fields = {
v["name"]: _map(v)
for v in flat.values(multi=True)
Expand All @@ -410,7 +377,7 @@ def _fields() -> Vars:
if single and "keystring" in first["type"]
else "record"
)
children = {
return {
name_: Var(
name=name_,
type=child_type,
Expand All @@ -421,80 +388,115 @@ def _fields() -> Vars:
),
)
}
_type = "list"

# union (product) type
def _choices() -> Vars:
"""Load a union's children (choices)."""
names = _type.split()[1:]
return {
v["name"]: _map(v)
for v in flat.values(multi=True)
if v["name"] in names and v.get("in_record", False)
}

def _fields() -> Vars:
"""Load a record's children (fields)."""
names = _type.split()[1:]
return {
v["name"]: _map(v)
for v in flat.values(multi=True)
if v["name"] in names
and v.get("in_record", False)
and not v["type"].startswith("record")
}

if _type.startswith("recarray"):
children = _items()
_type = "list"

elif _type.startswith("keystring"):
children = _choices()
_type = "union"

# record (sum) type
elif _type.startswith("record"):
children = _fields()
_type = "record"

# at this point, if it has a shape, it's an array. check its type
elif shape is not None:
if _type not in _SCALARS:
raise TypeError(f"Unsupported array type: {_type}")
# for now, we can tell a var is an array if its type
# is scalar and it has a shape. once we have proper
# typing, this can be read off the type itself.
elif shape is not None and _type not in _SCALARS:
raise TypeError(f"Unsupported array type: {_type}")

# if the var is a foreign key, swap in the referenced variable
ref = refs.get(_name, None)
if not ref:
# if var is a foreign key, return subpkg var instead
if fkey:
return Var(
name=_name,
name=fkey["param" if name == ("sim", "nam") else "val"],
type=_type,
shape=shape,
block=block,
description=description,
default=default,
children=children,
meta={"ref": fkey},
children=None,
description=(
f"* Contains data for the {fkey['abbr']} package. Data can be "
f"stored in a dictionary containing data for the {fkey['abbr']} "
"package with variable names as keys and package data as "
f"values. Data just for the {fkey['val']} variable is also "
f"acceptable. See {fkey['abbr']} package documentation for more "
"information"
),
default=None,
fkey=fkey,
)

return Var(
name=ref["param" if name == ("sim", "nam") else "val"],
name=_name,
type=_type,
shape=shape,
block=block,
description=(
f"* Contains data for the {ref['abbr']} package. Data can be "
f"stored in a dictionary containing data for the {ref['abbr']} "
"package with variable names as keys and package data as "
f"values. Data just for the {ref['val']} variable is also "
f"acceptable. See {ref['abbr']} package documentation for more "
"information"
),
default=None,
children=None,
meta={"ref": ref},
children=children,
description=description,
default=default,
)

# pass the original DFN representation as
# metadata so the shim can use it for now
_vars = list(flat.values(multi=True))

# convert input variable specs to
# structured form, descending into
# composites recursively as needed
vars_ = {
var["name"]: _map(var)
for var in flat.values(multi=True)
if not var.get("in_record", False)
}

return cls(
vars_,
{
var["name"]: _map(var)
for var in flat.values(multi=True)
if not var.get("in_record", False)
},
name,
{
"dfn": (_vars, meta),
"refs": fkeys,
"dfn": (
# pass the original DFN representation as
# metadata so templates can use it for now,
# eventually we can hopefully drop this
list(flat.values(multi=True)),
meta,
),
"fkeys": fkeys,
},
)

@staticmethod
def load_all(dfndir: PathLike) -> Dict[str, "Dfn"]:
"""Load all input definitions from the given directory."""
@classmethod
def load(
    cls,
    f,
    name: Optional[Name] = None,
    version: Version = 1,
    **kwargs,
) -> "Dfn":
    """
    Load an input definition from a DFN file.

    Parameters
    ----------
    f : file-like
        Open handle on the DFN file to read.
    name : Name, optional
        The definition's name (e.g. parsed from the file name).
    version : Version, optional
        DFN format version; only version 1 is currently supported.
    **kwargs
        Forwarded to the version-specific loader.

    Raises
    ------
    ValueError
        If ``version`` is not a supported DFN format version.
    """
    if version == 1:
        return cls._load_v1(f, name, **kwargs)
    # BUG FIX: `version` is a plain int at runtime (e.g. 2) and has no
    # `__args__`; formatting the message with `version.__args__` would
    # raise AttributeError instead of the intended ValueError. The tuple
    # of supported versions lives on the `Literal` alias `cls.Version`.
    raise ValueError(
        f"Unsupported version, expected one of {cls.Version.__args__}"
    )

@staticmethod
def _load_all_v1(dfndir: PathLike) -> Dfns:
# find definition files
paths = [
p
Expand All @@ -508,7 +510,7 @@ def load_all(dfndir: PathLike) -> Dict[str, "Dfn"]:
common = None
else:
with open(common_path, "r") as f:
common, _ = Dfn._load(f)
common, _ = Dfn._load_v1_flat(f)

# load subpackage references first
refs: Refs = {}
Expand All @@ -529,3 +531,14 @@ def load_all(dfndir: PathLike) -> Dict[str, "Dfn"]:
dfns[name] = dfn

return dfns

@staticmethod
def load_all(dfndir: PathLike, version: Version = 1) -> Dfns:
    """
    Load all input definitions from the given directory.

    Parameters
    ----------
    dfndir : PathLike
        Directory containing DFN files.
    version : Version, optional
        DFN format version; only version 1 is currently supported.

    Raises
    ------
    ValueError
        If ``version`` is not a supported DFN format version.
    """
    if version == 1:
        return Dfn._load_all_v1(dfndir)
    # BUG FIX: `version` is a plain int at runtime (e.g. 2) and has no
    # `__args__`; formatting the message with `version.__args__` would
    # raise AttributeError instead of the intended ValueError. The tuple
    # of supported versions lives on the `Literal` alias `Dfn.Version`.
    raise ValueError(
        f"Unsupported version, expected one of {Dfn.Version.__args__}"
    )
Loading

0 comments on commit 5984f18

Please sign in to comment.