Error for unrecognized lift manifest fields. #47

Merged · 9 commits · Sep 24, 2023 · Changes from 4 commits
5 changes: 5 additions & 0 deletions CHANGES.md
@@ -1,5 +1,10 @@
# Release Notes

## 0.2.2

Unrecognized lift manifest configuration data now generates an informative error instead of
silently skipping the unrecognized configuration.
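To make the changelog entry concrete, here is a self-contained sketch of the new behavior. It does not use the project's real `Data`/`parse_config_str` machinery, and the set of recognized keys below is an illustrative assumption:

```python
# Self-contained sketch (not the project's actual API): load a manifest with tomllib
# and reject any [lift] entries that are not recognized, instead of dropping them.
import tomllib

RECOGNIZED = {"name", "description", "scie_jump", "interpreters", "commands"}  # illustrative subset

manifest = tomllib.loads(
    """
    [lift]
    name = "igs"
    description = "Test interpreter group selection."

    [lift.scie-jump]  # misspelled: the recognized key is scie_jump
    version = "0.11.0"
    """
)

unrecognized = [key for key in manifest["lift"] if key not in RECOGNIZED]
if unrecognized:
    # Before 0.2.2 these keys were silently ignored; now they surface as an error.
    raise ValueError(f"Unrecognized `lift` manifest entries: {', '.join(unrecognized)}")
```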

## 0.2.1

Fix command descriptions not being rendered in the JSON lift manifest of built scies.
2 changes: 1 addition & 1 deletion lift.toml
@@ -40,4 +40,4 @@ remove_re = [
]
[lift.commands.env.replace]
SHIV_ROOT = "{scie.bindings}/shiv_root"
SCIENCE_DOC_LOCAL = "{docsite}/index.html"
SCIENCE_DOC_LOCAL = "{docsite}"
2 changes: 1 addition & 1 deletion science/__init__.py
@@ -3,6 +3,6 @@

from packaging.version import Version

__version__ = "0.2.1"
__version__ = "0.2.2"

VERSION = Version(__version__)
18 changes: 16 additions & 2 deletions science/config.py
@@ -3,6 +3,7 @@

from __future__ import annotations

import os
import tomllib
from collections import OrderedDict
from dataclasses import dataclass
@@ -45,7 +46,7 @@ def parse_config_str(config: str) -> Application:

def parse_build_info(data: Data) -> BuildInfo:
return BuildInfo.gather(
lift_toml=data.provenance, app_info=data.get_data("app_info", default={}).data
lift_toml=data.provenance, app_info=data.get_data("app_info", default={}, used=True).data
)


@@ -102,9 +103,22 @@ def parse_interpreter_group(ig_data: Data) -> InterpreterGroup:
interpreters=[interpreters_by_id[Identifier(member)] for member in members],
)

return parse_dataclass(
application = parse_dataclass(
lift,
Application,
interpreters=tuple(interpreters_by_id.values()),
custom_parsers={BuildInfo: parse_build_info, InterpreterGroup: parse_interpreter_group},
)

unused_items = list(lift.iter_unused_items())
if unused_items:
eol = os.linesep
raise InputError(
f"The following `lift` manifest entries in {data.provenance.source} were not recognized:{eol}"
f"{os.linesep.join(key for key, _ in unused_items)}{eol}"
f"{eol}"
"Refer to the lift manifest format specification at https://science.scie.app/manifest.html or by "
"running `science doc open manifest`."
)

return application
84 changes: 65 additions & 19 deletions science/data.py
@@ -3,11 +3,11 @@

from __future__ import annotations

import dataclasses
import os
from collections import OrderedDict
from dataclasses import dataclass
from enum import Enum, auto
from typing import Any, Collection, Mapping, TypeVar, cast
from typing import Any, Collection, Iterator, Mapping, TypeVar

from science.errors import InputError
from science.frozendict import FrozenDict
@@ -16,11 +16,15 @@
_T = TypeVar("_T")


@dataclass(frozen=True)
@dataclass
class Data:
provenance: Provenance
data: FrozenDict[str, Any]
path: str = ""
_unused_data: dict[str, Any] = dataclasses.field(init=False)

def __post_init__(self) -> None:
self._unused_data = dict(self.data)

def config(self, key: str) -> str:
return f"`[{self.path}] {key}`" if self.path else f"`[{key}]`"
@@ -30,15 +34,20 @@ class Required(Enum):

REQUIRED = Required.VALUE

def get_data(self, key: str, default: dict[str, Any] | Required = REQUIRED) -> Data:
data = self.get_value(
key, expected_type=Mapping, default=default # type: ignore[type-abstract]
def get_data(
self, key: str, default: dict[str, Any] | Required = REQUIRED, used: bool = False
) -> Data:
raw_data = self.get_value(
key, expected_type=Mapping, default=default, used=used # type: ignore[type-abstract]
)
return Data(
data = Data(
provenance=self.provenance,
data=FrozenDict(data),
data=FrozenDict(raw_data),
path=f"{self.path}.{key}" if self.path else key,
)
if not used:
self._unused_data[key] = data
return data

def get_str(self, key: str, default: str | Required = REQUIRED) -> str:
return self.get_value(key, expected_type=str, default=default)
@@ -57,49 +66,72 @@ def get_list(
key: str,
expected_item_type: type[_T],
default: list[_T] | Required = REQUIRED,
used: bool = True,
) -> list[_T]:
value = self.get_value(
key, expected_type=Collection, default=default # type: ignore[type-abstract]
)
invalid_entries = OrderedDict(
(index, item)
for index, item in enumerate(value, start=1)
if not isinstance(item, expected_item_type)
key, expected_type=Collection, default=default, used=used # type: ignore[type-abstract]
)

items = []
invalid_entries = {}
for index, item in enumerate(value, start=1):
if isinstance(item, expected_item_type):
items.append(item)
else:
# As a last resort, see if the item is convertible to the expected_item_type via its constructor. This
# supports Enum and similar types.
try:
items.append(expected_item_type(item)) # type: ignore[call-arg]
except (TypeError, ValueError):
invalid_entries[index] = item

if invalid_entries:
invalid_items = [
f"item {index}: {item} of type {self._typename(type(item))}"
for index, item in invalid_entries.items()
]
expected_values = ""
if issubclass(expected_item_type, Enum):
expected_values = f" from {{{', '.join(repr(expected.value) for expected in expected_item_type)}}}"

raise InputError(
f"Expected {self.config(key)} defined in {self.provenance.source} to be a list "
f"with items of type {self._typename(expected_item_type)} but got "
f"with items of type {self._typename(expected_item_type)}{expected_values} but got "
f"{len(invalid_entries)} out of {len(value)} entries of the wrong type:{os.linesep}"
f"{os.linesep.join(invalid_items)}"
)
return cast(list[_T], value)
return items
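The constructor fallback in `get_list` above is what lets plain TOML strings be coerced into richer types such as `Enum` members. A standalone illustration of the same pattern (the `Platform` enum and its values are hypothetical, not the project's):

```python
# Standalone illustration of the constructor fallback: items that are not already of
# the expected type are passed to the type's constructor, which is how a TOML string
# like "linux-x86_64" can become an Enum member.
from enum import Enum


class Platform(Enum):
    LINUX_X86_64 = "linux-x86_64"
    MACOS_AARCH64 = "macos-aarch64"


raw_items = ["linux-x86_64", Platform.MACOS_AARCH64, "windows-amd64"]

items, invalid = [], {}
for index, item in enumerate(raw_items, start=1):
    if isinstance(item, Platform):
        items.append(item)
    else:
        try:
            items.append(Platform(item))  # Enum lookup by value
        except ValueError:
            invalid[index] = item

print(items)    # [<Platform.LINUX_X86_64: 'linux-x86_64'>, <Platform.MACOS_AARCH64: 'macos-aarch64'>]
print(invalid)  # {3: 'windows-amd64'}
```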

def get_data_list(
self,
key: str,
default: list[dict] | Required = REQUIRED,
) -> list[Data]:
return [
data_list = [
Data(
provenance=self.provenance,
data=FrozenDict(data),
path=f"{self.path}.{key}[{index}]" if self.path else key,
)
for index, data in enumerate(
self.get_list(key, expected_item_type=Mapping, default=default), start=1
self.get_list(key, expected_item_type=Mapping, default=default, used=False), start=1
)
]
if data_list:
self._unused_data[key] = data_list
return data_list

@staticmethod
def _typename(type_: type) -> str:
return "toml table" if issubclass(type_, Mapping) else type_.__name__

def get_value(self, key: str, expected_type: type[_T], default: _T | Required = REQUIRED) -> _T:
def get_value(
self,
key: str,
expected_type: type[_T],
default: _T | Required = REQUIRED,
used: bool = True,
) -> _T:
if key not in self.data:
if default is self.REQUIRED:
raise InputError(
@@ -114,7 +146,21 @@ def get_value(self, key: str, expected_type: type[_T], default: _T | Required =
f"Expected a {self._typename(expected_type)} for {self.config(key)} but found "
f"{value} of type {self._typename(type(value))} in {self.provenance.source}."
)
if used:
self._unused_data.pop(key, None)
return value

def iter_unused_items(self) -> Iterator[tuple[str, Any]]:
for key, value in self._unused_data.items():
if isinstance(value, list) and all(isinstance(item, Data) for item in value):
for index, item in enumerate(value, start=1):
for sub_key, sub_value in item.iter_unused_items():
yield f"{key}[{index}].{sub_key}", sub_value
elif isinstance(value, Data):
for sub_key, sub_value in value.iter_unused_items():
yield f"{key}.{sub_key}", sub_value
else:
yield key, value

def __bool__(self):
return bool(self.data)
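A standalone sketch of the dotted key paths that `iter_unused_items` produces for the error raised in `science/config.py`; the real method walks nested `Data` objects and lists of `Data`, while this mirror uses plain dicts:

```python
# Standalone sketch of how unused entries are flattened into dotted key paths for the
# "not recognized" error message; plain dicts stand in for nested Data objects.
from typing import Any, Iterator


def iter_unused(unused: dict[str, Any]) -> Iterator[tuple[str, Any]]:
    for key, value in unused.items():
        if isinstance(value, list) and all(isinstance(item, dict) for item in value):
            for index, item in enumerate(value, start=1):
                for sub_key, sub_value in iter_unused(item):
                    yield f"{key}[{index}].{sub_key}", sub_value
        elif isinstance(value, dict):
            for sub_key, sub_value in iter_unused(value):
                yield f"{key}.{sub_key}", sub_value
        else:
            yield key, value


# "scie-jump" and "nmae" are deliberate typos standing in for misspelled manifest keys.
leftovers = {"scie-jump": {"version": "0.11.0"}, "commands": [{"nmae": "run"}]}
print(dict(iter_unused(leftovers)))
# {'scie-jump.version': '0.11.0', 'commands[1].nmae': 'run'}
```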
41 changes: 22 additions & 19 deletions science/dataclass/deserializer.py
@@ -54,40 +54,43 @@ def _parse_field(

if map_type := type_.issubtype(Mapping):
# default is Env
data_value = data.get_data(name, default=cast(dict[str, Any] | Data.Required, default))
data_value = data.get_data(
name, default=cast(dict[str, Any] | Data.Required, default), used=True
)
# We assume any mapping type used will be constructible with a single dict argument.
return cast(_F, map_type(data_value.data))

if type_.issubtype(Collection) and not type_.issubtype(str):
item_type = type_.item_type
items: list[Any] = []
if dataclasses.is_dataclass(item_type) or isinstance(item_type, Mapping):
data_list = data.get_data_list(name, default=cast(list | Data.Required, default))

if isinstance(item_type, Mapping):
return cast(
_F,
[
# We assume any mapping type used will be constructible with a single dict
# argument.
cast(Mapping, item_type(data_item.data)) # type: ignore[misc]
for data_item in data_list
],
items.extend(
# We assume any mapping type used will be constructible with a single dict
# argument.
cast(_F, item_type(data_item.data)) # type: ignore[misc]
for data_item in data_list
)
else:
return cast(
_F,
[
parse(data_item, item_type, custom_parsers=custom_parsers)
for data_item in data_list
],
items.extend(
parse(data_item, item_type, custom_parsers=custom_parsers)
for data_item in data_list
)
else:
return cast(
_F,
items.extend(
data.get_list(
name, expected_item_type=item_type, default=cast(list | Data.Required, default)
),
)
)

if type_.has_origin_type and (origin_type := type_.origin_type) is not list:
# I.E.: tuple, frozenset, etc.
return origin_type(items) # type: ignore[call-arg]

return cast(_F, items)
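The `origin_type` rebuild above restores the field's declared container (for example `commands: tuple[Command, ...]` in `science/model.py`). A stdlib-only illustration of that idea:

```python
# Stdlib-only illustration of rebuilding a field's declared container type from the
# accumulated list of parsed items, as done above for tuple- or frozenset-typed fields.
import typing

items = ["interpreters", "files", "commands"]

for annotation in (tuple[str, ...], frozenset[str], list[str]):
    origin = typing.get_origin(annotation)  # tuple, frozenset, list
    container = items if origin is list else origin(items)
    print(origin.__name__, container)
```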

value = data.get_value(name, expected_type=object, default=default)
if value is default:
return cast(_F, value)
@@ -121,7 +124,7 @@ def _parse_field(


def parse(
data,
data: Data,
data_type: type[_D],
*,
custom_parsers: Mapping[type, typing.Callable[[Data], Any]] = FrozenDict(),
21 changes: 17 additions & 4 deletions science/exe.py
@@ -15,10 +15,11 @@
import textwrap
import traceback
from io import BytesIO
from pathlib import Path
from pathlib import Path, PurePath
from textwrap import dedent
from types import TracebackType
from typing import BinaryIO
from urllib.parse import urlparse, urlunparse

import click
import click_log
@@ -199,10 +200,22 @@ def _doc(ctx: click.Context, site: str, local: Path | None) -> None:
"""
),
)
@click.argument("page", default=None, required=False)
@pass_doc
def _open_doc(doc: DocConfig, remote: bool) -> None:
"""Opens the local documentation in a browser."""
click.launch(doc.site if remote else f"file://{doc.local}")
def _open_doc(doc: DocConfig, remote: bool, page: str | None = None) -> None:
"""Opens the local documentation in a browser.

If an optional page argument is supplied, that page will be opened instead of the default doc site page.
"""
url = doc.site if remote else f"file://{doc.local}"
if not page:
if not remote:
url = f"{url}/index.html"
else:
url_info = urlparse(url)
page = f"{page}.html" if not PurePath(page).suffix else page
url = urlunparse(url_info._replace(path=f"{url_info.path}/{page}"))
click.launch(url)
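A small stdlib-only check of the URL handling in `_open_doc`: a page argument gains an `.html` suffix when it has none and is appended to the site path, which is what lets `science doc open manifest` work. The base URL below is illustrative:

```python
# Stdlib-only check of the remote-URL construction above: append a page (adding
# ".html" when no suffix is given) to the doc site URL. The base URL is illustrative;
# the local-file branch in the real command appends /index.html instead.
from pathlib import PurePath
from urllib.parse import urlparse, urlunparse


def doc_url(base: str, page: str | None) -> str:
    if not page:
        return base
    url_info = urlparse(base)
    page = f"{page}.html" if not PurePath(page).suffix else page
    return urlunparse(url_info._replace(path=f"{url_info.path}/{page}"))


print(doc_url("https://science.scie.app", "manifest"))       # https://science.scie.app/manifest.html
print(doc_url("https://science.scie.app", "manifest.html"))  # https://science.scie.app/manifest.html
```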


@_main.group(cls=DYMGroup, name="provider")
4 changes: 2 additions & 2 deletions science/model.py
@@ -487,8 +487,8 @@ class Application(Dataclass):
interpreters: tuple[Interpreter, ...] = ()
interpreter_groups: tuple[InterpreterGroup, ...] = ()
files: tuple[File, ...] = ()
commands: frozenset[Command] = frozenset()
bindings: frozenset[Command] = frozenset()
commands: tuple[Command, ...] = ()
bindings: tuple[Command, ...] = ()
scie_jump: ScieJump | None = None
ptex: Ptex | None = None

2 changes: 1 addition & 1 deletion tests/data/interpreter-groups.toml
@@ -2,7 +2,7 @@
name = "igs"
description = "Test interpreter group selection."

[lift.scie-jump]
[lift.scie_jump]
Contributor Author:
N.B.: Before adding any tests, the existing tests failed here on the scie-jump misspelling. This same silent error (prior to this change) is what bit @huonw in scie-pants recently.

version = "0.11.0"

[[lift.interpreters]]