tools: Export RDF data about features and boards #20395

Draft · wants to merge 17 commits into base: master
25 changes: 22 additions & 3 deletions Makefile
@@ -7,9 +7,27 @@ all: welcome
@exit 1

doc:
@./dist/tools/features_yaml2mx/features_yaml2mx.py \
@./dist/tools/python_with_requirements/python_with_requirements \
./dist/tools/features_yaml2mx/features_yaml2mx.py \
features.yaml \
--output-md doc/doxygen/src/feature_list.md
--output-md doc/doxygen/src/feature_list.md \
--output-ttl doc/rdf/features.ttl
@# BUILD_IN_DOCKER is a workaround for the RISC-V architecture tests,
@# which otherwise trigger when building e.g. in murdock
@# ("No RISC-V toolchain detected. Make sure a RISC-V toolchain is
@# installed."). If I had a penny for every time I refused to let
@# BUILD_IN_DOCKER be the simple workaround, I wouldn't get a penny
@# today (but I'd still be proud of my small collection so far).
unset BOARDS; \
BUILD_IN_DOCKER=1 \
./dist/tools/python_with_requirements/python_with_requirements \
./dist/tools/rdf/info_to_rdf.py \
doc/rdf/info.ttl
@./dist/tools/python_with_requirements/python_with_requirements \
./dist/tools/rdf/doxygen_to_rdf.py \
doc/rdf/doxygen.ttl
@./dist/tools/python_with_requirements/python_with_requirements \
./dist/tools/rdf/build_board_feature_table.py > doc/doxygen/src/feature_table.html
"$(MAKE)" -BC doc/doxygen

doc-man:
@@ -37,7 +55,8 @@ print-versions:
@./dist/tools/ci/print_toolchain_versions.sh

generate-features:
@./dist/tools/features_yaml2mx/features_yaml2mx.py \
@./dist/tools/python_with_requirements/python_with_requirements \
./dist/tools/features_yaml2mx/features_yaml2mx.py \
features.yaml \
--output-makefile makefiles/features_existing.inc.mk
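
For orientation, not part of the diff: once `make doc` has written
doc/rdf/features.ttl as above, the export can be consumed with plain rdflib,
for example (the terms:Feature typing is added by features_yaml2mx.py below):

    import rdflib

    TERMS = rdflib.Namespace("https://ns.riot-os.org/by-version/main/terms/")

    g = rdflib.Graph()
    g.parse("doc/rdf/features.ttl", format="turtle")

    # Print every exported feature URI together with its human-readable label
    for feature in g.subjects(rdflib.RDF.type, TERMS["Feature"]):
        print(feature, g.value(feature, rdflib.RDFS.label))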

113 changes: 108 additions & 5 deletions dist/tools/features_yaml2mx/features_yaml2mx.py
@@ -3,9 +3,29 @@
Command line utility to generate a trivial Makefile listing all existing
features in RIOT and matching documentation in Markdown format from a single
YAML file.
"""
import sys
import argparse
import yaml
import rdflib
from typing import Optional
import pycddl
import zcbor
from pathlib import Path
import cbor2

# We may offer building with a different ns_main later to compare statements
# about different versions of RIOT in a single graph, but for the time being,
# the assumption that what we are working with is the main branch is good
# enough.
ns_main = "https://ns.riot-os.org/by-version/main/"
ns = dict(
    feature=rdflib.Namespace(ns_main + "feature/"),
    featureskos=rdflib.Namespace(ns_main + "feature-skos/"),
    terms=rdflib.Namespace(ns_main + "terms/"),

    skos=rdflib.Namespace('http://www.w3.org/2004/02/skos/core#'),
    dct=rdflib.Namespace('http://purl.org/dc/terms/'),
)
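
# Illustrative aside, not something this change adds: indexing these
# namespaces mints versioned term URIs; "periph_gpio" below is just an example
# feature name.
#
#     ns['feature']['periph_gpio']
#     # == rdflib.URIRef("https://ns.riot-os.org/by-version/main/feature/periph_gpio")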

def collect_features(parsed):
"""
@@ -76,14 +96,14 @@
if "features" in group:
outfile.write("\n")
outfile.write("""\
| Feature | Description |
|:--------------------------------- |:----------------------------------------------------------------------------- |
| Feature | Description |
|:----------------------------------------------------------------------------------- |:----------------------------------------------------------------------------- |

""")

for feature in group["features"]:
name = f"`{feature['name']}`"
description = feature['help'].strip().replace("\n", " ")
outfile.write(f"| {name:<33} | {description:<77} |\n")
outfile.write(f"| {name:<33} @anchor feature_{feature['name']:<33} | {description:<77} |\n")

for group in group.get('groups', []):
outfile.write("\n")
@@ -115,8 +135,70 @@
""")
write_md_section(outfile, parsed, 0)

def populate_graph(g: rdflib.Graph, skosscheme: rdflib.term.Node,
                   parentnode: Optional[rdflib.term.Node], content: dict):
    help = content.get("help", None)
    title = content["title"]

    thisnode = rdflib.BNode()
    # Not making everything perfectly short-identifier-safe because it is not
    # critical: the serialization takes care of falling back to the universal
    # URI form, e.g. if there is a ² in there.
    thisnode = ns['featureskos'][title
                                 .replace(' ', '_')
                                 .replace('/', '_')
                                 # https://wileylabs.github.io/askos/ trips over those when parsing Turtle
                                 .replace('(', '_')
                                 .replace(')', '_')
                                 ]
    if (thisnode, None, None) in g:
        raise RuntimeError(f"Tree contains duplicate category {title}. Ensure "
                           "unique names or alter the pattern in which "
                           "featureskos: names are generated")

    if parentnode:
        g.add((parentnode, ns['skos']['narrower'], thisnode))
        # One can argue either way about whether we should include this statement
        # that obviously follows from the line above;
        # https://skos-play.sparna.fr/skos-testing-tool/ thinks we should. Same
        # goes for `topConceptOf` below.
        g.add((thisnode, ns['skos']['broader'], parentnode))
    else:
        g.add((skosscheme, ns['skos']['hasTopConcept'], thisnode))
        # see comment about "broader" above
        g.add((thisnode, ns['skos']['topConceptOf'], skosscheme))
    g.add((thisnode, rdflib.RDF.type, ns['skos']['Concept']))
    g.add((thisnode, ns['skos']['inScheme'], skosscheme))
    g.add((thisnode, ns['skos']['prefLabel'], rdflib.Literal(title, lang="en")))
    if help:
        g.add((thisnode, ns['skos']['definition'], rdflib.Literal(help, lang="en")))

    for feature in content.get("features", []):
        name = feature["name"]
        help = feature["help"]
        g.add((ns['feature'][name], rdflib.RDFS.label, rdflib.Literal(name, lang="en")))
        g.add((ns['feature'][name], rdflib.RDFS.comment, rdflib.Literal(help, lang="en")))
        g.add((ns['feature'][name], ns['terms']['doc'],
               rdflib.URIRef("https://doc.riot-os.org/"
                             f"md_doc_2doxygen_2src_2feature__list.html#feature_{name}")))
        g.add((ns['feature'][name], rdflib.RDF.type, ns['terms']['Feature']))
        g.add((ns['feature'][name], ns['dct'].subject, thisnode))

    for group in content.get("groups", []):
        populate_graph(g, skosscheme, thisnode, group)
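
# A minimal illustrative sketch, not part of this change, of the pairing
# described above: for a toy top-level group, both directions of the hierarchy
# plus the scheme membership end up in the graph.
#
#     g = rdflib.Graph()
#     scheme = ns['featureskos']['scheme']
#     populate_graph(g, scheme, None, {"title": "Peripherals"})
#     assert (scheme, ns['skos']['hasTopConcept'],
#             ns['featureskos']['Peripherals']) in g
#     assert (ns['featureskos']['Peripherals'],
#             ns['skos']['topConceptOf'], scheme) in g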

def write_ttl(outfile, parsed):
    g = rdflib.Graph()
    for (k, v) in ns.items():
        g.bind(k, v)

    skosscheme = ns['featureskos']['scheme']
    g.add((skosscheme, rdflib.RDF.type, ns['skos']['ConceptScheme']))
    g.add((skosscheme, ns['skos']['prefLabel'], rdflib.Literal("RIOT OS feature hierarchy", lang="en")))
    for content in parsed['groups']:
        populate_graph(g, skosscheme, None, content)

    outfile.write(b"# This file is generated by `make doc`\n\n")
    g.serialize(outfile, format="turtle", encoding="utf-8")


def convert_features(yaml_file, mk_file, md_file):
def convert_features(yaml_file, mk_file, md_file, ttl_file: Optional[str]):

Check failure on line 201 in dist/tools/features_yaml2mx/features_yaml2mx.py

View workflow job for this annotation

GitHub Actions / static-tests

E302 expected 2 blank lines, found 1
"""
Convert the YAML file identified by the given path to a Makefile and
to a markdown file, if their paths are given.
Expand All @@ -129,10 +211,24 @@
:param md_file: Path to the markdown file to write the doc to or None
for not writing the doc
:type md_file: str or None
:param md_file: Path to the RDF N-triples file to write the doc to or
None for not writing the doc
"""
with open(yaml_file, 'rb') as file:
parsed = yaml.safe_load(file)

# PyCDDL is not perfect because it doesn't catch rogue elements: https://github.com/anweiss/cddl/issues/221
schema = pycddl.Schema(open(Path(__file__).parent / "schema.cddl").read())
schema.validate_cbor(cbor2.dumps(parsed))

# zcbor is not perfect either because it doesn't process the .within (and
# its error output just contains the whole input)
schema = zcbor.DataTranslator.from_cddl(
open(Path(__file__).parent / "schema-simplified.cddl").read(),
default_max_qty=sys.maxsize
)
schema.my_types['root'].from_yaml(open(yaml_file).read())

if mk_file is not None:
with open(mk_file, 'w', encoding="utf-8") as file:
write_makefile(file, yaml_file, parsed)
@@ -141,6 +237,10 @@
        with open(md_file, 'w', encoding="utf-8") as file:
            write_mdfile(file, yaml_file, parsed)

    if ttl_file is not None:
        with open(ttl_file, 'wb') as file:
            write_ttl(file, parsed)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
@@ -155,7 +255,10 @@
    parser.add_argument('--output-makefile', type=str, default=None,
                        help="Output file to write the makefile to " +
                        "(default: no makefile generated)")
    parser.add_argument('--output-ttl', type=str, default=None,
                        help="Output file to write RDF Turtle to " +
                        "(default: none generated)")

    args = parser.parse_args()

    convert_features(args.INPUT, args.output_makefile, args.output_md)
    convert_features(args.INPUT, args.output_makefile, args.output_md, args.output_ttl)
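
For orientation only, not additional code in the diff: with the new argument,
the doc target's invocation shown in the Makefile above boils down to the
following call (no Makefile output is requested there):

    convert_features("features.yaml",
                     None,
                     "doc/doxygen/src/feature_list.md",
                     "doc/rdf/features.ttl")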
7 changes: 7 additions & 0 deletions dist/tools/features_yaml2mx/requirements.txt
@@ -0,0 +1,7 @@
pyyaml == 6.0.0

rdflib == 7.0.0

pycddl == 0.6.1
cbor2 == 5.6.2
zcbor == 0.8.1
18 changes: 18 additions & 0 deletions dist/tools/features_yaml2mx/schema-simplified.cddl
@@ -0,0 +1,18 @@
; This version of schema.cddl was simplified for zcbor which
; does not support the `.within` control.
root = group-toplevel

group-toplevel = group
group-nested = group

group = {
    ? "title" => tstr,
    ? "help" => tstr,
    ? "features" => [ + feature ],
    ? "groups" => [ + group-nested ],
}

feature = {
    "name" => tstr,
    ? "help" => tstr,
}
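
As an illustration of the shape this schema accepts (a sketch, not part of the
change; the toy group and the relative path are assumptions), the same
pycddl/cbor2 round-trip that features_yaml2mx.py performs can validate an
in-memory document against it:

    import cbor2
    import pycddl

    toy = {
        "title": "Toy group",
        "help": "Only for illustration",
        "features": [{"name": "example_feature", "help": "An invented feature"}],
    }

    # First rule ("root") is the entry point of the schema above
    schema = pycddl.Schema(
        open("dist/tools/features_yaml2mx/schema-simplified.cddl").read())
    schema.validate_cbor(cbor2.dumps(toy))  # raises on mismatch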
61 changes: 61 additions & 0 deletions dist/tools/python_with_requirements/python_with_requirements
@@ -0,0 +1,61 @@
#!/usr/bin/env python3

"""
Given a Python script accompanied by a requirements.txt, check whether the
requirements are installed locally. If so, run direclty; otherwise, create a
.venv next to the script, install any requirements and run through there.
"""

import os
import subprocess
import sys
import venv
from pathlib import Path
import importlib.metadata

# Would be nice but is not universally present
try:
    import packaging.requirements
except ImportError:
    packaging = None

pythonscript = Path(sys.argv[1])
requirements = pythonscript.parent / "requirements.txt"
localvenv = pythonscript.parent / ".venv"

class BadVersion(ValueError):
    """Package is available but not in the required version"""

try:
    with requirements.open() as reqfile:
        # There is no accepted way to parse requirements.txt files
        #
        # https://stackoverflow.com/questions/49689880/proper-way-to-parse-requirements-file-after-pip-upgrade-to-pip-10-x-x
        for line in reqfile:
            line, _, _ = line.partition("#")
            line = line.strip()
            if not line:
                continue

            if packaging:
                req = packaging.requirements.Requirement(line)
                existing = importlib.metadata.version(req.name)
                if not req.specifier.contains(existing):
                    raise BadVersion
            else:
                req_name = line.split(" ", 1)[0]
                existing = importlib.metadata.version(req_name)
                # and as we can't compare versions, we're just disregarding
                # them and hope they match

    # This is completely disregarding the topic of extras -- but for
    # our purposes of getting a quick run-or-venv decision, that's good
    # enough
except (importlib.metadata.PackageNotFoundError, BadVersion):
    venv.EnvBuilder(symlinks=True, upgrade=True, with_pip=True).create(localvenv)
    venvpip = localvenv / "bin" / "pip"
    subprocess.check_call([venvpip, "install", "--quiet", "-r", requirements])
    venvpython = localvenv / "bin" / "python"
    os.execv(venvpython, [venvpython] + sys.argv[1:])
else:
    os.execv(sys.executable, [sys.executable] + sys.argv[1:])
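
A minimal sketch of the per-line check described in the docstring, using the
same packaging/importlib.metadata calls as the script (the version pin is just
an example):

    from importlib.metadata import PackageNotFoundError, version
    from packaging.requirements import Requirement

    req = Requirement("rdflib == 7.0.0")
    try:
        satisfied = req.specifier.contains(version(req.name))
    except PackageNotFoundError:
        satisfied = False
    # satisfied == False corresponds to the venv-and-reexec branch above,
    # satisfied == True to running the current interpreter directly.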