From 5e803ebdd3482db75dc752baa3cca6866750eff5 Mon Sep 17 00:00:00 2001 From: Chris Duf Date: Wed, 26 Apr 2023 21:59:25 +0200 Subject: [PATCH] ci: bundle devicetree Python package dtsh implementation is tightly coupled to the edtlib (sometimes private) API, which: a) is part of the Zephyr project source tree (at zephyr/scripts/dts/python-devicetree): this is the "authoritative" source for the edtlib library source code b) has its own, work-in-progress, repository (as project zephyrproject-rtos/python-devicetree): it lacks behind (a) and is not intended for public use yet [1] c) is also available from PyPI (as project devicetree): this package also lacks behind a), and additionally its version numbers can't be easily matched with Zephyr tags The approach here is to: - NOT rely on PyPI to get edtlib (i.e.. remove 'devicetree' from the dtsh Python requirements in setup.py and friends) - bundle edtlib with dtsh, and update it with each new Zephyr stable version Limitation: bundling edtlib definitely plays against installing dtsh within the same Python virtual environment as west. [1]: https://github.com/zephyrproject-rtos/zephyr/tree/main/scripts/dts --- README.org | 82 +- pyrightconfig.json | 25 + pytest.ini | 9 + requirements-dev.txt | 7 + setup.py | 85 +- src/devicetree/README | 18 + src/devicetree/__init__.py | 4 + src/devicetree/dtlib.py | 2138 +++++++++++++++++++++++++ src/devicetree/edtlib.py | 3088 ++++++++++++++++++++++++++++++++++++ src/devicetree/grutils.py | 161 ++ 10 files changed, 5465 insertions(+), 152 deletions(-) create mode 100644 pyrightconfig.json create mode 100644 pytest.ini create mode 100644 requirements-dev.txt create mode 100644 src/devicetree/README create mode 100644 src/devicetree/__init__.py create mode 100644 src/devicetree/dtlib.py create mode 100644 src/devicetree/edtlib.py create mode 100644 src/devicetree/grutils.py diff --git a/README.org b/README.org index f79b878..f08d444 100644 --- a/README.org +++ b/README.org @@ -70,6 +70,7 @@ All kinds of feedback and contribution are encouraged: please refer to the botto # Install dtsh in a dedicated Python virtual environment $ python -m venv --prompt dtsh .venv $ . .venv/bin/activate +$ pip install --upgrade pip setuptools $ pip install --upgrade dtsh # Setting ZEPHYR_BASE will help dtsh in building a default bindings search path @@ -142,12 +143,6 @@ It's recommended to install ~dtsh~ in a dedicated Python virtual environment. A Python /best practice/ is to always install a consistent set of /scripts/ and their dependencies in a dedicated [[https://peps.python.org/pep-0405/][virtual environment]], with up-to-date ~pip~, ~setuptools~ and ~wheel~ packages. -#+begin_src sh -python -m venv .venv -. .venv/bin/activate -pip install --upgrade pip setuptools wheel -#+end_src - See also [[https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/][Installing packages using pip and virtual environments]]. *** Install from sources @@ -159,7 +154,7 @@ git clone https://github.com/dottspina/dtsh.git cd dtsh python -m venv .venv . .venv/bin/activate -pip install --upgrade pip setuptools wheel +pip install --upgrade pip setuptools pip install . #+end_src @@ -168,7 +163,7 @@ pip install . Install from [[https://pypi.org/project/dtsh/][PyPI]] in a dedicated Python virtual environment: #+begin_src sh -python -m venv --prompt dtsh .venv +python -m venv .venv . 
.venv/bin/activate pip install --upgrade pip setuptools pip install --upgrade dtsh @@ -179,8 +174,9 @@ pip install --upgrade dtsh To remove ~dtsh~ and all its direct dependencies from a dedicated virtual environment: #+begin_src sh -. /path/to/.venv/bin/activate -pip uninstall dtsh rich Pygments devicetree +cd dtsh +. .venv/bin/activate +pip uninstall dtsh rich Pygments #+end_src ** Run @@ -242,64 +238,8 @@ See also issue [[https://github.com/dottspina/dtsh/issues/1#issuecomment-1278281 ** Zephyr integration -We'll assume a [[https://docs.zephyrproject.org/latest/develop/west/][west]]-managed Zephyr [[https://docs.zephyrproject.org/latest/develop/west/basics.html#example-workspace][workspace]] with a typical file layout -(see [[https://docs.zephyrproject.org/latest/develop/getting_started/#get-zephyr-and-install-python-dependencies][Get Zephyr and install Python dependencies]]): - -#+begin_src -zephyrproject/ # Workspace topdir -│ -│ # Per-workspace Python virtual environment, may be updated by west after manifest modification: -├── .venv/ -│ └── bin # Python run-time and Zephyr tools (e.g. west, pylink, pyocd) -│ └── lib # required Python libraries -│ -├── .west/ # marks the location of the workspace topdir -│ └── config # per-workspace local configuration file -│ -│ # The manifest repository, never modified by west after creation: -├── zephyr/ # .git/ repo -│ └── west.yml # manifest file -│ -│ # Projects managed by west: -├── modules/ -│ └── lib/ -│ └── tinycbor/ # .git/ project -├── net-tools/ # .git/ project -└── [ ... other projects ...] -#+end_src - -It's then possible to install ~dtsh~ in the same /command line development environment/ as ~west~: - -#+begin_src sh -# Activate the Python venv as usual, e.g.: -. /path/to/zephyrproject/.venv/bin/activate - -# Install latest dtsh release from PyPI -pip install dtsh -#+end_src - -And to simply run ~dtsh~ without any argument: - -#+begin_src sh -# Activate the Python venv as usual, e.g.: -. /path/to/zephyrproject/.venv/bin/activate -# Set the Zephyr kernel environment as usual, e.g.: -. /path/to/zephyrproject/zephyr/zephyr-env.sh - -# Build the Zephyr firmware as usual, e.g.: -west build $ZEPHYR_BASE/samples/sensor/bme680 -# Open the generated DTS file build/zephyr/zephyr.dts using default bindings -dtsh -#+end_src - -To remove ~dtsh~ from a Zephyr workspace: - -#+begin_src sh -. /path/to/zephyrproject/.venv/bin/activate -pip uninstall dtsh rich -#+end_src - -⚠ Be sure to NOT uninstall packages otherwise used within the Python virtual environment, e.g. ~rich~. +*WARNING*: It's no longer advised to install ~dtsh~ within the same Python virtual environment +as ~west~. * User's guide @@ -698,7 +638,8 @@ git clone https://github.com/dottspina/dtsh.git cd dtsh python -m venv .venv . .venv/bin/activate -pip install --upgrade pip setuptools wheel +pip install --upgrade pip setuptools +pip install -r requirements-dev.txt pip install --editable . #+end_src @@ -711,9 +652,6 @@ To run a few unit tests: #+begin_src sh cd dtsh . 
.venv/bin/activate -# install test requirements -pip install ".[test]" -# run unit tests python -m pytest tests #+end_src diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 0000000..a701a4e --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,25 @@ +{ + "include": [ + "src", + "tests" + ], + + "exclude": [ + "tests/bindings" + ], + + "pythonVersion": "3.8", + "pythonPlatform": "All", + + "venvPath": ".", + "venv": ".venv", + + "executionEnvironments": [ + { + "root": "tests", + "extraPaths": [ + "src" + ] + } + ] +} diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..52e264c --- /dev/null +++ b/pytest.ini @@ -0,0 +1,9 @@ +# pytest configuration. +# +# See: +# - https://docs.pytest.org/en/7.3.x/reference/customize.html#pytest-ini +# - https://docs.pytest.org/en/7.3.x/reference/reference.html#ini-options-ref + +[pytest] +testpaths = tests +pythonpath = src diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..aa65804 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,7 @@ +# Python requirements for development/tests. +# +mypy +types-PyYAML +pyright +pylint +pytest diff --git a/setup.py b/setup.py index a2d0c76..b3d2c34 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,4 @@ """Project configuration (setuptools). - -See: -https://packaging.python.org/en/latest/guides/distributing-packages-using-setuptools/#setup-args -https://github.com/pypa/sampleproject """ # Always prefer setuptools over distutils @@ -11,25 +7,13 @@ here = pathlib.Path(__file__).parent.resolve() -# Get the long description from the README file long_description = (here / "README.rst").read_text(encoding="utf-8") -# Arguments marked as "Required" below must be included for upload to PyPI. -# Fields marked as "Optional" may be commented out. setup( - # This is the name of your project. The first time you publish this - # package, this name will be registered for you. It will determine how - # users can install this project, e.g.: - # - # $ pip install sampleproject - # - # And where it will live on PyPI: https://pypi.org/project/sampleproject/ - # # There are some restrictions on what makes a valid project name # specification here: # https://packaging.python.org/specifications/core-metadata/#name # - # Required. name="dtsh", # Versions should comply with PEP 440: @@ -41,26 +25,17 @@ # # See also: https://peps.python.org/pep-0440/ # - # Required. - version="0.1.0a4", + version="0.1.0a5", # This is a one-line description or tagline of what your project does. This # corresponds to the "Summary" metadata field: # https://packaging.python.org/specifications/core-metadata/#summary # - # Optional. description="Shell-like interface with Zephyr devicetree and bindings", - # This is an optional longer description of your project that represents - # the body of text which users will see when they visit PyPI. - # - # Often, this is the same as your README, so you can just read it in from - # that file directly (as we have already done above) - # # This field corresponds to the "Description" metadata field: # https://packaging.python.org/specifications/core-metadata/#description-optional # - # Optional. long_description=long_description, # Denotes that our long_description is in Markdown; valid values are @@ -74,39 +49,22 @@ # This field corresponds to the "Description-Content-Type" metadata field: # https://packaging.python.org/specifications/core-metadata/#description-content-type-optional # - # Optional (see note above). 
#long_description_content_type="text/markdown", - # This should be a valid link to your project's main homepage. - # # This field corresponds to the "Home-Page" metadata field: # https://packaging.python.org/specifications/core-metadata/#home-page-optional # - # Optional. url="https://github.com/dottspina/dtsh", - # This should be your name or the name of the organization which owns the - # project. - # - # Optional. author="Chris Duf", - - # This should be a valid email address corresponding to the author listed - # above. - # - # Optional. author_email="chris@openmarl.org", - # The license argument is more typically used to indicate differences from well-known licenses. - # - # Optional. license="Apache License version 2.0", # Classifiers help users find your project by categorizing it. # # For a list of valid classifiers, see https://pypi.org/classifiers/ # - # Optional. classifiers=[ # How mature is this project? Common values are # 3 - Alpha @@ -122,46 +80,26 @@ # that you indicate you support Python 3. These classifiers are *not* # checked by 'pip install'. See instead 'python_requires' below. "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3 :: Only", ], - # This field adds keywords for your project which will appear on the - # project page. What does your project relate to? - # # Note that this is a list of additional keywords, separated # by commas, to be used to assist searching for the distribution in a # larger catalog. # - # Optional. keywords="devicetree, zephyr, dts, embedded", - # When your source code is in a subdirectory under the project root, e.g. - # `src/`, it is necessary to specify the `package_dir` argument. - # - # Optional. package_dir={"": "src"}, - - # You can just specify package directories manually here if your project is - # simple. Or you can use find_packages(). - # - # Alternatively, if you just want to distribute a single Python file, use - # the `py_modules` argument instead as follows, which will expect a file - # called `my_module.py` to exist: - # - # py_modules=["my_module"], - # - # Required. packages=find_packages(where="src"), # Specify which Python versions you support. In contrast to the # 'Programming Language' classifiers above, 'pip install' will check this # and refuse to install the project if the version does not match. See # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires - python_requires=">=3.7, <4", + python_requires=">=3.8, <4", # This field lists other packages that your project depends on to run. # Any package you put here will be installed by pip when your project is @@ -170,8 +108,8 @@ # For an analysis of "install_requires" vs pip's requirements files see: # https://packaging.python.org/discussions/install-requires-vs-requirements/ # - # Optional. - install_requires=["devicetree", "rich", "Pygments"], + # Requirements for both devicetree and dtsh. + install_requires=["PyYAML>=5.1", "rich", "Pygments"], # List additional groups of dependencies here (e.g. development # dependencies). Users will be able to install these using the "extras" @@ -184,20 +122,17 @@ # # Optional. 
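+    # Note (illustrative, not part of the upstream sample comments): the
+    # "dev" extra below mirrors requirements-dev.txt, so either of
+    #
+    #   $ pip install -r requirements-dev.txt
+    #   $ pip install --editable ".[dev]"
+    #
+    # gives a working development environment.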
extras_require={ + "dev": ["mypy", "types-PyYAML", "pyright", "pylint", "pytest"], "test": ["pytest"], "dist": ["build", "twine"], }, # If there are data files included in your packages that need to be # installed, specify them here. - # package_data={ # Optional - # "sample": ["package_data.dat"], - # }, package_data={ "dtsh": ["theme"], }, - # Although 'package_data' is the preferred approach, in some case you may # need to place data files outside of your packages. See: # http://docs.python.org/distutils/setupscript.html#installing-additional-files @@ -212,10 +147,6 @@ # `pip` to create the appropriate form of executable for the target # platform. # - # For example, the following would provide a command called `sample` which - # executes the function `main` from this package when invoked: - # - # Optional. entry_points={ "console_scripts": [ "dtsh=dtsh.cli:run", @@ -227,12 +158,6 @@ # This field corresponds to the "Project-URL" metadata fields: # https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use # - # Examples listed include a pattern for specifying where the package tracks - # issues, where the source is hosted, where to say thanks to the package - # maintainers, and where to support the project financially. The key is - # what's used to render the link text on PyPI. - # - # Optional. project_urls={ # 'Documentation': 'https://packaging.python.org/tutorials/distributing-packages/', "Bug Reports": "https://github.com/dottspina/dtsh/issues", diff --git a/src/devicetree/README b/src/devicetree/README new file mode 100644 index 0000000..2454138 --- /dev/null +++ b/src/devicetree/README @@ -0,0 +1,18 @@ +This directory mirrors the edtlib implementation +found in Zephyr 3.3.0. + +See https://github.com/zephyrproject-rtos/zephyr/tree/v3.3.0/scripts/dts. + + +- edtlib: + Copyright (c) 2019 Nordic Semiconductor ASA + Copyright (c) 2019 Linaro Limited + License BSD-3-Clause + +- dtlib: + Copyright (c) 2019 Nordic Semiconductor + License BSD-3-Clause + +- grutils: + Copyright 2009-2013, 2019 Peter A. Bigot + License Apache-2.0 diff --git a/src/devicetree/__init__.py b/src/devicetree/__init__.py new file mode 100644 index 0000000..e9a5683 --- /dev/null +++ b/src/devicetree/__init__.py @@ -0,0 +1,4 @@ +# Copyright (c) 2021 Nordic Semiconductor ASA +# SPDX-License-Identifier: Apache-2.0 + +__all__ = ['edtlib', 'dtlib'] diff --git a/src/devicetree/dtlib.py b/src/devicetree/dtlib.py new file mode 100644 index 0000000..3cd5dba --- /dev/null +++ b/src/devicetree/dtlib.py @@ -0,0 +1,2138 @@ +# Copyright (c) 2019, Nordic Semiconductor +# SPDX-License-Identifier: BSD-3-Clause + +# Tip: You can view just the documentation with 'pydoc3 devicetree.dtlib' + +""" +A library for extracting information from .dts (devicetree) files. See the +documentation for the DT and Node classes for more information. + +The top-level entry point of the library is the DT class. DT.__init__() takes a +.dts file to parse and a list of directories to search for any /include/d +files. +""" + +import collections +import enum +import errno +import os +import re +import string +import sys +import textwrap +from typing import Any, Dict, Iterable, List, \ + NamedTuple, NoReturn, Optional, Set, Tuple, Union + +# NOTE: tests/test_dtlib.py is the test suite for this library. + +class DTError(Exception): + "Exception raised for devicetree-related errors" + +class Node: + r""" + Represents a node in the devicetree ('node-name { ... };'). 
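+
+    For example (illustrative, with made-up node names), given a DT
+    instance 'dt' for a tree containing /soc/uart@40002000:
+
+        node = dt.get_node("/soc/uart@40002000")
+        node.name       # "uart@40002000"
+        node.unit_addr  # "40002000"
+        node.path       # "/soc/uart@40002000"
+        node.parent     # the Node for /soc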
+ + These attributes are available on Node instances: + + name: + The name of the node (a string). + + unit_addr: + The portion after the '@' in the node's name, or the empty string if the + name has no '@' in it. + + Note that this is a string. Run int(node.unit_addr, 16) to get an + integer. + + props: + A dict that maps the properties defined on the node to + their values. 'props' is indexed by property name (a string), and values + are Property objects. + + To convert property values to Python numbers or strings, use + dtlib.to_num(), dtlib.to_nums(), or dtlib.to_string(). + + Property values are represented as 'bytes' arrays to support the full + generality of DTS, which allows assignments like + + x = "foo", < 0x12345678 >, [ 9A ]; + + This gives x the value b"foo\0\x12\x34\x56\x78\x9A". Numbers in DTS are + stored in big-endian format. + + nodes: + A dict containing the subnodes of the node, indexed by name. + + labels: + A list with all labels pointing to the node, in the same order as the + labels appear, but with duplicates removed. + + 'label_1: label_2: node { ... };' gives 'labels' the value + ["label_1", "label_2"]. + + parent: + The parent Node of the node. 'None' for the root node. + + path: + The path to the node as a string, e.g. "/foo/bar". + + dt: + The DT instance this node belongs to. + """ + + # + # Public interface + # + + def __init__(self, name: str, parent: Optional['Node'], dt: 'DT'): + """ + Node constructor. Not meant to be called directly by clients. + """ + # Remember to update DT.__deepcopy__() if you change this. + + self.name = name + self.props: Dict[str, 'Property'] = {} + self.nodes: Dict[str, 'Node'] = {} + self.labels: List[str] = [] + self.parent = parent + self.dt = dt + + self._omit_if_no_ref = False + self._is_referenced = False + + if name.count("@") > 1: + dt._parse_error("multiple '@' in node name") + if not name == "/": + for char in name: + if char not in _nodename_chars: + dt._parse_error(f"{self.path}: bad character '{char}' " + "in node name") + + @property + def unit_addr(self) -> str: + """ + See the class documentation. + """ + return self.name.partition("@")[2] + + @property + def path(self) -> str: + """ + See the class documentation. + """ + node_names = [] + + cur = self + while cur.parent: + node_names.append(cur.name) + cur = cur.parent + + return "/" + "/".join(reversed(node_names)) + + def node_iter(self) -> Iterable['Node']: + """ + Returns a generator for iterating over the node and its children, + recursively. + + For example, this will iterate over all nodes in the tree (like + dt.node_iter()). + + for node in dt.root.node_iter(): + ... + """ + yield self + for node in self.nodes.values(): + yield from node.node_iter() + + def _get_prop(self, name: str) -> 'Property': + # Returns the property named 'name' on the node, creating it if it + # doesn't already exist + + prop = self.props.get(name) + if not prop: + prop = Property(self, name) + self.props[name] = prop + return prop + + def _del(self) -> None: + # Removes the node from the tree + self.parent.nodes.pop(self.name) # type: ignore + + def __str__(self): + """ + Returns a DTS representation of the node. Called automatically if the + node is print()ed. 
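+
+        For example (illustrative), a node parsed from
+
+            uart0: uart@40002000 { status = "okay"; };
+
+        prints roughly as:
+
+            uart0: uart@40002000 {
+                    status = "okay";
+            };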
+ """ + s = "".join(label + ": " for label in self.labels) + + s += f"{self.name} {{\n" + + for prop in self.props.values(): + s += "\t" + str(prop) + "\n" + + for child in self.nodes.values(): + s += textwrap.indent(child.__str__(), "\t") + "\n" + + s += "};" + + return s + + def __repr__(self): + """ + Returns some information about the Node instance. Called automatically + if the Node instance is evaluated. + """ + return f"" + +# See Property.type +class Type(enum.IntEnum): + EMPTY = 0 + BYTES = 1 + NUM = 2 + NUMS = 3 + STRING = 4 + STRINGS = 5 + PATH = 6 + PHANDLE = 7 + PHANDLES = 8 + PHANDLES_AND_NUMS = 9 + COMPOUND = 10 + +class _MarkerType(enum.IntEnum): + # Types of markers in property values + + # References + PATH = 0 # &foo + PHANDLE = 1 # <&foo> + LABEL = 2 # foo: <1 2 3> + + # Start of data blocks of specific type + UINT8 = 3 # [00 01 02] (and also used for /incbin/) + UINT16 = 4 # /bits/ 16 <1 2 3> + UINT32 = 5 # <1 2 3> + UINT64 = 6 # /bits/ 64 <1 2 3> + STRING = 7 # "foo" + +class Property: + """ + Represents a property ('x = ...'). + + These attributes are available on Property instances: + + name: + The name of the property (a string). + + value: + The value of the property, as a 'bytes' string. Numbers are stored in + big-endian format, and strings are null-terminated. Putting multiple + comma-separated values in an assignment (e.g., 'x = < 1 >, "foo"') will + concatenate the values. + + See the to_*() methods for converting the value to other types. + + type: + The type of the property, inferred from the syntax used in the + assignment. This is one of the following constants (with example + assignments): + + Assignment | Property.type + ----------------------------+------------------------ + foo; | dtlib.Type.EMPTY + foo = []; | dtlib.Type.BYTES + foo = [01 02]; | dtlib.Type.BYTES + foo = /bits/ 8 <1>; | dtlib.Type.BYTES + foo = <1>; | dtlib.Type.NUM + foo = <>; | dtlib.Type.NUMS + foo = <1 2 3>; | dtlib.Type.NUMS + foo = <1 2>, <3>; | dtlib.Type.NUMS + foo = "foo"; | dtlib.Type.STRING + foo = "foo", "bar"; | dtlib.Type.STRINGS + foo = <&l>; | dtlib.Type.PHANDLE + foo = <&l1 &l2 &l3>; | dtlib.Type.PHANDLES + foo = <&l1 &l2>, <&l3>; | dtlib.Type.PHANDLES + foo = <&l1 1 2 &l2 3 4>; | dtlib.Type.PHANDLES_AND_NUMS + foo = <&l1 1 2>, <&l2 3 4>; | dtlib.Type.PHANDLES_AND_NUMS + foo = &l; | dtlib.Type.PATH + *Anything else* | dtlib.Type.COMPOUND + + *Anything else* includes properties mixing phandle (<&label>) and node + path (&label) references with other data. + + Data labels in the property value do not influence the type. + + labels: + A list with all labels pointing to the property, in the same order as the + labels appear, but with duplicates removed. + + 'label_1: label2: x = ...' gives 'labels' the value + ["label_1", "label_2"]. + + offset_labels: + A dictionary that maps any labels within the property's value to their + offset, in bytes. For example, 'x = < 0 label_1: 1 label_2: >' gives + 'offset_labels' the value {"label_1": 4, "label_2": 8}. + + Iteration order will match the order of the labels on Python versions + that preserve dict insertion order. + + node: + The Node the property is on. + """ + + # + # Public interface + # + + def __init__(self, node: Node, name: str): + # Remember to update DT.__deepcopy__() if you change this. + + if "@" in name: + node.dt._parse_error("'@' is only allowed in node names") + + self.name = name + self.value = b"" + self.labels: List[str] = [] + # We have to wait to set this until later, when we've got + # the entire tree. 
+ self.offset_labels: Dict[str, int] = {} + self.node: Node = node + + self._label_offset_lst: List[Tuple[str, int]] = [] + + # A list of [offset, label, type] lists (sorted by offset), + # giving the locations of references within the value. 'type' + # is either _MarkerType.PATH, for a node path reference, + # _MarkerType.PHANDLE, for a phandle reference, or + # _MarkerType.LABEL, for a label on/within data. Node paths + # and phandles need to be patched in after parsing. + self._markers: List[List] = [] + + @property + def type(self) -> Type: + """ + See the class docstring. + """ + # Data labels (e.g. 'foo = label: <3>') are irrelevant, so filter them + # out + types = [marker[1] for marker in self._markers + if marker[1] != _MarkerType.LABEL] + + if not types: + return Type.EMPTY + + if types == [_MarkerType.UINT8]: + return Type.BYTES + + if types == [_MarkerType.UINT32]: + return Type.NUM if len(self.value) == 4 else Type.NUMS + + # Treat 'foo = <1 2 3>, <4 5>, ...' as Type.NUMS too + if set(types) == {_MarkerType.UINT32}: + return Type.NUMS + + if set(types) == {_MarkerType.STRING}: + return Type.STRING if len(types) == 1 else Type.STRINGS + + if types == [_MarkerType.PATH]: + return Type.PATH + + if types == [_MarkerType.UINT32, _MarkerType.PHANDLE] and \ + len(self.value) == 4: + return Type.PHANDLE + + if set(types) == {_MarkerType.UINT32, _MarkerType.PHANDLE}: + if len(self.value) == 4*types.count(_MarkerType.PHANDLE): + # Array with just phandles in it + return Type.PHANDLES + # Array with both phandles and numbers + return Type.PHANDLES_AND_NUMS + + return Type.COMPOUND + + def to_num(self, signed=False) -> int: + """ + Returns the value of the property as a number. + + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.NUM): + + foo = < 1 >; + + signed (default: False): + If True, the value will be interpreted as signed rather than + unsigned. + """ + if self.type is not Type.NUM: + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = < (number) >;', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + return int.from_bytes(self.value, "big", signed=signed) + + def to_nums(self, signed=False) -> List[int]: + """ + Returns the value of the property as a list of numbers. + + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.NUM or Type.NUMS): + + foo = < 1 2 ... >; + + signed (default: False): + If True, the values will be interpreted as signed rather than + unsigned. + """ + if self.type not in (Type.NUM, Type.NUMS): + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = < (number) (number) ... >;', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + return [int.from_bytes(self.value[i:i + 4], "big", signed=signed) + for i in range(0, len(self.value), 4)] + + def to_bytes(self) -> bytes: + """ + Returns the value of the property as a raw 'bytes', like + Property.value, except with added type checking. + + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.BYTES): + + foo = [ 01 ... ]; + """ + if self.type is not Type.BYTES: + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = [ (byte) (byte) ... ];', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + return self.value + + def to_string(self) -> str: + """ + Returns the value of the property as a string. 
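+
+        For example (illustrative), with the assignment
+
+            compatible = "vnd,some-device";
+
+        to_string() returns "vnd,some-device".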
+ + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.STRING): + + foo = "string"; + + This function might also raise UnicodeDecodeError if the string is + not valid UTF-8. + """ + if self.type is not Type.STRING: + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = \"string\";', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + try: + ret = self.value.decode("utf-8")[:-1] # Strip null + except UnicodeDecodeError: + _err(f"value of property '{self.name}' ({self.value!r}) " + f"on {self.node.path} in {self.node.dt.filename} " + "is not valid UTF-8") + + return ret # The separate 'return' appeases the type checker. + + def to_strings(self) -> List[str]: + """ + Returns the value of the property as a list of strings. + + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.STRING or Type.STRINGS): + + foo = "string", "string", ... ; + + Also raises DTError if any of the strings are not valid UTF-8. + """ + if self.type not in (Type.STRING, Type.STRINGS): + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = \"string\", \"string\", ... ;', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + try: + ret = self.value.decode("utf-8").split("\0")[:-1] + except UnicodeDecodeError: + _err(f"value of property '{self.name}' ({self.value!r}) " + f"on {self.node.path} in {self.node.dt.filename} " + "is not valid UTF-8") + + return ret # The separate 'return' appeases the type checker. + + def to_node(self) -> Node: + """ + Returns the Node the phandle in the property points to. + + Raises DTError if the property was not assigned with this syntax (has + Property.type Type.PHANDLE). + + foo = < &bar >; + """ + if self.type is not Type.PHANDLE: + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = < &foo >;', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + return self.node.dt.phandle2node[int.from_bytes(self.value, "big")] + + def to_nodes(self) -> List[Node]: + """ + Returns a list with the Nodes the phandles in the property point to. + + Raises DTError if the property value contains anything other than + phandles. All of the following are accepted: + + foo = < > + foo = < &bar >; + foo = < &bar &baz ... >; + foo = < &bar ... >, < &baz ... >; + """ + def type_ok(): + if self.type in (Type.PHANDLE, Type.PHANDLES): + return True + # Also accept 'foo = < >;' + return self.type is Type.NUMS and not self.value + + if not type_ok(): + _err("expected property '{0}' on {1} in {2} to be assigned with " + "'{0} = < &foo &bar ... >;', not '{3}'" + .format(self.name, self.node.path, + self.node.dt.filename, self)) + + return [self.node.dt.phandle2node[int.from_bytes(self.value[i:i + 4], + "big")] + for i in range(0, len(self.value), 4)] + + def to_path(self) -> Node: + """ + Returns the Node referenced by the path stored in the property. + + Raises DTError if the property was not assigned with either of these + syntaxes (has Property.type Type.PATH or Type.STRING): + + foo = &bar; + foo = "/bar"; + + For the second case, DTError is raised if the path does not exist. 
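+
+        For example (illustrative), assuming a node /soc/uart@40002000
+        labeled 'uart0', both of
+
+            console = &uart0;
+            console = "/soc/uart@40002000";
+
+        give a property whose to_path() returns that node's Node instance.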
+ """ + if self.type not in (Type.PATH, Type.STRING): + _err("expected property '{0}' on {1} in {2} to be assigned with " + "either '{0} = &foo' or '{0} = \"/path/to/node\"', not '{3}'" + .format(self.name, self.node.path, self.node.dt.filename, + self)) + + try: + path = self.value.decode("utf-8")[:-1] + except UnicodeDecodeError: + _err(f"value of property '{self.name}' ({self.value!r}) " + f"on {self.node.path} in {self.node.dt.filename} " + "is not valid UTF-8") + + try: + ret = self.node.dt.get_node(path) + except DTError: + _err(f"property '{self.name}' on {self.node.path} in " + f"{self.node.dt.filename} points to the non-existent node " + f'"{path}"') + + return ret # The separate 'return' appeases the type checker. + + def __str__(self): + s = "".join(label + ": " for label in self.labels) + self.name + if not self.value: + return s + ";" + + s += " =" + + for i, (pos, marker_type, ref) in enumerate(self._markers): + if i < len(self._markers) - 1: + next_marker = self._markers[i + 1] + else: + next_marker = None + + # End of current marker + end = next_marker[0] if next_marker else len(self.value) + + if marker_type is _MarkerType.STRING: + # end - 1 to strip off the null terminator + s += f' "{_decode_and_escape(self.value[pos:end - 1])}"' + if end != len(self.value): + s += "," + elif marker_type is _MarkerType.PATH: + s += " &" + ref + if end != len(self.value): + s += "," + else: + # <> or [] + + if marker_type is _MarkerType.LABEL: + s += f" {ref}:" + elif marker_type is _MarkerType.PHANDLE: + s += " &" + ref + pos += 4 + # Subtle: There might be more data between the phandle and + # the next marker, so we can't 'continue' here + else: # marker_type is _MarkerType.UINT* + elm_size = _TYPE_TO_N_BYTES[marker_type] + s += _N_BYTES_TO_START_STR[elm_size] + + while pos != end: + num = int.from_bytes(self.value[pos:pos + elm_size], + "big") + if elm_size == 1: + s += f" {num:02X}" + else: + s += f" {hex(num)}" + + pos += elm_size + + if pos != 0 and \ + (not next_marker or + next_marker[1] not in (_MarkerType.PHANDLE, _MarkerType.LABEL)): + + s += _N_BYTES_TO_END_STR[elm_size] + if pos != len(self.value): + s += "," + + return s + ";" + + + def __repr__(self): + return f"" + + # + # Internal functions + # + + def _add_marker(self, marker_type: _MarkerType, data: Any = None): + # Helper for registering markers in the value that are processed after + # parsing. See _fixup_props(). 'marker_type' identifies the type of + # marker, and 'data' has any optional data associated with the marker. + + # len(self.value) gives the current offset. This function is called + # while the value is built. We use a list instead of a tuple to be able + # to fix up offsets later (they might increase if the value includes + # path references, e.g. 'foo = &bar, <3>;', which are expanded later). + self._markers.append([len(self.value), marker_type, data]) + + # For phandle references, add a dummy value with the same length as a + # phandle. This is handy for the length check in _register_phandles(). + if marker_type is _MarkerType.PHANDLE: + self.value += b"\0\0\0\0" + +class _T(enum.IntEnum): + # Token IDs used by the DT lexer. + + # These values must be contiguous and start from 1. + INCLUDE = 1 + LINE = 2 + STRING = 3 + DTS_V1 = 4 + PLUGIN = 5 + MEMRESERVE = 6 + BITS = 7 + DEL_PROP = 8 + DEL_NODE = 9 + OMIT_IF_NO_REF = 10 + LABEL = 11 + CHAR_LITERAL = 12 + REF = 13 + INCBIN = 14 + SKIP = 15 + EOF = 16 + + # These values must be larger than the above contiguous range. 
+ NUM = 17 + PROPNODENAME = 18 + MISC = 19 + BYTE = 20 + BAD = 21 + +class _FileStackElt(NamedTuple): + # Used for maintaining the /include/ stack. + + filename: str + lineno: int + contents: str + pos: int + +_TokVal = Union[int, str] + +class _Token(NamedTuple): + id: int + val: _TokVal + + def __repr__(self): + id_repr = _T(self.id).name + return f'Token(id=_T.{id_repr}, val={repr(self.val)})' + +class DT: + """ + Represents a devicetree parsed from a .dts file (or from many files, if the + .dts file /include/s other files). Creating many instances of this class is + fine. The library has no global state. + + These attributes are available on DT instances: + + root: + A Node instance representing the root (/) node. + + alias2node: + A dictionary that maps maps alias strings (from /aliases) to Node + instances + + label2node: + A dictionary that maps each node label (a string) to the Node instance + for the node. + + label2prop: + A dictionary that maps each property label (a string) to a Property + instance. + + label2prop_offset: + A dictionary that maps each label (a string) within a property value + (e.g., 'x = label_1: < 1 label2: 2 >;') to a (prop, offset) tuple, where + 'prop' is a Property instance and 'offset' the byte offset (0 for label_1 + and 4 for label_2 in the example). + + phandle2node: + A dictionary that maps each phandle (a number) to a Node instance. + + memreserves: + A list of (labels, address, length) tuples for the /memreserve/s in the + .dts file, in the same order as they appear in the file. + + 'labels' is a possibly empty set with all labels preceding the memreserve + (e.g., 'label1: label2: /memreserve/ ...'). 'address' and 'length' are + numbers. + + filename: + The filename passed to the DT constructor. + """ + + # + # Public interface + # + + def __init__(self, filename: Optional[str], include_path: Iterable[str] = (), + force: bool = False): + """ + Parses a DTS file to create a DT instance. Raises OSError if 'filename' + can't be opened, and DTError for any parse errors. + + filename: + Path to the .dts file to parse. (If None, an empty devicetree + is created; this is unlikely to be what you want.) + + include_path: + An iterable (e.g. list or tuple) containing paths to search for + /include/d and /incbin/'d files. By default, files are only looked up + relative to the .dts file that contains the /include/ or /incbin/. + + force: + Try not to raise DTError even if the input tree has errors. + For experimental use; results not guaranteed. + """ + # Remember to update __deepcopy__() if you change this. + + self._root: Optional[Node] = None + self.alias2node: Dict[str, Node] = {} + self.label2node: Dict[str, Node] = {} + self.label2prop: Dict[str, Property] = {} + self.label2prop_offset: Dict[str, Tuple[Property, int]] = {} + self.phandle2node: Dict[int, Node] = {} + self.memreserves: List[Tuple[Set[str], int, int]] = [] + self.filename = filename + + self._force = force + + if filename is not None: + self._parse_file(filename, include_path) + + @property + def root(self) -> Node: + """ + See the class documentation. + """ + # This is necessary because mypy can't tell that we never + # treat self._root as a non-None value until it's initialized + # properly in _parse_dt(). + return self._root # type: ignore + + def get_node(self, path: str) -> Node: + """ + Returns the Node instance for the node with path or alias 'path' (a + string). Raises DTError if the path or alias doesn't exist. 
+ + For example, both dt.get_node("/foo/bar") and dt.get_node("bar-alias") + will return the 'bar' node below: + + /dts-v1/; + + / { + foo { + bar_label: bar { + baz { + }; + }; + }; + + aliases { + bar-alias = &bar-label; + }; + }; + + Fetching subnodes via aliases is supported: + dt.get_node("bar-alias/baz") returns the 'baz' node. + """ + if path.startswith("/"): + return _root_and_path_to_node(self.root, path, path) + + # Path does not start with '/'. First component must be an alias. + alias, _, rest = path.partition("/") + if alias not in self.alias2node: + _err(f"no alias '{alias}' found -- did you forget the leading " + "'/' in the node path?") + + return _root_and_path_to_node(self.alias2node[alias], rest, path) + + def has_node(self, path: str) -> bool: + """ + Returns True if the path or alias 'path' exists. See Node.get_node(). + """ + try: + self.get_node(path) + return True + except DTError: + return False + + def node_iter(self) -> Iterable[Node]: + """ + Returns a generator for iterating over all nodes in the devicetree. + + For example, this will print the name of each node that has a property + called 'foo': + + for node in dt.node_iter(): + if "foo" in node.props: + print(node.name) + """ + yield from self.root.node_iter() + + def __str__(self): + """ + Returns a DTS representation of the devicetree. Called automatically if + the DT instance is print()ed. + """ + s = "/dts-v1/;\n\n" + + if self.memreserves: + for labels, address, offset in self.memreserves: + # List the labels in a consistent order to help with testing + for label in labels: + s += f"{label}: " + s += f"/memreserve/ {address:#018x} {offset:#018x};\n" + s += "\n" + + return s + str(self.root) + + def __repr__(self): + """ + Returns some information about the DT instance. Called automatically if + the DT instance is evaluated. + """ + if self.filename: + return f"DT(filename='{self.filename}', " \ + f"include_path={self._include_path})" + return super().__repr__() + + def __deepcopy__(self, memo): + """ + Implements support for the standard library copy.deepcopy() + function on DT instances. + """ + + # We need a new DT, obviously. Make a new, empty one. + ret = DT(None, (), self._force) + + # Now allocate new Node objects for every node in self, to use + # in the new DT. Set their parents to None for now and leave + # them without any properties. We will recursively initialize + # copies of parents before copies of children next. + path2node_copy = { + node.path: Node(node.name, None, ret) + for node in self.node_iter() + } + + # Point each copy of a node to the copy of its parent and set up + # copies of each property. + # + # Share data when possible. For example, Property.value has + # type 'bytes', which is immutable. We therefore don't need a + # copy and can just point to the original data. 
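+
+        # Illustrative usage (not part of the original sources):
+        #
+        #   import copy
+        #   dt2 = copy.deepcopy(dt)
+        #
+        # dt2 is an independent tree: adding or modifying nodes and
+        # properties in dt2 does not affect dt, while immutable data such
+        # as Property.value is shared between the two copies.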
+ + for node in self.node_iter(): + node_copy = path2node_copy[node.path] + + parent = node.parent + if parent is not None: + node_copy.parent = path2node_copy[parent.path] + + prop_name2prop_copy = { + prop.name: Property(node_copy, prop.name) + for prop in node.props.values() + } + for prop_name, prop_copy in prop_name2prop_copy.items(): + prop = node.props[prop_name] + prop_copy.value = prop.value + prop_copy.labels = prop.labels[:] + prop_copy.offset_labels = prop.offset_labels.copy() + prop_copy._label_offset_lst = prop._label_offset_lst[:] + prop_copy._markers = [marker[:] for marker in prop._markers] + node_copy.props = prop_name2prop_copy + + node_copy.nodes = { + child_name: path2node_copy[child_node.path] + for child_name, child_node in node.nodes.items() + } + + node_copy.labels = node.labels[:] + + node_copy._omit_if_no_ref = node._omit_if_no_ref + node_copy._is_referenced = node._is_referenced + + # The copied nodes and properties are initialized, so + # we can finish initializing the copied DT object now. + + ret._root = path2node_copy['/'] + + def copy_node_lookup_table(attr_name): + original = getattr(self, attr_name) + copy = { + key: path2node_copy[original[key].path] + for key in original + } + setattr(ret, attr_name, copy) + + copy_node_lookup_table('alias2node') + copy_node_lookup_table('label2node') + copy_node_lookup_table('phandle2node') + + ret_label2prop = {} + for label, prop in self.label2prop.items(): + node_copy = path2node_copy[prop.node.path] + prop_copy = node_copy.props[prop.name] + ret_label2prop[label] = prop_copy + ret.label2prop = ret_label2prop + + ret_label2prop_offset = {} + for label, prop_offset in self.label2prop_offset.items(): + prop, offset = prop_offset + node_copy = path2node_copy[prop.node.path] + prop_copy = node_copy.props[prop.name] + ret_label2prop_offset[label] = (prop_copy, offset) + ret.label2prop_offset = ret_label2prop_offset + + ret.memreserves = [ + (set(memreserve[0]), memreserve[1], memreserve[2]) + for memreserve in self.memreserves + ] + + ret.filename = self.filename + + return ret + + # + # Parsing + # + + def _parse_file(self, filename, include_path): + self._include_path = list(include_path) + + with open(filename, encoding="utf-8") as f: + self._file_contents = f.read() + + self._tok_i = self._tok_end_i = 0 + self._filestack: List[_FileStackElt] = [] + + self._lexer_state: int = _DEFAULT + self._saved_token: Optional[_Token] = None + + self._lineno: int = 1 + + self._parse_header() + self._parse_memreserves() + self._parse_dt() + + self._register_phandles() + self._fixup_props() + self._register_aliases() + self._remove_unreferenced() + self._register_labels() + + def _parse_header(self): + # Parses /dts-v1/ (expected) and /plugin/ (unsupported) at the start of + # files. There may be multiple /dts-v1/ at the start of a file. 
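+
+        # Illustrative input accepted here (not from the original
+        # sources): a file starting with
+        #
+        #   /dts-v1/;
+        #
+        # possibly repeated, is fine; a '/plugin/;' right after the
+        # header makes this method raise a parse error.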
+ + has_dts_v1 = False + + while self._peek_token().id == _T.DTS_V1: + has_dts_v1 = True + self._next_token() + self._expect_token(";") + # /plugin/ always comes after /dts-v1/ + if self._peek_token().id == _T.PLUGIN: + self._parse_error("/plugin/ is not supported") + + if not has_dts_v1: + self._parse_error("expected '/dts-v1/;' at start of file") + + def _parse_memreserves(self): + # Parses /memreserve/, which appears after /dts-v1/ + + while True: + # Labels before /memreserve/ + labels = [] + while self._peek_token().id == _T.LABEL: + _append_no_dup(labels, self._next_token().val) + + if self._peek_token().id == _T.MEMRESERVE: + self._next_token() + self.memreserves.append( + (labels, self._eval_prim(), self._eval_prim())) + self._expect_token(";") + elif labels: + self._parse_error("expected /memreserve/ after labels at " + "beginning of file") + else: + return + + def _parse_dt(self): + # Top-level parsing loop + + while True: + tok = self._next_token() + + if tok.val == "/": + # '/ { ... };', the root node + if not self._root: + self._root = Node(name="/", parent=None, dt=self) + self._parse_node(self.root) + + elif tok.id in (_T.LABEL, _T.REF): + # '&foo { ... };' or 'label: &foo { ... };'. The C tools only + # support a single label here too. + + if tok.id == _T.LABEL: + label = tok.val + tok = self._next_token() + if tok.id != _T.REF: + self._parse_error("expected label reference (&foo)") + else: + label = None + + try: + node = self._ref2node(tok.val) + except DTError as e: + self._parse_error(e) + node = self._parse_node(node) + + if label: + _append_no_dup(node.labels, label) + + elif tok.id == _T.DEL_NODE: + self._next_ref2node()._del() + self._expect_token(";") + + elif tok.id == _T.OMIT_IF_NO_REF: + self._next_ref2node()._omit_if_no_ref = True + self._expect_token(";") + + elif tok.id == _T.EOF: + if not self._root: + self._parse_error("no root node defined") + return + + else: + self._parse_error("expected '/' or label reference (&foo)") + + def _parse_node(self, node): + # Parses the '{ ... };' part of 'node-name { ... };'. Returns the new + # Node. + + # We need to track which child nodes were defined in this set + # of curly braces in order to reject duplicate node names. + current_child_names = set() + + self._expect_token("{") + while True: + labels, omit_if_no_ref = self._parse_propnode_labels() + tok = self._next_token() + + if tok.id == _T.PROPNODENAME: + if self._peek_token().val == "{": + # ' { ...', expect node + + # Fetch the existing node if it already exists. This + # happens when overriding nodes. 
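+
+                    # Illustrative DTS (not from the original sources) for
+                    # the overriding case: both blocks below end up as the
+                    # same 'soc' Node, with 'foo' and 'bar' merged onto it:
+                    #
+                    #   / { soc { foo = <1>; }; };
+                    #   / { soc { bar = <2>; }; };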
+ child = node.nodes.get(tok.val) + if child: + if child.name in current_child_names: + self._parse_error(f'{child.path}: duplicate node name') + else: + child = Node(name=tok.val, parent=node, dt=self) + current_child_names.add(tok.val) + + for label in labels: + _append_no_dup(child.labels, label) + + if omit_if_no_ref: + child._omit_if_no_ref = True + + node.nodes[child.name] = child + self._parse_node(child) + + else: + # Not ' { ...', expect property assignment + + if omit_if_no_ref: + self._parse_error( + "/omit-if-no-ref/ can only be used on nodes") + + prop = node._get_prop(tok.val) + + if self._check_token("="): + self._parse_assignment(prop) + elif not self._check_token(";"): + # ';' is for an empty property, like 'foo;' + self._parse_error("expected '{', '=', or ';'") + + for label in labels: + _append_no_dup(prop.labels, label) + + elif tok.id == _T.DEL_NODE: + tok2 = self._next_token() + if tok2.id != _T.PROPNODENAME: + self._parse_error("expected node name") + if tok2.val in node.nodes: + node.nodes[tok2.val]._del() + self._expect_token(";") + + elif tok.id == _T.DEL_PROP: + tok2 = self._next_token() + if tok2.id != _T.PROPNODENAME: + self._parse_error("expected property name") + node.props.pop(tok2.val, None) + self._expect_token(";") + + elif tok.val == "}": + self._expect_token(";") + return node + + else: + self._parse_error("expected node name, property name, or '}'") + + def _parse_propnode_labels(self): + # _parse_node() helpers for parsing labels and /omit-if-no-ref/s before + # nodes and properties. Returns a (