From 1912837e433a8fa146bcc12b87a45908a8ba0ca1 Mon Sep 17 00:00:00 2001
From: Andrei Litvin
Date: Fri, 27 Oct 2023 14:09:56 -0400
Subject: [PATCH] Add python CSA DM XML parsing support for derived clusters (#30036)

* Start preparing to store derived clusters
* reformat, make sure parsing works
* More leniency to allow ModeBase parsing
* More leniency and documentation, be ready to attach base clusters
* Refactor to add some separate derivation logic
* Restyle
* More work on actually handling inheritance
* Restyle
* Implement actual base class derivation
* Add unit test for derived, make diffs a LOT better
* Restyle
* Switch to unified diff for a nicer diff view
* Return after the first assert
* Make type checker happy at places
* Make mypy happy even on an edge case
* Fix linter errors
* Restyle
* more typing for attrs
* Some name changes for base clusters: use abstract to make it clear that other clusters could be base too
* Also change variable name

---------

Co-authored-by: Andrei Litvin
---
 scripts/py_matter_idl/files.gni | 1 +
 .../data_model_xml/handlers/__init__.py | 21 ++-
 .../data_model_xml/handlers/context.py | 15 +-
 .../data_model_xml/handlers/derivation.py | 173 ++++++++++++++++++
 .../data_model_xml/handlers/handlers.py | 144 ++++++++++-----
 .../data_model_xml/handlers/parsing.py | 38 +++-
 .../matter_idl/generators/idl/__init__.py | 10 +-
 .../matter_idl/matter_idl_parser.py | 4 +-
 .../matter_idl/test_data_model_xml.py | 168 ++++++++++++++++-
 9 files changed, 510 insertions(+), 64 deletions(-)
 create mode 100644 scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py

diff --git a/scripts/py_matter_idl/files.gni b/scripts/py_matter_idl/files.gni
index fb9f3991b87c4c..dab10a26bffbdc 100644
--- a/scripts/py_matter_idl/files.gni
+++ b/scripts/py_matter_idl/files.gni
@@ -27,6 +27,7 @@ matter_idl_generator_sources = [
   "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py",
   "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/base.py",
   "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py",
+  "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py",
   "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py",
   "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py",
   "${chip_root}/scripts/py_matter_idl/matter_idl/generators/__init__.py",
diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py
index a2192ee010c46d..b1ece298a904db 100644
--- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py
+++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py
@@ -12,11 +12,22 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
+import logging
+from xml.sax.xmlreader import AttributesImpl
+
 from matter_idl.matter_idl_types import Idl
 from .base import BaseHandler
 from .context import Context
 from .handlers import ClusterHandler
+from .parsing import NormalizeName
+
+LOGGER = logging.getLogger('data-model-xml-data-parsing')
+
+
+def contains_valid_cluster_id(attrs: AttributesImpl) -> bool:
+    # Does not check numeric format ...
assuming scraper is smart enough for that + return 'id' in attrs and len(attrs['id']) > 0 class DataModelXmlHandler(BaseHandler): @@ -27,8 +38,14 @@ def __init__(self, context: Context, idl: Idl): super().__init__(context) self._idl = idl - def GetNextProcessor(self, name, attrs): + def GetNextProcessor(self, name, attrs: AttributesImpl): if name.lower() == 'cluster': - return ClusterHandler(self.context, self._idl, attrs) + if contains_valid_cluster_id(attrs): + return ClusterHandler.ForAttributes(self.context, self._idl, attrs) + + LOGGER.info( + "Found an abstract base cluster (no id): '%s'", attrs['name']) + + return ClusterHandler.IntoCluster(self.context, self._idl, self.context.AddAbstractBaseCluster(NormalizeName(attrs['name']), self.context.GetCurrentLocationMeta())) else: return BaseHandler(self.context) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py index 3e3220ee699c87..fe96836da37463 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py @@ -16,7 +16,7 @@ import xml.sax.xmlreader from typing import List, Optional -from matter_idl.matter_idl_types import Idl, ParseMetaData +from matter_idl.matter_idl_types import Cluster, ClusterSide, Idl, ParseMetaData class IdlPostProcessor: @@ -82,6 +82,19 @@ def __init__(self, locator: Optional[xml.sax.xmlreader.Locator] = None): self.file_name = None self._not_handled: set[str] = set() self._idl_post_processors: list[IdlPostProcessor] = [] + self.abstract_base_clusters: dict[str, Cluster] = {} + + def AddAbstractBaseCluster(self, name: str, parse_meta: Optional[ParseMetaData] = None) -> Cluster: + """Creates a new cluster entry for the given name in the list of known + base clusters. + """ + assert name not in self.abstract_base_clusters # be unique + + cluster = Cluster(side=ClusterSide.CLIENT, name=name, + code=-1, parse_meta=parse_meta) + self.abstract_base_clusters[name] = cluster + + return cluster def GetCurrentLocationMeta(self) -> Optional[ParseMetaData]: if not self.locator: diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py new file mode 100644 index 00000000000000..ff4bd9ddc664c8 --- /dev/null +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py @@ -0,0 +1,173 @@ +# +# Copyright (c) 2023 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
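For illustration, here is a minimal standalone sketch of the dispatch added in handlers/__init__.py and context.py above: cluster elements carrying an "id" attribute are parsed as concrete clusters, while those without one are registered as abstract base clusters for later derivation. The FakeContext class and the plain dicts below are stand-ins for the real Context/AttributesImpl/Cluster types, not the project's API.

from typing import Dict


def contains_valid_cluster_id(attrs: Dict[str, str]) -> bool:
    # Same check as above: a non-empty "id" must be present;
    # numeric validation is left to the XML scraper.
    return 'id' in attrs and len(attrs['id']) > 0


class FakeContext:
    """Stand-in for Context: tracks abstract base clusters by name."""

    def __init__(self):
        self.abstract_base_clusters: Dict[str, dict] = {}

    def add_abstract_base_cluster(self, name: str) -> dict:
        # Mirrors Context.AddAbstractBaseCluster: names must be unique
        # and the placeholder cluster gets no real code (-1).
        assert name not in self.abstract_base_clusters
        cluster = {"name": name, "code": -1}
        self.abstract_base_clusters[name] = cluster
        return cluster


context = FakeContext()
for attrs in ({"name": "Mode Base"}, {"name": "Dishwasher Mode", "id": "0x0059"}):
    if contains_valid_cluster_id(attrs):
        print("concrete cluster:", attrs["name"], attrs["id"])
    else:
        print("abstract base cluster (no id):", attrs["name"])
        context.add_abstract_base_cluster(attrs["name"])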
+# +import logging +from typing import Iterable, Optional, Protocol, TypeVar + +from matter_idl.matter_idl_types import Attribute, Bitmap, Cluster, Command, Enum, Event, Idl, Struct + +from .context import Context, IdlPostProcessor +from .parsing import NormalizeName + +LOGGER = logging.getLogger('data-model-xml-data-parsing') + +T = TypeVar("T") + + +class HasName(Protocol): + name: str + + +NAMED = TypeVar('NAMED', bound=HasName) + + +def get_item_with_name(items: Iterable[NAMED], name: str) -> Optional[NAMED]: + """Find an item with the given name. + + Returns none if that item does not exist + """ + for item in items: + if item.name == name: + return item + return None + + +def merge_enum_into(e: Enum, cluster: Cluster): + existing = get_item_with_name(cluster.enums, e.name) + + if existing: + # Remove existing but merge constants into e + cluster.enums.remove(existing) + for value in existing.entries: + if not get_item_with_name(e.entries, value.name): + e.entries.append(value) + + cluster.enums.append(e) + + +def merge_bitmap_into(b: Bitmap, cluster: Cluster): + existing = get_item_with_name(cluster.bitmaps, b.name) + + if existing: + # Remove existing but merge constants into e + cluster.bitmaps.remove(existing) + for value in existing.entries: + if not get_item_with_name(b.entries, value.name): + b.entries.append(value) + + cluster.bitmaps.append(b) + + +def merge_event_into(e: Event, cluster: Cluster): + existing = get_item_with_name(cluster.events, e.name) + if existing: + LOGGER.error("TODO: Do not know how to merge event for %s::%s", + cluster.name, existing.name) + cluster.events.remove(existing) + + cluster.events.append(e) + + +def merge_attribute_into(a: Attribute, cluster: Cluster): + existing: Optional[Attribute] = None + for existing_a in cluster.attributes: + if existing_a.definition.name == a.definition.name: + existing = existing_a + break + + if existing: + # Do not provide merging as it seems only conformance is changed from + # the base cluster + # + # This should fix the correct types + # + # LOGGER.error("TODO: Do not know how to merge attribute for %s::%s", cluster.name, existing.definition.name) + cluster.attributes.remove(existing) + + cluster.attributes.append(a) + + +def merge_struct_into(s: Struct, cluster: Cluster): + existing = get_item_with_name(cluster.structs, s.name) + if existing: + # Do not provide merging as it seems XML only adds + # constraints and conformance to struct elements + # + # TODO: at some point we may be able to merge some things, + # if we find that derived clusters actually add useful things here + # + # LOGGER.error("TODO: Do not know how to merge structs for %s::%s", cluster.name, existing.name) + cluster.structs.remove(existing) + + cluster.structs.append(s) + + +def merge_command_into(c: Command, cluster: Cluster): + existing = get_item_with_name(cluster.commands, c.name) + + if existing: + LOGGER.error("TODO: Do not know how to merge command for %s::%s", + cluster.name, existing.name) + cluster.commands.remove(existing) + + cluster.commands.append(c) + + +def inherit_cluster_data(from_cluster: Cluster, into_cluster: Cluster): + for e in from_cluster.enums: + merge_enum_into(e, into_cluster) + + for b in from_cluster.bitmaps: + merge_bitmap_into(b, into_cluster) + + for ev in from_cluster.events: + merge_event_into(ev, into_cluster) + + for a in from_cluster.attributes: + merge_attribute_into(a, into_cluster) + + for s in from_cluster.structs: + merge_struct_into(s, into_cluster) + + for c in from_cluster.commands: + 
merge_command_into(c, into_cluster) + + +class AddBaseInfoPostProcessor(IdlPostProcessor): + def __init__(self, destination_cluster: Cluster, source_cluster_name: str, context: Context): + self.destination = destination_cluster + self.source_name = NormalizeName(source_cluster_name) + self.context = context + + def FinalizeProcessing(self, idl: Idl): + # attempt to find the base. It may be in the "names without ID" however it may also be inside + # existing clusters (e.g. Basic Information) + base: Optional[Cluster] = None + if self.source_name in self.context.abstract_base_clusters: + base = self.context.abstract_base_clusters[self.source_name] + else: + for c in idl.clusters: + if c.name == self.source_name: + base = c + break + + if not base: + LOGGER.error( + "Could not find the base cluster named '%s'", self.source_name) + return + + LOGGER.info("Copying base data from '%s' into '%s'", + base.name, self.destination.name) + inherit_cluster_data(from_cluster=base, into_cluster=self.destination) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py index a162e62bfb5e15..0cae26bf5207ec 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py @@ -12,20 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum import logging from typing import Optional +from xml.sax.xmlreader import AttributesImpl -from matter_idl.matter_idl_types import (Attribute, AttributeQuality, Bitmap, Cluster, ClusterSide, Command, CommandQuality, - ConstantEntry, DataType, Enum, Field, FieldQuality, Idl, Struct, StructTag) +from matter_idl.matter_idl_types import (ApiMaturity, Attribute, AttributeQuality, Bitmap, Cluster, ClusterSide, Command, + CommandQuality, ConstantEntry, DataType, Enum, Field, FieldQuality, Idl, Struct, StructTag) from .base import BaseHandler, HandledDepth from .context import Context +from .derivation import AddBaseInfoPostProcessor from .parsing import (ApplyConstraint, AttributesToAttribute, AttributesToBitFieldConstantEntry, AttributesToCommand, AttributesToEvent, AttributesToField, NormalizeDataType, NormalizeName, ParseInt, StringToAccessPrivilege) LOGGER = logging.getLogger('data-model-xml-parser') +def is_unused_name(attrs: AttributesImpl): + """Existing XML adds various entries for base/derived reserved items. + + Those items seem to have no actual meaning. + + https://github.com/csa-data-model/projects/issues/363 + """ + if 'name' not in attrs: + return False + + return attrs['name'] in {'base reserved', 'derived reserved'} + + class FeaturesHandler(BaseHandler): def __init__(self, context: Context, cluster: Cluster): @@ -37,10 +53,15 @@ def EndProcessing(self): if self._bitmap.entries: self._cluster.bitmaps.append(self._bitmap) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name in {"section", "optionalConform"}: return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "feature": + if is_unused_name(attrs): + LOGGER.warning( + f"Ignoring feature constant data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + self._bitmap.entries.append( AttributesToBitFieldConstantEntry(attrs)) # assume everything handled. 
Sub-item is only section @@ -50,7 +71,7 @@ def GetNextProcessor(self, name: str, attrs): class BitmapHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster @@ -85,7 +106,7 @@ def EndProcessing(self): self._cluster.bitmaps.append(self._bitmap) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -104,7 +125,7 @@ def __init__(self, context: Context, field: Field): self._field = field self._hadConditions = False - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): self._hadConditions = True return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -120,7 +141,7 @@ def __init__(self, context: Context, field: Field): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._field = field - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "constraint": ApplyConstraint(attrs, self._field) return BaseHandler(self.context, handled=HandledDepth.SINGLE_TAG) @@ -162,7 +183,7 @@ def GetNextProcessor(self, name: str, attrs): class StructHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._struct = Struct(name=NormalizeName(attrs["name"]), fields=[]) @@ -170,7 +191,7 @@ def __init__(self, context: Context, cluster: Cluster, attrs): def EndProcessing(self): self._cluster.structs.append(self._struct) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -183,7 +204,7 @@ def GetNextProcessor(self, name: str, attrs): class EventHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._event = AttributesToEvent(attrs) @@ -191,7 +212,7 @@ def __init__(self, context: Context, cluster: Cluster, attrs): def EndProcessing(self): self._cluster.events.append(self._event) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -212,7 +233,7 @@ def GetNextProcessor(self, name: str, attrs): class EnumHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster @@ -239,7 +260,7 @@ def EndProcessing(self): self._cluster.enums.append(self._enum) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) 
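The feature handling above skips the placeholder "base reserved" / "derived reserved" entries and turns a feature's bit index into a bitmask constant. Below is a small illustrative sketch of both steps; it uses plain dicts instead of AttributesImpl and a simplified name normalization, whereas the real code goes through NormalizeName and ParseInt.

RESERVED_NAMES = {"base reserved", "derived reserved"}


def is_unused_name(attrs: dict) -> bool:
    # Matches the helper above: entries without a name are kept.
    if "name" not in attrs:
        return False
    return attrs["name"] in RESERVED_NAMES


def feature_constant(attrs: dict):
    # Simplified form of AttributesToBitFieldConstantEntry:
    # "k"-prefixed name plus a 1 << bit mask.
    name = "k" + attrs["name"].replace(" ", "")
    return name, 1 << int(attrs["bit"], 0)


features = [
    {"name": "base reserved", "bit": "0"},   # placeholder, skipped
    {"name": "On Off", "bit": "0"},
    {"name": "Timed", "bit": "1"},
]
for f in features:
    if is_unused_name(f):
        continue  # the handler returns HandledDepth.ENTIRE_TREE here
    print(feature_constant(f))   # ('kOnOff', 1) then ('kTimed', 2)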
@@ -266,7 +287,7 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -277,7 +298,7 @@ def GetNextProcessor(self, name: str, attrs): class AttributeHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._attribute = AttributesToAttribute(attrs) @@ -290,7 +311,7 @@ def EndProcessing(self): self._cluster.attributes.append(self._attribute) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "enum": LOGGER.warning( f"Anonymous enumeration not supported when handling attribute {self._cluster.name}::{self._attribute.definition.name}") @@ -330,6 +351,9 @@ def GetNextProcessor(self, name: str, attrs): return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "mandatoryConform": return MandatoryConformFieldHandler(self.context, self._attribute.definition) + elif name == "provisionalConform": + self._attribute.api_maturity = ApiMaturity.PROVISIONAL + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "deprecateConform": self._deprecated = True return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -345,15 +369,18 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "attribute": + if is_unused_name(attrs): + LOGGER.warning(f"Ignoring attribute data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) return AttributeHandler(self.context, self._cluster, attrs) else: return BaseHandler(self.context) class CommandHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._command: Optional[Command] = None @@ -382,8 +409,8 @@ def __init__(self, context: Context, cluster: Cluster, attrs): elif ("direction" in attrs) and attrs["direction"] == "responseFromServer": is_command = False # response else: - LOGGER.warn("Could not clearly determine command direction: %s" % - [item for item in attrs.items()]) + LOGGER.warning("Could not clearly determine command direction: %s" % + [item for item in attrs.items()]) # Do a best-guess. 
However we should NOT need to guess once # we have a good data set is_command = not attrs["name"].endswith("Response") @@ -414,7 +441,7 @@ def EndProcessing(self): if self._command: self._cluster.commands.append(self._command) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name in {"mandatoryConform", "optionalConform", "disallowConform"}: # Unclear how commands may be optional or mandatory return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -425,7 +452,7 @@ def GetNextProcessor(self, name: str, attrs): self._command.invokeacl = StringToAccessPrivilege( attrs["invokePrivilege"]) else: - LOGGER.warn( + LOGGER.warning( f"Ignoring invoke privilege for {self._struct.name}") if self._command: @@ -449,8 +476,20 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "command": + if is_unused_name(attrs): + LOGGER.warning(f"Ignoring command data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + + if 'id' not in attrs: + LOGGER.error( + f"Could not process command {attrs['name']}: no id") + # TODO: skip over these without failing the processing + # + # https://github.com/csa-data-model/projects/issues/364 + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + return CommandHandler(self.context, self._cluster, attrs) elif name in {"mandatoryConform", "optionalConform"}: # Nothing to tag conformance @@ -464,7 +503,7 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -483,24 +522,43 @@ def GetNextProcessor(self, name: str, attrs): return BaseHandler(self.context) +class ClusterHandlerPostProcessing(enum.Enum): + FINALIZE_AND_ADD_TO_IDL = enum.auto() + NO_POST_PROCESSING = enum.auto() + + class ClusterHandler(BaseHandler): """ Handling /cluster elements.""" - def __init__(self, context: Context, idl: Idl, attrs): - super().__init__(context, handled=HandledDepth.SINGLE_TAG) - self._idl = idl - + @staticmethod + def ForAttributes(context: Context, idl: Idl, attrs: AttributesImpl): assert ("name" in attrs) assert ("id" in attrs) - self._cluster = Cluster( - side=ClusterSide.CLIENT, - name=NormalizeName(attrs["name"]), - code=ParseInt(attrs["id"]), - parse_meta=context.GetCurrentLocationMeta() - ) + return ClusterHandler(context, idl, + Cluster( + side=ClusterSide.CLIENT, + name=NormalizeName(attrs["name"]), + code=ParseInt(attrs["id"]), + parse_meta=context.GetCurrentLocationMeta() + ), ClusterHandlerPostProcessing.FINALIZE_AND_ADD_TO_IDL) + + @staticmethod + def IntoCluster(context: Context, idl: Idl, cluster: Cluster): + return ClusterHandler(context, idl, cluster, ClusterHandlerPostProcessing.NO_POST_PROCESSING) + + def __init__(self, context: Context, idl: Idl, cluster: Cluster, post_process: ClusterHandlerPostProcessing): + super().__init__(context, handled=HandledDepth.SINGLE_TAG) + self._idl = idl + self._cluster = cluster + self._post_processing = post_process def EndProcessing(self): + if self._post_processing == 
ClusterHandlerPostProcessing.NO_POST_PROCESSING: + return + + assert self._post_processing == ClusterHandlerPostProcessing.FINALIZE_AND_ADD_TO_IDL + # Global things MUST be available everywhere to_add = [ # type, code, name, is_list @@ -521,7 +579,7 @@ def EndProcessing(self): ), qualities=AttributeQuality.READABLE)) self._idl.clusters.append(self._cluster) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "revisionHistory": # Revision history COULD be used to find the latest revision of a cluster # however current IDL files do NOT have a revision info field @@ -533,12 +591,16 @@ def GetNextProcessor(self, name: str, attrs): # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "classification": - # Not an obvious mapping in the existing data model. - # - # TODO IFF hierarchy == derived, we should use baseCluster - # - # Other elements like role, picsCode, scope and primaryTransaction seem - # to not be used + if attrs['hierarchy'] == 'derived': + # This is a derived cluster. We have to add everything from the + # base cluster + self.context.AddIdlPostProcessor(AddBaseInfoPostProcessor( + destination_cluster=self._cluster, + source_cluster_name=attrs['baseCluster'], + context=self.context + )) + # other elements like picsCode, scope and primaryTransaction seem to have + # no direct mapping in the data model return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "features": return FeaturesHandler(self.context, self._cluster) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py index c2753d4221cab4..948d698a5d4a6e 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py @@ -16,6 +16,7 @@ import re from dataclasses import dataclass from typing import Optional +from xml.sax.xmlreader import AttributesImpl from matter_idl.generators.types import GetDataTypeSizeInBits, IsSignedDataType from matter_idl.matter_idl_types import AccessPrivilege, Attribute, Command, ConstantEntry, DataType, Event, EventPriority, Field @@ -145,12 +146,21 @@ def FieldName(name: str) -> str: return name[0].lower() + name[1:] -def AttributesToField(attrs) -> Field: +def AttributesToField(attrs: AttributesImpl) -> Field: assert "name" in attrs assert "id" in attrs - assert "type" in attrs - t = ParseType(attrs["type"]) + if "type" in attrs: + attr_type = NormalizeDataType(attrs["type"]) + else: + # TODO: Generally we should not have this, however current implementation + # for derived clusters for example want to add things (like conformance + # specifically) WITHOUT re-stating things like types + # + # https://github.com/csa-data-model/projects/issues/365 + LOGGER.error(f"Attribute {attrs['name']} has no type") + attr_type = "sint32" + t = ParseType(attr_type) return Field( name=FieldName(attrs["name"]), @@ -160,16 +170,26 @@ def AttributesToField(attrs) -> Field: ) -def AttributesToBitFieldConstantEntry(attrs) -> ConstantEntry: +def AttributesToBitFieldConstantEntry(attrs: AttributesImpl) -> ConstantEntry: """Creates a constant entry appropriate for bitmaps. """ - assert ("name" in attrs) - assert ("bit" in attrs) + assert "name" in attrs + + if 'bit' not in attrs: + # TODO: multi-bit fields not supported in XML currently. 
Be lenient here to have some + # diff + # Issue: https://github.com/csa-data-model/projects/issues/347 + + LOGGER.error( + f"Constant {attrs['name']} has no bit value (may be multibit)") + return ConstantEntry(name="k" + NormalizeName(attrs["name"]), code=0) + + assert "bit" in attrs return ConstantEntry(name="k" + NormalizeName(attrs["name"]), code=1 << ParseInt(attrs["bit"])) -def AttributesToAttribute(attrs) -> Attribute: +def AttributesToAttribute(attrs: AttributesImpl) -> Attribute: assert "name" in attrs assert "id" in attrs @@ -193,7 +213,7 @@ def AttributesToAttribute(attrs) -> Attribute: ) -def AttributesToEvent(attrs) -> Event: +def AttributesToEvent(attrs: AttributesImpl) -> Event: assert "name" in attrs assert "id" in attrs assert "priority" in attrs @@ -231,7 +251,7 @@ def StringToAccessPrivilege(value: str) -> AccessPrivilege: raise Exception("UNKNOWN privilege level: %r" % value) -def AttributesToCommand(attrs) -> Command: +def AttributesToCommand(attrs: AttributesImpl) -> Command: assert "id" in attrs assert "name" in attrs diff --git a/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py b/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py index f53e35a7aba8e0..9ac9085f0cc68b 100644 --- a/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py +++ b/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py @@ -33,12 +33,16 @@ def human_text_string(value: Union[ClusterSide, StructTag, StructQuality, EventP if value == StructTag.RESPONSE: return "response" elif type(value) is FieldQuality: + # mypy seems confused if using `FieldQuality.OPTIONAL in value` + # directly, so do a useless cast here + quality: FieldQuality = value + result = "" - if FieldQuality.OPTIONAL in value: + if FieldQuality.OPTIONAL in quality: result += "optional " - if FieldQuality.NULLABLE in value: + if FieldQuality.NULLABLE in quality: result += "nullable " - if FieldQuality.FABRIC_SENSITIVE in value: + if FieldQuality.FABRIC_SENSITIVE in quality: result += "fabric_sensitive " return result.strip() elif type(value) is StructQuality: diff --git a/scripts/py_matter_idl/matter_idl/matter_idl_parser.py b/scripts/py_matter_idl/matter_idl/matter_idl_parser.py index 711e2535887b5e..ba909cb8fd24a8 100755 --- a/scripts/py_matter_idl/matter_idl/matter_idl_parser.py +++ b/scripts/py_matter_idl/matter_idl/matter_idl_parser.py @@ -310,10 +310,10 @@ def command_with_access(self, args): # NOTE: awkward inline because the order of 'meta, children' vs 'children, meta' was flipped # between lark versions in https://github.com/lark-parser/lark/pull/993 @v_args(meta=True, inline=True) - def command(self, meta, *args): + def command(self, meta, *tuple_args): # The command takes 4 arguments if no input argument, 5 if input # argument is provided - args = list(args) # convert from tuple + args = list(tuple_args) # convert from tuple if len(args) != 5: args.insert(2, None) diff --git a/scripts/py_matter_idl/matter_idl/test_data_model_xml.py b/scripts/py_matter_idl/matter_idl/test_data_model_xml.py index dfb4870d29d360..eea2308323abec 100755 --- a/scripts/py_matter_idl/matter_idl/test_data_model_xml.py +++ b/scripts/py_matter_idl/matter_idl/test_data_model_xml.py @@ -15,7 +15,8 @@ import io import unittest -from typing import List, Union +from difflib import unified_diff +from typing import List, Optional, Union try: from matter_idl.data_model_xml import ParseSource, ParseXmls @@ -27,10 +28,34 @@ os.path.join(os.path.dirname(__file__), '..'))) from matter_idl.data_model_xml import ParseSource, ParseXmls 
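The generators/idl change above narrows the value to FieldQuality before testing flag membership, which keeps mypy happy. A self-contained sketch of that pattern follows, using a toy Flag enum; Quality here is purely illustrative and FieldQuality is only assumed to behave like enum.Flag.

import enum


class Quality(enum.Flag):
    NONE = 0
    OPTIONAL = enum.auto()
    NULLABLE = enum.auto()
    FABRIC_SENSITIVE = enum.auto()


def human_text(quality: Quality) -> str:
    # Flag membership: "X in quality" is true when X's bits are set.
    result = ""
    if Quality.OPTIONAL in quality:
        result += "optional "
    if Quality.NULLABLE in quality:
        result += "nullable "
    if Quality.FABRIC_SENSITIVE in quality:
        result += "fabric_sensitive "
    return result.strip()


print(human_text(Quality.OPTIONAL | Quality.NULLABLE))  # "optional nullable"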
+from matter_idl.generators import GeneratorStorage +from matter_idl.generators.idl import IdlGenerator from matter_idl.matter_idl_parser import CreateParser from matter_idl.matter_idl_types import Idl +class GeneratorContentStorage(GeneratorStorage): + def __init__(self): + super().__init__() + self.content: Optional[str] = None + + def get_existing_data(self, relative_path: str): + # Force re-generation each time + return None + + def write_new_data(self, relative_path: str, content: str): + if self.content: + raise Exception( + "Unexpected extra data: single file generation expected") + self.content = content + + +def RenderAsIdlTxt(idl: Idl) -> str: + storage = GeneratorContentStorage() + IdlGenerator(storage=storage, idl=idl).render(dry_run=False) + return storage.content or "" + + def XmlToIdl(what: Union[str, List[str]]) -> Idl: if not isinstance(what, list): what = [what] @@ -53,6 +78,23 @@ def __init__(self, *args, **kargs): super().__init__(*args, **kargs) self.maxDiff = None + def assertIdlEqual(self, a: Idl, b: Idl): + if a == b: + # seems the same. This will just pass + self.assertEqual(a, b) + return + + # Not the same. Try to make a human readable diff: + a_txt = RenderAsIdlTxt(a) + b_txt = RenderAsIdlTxt(b) + + delta = unified_diff(a_txt.splitlines(keepends=True), + b_txt.splitlines(keepends=True), + fromfile='actual.matter', + tofile='expected.matter', + ) + self.assertEqual(a, b, '\n' + ''.join(delta)) + def testBasicInput(self): xml_idl = XmlToIdl(''' @@ -70,7 +112,121 @@ def testBasicInput(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) + + def testClusterDerivation(self): + # This test is based on a subset of ModeBase and Mode_Dishwasher original xml files + + xml_idl = XmlToIdl([ + # base ... + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ''', + # derived ... 
+ ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + ''', + ]) + + expected_idl = IdlTextToIdl(''' + client cluster DishwasherMode = 89 { + bitmap Feature: bitmap32 { + kOnOff = 0x1; + } + + struct ModeOptionStruct { + char_string<64> label = 0; + int8u mode = 1; + ModeTagStruct modeTags[] = 2; + } + + readonly attribute attrib_id attributeList[] = 65531; + readonly attribute event_id eventList[] = 65530; + readonly attribute command_id acceptedCommandList[] = 65529; + readonly attribute command_id generatedCommandList[] = 65528; + readonly attribute bitmap32 featureMap = 65532; + readonly attribute int16u clusterRevision = 65533; + + // baseline inserted after, so to pass the test add this at the end + readonly attribute ModeOptionStruct supportedModes[] = 0; + } + ''') + + self.assertIdlEqual(xml_idl, expected_idl) def testSignedTypes(self): @@ -108,7 +264,7 @@ def testSignedTypes(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testEnumRange(self): # Check heuristic for enum ranges @@ -183,7 +339,7 @@ def testEnumRange(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testAttributes(self): # Validate an attribute with a type list @@ -237,7 +393,7 @@ def testAttributes(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testComplexInput(self): # This parses a known copy of Switch.xml which happens to be fully @@ -434,7 +590,7 @@ def testComplexInput(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) if __name__ == '__main__':
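The assertIdlEqual helper added in the test renders both Idl objects to .matter text and attaches a unified diff to the failure message, which is what makes mismatches readable. The same pattern can be reused for any value that renders to text; the mixin below is a generic sketch of that idea and is not part of matter_idl.

import unittest
from difflib import unified_diff


class DiffAssertMixin:
    """Adds a text-equality assertion that reports a unified diff on failure."""

    def assertTextEqual(self, actual: str, expected: str):
        if actual == expected:
            return
        delta = unified_diff(actual.splitlines(keepends=True),
                             expected.splitlines(keepends=True),
                             fromfile='actual.matter',
                             tofile='expected.matter')
        # Prepend the diff to the standard assertion message.
        self.assertEqual(actual, expected, '\n' + ''.join(delta))


class ExampleTest(DiffAssertMixin, unittest.TestCase):
    def test_diff_output(self):
        self.assertTextEqual("cluster A {}\n", "cluster A {}\n")


if __name__ == '__main__':
    unittest.main()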