From aa3729ea4c0849613509d4fc770d570d9eb57df7 Mon Sep 17 00:00:00 2001 From: Sergei Lissianoi <54454955+selissia@users.noreply.github.com> Date: Fri, 27 Oct 2023 12:15:17 -0400 Subject: [PATCH 1/8] Disable Extended Discovery in Silabs apps (#30059) --- examples/chef/silabs/include/CHIPProjectConfig.h | 2 -- examples/light-switch-app/silabs/include/CHIPProjectConfig.h | 2 -- examples/lighting-app/silabs/include/CHIPProjectConfig.h | 2 -- examples/lock-app/silabs/include/CHIPProjectConfig.h | 2 -- examples/pump-app/silabs/include/CHIPProjectConfig.h | 2 -- examples/smoke-co-alarm-app/silabs/include/CHIPProjectConfig.h | 2 -- examples/thermostat/silabs/include/CHIPProjectConfig.h | 2 -- examples/window-app/silabs/include/CHIPProjectConfig.h | 2 -- 8 files changed, 16 deletions(-) diff --git a/examples/chef/silabs/include/CHIPProjectConfig.h b/examples/chef/silabs/include/CHIPProjectConfig.h index 75c8d194f8f319..3d58f864782be3 100644 --- a/examples/chef/silabs/include/CHIPProjectConfig.h +++ b/examples/chef/silabs/include/CHIPProjectConfig.h @@ -120,5 +120,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/light-switch-app/silabs/include/CHIPProjectConfig.h b/examples/light-switch-app/silabs/include/CHIPProjectConfig.h index 3141defe9c5133..59aa54b25a0eb1 100644 --- a/examples/light-switch-app/silabs/include/CHIPProjectConfig.h +++ b/examples/light-switch-app/silabs/include/CHIPProjectConfig.h @@ -100,5 +100,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/lighting-app/silabs/include/CHIPProjectConfig.h b/examples/lighting-app/silabs/include/CHIPProjectConfig.h index 00df921596102a..fc137c8d61d7d9 100644 --- a/examples/lighting-app/silabs/include/CHIPProjectConfig.h +++ b/examples/lighting-app/silabs/include/CHIPProjectConfig.h @@ -100,5 +100,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/lock-app/silabs/include/CHIPProjectConfig.h b/examples/lock-app/silabs/include/CHIPProjectConfig.h index ae0cbaf1b3ac68..87d7485558c659 100644 --- a/examples/lock-app/silabs/include/CHIPProjectConfig.h +++ b/examples/lock-app/silabs/include/CHIPProjectConfig.h @@ -100,5 +100,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/pump-app/silabs/include/CHIPProjectConfig.h b/examples/pump-app/silabs/include/CHIPProjectConfig.h index 00df921596102a..fc137c8d61d7d9 100644 --- a/examples/pump-app/silabs/include/CHIPProjectConfig.h +++ b/examples/pump-app/silabs/include/CHIPProjectConfig.h @@ -100,5 +100,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/smoke-co-alarm-app/silabs/include/CHIPProjectConfig.h b/examples/smoke-co-alarm-app/silabs/include/CHIPProjectConfig.h index 692a486448d6c7..4d5f7f1a9807b9 100644 --- a/examples/smoke-co-alarm-app/silabs/include/CHIPProjectConfig.h +++ b/examples/smoke-co-alarm-app/silabs/include/CHIPProjectConfig.h @@ -98,5 +98,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/thermostat/silabs/include/CHIPProjectConfig.h 
b/examples/thermostat/silabs/include/CHIPProjectConfig.h index 614361fb34e9f5..2e2f08628fa609 100644 --- a/examples/thermostat/silabs/include/CHIPProjectConfig.h +++ b/examples/thermostat/silabs/include/CHIPProjectConfig.h @@ -108,5 +108,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 diff --git a/examples/window-app/silabs/include/CHIPProjectConfig.h b/examples/window-app/silabs/include/CHIPProjectConfig.h index f1a69277f06079..2ee5e8735b2192 100644 --- a/examples/window-app/silabs/include/CHIPProjectConfig.h +++ b/examples/window-app/silabs/include/CHIPProjectConfig.h @@ -129,5 +129,3 @@ * */ #define CHIP_CONFIG_MRP_LOCAL_ACTIVE_RETRY_INTERVAL (2000_ms32) - -#define CHIP_DEVICE_CONFIG_ENABLE_EXTENDED_DISCOVERY 1 From 367c28e1dd6b989d0f7e34c2e92b1b09cec12ff8 Mon Sep 17 00:00:00 2001 From: Boris Zbarsky Date: Fri, 27 Oct 2023 12:51:25 -0400 Subject: [PATCH 2/8] Fix memory leak in UnauthenticatedSessionTable. (#30025) UnauthenticatedSessionTable essentially assumed that non-heap pools were used and was: 1) Never releasing its entries back to the pool. 2) Assuming that the pool would fill up and then its "reuse already allocated entry with zero refcount" code would kick in. Since heap pools never fill up, this meant that every single UnauthenticatedSession allocated was leaked. And we had a helpful "release them all on destruction" to cover up the leak at shutdown and prevent leak tools from finding it. This fix: * Preserves existing behavior for non-heap pools. * Switches to releasing UnauthenticatedSessions back to the pool in the heap case. --- src/transport/UnauthenticatedSessionTable.h | 105 +++++++++++++++++--- 1 file changed, 91 insertions(+), 14 deletions(-) diff --git a/src/transport/UnauthenticatedSessionTable.h b/src/transport/UnauthenticatedSessionTable.h index 29193bc0c4060f..1a698db312d52e 100644 --- a/src/transport/UnauthenticatedSessionTable.h +++ b/src/transport/UnauthenticatedSessionTable.h @@ -22,6 +22,7 @@ #include #include #include +#include #include #include #include @@ -34,8 +35,7 @@ namespace Transport { * @brief * An UnauthenticatedSession stores the binding of TransportAddress, and message counters. 
*/ -class UnauthenticatedSession : public Session, - public ReferenceCounted, 0> +class UnauthenticatedSession : public Session, public ReferenceCounted { public: enum class SessionRole @@ -44,6 +44,7 @@ class UnauthenticatedSession : public Session, kResponder, }; +protected: UnauthenticatedSession(SessionRole sessionRole, NodeId ephemeralInitiatorNodeID, const ReliableMessageProtocolConfig & config) : mEphemeralInitiatorNodeId(ephemeralInitiatorNodeID), mSessionRole(sessionRole), mLastActivityTime(System::SystemClock().GetMonotonicTimestamp()), @@ -52,6 +53,7 @@ class UnauthenticatedSession : public Session, {} ~UnauthenticatedSession() override { VerifyOrDie(GetReferenceCount() == 0); } +public: UnauthenticatedSession(const UnauthenticatedSession &) = delete; UnauthenticatedSession & operator=(const UnauthenticatedSession &) = delete; UnauthenticatedSession(UnauthenticatedSession &&) = delete; @@ -68,8 +70,8 @@ class UnauthenticatedSession : public Session, Session::SessionType GetSessionType() const override { return Session::SessionType::kUnauthenticated; } - void Retain() override { ReferenceCounted, 0>::Retain(); } - void Release() override { ReferenceCounted, 0>::Release(); } + void Retain() override { ReferenceCounted::Retain(); } + void Release() override { ReferenceCounted::Release(); } bool IsActiveSession() const override { return true; } @@ -132,6 +134,23 @@ class UnauthenticatedSession : public Session, PeerMessageCounter & GetPeerMessageCounter() { return mPeerMessageCounter; } + static void Release(UnauthenticatedSession * obj) + { + // When using heap pools, we need to make sure to release ourselves back to + // the pool. When not using heap pools, we don't want the extra cost of the + // table pointer here, and the table itself handles entry reuse and cleanup + // as needed. +#if CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + obj->ReleaseSelfToPool(); +#else + // Just do nothing. 
+#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + } + +#if CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + virtual void ReleaseSelfToPool() = 0; +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + private: const NodeId mEphemeralInitiatorNodeId; const SessionRole mSessionRole; @@ -142,6 +161,35 @@ class UnauthenticatedSession : public Session, PeerMessageCounter mPeerMessageCounter; }; +template +class UnauthenticatedSessionTable; + +namespace detail { + +template +class UnauthenticatedSessionPoolEntry : public UnauthenticatedSession +{ +public: + UnauthenticatedSessionPoolEntry(SessionRole sessionRole, NodeId ephemeralInitiatorNodeID, + const ReliableMessageProtocolConfig & config, + UnauthenticatedSessionTable & sessionTable) : + UnauthenticatedSession(sessionRole, ephemeralInitiatorNodeID, config) +#if CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + , + mSessionTable(sessionTable) +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + {} + +private: +#if CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + virtual void ReleaseSelfToPool(); + + UnauthenticatedSessionTable & mSessionTable; +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP +}; + +} // namespace detail + /* * @brief * An table which manages UnauthenticatedSessions @@ -153,7 +201,17 @@ template class UnauthenticatedSessionTable { public: - ~UnauthenticatedSessionTable() { mEntries.ReleaseAll(); } + ~UnauthenticatedSessionTable() + { +#if !CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + // When not using heap pools, our entries never actually get released + // back to the pool (which lets us make the entries 4 bytes smaller by + // not storing a reference to the table in them) and we LRU reuse ones + // that have 0 refcount. But we should release them all here, to ensure + // that we don't hit fatal asserts in our pool destructor. + mEntries.ReleaseAll(); +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + } /** * Get a responder session with the given ephemeralInitiatorNodeID. If the session doesn't exist in the cache, allocate a new @@ -203,6 +261,9 @@ class UnauthenticatedSessionTable } private: + using EntryType = detail::UnauthenticatedSessionPoolEntry; + friend EntryType; + /** * Allocates a new session out of the internal resource pool. 
* @@ -213,17 +274,23 @@ class UnauthenticatedSessionTable CHIP_ERROR AllocEntry(UnauthenticatedSession::SessionRole sessionRole, NodeId ephemeralInitiatorNodeID, const ReliableMessageProtocolConfig & config, UnauthenticatedSession *& entry) { - entry = mEntries.CreateObject(sessionRole, ephemeralInitiatorNodeID, config); - if (entry != nullptr) + auto entryToUse = mEntries.CreateObject(sessionRole, ephemeralInitiatorNodeID, config, *this); + if (entryToUse != nullptr) + { + entry = entryToUse; return CHIP_NO_ERROR; + } - entry = FindLeastRecentUsedEntry(); - if (entry == nullptr) +#if !CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + entryToUse = FindLeastRecentUsedEntry(); +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + if (entryToUse == nullptr) { return CHIP_ERROR_NO_MEMORY; } - mEntries.ResetObject(entry, sessionRole, ephemeralInitiatorNodeID, config); + mEntries.ResetObject(entryToUse, sessionRole, ephemeralInitiatorNodeID, config, *this); + entry = entryToUse; return CHIP_NO_ERROR; } @@ -242,12 +309,12 @@ class UnauthenticatedSessionTable return result; } - UnauthenticatedSession * FindLeastRecentUsedEntry() + EntryType * FindLeastRecentUsedEntry() { - UnauthenticatedSession * result = nullptr; + EntryType * result = nullptr; System::Clock::Timestamp oldestTime = System::Clock::Timestamp(std::numeric_limits::max()); - mEntries.ForEachActiveObject([&](UnauthenticatedSession * entry) { + mEntries.ForEachActiveObject([&](EntryType * entry) { if (entry->GetReferenceCount() == 0 && entry->GetLastActivityTime() < oldestTime) { result = entry; @@ -259,8 +326,18 @@ class UnauthenticatedSessionTable return result; } - ObjectPool mEntries; + void ReleaseEntry(EntryType * entry) { mEntries.ReleaseObject(entry); } + + ObjectPool mEntries; }; +#if CHIP_SYSTEM_CONFIG_POOL_USE_HEAP +template +void detail::UnauthenticatedSessionPoolEntry::ReleaseSelfToPool() +{ + mSessionTable.ReleaseEntry(this); +} +#endif // CHIP_SYSTEM_CONFIG_POOL_USE_HEAP + } // namespace Transport } // namespace chip From 9656351f912f31c381618f4f0ee374da7d6759a9 Mon Sep 17 00:00:00 2001 From: Junior Martinez <67972863+jmartinez-silabs@users.noreply.github.com> Date: Fri, 27 Oct 2023 13:35:33 -0400 Subject: [PATCH 3/8] disable tcp endpoint on wifi platform (#30040) --- src/platform/silabs/wifi_args.gni | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/platform/silabs/wifi_args.gni b/src/platform/silabs/wifi_args.gni index 3f09ad1994298c..ef7155e54132aa 100644 --- a/src/platform/silabs/wifi_args.gni +++ b/src/platform/silabs/wifi_args.gni @@ -49,7 +49,7 @@ chip_enable_openthread = false chip_inet_config_enable_ipv4 = false chip_inet_config_enable_dns_resolver = false -chip_inet_config_enable_tcp_endpoint = true +chip_inet_config_enable_tcp_endpoint = false chip_build_tests = false chip_config_memory_management = "platform" From 0371c056fdeb42911c031a6a74ba68e0036903c8 Mon Sep 17 00:00:00 2001 From: C Freeman Date: Fri, 27 Oct 2023 13:47:13 -0400 Subject: [PATCH 4/8] TC-DGGEN-3.1: Add (#30024) * TC-DGGEN-3.1: Add * Restyled by whitespace * Restyled by prettier-yaml --------- Co-authored-by: Restyled.io --- .../certification/Test_TC_DGGEN_3_1.yaml | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 src/app/tests/suites/certification/Test_TC_DGGEN_3_1.yaml diff --git a/src/app/tests/suites/certification/Test_TC_DGGEN_3_1.yaml b/src/app/tests/suites/certification/Test_TC_DGGEN_3_1.yaml new file mode 100644 index 00000000000000..bbfdb221792a68 --- /dev/null +++ 
b/src/app/tests/suites/certification/Test_TC_DGGEN_3_1.yaml @@ -0,0 +1,39 @@ +# Copyright (c) 2023 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: 88.2.4. [TC-DGGEN-3.1] Matter Specification 1.2 errata [DUT as Server] + +PICS: + - DGGEN.S + +config: + nodeId: 0x12344321 + cluster: "General Diagnostics" + endpoint: 0 + +tests: + - label: "Step 1: Wait for the commissioned device to be retrieved" + cluster: "DelayCommands" + command: "WaitForCommissionee" + arguments: + values: + - name: "nodeId" + value: nodeId + + - label: "Step 2: TH reads AttributeList attribute" + command: "readAttribute" + attribute: "AttributeList" + response: + constraints: + excludes: [0x09] From ef16809fff2212bb03dc5a52618e574f34959441 Mon Sep 17 00:00:00 2001 From: Boris Zbarsky Date: Fri, 27 Oct 2023 13:51:27 -0400 Subject: [PATCH 5/8] Don't generate Objective-C compatibility headers for Matter.framework Swift APIs. (#30028) --- src/darwin/Framework/Matter.xcodeproj/project.pbxproj | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/darwin/Framework/Matter.xcodeproj/project.pbxproj b/src/darwin/Framework/Matter.xcodeproj/project.pbxproj index fc995f1ad61efa..fcc5d770f64889 100644 --- a/src/darwin/Framework/Matter.xcodeproj/project.pbxproj +++ b/src/darwin/Framework/Matter.xcodeproj/project.pbxproj @@ -2039,6 +2039,7 @@ SDKROOT = iphoneos; SUPPORTED_PLATFORMS = "macosx iphonesimulator iphoneos appletvos appletvsimulator watchos watchsimulator"; SUPPORTS_TEXT_BASED_API = NO; + SWIFT_INSTALL_OBJC_HEADER = NO; TARGETED_DEVICE_FAMILY = "1,2,3,4"; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; @@ -2211,6 +2212,7 @@ SDKROOT = iphoneos; SUPPORTED_PLATFORMS = "macosx iphonesimulator iphoneos appletvos appletvsimulator watchos watchsimulator"; SUPPORTS_TEXT_BASED_API = YES; + SWIFT_INSTALL_OBJC_HEADER = NO; TARGETED_DEVICE_FAMILY = "1,2,3,4"; VALIDATE_PRODUCT = YES; VERSIONING_SYSTEM = "apple-generic"; From e3a27679c534b75f3c0a1a5e3be1c5c212cdc779 Mon Sep 17 00:00:00 2001 From: Andrei Litvin Date: Fri, 27 Oct 2023 14:09:56 -0400 Subject: [PATCH 6/8] Add python CSA DM XML parsing support for derived clusters (#30036) * Start preparing to store derived clusters * reformat, make sure parsing works * More leniency to allow ModeBase parsing * More leniency and documentation, be ready to attach base clusters * Refactor to add some separate derivation logic * Restyle * More work on actually handling inheritance * Restyle * Implement actual base class derivation * Add unit test for derived, make diffs a LOT better * Restyle * Switch to unified diff for a nicer diff view * Return after the first assert * Make type checker happy at places * Make mypy happy even on an edge case * Fix linter errors * Restyle * more typing for attrs * Some name changes for base clusters: use abstract to make it clear that other clusters could be base too * Also change variable name --------- Co-authored-by: Andrei Litvin --- scripts/py_matter_idl/files.gni | 1 + .../data_model_xml/handlers/__init__.py | 21 
++- .../data_model_xml/handlers/context.py | 15 +- .../data_model_xml/handlers/derivation.py | 173 ++++++++++++++++++ .../data_model_xml/handlers/handlers.py | 144 ++++++++++----- .../data_model_xml/handlers/parsing.py | 38 +++- .../matter_idl/generators/idl/__init__.py | 10 +- .../matter_idl/matter_idl_parser.py | 4 +- .../matter_idl/test_data_model_xml.py | 168 ++++++++++++++++- 9 files changed, 510 insertions(+), 64 deletions(-) create mode 100644 scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py diff --git a/scripts/py_matter_idl/files.gni b/scripts/py_matter_idl/files.gni index fb9f3991b87c4c..dab10a26bffbdc 100644 --- a/scripts/py_matter_idl/files.gni +++ b/scripts/py_matter_idl/files.gni @@ -27,6 +27,7 @@ matter_idl_generator_sources = [ "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py", "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/base.py", "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py", + "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py", "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py", "${chip_root}/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py", "${chip_root}/scripts/py_matter_idl/matter_idl/generators/__init__.py", diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py index a2192ee010c46d..b1ece298a904db 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/__init__.py @@ -12,11 +12,22 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +from xml.sax.xmlreader import AttributesImpl + from matter_idl.matter_idl_types import Idl from .base import BaseHandler from .context import Context from .handlers import ClusterHandler +from .parsing import NormalizeName + +LOGGER = logging.getLogger('data-model-xml-data-parsing') + + +def contains_valid_cluster_id(attrs: AttributesImpl) -> bool: + # Does not check numeric format ... 
assuming scraper is smart enough for that + return 'id' in attrs and len(attrs['id']) > 0 class DataModelXmlHandler(BaseHandler): @@ -27,8 +38,14 @@ def __init__(self, context: Context, idl: Idl): super().__init__(context) self._idl = idl - def GetNextProcessor(self, name, attrs): + def GetNextProcessor(self, name, attrs: AttributesImpl): if name.lower() == 'cluster': - return ClusterHandler(self.context, self._idl, attrs) + if contains_valid_cluster_id(attrs): + return ClusterHandler.ForAttributes(self.context, self._idl, attrs) + + LOGGER.info( + "Found an abstract base cluster (no id): '%s'", attrs['name']) + + return ClusterHandler.IntoCluster(self.context, self._idl, self.context.AddAbstractBaseCluster(NormalizeName(attrs['name']), self.context.GetCurrentLocationMeta())) else: return BaseHandler(self.context) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py index 3e3220ee699c87..fe96836da37463 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/context.py @@ -16,7 +16,7 @@ import xml.sax.xmlreader from typing import List, Optional -from matter_idl.matter_idl_types import Idl, ParseMetaData +from matter_idl.matter_idl_types import Cluster, ClusterSide, Idl, ParseMetaData class IdlPostProcessor: @@ -82,6 +82,19 @@ def __init__(self, locator: Optional[xml.sax.xmlreader.Locator] = None): self.file_name = None self._not_handled: set[str] = set() self._idl_post_processors: list[IdlPostProcessor] = [] + self.abstract_base_clusters: dict[str, Cluster] = {} + + def AddAbstractBaseCluster(self, name: str, parse_meta: Optional[ParseMetaData] = None) -> Cluster: + """Creates a new cluster entry for the given name in the list of known + base clusters. + """ + assert name not in self.abstract_base_clusters # be unique + + cluster = Cluster(side=ClusterSide.CLIENT, name=name, + code=-1, parse_meta=parse_meta) + self.abstract_base_clusters[name] = cluster + + return cluster def GetCurrentLocationMeta(self) -> Optional[ParseMetaData]: if not self.locator: diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py new file mode 100644 index 00000000000000..ff4bd9ddc664c8 --- /dev/null +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/derivation.py @@ -0,0 +1,173 @@ +# +# Copyright (c) 2023 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging +from typing import Iterable, Optional, Protocol, TypeVar + +from matter_idl.matter_idl_types import Attribute, Bitmap, Cluster, Command, Enum, Event, Idl, Struct + +from .context import Context, IdlPostProcessor +from .parsing import NormalizeName + +LOGGER = logging.getLogger('data-model-xml-data-parsing') + +T = TypeVar("T") + + +class HasName(Protocol): + name: str + + +NAMED = TypeVar('NAMED', bound=HasName) + + +def get_item_with_name(items: Iterable[NAMED], name: str) -> Optional[NAMED]: + """Find an item with the given name. + + Returns none if that item does not exist + """ + for item in items: + if item.name == name: + return item + return None + + +def merge_enum_into(e: Enum, cluster: Cluster): + existing = get_item_with_name(cluster.enums, e.name) + + if existing: + # Remove existing but merge constants into e + cluster.enums.remove(existing) + for value in existing.entries: + if not get_item_with_name(e.entries, value.name): + e.entries.append(value) + + cluster.enums.append(e) + + +def merge_bitmap_into(b: Bitmap, cluster: Cluster): + existing = get_item_with_name(cluster.bitmaps, b.name) + + if existing: + # Remove existing but merge constants into e + cluster.bitmaps.remove(existing) + for value in existing.entries: + if not get_item_with_name(b.entries, value.name): + b.entries.append(value) + + cluster.bitmaps.append(b) + + +def merge_event_into(e: Event, cluster: Cluster): + existing = get_item_with_name(cluster.events, e.name) + if existing: + LOGGER.error("TODO: Do not know how to merge event for %s::%s", + cluster.name, existing.name) + cluster.events.remove(existing) + + cluster.events.append(e) + + +def merge_attribute_into(a: Attribute, cluster: Cluster): + existing: Optional[Attribute] = None + for existing_a in cluster.attributes: + if existing_a.definition.name == a.definition.name: + existing = existing_a + break + + if existing: + # Do not provide merging as it seems only conformance is changed from + # the base cluster + # + # This should fix the correct types + # + # LOGGER.error("TODO: Do not know how to merge attribute for %s::%s", cluster.name, existing.definition.name) + cluster.attributes.remove(existing) + + cluster.attributes.append(a) + + +def merge_struct_into(s: Struct, cluster: Cluster): + existing = get_item_with_name(cluster.structs, s.name) + if existing: + # Do not provide merging as it seems XML only adds + # constraints and conformance to struct elements + # + # TODO: at some point we may be able to merge some things, + # if we find that derived clusters actually add useful things here + # + # LOGGER.error("TODO: Do not know how to merge structs for %s::%s", cluster.name, existing.name) + cluster.structs.remove(existing) + + cluster.structs.append(s) + + +def merge_command_into(c: Command, cluster: Cluster): + existing = get_item_with_name(cluster.commands, c.name) + + if existing: + LOGGER.error("TODO: Do not know how to merge command for %s::%s", + cluster.name, existing.name) + cluster.commands.remove(existing) + + cluster.commands.append(c) + + +def inherit_cluster_data(from_cluster: Cluster, into_cluster: Cluster): + for e in from_cluster.enums: + merge_enum_into(e, into_cluster) + + for b in from_cluster.bitmaps: + merge_bitmap_into(b, into_cluster) + + for ev in from_cluster.events: + merge_event_into(ev, into_cluster) + + for a in from_cluster.attributes: + merge_attribute_into(a, into_cluster) + + for s in from_cluster.structs: + merge_struct_into(s, into_cluster) + + for c in from_cluster.commands: + 
merge_command_into(c, into_cluster) + + +class AddBaseInfoPostProcessor(IdlPostProcessor): + def __init__(self, destination_cluster: Cluster, source_cluster_name: str, context: Context): + self.destination = destination_cluster + self.source_name = NormalizeName(source_cluster_name) + self.context = context + + def FinalizeProcessing(self, idl: Idl): + # attempt to find the base. It may be in the "names without ID" however it may also be inside + # existing clusters (e.g. Basic Information) + base: Optional[Cluster] = None + if self.source_name in self.context.abstract_base_clusters: + base = self.context.abstract_base_clusters[self.source_name] + else: + for c in idl.clusters: + if c.name == self.source_name: + base = c + break + + if not base: + LOGGER.error( + "Could not find the base cluster named '%s'", self.source_name) + return + + LOGGER.info("Copying base data from '%s' into '%s'", + base.name, self.destination.name) + inherit_cluster_data(from_cluster=base, into_cluster=self.destination) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py index a162e62bfb5e15..0cae26bf5207ec 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/handlers.py @@ -12,20 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum import logging from typing import Optional +from xml.sax.xmlreader import AttributesImpl -from matter_idl.matter_idl_types import (Attribute, AttributeQuality, Bitmap, Cluster, ClusterSide, Command, CommandQuality, - ConstantEntry, DataType, Enum, Field, FieldQuality, Idl, Struct, StructTag) +from matter_idl.matter_idl_types import (ApiMaturity, Attribute, AttributeQuality, Bitmap, Cluster, ClusterSide, Command, + CommandQuality, ConstantEntry, DataType, Enum, Field, FieldQuality, Idl, Struct, StructTag) from .base import BaseHandler, HandledDepth from .context import Context +from .derivation import AddBaseInfoPostProcessor from .parsing import (ApplyConstraint, AttributesToAttribute, AttributesToBitFieldConstantEntry, AttributesToCommand, AttributesToEvent, AttributesToField, NormalizeDataType, NormalizeName, ParseInt, StringToAccessPrivilege) LOGGER = logging.getLogger('data-model-xml-parser') +def is_unused_name(attrs: AttributesImpl): + """Existing XML adds various entries for base/derived reserved items. + + Those items seem to have no actual meaning. + + https://github.com/csa-data-model/projects/issues/363 + """ + if 'name' not in attrs: + return False + + return attrs['name'] in {'base reserved', 'derived reserved'} + + class FeaturesHandler(BaseHandler): def __init__(self, context: Context, cluster: Cluster): @@ -37,10 +53,15 @@ def EndProcessing(self): if self._bitmap.entries: self._cluster.bitmaps.append(self._bitmap) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name in {"section", "optionalConform"}: return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "feature": + if is_unused_name(attrs): + LOGGER.warning( + f"Ignoring feature constant data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + self._bitmap.entries.append( AttributesToBitFieldConstantEntry(attrs)) # assume everything handled. 
Sub-item is only section @@ -50,7 +71,7 @@ def GetNextProcessor(self, name: str, attrs): class BitmapHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster @@ -85,7 +106,7 @@ def EndProcessing(self): self._cluster.bitmaps.append(self._bitmap) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -104,7 +125,7 @@ def __init__(self, context: Context, field: Field): self._field = field self._hadConditions = False - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): self._hadConditions = True return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -120,7 +141,7 @@ def __init__(self, context: Context, field: Field): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._field = field - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "constraint": ApplyConstraint(attrs, self._field) return BaseHandler(self.context, handled=HandledDepth.SINGLE_TAG) @@ -162,7 +183,7 @@ def GetNextProcessor(self, name: str, attrs): class StructHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._struct = Struct(name=NormalizeName(attrs["name"]), fields=[]) @@ -170,7 +191,7 @@ def __init__(self, context: Context, cluster: Cluster, attrs): def EndProcessing(self): self._cluster.structs.append(self._struct) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -183,7 +204,7 @@ def GetNextProcessor(self, name: str, attrs): class EventHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._event = AttributesToEvent(attrs) @@ -191,7 +212,7 @@ def __init__(self, context: Context, cluster: Cluster, attrs): def EndProcessing(self): self._cluster.events.append(self._event) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -212,7 +233,7 @@ def GetNextProcessor(self, name: str, attrs): class EnumHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster @@ -239,7 +260,7 @@ def EndProcessing(self): self._cluster.enums.append(self._enum) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) 
@@ -266,7 +287,7 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -277,7 +298,7 @@ def GetNextProcessor(self, name: str, attrs): class AttributeHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._attribute = AttributesToAttribute(attrs) @@ -290,7 +311,7 @@ def EndProcessing(self): self._cluster.attributes.append(self._attribute) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "enum": LOGGER.warning( f"Anonymous enumeration not supported when handling attribute {self._cluster.name}::{self._attribute.definition.name}") @@ -330,6 +351,9 @@ def GetNextProcessor(self, name: str, attrs): return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "mandatoryConform": return MandatoryConformFieldHandler(self.context, self._attribute.definition) + elif name == "provisionalConform": + self._attribute.api_maturity = ApiMaturity.PROVISIONAL + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "deprecateConform": self._deprecated = True return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -345,15 +369,18 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "attribute": + if is_unused_name(attrs): + LOGGER.warning(f"Ignoring attribute data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) return AttributeHandler(self.context, self._cluster, attrs) else: return BaseHandler(self.context) class CommandHandler(BaseHandler): - def __init__(self, context: Context, cluster: Cluster, attrs): + def __init__(self, context: Context, cluster: Cluster, attrs: AttributesImpl): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster self._command: Optional[Command] = None @@ -382,8 +409,8 @@ def __init__(self, context: Context, cluster: Cluster, attrs): elif ("direction" in attrs) and attrs["direction"] == "responseFromServer": is_command = False # response else: - LOGGER.warn("Could not clearly determine command direction: %s" % - [item for item in attrs.items()]) + LOGGER.warning("Could not clearly determine command direction: %s" % + [item for item in attrs.items()]) # Do a best-guess. 
However we should NOT need to guess once # we have a good data set is_command = not attrs["name"].endswith("Response") @@ -414,7 +441,7 @@ def EndProcessing(self): if self._command: self._cluster.commands.append(self._command) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name in {"mandatoryConform", "optionalConform", "disallowConform"}: # Unclear how commands may be optional or mandatory return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -425,7 +452,7 @@ def GetNextProcessor(self, name: str, attrs): self._command.invokeacl = StringToAccessPrivilege( attrs["invokePrivilege"]) else: - LOGGER.warn( + LOGGER.warning( f"Ignoring invoke privilege for {self._struct.name}") if self._command: @@ -449,8 +476,20 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "command": + if is_unused_name(attrs): + LOGGER.warning(f"Ignoring command data for {attrs['name']}") + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + + if 'id' not in attrs: + LOGGER.error( + f"Could not process command {attrs['name']}: no id") + # TODO: skip over these without failing the processing + # + # https://github.com/csa-data-model/projects/issues/364 + return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) + return CommandHandler(self.context, self._cluster, attrs) elif name in {"mandatoryConform", "optionalConform"}: # Nothing to tag conformance @@ -464,7 +503,7 @@ def __init__(self, context: Context, cluster: Cluster): super().__init__(context, handled=HandledDepth.SINGLE_TAG) self._cluster = cluster - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "section": # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) @@ -483,24 +522,43 @@ def GetNextProcessor(self, name: str, attrs): return BaseHandler(self.context) +class ClusterHandlerPostProcessing(enum.Enum): + FINALIZE_AND_ADD_TO_IDL = enum.auto() + NO_POST_PROCESSING = enum.auto() + + class ClusterHandler(BaseHandler): """ Handling /cluster elements.""" - def __init__(self, context: Context, idl: Idl, attrs): - super().__init__(context, handled=HandledDepth.SINGLE_TAG) - self._idl = idl - + @staticmethod + def ForAttributes(context: Context, idl: Idl, attrs: AttributesImpl): assert ("name" in attrs) assert ("id" in attrs) - self._cluster = Cluster( - side=ClusterSide.CLIENT, - name=NormalizeName(attrs["name"]), - code=ParseInt(attrs["id"]), - parse_meta=context.GetCurrentLocationMeta() - ) + return ClusterHandler(context, idl, + Cluster( + side=ClusterSide.CLIENT, + name=NormalizeName(attrs["name"]), + code=ParseInt(attrs["id"]), + parse_meta=context.GetCurrentLocationMeta() + ), ClusterHandlerPostProcessing.FINALIZE_AND_ADD_TO_IDL) + + @staticmethod + def IntoCluster(context: Context, idl: Idl, cluster: Cluster): + return ClusterHandler(context, idl, cluster, ClusterHandlerPostProcessing.NO_POST_PROCESSING) + + def __init__(self, context: Context, idl: Idl, cluster: Cluster, post_process: ClusterHandlerPostProcessing): + super().__init__(context, handled=HandledDepth.SINGLE_TAG) + self._idl = idl + self._cluster = cluster + self._post_processing = post_process def EndProcessing(self): + if self._post_processing == 
ClusterHandlerPostProcessing.NO_POST_PROCESSING: + return + + assert self._post_processing == ClusterHandlerPostProcessing.FINALIZE_AND_ADD_TO_IDL + # Global things MUST be available everywhere to_add = [ # type, code, name, is_list @@ -521,7 +579,7 @@ def EndProcessing(self): ), qualities=AttributeQuality.READABLE)) self._idl.clusters.append(self._cluster) - def GetNextProcessor(self, name: str, attrs): + def GetNextProcessor(self, name: str, attrs: AttributesImpl): if name == "revisionHistory": # Revision history COULD be used to find the latest revision of a cluster # however current IDL files do NOT have a revision info field @@ -533,12 +591,16 @@ def GetNextProcessor(self, name: str, attrs): # Documentation data, skipped return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "classification": - # Not an obvious mapping in the existing data model. - # - # TODO IFF hierarchy == derived, we should use baseCluster - # - # Other elements like role, picsCode, scope and primaryTransaction seem - # to not be used + if attrs['hierarchy'] == 'derived': + # This is a derived cluster. We have to add everything from the + # base cluster + self.context.AddIdlPostProcessor(AddBaseInfoPostProcessor( + destination_cluster=self._cluster, + source_cluster_name=attrs['baseCluster'], + context=self.context + )) + # other elements like picsCode, scope and primaryTransaction seem to have + # no direct mapping in the data model return BaseHandler(self.context, handled=HandledDepth.ENTIRE_TREE) elif name == "features": return FeaturesHandler(self.context, self._cluster) diff --git a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py index c2753d4221cab4..948d698a5d4a6e 100644 --- a/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py +++ b/scripts/py_matter_idl/matter_idl/data_model_xml/handlers/parsing.py @@ -16,6 +16,7 @@ import re from dataclasses import dataclass from typing import Optional +from xml.sax.xmlreader import AttributesImpl from matter_idl.generators.types import GetDataTypeSizeInBits, IsSignedDataType from matter_idl.matter_idl_types import AccessPrivilege, Attribute, Command, ConstantEntry, DataType, Event, EventPriority, Field @@ -145,12 +146,21 @@ def FieldName(name: str) -> str: return name[0].lower() + name[1:] -def AttributesToField(attrs) -> Field: +def AttributesToField(attrs: AttributesImpl) -> Field: assert "name" in attrs assert "id" in attrs - assert "type" in attrs - t = ParseType(attrs["type"]) + if "type" in attrs: + attr_type = NormalizeDataType(attrs["type"]) + else: + # TODO: Generally we should not have this, however current implementation + # for derived clusters for example want to add things (like conformance + # specifically) WITHOUT re-stating things like types + # + # https://github.com/csa-data-model/projects/issues/365 + LOGGER.error(f"Attribute {attrs['name']} has no type") + attr_type = "sint32" + t = ParseType(attr_type) return Field( name=FieldName(attrs["name"]), @@ -160,16 +170,26 @@ def AttributesToField(attrs) -> Field: ) -def AttributesToBitFieldConstantEntry(attrs) -> ConstantEntry: +def AttributesToBitFieldConstantEntry(attrs: AttributesImpl) -> ConstantEntry: """Creates a constant entry appropriate for bitmaps. """ - assert ("name" in attrs) - assert ("bit" in attrs) + assert "name" in attrs + + if 'bit' not in attrs: + # TODO: multi-bit fields not supported in XML currently. 
Be lenient here to have some + # diff + # Issue: https://github.com/csa-data-model/projects/issues/347 + + LOGGER.error( + f"Constant {attrs['name']} has no bit value (may be multibit)") + return ConstantEntry(name="k" + NormalizeName(attrs["name"]), code=0) + + assert "bit" in attrs return ConstantEntry(name="k" + NormalizeName(attrs["name"]), code=1 << ParseInt(attrs["bit"])) -def AttributesToAttribute(attrs) -> Attribute: +def AttributesToAttribute(attrs: AttributesImpl) -> Attribute: assert "name" in attrs assert "id" in attrs @@ -193,7 +213,7 @@ def AttributesToAttribute(attrs) -> Attribute: ) -def AttributesToEvent(attrs) -> Event: +def AttributesToEvent(attrs: AttributesImpl) -> Event: assert "name" in attrs assert "id" in attrs assert "priority" in attrs @@ -231,7 +251,7 @@ def StringToAccessPrivilege(value: str) -> AccessPrivilege: raise Exception("UNKNOWN privilege level: %r" % value) -def AttributesToCommand(attrs) -> Command: +def AttributesToCommand(attrs: AttributesImpl) -> Command: assert "id" in attrs assert "name" in attrs diff --git a/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py b/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py index f53e35a7aba8e0..9ac9085f0cc68b 100644 --- a/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py +++ b/scripts/py_matter_idl/matter_idl/generators/idl/__init__.py @@ -33,12 +33,16 @@ def human_text_string(value: Union[ClusterSide, StructTag, StructQuality, EventP if value == StructTag.RESPONSE: return "response" elif type(value) is FieldQuality: + # mypy seems confused if using `FieldQuality.OPTIONAL in value` + # directly, so do a useless cast here + quality: FieldQuality = value + result = "" - if FieldQuality.OPTIONAL in value: + if FieldQuality.OPTIONAL in quality: result += "optional " - if FieldQuality.NULLABLE in value: + if FieldQuality.NULLABLE in quality: result += "nullable " - if FieldQuality.FABRIC_SENSITIVE in value: + if FieldQuality.FABRIC_SENSITIVE in quality: result += "fabric_sensitive " return result.strip() elif type(value) is StructQuality: diff --git a/scripts/py_matter_idl/matter_idl/matter_idl_parser.py b/scripts/py_matter_idl/matter_idl/matter_idl_parser.py index 711e2535887b5e..ba909cb8fd24a8 100755 --- a/scripts/py_matter_idl/matter_idl/matter_idl_parser.py +++ b/scripts/py_matter_idl/matter_idl/matter_idl_parser.py @@ -310,10 +310,10 @@ def command_with_access(self, args): # NOTE: awkward inline because the order of 'meta, children' vs 'children, meta' was flipped # between lark versions in https://github.com/lark-parser/lark/pull/993 @v_args(meta=True, inline=True) - def command(self, meta, *args): + def command(self, meta, *tuple_args): # The command takes 4 arguments if no input argument, 5 if input # argument is provided - args = list(args) # convert from tuple + args = list(tuple_args) # convert from tuple if len(args) != 5: args.insert(2, None) diff --git a/scripts/py_matter_idl/matter_idl/test_data_model_xml.py b/scripts/py_matter_idl/matter_idl/test_data_model_xml.py index dfb4870d29d360..eea2308323abec 100755 --- a/scripts/py_matter_idl/matter_idl/test_data_model_xml.py +++ b/scripts/py_matter_idl/matter_idl/test_data_model_xml.py @@ -15,7 +15,8 @@ import io import unittest -from typing import List, Union +from difflib import unified_diff +from typing import List, Optional, Union try: from matter_idl.data_model_xml import ParseSource, ParseXmls @@ -27,10 +28,34 @@ os.path.join(os.path.dirname(__file__), '..'))) from matter_idl.data_model_xml import ParseSource, ParseXmls 
+from matter_idl.generators import GeneratorStorage +from matter_idl.generators.idl import IdlGenerator from matter_idl.matter_idl_parser import CreateParser from matter_idl.matter_idl_types import Idl +class GeneratorContentStorage(GeneratorStorage): + def __init__(self): + super().__init__() + self.content: Optional[str] = None + + def get_existing_data(self, relative_path: str): + # Force re-generation each time + return None + + def write_new_data(self, relative_path: str, content: str): + if self.content: + raise Exception( + "Unexpected extra data: single file generation expected") + self.content = content + + +def RenderAsIdlTxt(idl: Idl) -> str: + storage = GeneratorContentStorage() + IdlGenerator(storage=storage, idl=idl).render(dry_run=False) + return storage.content or "" + + def XmlToIdl(what: Union[str, List[str]]) -> Idl: if not isinstance(what, list): what = [what] @@ -53,6 +78,23 @@ def __init__(self, *args, **kargs): super().__init__(*args, **kargs) self.maxDiff = None + def assertIdlEqual(self, a: Idl, b: Idl): + if a == b: + # seems the same. This will just pass + self.assertEqual(a, b) + return + + # Not the same. Try to make a human readable diff: + a_txt = RenderAsIdlTxt(a) + b_txt = RenderAsIdlTxt(b) + + delta = unified_diff(a_txt.splitlines(keepends=True), + b_txt.splitlines(keepends=True), + fromfile='actual.matter', + tofile='expected.matter', + ) + self.assertEqual(a, b, '\n' + ''.join(delta)) + def testBasicInput(self): xml_idl = XmlToIdl(''' @@ -70,7 +112,121 @@ def testBasicInput(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) + + def testClusterDerivation(self): + # This test is based on a subset of ModeBase and Mode_Dishwasher original xml files + + xml_idl = XmlToIdl([ + # base ... + ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ''', + # derived ... 
+ ''' + + + + + + + + + + + + + + + + + + + + + + + + + + + ''', + ]) + + expected_idl = IdlTextToIdl(''' + client cluster DishwasherMode = 89 { + bitmap Feature: bitmap32 { + kOnOff = 0x1; + } + + struct ModeOptionStruct { + char_string<64> label = 0; + int8u mode = 1; + ModeTagStruct modeTags[] = 2; + } + + readonly attribute attrib_id attributeList[] = 65531; + readonly attribute event_id eventList[] = 65530; + readonly attribute command_id acceptedCommandList[] = 65529; + readonly attribute command_id generatedCommandList[] = 65528; + readonly attribute bitmap32 featureMap = 65532; + readonly attribute int16u clusterRevision = 65533; + + // baseline inserted after, so to pass the test add this at the end + readonly attribute ModeOptionStruct supportedModes[] = 0; + } + ''') + + self.assertIdlEqual(xml_idl, expected_idl) def testSignedTypes(self): @@ -108,7 +264,7 @@ def testSignedTypes(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testEnumRange(self): # Check heuristic for enum ranges @@ -183,7 +339,7 @@ def testEnumRange(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testAttributes(self): # Validate an attribute with a type list @@ -237,7 +393,7 @@ def testAttributes(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) def testComplexInput(self): # This parses a known copy of Switch.xml which happens to be fully @@ -434,7 +590,7 @@ def testComplexInput(self): } ''') - self.assertEqual(xml_idl, expected_idl) + self.assertIdlEqual(xml_idl, expected_idl) if __name__ == '__main__': From 638eb935cbd052943c152a2e8b19ed006c45deea Mon Sep 17 00:00:00 2001 From: lpbeliveau-silabs <112982107+lpbeliveau-silabs@users.noreply.github.com> Date: Fri, 27 Oct 2023 14:52:23 -0400 Subject: [PATCH 7/8] Added reporting attribute change callback in the group server where actions are modifying the group table (#30055) --- src/app/clusters/groups-server/groups-server.cpp | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/app/clusters/groups-server/groups-server.cpp b/src/app/clusters/groups-server/groups-server.cpp index 7318523c118fbe..10aa34351230b7 100644 --- a/src/app/clusters/groups-server/groups-server.cpp +++ b/src/app/clusters/groups-server/groups-server.cpp @@ -22,6 +22,7 @@ #include #include #include +#include #include #include #include @@ -90,6 +91,8 @@ static Status GroupAdd(FabricIndex fabricIndex, EndpointId endpointId, GroupId g } if (CHIP_NO_ERROR == err) { + MatterReportingAttributeChangeCallback(kRootEndpointId, GroupKeyManagement::Id, + GroupKeyManagement::Attributes::GroupTable::Id); return Status::Success; } @@ -109,6 +112,8 @@ static EmberAfStatus GroupRemove(FabricIndex fabricIndex, EndpointId endpointId, CHIP_ERROR err = provider->RemoveEndpoint(fabricIndex, groupId, endpointId); if (CHIP_NO_ERROR == err) { + MatterReportingAttributeChangeCallback(kRootEndpointId, GroupKeyManagement::Id, + GroupKeyManagement::Attributes::GroupTable::Id); return EMBER_ZCL_STATUS_SUCCESS; } @@ -322,7 +327,7 @@ bool emberAfGroupsClusterRemoveAllGroupsCallback(app::CommandHandler * commandOb provider->RemoveEndpoint(fabricIndex, commandPath.mEndpointId); status = Status::Success; - + MatterReportingAttributeChangeCallback(kRootEndpointId, GroupKeyManagement::Id, GroupKeyManagement::Attributes::GroupTable::Id); exit: commandObj->AddStatus(commandPath, status); if (Status::Success != status) From 
b3c844bd669a4af978e1e65b195ce0a65fb26b3e Mon Sep 17 00:00:00 2001 From: C Freeman Date: Fri, 27 Oct 2023 15:01:14 -0400 Subject: [PATCH 8/8] Re enable spec parsing (#30066) * Reapply "Cluster conformance checker script (#29895)" This reverts commit 8140975925a0f7d1662d6a57fe5aab2b722b14d7. * More resiliance to mismatches in spec * Restyled by isort --------- Co-authored-by: Restyled.io --- .github/workflows/tests.yaml | 1 + .../TC_DeviceBasicComposition.py | 128 +++- src/python_testing/TestConformanceSupport.py | 575 ++++++++++++++++++ src/python_testing/conformance_support.py | 263 ++++++++ src/python_testing/matter_testing_support.py | 21 +- src/python_testing/spec_parsing_support.py | 343 +++++++++++ 6 files changed, 1326 insertions(+), 5 deletions(-) create mode 100644 src/python_testing/TestConformanceSupport.py create mode 100644 src/python_testing/conformance_support.py create mode 100644 src/python_testing/spec_parsing_support.py diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 97897e8086c941..d6c580e643e9ff 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -468,6 +468,7 @@ jobs: scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_RVCCLEANM_1_2.py" --script-args "--int-arg PIXIT_ENDPOINT:1 --storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-all-clusters-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-all-clusters-app --factoryreset --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_RVCRUNM_1_2.py" --script-args "--int-arg PIXIT_ENDPOINT:1 --storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --script "src/python_testing/TestMatterTestingSupport.py" --script-args "--trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' + scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --script "src/python_testing/TestConformanceSupport.py" --script-args "--trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-lock-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-lock-app --factoryreset --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_DRLK_2_2.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to 
perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-lock-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-lock-app --factoryreset --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_DRLK_2_3.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' scripts/run_in_python_env.sh out/venv './scripts/tests/run_python_test.py --app out/linux-x64-lock-ipv6only-no-ble-no-wifi-tsan-clang-test/chip-lock-app --factoryreset --app-args "--discriminator 1234 --KVS kvs1 --trace-to json:out/trace_data/app-{SCRIPT_BASE_NAME}.json" --script "src/python_testing/TC_DRLK_2_12.py" --script-args "--storage-path admin_storage.json --commissioning-method on-network --discriminator 1234 --passcode 20202021 --PICS src/app/tests/suites/certification/ci-pics-values --trace-to json:out/trace_data/test-{SCRIPT_BASE_NAME}.json --trace-to perfetto:out/trace_data/test-{SCRIPT_BASE_NAME}.perfetto"' diff --git a/src/python_testing/TC_DeviceBasicComposition.py b/src/python_testing/TC_DeviceBasicComposition.py index b333085fa2eef1..1f5dc38813a975 100644 --- a/src/python_testing/TC_DeviceBasicComposition.py +++ b/src/python_testing/TC_DeviceBasicComposition.py @@ -31,8 +31,11 @@ import chip.clusters.ClusterObjects import chip.tlv from chip.clusters.Attribute import ValueDecodeFailure -from matter_testing_support import AttributePathLocation, MatterBaseTest, async_test_body, default_matter_test_main +from conformance_support import ConformanceDecision, conformance_allowed +from matter_testing_support import (AttributePathLocation, ClusterPathLocation, CommandPathLocation, MatterBaseTest, + async_test_body, default_matter_test_main) from mobly import asserts +from spec_parsing_support import CommandType, build_xml_clusters def MatterTlvToJson(tlv_data: dict[int, Any]) -> dict[str, Any]: @@ -870,6 +873,129 @@ def test_DESC_2_2(self): if problems or root_problems: self.fail_current_test("Problems with tags lists") + def test_spec_conformance(self): + success = True + # TODO: provisional needs to be an input parameter + allow_provisional = True + clusters, problems = build_xml_clusters() + self.problems = self.problems + problems + for id in sorted(list(clusters.keys())): + print(f'{id} 0x{id:02x}: {clusters[id].name}') + for endpoint_id, endpoint in self.endpoints_tlv.items(): + for cluster_id, cluster in endpoint.items(): + if cluster_id not in clusters.keys(): + if (cluster_id & 0xFFFF_0000) != 0: + # manufacturer cluster + continue + location = ClusterPathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id) + # TODO: update this from a warning once we have all the data + self.record_warning(self.get_test_name(), location=location, + problem='Standard cluster found on device, but is not present in spec data') + continue + + # TODO: switch to use global FEATURE_MAP_ID etc. once the IDM-10.1 change is merged. 
+ FEATURE_MAP_ID = 0xFFFC + ATTRIBUTE_LIST_ID = 0xFFFB + ACCEPTED_COMMAND_ID = 0xFFF9 + GENERATED_COMMAND_ID = 0xFFF8 + + feature_map = cluster[FEATURE_MAP_ID] + attribute_list = cluster[ATTRIBUTE_LIST_ID] + all_command_list = cluster[ACCEPTED_COMMAND_ID] + cluster[GENERATED_COMMAND_ID] + + # Feature conformance checking + feature_masks = [1 << i for i in range(32) if feature_map & (1 << i)] + for f in feature_masks: + location = AttributePathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, attribute_id=FEATURE_MAP_ID) + if f not in clusters[cluster_id].features.keys(): + self.record_error(self.get_test_name(), location=location, problem=f'Unknown feature with mask 0x{f:02x}') + success = False + continue + xml_feature = clusters[cluster_id].features[f] + conformance_decision = xml_feature.conformance(feature_map, attribute_list, all_command_list) + if not conformance_allowed(conformance_decision, allow_provisional): + self.record_error(self.get_test_name(), location=location, + problem=f'Disallowed feature with mask 0x{f:02x}') + success = False + for feature_mask, xml_feature in clusters[cluster_id].features.items(): + conformance_decision = xml_feature.conformance(feature_map, attribute_list, all_command_list) + if conformance_decision == ConformanceDecision.MANDATORY and feature_mask not in feature_masks: + self.record_error(self.get_test_name(), location=location, + problem=f'Required feature with mask 0x{f:02x} is not present in feature map') + success = False + + # Attribute conformance checking + for attribute_id, attribute in cluster.items(): + location = AttributePathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, attribute_id=attribute_id) + if attribute_id not in clusters[cluster_id].attributes.keys(): + # TODO: Consolidate the range checks with IDM-10.1 once that lands + if attribute_id <= 0x4FFF: + # manufacturer attribute + self.record_error(self.get_test_name(), location=location, + problem='Standard attribute found on device, but not in spec') + success = False + continue + xml_attribute = clusters[cluster_id].attributes[attribute_id] + conformance_decision = xml_attribute.conformance(feature_map, attribute_list, all_command_list) + if not conformance_allowed(conformance_decision, allow_provisional): + location = AttributePathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, attribute_id=attribute_id) + self.record_error(self.get_test_name(), location=location, + problem=f'Attribute 0x{attribute_id:02x} is included, but is disallowed by conformance') + success = False + for attribute_id, xml_attribute in clusters[cluster_id].attributes.items(): + conformance_decision = xml_attribute.conformance(feature_map, attribute_list, all_command_list) + if conformance_decision == ConformanceDecision.MANDATORY and attribute_id not in cluster.keys(): + location = AttributePathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, attribute_id=attribute_id) + self.record_error(self.get_test_name(), location=location, + problem=f'Attribute 0x{attribute_id:02x} is required, but is not present on the DUT') + success = False + + def check_spec_conformance_for_commands(command_type: CommandType) -> bool: + success = True + # TODO: once IDM-10.1 lands, use the globals + global_attribute_id = 0xFFF9 if command_type == CommandType.ACCEPTED else 0xFFF8 + xml_commands_dict = clusters[cluster_id].accepted_commands if command_type == CommandType.ACCEPTED else clusters[cluster_id].generated_commands + command_list = cluster[global_attribute_id] + for command_id in 
command_list: + location = CommandPathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, command_id=command_id) + if command_id not in xml_commands_dict: + # TODO: Consolidate range checks with IDM-10.1 once that lands + if command_id <= 0xFF: + # manufacturer command + continue + self.record_error(self.get_test_name(), location=location, + problem='Standard command found on device, but not in spec') + success = False + continue + xml_command = xml_commands_dict[command_id] + conformance_decision = xml_command.conformance(feature_map, attribute_list, all_command_list) + if not conformance_allowed(conformance_decision, allow_provisional): + self.record_error(self.get_test_name(), location=location, + problem=f'Command 0x{command_id:02x} is included, but disallowed by conformance') + success = False + for command_id, xml_command in xml_commands_dict.items(): + conformance_decision = xml_command.conformance(feature_map, attribute_list, all_command_list) + if conformance_decision == ConformanceDecision.MANDATORY and command_id not in command_list: + location = CommandPathLocation(endpoint_id=endpoint_id, cluster_id=cluster_id, command_id=command_id) + self.record_error(self.get_test_name(), location=location, + problem=f'Command 0x{command_id:02x} is required, but is not present on the DUT') + success = False + return success + + # Command conformance checking + cmd_success = check_spec_conformance_for_commands(CommandType.ACCEPTED) + success = False if not cmd_success else success + cmd_success = check_spec_conformance_for_commands(CommandType.GENERATED) + success = False if not cmd_success else success + + # TODO: Add choice checkers + + if not success: + # TODO: Right now, we have failures in all-cluster, so we can't fail this test and keep it in CI. For now, just log. + # Issue tracking: #29812 + # self.fail_current_test("Problems with conformance") + logging.error("Problems found with conformance, this should turn into a test failure once #29812 is resolved") + if __name__ == "__main__": default_matter_test_main() diff --git a/src/python_testing/TestConformanceSupport.py b/src/python_testing/TestConformanceSupport.py new file mode 100644 index 00000000000000..53f9e885ff9449 --- /dev/null +++ b/src/python_testing/TestConformanceSupport.py @@ -0,0 +1,575 @@ +# +# Copyright (c) 2023 Project CHIP Authors +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
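
Editorial note: the feature check in test_spec_conformance above splits FeatureMap into one mask per set bit and looks each mask up in the spec data. A minimal standalone sketch of that decomposition (the mask-to-code table is illustrative, not taken from the spec XML):

    # Mirror of the list comprehension used in test_spec_conformance:
    # one mask per set bit of the FeatureMap value read from the DUT.
    def split_feature_map(feature_map: int) -> list[int]:
        return [1 << i for i in range(32) if feature_map & (1 << i)]

    spec_features = {0x01: 'AB', 0x02: 'CD'}  # illustrative mask -> feature code table
    for mask in split_feature_map(0x05):
        print(f'0x{mask:02x}:', spec_features.get(mask, 'unknown feature -> record_error'))
    # 0x01: AB
    # 0x04: unknown feature -> record_error
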
+# + +import xml.etree.ElementTree as ElementTree + +from conformance_support import ConformanceDecision, ConformanceParseParameters, parse_callable_from_xml +from matter_testing_support import MatterBaseTest, async_test_body, default_matter_test_main +from mobly import asserts + + +class TestConformanceSupport(MatterBaseTest): + @async_test_body + async def setup_class(self): + super().setup_class() + # a small feature map + self.feature_names_to_bits = {'AB': 0x01, 'CD': 0x02} + + # none, AB, CD, AB&CD + self.feature_maps = [0x00, 0x01, 0x02, 0x03] + self.has_ab = [False, True, False, True] + self.has_cd = [False, False, True, True] + + self.attribute_names_to_values = {'attr1': 0x00, 'attr2': 0x01} + self.attribute_lists = [[], [0x00], [0x01], [0x00, 0x01]] + self.has_attr1 = [False, True, False, True] + self.has_attr2 = [False, False, True, True] + + self.command_names_to_values = {'cmd1': 0x00, 'cmd2': 0x01} + self.cmd_lists = [[], [0x00], [0x01], [0x00, 0x01]] + self.has_cmd1 = [False, True, False, True] + self.has_cmd2 = [False, False, True, True] + self.params = ConformanceParseParameters( + feature_map=self.feature_names_to_bits, attribute_map=self.attribute_names_to_values, command_map=self.command_names_to_values) + + @async_test_body + async def test_conformance_mandatory(self): + xml = '' + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for f in self.feature_maps: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + + @async_test_body + async def test_conformance_optional(self): + xml = '' + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for f in self.feature_maps: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + + @async_test_body + async def test_conformance_disallowed(self): + xml = '' + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for f in self.feature_maps: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.DISALLOWED) + + xml = '' + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for f in self.feature_maps: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.DISALLOWED) + + @async_test_body + async def test_conformance_provisional(self): + xml = '' + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for f in self.feature_maps: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.PROVISIONAL) + + @async_test_body + async def test_conformance_mandatory_on_condition(self): + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # single attribute mandatory + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in 
enumerate(self.attribute_lists): + if self.has_attr1[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if self.has_attr2[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + # test command in optional and in boolean - this is the same as attribute essentially, so testing every permutation is overkill + + @async_test_body + async def test_conformance_optional_on_condition(self): + # single feature optional + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # single attribute optional + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if self.has_attr1[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if self.has_attr2[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + # single command optional + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, c in enumerate(self.cmd_lists): + if self.has_cmd1[i]: + asserts.assert_equal(xml_callable(0x00, [], c), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(0x00, [], c), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, c in enumerate(self.cmd_lists): + if self.has_cmd2[i]: + asserts.assert_equal(xml_callable(0x00, [], c), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(0x00, [], c), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_not_term_mandatory(self): + # single feature not mandatory + xml = ('' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '' + '' + '') + et = 
ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # single attribute not mandatory + xml = ('' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if not self.has_attr1[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if not self.has_attr2[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_not_term_optional(self): + # single feature not optional + xml = ('' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + xml = ('' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_and_term(self): + # and term for features only + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i] and self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # and term for attributes only + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if self.has_attr1[i] and self.has_attr2[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + # and term for feature and attribute + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + for j, a in enumerate(self.attribute_lists): + if self.has_ab[i] and self.has_attr2[j]: + asserts.assert_equal(xml_callable(f, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, a, []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_or_term(self): + # or term feature only + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = 
parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i] or self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # or term attribute only + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, a in enumerate(self.attribute_lists): + if self.has_attr1[i] or self.has_attr2[i]: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(0x00, a, []), ConformanceDecision.NOT_APPLICABLE) + + # or term feature and attribute + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + for j, a in enumerate(self.attribute_lists): + if self.has_ab[i] or self.has_attr2[j]: + asserts.assert_equal(xml_callable(f, a, []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, a, []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_and_term_with_not(self): + # and term with not + xml = ('' + '' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not self.has_ab[i] and self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_or_term_with_not(self): + # or term with not on second feature + xml = ('' + '' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i] or not self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # not around or term with + xml = ('' + '' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if not (self.has_ab[i] or self.has_cd[i]): + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_and_term_with_three_terms(self): + # and term with three features + xml = ('' + '' + '' + '' + '' + '' + '') + self.feature_names_to_bits['EF'] = 0x04 + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + # no features + asserts.assert_equal(xml_callable(0x00, [], []), ConformanceDecision.NOT_APPLICABLE) + # one feature + asserts.assert_equal(xml_callable(0x01, [], []), ConformanceDecision.NOT_APPLICABLE) + # all features + asserts.assert_equal(xml_callable(0x07, [], []), ConformanceDecision.OPTIONAL) + + # and term with one of each + xml = ('' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + for j, a in enumerate(self.attribute_lists): + for k, c in enumerate(self.cmd_lists): + if self.has_ab[i] and 
self.has_attr1[j] and self.has_cmd1[k]: + asserts.assert_equal(xml_callable(f, a, c), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, a, c), ConformanceDecision.NOT_APPLICABLE) + + @async_test_body + async def test_conformance_or_term_with_three_terms(self): + # or term with three features + xml = ('' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + # no features + asserts.assert_equal(xml_callable(0x00, [], []), ConformanceDecision.NOT_APPLICABLE) + # one feature + asserts.assert_equal(xml_callable(0x01, [], []), ConformanceDecision.OPTIONAL) + # all features + asserts.assert_equal(xml_callable(0x07, [], []), ConformanceDecision.OPTIONAL) + + # or term with one of each + xml = ('' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + for j, a in enumerate(self.attribute_lists): + for k, c in enumerate(self.cmd_lists): + if self.has_ab[i] or self.has_attr1[j] or self.has_cmd1[k]: + asserts.assert_equal(xml_callable(f, a, c), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, a, c), ConformanceDecision.NOT_APPLICABLE) + + def test_conformance_otherwise(self): + # AB, O + xml = ('' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + + # AB, [CD] + xml = ('' + '' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + elif self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.OPTIONAL) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.NOT_APPLICABLE) + + # AB & !CD, P + xml = ('' + '' + '' + '' + '' + '' + '' + '' + '' + '' + '') + et = ElementTree.fromstring(xml) + xml_callable = parse_callable_from_xml(et, self.params) + for i, f in enumerate(self.feature_maps): + if self.has_ab[i] and not self.has_cd[i]: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.MANDATORY) + else: + asserts.assert_equal(xml_callable(f, [], []), ConformanceDecision.PROVISIONAL) + + +if __name__ == "__main__": + default_matter_test_main() diff --git a/src/python_testing/conformance_support.py b/src/python_testing/conformance_support.py new file mode 100644 index 00000000000000..2dabb584c9d0f7 --- /dev/null +++ b/src/python_testing/conformance_support.py @@ -0,0 +1,263 @@ +# +# Copyright (c) 2023 Project CHIP Authors +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
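
Editorial note: the XML strings exercised by the tests above are small conformance snippets of the kind conformance_support.py (below) parses. A representative round trip, reusing the two-feature map from setup_class; the snippet shape is assumed for illustration, not quoted from the tests:

    import xml.etree.ElementTree as ElementTree

    from conformance_support import ConformanceParseParameters, parse_callable_from_xml

    params = ConformanceParseParameters(feature_map={'AB': 0x01, 'CD': 0x02},
                                        attribute_map={}, command_map={})
    snippet = '<optionalConform><feature name="AB"/></optionalConform>'
    check = parse_callable_from_xml(ElementTree.fromstring(snippet), params)
    print(check(0x01, [], []))  # ConformanceDecision.OPTIONAL (feature AB present)
    print(check(0x02, [], []))  # ConformanceDecision.NOT_APPLICABLE (AB absent)
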
+# + +import xml.etree.ElementTree as ElementTree +from dataclasses import dataclass +from enum import Enum, auto +from typing import Callable + +from chip.tlv import uint + +OTHERWISE_CONFORM = 'otherwiseConform' +OPTIONAL_CONFORM = 'optionalConform' +PROVISIONAL_CONFORM = 'provisionalConform' +MANDATORY_CONFORM = 'mandatoryConform' +DEPRECATE_CONFORM = 'deprecateConform' +DISALLOW_CONFORM = 'disallowConform' +AND_TERM = 'andTerm' +OR_TERM = 'orTerm' +NOT_TERM = 'notTerm' +FEATURE_TAG = 'feature' +ATTRIBUTE_TAG = 'attribute' +COMMAND_TAG = 'command' + + +class ConformanceException(Exception): + def __init__(self, msg): + self.msg = msg + + def __str__(self): + return f"ConformanceException({self.msg})" + + +class ConformanceDecision(Enum): + MANDATORY = auto() + OPTIONAL = auto() + NOT_APPLICABLE = auto() + DISALLOWED = auto() + PROVISIONAL = auto() + + +@dataclass +class ConformanceParseParameters: + feature_map: dict[str, uint] + attribute_map: dict[str, uint] + command_map: dict[str, uint] + + +def conformance_allowed(conformance_decision: ConformanceDecision, allow_provisional: bool): + if conformance_decision == ConformanceDecision.NOT_APPLICABLE or conformance_decision == ConformanceDecision.DISALLOWED: + return False + if conformance_decision == ConformanceDecision.PROVISIONAL: + return allow_provisional + return True + + +def mandatory(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return ConformanceDecision.MANDATORY + + +def optional(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return ConformanceDecision.OPTIONAL + + +def deprecated(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return ConformanceDecision.DISALLOWED + + +def disallowed(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return ConformanceDecision.DISALLOWED + + +def provisional(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return ConformanceDecision.PROVISIONAL + + +def feature(requiredFeature: uint) -> Callable: + def feature_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + if requiredFeature & feature_map != 0: + return ConformanceDecision.MANDATORY + return ConformanceDecision.NOT_APPLICABLE + return feature_inner + + +def attribute(requiredAttribute: uint) -> Callable: + def attribute_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + if requiredAttribute in attribute_list: + return ConformanceDecision.MANDATORY + return ConformanceDecision.NOT_APPLICABLE + return attribute_inner + + +def command(requiredCommand: uint) -> Callable: + def command_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + if requiredCommand in all_command_list: + return ConformanceDecision.MANDATORY + return ConformanceDecision.NOT_APPLICABLE + return command_inner + + +def optional_wrapper(op: Callable) -> Callable: + def optional_wrapper_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + decision = op(feature_map, attribute_list, all_command_list) + if decision == ConformanceDecision.MANDATORY or decision == ConformanceDecision.OPTIONAL: + return ConformanceDecision.OPTIONAL + elif decision == ConformanceDecision.NOT_APPLICABLE: + 
return ConformanceDecision.NOT_APPLICABLE + else: + raise ConformanceException(f'Optional wrapping invalid op {decision}') + return optional_wrapper_inner + + +def mandatory_wrapper(op: Callable) -> Callable: + def mandatory_wrapper_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + return op(feature_map, attribute_list, all_command_list) + return mandatory_wrapper_inner + + +def not_operation(op: Callable): + def not_operation_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + # not operations can't be used with anything that returns DISALLOWED + # not operations also can't be used with things that are optional + # ie, ![AB] doesn't make sense, nor does !O + decision = op(feature_map, attribute_list, all_command_list) + if decision == ConformanceDecision.OPTIONAL or decision == ConformanceDecision.DISALLOWED or decision == ConformanceDecision.PROVISIONAL: + raise ConformanceException('NOT operation on optional or disallowed item') + elif decision == ConformanceDecision.NOT_APPLICABLE: + return ConformanceDecision.MANDATORY + elif decision == ConformanceDecision.MANDATORY: + return ConformanceDecision.NOT_APPLICABLE + else: + raise ConformanceException('NOT called on item with non-conformance value') + return not_operation_inner + + +def and_operation(op_list: list[Callable]) -> Callable: + def and_operation_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + for op in op_list: + decision = op(feature_map, attribute_list, all_command_list) + # and operations can't happen on optional or disallowed + if decision == ConformanceDecision.OPTIONAL or decision == ConformanceDecision.DISALLOWED or decision == ConformanceDecision.PROVISIONAL: + raise ConformanceException('AND operation on optional or disallowed item') + elif decision == ConformanceDecision.NOT_APPLICABLE: + return ConformanceDecision.NOT_APPLICABLE + elif decision == ConformanceDecision.MANDATORY: + continue + else: + raise ConformanceException('Oplist item returned non-conformance value') + return ConformanceDecision.MANDATORY + return and_operation_inner + + +def or_operation(op_list: list[Callable]) -> Callable: + def or_operation_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + for op in op_list: + decision = op(feature_map, attribute_list, all_command_list) + if decision == ConformanceDecision.DISALLOWED or decision == ConformanceDecision.PROVISIONAL: + raise ConformanceException('OR operation on optional or disallowed item') + elif decision == ConformanceDecision.NOT_APPLICABLE: + continue + elif decision == ConformanceDecision.MANDATORY: + return ConformanceDecision.MANDATORY + elif decision == ConformanceDecision.OPTIONAL: + return ConformanceDecision.OPTIONAL + else: + raise ConformanceException('Oplist item returned non-conformance value') + return ConformanceDecision.NOT_APPLICABLE + return or_operation_inner + +# TODO: add xor operation once it's required +# TODO: how would equal and unequal operations work here? + + +def otherwise(op_list: list[Callable]) -> Callable: + def otherwise_inner(feature_map: uint, attribute_list: list[uint], all_command_list: list[uint]) -> ConformanceDecision: + # Otherwise operations apply from left to right. 
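
Editorial note: a quick usage sketch for the combinators defined above (assuming conformance_support is importable, as it is for scripts under src/python_testing; the IDs are illustrative):

    from conformance_support import attribute, feature, or_operation

    # "feature 0x01 OR attribute 0x0000": mandatory as soon as either is present.
    check = or_operation([feature(0x01), attribute(0x0000)])
    print(check(0x00, [], []))        # ConformanceDecision.NOT_APPLICABLE
    print(check(0x01, [], []))        # ConformanceDecision.MANDATORY (feature bit set)
    print(check(0x00, [0x0000], []))  # ConformanceDecision.MANDATORY (attribute in AttributeList)
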
If any of them + # has a definite decision (optional, mandatory or disallowed), that is the one that applies + # Provisional items are meant to be marked as the first item in the list + # Deprecated items are either on their own, or follow an O as O,D. + # For O,D, optional applies (leftmost), but we should consider some way to warn here as well, + # possibly in another function + for op in op_list: + decision = op(feature_map, attribute_list, all_command_list) + if decision == ConformanceDecision.NOT_APPLICABLE: + continue + return decision + return ConformanceDecision.NOT_APPLICABLE + return otherwise_inner + + +def parse_callable_from_xml(element: ElementTree.Element, params: ConformanceParseParameters) -> Callable: + if len(list(element)) == 0: + # no subchildren here, so this can only be mandatory, optional, provisional, deprecated, disallowed, feature or attribute + if element.tag == MANDATORY_CONFORM: + return mandatory + elif element.tag == OPTIONAL_CONFORM: + return optional + elif element.tag == PROVISIONAL_CONFORM: + return provisional + elif element.tag == DEPRECATE_CONFORM: + return deprecated + elif element.tag == DISALLOW_CONFORM: + return disallowed + elif element.tag == FEATURE_TAG: + try: + return feature(params.feature_map[element.get('name')]) + except KeyError: + raise ConformanceException(f'Conformance specifies feature not in feature table: {element.get("name")}') + elif element.tag == ATTRIBUTE_TAG: + # Some command conformance tags are marked as attribute, so if this key isn't in attribute, try command + name = element.get('name') + if name in params.attribute_map: + return attribute(params.attribute_map[name]) + elif name in params.command_map: + return command(params.command_map[name]) + else: + raise ConformanceException(f'Conformance specifies attribute or command not in table: {name}') + elif element.tag == COMMAND_TAG: + return command(params.command_map[element.get('name')]) + else: + raise ConformanceException( + f'Unexpected xml conformance element with no children {str(element.tag)} {str(element.attrib)}') + + # First build the list, then create the callable for this element + ops = [] + for sub in element: + ops.append(parse_callable_from_xml(sub, params)) + + # optional can be a wrapper as well as a standalone + # This can be any of the boolean operations, optional or otherwise + if element.tag == OPTIONAL_CONFORM: + if len(ops) > 1: + raise ConformanceException(f'OPTIONAL term found with more than one subelement {list(element)}') + return optional_wrapper(ops[0]) + elif element.tag == MANDATORY_CONFORM: + if len(ops) > 1: + raise ConformanceException(f'MANDATORY term found with more than one subelement {list(element)}') + return mandatory_wrapper(ops[0]) + elif element.tag == AND_TERM: + return and_operation(ops) + elif element.tag == OR_TERM: + return or_operation(ops) + elif element.tag == NOT_TERM: + if len(ops) > 1: + raise ConformanceException(f'NOT term found with more than one subelement {list(element)}') + return not_operation(ops[0]) + elif element.tag == OTHERWISE_CONFORM: + return otherwise(ops) + else: + raise ConformanceException(f'Unexpected conformance tag with children {element}') diff --git a/src/python_testing/matter_testing_support.py b/src/python_testing/matter_testing_support.py index a394952445de60..398a01f6cd9d17 100644 --- a/src/python_testing/matter_testing_support.py +++ b/src/python_testing/matter_testing_support.py @@ -333,6 +333,19 @@ class CommandPathLocation: cluster_id: int command_id: int + +@dataclass +class 
ClusterPathLocation: + endpoint_id: int + cluster_id: int + + +@dataclass +class FeaturePathLocation: + endpoint_id: int + cluster_id: int + feature_code: str + # ProblemSeverity is not using StrEnum, but rather Enum, since StrEnum only # appeared in 3.11. To make it JSON serializable easily, multiple inheritance # from `str` is used. See https://stackoverflow.com/a/51976841. @@ -347,7 +360,7 @@ class ProblemSeverity(str, Enum): @dataclass class ProblemNotice: test_name: str - location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation] + location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation, ClusterPathLocation, FeaturePathLocation] severity: ProblemSeverity problem: str spec_location: str = "" @@ -551,13 +564,13 @@ async def send_single_cmd( def print_step(self, stepnum: typing.Union[int, str], title: str) -> None: logging.info(f'***** Test Step {stepnum} : {title}') - def record_error(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation], problem: str, spec_location: str = ""): + def record_error(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation, ClusterPathLocation, FeaturePathLocation], problem: str, spec_location: str = ""): self.problems.append(ProblemNotice(test_name, location, ProblemSeverity.ERROR, problem, spec_location)) - def record_warning(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation], problem: str, spec_location: str = ""): + def record_warning(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation, ClusterPathLocation, FeaturePathLocation], problem: str, spec_location: str = ""): self.problems.append(ProblemNotice(test_name, location, ProblemSeverity.WARNING, problem, spec_location)) - def record_note(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation], problem: str, spec_location: str = ""): + def record_note(self, test_name: str, location: Union[AttributePathLocation, EventPathLocation, CommandPathLocation, ClusterPathLocation, FeaturePathLocation], problem: str, spec_location: str = ""): self.problems.append(ProblemNotice(test_name, location, ProblemSeverity.NOTE, problem, spec_location)) def get_setup_payload_info(self) -> SetupPayloadInfo: diff --git a/src/python_testing/spec_parsing_support.py b/src/python_testing/spec_parsing_support.py new file mode 100644 index 00000000000000..9e014aa95dd2aa --- /dev/null +++ b/src/python_testing/spec_parsing_support.py @@ -0,0 +1,343 @@ +# +# Copyright (c) 2023 Project CHIP Authors +# All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
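
Editorial note: with the widened Union above, cluster- and feature-level findings can now be recorded alongside the existing path types. A small construction sketch (runs inside the python_testing environment; endpoint, cluster id and feature code are illustrative):

    from matter_testing_support import FeaturePathLocation, ProblemNotice, ProblemSeverity

    problem = ProblemNotice(test_name='test_spec_conformance',
                            location=FeaturePathLocation(endpoint_id=1, cluster_id=0x0006,
                                                         feature_code='LT'),
                            severity=ProblemSeverity.ERROR,
                            problem='Disallowed feature with mask 0x01')
    print(problem)
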
+# + +import glob +import logging +import os +import xml.etree.ElementTree as ElementTree +from copy import deepcopy +from dataclasses import dataclass +from enum import Enum, auto +from typing import Callable + +from chip.tlv import uint +from conformance_support import (DEPRECATE_CONFORM, DISALLOW_CONFORM, MANDATORY_CONFORM, OPTIONAL_CONFORM, OTHERWISE_CONFORM, + PROVISIONAL_CONFORM, ConformanceDecision, ConformanceException, ConformanceParseParameters, + or_operation, parse_callable_from_xml) +from matter_testing_support import (AttributePathLocation, ClusterPathLocation, CommandPathLocation, EventPathLocation, + FeaturePathLocation, ProblemNotice, ProblemSeverity) + + +@dataclass +class XmlFeature: + code: str + name: str + conformance: Callable[[uint], ConformanceDecision] + + +@dataclass +class XmlAttribute: + name: str + datatype: str + conformance: Callable[[uint], ConformanceDecision] + + +@dataclass +class XmlCommand: + name: str + conformance: Callable[[uint], ConformanceDecision] + + +@dataclass +class XmlEvent: + name: str + conformance: Callable[[uint], ConformanceDecision] + + +@dataclass +class XmlCluster: + name: str + revision: int + derived: str + feature_map: dict[str, uint] + attribute_map: dict[str, uint] + command_map: dict[str, uint] + features: dict[str, XmlFeature] + attributes: dict[uint, XmlAttribute] + accepted_commands: dict[uint, XmlCommand] + generated_commands: dict[uint, XmlCommand] + events: dict[uint, XmlEvent] + + +class CommandType(Enum): + ACCEPTED = auto() + GENERATED = auto() + + +def has_zigbee_conformance(conformance: ElementTree.Element) -> bool: + # For clusters, things with zigbee conformance can share IDs with the matter elements, so we don't want them + + # TODO: it's actually possible for a thing to have a zigbee conformance AND to have other conformances, and we should check + # for that, but for now, this is fine because that hasn't happened in the cluster conformances YET. + # It does happen for device types, so we need to be careful there. 
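
Editorial note: the loop below scans the conformance element for <condition name="Zigbee"/> children. An illustrative element of the shape it matches, with the name comparison written out as an explicit filter (structure assumed from the attribute names used here):

    import xml.etree.ElementTree as ElementTree

    snippet = ElementTree.fromstring('<optionalConform><condition name="Zigbee"/></optionalConform>')
    zigbee_only = any(c.attrib.get('name', '').lower() == 'zigbee'
                      for c in snippet.iter('condition'))
    print(zigbee_only)  # True: an element like this is skipped by the parser
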
+ condition = conformance.iter('condition') + for c in condition: + try: + c.attrib['name'].lower() == "zigbee" + return True + except KeyError: + continue + return False + + +class ClusterParser: + def __init__(self, cluster, cluster_id, name): + self._problems: list[ProblemNotice] = [] + self._cluster = cluster + self._cluster_id = cluster_id + self._name = name + + self._derived = None + try: + classification = next(cluster.iter('classification')) + hierarchy = classification.attrib['hierarchy'] + if hierarchy.lower() == 'derived': + self._derived = classification.attrib['baseCluster'] + except (KeyError, StopIteration): + self._derived = None + + self.feature_elements = self.get_all_feature_elements() + self.attribute_elements = self.get_all_attribute_elements() + self.command_elements = self.get_all_command_elements() + self.event_elements = self.get_all_event_elements() + self.params = ConformanceParseParameters(feature_map=self.create_feature_map(), attribute_map=self.create_attribute_map(), + command_map=self.create_command_map()) + + def get_conformance(self, element: ElementTree.Element) -> ElementTree.Element: + for sub in element: + if sub.tag == OTHERWISE_CONFORM or sub.tag == MANDATORY_CONFORM or sub.tag == OPTIONAL_CONFORM or sub.tag == PROVISIONAL_CONFORM or sub.tag == DEPRECATE_CONFORM or sub.tag == DISALLOW_CONFORM: + return sub + + # Conformance is missing, so let's record the problem and treat it as optional for lack of a better choice + if element.tag == 'feature': + location = FeaturePathLocation(endpoint_id=0, cluster_id=self._cluster_id, feature_code=element.attrib['code']) + elif element.tag == 'command': + location = CommandPathLocation(endpoint_id=0, cluster_id=self._cluster_id, command_id=element.attrib['id']) + elif element.tag == 'attribute': + location = AttributePathLocation(endpoint_id=0, cluster_id=self._cluster_id, attribute_id=element.attrib['id']) + elif element.tag == 'event': + location = EventPathLocation(endpoint_id=0, cluster_id=self._cluster_id, event_id=element.attrib['id']) + else: + location = ClusterPathLocation(endpoing_id=0, cluster_id=self._cluster_id) + self._problems.append(ProblemNotice(test_name='Spec XML parsing', location=location, + severity=ProblemSeverity.WARNING, problem='Unable to find conformance element')) + + return ElementTree.Element(OPTIONAL_CONFORM) + + def get_all_type(self, type_container: str, type_name: str, key_name: str) -> list[tuple[ElementTree.Element, ElementTree.Element]]: + ret = [] + container_tags = self._cluster.iter(type_container) + for container in container_tags: + elements = container.iter(type_name) + for element in elements: + try: + element.attrib[key_name] + except KeyError: + # This is a conformance tag, which uses the same name + continue + conformance = self.get_conformance(element) + if has_zigbee_conformance(conformance): + continue + ret.append((element, conformance)) + return ret + + def get_all_feature_elements(self) -> list[tuple[ElementTree.Element, ElementTree.Element]]: + ''' Returns a list of features and their conformances''' + return self.get_all_type('features', 'feature', 'code') + + def get_all_attribute_elements(self) -> list[tuple[ElementTree.Element, ElementTree.Element]]: + ''' Returns a list of attributes and their conformances''' + return self.get_all_type('attributes', 'attribute', 'id') + + def get_all_command_elements(self) -> list[tuple[ElementTree.Element, ElementTree.Element]]: + ''' Returns a list of commands and their conformances ''' + return 
self.get_all_type('commands', 'command', 'id') + + def get_all_event_elements(self) -> list[tuple[ElementTree.Element, ElementTree.Element]]: + ''' Returns a list of events and their conformances''' + return self.get_all_type('events', 'event', 'id') + + def create_feature_map(self) -> dict[str, uint]: + features = {} + for element, conformance in self.feature_elements: + features[element.attrib['code']] = 1 << int(element.attrib['bit'], 0) + return features + + def create_attribute_map(self) -> dict[str, uint]: + attributes = {} + for element, conformance in self.attribute_elements: + attributes[element.attrib['name']] = int(element.attrib['id'], 0) + return attributes + + def create_command_map(self) -> dict[str, uint]: + commands = {} + for element, conformance in self.command_elements: + commands[element.attrib['name']] = int(element.attrib['id'], 0) + return commands + + def parse_conformance(self, conformance_xml: ElementTree.Element) -> Callable: + try: + return parse_callable_from_xml(conformance_xml, self.params) + except ConformanceException as ex: + # Just point to the general cluster, because something is mismatched, but it's not clear what + location = ClusterPathLocation(endpoint_id=0, cluster_id=self._cluster_id) + self._problems.append(ProblemNotice(test_name='Spec XML parsing', location=location, + severity=ProblemSeverity.WARNING, problem=str(ex))) + return None + + def parse_features(self) -> dict[uint, XmlFeature]: + features = {} + for element, conformance_xml in self.feature_elements: + mask = 1 << int(element.attrib['bit'], 0) + conformance = self.parse_conformance(conformance_xml) + if conformance is None: + continue + features[mask] = XmlFeature(code=element.attrib['code'], name=element.attrib['name'], + conformance=conformance) + return features + + def parse_attributes(self) -> dict[uint, XmlAttribute]: + attributes = {} + for element, conformance_xml in self.attribute_elements: + code = int(element.attrib['id'], 0) + # Some deprecated attributes don't have their types included, for now, lets just fallback to UNKNOWN + try: + datatype = element.attrib['type'] + except KeyError: + datatype = 'UNKNOWN' + conformance = self.parse_conformance(conformance_xml) + if conformance is None: + continue + if code in attributes: + # This is one of those fun ones where two different rows have the same id and name, but differ in conformance and ranges + # I don't have a good way to relate the ranges to the conformance, but they're both acceptable, so let's just or them. 
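
Editorial note: when two spec rows share an id, their conformances are combined with or_operation, as the comment above describes. A minimal illustration with the combinators from conformance_support (feature masks illustrative):

    from conformance_support import feature, or_operation

    row_a = feature(0x01)        # conformance from the first duplicate row
    row_b = feature(0x02)        # conformance from the second duplicate row
    merged = or_operation([row_a, row_b])
    print(merged(0x02, [], []))  # ConformanceDecision.MANDATORY: either row's condition suffices
    print(merged(0x00, [], []))  # ConformanceDecision.NOT_APPLICABLE: neither condition holds
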
+ conformance = or_operation([conformance, attributes[code].conformance]) + attributes[code] = XmlAttribute(name=element.attrib['name'], datatype=datatype, + conformance=conformance) + return attributes + + def parse_commands(self, command_type: CommandType) -> dict[uint, XmlAttribute]: + commands = {} + for element, conformance_xml in self.command_elements: + code = int(element.attrib['id'], 0) + dir = CommandType.ACCEPTED + try: + if element.attrib['direction'].lower() == 'responsefromserver': + dir = CommandType.GENERATED + except KeyError: + pass + if dir != command_type: + continue + code = int(element.attrib['id'], 0) + conformance = self.parse_conformance(conformance_xml) + if conformance is None: + continue + if code in commands: + conformance = or_operation([conformance, commands[code].conformance]) + commands[code] = XmlCommand(name=element.attrib['name'], conformance=conformance) + return commands + + def parse_events(self) -> dict[uint, XmlAttribute]: + events = {} + for element, conformance_xml in self.event_elements: + code = int(element.attrib['id'], 0) + conformance = self.parse_conformance(conformance_xml) + if conformance is None: + continue + if code in events: + conformance = or_operation([conformance, events[code].conformance]) + events[code] = XmlEvent(name=element.attrib['name'], conformance=conformance) + return events + + def create_cluster(self) -> XmlCluster: + return XmlCluster(revision=self._cluster.attrib['revision'], derived=self._derived, + name=self._name, feature_map=self.params.feature_map, + attribute_map=self.params.attribute_map, command_map=self.params.command_map, + features=self.parse_features(), + attributes=self.parse_attributes(), + accepted_commands=self.parse_commands(CommandType.ACCEPTED), + generated_commands=self.parse_commands(CommandType.GENERATED), + events=self.parse_events()) + + def get_problems(self) -> list[ProblemNotice]: + return self._problems + + +def build_xml_clusters() -> tuple[list[XmlCluster], list[ProblemNotice]]: + dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'data_model', 'clusters') + clusters: dict[int, XmlCluster] = {} + derived_clusters: dict[str, XmlCluster] = {} + ids_by_name = {} + problems = [] + for xml in glob.glob(f"{dir}/*.xml"): + logging.info(f'Parsing file {xml}') + tree = ElementTree.parse(f'{xml}') + root = tree.getroot() + cluster = root.iter('cluster') + for c in cluster: + name = c.attrib['name'] + if not c.attrib['id']: + # Fully derived clusters have no id, but also shouldn't appear on a device. + # We do need to keep them, though, because we need to update the derived + # clusters. We keep them in a special dict by name, so they can be thrown + # away later. + cluster_id = None + else: + cluster_id = int(c.attrib['id'], 0) + ids_by_name[name] = cluster_id + + parser = ClusterParser(c, cluster_id, name) + new = parser.create_cluster() + problems = problems + parser.get_problems() + + if cluster_id: + clusters[cluster_id] = new + else: + derived_clusters[name] = new + + # We have the information now about which clusters are derived, so we need to fix them up. 
Apply first the base cluster, + # then add the specific cluster overtop + for id, c in clusters.items(): + if c.derived: + base_name = c.derived + if base_name in ids_by_name: + base = clusters[ids_by_name[c.derived]] + else: + base = derived_clusters[base_name] + + feature_map = deepcopy(base.feature_map) + feature_map.update(c.feature_map) + attribute_map = deepcopy(base.attribute_map) + attribute_map.update(c.attribute_map) + command_map = deepcopy(base.command_map) + command_map.update(c.command_map) + features = deepcopy(base.features) + features.update(c.features) + attributes = deepcopy(base.attributes) + attributes.update(c.attributes) + accepted_commands = deepcopy(base.accepted_commands) + accepted_commands.update(c.accepted_commands) + generated_commands = deepcopy(base.generated_commands) + generated_commands.update(c.generated_commands) + events = deepcopy(base.events) + events.update(c.events) + new = XmlCluster(revision=c.revision, derived=c.derived, name=c.name, + feature_map=feature_map, attribute_map=attribute_map, command_map=command_map, + features=features, attributes=attributes, accepted_commands=accepted_commands, + generated_commands=generated_commands, events=events) + clusters[id] = new + return clusters, problems
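
Editorial note: taken together, a test consumes this module in two steps: build the spec view once, then evaluate each element's pre-compiled conformance callable against the values read from the DUT, as test_spec_conformance does above. A hedged end-to-end sketch (cluster id and the FeatureMap/AttributeList/command values are illustrative; assumes the data_model/clusters XML files are present in the tree):

    from spec_parsing_support import build_xml_clusters

    clusters, problems = build_xml_clusters()
    on_off = clusters[0x0006]  # On/Off cluster, used here only as an example
    feature_map = 0x01
    attribute_list = [0x0000, 0xFFFB, 0xFFFC]
    all_command_list = [0x00, 0x01, 0x02]
    for attr_id, xml_attribute in on_off.attributes.items():
        decision = xml_attribute.conformance(feature_map, attribute_list, all_command_list)
        print(f'0x{attr_id:04x} {xml_attribute.name}: {decision}')
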