From bb30c0e3b55125be651259a5a634fc0247cb8441 Mon Sep 17 00:00:00 2001 From: RaviPrasad B S Date: Mon, 16 Dec 2024 20:41:05 +0530 Subject: [PATCH] [#14] can-protocol-adapter initial version --- can-protocol-adapter/.gitignore | 14 + can-protocol-adapter/Cargo.lock | 1634 +++++++++++++++++ can-protocol-adapter/Cargo.toml | 44 + can-protocol-adapter/Dockerfile | 40 + can-protocol-adapter/README.md | 118 ++ can-protocol-adapter/build.sh | 67 + can-protocol-adapter/config.json | 59 + can-protocol-adapter/dbc/obd2_isotp.dbc | 105 ++ can-protocol-adapter/doc/configuration.md | 125 ++ .../doc/v0.1.0-initial-version.md | 20 + can-protocol-adapter/lib/Cargo.toml | 32 + can-protocol-adapter/lib/common/Cargo.toml | 37 + can-protocol-adapter/lib/common/src/lib.rs | 236 +++ can-protocol-adapter/lib/kuksa/Cargo.toml | 38 + can-protocol-adapter/lib/kuksa/src/lib.rs | 356 ++++ can-protocol-adapter/proto/README.md | 14 + .../proto/kuksa/val/v1/README.md | 6 + .../proto/kuksa/val/v1/types.proto | 288 +++ .../proto/kuksa/val/v1/val.proto | 126 ++ .../proto/sdv/databroker/v1/README.md | 12 + .../proto/sdv/databroker/v1/broker.proto | 96 + .../proto/sdv/databroker/v1/collector.proto | 97 + .../proto/sdv/databroker/v1/types.proto | 169 ++ can-protocol-adapter/src/can/comm.rs | 203 ++ can-protocol-adapter/src/can/decoder.rs | 112 ++ can-protocol-adapter/src/can/mod.rs | 15 + can-protocol-adapter/src/can/socket.rs | 124 ++ .../src/databroker-proto/Cargo.toml | 28 + .../src/databroker-proto/build.rs | 30 + .../src/databroker-proto/proto | 1 + .../src/databroker-proto/src/lib.rs | 147 ++ .../src/grpc/kuksa_val_v1/conversions.rs | 340 ++++ .../src/grpc/kuksa_val_v1/mod.rs | 15 + .../src/grpc/kuksa_val_v1/val.rs | 1292 +++++++++++++ can-protocol-adapter/src/grpc/mod.rs | 15 + .../src/grpc/sdv_databroker_v1/collector.rs | 266 +++ .../src/grpc/sdv_databroker_v1/conversions.rs | 378 ++++ .../src/grpc/sdv_databroker_v1/mod.rs | 16 + .../src/kuksa_feeder/feeder.rs | 149 ++ can-protocol-adapter/src/kuksa_feeder/mod.rs | 13 + can-protocol-adapter/src/main.rs | 160 ++ .../src/utils/adapter_config.rs | 88 + .../src/utils/adapter_utils.rs | 102 + can-protocol-adapter/src/utils/mod.rs | 14 + 44 files changed, 7241 insertions(+) create mode 100644 can-protocol-adapter/.gitignore create mode 100644 can-protocol-adapter/Cargo.lock create mode 100644 can-protocol-adapter/Cargo.toml create mode 100644 can-protocol-adapter/Dockerfile create mode 100644 can-protocol-adapter/README.md create mode 100755 can-protocol-adapter/build.sh create mode 100644 can-protocol-adapter/config.json create mode 100644 can-protocol-adapter/dbc/obd2_isotp.dbc create mode 100644 can-protocol-adapter/doc/configuration.md create mode 100644 can-protocol-adapter/doc/v0.1.0-initial-version.md create mode 100644 can-protocol-adapter/lib/Cargo.toml create mode 100644 can-protocol-adapter/lib/common/Cargo.toml create mode 100644 can-protocol-adapter/lib/common/src/lib.rs create mode 100644 can-protocol-adapter/lib/kuksa/Cargo.toml create mode 100644 can-protocol-adapter/lib/kuksa/src/lib.rs create mode 100755 can-protocol-adapter/proto/README.md create mode 100644 can-protocol-adapter/proto/kuksa/val/v1/README.md create mode 100644 can-protocol-adapter/proto/kuksa/val/v1/types.proto create mode 100644 can-protocol-adapter/proto/kuksa/val/v1/val.proto create mode 100644 can-protocol-adapter/proto/sdv/databroker/v1/README.md create mode 100644 can-protocol-adapter/proto/sdv/databroker/v1/broker.proto create mode 100644 
can-protocol-adapter/proto/sdv/databroker/v1/collector.proto create mode 100644 can-protocol-adapter/proto/sdv/databroker/v1/types.proto create mode 100644 can-protocol-adapter/src/can/comm.rs create mode 100644 can-protocol-adapter/src/can/decoder.rs create mode 100644 can-protocol-adapter/src/can/mod.rs create mode 100644 can-protocol-adapter/src/can/socket.rs create mode 100644 can-protocol-adapter/src/databroker-proto/Cargo.toml create mode 100644 can-protocol-adapter/src/databroker-proto/build.rs create mode 120000 can-protocol-adapter/src/databroker-proto/proto create mode 100644 can-protocol-adapter/src/databroker-proto/src/lib.rs create mode 100644 can-protocol-adapter/src/grpc/kuksa_val_v1/conversions.rs create mode 100644 can-protocol-adapter/src/grpc/kuksa_val_v1/mod.rs create mode 100644 can-protocol-adapter/src/grpc/kuksa_val_v1/val.rs create mode 100644 can-protocol-adapter/src/grpc/mod.rs create mode 100644 can-protocol-adapter/src/grpc/sdv_databroker_v1/collector.rs create mode 100644 can-protocol-adapter/src/grpc/sdv_databroker_v1/conversions.rs create mode 100644 can-protocol-adapter/src/grpc/sdv_databroker_v1/mod.rs create mode 100644 can-protocol-adapter/src/kuksa_feeder/feeder.rs create mode 100644 can-protocol-adapter/src/kuksa_feeder/mod.rs create mode 100644 can-protocol-adapter/src/main.rs create mode 100644 can-protocol-adapter/src/utils/adapter_config.rs create mode 100644 can-protocol-adapter/src/utils/adapter_utils.rs create mode 100644 can-protocol-adapter/src/utils/mod.rs diff --git a/can-protocol-adapter/.gitignore b/can-protocol-adapter/.gitignore new file mode 100644 index 0000000..7d53bb9 --- /dev/null +++ b/can-protocol-adapter/.gitignore @@ -0,0 +1,14 @@ +######################################################################## +# Copyright (c) 2024 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +######################################################################## +# compiled files and executables +target diff --git a/can-protocol-adapter/Cargo.lock b/can-protocol-adapter/Cargo.lock new file mode 100644 index 0000000..a7100a1 --- /dev/null +++ b/can-protocol-adapter/Cargo.lock @@ -0,0 +1,1634 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23a1e53f0f5d86382dafe1cf314783b2044280f406e7e1506368220ad11b1338" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +dependencies = [ + "anstyle", + "windows-sys 0.59.0", +] + +[[package]] +name = "anyhow" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775" + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "async-trait" +version = "0.1.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "autotools" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef941527c41b0fc0dd48511a8154cd5fc7e29200a0ff8b7203c5d777dbc795cf" +dependencies = [ + "cc", +] + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" + +[[package]] +name = "can-dbc" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbe0d033ec316c3bb50e2e53d7ef3d8805e65c5f976d49daea65a12f7e0f9ce8" +dependencies = [ + "derive-getters", + "nom", +] + +[[package]] +name = "can-protocol-adapter" +version = "0.1.0" +dependencies = [ + "byteorder", + "can-dbc", + "clap", + "codegen", + "databroker-proto", + "env_logger", + "http", + "kuksa", + "kuksa-common", + "log", + "serde", + "serde_json", + "socketcan-isotp", + "tokio", + "tonic", +] + +[[package]] +name = "cc" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aeb932158bd710538c73702db6945cb68a8fb08c519e6e12706b94263b36db8" +dependencies = [ + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.5.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + 
+[[package]] +name = "clap_derive" +version = "4.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "codegen" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff61280aed771c3070e7dcc9e050c66f1eb1e3b96431ba66f9f74641d02fc41d" +dependencies = [ + "indexmap 1.9.3", +] + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "databroker-proto" +version = "0.4.7-dev.0" +dependencies = [ + "prost", + "prost-types", + "protobuf-src", + "tonic", + "tonic-build", +] + +[[package]] +name = "derive-getters" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2c35ab6e03642397cdda1dd58abbc05d418aef8e36297f336d5aba060fe8df" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "embedded-can" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9d2e857f87ac832df68fa498d18ddc679175cf3d2e4aa893988e5601baf9438" +dependencies = [ + "nb", +] + +[[package]] +name = "env_logger" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + 
+[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.6.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +dependencies = [ + "equivalent", + "hashbrown 0.15.1", +] + +[[package]] +name = "is-terminal" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" +dependencies = [ + "hermit-abi 0.4.0", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "kuksa" +version = "0.6.0-dev.0" +dependencies = [ + "databroker-proto", + "http", + "kuksa-common", + "tokio", + "tokio-stream", + "tonic", +] + +[[package]] +name = "kuksa-common" +version = "0.6.0-dev.0" +dependencies = [ + "databroker-proto", + "http", + "tokio", + "tokio-stream", + "tonic", +] + +[[package]] +name = "libc" +version = "0.2.159" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + +[[package]] +name = "nb" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d5439c4ad607c3c23abf66de8c8bf57ba8adcd1f129e699851a6e43935d339d" + +[[package]] +name = "nix" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", + "memoffset", + "pin-utils", +] + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "object" +version = "0.36.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name 
= "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.6.0", +] + +[[package]] +name = "pin-project" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "prettyplease" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" +dependencies = [ + "proc-macro2", + "syn 2.0.87", +] + +[[package]] +name = "proc-macro2" +version = "1.0.88" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c3a7fc5db1e57d5a779a352c8cdb57b29aa4c40cc69c3a68a7fedc815fbf2f9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" +dependencies = [ + "bytes", + "heck", + "itertools", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 2.0.87", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "prost-types" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" +dependencies = [ + "prost", +] + +[[package]] +name = "protobuf-src" +version = "1.1.0+21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7ac8852baeb3cc6fb83b93646fb93c0ffe5d14bf138c945ceb4b9948ee0e3c1" +dependencies = [ + "autotools", +] + +[[package]] +name = "quote" +version = "1.0.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +dependencies = [ + "proc-macro2", +] + 
+[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redox_syscall" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "regex" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustix" +version = "0.38.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", 
+ "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "serde" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.210" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "serde_json" +version = "1.0.132" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socketcan-isotp" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cfc29f743f392f6ea9a99c4d700db9125df222e2e8f5066285ce8f59a9ff7d" +dependencies = [ + "bitflags 2.6.0", + "embedded-can", + "libc", + "nix", + "thiserror", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "tempfile" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +dependencies = [ + "cfg-if", + "fastrand", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3c6efbfc763e64eb85c11c25320f0737cb7364c4b6336db90aa9ebe27a0bbd" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b607164372e89797d78b8e23a6d67d5d1038c1c65efd52e1389ef8b77caba2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "tokio" +version = "1.40.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-util" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64", + "bytes", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost", + "rustls-pemfile", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-build" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4ef6dd70a610078cb4e338a0f79d06bc759ff1b22d2120c2ff02ae264ba9c2" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-ident" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.87", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/can-protocol-adapter/Cargo.toml b/can-protocol-adapter/Cargo.toml new file mode 100644 index 0000000..8cb748f --- /dev/null +++ b/can-protocol-adapter/Cargo.toml @@ -0,0 +1,44 @@ +######################################################################## +# Copyright (c) 2024 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +######################################################################## + +[package] +name = "can-protocol-adapter" +version = "0.1.0" +edition = "2021" +license = "Apache-2.0" + +[profile.release] +strip = true +opt-level = "z" # Optimize for size +lto = true #Enable link time Optimization + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +log = "0.4.21" +env_logger = "0.10.2" +tokio = { version = "1.0", features = ["full"] } +clap = { version = "4", features = ["derive"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +socketcan-isotp = "1.0.2" +tonic = "0.11.0" +databroker-proto = { path = "src/databroker-proto"} +kuksa-common = { path = "lib/common"} +kuksa = { path = "lib/kuksa"} +can-dbc="6.0.0" +codegen = "0.2" +byteorder = "1.3" +http = "0.2.8" + + diff --git a/can-protocol-adapter/Dockerfile b/can-protocol-adapter/Dockerfile new file mode 100644 index 0000000..cf69cc5 --- /dev/null +++ b/can-protocol-adapter/Dockerfile @@ -0,0 +1,40 @@ +######################################################################## +# Copyright (c) 2024 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +######################################################################## + +FROM ghcr.io/rust-cross/rust-musl-cross:x86_64-musl AS builder-amd64 +ENV BUILDTARGET="x86_64-unknown-linux-musl" + + +FROM ghcr.io/rust-cross/rust-musl-cross:aarch64-musl AS builder-arm64 +ENV BUILDTARGET="aarch64-unknown-linux-musl" + +FROM builder-$TARGETARCH AS builder +ARG TARGETARCH + +# This will speed up fetching the crate.io index in the future, see +# https://blog.rust-lang.org/2022/06/22/sparse-registry-testing.html +ENV CARGO_UNSTABLE_SPARSE_REGISTRY=true + +RUN echo "Building for $TARGETARCH" \ + mkdir build +COPY . 
build/ +WORKDIR /home/rust/src/build + +RUN cargo build --package can-protocol-adapter --release --target $BUILDTARGET +RUN mv target/${BUILDTARGET}/release/can-protocol-adapter /home/rust + +FROM scratch AS final + +COPY --from=builder /home/rust/can-protocol-adapter can-protocol-adapter + +ENTRYPOINT [ "/can-protocol-adapter" ] diff --git a/can-protocol-adapter/README.md b/can-protocol-adapter/README.md new file mode 100644 index 0000000..cd06cc7 --- /dev/null +++ b/can-protocol-adapter/README.md @@ -0,0 +1,118 @@ +![Rust](https://img.shields.io/badge/rust-000000.svg?style=for-the-badge&logo=rust&logoColor=white)![Docker](https://img.shields.io/badge/docker-1D63ED.svg?style=for-the-badge&logo=docker&logoColor=white)![CAN](https://img.shields.io/badge/CAN-005BBB.svg?style=for-the-badge&logo=generic&logoColor=white) + + +# CAN Protocol Adapter + +The CAN Protocol Adapter is a **Rust-based solution** that enables communication between CAN devices and the Kuksa Databroker using the SocketCAN interface and ISO-TP (ISO Transport Protocol). + +It can be used for various applications, including communication between CAN bus systems, ISO-TP based segmented transfer and reception of payloads larger than the standard 8-byte CAN frame limit, and reading vehicle OBD-II PID data for diagnostics and monitoring. + +## Key Features + +- **User Configuration**: JSON-based user configuration for setting up the CAN and SocketCAN interfaces, defining CAN request-response details, specifying protocol parameters, and mapping VSS and PID signal names. + + +- **SocketCAN Integration**: Compatible with SocketCAN interfaces for CAN bus communication in Linux-based environments. + + +- **Read and Decode CAN Frames**: Read CAN frames from the bus and decode them using DBC files, mapping the signals to their corresponding values. + + +- **Register and Feed Data Points to the Databroker**: Register VSS signal data points with the Kuksa Databroker and feed the decoded or interpreted values into the databroker. + +## Basic Functionality + +The application connects to the SocketCAN interface and sends all CAN requests specified in the PID table of the config file at their configured intervals. The responses are read, and the raw CAN data is decoded according to the DBC file signals. The decoded CAN signals are then translated into VSS data points based on the VSS and DBC signal mappings described in the config file, and the resulting VSS data points are sent to the Kuksa Databroker. + +![CAN-Protocol-adapter]() + +## Initial Version (v0.1.0) Features + +The details of the features, planned enhancements, and known issues for the initial version (v0.1.0) can be found [here](). + + +## Getting started + +- **Prerequisites** + + Install can-utils, e.g. on an Ubuntu machine: + + sudo apt update + sudo apt install can-utils + + Linux Kernel Requirements + + The minimum required Linux kernel version is 5.10; can-isotp has been part of the Linux mainline since kernel 5.10. + +- **Configuration** + + The CAN Protocol Adapter offers different configuration options. For detailed instructions on how to use the CAN adapter in various scenarios, please refer to [CAN Provider Configuration](). + +- **Sample ISOTP DBC File** + + The DBC file [obd2_isotp.dbc]() can be used for testing purposes. The repository contains a sample obd2_isotp DBC file with several example signals, which can be used to test OBD-II PIDs.
+ +- **Kuksa APIs supported** + + The CAN protocol adapter currently implements the kuksa.val.v1.VAL gRPC service interface. + +## Building the application +**Using cargo to Build:** + +To compile the project in release mode using cargo: + + cargo build --release + +**Using the build.sh Script:** + +Alternatively, you can use the provided build.sh script, which takes the target platform as its first argument and performs additional security checks using tools like Clippy, Audit, and Deny before building the project. + + ./build.sh x86_64-unknown-linux-gnu --release + +The compiled binary is generated at target/release/can-protocol-adapter (or under target/<TARGET>/release/ when using build.sh). + +## Running the application + +To run the application, pass the config.json file as an argument: + + ./target/release/can-protocol-adapter --config config.json + +To enable debug logging, use: + + RUST_LOG=debug ./target/release/can-protocol-adapter --config config.json + +## Build Docker Image + +Build the can-protocol-adapter as a Docker container: + + docker build -f Dockerfile --progress=plain -t can-protocol-adapter:latest . + +## Build for a Different Architecture + +To build for a different architecture, pass TARGETARCH as a build argument; the Dockerfile currently supports the amd64 and arm64 architectures: + + docker build -f Dockerfile --progress=plain -t can-protocol-adapter:latest --build-arg TARGETARCH=arm64 . + +## Running the Docker Container + +Run the container with the following command: + + docker run --network=host -v //config.json:/config/config.json -v //.dbc:/data/.dbc can-protocol-adapter:latest --config /config/config.json + + + + + + + + + + + + + + + + + + diff --git a/can-protocol-adapter/build.sh b/can-protocol-adapter/build.sh new file mode 100755 index 0000000..f19455b --- /dev/null +++ b/can-protocol-adapter/build.sh @@ -0,0 +1,67 @@ + #!/bin/bash + # + # Copyright (c) 2024 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License 2.0 which is available at + # http://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # + + # This script builds the can-protocol-adapter for specified architectures in either debug or release mode. + # It also runs basic cargo quality checks like clippy, audit, and deny for code quality and security. + +# Define the target directory +TARGET_DIR="target/logs" + +# Create the target directory if it doesn't exist +mkdir -p $TARGET_DIR + +CLIPPY_LOG="$TARGET_DIR/clippy_report.log" +AUDIT_LOG="$TARGET_DIR/audit_report.log" +DENY_LOG="$TARGET_DIR/deny_report.log" +BUILD_LOG="$TARGET_DIR/build_output.log" + +display_help() { + echo "Usage: $0 [TARGET] [--release]" + echo " TARGET: The target platform (e.g. x86_64-unknown-linux-gnu, aarch64-unknown-linux-gnu)." + echo " --release: Build in release mode. If omitted, builds in debug mode." +} + +if [ -z "$1" ] || [[ "$1" == "--help" ]]; then + display_help + exit 0 +fi + +TARGET="$1" +RELEASE="$2" + +# Run cargo clippy with -D warnings +echo "Running cargo clippy..." +cargo clippy -- -D warnings 2>&1 | tee $CLIPPY_LOG +if [ $? -ne 0 ]; then + echo "Clippy failed! Check $CLIPPY_LOG for details." + exit 1 +fi + +# Run cargo audit +echo "Running cargo audit..." +cargo audit 2>&1 | tee $AUDIT_LOG +if [ $? -ne 0 ]; then + echo "Cargo audit failed! Check $AUDIT_LOG for details." + exit 1 +fi + +# If all checks passed, build the project +echo "Building the project..."
+cargo build --target $TARGET $RELEASE 2>&1 | tee $BUILD_LOG +if [ $? -ne 0 ]; then + echo "Build failed! Check $BUILD_LOG for details." + exit 1 +fi + +echo "All tasks completed successfully!" diff --git a/can-protocol-adapter/config.json b/can-protocol-adapter/config.json new file mode 100644 index 0000000..e3826a4 --- /dev/null +++ b/can-protocol-adapter/config.json @@ -0,0 +1,59 @@ +{ + "general_config": { + "broker_ip": "localhost", + "broker_port": "55555", + "dbcfile" : "/dbc/obd2-isotp.dbc" + }, + "can_config": { + "can_interface": "can0", + "use_extended_id": false, + "tx_id": "0x7DF", + "rx_id": "0x7E8", + "socket_can_type": "SOCK_DGRAM", + "socket_can_protocol": "CAN_ISOTP" + }, + "pid_table": [ + { + "request_pid": "01 0D", + "response_pid": "41 0D", + "response_timeout_ms": 100, + "description": "Vehicle Speed", + "expected_response_length": 4, + "interval_ms": 500, + "dbc_signal_name" : "S1PID0D_VehicleSpeed", + "vss_signal": { + "signal_name": "Vehicle.OBD.Speed", + "datatype": "float", + "unit": "km/h" + } + }, + { + "request_pid": "01 0C", + "response_pid": "41 0C", + "response_timeout_ms": 100, + "description": "Engine RPM", + "expected_response_length": 3, + "interval_ms": 500, + "dbc_signal_name" : "S1PID0C_EngineRPM", + "vss_signal": { + "signal_name": "Vehicle.OBD.EngineSpeed", + "datatype": "float", + "unit": "rpm" + } + }, + { + "request_pid": "01 05", + "response_pid": "41 05", + "response_timeout_ms": 100, + "description": "Engine Coolant Temperature", + "expected_response_length": 3, + "interval_ms": 3000, + "dbc_signal_name" : "S1PID05_EngineCoolantTemp", + "vss_signal": { + "signal_name": "Vehicle.OBD.CoolantTemperature", + "datatype": "float", + "unit": "celsius" + } + } + ] +} diff --git a/can-protocol-adapter/dbc/obd2_isotp.dbc b/can-protocol-adapter/dbc/obd2_isotp.dbc new file mode 100644 index 0000000..e06d5b9 --- /dev/null +++ b/can-protocol-adapter/dbc/obd2_isotp.dbc @@ -0,0 +1,105 @@ +VERSION "" + + +NS_ : + NS_DESC_ + CM_ + BA_DEF_ + BA_ + VAL_ + CAT_DEF_ + CAT_ + FILTER + BA_DEF_DEF_ + EV_DATA_ + ENVVAR_DATA_ + SGTYPE_ + SGTYPE_VAL_ + BA_DEF_SGTYPE_ + BA_SGTYPE_ + SIG_TYPE_REF_ + VAL_TABLE_ + SIG_GROUP_ + SIG_VALTYPE_ + SIGTYPE_VALTYPE_ + BO_TX_BU_ + BA_DEF_REL_ + BA_REL_ + BA_DEF_DEF_REL_ + BU_SG_REL_ + BU_EV_REL_ + BU_BO_REL_ + SG_MUL_VAL_ + +BS_: + +BU_: + + +BO_ 2024 OBD2_ISOTP: 8 Vector__XXX + SG_ S M : 15|8@0+ (1,0) [0|15] "" Vector__XXX + SG_ S1PID m65M : 15|8@0+ (1,0) [0|255] "" Vector__XXX + SG_ S1PID00_PIDsSupported_01_11 m0 : 15|32@0+ (1,0) [0|4294967295] "" Vector__XXX + SG_ S1PID01_MonitorStatus m1 : 15|32@0+ (1,0) [0|4294967295] "" Vector__XXX + SG_ S1PID02_FreezeDTC m2 : 15|16@0+ (1,0) [0|65535] "" Vector__XXX + SG_ S1PID03_FuelSystemStatus m3 : 15|16@0+ (1,0) [0|65535] "" Vector__XXX + SG_ S1PID04_EngineLoad m4 : 15|8@0+ (0.39216,0) [0|100] "%" Vector__XXX + SG_ S1PID05_EngineCoolantTemp m5 : 15|8@0+ (1,-40) [-40|215] "degC" Vector__XXX + SG_ S1PID06_ShortFuelTrimBank1 m6 : 15|8@0+ (0.78125,-100) [-100|99.21875] "%" Vector__XXX + SG_ S1PID07_LongFuelTrimBank1 m7 : 15|8@0+ (0.78125,-100) [-100|99.21875] "%" Vector__XXX + SG_ S1PID08_ShortFuelTrimBank2 m8 : 15|8@0+ (0.78125,-100) [-100|99.21875] "%" Vector__XXX + SG_ S1PID09_LongFuelTrimBank2 m9 : 15|8@0+ (0.78125,-100) [-100|99.21875] "%" Vector__XXX + SG_ S1PID0A_FuelPressure m10 : 15|8@0+ (3,0) [0|765] "kPa" Vector__XXX + SG_ S1PID0B_IntakeManiAbsPress m11 : 15|8@0+ (1,0) [0|255] "kPa" Vector__XXX + SG_ S1PID0C_EngineRPM m12 : 15|16@0+ (0.25,0) [0|16383.75] "rpm" Vector__XXX + SG_ 
S1PID0D_VehicleSpeed m13 : 15|8@0+ (1,0) [0|255] "km/h" Vector__XXX + SG_ S1PID0E_TimingAdvance m14 : 15|8@0+ (0.5,-64) [-64|63.5] "deg before TDC" Vector__XXX + SG_ S1PID0F_IntakeAirTemperature m15 : 15|8@0+ (1,-40) [-40|215] "degC" Vector__XXX + SG_ S1PID10_MAFAirFlowRate m16 : 15|16@0+ (0.01,0) [0|655.35] "grams/sec" Vector__XXX + SG_ S1PID11_ThrottlePosition m17 : 15|8@0+ (0.39216,0) [0|100] "%" Vector__XXX + + + +CM_ BO_ 2024 "OBD_ISOTP"; +BA_DEF_ SG_ "SignalIgnore" INT 0 1; +BA_DEF_ BO_ "VFrameFormat" ENUM "StandardCAN","ExtendedCAN","StandardCAN_FD","ExtendedCAN_FD","J1939PG"; +BA_DEF_ BO_ "MessageIgnore" INT 0 1; +BA_DEF_ BO_ "TransportProtocolType" STRING ; +BA_DEF_ "BusType" STRING ; +BA_DEF_ "ProtocolType" STRING ; +BA_DEF_ "DatabaseCompiler" STRING ; +BA_DEF_DEF_ "SignalIgnore" 0; +BA_DEF_DEF_ "VFrameFormat" ""; +BA_DEF_DEF_ "MessageIgnore" 0; +BA_DEF_DEF_ "TransportProtocolType" ""; +BA_DEF_DEF_ "BusType" ""; +BA_DEF_DEF_ "ProtocolType" ""; +BA_DEF_DEF_ "DatabaseCompiler" "CSS Electronics (wwww.csselectronics.com)"; +BA_ "BusType" "CAN"; +BA_ "ProtocolType" "OBD"; +BA_ "VFrameFormat" BO_ 2024 0; + +BA_ "SignalIgnore" SG_ 2024 S 1; + +BA_ "TransportProtocolType" BO_ 2024 "ISOTP"; + + +SG_MUL_VAL_ 2024 S1PID S 65-65; +SG_MUL_VAL_ 2024 S1PID00_PIDsSupported_01_11 S1PID 0-0; +SG_MUL_VAL_ 2024 S1PID01_MonitorStatus S1PID 1-1; +SG_MUL_VAL_ 2024 S1PID02_FreezeDTC S1PID 2-2; +SG_MUL_VAL_ 2024 S1PID03_FuelSystemStatus S1PID 3-3; +SG_MUL_VAL_ 2024 S1PID04_EngineLoad S1PID 4-4; +SG_MUL_VAL_ 2024 S1PID05_EngineCoolantTemp S1PID 5-5; +SG_MUL_VAL_ 2024 S1PID06_ShortFuelTrimBank1 S1PID 6-6; +SG_MUL_VAL_ 2024 S1PID07_LongFuelTrimBank1 S1PID 7-7; +SG_MUL_VAL_ 2024 S1PID08_ShortFuelTrimBank2 S1PID 8-8; +SG_MUL_VAL_ 2024 S1PID09_LongFuelTrimBank2 S1PID 9-9; +SG_MUL_VAL_ 2024 S1PID0A_FuelPressure S1PID 10-10; +SG_MUL_VAL_ 2024 S1PID0B_IntakeManiAbsPress S1PID 11-11; +SG_MUL_VAL_ 2024 S1PID0C_EngineRPM S1PID 12-12; +SG_MUL_VAL_ 2024 S1PID0D_VehicleSpeed S1PID 13-13; +SG_MUL_VAL_ 2024 S1PID0E_TimingAdvance S1PID 14-14; +SG_MUL_VAL_ 2024 S1PID0F_IntakeAirTemperature S1PID 15-15; +SG_MUL_VAL_ 2024 S1PID10_MAFAirFlowRate S1PID 16-16; +SG_MUL_VAL_ 2024 S1PID11_ThrottlePosition S1PID 17-17; diff --git a/can-protocol-adapter/doc/configuration.md b/can-protocol-adapter/doc/configuration.md new file mode 100644 index 0000000..b1defb9 --- /dev/null +++ b/can-protocol-adapter/doc/configuration.md @@ -0,0 +1,125 @@ +# CAN Protocol Adapter Configuration + +This configuration file is used to manage the settings for a CAN interface with socket-can and ISO-TP (ISO-15765 Transport Protocol) support. The file is structured to allow configuration of both general settings and specific CAN-related settings, as well as the configuration of various parameter requests (PIDs) to fetch vehicle or CAN data. + +This setup can be used for any CAN network supporting ISO-TP communication, not limited to OBD-II systems. Below are the details of the configuration file sections and their usage. + +### General Configuration (general_config) + +This section defines basic settings required for communication with the CAN interface and the message broker: + + "general_config": { + "broker_ip": "localhost", + "broker_port": "55555", + "dbcfile": "/dbc/obd2_isotp.dbc" + } + +### broker_ip: + Specifies the IP address of the kuksa databroker. + + +### broker_port: + Specifies the port number for kuksa databroker. + + +### dbcfile: +Path to the DBC (CAN database) file, which contains signal definitions for the CAN network. 
This file defines how the CAN data should be interpreted, including signal names, units, and scaling. + +### CAN Configuration (can_config) +This section configures the CAN interface for communication using socket-can: + + "can_config": { + "can_interface": "can0", + "use_extended_id": false, + "tx_id": "0x7DF", + "rx_id": "0x7E8", + "socket_can_type": "SOCK_DGRAM", + "socket_can_protocol": "CAN_ISOTP" + } + +### can_interface: +Specifies the CAN interface to be used (e.g. can0). + +### use_extended_id: +A boolean setting that indicates whether extended CAN IDs should be used. The default is false, meaning standard CAN IDs will be used. + +### tx_id: +The CAN ID used for transmitting ISO-TP messages. + +### rx_id: +The CAN ID used for receiving messages. + +### socket_can_type: +Specifies the socket type for CAN communication, set to "SOCK_DGRAM", which is typical for datagram-based communication. + +### socket_can_protocol: +Defines the protocol for CAN communication. The default is "CAN_ISOTP", which refers to the ISO-15765 transport protocol, used to handle messages larger than a single CAN frame. + +## PID Table (pid_table) +This section defines the list of parameter requests (PIDs) that the system will use to fetch specific data over the CAN bus. The system supports generic CAN data requests, not just OBD-II PIDs. + + "pid_table": [ + { + "request_pid": "01 0D", + "response_pid": "41 0D", + "response_timeout_ms": 100, + "description": "Vehicle Speed", + "expected_response_length": 4, + "interval_ms": 500, + "dbc_signal_name": "S1PID0D_VehicleSpeed", + "vss_signal": { + "signal_name": "Vehicle.CAN.Speed", + "datatype": "float", + "unit": "km/h" + } + }, + { + //other PID definitions + } + ] + +### request_pid: + The PID used to request data. + +### response_pid: + The PID returned in the response. + +### response_timeout_ms: + Timeout in milliseconds for waiting for a response. + +### description: + A brief description of the data requested by the PID and returned in the response. + +### expected_response_length: + The expected number of bytes in the response. This helps validate the response format. + +### interval_ms: + The interval in milliseconds between repeated PID requests. + +### dbc_signal_name: + The name of the signal as defined in the DBC file. This helps map the raw CAN message data to a VSS signal datapoint. + +### vss_signal: + Defines the VSS signal datapoint details; these must match the VSS data model used by the Kuksa Databroker. + +### signal_name: + The VSS signal name (path) registered and fed to the databroker. + +### datatype: + The data type of the signal as defined in VSS. + +### unit: + The unit of the signal. + + + + + + + + + + + + + diff --git a/can-protocol-adapter/doc/v0.1.0-initial-version.md b/can-protocol-adapter/doc/v0.1.0-initial-version.md new file mode 100644 index 0000000..1430fc8 --- /dev/null +++ b/can-protocol-adapter/doc/v0.1.0-initial-version.md @@ -0,0 +1,20 @@ + ## Features Supported + +- JSON-based user configuration for CAN interfaces, requests/responses and VSS-to-PID mapping. +- Supports socket-can and ISOTP (ISO 15765-2) standards. +- CAN frame decoding using user-defined DBC files. +- User-defined VSS datapoint registration and publishing. +- Supports the kuksa.val.v1.VAL gRPC service interface. + +## Planned Enhancements + +- 29-bit CAN support. +- TLS authentication and JWT token based authorization. +- RAW CAN support. +- Test coverage enhancement.
+ +## Known issues + +- The application is tested only with multiple CAN requests sent at an interval of 50ms ,behaviour below this interval is not tested and is not guaranteed. + + diff --git a/can-protocol-adapter/lib/Cargo.toml b/can-protocol-adapter/lib/Cargo.toml new file mode 100644 index 0000000..76ba234 --- /dev/null +++ b/can-protocol-adapter/lib/Cargo.toml @@ -0,0 +1,32 @@ +######################################################################## +# Copyright (c) 2024 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +######################################################################## +[workspace] +resolver = "2" + +members = [ + "common", + "kuksa" +] + +[workspace.dependencies] +databroker-proto = { path = "../src/databroker-proto" } +# prost has no features +prost = "0.11" +# prost-types has no features +prost-types = "0.11" +# tokio does not enable features by default +tokio = "1.17.0" +# tokio-stream has no features +tokio-stream = "0.1.8" +tonic = { version = "0.11.0", default-features = false } +tonic-build = { version = "0.8", default-features = false } diff --git a/can-protocol-adapter/lib/common/Cargo.toml b/can-protocol-adapter/lib/common/Cargo.toml new file mode 100644 index 0000000..cdb532b --- /dev/null +++ b/can-protocol-adapter/lib/common/Cargo.toml @@ -0,0 +1,37 @@ +#******************************************************************************** +# Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +[package] +name = "kuksa-common" +version = "0.6.0-dev.0" +authors = ["Eclipse KUKSA Project"] +edition = "2021" +license = "Apache-2.0" + +[dependencies] +databroker-proto = { workspace = true } +tonic = { workspace = true, features = ["transport", "channel"] } +tokio = { workspace = true, features = [ + "macros", +] } +tokio-stream = { workspace = true, features = ["sync"] } +http = "0.2.8" + +[lib] +name = "kuksa_common" +crate-type = ["lib"] +path = "src/lib.rs" + +[features] +default = ["tls"] +tls = ["tonic/tls"] diff --git a/can-protocol-adapter/lib/common/src/lib.rs b/can-protocol-adapter/lib/common/src/lib.rs new file mode 100644 index 0000000..ac6a739 --- /dev/null +++ b/can-protocol-adapter/lib/common/src/lib.rs @@ -0,0 +1,236 @@ +/******************************************************************************** +* Copyright (c) 2023 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. 
+* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +use std::convert::TryFrom; + +use databroker_proto::kuksa::val::v1::Error; +use http::Uri; +use tokio_stream::wrappers::BroadcastStream; +use tonic::transport::Channel; + +#[derive(Debug)] +pub struct Client { + uri: Uri, + token: Option, + #[cfg(feature = "tls")] + tls_config: Option, + channel: Option, + connection_state_subs: Option>, +} + +#[derive(Clone)] +pub enum ConnectionState { + Connected, + Disconnected, +} + +#[derive(Debug, Clone)] +pub enum ClientError { + Connection(String), + Status(tonic::Status), + Function(Vec), +} + +impl std::error::Error for ClientError {} +impl std::fmt::Display for ClientError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ClientError::Connection(con) => f.pad(con), + ClientError::Status(status) => f.pad(&format!("{status}")), + ClientError::Function(err) => { + let formatted_result: String = err + .iter() + .map(|element| { + format!( + "code: {}, message: {}, reason: {}", + element.code, element.message, element.reason + ) + }) + .collect::>() + .join(", "); // Join the elements with a comma and space + + f.pad(&formatted_result) + } + } + } +} + +#[derive(Debug)] +pub enum TokenError { + MalformedTokenError(String), +} + +impl std::error::Error for TokenError {} +impl std::fmt::Display for TokenError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TokenError::MalformedTokenError(msg) => f.pad(msg), + } + } +} + +pub fn to_uri(uri: impl AsRef) -> Result { + let uri = uri + .as_ref() + .parse::() + .map_err(|err| format!("{err}"))?; + let mut parts = uri.into_parts(); + + if parts.scheme.is_none() { + parts.scheme = Some("http".parse().expect("http should be valid scheme")); + } + + match &parts.authority { + Some(_authority) => { + // match (authority.port_u16(), port) { + // (Some(uri_port), Some(port)) => { + // if uri_port != port { + // parts.authority = format!("{}:{}", authority.host(), port) + // .parse::() + // .map_err(|err| format!("{}", err)) + // .ok(); + // } + // } + // (_, _) => {} + // } + } + None => return Err("No server uri specified".to_owned()), + } + parts.path_and_query = Some("".parse().expect("uri path should be empty string")); + tonic::transport::Uri::from_parts(parts).map_err(|err| format!("{err}")) +} + +impl Client { + pub fn new(uri: Uri) -> Self { + Client { + uri, + token: None, + #[cfg(feature = "tls")] + tls_config: None, + channel: None, + connection_state_subs: None, + } + } + + pub fn get_uri(&self) -> String { + self.uri.to_string() + } + + #[cfg(feature = "tls")] + pub fn set_tls_config(&mut self, tls_config: tonic::transport::ClientTlsConfig) { + self.tls_config = Some(tls_config); + } + + pub fn set_access_token(&mut self, token: impl AsRef) -> Result<(), TokenError> { + match tonic::metadata::AsciiMetadataValue::try_from(&format!("Bearer {}", token.as_ref())) { + Ok(token) => { + self.token = Some(token); + Ok(()) + } + Err(err) => Err(TokenError::MalformedTokenError(format!("{err}"))), + } + } + + pub fn is_connected(&self) -> bool { + self.channel.is_some() + } + + pub fn subscribe_to_connection_state(&mut self) -> BroadcastStream { + match &self.connection_state_subs { + Some(stream) => 
BroadcastStream::new(stream.subscribe()), + None => { + let (tx, rx1) = tokio::sync::broadcast::channel(1); + self.connection_state_subs = Some(tx); + BroadcastStream::new(rx1) + } + } + } + + async fn try_create_channel(&mut self) -> Result<&Channel, ClientError> { + #[cfg(feature = "tls")] + let mut builder = tonic::transport::Channel::builder(self.uri.clone()); + #[cfg(not(feature = "tls"))] + let builder = tonic::transport::Channel::builder(self.uri.clone()); + + #[cfg(feature = "tls")] + if let Some(tls_config) = &self.tls_config { + match builder.tls_config(tls_config.clone()) { + Ok(new_builder) => { + builder = new_builder; + } + Err(err) => { + return Err(ClientError::Connection(format!( + "Failed to configure TLS: {err}" + ))); + } + } + } + + match builder.connect().await { + Ok(channel) => { + if let Some(subs) = &self.connection_state_subs { + subs.send(ConnectionState::Connected).map_err(|err| { + ClientError::Connection(format!( + "Failed to notify connection state change: {err}" + )) + })?; + } + self.channel = Some(channel); + Ok(self.channel.as_ref().expect("Channel should exist")) + } + Err(err) => { + if let Some(subs) = &self.connection_state_subs { + subs.send(ConnectionState::Disconnected).unwrap_or_default(); + } + Err(ClientError::Connection(format!( + "Failed to connect to {}: {}", + self.uri, err + ))) + } + } + } + + pub async fn try_connect(&mut self) -> Result<(), ClientError> { + self.try_create_channel().await?; + Ok(()) + } + + pub async fn try_connect_to(&mut self, uri: tonic::transport::Uri) -> Result<(), ClientError> { + self.uri = uri; + self.try_create_channel().await?; + Ok(()) + } + + pub async fn get_channel(&mut self) -> Result<&Channel, ClientError> { + if self.channel.is_none() { + self.try_create_channel().await + } else { + match &self.channel { + Some(channel) => Ok(channel), + None => unreachable!(), + } + } + } + + pub fn get_auth_interceptor( + &mut self, + ) -> impl FnMut(tonic::Request<()>) -> Result, tonic::Status> + '_ { + move |mut req: tonic::Request<()>| { + if let Some(token) = &self.token { + // debug!("Inserting auth token: {:?}", token); + req.metadata_mut().insert("authorization", token.clone()); + } + Ok(req) + } + } +} diff --git a/can-protocol-adapter/lib/kuksa/Cargo.toml b/can-protocol-adapter/lib/kuksa/Cargo.toml new file mode 100644 index 0000000..c1796cb --- /dev/null +++ b/can-protocol-adapter/lib/kuksa/Cargo.toml @@ -0,0 +1,38 @@ +#******************************************************************************** +# Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +[package] +name = "kuksa" +version = "0.6.0-dev.0" +authors = ["Eclipse KUKSA Project"] +edition = "2021" +license = "Apache-2.0" + +[dependencies] +kuksa-common = { path = "../common"} +databroker-proto = { workspace = true } +tonic = { workspace = true, features = ["transport", "channel"] } +tokio = { workspace = true, features = [ + "macros", +] } +tokio-stream = { workspace = true, features = ["sync"] } +http = "0.2.8" + +[lib] +name = "kuksa" +crate-type = ["lib"] +path = "src/lib.rs" + +[features] +default = ["tls"] +tls = ["tonic/tls"] diff --git a/can-protocol-adapter/lib/kuksa/src/lib.rs b/can-protocol-adapter/lib/kuksa/src/lib.rs new file mode 100644 index 0000000..8bd1a15 --- /dev/null +++ b/can-protocol-adapter/lib/kuksa/src/lib.rs @@ -0,0 +1,356 @@ +/******************************************************************************** +* Copyright (c) 2023 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +use http::Uri; +use std::collections::HashMap; + +pub use databroker_proto::kuksa::val::{self as proto, v1::DataEntry}; + +pub use kuksa_common::{Client, ClientError}; + +#[derive(Debug)] +pub struct KuksaClient { + pub basic_client: Client, +} + +impl KuksaClient { + pub fn new(uri: Uri) -> Self { + KuksaClient { + basic_client: Client::new(uri), + } + } + + async fn set(&mut self, entry: DataEntry, _fields: Vec) -> Result<(), ClientError> { + let mut client = proto::v1::val_client::ValClient::with_interceptor( + self.basic_client.get_channel().await?.clone(), + self.basic_client.get_auth_interceptor(), + ); + let set_request = proto::v1::SetRequest { + updates: vec![proto::v1::EntryUpdate { + entry: Some(entry), + fields: _fields, + }], + }; + match client.set(set_request).await { + Ok(response) => { + let message = response.into_inner(); + let mut errors: Vec = Vec::new(); + if let Some(err) = message.error { + errors.push(err); + } + for error in message.errors { + if let Some(err) = error.error { + errors.push(err); + } + } + if errors.is_empty() { + Ok(()) + } else { + Err(ClientError::Function(errors)) + } + } + Err(err) => Err(ClientError::Status(err)), + } + } + + async fn get( + &mut self, + path: &str, + view: proto::v1::View, + _fields: Vec, + ) -> Result, ClientError> { + let mut client = proto::v1::val_client::ValClient::with_interceptor( + self.basic_client.get_channel().await?.clone(), + self.basic_client.get_auth_interceptor(), + ); + + let get_request = proto::v1::GetRequest { + entries: vec![proto::v1::EntryRequest { + path: path.to_string(), + view: view.into(), + fields: _fields, + }], + }; + + match client.get(get_request).await { + Ok(response) => { + let message = response.into_inner(); + let mut errors = Vec::new(); + if let Some(err) = message.error { + errors.push(err); + } + for error in message.errors { + if let Some(err) = error.error { + 
errors.push(err); + } + } + if !errors.is_empty() { + Err(ClientError::Function(errors)) + } else { + // since there is only one DataEntry in the vector return only the according DataEntry + Ok(message.entries.clone()) + } + } + Err(err) => Err(ClientError::Status(err)), + } + } + + pub async fn get_metadata(&mut self, paths: Vec<&str>) -> Result, ClientError> { + let mut metadata_result = Vec::new(); + + for path in paths { + match self + .get( + path, + proto::v1::View::Metadata, + vec![proto::v1::Field::Metadata.into()], + ) + .await + { + Ok(mut entry) => metadata_result.append(&mut entry), + Err(err) => return Err(err), + } + } + + Ok(metadata_result) + } + + pub async fn get_current_values( + &mut self, + paths: Vec, + ) -> Result, ClientError> { + let mut get_result = Vec::new(); + + for path in paths { + match self + .get( + &path, + proto::v1::View::CurrentValue, + vec![ + proto::v1::Field::Value.into(), + proto::v1::Field::Metadata.into(), + ], + ) + .await + { + Ok(mut entry) => get_result.append(&mut entry), + Err(err) => return Err(err), + } + } + + Ok(get_result) + } + + pub async fn get_target_values( + &mut self, + paths: Vec, + ) -> Result, ClientError> { + let mut get_result = Vec::new(); + + for path in paths { + match self + .get( + &path, + proto::v1::View::TargetValue, + vec![ + proto::v1::Field::ActuatorTarget.into(), + proto::v1::Field::Metadata.into(), + ], + ) + .await + { + Ok(mut entry) => get_result.append(&mut entry), + Err(err) => return Err(err), + } + } + + Ok(get_result) + } + + pub async fn set_current_values( + &mut self, + datapoints: HashMap, + ) -> Result<(), ClientError> { + for (path, datapoint) in datapoints { + match self + .set( + proto::v1::DataEntry { + path: path.clone(), + value: Some(datapoint), + actuator_target: None, + metadata: None, + }, + vec![ + proto::v1::Field::Value.into(), + proto::v1::Field::Path.into(), + ], + ) + .await + { + Ok(_) => { + continue; + } + Err(err) => return Err(err), + } + } + + Ok(()) + } + + pub async fn set_target_values( + &mut self, + datapoints: HashMap, + ) -> Result<(), ClientError> { + for (path, datapoint) in datapoints { + match self + .set( + proto::v1::DataEntry { + path: path.clone(), + value: None, + actuator_target: Some(datapoint), + metadata: None, + }, + vec![ + proto::v1::Field::ActuatorTarget.into(), + proto::v1::Field::Path.into(), + ], + ) + .await + { + Ok(_) => { + continue; + } + Err(err) => return Err(err), + } + } + + Ok(()) + } + + pub async fn set_metadata( + &mut self, + metadatas: HashMap, + ) -> Result<(), ClientError> { + for (path, metadata) in metadatas { + match self + .set( + proto::v1::DataEntry { + path: path.clone(), + value: None, + actuator_target: None, + metadata: Some(metadata), + }, + vec![ + proto::v1::Field::Metadata.into(), + proto::v1::Field::Path.into(), + ], + ) + .await + { + Ok(_) => { + continue; + } + Err(err) => return Err(err), + } + } + + Ok(()) + } + + pub async fn subscribe_current_values( + &mut self, + paths: Vec<&str>, + ) -> Result, ClientError> { + let mut client = proto::v1::val_client::ValClient::with_interceptor( + self.basic_client.get_channel().await?.clone(), + self.basic_client.get_auth_interceptor(), + ); + + let mut entries = Vec::new(); + for path in paths { + entries.push(proto::v1::SubscribeEntry { + path: path.to_string(), + view: proto::v1::View::CurrentValue.into(), + fields: vec![ + proto::v1::Field::Value.into(), + proto::v1::Field::Metadata.into(), + ], + }) + } + + let req = proto::v1::SubscribeRequest { entries }; + + match 
client.subscribe(req).await { + Ok(response) => Ok(response.into_inner()), + Err(err) => Err(ClientError::Status(err)), + } + } + + //masking subscribe curent values with subscribe due to plugability + pub async fn subscribe( + &mut self, + paths: Vec<&str>, + ) -> Result, ClientError> { + self.subscribe_current_values(paths).await + } + + pub async fn subscribe_target_values( + &mut self, + paths: Vec<&str>, + ) -> Result, ClientError> { + let mut client = proto::v1::val_client::ValClient::with_interceptor( + self.basic_client.get_channel().await?.clone(), + self.basic_client.get_auth_interceptor(), + ); + let mut entries = Vec::new(); + for path in paths { + entries.push(proto::v1::SubscribeEntry { + path: path.to_string(), + view: proto::v1::View::TargetValue.into(), + fields: vec![proto::v1::Field::ActuatorTarget.into()], + }) + } + + let req = proto::v1::SubscribeRequest { entries }; + + match client.subscribe(req).await { + Ok(response) => Ok(response.into_inner()), + Err(err) => Err(ClientError::Status(err)), + } + } + + pub async fn subscribe_metadata( + &mut self, + paths: Vec, + ) -> Result, ClientError> { + let mut client = proto::v1::val_client::ValClient::with_interceptor( + self.basic_client.get_channel().await?.clone(), + self.basic_client.get_auth_interceptor(), + ); + let mut entries = Vec::new(); + for path in paths { + entries.push(proto::v1::SubscribeEntry { + path: path.to_string(), + view: proto::v1::View::Metadata.into(), + fields: vec![proto::v1::Field::Metadata.into()], + }) + } + + let req = proto::v1::SubscribeRequest { entries }; + + match client.subscribe(req).await { + Ok(response) => Ok(response.into_inner()), + Err(err) => Err(ClientError::Status(err)), + } + } +} diff --git a/can-protocol-adapter/proto/README.md b/can-protocol-adapter/proto/README.md new file mode 100755 index 0000000..8d8e558 --- /dev/null +++ b/can-protocol-adapter/proto/README.md @@ -0,0 +1,14 @@ +# KUKSA.val Proto files + +The files in this directory shall correspond (or at least be compatible) to the Proto-files in +[KUKSA.val](https://github.com/eclipse/kuksa.val) for the KUKSA.val Databroker version used by this repository. + +## kuksa directory + +The files in this directory are copies of the files in +[kuksa.val/proto](https://github.com/eclipse/kuksa.val/tree/master/proto/kuksa/val/v1) directory. + +## sdv directory + +The files in this directory are copies of the files in +[kuksa.val/kuksa_databroker/proto/sdv](https://github.com/eclipse/kuksa.val/tree/master/kuksa_databroker/proto/sdv/databroker/v1) directory. diff --git a/can-protocol-adapter/proto/kuksa/val/v1/README.md b/can-protocol-adapter/proto/kuksa/val/v1/README.md new file mode 100644 index 0000000..9baba4b --- /dev/null +++ b/can-protocol-adapter/proto/kuksa/val/v1/README.md @@ -0,0 +1,6 @@ +# kuksa.val.v1 protobuf API + +This directory contain a Protobuf API supported by KUKSA.val Databroker, KUKSA.val Python Client and KUKSA.val Go Client. + +This API is under development and will eventually replace the +[sdv.databroker.v1](https://github.com/eclipse-kuksa/kuksa-databroker/tree/main/proto/sdv/databroker/v1) API. 
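+
+## Usage example (Rust)
+
+The snippet below is a minimal, illustrative sketch of feeding a current value through this API using the kuksa client wrapper bundled under lib/kuksa. It assumes a tokio runtime; the broker address, signal path and value are placeholders, not part of the adapter itself.
+
+    use std::collections::HashMap;
+    use kuksa::{proto, KuksaClient};
+
+    #[tokio::main]
+    async fn main() -> Result<(), Box<dyn std::error::Error>> {
+        // Connect to a (placeholder) databroker address.
+        let uri = "http://127.0.0.1:55555".parse::<http::Uri>()?;
+        let mut client = KuksaClient::new(uri);
+
+        // Build a kuksa.val.v1 Datapoint carrying a float value.
+        let datapoint = proto::v1::Datapoint {
+            timestamp: None,
+            value: Some(proto::v1::datapoint::Value::Float(50.0)),
+        };
+
+        // Publish it as the current value of a (placeholder) VSS path.
+        let mut updates = HashMap::new();
+        updates.insert("Vehicle.OBD.Speed".to_string(), datapoint);
+        client.set_current_values(updates).await?;
+        Ok(())
+    }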
diff --git a/can-protocol-adapter/proto/kuksa/val/v1/types.proto b/can-protocol-adapter/proto/kuksa/val/v1/types.proto new file mode 100644 index 0000000..8914e7a --- /dev/null +++ b/can-protocol-adapter/proto/kuksa/val/v1/types.proto @@ -0,0 +1,288 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +syntax = "proto3"; + +// I added V1 as in databroker. Is this good practice? +package kuksa.val.v1; +import "google/protobuf/timestamp.proto"; + +option go_package = "kuksa/val/v1"; + +// Describes a VSS entry +// When requesting an entry, the amount of information returned can +// be controlled by specifying either a `View` or a set of `Field`s. +message DataEntry { + // Defines the full VSS path of the entry. + string path = 1; // [field: FIELD_PATH] + + // The value (datapoint) + Datapoint value = 2; // [field: FIELD_VALUE] + + // Actuator target (only used if the entry is an actuator) + Datapoint actuator_target = 3; // [field: FIELD_ACTUATOR_TARGET] + + // Metadata for this entry + Metadata metadata = 10; // [field: FIELD_METADATA] +} + +message Datapoint { + google.protobuf.Timestamp timestamp = 1; + + oneof value { + string string = 11; + bool bool = 12; + sint32 int32 = 13; + sint64 int64 = 14; + uint32 uint32 = 15; + uint64 uint64 = 16; + float float = 17; + double double = 18; + StringArray string_array = 21; + BoolArray bool_array = 22; + Int32Array int32_array = 23; + Int64Array int64_array = 24; + Uint32Array uint32_array = 25; + Uint64Array uint64_array = 26; + FloatArray float_array = 27; + DoubleArray double_array = 28; + } +} + +message Metadata { + // Data type + // The VSS data type of the entry (i.e. the value, min, max etc). + // + // NOTE: protobuf doesn't have int8, int16, uint8 or uint16 which means + // that these values must be serialized as int32 and uint32 respectively. + DataType data_type = 11; // [field: FIELD_METADATA_DATA_TYPE] + + // Entry type + EntryType entry_type = 12; // [field: FIELD_METADATA_ENTRY_TYPE] + + // Description + // Describes the meaning and content of the entry. + optional string description = 13; // [field: FIELD_METADATA_DESCRIPTION] + + // Comment [optional] + // A comment can be used to provide additional informal information + // on a entry. + optional string comment = 14; // [field: FIELD_METADATA_COMMENT] + + // Deprecation [optional] + // Whether this entry is deprecated. Can contain recommendations of what + // to use instead. + optional string deprecation = 15; // [field: FIELD_METADATA_DEPRECATION] + + // Unit [optional] + // The unit of measurement + optional string unit = 16; // [field: FIELD_METADATA_UNIT] + + // Value restrictions [optional] + // Restrict which values are allowed. + // Only restrictions matching the DataType {datatype} above are valid. 
+ ValueRestriction value_restriction = 17; // [field: FIELD_METADATA_VALUE_RESTRICTION] + + // Entry type specific metadata + oneof entry_specific { + Actuator actuator = 20; // [field: FIELD_METADATA_ACTUATOR] + Sensor sensor = 30; // [field: FIELD_METADATA_SENSOR] + Attribute attribute = 40; // [field: FIELD_METADATA_ATTRIBUTE] + } +} + +/////////////////////// +// Actuator specific fields +message Actuator { + // Nothing for now +} + +//////////////////////// +// Sensor specific +message Sensor { + // Nothing for now +} + +//////////////////////// +// Attribute specific +message Attribute { + // Nothing for now. +} + +// Value restriction +// +// One ValueRestriction{type} for each type, since +// they don't make sense unless the types match +// +message ValueRestriction { + oneof type { + ValueRestrictionString string = 21; + // For signed VSS integers + ValueRestrictionInt signed = 22; + // For unsigned VSS integers + ValueRestrictionUint unsigned = 23; + // For floating point VSS values (float and double) + ValueRestrictionFloat floating_point = 24; + } +} + +message ValueRestrictionInt { + optional sint64 min = 1; + optional sint64 max = 2; + repeated sint64 allowed_values = 3; +} + +message ValueRestrictionUint { + optional uint64 min = 1; + optional uint64 max = 2; + repeated uint64 allowed_values = 3; +} + +message ValueRestrictionFloat { + optional double min = 1; + optional double max = 2; + + // allowed for doubles/floats not recommended + repeated double allowed_values = 3; +} + +// min, max doesn't make much sense for a string +message ValueRestrictionString { + repeated string allowed_values = 3; +} + +// VSS Data type of a signal +// +// Protobuf doesn't support int8, int16, uint8 or uint16. +// These are mapped to int32 and uint32 respectively. +// +enum DataType { + DATA_TYPE_UNSPECIFIED = 0; + DATA_TYPE_STRING = 1; + DATA_TYPE_BOOLEAN = 2; + DATA_TYPE_INT8 = 3; + DATA_TYPE_INT16 = 4; + DATA_TYPE_INT32 = 5; + DATA_TYPE_INT64 = 6; + DATA_TYPE_UINT8 = 7; + DATA_TYPE_UINT16 = 8; + DATA_TYPE_UINT32 = 9; + DATA_TYPE_UINT64 = 10; + DATA_TYPE_FLOAT = 11; + DATA_TYPE_DOUBLE = 12; + DATA_TYPE_TIMESTAMP = 13; + DATA_TYPE_STRING_ARRAY = 20; + DATA_TYPE_BOOLEAN_ARRAY = 21; + DATA_TYPE_INT8_ARRAY = 22; + DATA_TYPE_INT16_ARRAY = 23; + DATA_TYPE_INT32_ARRAY = 24; + DATA_TYPE_INT64_ARRAY = 25; + DATA_TYPE_UINT8_ARRAY = 26; + DATA_TYPE_UINT16_ARRAY = 27; + DATA_TYPE_UINT32_ARRAY = 28; + DATA_TYPE_UINT64_ARRAY = 29; + DATA_TYPE_FLOAT_ARRAY = 30; + DATA_TYPE_DOUBLE_ARRAY = 31; + DATA_TYPE_TIMESTAMP_ARRAY = 32; +} + +// Entry type +enum EntryType { + ENTRY_TYPE_UNSPECIFIED = 0; + ENTRY_TYPE_ATTRIBUTE = 1; + ENTRY_TYPE_SENSOR = 2; + ENTRY_TYPE_ACTUATOR = 3; +} + +// A `View` specifies a set of fields which should +// be populated in a `DataEntry` (in a response message) +enum View { + VIEW_UNSPECIFIED = 0; // Unspecified. Equivalent to VIEW_CURRENT_VALUE unless `fields` are explicitly set. + VIEW_CURRENT_VALUE = 1; // Populate DataEntry with value. + VIEW_TARGET_VALUE = 2; // Populate DataEntry with actuator target. + VIEW_METADATA = 3; // Populate DataEntry with metadata. + VIEW_FIELDS = 10; // Populate DataEntry only with requested fields. + VIEW_ALL = 20; // Populate DataEntry with everything. +} + +// A `Field` corresponds to a specific field of a `DataEntry`. +// +// It can be used to: +// * populate only specific fields of a `DataEntry` response. +// * specify which fields of a `DataEntry` should be set as +// part of a `Set` request. 
+// * subscribe to only specific fields of a data entry. +// * convey which fields of an updated `DataEntry` have changed. +enum Field { + FIELD_UNSPECIFIED = 0; // "*" i.e. everything + FIELD_PATH = 1; // path + FIELD_VALUE = 2; // value + FIELD_ACTUATOR_TARGET = 3; // actuator_target + FIELD_METADATA = 10; // metadata.* + FIELD_METADATA_DATA_TYPE = 11; // metadata.data_type + FIELD_METADATA_DESCRIPTION = 12; // metadata.description + FIELD_METADATA_ENTRY_TYPE = 13; // metadata.entry_type + FIELD_METADATA_COMMENT = 14; // metadata.comment + FIELD_METADATA_DEPRECATION = 15; // metadata.deprecation + FIELD_METADATA_UNIT = 16; // metadata.unit + FIELD_METADATA_VALUE_RESTRICTION = 17; // metadata.value_restriction.* + FIELD_METADATA_ACTUATOR = 20; // metadata.actuator.* + FIELD_METADATA_SENSOR = 30; // metadata.sensor.* + FIELD_METADATA_ATTRIBUTE = 40; // metadata.attribute.* +} + +// Error response shall be an HTTP-like code. +// Should follow https://www.w3.org/TR/viss2-transport/#status-codes. +message Error { + uint32 code = 1; + string reason = 2; + string message = 3; +} + +// Used in get/set requests to report errors for specific entries +message DataEntryError { + string path = 1; // vss path + Error error = 2; +} + +message StringArray { + repeated string values = 1; +} + +message BoolArray { + repeated bool values = 1; +} + +message Int32Array { + repeated sint32 values = 1; +} + +message Int64Array { + repeated sint64 values = 1; +} + +message Uint32Array { + repeated uint32 values = 1; +} + +message Uint64Array { + repeated uint64 values = 1; +} + +message FloatArray { + repeated float values = 1; +} + +message DoubleArray { + repeated double values = 1; +} diff --git a/can-protocol-adapter/proto/kuksa/val/v1/val.proto b/can-protocol-adapter/proto/kuksa/val/v1/val.proto new file mode 100644 index 0000000..dde49b5 --- /dev/null +++ b/can-protocol-adapter/proto/kuksa/val/v1/val.proto @@ -0,0 +1,126 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +syntax = "proto3"; + +package kuksa.val.v1; + +option go_package = "kuksa/val/v1"; + +import "kuksa/val/v1/types.proto"; + +// Note on authorization: +// Tokens (auth-token or auth-uuid) are sent as (GRPC / http2) metadata. +// +// The auth-token is a JWT compliant token as the examples found here: +// https://github.com/eclipse-kuksa/kuksa-databroker/tree/main/certificates/jwt +// +// See also https://github.com/eclipse-kuksa/kuksa-databroker/blob/main/doc/authorization.md#jwt-access-token +// +// Upon reception of auth-token, server shall generate an auth-uuid in metadata +// that the client can use instead of auth-token in subsequent calls. + +service VAL { + // Get entries + rpc Get(GetRequest) returns (GetResponse); + + // Set entries + rpc Set(SetRequest) returns (SetResponse); + + rpc StreamedUpdate(stream StreamedUpdateRequest) returns (stream StreamedUpdateResponse); + + // Subscribe to a set of entries + // + // Returns a stream of notifications. 
+ // + // InvalidArgument is returned if the request is malformed. + rpc Subscribe(SubscribeRequest) returns (stream SubscribeResponse); + + // Shall return information that allows the client to determine + // what server/server implementation/version it is talking to + // eg. kuksa-databroker 0.5.1 + rpc GetServerInfo(GetServerInfoRequest) returns (GetServerInfoResponse); +} + +// Define which data we want +message EntryRequest { + string path = 1; + View view = 2; + repeated Field fields = 3; +} + +// Request a set of entries. +message GetRequest { + repeated EntryRequest entries = 1; +} + +// Global errors are specified in `error`. +// Errors for individual entries are specified in `errors`. +message GetResponse { + repeated DataEntry entries = 1; + repeated DataEntryError errors = 2; + Error error = 3; +} + +// Define the data we want to set +message EntryUpdate { + DataEntry entry = 1; + repeated Field fields = 2; +} + +// A list of entries to be updated +message SetRequest { + repeated EntryUpdate updates = 1; +} + +// Global errors are specified in `error`. +// Errors for individual entries are specified in `errors`. +message SetResponse { + Error error = 1; + repeated DataEntryError errors = 2; +} + +message StreamedUpdateRequest { + repeated EntryUpdate updates = 1; +} + +message StreamedUpdateResponse { + Error error = 1; + repeated DataEntryError errors = 2; +} + +// Define what to subscribe to +message SubscribeEntry { + string path = 1; + View view = 2; + repeated Field fields = 3; +} + +// Subscribe to changes in datapoints. +message SubscribeRequest { + repeated SubscribeEntry entries = 1; +} + +// A subscription response +message SubscribeResponse { + repeated EntryUpdate updates = 1; +} + +message GetServerInfoRequest { + // Nothing yet +} + +message GetServerInfoResponse { + string name = 1; + string version = 2; +} diff --git a/can-protocol-adapter/proto/sdv/databroker/v1/README.md b/can-protocol-adapter/proto/sdv/databroker/v1/README.md new file mode 100644 index 0000000..a669e5a --- /dev/null +++ b/can-protocol-adapter/proto/sdv/databroker/v1/README.md @@ -0,0 +1,12 @@ +# sdv.databroker.v1 protobuf API + +This directory contain a Protobuf API supported by KUKSA.val Databroker. + +As of today KUKSA.val Databroker supports both this API and the +[kuksa.val.v1](https://github.com/eclipse-kuksa/kuksa-databroker/tree/main/proto/kuksa/val/v1) API. +The [kuksa.val.v1](https://github.com/eclipse-kuksa/kuksa-databroker/tree/main/proto/kuksa/val/v1) API is the newer API and is still +in development. It does not yet support all features supported by this API. + +This API may in the future be deprecated. It is recommended to use +the [kuksa.val.v1](https://github.com/eclipse-kuksa/kuksa-databroker/tree/main/proto/kuksa/val/v1) API, unless you need +functionality currently only provided by this API. diff --git a/can-protocol-adapter/proto/sdv/databroker/v1/broker.proto b/can-protocol-adapter/proto/sdv/databroker/v1/broker.proto new file mode 100644 index 0000000..f8ffaf8 --- /dev/null +++ b/can-protocol-adapter/proto/sdv/databroker/v1/broker.proto @@ -0,0 +1,96 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +syntax = "proto3"; + +package sdv.databroker.v1; + +import "sdv/databroker/v1/types.proto"; + +service Broker { + // Request a set of datapoints (values) + // + // Returns a list of requested data points. + // + // InvalidArgument is returned if the request is malformed. + rpc GetDatapoints(GetDatapointsRequest) returns (GetDatapointsReply); + + // Set a datapoint (values) + rpc SetDatapoints(SetDatapointsRequest) returns (SetDatapointsReply); + + // Subscribe to a set of data points or conditional expressions + // using the Data Broker Query Syntax (described in QUERY.md) + // + // Returns a stream of replies. + // + // InvalidArgument is returned if the request is malformed. + rpc Subscribe(SubscribeRequest) returns (stream SubscribeReply); + + // Request the metadata of a set of datapoints + // + // Returns metadata of the requested data points that exist. + rpc GetMetadata(GetMetadataRequest) returns (GetMetadataReply); +} + +message GetDatapointsRequest { + // A list of requested data points. + repeated string datapoints = 1; +} + +message GetDatapointsReply { + // Contains the values of the requested data points. + // If a requested data point is not available, the corresponding Datapoint + // will have the respective failure value set. + map datapoints = 1; +} + +message SetDatapointsRequest { + // A map of data points to set + map datapoints = 1; +} + +message SetDatapointsReply { + // A map of errors (if any) + map errors = 1; +} + +message SubscribeRequest { + // Subscribe to a set of data points (or expressions) described + // by the provided query. + // The query syntax is a subset of SQL and is described in more + // detail in the QUERY.md file. + string query = 2; +} + +message SubscribeReply { + // Contains the fields specified by the query. + // If a requested data point value is not available, the corresponding + // Datapoint will have it's respective failure value set. + map fields = 1; +} + +message GetMetadataRequest { + // Request metadata for a list of data points referenced by their names. + // e.g. "Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed". + // + // If no names are provided, metadata for all known data points will be + // returned. + repeated string names = 1; +} + +message GetMetadataReply { + // Contains metadata of the requested data points. If a data point + // doesn't exist (i.e. not known to the Data Broker) the corresponding + // Metadata isn't part of the returned list. + repeated Metadata list = 1; +} diff --git a/can-protocol-adapter/proto/sdv/databroker/v1/collector.proto b/can-protocol-adapter/proto/sdv/databroker/v1/collector.proto new file mode 100644 index 0000000..c67a5de --- /dev/null +++ b/can-protocol-adapter/proto/sdv/databroker/v1/collector.proto @@ -0,0 +1,97 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. 
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +syntax = "proto3"; + +import "sdv/databroker/v1/types.proto"; + +package sdv.databroker.v1; + +service Collector { + // Register new datapoint (metadata) + // + // If the registration of at least one of the passed data point fails, the overall registration + // is rejected and the gRPC status code ABORTED is returned (to indicate the "aborted" registration). + // The details, which data point(s) caused the failure and the reason, is passed in back in human- + // readable form in the status message. Possible failure resaons are: + // * PERMISSION_DENIED - Not allowed to register this name + // * ALREADY_REGISTERED - The data point is already registered by some other feeder + // * RE_REGISTRATION_MISMATCH - Already registered by this feeder but with differing metadata + // * INVALID_NAME - The passed name of the datapoint has an invalid structure + // * INVALID_VALUE_TYPE - The passed ValueType is not supported + // * INVALID_CHANGE_TYPE - The passed ChangeType is not supported + rpc RegisterDatapoints(RegisterDatapointsRequest) returns (RegisterDatapointsReply); + + // Provide a set of updated datapoint values to the broker. + // This is the unary equivalent of `StreamDatapoints` below and is better suited for cases + // where the frequency of updates is rather low. + // + // NOTE: The values provided in a single request are handled as a single update in the + // data broker. This ensures that any clients requesting (or subscribing to) a set of + // datapoints will get a consistent update, i.e. that either all values are updated or + // none are. + // + // Returns: any errors encountered updating the datapoints + // + rpc UpdateDatapoints(UpdateDatapointsRequest) returns (UpdateDatapointsReply); + + // Provide a stream with updated datapoint values to the broker. + // This is the streaming equivalent of `UpdateDatapoints` above and is better suited for + // cases where the frequency of updates is high. + // + // NOTE: The values provided in a single request are handled as a single update in the + // data broker. This ensures that any clients requesting (or subscribing to) a set of + // datapoints will get a consistent update, i.e. that either all values are updated or + // none are. + // + // Returns: any errors encountered updating the datapoints + // + rpc StreamDatapoints(stream StreamDatapointsRequest) returns (stream StreamDatapointsReply); +} + +message UpdateDatapointsRequest { + map datapoints = 1; +} + +message UpdateDatapointsReply { + map errors = 1; // If empty, everything went well +} + +message StreamDatapointsRequest { + map datapoints = 1; +} + +message StreamDatapointsReply { + map errors = 1; // If empty, everything went well +} + +message RegisterDatapointsRequest { + repeated RegistrationMetadata list = 1; +} + +message RegistrationMetadata { + // Name of the data point + // (e.g. 
"Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed") + string name = 1; + DataType data_type = 2; + string description = 3; + ChangeType change_type = 4; + + // int32 min_update_hz = 10; // Only for CONTINUOUS + // int32 max_update_hz = 11; // Only for CONTINUOUS +}; + +message RegisterDatapointsReply { + // Maps each data point name passed in RegisterDatapointsRequest to a data point id + map results = 1; +} diff --git a/can-protocol-adapter/proto/sdv/databroker/v1/types.proto b/can-protocol-adapter/proto/sdv/databroker/v1/types.proto new file mode 100644 index 0000000..4498809 --- /dev/null +++ b/can-protocol-adapter/proto/sdv/databroker/v1/types.proto @@ -0,0 +1,169 @@ +/******************************************************************************** + * Copyright (c) 2022 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ + +syntax = "proto3"; + +import "google/protobuf/timestamp.proto"; + +package sdv.databroker.v1; + +// Data type of a signal +// +// Protobuf doesn't support int8, int16, uint8 or uint16. +// These are mapped to sint32 and uint32 respectively. +// +enum DataType { + STRING = 0; + BOOL = 1; + INT8 = 2; + INT16 = 3; + INT32 = 4; + INT64 = 5; + UINT8 = 6; + UINT16 = 7; + UINT32 = 8; + UINT64 = 9; + FLOAT = 10; + DOUBLE = 11; + STRING_ARRAY = 20; + BOOL_ARRAY = 21; + INT8_ARRAY = 22; + INT16_ARRAY = 23; + INT32_ARRAY = 24; + INT64_ARRAY = 25; + UINT8_ARRAY = 26; + UINT16_ARRAY = 27; + UINT32_ARRAY = 28; + UINT64_ARRAY = 29; + FLOAT_ARRAY = 30; + DOUBLE_ARRAY = 31; +} + +enum DatapointError { + UNKNOWN_DATAPOINT = 0; + INVALID_TYPE = 1; + ACCESS_DENIED = 2; + INTERNAL_ERROR = 3; + OUT_OF_BOUNDS = 4; +} + +enum EntryType { + ENTRY_TYPE_UNSPECIFIED = 0; + ENTRY_TYPE_SENSOR = 1; + ENTRY_TYPE_ACTUATOR = 2; + ENTRY_TYPE_ATTRIBUTE = 3; +} + +enum ChangeType { + STATIC = 0; // Value never changes + ON_CHANGE = 1; // Updates are provided every time the value changes (i.e. + // window is open / closed) + CONTINUOUS = 2; // Value is updated continuously. Broker needs to tell + // provider the preferred (update) frequency. 
+} + +message StringArray { + repeated string values = 1; +} + +message BoolArray { + repeated bool values = 1; +} + +message Int32Array { + repeated sint32 values = 1; +} + +message Int64Array { + repeated sint64 values = 1; +} + +message Uint32Array { + repeated uint32 values = 1; +} + +message Uint64Array { + repeated uint64 values = 1; +} + +message FloatArray { + repeated float values = 1; +} + +message DoubleArray { + repeated double values = 1; +} + +message Datapoint { + // Timestamp of the value + google.protobuf.Timestamp timestamp = 1; + + // values + oneof value { + Failure failure_value = 10; + string string_value = 11; + bool bool_value = 12; + sint32 int32_value = 13; + sint64 int64_value = 14; + uint32 uint32_value = 15; + uint64 uint64_value = 16; + float float_value = 17; + double double_value = 18; + StringArray string_array = 21; + BoolArray bool_array = 22; + Int32Array int32_array = 23; + Int64Array int64_array = 24; + Uint32Array uint32_array = 25; + Uint64Array uint64_array = 26; + FloatArray float_array = 27; + DoubleArray double_array = 28; + } + + enum Failure { + // The data point is known, but doesn't have a valid value + INVALID_VALUE = 0; + // The data point is known, but no value is available + NOT_AVAILABLE = 1; + // Unknown datapoint + UNKNOWN_DATAPOINT = 2; + // Access denied + ACCESS_DENIED = 3; + // Unexpected internal error + INTERNAL_ERROR = 4; + } +} + +message Allowed { + oneof values { + StringArray string_values = 1; + Int32Array int32_values = 3; + Int64Array int64_values = 4; + Uint32Array uint32_values = 5; + Uint64Array uint64_values = 6; + FloatArray float_values = 7; + DoubleArray double_values = 8; + } +} + +message Metadata { + int32 id = 1; + EntryType entry_type = 2; + string name = 4; + DataType data_type = 5; + ChangeType change_type = 6; // CONTINUOUS or STATIC or ON_CHANGE + string description = 7; + + Allowed allowed = 10; + // int32 min_update_hz = 10; // Only for CONTINUOUS + // int32 max_update_hz = 11; // Only for CONTINUOUS +}; diff --git a/can-protocol-adapter/src/can/comm.rs b/can-protocol-adapter/src/can/comm.rs new file mode 100644 index 0000000..cbec3a4 --- /dev/null +++ b/can-protocol-adapter/src/can/comm.rs @@ -0,0 +1,203 @@ +/******************************************************************************** + * Copyright (c) 2024 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ +use super::decoder; +use super::socket; +use crate::kuksa_feeder::feeder::Feeder; +use crate::time::Instant; +use crate::utils::adapter_config::AdapterConfig; +use log::{debug, error, trace, warn}; +use std::error::Error as StdError; +use std::sync::Arc; +use tokio::sync::mpsc; +use tokio::sync::Mutex; +use tokio::time::{self, sleep, Duration}; + +pub async fn initialize_can_socket( + adapter_config: &AdapterConfig, +) -> Result> { + // Get CAN interface, socket type and protocol from the adapter config. 
+ let can_interface = adapter_config.can_config.can_interface.clone(); + let socket_can_type = adapter_config.can_config.socket_can_type.clone(); + let socket_can_protocol = adapter_config.can_config.socket_can_protocol.clone(); + + // Create a new socket instance. + let mut socket = socket::Socket::new(can_interface, socket_can_type, socket_can_protocol); + + // Open the CAN socket. + socket.open_socket( + adapter_config.can_config.rx_id, + adapter_config.can_config.tx_id, + )?; + + // Set the socket to non-blocking mode. + socket.set_nonblocking()?; + + Ok(socket) +} + +pub async fn send_can_data( + socket: Arc>, + adapter_config: Arc, + pid_tx: mpsc::Sender, + mut pid_rx: mpsc::Receiver, +) { + // Time interval for reading and sending requests. + let mut interval = time::interval(Duration::from_millis(1)); + // Read the next request inteval for each PID. + let mut next_request_times: Vec = adapter_config + .pid_table + .iter() + .map(|entry| Instant::now() + Duration::from_millis(entry.interval_ms as u64)) + .collect(); + + loop { + interval.tick().await; + + // Iterate over each entry in the PID table. + for (i, entry) in adapter_config.pid_table.iter().enumerate() { + // Check if it is time to send the next request for current PID. + if Instant::now() >= next_request_times[i] { + // Lock the socket mutex for exclusive access. + let mut socket_lock = socket.lock().await; + // Write the request PID to the socket. + if let Err(err) = socket_lock.write_socket(&entry.request_pid) { + error!("Error sending CAN data: {}", err); + continue; + } + //Release the socket lock. + drop(socket_lock); + debug!("CAN Tx: PID: {:X?}", entry.request_pid); + if let Err(err) = pid_tx.send(i).await { + error!("Error sending request info: {}", err); + } + // Wait for a response from the receive_can_data function , Max timeout 1 second + match time::timeout(Duration::from_secs(1), pid_rx.recv()).await { + Ok(Some(_)) => { + trace!("Received response for PID: {:X?}", entry.request_pid); + } + Err(_) => { + warn!( + "Timeout waiting for response for PID: {:X?}", + entry.request_pid + ); + } + Ok(None) => { + error!("Response channel closed."); + return; + } + } + // Update the next request time for this PID. + next_request_times[i] = + Instant::now() + Duration::from_millis(entry.interval_ms as u64); + } + } + } +} + +pub async fn receive_can_data( + socket: Arc>, + adapter_config: Arc, + feeder: Arc>, + decoder: Arc>, + mut res_rx: mpsc::Receiver, + res_tx: mpsc::Sender, +) { + loop { + // Receive the index of the request PID. + match res_rx.recv().await { + Some(index) => { + trace!("Received index: {}", index); + // Get the corresponding entry from the PID table. + if let Some(entry) = adapter_config.pid_table.get(index) { + // Calculate the timeout duration for waiting for a response. + let delay_duration = Duration::from_millis(entry.response_timeout_ms as u64); + // Sleep for the configured response timeout to allow the CAN response to arrive. + sleep(delay_duration).await; + //Lock the socket mutex for exclusive access and read data from the socket. + let mut socket_lock = socket.lock().await; + let (notify, data) = match socket_lock.read_socket() { + // Define data outside the match arm. 
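+ // This match yields (notify, Option<data>): notify is true only when the first two bytes of the received frame equal the configured response_pid; mismatched frames and read errors are logged and not processed further.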
+ Ok(data) => { + debug!("CAN Rx: Data:{:X?}", data); + + let notify = if data.len() >= 2 + && entry.response_pid.len() >= 2 + && data[0..2] == entry.response_pid[0..2] + { + trace!("Received matching response for index: {}", index); + true + } else { + warn!("Received mismatched response for index: {}", index); + false + }; + (notify, Some(data)) // Return data with notify + } + Err(err) => { + error!("Error receiving CAN data: {}", err); + (false, None) // Return None for data + } + }; + // Release the socket lock. + drop(socket_lock); + + // If a matching response was received as per response pid entry, process the data. + if notify { + let data = data.expect("Data should be present if notify is true"); + // Send a notification to the sender task. + if let Err(err) = res_tx.send(true).await { + error!("Error sending response notification: {}", err); + } + let dbc_signal = entry.dbc_signal_name.clone(); + let decoder = decoder.lock().await; + match decoder.decode_message_by_name(&dbc_signal, data) { + Ok(decoded_value) => { + debug!( + "Decoded value for signal {}: {}", + dbc_signal, decoded_value + ); + // Set the decoded value in the feeder. + let vss_signal = entry.vss_signal.signal_name.clone(); + let vss_datatype = entry.vss_signal.datatype.clone(); + let _feeder_handle = tokio::spawn({ + let feeder_instance = feeder.clone(); + async move { + let mut feeder = feeder_instance.lock().await; + if let Err(e) = feeder + .set_datapoint_values( + &vss_signal, + decoded_value, + &vss_datatype, + ) + .await + { + error!("Error setting datapoint value: {}", e); + } + } + }); + } + Err(err) => { + error!("Error decoding message for {}: {}", dbc_signal, err); + } + } + } + } else { + error!("Invalid index received: {}", index); + } + } + None => { + error!("Sender task closed the channel."); + break; + } + } + } +} diff --git a/can-protocol-adapter/src/can/decoder.rs b/can-protocol-adapter/src/can/decoder.rs new file mode 100644 index 0000000..c6b11d6 --- /dev/null +++ b/can-protocol-adapter/src/can/decoder.rs @@ -0,0 +1,112 @@ +/******************************************************************************** + * Copyright (c) 2024 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. 
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License 2.0 which is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ ********************************************************************************/
+use byteorder::{BigEndian, LittleEndian, ReadBytesExt};
+use can_dbc::DBC;
+use log::trace;
+use std::fs::File;
+use std::io::prelude::*;
+use std::mem;
+
+pub struct Decoder {
+    dbc: Option<DBC>,
+}
+
+impl Decoder {
+    pub fn new(dbc_file_path: &str) -> Result<Self, String> {
+        let mut f = File::open(dbc_file_path).map_err(|e| format!("Failed to open file: {}", e))?;
+        let mut buffer = Vec::new();
+        f.read_to_end(&mut buffer)
+            .map_err(|e| format!("Failed to read file: {}", e))?;
+        let dbc =
+            DBC::from_slice(&buffer).map_err(|e| format!("Failed to parse DBC file: {:?}", e))?;
+        for message in dbc.messages() {
+            for signal in message.signals() {
+                trace!(
+                    "Parsed signal: {} start_bit: {}, size: {}, factor: {}, offset: {}",
+                    signal.name(),
+                    signal.start_bit(),
+                    signal.signal_size(),
+                    signal.factor(),
+                    signal.offset()
+                );
+            }
+        }
+        Ok(Self { dbc: Some(dbc) })
+    }
+
+    pub fn decode_message_by_name(&self, signal_name: &str, msg: Vec<u8>) -> Result<f64, String> {
+        let dbc = self
+            .dbc
+            .as_ref()
+            .ok_or("DBC not loaded. Call `new()` first.")?;
+
+        let signal = dbc
+            .messages()
+            .iter()
+            .flat_map(|message| message.signals())
+            .find(|signal| signal.name() == signal_name)
+            .ok_or_else(|| format!("Signal '{}' not found in DBC", signal_name))?;
+
+        let mut padded_msg = msg.clone();
+
+        // Pad with zeros to ensure 8 bytes.
+        padded_msg.resize(8, 0);
+
+        let msg64: u64 = match signal.byte_order() {
+            can_dbc::ByteOrder::BigEndian => (&padded_msg[..])
+                .read_u64::<BigEndian>()
+                .map_err(|e| e.to_string())?,
+            can_dbc::ByteOrder::LittleEndian => (&padded_msg[..])
+                .read_u64::<LittleEndian>()
+                .map_err(|e| e.to_string())?,
+        };
+
+        trace!("Signal: {}", signal_name);
+        trace!("Start Bit: {}", signal.start_bit());
+        trace!("Signal Size: {}", signal.signal_size());
+        trace!("Byte Order: {:?}", signal.byte_order());
+        trace!("Factor: {}", signal.factor());
+        trace!("Offset: {}", signal.offset());
+        trace!("CAN Rx: Data:{:X?}", msg);
+        trace!("Message (msg64): {:X}", msg64);
+
+        let data: u64;
+        match signal.byte_order() {
+            can_dbc::ByteOrder::BigEndian => {
+                let u64_size_in_bits = mem::size_of::<u64>() * 8;
+                let shifted_value = msg64
+                    >> (u64_size_in_bits as u64
+                        - ((signal.start_bit() + 1) + signal.signal_size()));
+                let bit_mask: u64 = (1 << signal.signal_size()) - 1;
+                data = shifted_value & bit_mask;
+                trace!(
+                    "shifted_value: {:X?} bit_mask: {:X?}",
+                    shifted_value,
+                    bit_mask
+                );
+            }
+            can_dbc::ByteOrder::LittleEndian => {
+                let shifted_value = msg64 >> (signal.start_bit() + 1);
+                let bit_mask = (1 << signal.signal_size()) - 1;
+                data = shifted_value & bit_mask;
+                trace!(
+                    "shifted_value: {:X?} bit_mask: {:X?}",
+                    shifted_value,
+                    bit_mask
+                );
+            }
+        };
+        // Apply the DBC scaling: physical value = raw value * factor + offset.
+        // For example, a hypothetical engine-speed signal with factor 0.25 and
+        // offset 0 turns a raw value of 4000 into 1000.0 rpm.
+        let result: f64 = (data as f64) * signal.factor() + signal.offset();
+        Ok(result)
+    }
+}
diff --git a/can-protocol-adapter/src/can/mod.rs b/can-protocol-adapter/src/can/mod.rs
new file mode 100644
index 0000000..9519798
--- /dev/null
+++ b/can-protocol-adapter/src/can/mod.rs
@@ -0,0 +1,15 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ +pub mod comm; +pub mod decoder; +pub mod socket; diff --git a/can-protocol-adapter/src/can/socket.rs b/can-protocol-adapter/src/can/socket.rs new file mode 100644 index 0000000..e321571 --- /dev/null +++ b/can-protocol-adapter/src/can/socket.rs @@ -0,0 +1,124 @@ +/******************************************************************************** + * Copyright (c) 2024 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ +use socketcan_isotp::{self, IsoTpSocket, StandardId}; +use std::error::Error as StdError; +use std::sync::{Arc, Mutex}; + +pub struct Socket { + pub interface_name: String, + #[allow(dead_code)] // 'socket_type' is intended for future socket implementation for raw-can. + pub socket_type: String, + pub protocol: String, + tp_socket: Option>>, +} + +impl Socket { + pub fn new(interface_name: String, socket_type: String, protocol: String) -> Self { + Socket { + interface_name, + socket_type, + protocol, + tp_socket: None, + } + } + + pub fn open_socket(&mut self, rxid: u32, txid: u32) -> Result<(), Box> { + if self.protocol == "CAN_ISOTP" { + let socket = IsoTpSocket::open( + &self.interface_name, + StandardId::new(rxid.try_into().unwrap()).expect("Invalid rx id"), + StandardId::new(txid.try_into().unwrap()).expect("Invalid rx id"), + )?; + self.tp_socket = Some(Arc::new(Mutex::new(socket))); + Ok(()) + } else { + Err("Invalid protocol".into()) + } + } + + pub fn set_nonblocking(&mut self) -> Result<(), Box> { + if let Some(tp_socket) = &self.tp_socket { + let socket_lock = tp_socket.lock().unwrap(); + socket_lock.set_nonblocking(true)?; // Enable non-blocking mode + Ok(()) + } else { + Err("Socket not opened".into()) + } + } + + pub fn read_socket(&mut self) -> Result, Box> { + if let Some(tp_socket) = &self.tp_socket { + let mut socket = tp_socket.lock().unwrap(); + let buffer = socket.read()?; + Ok(buffer.to_vec()) + } else { + Err("Socket not opened".into()) + } + } + + pub fn write_socket(&mut self, data: &[u8]) -> Result<(), Box> { + if let Some(tp_socket) = &self.tp_socket { + let socket = tp_socket.lock().unwrap(); + socket.write(data)?; + Ok(()) + } else { + Err("Socket not opened".into()) + } + } +} + +#[cfg(test)] +#[test] +fn test_open_socket_success_isotp() -> Result<(), Box> { + let mut socket = Socket::new( + "vcan0".to_string(), + "socket_type".to_string(), + "CAN_ISOTP".to_string(), + ); + assert!(socket.open_socket(0x123, 0x456).is_ok()); + assert!(socket.tp_socket.is_some()); + Ok(()) +} +#[test] +fn test_open_socket_invalid_protocol() { + let mut socket = Socket::new( + "vcan0".to_string(), + "socket_type".to_string(), + "INVALID_PROTOCOL".to_string(), + ); + assert!(socket.open_socket(0x123, 0x456).is_err()); + assert!(socket.tp_socket.is_none()); +} +#[test] +fn test_write_socket_not_opened() { + let mut socket = Socket::new( + 
"vcan0".to_string(), + "socketcan".to_string(), + "CAN_ISOTP".to_string(), + ); + + let test_data = vec![0x01, 0x02, 0x03, 0x04]; + let result = socket.write_socket(&test_data); + assert!(result.is_err()); +} + +#[test] +fn test_read_socket_not_opened() { + let mut socket = Socket::new( + "vcan0".to_string(), + "socketcan".to_string(), + "CAN_ISOTP".to_string(), + ); + let result = socket.read_socket(); + assert!(result.is_err()); +} diff --git a/can-protocol-adapter/src/databroker-proto/Cargo.toml b/can-protocol-adapter/src/databroker-proto/Cargo.toml new file mode 100644 index 0000000..30a8e45 --- /dev/null +++ b/can-protocol-adapter/src/databroker-proto/Cargo.toml @@ -0,0 +1,28 @@ +#******************************************************************************** +# Copyright (c) 2022 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License 2.0 which is available at +# http://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +#*******************************************************************************/ + +[package] +name = "databroker-proto" +version = "0.4.7-dev.0" +authors = ["Eclipse KUKSA Project"] +edition = "2021" +license = "Apache-2.0" + +[dependencies] +tonic = "0.11.0" +prost = "0.12.6" +prost-types = "0.12.6" + +[build-dependencies] +tonic-build = "0.11.0" +protobuf-src = "1.1.0" diff --git a/can-protocol-adapter/src/databroker-proto/build.rs b/can-protocol-adapter/src/databroker-proto/build.rs new file mode 100644 index 0000000..d02a006 --- /dev/null +++ b/can-protocol-adapter/src/databroker-proto/build.rs @@ -0,0 +1,30 @@ +/******************************************************************************** +* Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. 
+* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +fn main() -> Result<(), Box> { + std::env::set_var("PROTOC", protobuf_src::protoc()); + tonic_build::configure() + .compile_well_known_types(false) + .protoc_arg("--experimental_allow_proto3_optional") + .compile( + &[ + "proto/sdv/databroker/v1/broker.proto", + "proto/sdv/databroker/v1/types.proto", + "proto/sdv/databroker/v1/collector.proto", + "proto/kuksa/val/v1/val.proto", + "proto/kuksa/val/v1/types.proto", + ], + &["proto"], + )?; + Ok(()) +} diff --git a/can-protocol-adapter/src/databroker-proto/proto b/can-protocol-adapter/src/databroker-proto/proto new file mode 120000 index 0000000..b943f93 --- /dev/null +++ b/can-protocol-adapter/src/databroker-proto/proto @@ -0,0 +1 @@ +../../proto \ No newline at end of file diff --git a/can-protocol-adapter/src/databroker-proto/src/lib.rs b/can-protocol-adapter/src/databroker-proto/src/lib.rs new file mode 100644 index 0000000..83fe200 --- /dev/null +++ b/can-protocol-adapter/src/databroker-proto/src/lib.rs @@ -0,0 +1,147 @@ +/******************************************************************************** +* Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +#![allow(unknown_lints)] +#![allow(clippy::derive_partial_eq_without_eq)] +pub mod sdv { + pub mod databroker { + pub mod v1 { + tonic::include_proto!("sdv.databroker.v1"); + } + } +} + +pub mod kuksa { + pub mod val { + pub mod v1 { + tonic::include_proto!("kuksa.val.v1"); + + use datapoint::Value; + use std::{any::Any, fmt::Display, str::FromStr}; + + #[derive(Debug)] + pub struct ParsingError { + message: String, + } + + impl ParsingError { + pub fn new>(message: T) -> Self { + ParsingError { + message: message.into(), + } + } + } + + impl Display for ParsingError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.message.fmt(f) + } + } + + impl std::error::Error for ParsingError {} + + impl FromStr for DataType { + type Err = ParsingError; + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "string" => Ok(DataType::String), + "string[]" => Ok(DataType::StringArray), + "bool" => Ok(DataType::Boolean), + "bool[]" => Ok(DataType::BooleanArray), + "int8" => Ok(DataType::Int8), + "int8[]" => Ok(DataType::Int8Array), + "int16" => Ok(DataType::Int16), + "int16[]" => Ok(DataType::Int16Array), + "int32" => Ok(DataType::Int32), + "int32[]" => Ok(DataType::Int32Array), + "int64" => Ok(DataType::Int64), + "int64[]" => Ok(DataType::Int64Array), + "uint8" => Ok(DataType::Uint8), + "uint8[]" => Ok(DataType::Uint8Array), + "uint16" => Ok(DataType::Uint16), + "uint16[]" => Ok(DataType::Uint16Array), + "uint32" => Ok(DataType::Uint32), + "uint32[]" => Ok(DataType::Uint32Array), + "uint64" => Ok(DataType::Uint64), + "uint64[]" => Ok(DataType::Uint64Array), + "float" => Ok(DataType::Float), + "float[]" => 
Ok(DataType::FloatArray), + "double" => Ok(DataType::Double), + "double[]" => Ok(DataType::DoubleArray), + _ => Err(ParsingError::new(format!("unsupported data type '{s}'"))), + } + } + } + + impl Value { + pub fn new>( + vss_type: T, + value: &str, + ) -> Result { + let dt: DataType = vss_type.into(); + match dt { + DataType::String => Ok(Value::String(value.to_string())), + DataType::Boolean => value + .parse::() + .map(Value::Bool) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Int8 => value + .parse::() + .map(|v| Value::Int32(v as i32)) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Int16 => value + .parse::() + .map(|v| Value::Int32(v as i32)) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Int32 => value + .parse::() + .map(Value::Int32) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Int64 => value + .parse::() + .map(Value::Int64) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Uint8 => value + .parse::() + .map(|v| Value::Uint32(v as u32)) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Uint16 => value + .parse::() + .map(|v| Value::Uint32(v as u32)) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Uint32 => value + .parse::() + .map(Value::Uint32) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Uint64 => value + .parse::() + .map(Value::Uint64) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Float => value + .parse::() + .map(Value::Float) + .map_err(|e| ParsingError::new(e.to_string())), + DataType::Double => value + .parse::() + .map(Value::Double) + .map_err(|e| ParsingError::new(e.to_string())), + _ => Err(ParsingError::new(format!( + "data type '{:?}' not supported for parsing string into typed value", + dt.type_id() + ))), + } + } + } + } + } +} diff --git a/can-protocol-adapter/src/grpc/kuksa_val_v1/conversions.rs b/can-protocol-adapter/src/grpc/kuksa_val_v1/conversions.rs new file mode 100644 index 0000000..d9b972d --- /dev/null +++ b/can-protocol-adapter/src/grpc/kuksa_val_v1/conversions.rs @@ -0,0 +1,340 @@ +/******************************************************************************** +* Copyright (c) 2022 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. 
+* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +use databroker_proto::kuksa::val::v1 as proto; + +use crate::broker; + +use std::convert::TryFrom; +use std::time::SystemTime; + +impl From<&broker::EntryType> for proto::EntryType { + fn from(from: &broker::EntryType) -> Self { + match from { + broker::EntryType::Sensor => proto::EntryType::Sensor, + broker::EntryType::Attribute => proto::EntryType::Attribute, + broker::EntryType::Actuator => proto::EntryType::Actuator, + } + } +} + +impl From for proto::DataType { + fn from(from: broker::DataType) -> Self { + match from { + broker::DataType::String => proto::DataType::String, + broker::DataType::Bool => proto::DataType::Boolean, + broker::DataType::Int8 => proto::DataType::Int8, + broker::DataType::Int16 => proto::DataType::Int16, + broker::DataType::Int32 => proto::DataType::Int32, + broker::DataType::Int64 => proto::DataType::Int64, + broker::DataType::Uint8 => proto::DataType::Uint8, + broker::DataType::Uint16 => proto::DataType::Uint16, + broker::DataType::Uint32 => proto::DataType::Uint32, + broker::DataType::Uint64 => proto::DataType::Uint64, + broker::DataType::Float => proto::DataType::Float, + broker::DataType::Double => proto::DataType::Double, + broker::DataType::StringArray => proto::DataType::StringArray, + broker::DataType::BoolArray => proto::DataType::BooleanArray, + broker::DataType::Int8Array => proto::DataType::Int8Array, + broker::DataType::Int16Array => proto::DataType::Int16Array, + broker::DataType::Int32Array => proto::DataType::Int32Array, + broker::DataType::Int64Array => proto::DataType::Int64Array, + broker::DataType::Uint8Array => proto::DataType::Uint8Array, + broker::DataType::Uint16Array => proto::DataType::Uint16Array, + broker::DataType::Uint32Array => proto::DataType::Uint32Array, + broker::DataType::Uint64Array => proto::DataType::Uint64Array, + broker::DataType::FloatArray => proto::DataType::FloatArray, + broker::DataType::DoubleArray => proto::DataType::DoubleArray, + } + } +} + +impl From for Option { + fn from(from: broker::Datapoint) -> Self { + match from.value { + broker::DataValue::NotAvailable => None, + broker::DataValue::Bool(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Bool(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::String(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::String(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Int32(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int32(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Int64(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int64(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Uint32(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint32(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Uint64(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint64(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Float(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Float(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Double(value) => Some(proto::Datapoint { 
+ value: Some(proto::datapoint::Value::Double(value)), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::BoolArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::BoolArray(proto::BoolArray { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::StringArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::StringArray(proto::StringArray { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Int32Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int32Array(proto::Int32Array { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Int64Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int64Array(proto::Int64Array { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Uint32Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint32Array(proto::Uint32Array { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::Uint64Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint64Array(proto::Uint64Array { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::FloatArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::FloatArray(proto::FloatArray { + values, + })), + timestamp: Some(from.ts.into()), + }), + broker::DataValue::DoubleArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::DoubleArray(proto::DoubleArray { + values, + })), + timestamp: Some(from.ts.into()), + }), + } + } +} + +impl From for Option { + fn from(from: broker::DataValue) -> Self { + match from { + broker::DataValue::NotAvailable => None, + broker::DataValue::Bool(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Bool(value)), + timestamp: None, + }), + broker::DataValue::String(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::String(value)), + timestamp: None, + }), + broker::DataValue::Int32(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int32(value)), + timestamp: None, + }), + broker::DataValue::Int64(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int64(value)), + timestamp: None, + }), + broker::DataValue::Uint32(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint32(value)), + timestamp: None, + }), + broker::DataValue::Uint64(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint64(value)), + timestamp: None, + }), + broker::DataValue::Float(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Float(value)), + timestamp: None, + }), + broker::DataValue::Double(value) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Double(value)), + timestamp: None, + }), + broker::DataValue::BoolArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::BoolArray(proto::BoolArray { + values, + })), + timestamp: None, + }), + broker::DataValue::StringArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::StringArray(proto::StringArray { + values, + })), + timestamp: None, + }), + broker::DataValue::Int32Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Int32Array(proto::Int32Array { + values, + })), + timestamp: None, + }), + broker::DataValue::Int64Array(values) => Some(proto::Datapoint { + value: 
Some(proto::datapoint::Value::Int64Array(proto::Int64Array { + values, + })), + timestamp: None, + }), + broker::DataValue::Uint32Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint32Array(proto::Uint32Array { + values, + })), + timestamp: None, + }), + broker::DataValue::Uint64Array(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::Uint64Array(proto::Uint64Array { + values, + })), + timestamp: None, + }), + broker::DataValue::FloatArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::FloatArray(proto::FloatArray { + values, + })), + timestamp: None, + }), + broker::DataValue::DoubleArray(values) => Some(proto::Datapoint { + value: Some(proto::datapoint::Value::DoubleArray(proto::DoubleArray { + values, + })), + timestamp: None, + }), + } + } +} + +impl From> for broker::DataValue { + fn from(from: Option) -> Self { + match from { + Some(value) => match value { + proto::datapoint::Value::String(value) => broker::DataValue::String(value), + proto::datapoint::Value::Bool(value) => broker::DataValue::Bool(value), + proto::datapoint::Value::Int32(value) => broker::DataValue::Int32(value), + proto::datapoint::Value::Int64(value) => broker::DataValue::Int64(value), + proto::datapoint::Value::Uint32(value) => broker::DataValue::Uint32(value), + proto::datapoint::Value::Uint64(value) => broker::DataValue::Uint64(value), + proto::datapoint::Value::Float(value) => broker::DataValue::Float(value), + proto::datapoint::Value::Double(value) => broker::DataValue::Double(value), + proto::datapoint::Value::StringArray(array) => { + broker::DataValue::StringArray(array.values) + } + proto::datapoint::Value::BoolArray(array) => { + broker::DataValue::BoolArray(array.values) + } + proto::datapoint::Value::Int32Array(array) => { + broker::DataValue::Int32Array(array.values) + } + proto::datapoint::Value::Int64Array(array) => { + broker::DataValue::Int64Array(array.values) + } + proto::datapoint::Value::Uint32Array(array) => { + broker::DataValue::Uint32Array(array.values) + } + proto::datapoint::Value::Uint64Array(array) => { + broker::DataValue::Uint64Array(array.values) + } + proto::datapoint::Value::FloatArray(array) => { + broker::DataValue::FloatArray(array.values) + } + proto::datapoint::Value::DoubleArray(array) => { + broker::DataValue::DoubleArray(array.values) + } + }, + None => broker::DataValue::NotAvailable, + } + } +} + +impl From<&broker::Field> for proto::Field { + fn from(from: &broker::Field) -> Self { + match from { + broker::Field::Datapoint => proto::Field::Value, + broker::Field::ActuatorTarget => proto::Field::ActuatorTarget, + broker::Field::MetadataUnit => proto::Field::MetadataUnit, + } + } +} + +impl TryFrom<&proto::Field> for broker::Field { + type Error = &'static str; + + fn try_from(from: &proto::Field) -> Result { + match from { + proto::Field::Value => Ok(broker::Field::Datapoint), + proto::Field::ActuatorTarget => Ok(broker::Field::ActuatorTarget), + _ => Err("Unknown field"), + } + } +} + +impl From for broker::Datapoint { + fn from(from: proto::Datapoint) -> Self { + Self { + ts: SystemTime::now(), + source_ts: match from.timestamp { + Some(ts) => match std::convert::TryInto::try_into(ts) { + Ok(ts) => Some(ts), + Err(_) => None, + }, + None => None, + }, + value: broker::DataValue::from(from.value), + } + } +} + +impl From for proto::DataEntry { + fn from(from: broker::EntryUpdate) -> Self { + Self { + path: from.path.unwrap_or_default(), + value: match from.datapoint { + Some(datapoint) => 
Option::::from(datapoint), + None => None, + }, + actuator_target: match from.actuator_target { + Some(Some(actuator_target)) => Option::::from(actuator_target), + Some(None) => None, + None => None, + }, + metadata: { + let metadata = proto::Metadata { + unit: from.unit, + ..Default::default() + }; + Some(metadata) + }, + } + } +} diff --git a/can-protocol-adapter/src/grpc/kuksa_val_v1/mod.rs b/can-protocol-adapter/src/grpc/kuksa_val_v1/mod.rs new file mode 100644 index 0000000..2da4088 --- /dev/null +++ b/can-protocol-adapter/src/grpc/kuksa_val_v1/mod.rs @@ -0,0 +1,15 @@ +/******************************************************************************** +* Copyright (c) 2022 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +mod conversions; +mod val; diff --git a/can-protocol-adapter/src/grpc/kuksa_val_v1/val.rs b/can-protocol-adapter/src/grpc/kuksa_val_v1/val.rs new file mode 100644 index 0000000..9cbbfc8 --- /dev/null +++ b/can-protocol-adapter/src/grpc/kuksa_val_v1/val.rs @@ -0,0 +1,1292 @@ +/******************************************************************************** +* Copyright (c) 2022, 2023 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + +use std::collections::HashMap; +use std::collections::HashSet; +use std::iter::FromIterator; +use std::pin::Pin; +use tokio::select; +use tokio::sync::mpsc; + +use databroker_proto::kuksa::val::v1 as proto; +use databroker_proto::kuksa::val::v1::{DataEntryError, EntryUpdate}; +use tokio_stream::wrappers::ReceiverStream; +use tokio_stream::Stream; +use tokio_stream::StreamExt; +use tonic::{Response, Status, Streaming}; +use tracing::debug; +use tracing::info; + +use crate::broker; +use crate::broker::ReadError; +use crate::broker::SubscriptionError; +use crate::broker::{AuthorizedAccess, EntryReadAccess}; +use crate::glob::Matcher; +use crate::permissions::Permissions; + +const MAX_REQUEST_PATH_LENGTH: usize = 1000; + +#[tonic::async_trait] +impl proto::val_server::Val for broker::DataBroker { + async fn get( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + debug!(?request); + let permissions = match request.extensions().get::() { + Some(permissions) => { + debug!(?permissions); + permissions.clone() + } + None => return Err(tonic::Status::unauthenticated("Unauthenticated")), + }; + let broker = self.authorized_access(&permissions); + + let requested = request.into_inner().entries; + if requested.is_empty() { + Err(tonic::Status::new( + tonic::Code::InvalidArgument, + "No datapoints requested".to_string(), + )) + } else { + let mut entries = Vec::new(); + let mut errors = Vec::new(); + /* + * valid_requests: A collection of valid requests, each represented as a tuple with five fields: + * - Matcher: 
Matcher which wraps glob string handling. + * - Fields: A HashSet of proto::Field objects extracted from the request. + * - RequestPath: The original request path, used for error reporting when no entries match. + * - IsMatch: A boolean flag indicating whether the current request matches any entry. + * - Error: An optional ReadError representing a permission error that may occur when querying a valid path entry. + */ + let mut valid_requests: Vec<( + Matcher, + HashSet, + String, + bool, + Option, + )> = Vec::new(); + + // Fill valid_requests structure. + for request in requested { + if request.path.len() > MAX_REQUEST_PATH_LENGTH { + errors.push(proto::DataEntryError { + path: request.path, + error: Some(proto::Error { + code: 400, + reason: "bad_request".to_owned(), + message: "The provided path is too long".to_owned(), + }), + }); + continue; + } + + match Matcher::new(&request.path) { + Ok(matcher) => { + let view = proto::View::try_from(request.view).map_err(|_| { + tonic::Status::invalid_argument(format!( + "Invalid View (id: {}", + request.view + )) + })?; + let fields = + HashSet::::from_iter(request.fields.iter().filter_map( + |id| proto::Field::try_from(*id).ok(), // Ignore unknown fields for now + )); + let view_fields = combine_view_and_fields(view, fields); + debug!("Getting fields: {:?}", view_fields); + + valid_requests.push((matcher, view_fields, request.path, false, None)); + } + Err(_) => { + errors.push(proto::DataEntryError { + path: request.path, + error: Some(proto::Error { + code: 400, + reason: "bad_request".to_owned(), + message: "Bad Wildcard Pattern Request".to_owned(), + }), + }); + } + } + } + if !valid_requests.is_empty() { + for (matcher, view_fields, _, is_match, op_error) in &mut valid_requests { + broker + .for_each_entry(|entry| { + let mut result_fields: HashSet = HashSet::new(); + let glob_path = &entry.metadata().glob_path; + if matcher.is_match(glob_path) { + // Update the `is_match` to indicate a valid and used request path. + *is_match = true; + if view_fields.contains(&proto::Field::Metadata) { + result_fields.extend(view_fields.clone()); + } + if view_fields.contains(&proto::Field::ActuatorTarget) + || view_fields.contains(&proto::Field::Value) + { + match entry.datapoint() { + Ok(_) => { + // If the entry's path matches the regex and there is access permission, + // add the result fields to the current entry. + result_fields.extend(view_fields.clone()); + } + Err(error) => { + //Propagate the error + *op_error = Some(error); + } + } + } + } + // If there are result fields, add them to the entries list. + if !result_fields.is_empty() { + let proto_entry = + proto_entry_from_entry_and_fields(entry, result_fields); + debug!("Getting datapoint: {:?}", proto_entry); + entries.push(proto_entry); + } + }) + .await; + + // Not found any matches meaning it could be a branch path request + // Only support branches like Vehicle.Cabin.Sunroof but not like **.Sunroof + if !matcher.as_string().starts_with("**") + && !matcher.as_string().ends_with("/**") + && !(*is_match) + { + if let Ok(branch_matcher) = Matcher::new(&(matcher.as_string() + "/**")) { + broker + .for_each_entry(|entry| { + let mut result_fields: HashSet = HashSet::new(); + let glob_path = &entry.metadata().glob_path; + if branch_matcher.is_match(glob_path) { + // Update the `is_match` to indicate a valid and used request path. 
+ *is_match = true; + if view_fields.contains(&proto::Field::Metadata) { + result_fields.extend(view_fields.clone()); + } + if view_fields.contains(&proto::Field::ActuatorTarget) + || view_fields.contains(&proto::Field::Value) + { + match entry.datapoint() { + Ok(_) => { + // If the entry's path matches the regex and there is access permission, + // add the result fields to the current entry. + result_fields.extend(view_fields.clone()); + } + Err(error) => { + //Propagate the error + *op_error = Some(error); + } + } + } + } + // If there are result fields, add them to the entries list. + if !result_fields.is_empty() { + let proto_entry = + proto_entry_from_entry_and_fields(entry, result_fields); + debug!("Getting datapoint: {:?}", proto_entry); + entries.push(proto_entry); + } + }) + .await; + } + } + } + } + + /* + * Handle Unmatched or Permission Errors + * + * After processing valid requests, this section iterates over the `valid_requests` vector + * to check if any requests didn't have matching entries or encountered permission errors. + * + * For each unmatched request, a "not_found" error message is added to the `errors` list. + * For requests with permission errors, a "forbidden" error message is added. + */ + for (_, _, path, is_match, error) in valid_requests { + if !is_match { + errors.push(proto::DataEntryError { + path: path.to_owned(), + error: Some(proto::Error { + code: 404, + reason: "not_found".to_owned(), + message: "No entries found for the provided path".to_owned(), + }), + }); + } else if let Some(_error) = error { + // clear the entries vector since we only want to return rerrors + // and not partial success + entries.clear(); + errors.push(proto::DataEntryError { + path: path.to_owned(), + error: Some(proto::Error { + code: 403, + reason: "forbidden".to_owned(), + message: "Permission denied for some entries".to_owned(), + }), + }); + } + } + + // Not sure how to handle the "global error". 
+ // Fall back to just use the first path specific error if any + let error = match errors.first() { + Some(first) => first.error.clone(), + None => None, + }; + + let response = proto::GetResponse { + entries, + errors, + error, + }; + Ok(tonic::Response::new(response)) + } + } + + async fn set( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + debug!(?request); + let permissions = match request.extensions().get::() { + Some(permissions) => { + debug!(?permissions); + permissions.clone() + } + None => return Err(tonic::Status::unauthenticated("Unauthenticated")), + }; + + let broker = self.authorized_access(&permissions); + + let entry_updates = request.into_inner().updates; + + // Collect errors encountered + let mut errors = Vec::::new(); + let mut updates = Vec::<(i32, broker::EntryUpdate)>::new(); + + for request in entry_updates { + match &request.entry { + Some(entry) => match broker.get_id_by_path(&entry.path).await { + Some(id) => match validate_entry_update(&broker, &request, id).await { + Ok(result) => updates.push(result), + Err(e) => return Err(e), + }, + None => { + let message = format!("{} not found", entry.path); + errors.push(proto::DataEntryError { + path: entry.path.clone(), + error: Some(proto::Error { + code: 404, + reason: "not_found".to_string(), + message, + }), + }) + } + }, + None => { + return Err(tonic::Status::invalid_argument( + "Path is required".to_string(), + )); + } + } + } + + match broker.update_entries(updates).await { + Ok(()) => {} + Err(err) => { + debug!("Failed to set datapoint: {:?}", err); + for (id, error) in err.into_iter() { + if let Some(metadata) = broker.get_metadata(id).await { + let path = metadata.path.clone(); + let data_entry_error = convert_to_data_entry_error(&path, &error); + errors.push(data_entry_error); + } + } + } + } + + Ok(tonic::Response::new(proto::SetResponse { + error: None, + errors, + })) + } + + type StreamedUpdateStream = + ReceiverStream>; + + async fn streamed_update( + &self, + request: tonic::Request>, + ) -> Result, tonic::Status> { + debug!(?request); + let permissions = match request.extensions().get::() { + Some(permissions) => { + debug!(?permissions); + permissions.clone() + } + None => return Err(tonic::Status::unauthenticated("Unauthenticated")), + }; + let mut stream = request.into_inner(); + + let mut shutdown_trigger = self.get_shutdown_trigger(); + + // Copy (to move into task below) + let broker = self.clone(); + + // Create stream (to be returned); when changing buffer size, throughput should be measured + let (sender, receiver) = mpsc::channel(10); + // Listening on stream + tokio::spawn(async move { + info!("Update Stream opened"); + let permissions = permissions; + let broker = broker.authorized_access(&permissions); + loop { + select! 
{ + message = stream.message() => { + match message { + Ok(request) => { + match request { + Some(req) => { + let entry_updates = req.updates; + + // Collect errors encountered + let mut errors = Vec::::new(); + let mut updates = Vec::<(i32, broker::EntryUpdate)>::new(); + + for request in entry_updates { + match &request.entry { + Some(entry) => match broker.get_id_by_path(&entry.path).await { + Some(id) => { + match validate_entry_update(&broker, &request, id).await { + Ok(result) => { + updates.push(result); + } + Err(e) => { + let message = format!("Data present in the request is invalid: {}", e.message()); + errors.push(proto::DataEntryError { + path: entry.path.clone(), + error: Some(proto::Error { + code: 400, + reason: "invalid_data".to_string(), + message, + }) + }) + } + } + } + None => { + let message = format!("{} not found", entry.path); + errors.push(proto::DataEntryError { + path: entry.path.clone(), + error: Some(proto::Error { + code: 404, + reason: "not_found".to_string(), + message, + }) + }) + } + }, + None => { + errors.push(proto::DataEntryError { + path: "".to_string(), + error: Some(proto::Error { + code: 400, + reason: "invalid_data".to_string(), + message: "Data present in the request is invalid: Path is required".to_string() + }) + }) + } + } + } + + match broker.update_entries(updates).await { + Ok(_) => {} + Err(err) => { + debug!("Failed to set datapoint: {:?}", err); + for (id, error) in err.into_iter() { + if let Some(metadata) = broker.get_metadata(id).await { + let path = metadata.path.clone(); + let data_entry_error = convert_to_data_entry_error(&path, &error); + errors.push(data_entry_error); + } + } + } + } + + if let Err(err) = sender.send( + Ok(proto::StreamedUpdateResponse { + errors: errors.clone(), + error: if let Some(wrapper_error) = errors.first() { + wrapper_error.error.clone() + } else { + None + }, + }) + ).await { + debug!("Failed to send errors: {}", err); + } + } + None => { + debug!("provider: no more messages"); + break; + } + } + }, + Err(err) => { + debug!("provider: connection broken: {:?}", err); + break; + }, + } + }, + _ = shutdown_trigger.recv() => { + debug!("provider: shutdown received"); + break; + } + } + } + }); + + // Return the stream + Ok(Response::new(ReceiverStream::new(receiver))) + } + + type SubscribeStream = Pin< + Box< + dyn Stream> + + Send + + Sync + + 'static, + >, + >; + + async fn subscribe( + &self, + request: tonic::Request, + ) -> Result, tonic::Status> { + debug!(?request); + let permissions = match request.extensions().get::() { + Some(permissions) => { + debug!(?permissions); + permissions.clone() + } + None => return Err(tonic::Status::unauthenticated("Unauthenticated")), + }; + let broker = self.authorized_access(&permissions); + + let request = request.into_inner(); + + if request.entries.is_empty() { + return Err(tonic::Status::invalid_argument( + "Subscription request must contain at least one entry.", + )); + } + + let mut valid_requests: HashMap)> = HashMap::new(); + + for entry in &request.entries { + if entry.path.len() > MAX_REQUEST_PATH_LENGTH { + tonic::Status::new( + tonic::Code::InvalidArgument, + "The provided path is too long", + ); + continue; + } + + match Matcher::new(&entry.path) { + Ok(matcher) => { + let mut fields = HashSet::new(); + for id in &entry.fields { + if let Ok(field) = proto::Field::try_from(*id) { + match field { + proto::Field::Value => { + fields.insert(broker::Field::Datapoint); + } + proto::Field::ActuatorTarget => { + fields.insert(broker::Field::ActuatorTarget); + 
} + proto::Field::MetadataUnit => { + fields.insert(broker::Field::MetadataUnit); + } + _ => { + // Just ignore other fields for now + } + } + }; + } + valid_requests.insert(entry.path.clone(), (matcher, fields)); + } + Err(_) => { + tonic::Status::new(tonic::Code::InvalidArgument, "Invalid Pattern Argument"); + continue; + } + } + } + + let mut entries: HashMap> = HashMap::new(); + + if !valid_requests.is_empty() { + for (path, (matcher, fields)) in valid_requests { + let mut requested_path_found = false; + let mut permission_error = false; + broker + .for_each_entry(|entry| { + let glob_path = &entry.metadata().glob_path; + if matcher.is_match(glob_path) { + requested_path_found = true; + entries + .entry(entry.metadata().id) + .and_modify(|existing_fields| { + existing_fields.extend(fields.clone()); + }) + .or_insert(fields.clone()); + + match entry.datapoint() { + Ok(_) => {} + Err(_) => permission_error = true, + } + } + }) + .await; + if !requested_path_found { + // Not found any matches meaning it could be a branch path request + // Only support branches like Vehicle.Cabin.Sunroof but not like **.Sunroof + if !matcher.as_string().starts_with("**") + && !matcher.as_string().ends_with("/**") + { + if let Ok(branch_matcher) = Matcher::new(&(matcher.as_string() + "/**")) { + broker + .for_each_entry(|entry| { + let glob_path = &entry.metadata().glob_path; + if branch_matcher.is_match(glob_path) { + requested_path_found = true; + entries + .entry(entry.metadata().id) + .and_modify(|existing_fields| { + existing_fields.extend(fields.clone()); + }) + .or_insert(fields.clone()); + + match entry.datapoint() { + Ok(_) => {} + Err(_) => permission_error = true, + } + } + }) + .await; + } + } + if !requested_path_found { + let message = format!("No entries found for the provided. Path: {}", path); + return Err(tonic::Status::new(tonic::Code::NotFound, message)); + } + } + if permission_error { + let message = format!("Permission denied for some entries. Path: {}", path); + return Err(tonic::Status::new(tonic::Code::PermissionDenied, message)); + } + } + } + + match broker.subscribe(entries).await { + Ok(stream) => { + let stream = convert_to_proto_stream(stream); + Ok(tonic::Response::new(Box::pin(stream))) + } + Err(SubscriptionError::NotFound) => { + Err(tonic::Status::new(tonic::Code::NotFound, "Path not found")) + } + Err(SubscriptionError::InvalidInput) => Err(tonic::Status::new( + tonic::Code::InvalidArgument, + "Invalid Argument", + )), + Err(SubscriptionError::InternalError) => { + Err(tonic::Status::new(tonic::Code::Internal, "Internal Error")) + } + } + } + + async fn get_server_info( + &self, + _request: tonic::Request, + ) -> Result, tonic::Status> { + let server_info = proto::GetServerInfoResponse { + name: "databroker".to_owned(), + version: self.get_version().to_owned(), + }; + Ok(tonic::Response::new(server_info)) + } +} + +async fn validate_entry_update( + broker: &AuthorizedAccess<'_, '_>, + request: &EntryUpdate, + id: i32, +) -> Result<(i32, broker::EntryUpdate), Status> { + let entry = &request.entry.clone().unwrap(); + + let fields = HashSet::::from_iter(request.fields.iter().filter_map( + |id| proto::Field::try_from(*id).ok(), // Ignore unknown fields for now + )); + + if entry.actuator_target.is_some() { + if let Some(metadata) = broker.get_metadata(id).await { + if metadata.entry_type != broker::EntryType::Actuator { + return Err(tonic::Status::invalid_argument( + "Tried to set a target value for a non-actuator. 
Non-actuators have no target value.".to_string(), + )); + } + } + } + + let entry = match &request.entry { + Some(entry) => entry, + None => return Err(tonic::Status::invalid_argument("Empty entry".to_string())), + }; + + debug!("Setting fields: {:?}", fields); + let update = broker::EntryUpdate::from_proto_entry_and_fields(entry, fields); + + Ok((id, update)) +} + +fn convert_to_data_entry_error(path: &String, error: &broker::UpdateError) -> DataEntryError { + match error { + broker::UpdateError::NotFound => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 404, + reason: String::from("not found"), + message: format!("no datapoint registered for path {path}"), + }), + }, + broker::UpdateError::WrongType => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 400, + reason: String::from("type mismatch"), + message: "cannot set existing datapoint to value of different type".to_string(), + }), + }, + broker::UpdateError::UnsupportedType => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 400, + reason: String::from("unsupported type"), + message: "cannot set datapoint to value of unsupported type".to_string(), + }), + }, + broker::UpdateError::OutOfBounds => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 400, + reason: String::from("value out of bounds"), + message: String::from("given value exceeds type's boundaries"), + }), + }, + broker::UpdateError::PermissionDenied => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 403, + reason: String::from("forbidden"), + message: format!("Access was denied for {path}"), + }), + }, + broker::UpdateError::PermissionExpired => DataEntryError { + path: path.clone(), + error: Some(proto::Error { + code: 401, + reason: String::from("unauthorized"), + message: String::from("Unauthorized"), + }), + }, + } +} + +fn convert_to_proto_stream( + input: impl Stream, +) -> impl Stream> { + input.map(move |item| { + let mut updates = Vec::new(); + for update in item.updates { + updates.push(proto::EntryUpdate { + entry: Some(proto::DataEntry::from(update.update)), + fields: update + .fields + .iter() + .map(|field| proto::Field::from(field) as i32) + .collect(), + }); + } + let response = proto::SubscribeResponse { updates }; + Ok(response) + }) +} + +fn proto_entry_from_entry_and_fields( + entry: EntryReadAccess, + fields: HashSet, +) -> proto::DataEntry { + let path = entry.metadata().path.to_string(); + let value = if fields.contains(&proto::Field::Value) { + match entry.datapoint() { + Ok(value) => Option::::from(value.clone()), + Err(_) => None, + } + } else { + None + }; + let actuator_target = if fields.contains(&proto::Field::ActuatorTarget) { + match entry.actuator_target() { + Ok(value) => match value { + Some(value) => Option::::from(value.clone()), + None => None, + }, + Err(_) => None, + } + } else { + None + }; + let metadata = { + let mut metadata = proto::Metadata::default(); + let mut metadata_is_set = false; + + let all = fields.contains(&proto::Field::Metadata); + + if all || fields.contains(&proto::Field::MetadataDataType) { + metadata_is_set = true; + metadata.data_type = proto::DataType::from(entry.metadata().data_type.clone()) as i32; + } + if all || fields.contains(&proto::Field::MetadataDescription) { + metadata_is_set = true; + metadata.description = Some(entry.metadata().description.clone()); + } + if all || fields.contains(&proto::Field::MetadataEntryType) { + metadata_is_set = true; + metadata.entry_type = 
proto::EntryType::from(&entry.metadata().entry_type) as i32; + } + if all || fields.contains(&proto::Field::MetadataComment) { + metadata_is_set = true; + // TODO: Add to Metadata + metadata.comment = None; + } + if all || fields.contains(&proto::Field::MetadataDeprecation) { + metadata_is_set = true; + // TODO: Add to Metadata + metadata.deprecation = None; + } + if all || fields.contains(&proto::Field::MetadataUnit) { + metadata_is_set = true; + metadata.unit.clone_from(&entry.metadata().unit); + } + if all || fields.contains(&proto::Field::MetadataValueRestriction) { + metadata_is_set = true; + metadata.value_restriction = match entry.metadata().allowed.as_ref() { + Some(allowed) => match allowed { + broker::DataValue::StringArray(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::String( + proto::ValueRestrictionString { + allowed_values: vec.clone(), + }, + )), + }), + broker::DataValue::Int32Array(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::Signed( + proto::ValueRestrictionInt { + allowed_values: vec.iter().cloned().map(i64::from).collect(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + broker::DataValue::Int64Array(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::Signed( + proto::ValueRestrictionInt { + allowed_values: vec.clone(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + broker::DataValue::Uint32Array(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::Unsigned( + proto::ValueRestrictionUint { + allowed_values: vec.iter().cloned().map(u64::from).collect(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + broker::DataValue::Uint64Array(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::Unsigned( + proto::ValueRestrictionUint { + allowed_values: vec.clone(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + broker::DataValue::FloatArray(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::FloatingPoint( + proto::ValueRestrictionFloat { + allowed_values: vec.iter().cloned().map(f64::from).collect(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + broker::DataValue::DoubleArray(vec) => Some(proto::ValueRestriction { + r#type: Some(proto::value_restriction::Type::FloatingPoint( + proto::ValueRestrictionFloat { + allowed_values: vec.clone(), + min: None, // TODO: Implement + max: None, // TODO: Implement + }, + )), + }), + _ => None, + }, + None => None, + } + } + if all || fields.contains(&proto::Field::MetadataActuator) { + metadata_is_set = true; + // TODO: Add to Metadata + metadata.entry_specific = match entry.metadata().entry_type { + broker::EntryType::Actuator => { + // Some(proto::metadata::EntrySpecific::Actuator( + // proto::Actuator::default(), + // )); + None + } + broker::EntryType::Sensor | broker::EntryType::Attribute => None, + }; + } + if all || fields.contains(&proto::Field::MetadataSensor) { + metadata_is_set = true; + // TODO: Add to Metadata + metadata.entry_specific = match entry.metadata().entry_type { + broker::EntryType::Sensor => { + // Some(proto::metadata::EntrySpecific::Sensor( + // proto::Sensor::default(), + // )); + None + } + broker::EntryType::Attribute | broker::EntryType::Actuator => None, + }; + } + if all || fields.contains(&proto::Field::MetadataAttribute) { 
+ metadata_is_set = true; + // TODO: Add to Metadata + metadata.entry_specific = match entry.metadata().entry_type { + broker::EntryType::Attribute => { + // Some(proto::metadata::EntrySpecific::Attribute( + // proto::Attribute::default(), + // )); + None + } + broker::EntryType::Sensor | broker::EntryType::Actuator => None, + }; + } + + if metadata_is_set { + Some(metadata) + } else { + None + } + }; + proto::DataEntry { + path, + value, + actuator_target, + metadata, + } +} + +fn combine_view_and_fields( + view: proto::View, + fields: impl IntoIterator, +) -> HashSet { + let mut combined = HashSet::new(); + + combined.extend(fields); + + match view { + proto::View::Unspecified => { + // If no fields are specified, View::Unspecified will + // default to the equivalent of View::CurrentValue + if combined.is_empty() { + combined.insert(proto::Field::Path); + combined.insert(proto::Field::Value); + } + } + proto::View::CurrentValue => { + combined.insert(proto::Field::Path); + combined.insert(proto::Field::Value); + } + proto::View::TargetValue => { + combined.insert(proto::Field::Path); + combined.insert(proto::Field::ActuatorTarget); + } + proto::View::Metadata => { + combined.insert(proto::Field::Path); + combined.insert(proto::Field::Metadata); + } + proto::View::Fields => {} + proto::View::All => { + combined.insert(proto::Field::Path); + combined.insert(proto::Field::Value); + combined.insert(proto::Field::ActuatorTarget); + combined.insert(proto::Field::Metadata); + } + } + + combined +} + +impl broker::EntryUpdate { + fn from_proto_entry_and_fields( + entry: &proto::DataEntry, + fields: HashSet, + ) -> Self { + let datapoint = if fields.contains(&proto::Field::Value) { + entry + .value + .as_ref() + .map(|value| broker::Datapoint::from(value.clone())) + } else { + None + }; + let actuator_target = if fields.contains(&proto::Field::ActuatorTarget) { + match &entry.actuator_target { + Some(datapoint) => Some(Some(broker::Datapoint::from(datapoint.clone()))), + None => Some(None), + } + } else { + None + }; + Self { + path: None, + datapoint, + actuator_target, + entry_type: None, + data_type: None, + description: None, + allowed: None, + unit: None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{broker::DataBroker, permissions}; + use databroker_proto::kuksa::val::v1::val_server::Val; + + #[tokio::test] + async fn test_update_datapoint_using_wrong_type() { + let broker = DataBroker::default(); + let authorized_access = broker.authorized_access(&permissions::ALLOW_ALL); + + authorized_access + .add_entry( + "test.datapoint1".to_owned(), + broker::DataType::Bool, + broker::ChangeType::OnChange, + broker::EntryType::Sensor, + "Test datapoint 1".to_owned(), + None, + None, + ) + .await + .expect("Register datapoint should succeed"); + + let mut req = tonic::Request::new(proto::SetRequest { + updates: vec![proto::EntryUpdate { + fields: vec![proto::Field::Value as i32], + entry: Some(proto::DataEntry { + path: "test.datapoint1".to_owned(), + value: Some(proto::Datapoint { + timestamp: Some(std::time::SystemTime::now().into()), + value: Some(proto::datapoint::Value::Int32(1456)), + }), + metadata: None, + actuator_target: None, + }), + }], + }); + + // Manually insert permissions + req.extensions_mut().insert(permissions::ALLOW_ALL.clone()); + + match proto::val_server::Val::set(&broker, req) + .await + .map(|res| res.into_inner()) + { + Ok(set_response) => { + assert!( + !set_response.errors.is_empty(), + "databroker should not allow updating boolean datapoint with an 
int32" + ); + let error = set_response.errors[0] + .to_owned() + .error + .expect("error details are missing"); + assert_eq!(error.code, 400, "unexpected error code"); + } + Err(_status) => panic!("failed to execute set request"), + } + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 1)] + async fn test_streamed_update_with_valid_datapoint() { + let broker = DataBroker::default(); + let authorized_access = broker.authorized_access(&permissions::ALLOW_ALL); + + authorized_access + .add_entry( + "Vehicle.Speed".to_owned(), + broker::DataType::Float, + broker::ChangeType::OnChange, + broker::EntryType::Sensor, + "Test datapoint 1".to_owned(), + None, + Some("km/h".to_owned()), + ) + .await + .expect("Register datapoint should succeed"); + + let streamed_update_request = proto::StreamedUpdateRequest { + updates: vec![proto::EntryUpdate { + fields: vec![proto::Field::Value as i32], + entry: Some(proto::DataEntry { + path: "Vehicle.Speed".to_owned(), + value: Some(proto::Datapoint { + timestamp: Some(std::time::SystemTime::now().into()), + value: Some(proto::datapoint::Value::Float(120.0)), + }), + metadata: None, + actuator_target: None, + }), + }], + }; + + let mut streaming_request = tonic_mock::streaming_request(vec![streamed_update_request]); + streaming_request + .extensions_mut() + .insert(permissions::ALLOW_ALL.clone()); + match broker.streamed_update(streaming_request).await { + Ok(response) => { + tokio::spawn(async move { + let stream = response.into_inner(); + let mut receiver = stream.into_inner(); + let option = receiver.recv(); + assert!(option.await.is_none()) // no errors should occur and no ack is delivered + }); + } + Err(_) => { + panic!("Should not happen") + } + } + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 1)] + async fn test_streamed_update_with_invalid_datapoint() { + let broker = DataBroker::default(); + + let streamed_update_request = proto::StreamedUpdateRequest { + updates: vec![proto::EntryUpdate { + fields: vec![proto::Field::Value as i32], + entry: Some(proto::DataEntry { + path: "Vehicle.Invalid.Speed".to_owned(), + value: Some(proto::Datapoint { + timestamp: Some(std::time::SystemTime::now().into()), + value: Some(proto::datapoint::Value::Float(120.0)), + }), + metadata: None, + actuator_target: None, + }), + }], + }; + + let mut streaming_request = tonic_mock::streaming_request(vec![streamed_update_request]); + streaming_request + .extensions_mut() + .insert(permissions::ALLOW_ALL.clone()); + match broker.streamed_update(streaming_request).await { + Ok(response) => { + tokio::spawn(async move { + let stream = response.into_inner(); + let mut receiver = stream.into_inner(); + let option = receiver.recv().await; + assert!(option.is_some()); + let result = option.unwrap(); + let error_opt = result.unwrap().error; + let error = error_opt.unwrap(); + assert_eq!(error.code, 404); + assert_eq!(error.reason, "not_found"); + assert_eq!(error.message, "Vehicle.Invalid.Speed not found") + }); + } + Err(_) => { + panic!("Should not happen") + } + } + } + + #[tokio::test] + async fn test_get_datapoint_using_wildcard() { + let broker = DataBroker::default(); + let authorized_access = broker.authorized_access(&permissions::ALLOW_ALL); + + authorized_access + .add_entry( + "test.datapoint1".to_owned(), + broker::DataType::Bool, + broker::ChangeType::OnChange, + broker::EntryType::Sensor, + "Test datapoint 1".to_owned(), + None, + None, + ) + .await + .expect("Register datapoint should succeed"); + + authorized_access + .add_entry( + 
"test.branch.datapoint2".to_owned(), + broker::DataType::Bool, + broker::ChangeType::OnChange, + broker::EntryType::Sensor, + "Test branch datapoint 2".to_owned(), + None, + None, + ) + .await + .expect("Register datapoint should succeed"); + + let mut wildcard_req = tonic::Request::new(proto::GetRequest { + entries: vec![proto::EntryRequest { + path: "test.**".to_owned(), + view: proto::View::Metadata as i32, + fields: vec![proto::Field::Value as i32], + }], + }); + + // Manually insert permissions + wildcard_req + .extensions_mut() + .insert(permissions::ALLOW_ALL.clone()); + + match proto::val_server::Val::get(&broker, wildcard_req) + .await + .map(|res| res.into_inner()) + { + Ok(get_response) => { + assert!( + get_response.errors.is_empty(), + "databroker should not return any error" + ); + + let entries_size = get_response.entries.len(); + assert_eq!(entries_size, 2); + } + Err(_status) => panic!("failed to execute get request"), + } + } + + #[tokio::test] + async fn test_get_datapoint_bad_request_pattern_or_not_found() { + let broker = DataBroker::default(); + let authorized_access = broker.authorized_access(&permissions::ALLOW_ALL); + + authorized_access + .add_entry( + "test.datapoint1".to_owned(), + broker::DataType::Bool, + broker::ChangeType::OnChange, + broker::EntryType::Sensor, + "Test datapoint 1".to_owned(), + None, + None, + ) + .await + .expect("Register datapoint should succeed"); + + let mut wildcard_req = tonic::Request::new(proto::GetRequest { + entries: vec![proto::EntryRequest { + path: "test. **".to_owned(), + view: proto::View::Metadata as i32, + fields: vec![proto::Field::Value as i32], + }], + }); + + // Manually insert permissions + wildcard_req + .extensions_mut() + .insert(permissions::ALLOW_ALL.clone()); + + match proto::val_server::Val::get(&broker, wildcard_req) + .await + .map(|res| res.into_inner()) + { + Ok(get_response) => { + assert!( + !get_response.errors.is_empty(), + "databroker should not allow bad request wildcard pattern" + ); + let error = get_response + .error + .to_owned() + .expect("error details are missing"); + assert_eq!(error.code, 400, "unexpected error code"); + assert_eq!(error.reason, "bad_request", "unexpected error reason"); + } + Err(_status) => panic!("failed to execute get request"), + } + + let mut not_found_req = tonic::Request::new(proto::GetRequest { + entries: vec![proto::EntryRequest { + path: "test.notfound".to_owned(), + view: proto::View::Metadata as i32, + fields: vec![proto::Field::Value as i32], + }], + }); + + // Manually insert permissions + not_found_req + .extensions_mut() + .insert(permissions::ALLOW_ALL.clone()); + + match proto::val_server::Val::get(&broker, not_found_req) + .await + .map(|res| res.into_inner()) + { + Ok(get_response) => { + assert!( + !get_response.errors.is_empty(), + "databroker should not allow bad request wildcard pattern" + ); + let error = get_response + .error + .to_owned() + .expect("error details are missing"); + assert_eq!(error.code, 404, "unexpected error code"); + assert_eq!(error.reason, "not_found", "unexpected error reason"); + } + Err(_status) => panic!("failed to execute get request"), + } + } +} diff --git a/can-protocol-adapter/src/grpc/mod.rs b/can-protocol-adapter/src/grpc/mod.rs new file mode 100644 index 0000000..8c3be24 --- /dev/null +++ b/can-protocol-adapter/src/grpc/mod.rs @@ -0,0 +1,15 @@ +/******************************************************************************** +* Copyright (c) 2022 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) 
distributed with this work for additional
+* information regarding copyright ownership.
+*
+* This program and the accompanying materials are made available under the
+* terms of the Apache License 2.0 which is available at
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* SPDX-License-Identifier: Apache-2.0
+********************************************************************************/
+
+mod kuksa_val_v1;
+mod sdv_databroker_v1;
diff --git a/can-protocol-adapter/src/grpc/sdv_databroker_v1/collector.rs b/can-protocol-adapter/src/grpc/sdv_databroker_v1/collector.rs
new file mode 100644
index 0000000..4bec170
--- /dev/null
+++ b/can-protocol-adapter/src/grpc/sdv_databroker_v1/collector.rs
@@ -0,0 +1,266 @@
+/********************************************************************************
+* Copyright (c) 2022 Contributors to the Eclipse Foundation
+*
+* See the NOTICE file(s) distributed with this work for additional
+* information regarding copyright ownership.
+*
+* This program and the accompanying materials are made available under the
+* terms of the Apache License 2.0 which is available at
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* SPDX-License-Identifier: Apache-2.0
+********************************************************************************/
+
+use std::collections::HashMap;
+
+use databroker_proto::sdv::databroker::v1 as proto;
+
+use tokio::select;
+use tokio::sync::mpsc;
+use tokio_stream::wrappers::ReceiverStream;
+use tonic::{Code, Response, Status};
+use tracing::debug;
+
+use crate::{
+    broker::{self, RegistrationError},
+    permissions::Permissions,
+};
+
+#[tonic::async_trait]
+impl proto::collector_server::Collector for broker::DataBroker {
+    async fn update_datapoints(
+        &self,
+        request: tonic::Request<proto::UpdateDatapointsRequest>,
+    ) -> Result<Response<proto::UpdateDatapointsReply>, tonic::Status> {
+        debug!(?request);
+        let permissions = match request.extensions().get::<Permissions>() {
+            Some(permissions) => {
+                debug!(?permissions);
+                permissions.clone()
+            }
+            None => return Err(tonic::Status::unauthenticated("Unauthenticated")),
+        };
+        let broker = self.authorized_access(&permissions);
+
+        // Collect errors encountered
+        let mut errors = HashMap::new();
+
+        let message = request.into_inner();
+        let ids: Vec<(i32, broker::EntryUpdate)> = message
+            .datapoints
+            .iter()
+            .map(|(id, datapoint)| {
+                (
+                    *id,
+                    broker::EntryUpdate {
+                        path: None,
+                        datapoint: Some(broker::Datapoint::from(datapoint)),
+                        actuator_target: None,
+                        entry_type: None,
+                        data_type: None,
+                        description: None,
+                        allowed: None,
+                        unit: None,
+                    },
+                )
+            })
+            .collect();
+
+        match broker.update_entries(ids).await {
+            Ok(()) => {}
+            Err(err) => {
+                debug!("Failed to set datapoint: {:?}", err);
+                errors = err
+                    .iter()
+                    .map(|(id, error)| (*id, proto::DatapointError::from(error) as i32))
+                    .collect();
+            }
+        }
+
+        Ok(Response::new(proto::UpdateDatapointsReply { errors }))
+    }
+
+    type StreamDatapointsStream = ReceiverStream<Result<proto::StreamDatapointsReply, Status>>;
+
+    async fn stream_datapoints(
+        &self,
+        request: tonic::Request<tonic::Streaming<proto::StreamDatapointsRequest>>,
+    ) -> Result<Response<Self::StreamDatapointsStream>, tonic::Status> {
+        debug!(?request);
+        let permissions = match request.extensions().get::<Permissions>() {
+            Some(permissions) => {
+                debug!(?permissions);
+                permissions.clone()
+            }
+            None => return Err(tonic::Status::unauthenticated("Unauthenticated")),
+        };
+
+        let mut stream = request.into_inner();
+
+        let mut shutdown_trigger = self.get_shutdown_trigger();
+
+        // Copy (to move into task below)
+        let broker = self.clone();
+
+        // Create error stream (to be returned)
+        let (error_sender, error_receiver) = mpsc::channel(10);
+
+        // Listening on stream
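+        // The spawned task applies each incoming batch of datapoint updates and reports
+        // any per-datapoint errors back to the provider on the returned error stream.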
+        tokio::spawn(async move {
+            let permissions = permissions;
+            let broker = broker.authorized_access(&permissions);
+            loop {
+                select! {
+                    message = stream.message() => {
+                        match message {
+                            Ok(request) => {
+                                match request {
+                                    Some(req) => {
+                                        let ids: Vec<(i32, broker::EntryUpdate)> = req.datapoints
+                                            .iter()
+                                            .map(|(id, datapoint)|
+                                                (
+                                                    *id,
+                                                    broker::EntryUpdate {
+                                                        path: None,
+                                                        datapoint: Some(broker::Datapoint::from(datapoint)),
+                                                        actuator_target: None,
+                                                        entry_type: None,
+                                                        data_type: None,
+                                                        description: None,
+                                                        allowed: None,
+                                                        unit: None,
+                                                    }
+                                                )
+                                            )
+                                            .collect();
+                                        // TODO: Check if sender is allowed to provide datapoint with this id
+                                        match broker
+                                            .update_entries(ids)
+                                            .await
+                                        {
+                                            Ok(_) => {}
+                                            Err(err) => {
+                                                if let Err(err) = error_sender.send(
+                                                    Ok(proto::StreamDatapointsReply {
+                                                        errors: err.iter().map(|(id, error)| {
+                                                            (*id, proto::DatapointError::from(error) as i32)
+                                                        }).collect(),
+                                                    })
+                                                ).await {
+                                                    debug!("Failed to send errors: {}", err);
+                                                }
+                                            }
+                                        }
+                                    },
+                                    None => {
+                                        debug!("provider: no more messages");
+                                        break;
+                                    }
+                                }
+                            },
+                            Err(err) => {
+                                debug!("provider: connection broken: {:?}", err);
+                                break;
+                            },
+                        }
+                    },
+                    _ = shutdown_trigger.recv() => {
+                        debug!("provider: shutdown received");
+                        break;
+                    }
+                }
+            }
+        });
+
+        // Return the error stream
+        Ok(Response::new(ReceiverStream::new(error_receiver)))
+    }
+
+    async fn register_datapoints(
+        &self,
+        request: tonic::Request<proto::RegisterDatapointsRequest>,
+    ) -> Result<Response<proto::RegisterDatapointsReply>, Status> {
+        debug!(?request);
+        let permissions = match request.extensions().get::<Permissions>() {
+            Some(permissions) => {
+                debug!(?permissions);
+                permissions.clone()
+            }
+            None => return Err(tonic::Status::unauthenticated("Unauthenticated")),
+        };
+        let broker = self.authorized_access(&permissions);
+
+        let mut results = HashMap::new();
+        let mut error = None;
+
+        for metadata in request.into_inner().list {
+            match (
+                proto::DataType::try_from(metadata.data_type),
+                proto::ChangeType::try_from(metadata.change_type),
+            ) {
+                (Ok(value_type), Ok(change_type)) => {
+                    match broker
+                        .add_entry(
+                            metadata.name.clone(),
+                            broker::DataType::from(&value_type),
+                            broker::ChangeType::from(&change_type),
+                            broker::types::EntryType::Sensor,
+                            metadata.description,
+                            None,
+                            None,
+                        )
+                        .await
+                    {
+                        Ok(id) => results.insert(metadata.name, id),
+                        Err(RegistrationError::PermissionDenied) => {
+                            // Registration error
+                            error = Some(Status::new(
+                                Code::PermissionDenied,
+                                format!("Failed to register {}", metadata.name),
+                            ));
+                            break;
+                        }
+                        Err(RegistrationError::PermissionExpired) => {
+                            // Registration error
+                            error = Some(Status::new(
+                                Code::Unauthenticated,
+                                format!("Failed to register {}", metadata.name),
+                            ));
+                            break;
+                        }
+                        Err(RegistrationError::ValidationError) => {
+                            // Registration error
+                            error = Some(Status::new(
+                                Code::InvalidArgument,
+                                format!("Failed to register {}", metadata.name),
+                            ));
+                            break;
+                        }
+                    };
+                }
+                (Err(_), _) => {
+                    // Invalid data type
+                    error = Some(Status::new(
+                        Code::InvalidArgument,
+                        format!("Unsupported data type provided for {}", metadata.name),
+                    ));
+                    break;
+                }
+                (_, Err(_)) => {
+                    // Invalid change type
+                    error = Some(Status::new(
+                        Code::InvalidArgument,
+                        format!("Unsupported change type provided for {}", metadata.name),
+                    ));
+                    break;
+                }
+            }
+        }
+
+        match error {
+            Some(error) => Err(error),
+            None => Ok(Response::new(proto::RegisterDatapointsReply { results })),
+        }
+    }
+}
diff --git a/can-protocol-adapter/src/grpc/sdv_databroker_v1/conversions.rs b/can-protocol-adapter/src/grpc/sdv_databroker_v1/conversions.rs
new file mode 100644
index
0000000..d52600e
--- /dev/null
+++ b/can-protocol-adapter/src/grpc/sdv_databroker_v1/conversions.rs
@@ -0,0 +1,378 @@
+/********************************************************************************
+* Copyright (c) 2022 Contributors to the Eclipse Foundation
+*
+* See the NOTICE file(s) distributed with this work for additional
+* information regarding copyright ownership.
+*
+* This program and the accompanying materials are made available under the
+* terms of the Apache License 2.0 which is available at
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* SPDX-License-Identifier: Apache-2.0
+********************************************************************************/
+
+use databroker_proto::sdv::databroker::v1 as proto;
+
+use prost_types::Timestamp;
+use std::convert::TryInto;
+use std::time::SystemTime;
+
+use crate::broker;
+
+impl From<&proto::Datapoint> for broker::Datapoint {
+    fn from(datapoint: &proto::Datapoint) -> Self {
+        let value = broker::DataValue::from(datapoint);
+        let ts = SystemTime::now();
+
+        match &datapoint.timestamp {
+            Some(source_timestamp) => {
+                let source: Option<SystemTime> = match source_timestamp.clone().try_into() {
+                    Ok(source) => Some(source),
+                    Err(_) => None,
+                };
+                broker::Datapoint {
+                    ts,
+                    source_ts: source,
+                    value,
+                }
+            }
+            None => broker::Datapoint {
+                ts,
+                source_ts: None,
+                value,
+            },
+        }
+    }
+}
+
+impl From<&broker::Datapoint> for proto::Datapoint {
+    fn from(datapoint: &broker::Datapoint) -> Self {
+        let value = match &datapoint.value {
+            broker::DataValue::Bool(value) => proto::datapoint::Value::BoolValue(*value),
+            broker::DataValue::String(value) => {
+                proto::datapoint::Value::StringValue(value.to_owned())
+            }
+            broker::DataValue::Int32(value) => proto::datapoint::Value::Int32Value(*value),
+            broker::DataValue::Int64(value) => proto::datapoint::Value::Int64Value(*value),
+            broker::DataValue::Uint32(value) => proto::datapoint::Value::Uint32Value(*value),
+            broker::DataValue::Uint64(value) => proto::datapoint::Value::Uint64Value(*value),
+            broker::DataValue::Float(value) => proto::datapoint::Value::FloatValue(*value),
+            broker::DataValue::Double(value) => proto::datapoint::Value::DoubleValue(*value),
+            broker::DataValue::BoolArray(array) => {
+                proto::datapoint::Value::BoolArray(proto::BoolArray {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::StringArray(array) => {
+                proto::datapoint::Value::StringArray(proto::StringArray {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::Int32Array(array) => {
+                proto::datapoint::Value::Int32Array(proto::Int32Array {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::Int64Array(array) => {
+                proto::datapoint::Value::Int64Array(proto::Int64Array {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::Uint32Array(array) => {
+                proto::datapoint::Value::Uint32Array(proto::Uint32Array {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::Uint64Array(array) => {
+                proto::datapoint::Value::Uint64Array(proto::Uint64Array {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::FloatArray(array) => {
+                proto::datapoint::Value::FloatArray(proto::FloatArray {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::DoubleArray(array) => {
+                proto::datapoint::Value::DoubleArray(proto::DoubleArray {
+                    values: array.clone(),
+                })
+            }
+            broker::DataValue::NotAvailable => proto::datapoint::Value::FailureValue(
+                proto::datapoint::Failure::NotAvailable as i32,
+            ),
+        };
+
+        proto::Datapoint {
+            timestamp: Some(datapoint.ts.into()),
+            value: Some(value),
+        }
+    }
+}
+
+impl From<&broker::QueryField>
for proto::Datapoint { + fn from(query_field: &broker::QueryField) -> Self { + let value = match &query_field.value { + broker::DataValue::Bool(value) => proto::datapoint::Value::BoolValue(*value), + broker::DataValue::String(value) => { + proto::datapoint::Value::StringValue(value.to_owned()) + } + broker::DataValue::Int32(value) => proto::datapoint::Value::Int32Value(*value), + broker::DataValue::Int64(value) => proto::datapoint::Value::Int64Value(*value), + broker::DataValue::Uint32(value) => proto::datapoint::Value::Uint32Value(*value), + broker::DataValue::Uint64(value) => proto::datapoint::Value::Uint64Value(*value), + broker::DataValue::Float(value) => proto::datapoint::Value::FloatValue(*value), + broker::DataValue::Double(value) => proto::datapoint::Value::DoubleValue(*value), + broker::DataValue::BoolArray(array) => { + proto::datapoint::Value::BoolArray(proto::BoolArray { + values: array.clone(), + }) + } + broker::DataValue::StringArray(array) => { + proto::datapoint::Value::StringArray(proto::StringArray { + values: array.clone(), + }) + } + broker::DataValue::Int32Array(array) => { + proto::datapoint::Value::Int32Array(proto::Int32Array { + values: array.clone(), + }) + } + broker::DataValue::Int64Array(array) => { + proto::datapoint::Value::Int64Array(proto::Int64Array { + values: array.clone(), + }) + } + broker::DataValue::Uint32Array(array) => { + proto::datapoint::Value::Uint32Array(proto::Uint32Array { + values: array.clone(), + }) + } + broker::DataValue::Uint64Array(array) => { + proto::datapoint::Value::Uint64Array(proto::Uint64Array { + values: array.clone(), + }) + } + broker::DataValue::FloatArray(array) => { + proto::datapoint::Value::FloatArray(proto::FloatArray { + values: array.clone(), + }) + } + broker::DataValue::DoubleArray(array) => { + proto::datapoint::Value::DoubleArray(proto::DoubleArray { + values: array.clone(), + }) + } + broker::DataValue::NotAvailable => proto::datapoint::Value::FailureValue( + proto::datapoint::Failure::NotAvailable.into(), + ), + }; + + proto::Datapoint { + timestamp: Some(Timestamp::from(SystemTime::now())), + value: Some(value), + } + } +} + +impl From<&proto::DataType> for broker::DataType { + fn from(data_type: &proto::DataType) -> Self { + match data_type { + proto::DataType::Bool => broker::DataType::Bool, + proto::DataType::String => broker::DataType::String, + proto::DataType::Int8 => broker::DataType::Int8, + proto::DataType::Int16 => broker::DataType::Int16, + proto::DataType::Int32 => broker::DataType::Int32, + proto::DataType::Int64 => broker::DataType::Int64, + proto::DataType::Uint8 => broker::DataType::Uint8, + proto::DataType::Uint16 => broker::DataType::Uint16, + proto::DataType::Uint32 => broker::DataType::Uint32, + proto::DataType::Uint64 => broker::DataType::Uint64, + proto::DataType::Float => broker::DataType::Float, + proto::DataType::Double => broker::DataType::Double, + proto::DataType::StringArray => broker::DataType::StringArray, + proto::DataType::BoolArray => broker::DataType::BoolArray, + proto::DataType::Int8Array => broker::DataType::Int8Array, + proto::DataType::Int16Array => broker::DataType::Int16Array, + proto::DataType::Int32Array => broker::DataType::Int32Array, + proto::DataType::Int64Array => broker::DataType::Int64Array, + proto::DataType::Uint8Array => broker::DataType::Uint8Array, + proto::DataType::Uint16Array => broker::DataType::Uint16Array, + proto::DataType::Uint32Array => broker::DataType::Uint32Array, + proto::DataType::Uint64Array => broker::DataType::Uint64Array, + 
proto::DataType::FloatArray => broker::DataType::FloatArray, + proto::DataType::DoubleArray => broker::DataType::DoubleArray, + } + } +} + +impl From<&proto::Datapoint> for broker::DataValue { + fn from(datapoint: &proto::Datapoint) -> Self { + match &datapoint.value { + Some(value) => match value { + proto::datapoint::Value::StringValue(value) => { + broker::DataValue::String(value.to_owned()) + } + proto::datapoint::Value::BoolValue(value) => broker::DataValue::Bool(*value), + proto::datapoint::Value::Int32Value(value) => broker::DataValue::Int32(*value), + proto::datapoint::Value::Int64Value(value) => broker::DataValue::Int64(*value), + proto::datapoint::Value::Uint32Value(value) => broker::DataValue::Uint32(*value), + proto::datapoint::Value::Uint64Value(value) => broker::DataValue::Uint64(*value), + proto::datapoint::Value::FloatValue(value) => broker::DataValue::Float(*value), + proto::datapoint::Value::DoubleValue(value) => broker::DataValue::Double(*value), + proto::datapoint::Value::StringArray(array) => { + broker::DataValue::StringArray(array.values.clone()) + } + proto::datapoint::Value::BoolArray(array) => { + broker::DataValue::BoolArray(array.values.clone()) + } + proto::datapoint::Value::Int32Array(array) => { + broker::DataValue::Int32Array(array.values.clone()) + } + proto::datapoint::Value::Int64Array(array) => { + broker::DataValue::Int64Array(array.values.clone()) + } + proto::datapoint::Value::Uint32Array(array) => { + broker::DataValue::Uint32Array(array.values.clone()) + } + proto::datapoint::Value::Uint64Array(array) => { + broker::DataValue::Uint64Array(array.values.clone()) + } + proto::datapoint::Value::FloatArray(array) => { + broker::DataValue::FloatArray(array.values.clone()) + } + proto::datapoint::Value::DoubleArray(array) => { + broker::DataValue::DoubleArray(array.values.clone()) + } + proto::datapoint::Value::FailureValue(_) => broker::DataValue::NotAvailable, + }, + None => broker::DataValue::NotAvailable, + } + } +} + +impl From<&broker::DataType> for proto::DataType { + fn from(value_type: &broker::DataType) -> Self { + match value_type { + broker::DataType::Bool => proto::DataType::Bool, + broker::DataType::String => proto::DataType::String, + broker::DataType::Int8 => proto::DataType::Int8, + broker::DataType::Int16 => proto::DataType::Int16, + broker::DataType::Int32 => proto::DataType::Int32, + broker::DataType::Int64 => proto::DataType::Int64, + broker::DataType::Uint8 => proto::DataType::Uint8, + broker::DataType::Uint16 => proto::DataType::Uint16, + broker::DataType::Uint32 => proto::DataType::Uint32, + broker::DataType::Uint64 => proto::DataType::Uint64, + broker::DataType::Float => proto::DataType::Float, + broker::DataType::Double => proto::DataType::Double, + broker::DataType::StringArray => proto::DataType::StringArray, + broker::DataType::BoolArray => proto::DataType::BoolArray, + broker::DataType::Int8Array => proto::DataType::Int8Array, + broker::DataType::Int16Array => proto::DataType::Int16Array, + broker::DataType::Int32Array => proto::DataType::Int32Array, + broker::DataType::Int64Array => proto::DataType::Int64Array, + broker::DataType::Uint8Array => proto::DataType::Uint8Array, + broker::DataType::Uint16Array => proto::DataType::Uint16Array, + broker::DataType::Uint32Array => proto::DataType::Uint32Array, + broker::DataType::Uint64Array => proto::DataType::Uint64Array, + broker::DataType::FloatArray => proto::DataType::FloatArray, + broker::DataType::DoubleArray => proto::DataType::DoubleArray, + } + } +} + +impl 
From<&broker::EntryType> for proto::EntryType { + fn from(entry_type: &broker::EntryType) -> Self { + match entry_type { + broker::EntryType::Sensor => proto::EntryType::Sensor, + broker::EntryType::Attribute => proto::EntryType::Attribute, + broker::EntryType::Actuator => proto::EntryType::Actuator, + } + } +} + +impl From<&proto::ChangeType> for broker::ChangeType { + fn from(change_type: &proto::ChangeType) -> Self { + match change_type { + proto::ChangeType::OnChange => broker::ChangeType::OnChange, + proto::ChangeType::Continuous => broker::ChangeType::Continuous, + proto::ChangeType::Static => broker::ChangeType::Static, + } + } +} + +impl From<&broker::Metadata> for proto::Metadata { + fn from(metadata: &broker::Metadata) -> Self { + proto::Metadata { + id: metadata.id, + entry_type: proto::EntryType::from(&metadata.entry_type) as i32, + name: metadata.path.to_owned(), + data_type: proto::DataType::from(&metadata.data_type) as i32, + change_type: proto::ChangeType::Continuous as i32, // TODO: Add to metadata + description: metadata.description.to_owned(), + allowed: match metadata.allowed.as_ref() { + Some(broker::DataValue::StringArray(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::StringValues(proto::StringArray { + values: vec.clone(), + })), + }), + Some(broker::DataValue::Int32Array(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::Int32Values(proto::Int32Array { + values: vec.clone(), + })), + }), + Some(broker::DataValue::Int64Array(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::Int64Values(proto::Int64Array { + values: vec.clone(), + })), + }), + Some(broker::DataValue::Uint32Array(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::Uint32Values(proto::Uint32Array { + values: vec.clone(), + })), + }), + Some(broker::DataValue::Uint64Array(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::Uint64Values(proto::Uint64Array { + values: vec.clone(), + })), + }), + Some(broker::DataValue::FloatArray(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::FloatValues(proto::FloatArray { + values: vec.clone(), + })), + }), + Some(broker::DataValue::DoubleArray(vec)) => Some(proto::Allowed { + values: Some(proto::allowed::Values::DoubleValues(proto::DoubleArray { + values: vec.clone(), + })), + }), + Some(broker::DataValue::BoolArray(_)) + | Some(broker::DataValue::NotAvailable) + | Some(broker::DataValue::Bool(_)) + | Some(broker::DataValue::String(_)) + | Some(broker::DataValue::Int32(_)) + | Some(broker::DataValue::Int64(_)) + | Some(broker::DataValue::Uint32(_)) + | Some(broker::DataValue::Uint64(_)) + | Some(broker::DataValue::Float(_)) + | Some(broker::DataValue::Double(_)) + | None => None, + }, + } + } +} + +impl From<&broker::UpdateError> for proto::DatapointError { + fn from(error: &broker::UpdateError) -> Self { + match error { + broker::UpdateError::NotFound => proto::DatapointError::UnknownDatapoint, + broker::UpdateError::WrongType | broker::UpdateError::UnsupportedType => { + proto::DatapointError::InvalidType + } + broker::UpdateError::OutOfBounds => proto::DatapointError::OutOfBounds, + broker::UpdateError::PermissionDenied => proto::DatapointError::AccessDenied, + broker::UpdateError::PermissionExpired => proto::DatapointError::AccessDenied, + } + } +} diff --git a/can-protocol-adapter/src/grpc/sdv_databroker_v1/mod.rs b/can-protocol-adapter/src/grpc/sdv_databroker_v1/mod.rs new file mode 100644 index 0000000..3690bcb --- /dev/null +++ 
b/can-protocol-adapter/src/grpc/sdv_databroker_v1/mod.rs @@ -0,0 +1,16 @@ +/******************************************************************************** +* Copyright (c) 2022 Contributors to the Eclipse Foundation +* +* See the NOTICE file(s) distributed with this work for additional +* information regarding copyright ownership. +* +* This program and the accompanying materials are made available under the +* terms of the Apache License 2.0 which is available at +* http://www.apache.org/licenses/LICENSE-2.0 +* +* SPDX-License-Identifier: Apache-2.0 +********************************************************************************/ + + +mod collector; +mod conversions; diff --git a/can-protocol-adapter/src/kuksa_feeder/feeder.rs b/can-protocol-adapter/src/kuksa_feeder/feeder.rs new file mode 100644 index 0000000..258a865 --- /dev/null +++ b/can-protocol-adapter/src/kuksa_feeder/feeder.rs @@ -0,0 +1,149 @@ +/******************************************************************************** + * Copyright (c) 2024 Contributors to the Eclipse Foundation + * + * See the NOTICE file(s) distributed with this work for additional + * information regarding copyright ownership. + * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ +use http::Uri; +use log::{error, info}; +use std::collections::HashMap; +use std::error::Error as StdError; + +use databroker_proto::kuksa::val::{self as proto, v1::Datapoint}; +use kuksa::KuksaClient; +use proto::v1; +/// The `Feeder` struct manages the connection to a Kuksa Data Broker +/// and provides methods to interact with it. 
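+/// Typical call order: `connect_to_databroker`, then `register_datapoints`, followed by
+/// repeated `set_datapoint_values` calls as CAN data is decoded.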
+pub struct Feeder {
+    client: Option<KuksaClient>,
+    broker_ip: String,
+    broker_port: String,
+}
+
+impl Feeder {
+    pub fn new(broker_ip: String, broker_port: String) -> Self {
+        Self {
+            client: None,
+            broker_ip,
+            broker_port,
+        }
+    }
+
+    pub async fn connect_to_databroker(&mut self) -> Result<(), Box<dyn StdError>> {
+        match self.client {
+            Some(_) => {
+                info!("Already connected to data broker.");
+                Ok(())
+            }
+            None => {
+                let broker_address = format!("http://{}:{}/", self.broker_ip, self.broker_port);
+                let uri: Uri = broker_address.parse().expect("Invalid broker URI");
+                // Create a new Kuksa client instance
+                let mut kuksa_client = KuksaClient::new(uri);
+                // Attempt connection
+                match kuksa_client.basic_client.try_connect().await {
+                    Ok(_) => {
+                        info!(
+                            "Successfully connected to the databroker at {}",
+                            broker_address
+                        );
+                        self.client = Some(kuksa_client);
+                    }
+                    Err(err) => {
+                        error!("Failed to connect to Kuksa server: {}", err);
+                        return Err(err.into());
+                    }
+                };
+                Ok(())
+            }
+        }
+    }
+
+    pub async fn register_datapoints(
+        &mut self,
+        datapoints: HashMap<String, v1::Metadata>,
+    ) -> Result<(), Box<dyn StdError>> {
+        match &mut self.client {
+            Some(client) => {
+                // Attempt to register datapoints
+                let result = client.set_metadata(datapoints).await;
+
+                match result {
+                    Ok(_) => {
+                        info!("Successfully set metadata values.");
+                        Ok(())
+                    }
+                    Err(err) => {
+                        error!("Failed to set metadata: {}", err);
+                        Err(Box::new(err))
+                    }
+                }
+            }
+            None => {
+                let err_msg = "Not connected to databroker";
+                error!("{}", err_msg);
+                Err(err_msg.into())
+            }
+        }
+    }
+
+    pub async fn set_datapoint_values(
+        &mut self,
+        signal: &str,
+        signal_value: f64,
+        value_type: &str,
+    ) -> Result<(), Box<dyn StdError>> {
+        // Convert the signal value to the corresponding protobuf Value type.
+        let value = match value_type {
+            "float" => Some(proto::v1::datapoint::Value::Float(signal_value as f32)),
+            "Int32" => Some(proto::v1::datapoint::Value::Int32(signal_value as i32)),
+            "String" => Some(proto::v1::datapoint::Value::String(
+                signal_value.to_string(),
+            )),
+            "double" | "Double" => Some(proto::v1::datapoint::Value::Double(signal_value)),
+            "uint32" | "UInt32" => Some(proto::v1::datapoint::Value::Uint32(signal_value as u32)),
+            "bool" | "Bool" | "boolean" | "Boolean" => {
+                Some(proto::v1::datapoint::Value::Bool(signal_value != 0.0))
+            }
+            _ => {
+                error!("Unsupported value type: {}", value_type);
+                return Err("Unsupported value type".into());
+            }
+        };
+        // Create the Datapoint with the converted value.
+        let mut datapoints = HashMap::new();
+        datapoints.insert(
+            signal.to_string(),
+            Datapoint {
+                value,
+                timestamp: None,
+            },
+        );
+        // Set the datapoint value using the client.
+        match self.client.as_mut() {
+            Some(client) => match client.set_current_values(datapoints).await {
+                Ok(_) => {
+                    info!(
+                        "Successfully set datapoint value for signal: {}, value: {}",
+                        signal, signal_value
+                    );
+                    Ok(())
+                }
+                Err(err) => {
+                    error!("Failed to set datapoint value for {}: {}", signal, err);
+                    Err(err.into())
+                }
+            },
+            None => {
+                error!("Not connected to databroker");
+                Err("Not connected to databroker".into())
+            }
+        }
+    }
+}
diff --git a/can-protocol-adapter/src/kuksa_feeder/mod.rs b/can-protocol-adapter/src/kuksa_feeder/mod.rs
new file mode 100644
index 0000000..d95f372
--- /dev/null
+++ b/can-protocol-adapter/src/kuksa_feeder/mod.rs
@@ -0,0 +1,13 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License 2.0 which is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ ********************************************************************************/
+pub mod feeder;
diff --git a/can-protocol-adapter/src/main.rs b/can-protocol-adapter/src/main.rs
new file mode 100644
index 0000000..b58df66
--- /dev/null
+++ b/can-protocol-adapter/src/main.rs
@@ -0,0 +1,160 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License 2.0 which is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ ********************************************************************************/
+use clap::Parser;
+use log::{debug, error, info};
+use std::error::Error;
+use std::sync::Arc;
+use tokio::sync::mpsc;
+use tokio::sync::Mutex;
+use tokio::time::{self};
+
+mod can;
+mod kuksa_feeder;
+mod utils;
+
+use can::comm;
+use can::decoder::Decoder;
+use kuksa_feeder::feeder::Feeder;
+use utils::adapter_config::AdapterConfig;
+use utils::adapter_utils;
+
+#[derive(Parser)]
+struct Args {
+    #[arg(short, long, help = "Path to JSON configuration file")]
+    config: String,
+}
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn Error>> {
+    env_logger::init_from_env(env_logger::Env::new().default_filter_or("info"));
+
+    // Parse command line arguments to get the configuration file path.
+    let args = Args::parse();
+    info!(
+        "Starting CAN Protocol adapter v{} with config file: {}",
+        env!("CARGO_PKG_VERSION"),
+        &args.config
+    );
+    // Read adapter configuration from the JSON file.
+    let adapter_config = match adapter_utils::read_config(&args.config) {
+        Ok(adapter_config) => adapter_config,
+        Err(err) => {
+            panic!(
+                "Failed to open configuration file at path {}, {}",
+                &args.config, err
+            );
+        }
+    };
+
+    // Validate the adapter configuration.
+    adapter_utils::validate_adapter_config(&adapter_config)?;
+
+    // Get broker IP and port from the adapter configuration.
+    let broker_ip = adapter_config.general_config.broker_ip.clone();
+    let broker_port = adapter_config.general_config.broker_port.clone();
+
+    // Create a new Feeder instance and connect to the data broker.
+    let mut feeder = Feeder::new(broker_ip.clone(), broker_port.clone());
+    match feeder.connect_to_databroker().await {
+        Ok(_) => {
+            debug!(
+                "Successfully connected to the databroker at {}:{}",
+                broker_ip, broker_port
+            );
+        }
+        Err(e) => {
+            error!("Failed to connect to databroker: {:?}", e);
+            return Err(e);
+        }
+    }
+
+    // Load the DBC file and create a Decoder instance.
+    let dbc_file_path = adapter_config.general_config.dbcfile.clone();
+    let decoder = match Decoder::new(&dbc_file_path) {
+        Ok(decoder) => {
+            let log_message = format!(
+                "DBC file loaded from path: {}. DBC File Parsing successful.",
+                dbc_file_path
+            );
+            info!("{}", log_message);
+            decoder
+        }
+        Err(_e) => {
+            panic!("Critical error: Could not parse DBC file. Exiting.");
+        }
+    };
+
+    // Register the user defined datapoints with the data broker.
+    let datapoints = adapter_utils::datapoints_from_config(&adapter_config);
+    match feeder.register_datapoints(datapoints).await {
+        Ok(_) => {
+            info!("Successfully registered datapoints.");
+        }
+        Err(e) => {
+            error!("Failed to register datapoints: {:?}", e);
+            return Err(e);
+        }
+    }
+
+    // Initialize the CAN socket.
+    let socket = match comm::initialize_can_socket(&adapter_config).await {
+        Ok(socket) => socket,
+        Err(err) => {
+            error!("Error initializing socket: {}", err);
+            return Err(err);
+        }
+    };
+
+    // Channels for inter-task communication
+    let (pid_tx, pid_rx) = mpsc::channel::(256);
+    let (res_tx, res_rx) = mpsc::channel::(256);
+
+    // Create shared resources using Arc and Mutex.
+    let shared_socket = Arc::new(Mutex::new(socket));
+    let shared_feeder = Arc::new(Mutex::new(feeder));
+    let shared_decoder = Arc::new(Mutex::new(decoder));
+    let adapter_config = Arc::new(adapter_config);
+
+    // Spawn a task for sending CAN data.
+    let send_task_handle = tokio::spawn({
+        let socket_instance = Arc::clone(&shared_socket);
+        let adapter_config = Arc::clone(&adapter_config);
+        async move {
+            comm::send_can_data(socket_instance, adapter_config, pid_tx, res_rx).await;
+        }
+    });
+
+    // Spawn a task for receiving CAN data.
+    let receive_task_handle = tokio::spawn({
+        let socket_instance = Arc::clone(&shared_socket);
+        let adapter_config = Arc::clone(&adapter_config);
+        let feeder_instance = Arc::clone(&shared_feeder);
+        async move {
+            comm::receive_can_data(
+                socket_instance,
+                adapter_config,
+                feeder_instance,
+                shared_decoder,
+                pid_rx,
+                res_tx,
+            )
+            .await;
+        }
+    });
+
+    // Wait for both tasks to complete.
+    let _ = send_task_handle.await;
+    let _ = receive_task_handle.await;
+
+    Ok(())
+}
diff --git a/can-protocol-adapter/src/utils/adapter_config.rs b/can-protocol-adapter/src/utils/adapter_config.rs
new file mode 100644
index 0000000..162b193
--- /dev/null
+++ b/can-protocol-adapter/src/utils/adapter_config.rs
@@ -0,0 +1,88 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License 2.0 which is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ ********************************************************************************/
+
+use serde::{Deserialize, Deserializer};
+
+#[derive(Deserialize, Clone)]
+pub struct AdapterConfig {
+    pub general_config: GeneralConfig,
+    pub can_config: CanConfig,
+    pub pid_table: Vec<PidEntry>,
+}
+
+#[derive(Deserialize, Clone)]
+pub struct GeneralConfig {
+    pub broker_ip: String,
+    pub broker_port: String,
+    pub dbcfile: String,
+}
+
+#[derive(Deserialize, Clone)]
+pub struct CanConfig {
+    pub can_interface: String,
+    // The 'use_extended_id' field is currently unused but will be used in future development.
+    #[allow(dead_code)]
+    pub use_extended_id: bool,
+    #[serde(deserialize_with = "from_hex_string")]
+    pub tx_id: u32,
+    #[serde(deserialize_with = "from_hex_string")]
+    pub rx_id: u32,
+    pub socket_can_type: String,
+    pub socket_can_protocol: String,
+}
+
+#[derive(Deserialize, Clone)]
+pub struct PidEntry {
+    #[serde(deserialize_with = "from_hex_string_to_bytes")]
+    pub request_pid: Vec<u8>,
+    #[serde(deserialize_with = "from_hex_string_to_bytes")]
+    pub response_pid: Vec<u8>,
+    pub response_timeout_ms: u32,
+    // The 'description' field is currently unused.
+    #[allow(dead_code)]
+    pub description: String,
+    // The 'expected_response_length' field is currently unused but will be used in future development.
+    #[allow(dead_code)]
+    pub expected_response_length: u32,
+    pub interval_ms: u32,
+    pub dbc_signal_name: String,
+    pub vss_signal: VssSignal,
+}
+
+#[derive(Deserialize, Clone)]
+pub struct VssSignal {
+    pub signal_name: String,
+    pub datatype: String,
+    pub unit: String,
+}
+
+fn from_hex_string<'de, D>(deserializer: D) -> Result<u32, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s: &str = Deserialize::deserialize(deserializer)?;
+    u32::from_str_radix(s.trim_start_matches("0x"), 16).map_err(serde::de::Error::custom)
+}
+
+fn from_hex_string_to_bytes<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s: &str = Deserialize::deserialize(deserializer)?;
+    let bytes: Result<Vec<u8>, _> = s
+        .split_whitespace()
+        .map(|hex_str| u8::from_str_radix(hex_str.trim_start_matches("0x"), 16))
+        .collect();
+
+    bytes.map_err(serde::de::Error::custom)
+}
diff --git a/can-protocol-adapter/src/utils/adapter_utils.rs b/can-protocol-adapter/src/utils/adapter_utils.rs
new file mode 100644
index 0000000..e7142be
--- /dev/null
+++ b/can-protocol-adapter/src/utils/adapter_utils.rs
@@ -0,0 +1,102 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ *
+ * This program and the accompanying materials are made available under the
+ * terms of the Apache License 2.0 which is available at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ ********************************************************************************/
+
+use crate::AdapterConfig;
+use databroker_proto::kuksa::val as proto;
+use log::warn;
+use proto::v1;
+use std::collections::HashMap;
+use std::{error::Error, fs};
+
+pub fn read_config(path: &str) -> Result<AdapterConfig, Box<dyn Error>> {
+    let config_str = fs::read_to_string(path)?;
+    let config = serde_json::from_str(&config_str)?;
+
+    Ok(config)
+}
+
+pub fn validate_adapter_config(config: &AdapterConfig) -> Result<(), Box<dyn Error>> {
+    // Validate general config
+    if config.general_config.dbcfile.is_empty() {
+        return Err("DBC file not specified in configuration".into());
+    }
+    if config.general_config.broker_ip.is_empty() {
+        return Err("Broker IP address not specified in configuration".into());
+    }
+    if config.general_config.broker_port.is_empty() {
+        return Err("Broker port not specified in configuration".into());
+    }
+
+    // Validate CAN config
+    if config.can_config.can_interface.is_empty() {
+        return Err("CAN interface not specified in CAN configuration".into());
+    }
+    if config.can_config.socket_can_type.is_empty() {
+        return Err("Socket CAN type not specified in CAN configuration".into());
+    }
+    if config.can_config.socket_can_protocol.is_empty() {
+        return Err("Socket CAN protocol not specified in CAN configuration".into());
+    }
+
+    // Validate PID table entries
+    if config.pid_table.is_empty() {
+        return Err("No PID entries found in configuration".into());
+    }
+    let mut valid_entry_found = false;
+    for (i, entry) in config.pid_table.iter().enumerate() {
+        if entry.request_pid.is_empty() {
+            warn!("Warning: Request PID is empty for entry {} in PID table", i);
+            continue;
+        }
+        valid_entry_found = true;
+    }
+    if !valid_entry_found {
+        return Err("No valid PID entries found in configuration".into());
+    }
+
+    if config.pid_table.len() == 1 {
+        warn!("Warning: Only one valid PID entry found in configuration.");
+    }
+
+    Ok(())
+}
+
+pub fn datapoints_from_config(adapter_config: &AdapterConfig) -> HashMap<String, v1::Metadata> {
+    adapter_config
+        .pid_table
+        .iter()
+        .map(|pid_entry| {
+            let vss_signal = &pid_entry.vss_signal;
+            (
+                vss_signal.signal_name.to_string(),
+                v1::Metadata {
+                    entry_type: 12,
+                    comment: Some("none".to_string()),
+                    deprecation: None,
+                    value_restriction: None,
+                    entry_specific: None,
+                    description: Some(format!("{} ({})", vss_signal.signal_name, vss_signal.unit)),
+                    data_type: match vss_signal.datatype.as_str() {
+                        "float" => v1::DataType::Float as i32,
+                        other_type => {
+                            // Handle other types or use an appropriate error handling mechanism
+                            // Panic should ideally be avoided in production code. Consider returning a Result.
+                            panic!("Unsupported datatype: {}", other_type)
+                        }
+                    },
+                    unit: Some(vss_signal.unit.to_string()), // unit as configured for the VSS signal
+                },
+            )
+        })
+        .collect()
+}
diff --git a/can-protocol-adapter/src/utils/mod.rs b/can-protocol-adapter/src/utils/mod.rs
new file mode 100644
index 0000000..b32240d
--- /dev/null
+++ b/can-protocol-adapter/src/utils/mod.rs
@@ -0,0 +1,14 @@
+/********************************************************************************
+ * Copyright (c) 2024 Contributors to the Eclipse Foundation
+ *
+ * See the NOTICE file(s) distributed with this work for additional
+ * information regarding copyright ownership.
+ * + * This program and the accompanying materials are made available under the + * terms of the Apache License 2.0 which is available at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * SPDX-License-Identifier: Apache-2.0 + ********************************************************************************/ +pub mod adapter_config; +pub mod adapter_utils;
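
Note: the deserializers and structs in adapter_config.rs imply a JSON configuration along the following lines. This is an illustrative sketch only; every value below (interface name, CAN IDs, PIDs, signal and DBC names) is hypothetical and not taken from the adapter's actual configuration file:

    {
      "general_config": {
        "broker_ip": "127.0.0.1",
        "broker_port": "55555",
        "dbcfile": "path/to/your.dbc"
      },
      "can_config": {
        "can_interface": "can0",
        "use_extended_id": false,
        "tx_id": "0x7DF",
        "rx_id": "0x7E8",
        "socket_can_type": "RAW",
        "socket_can_protocol": "CAN_ISOTP"
      },
      "pid_table": [
        {
          "request_pid": "0x01 0x0D",
          "response_pid": "0x41 0x0D",
          "response_timeout_ms": 100,
          "description": "Vehicle speed",
          "expected_response_length": 4,
          "interval_ms": 500,
          "dbc_signal_name": "ExampleSpeedSignal",
          "vss_signal": {
            "signal_name": "Vehicle.Speed",
            "datatype": "float",
            "unit": "km/h"
          }
        }
      ]
    }

The hex fields are given as strings ("0x..." for tx_id/rx_id, whitespace-separated byte lists for the PIDs) because from_hex_string and from_hex_string_to_bytes parse them from strings during deserialization.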