diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 376d4fb5167b..cdd9347e7980 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -159,14 +159,15 @@ Update instructions: ### Application-level store -| Crate | Description | -|----------------------|--------------------------------------------------------------------------| -| re_entity_db | In-memory storage of Rerun entities | -| re_query | Querying data in the re_chunk_store | -| re_dataframe | The Rerun public data APIs. | -| re_types | The built-in Rerun data types, component types, and archetypes. | -| re_types_blueprint | The core traits and types that power Rerun's Blueprint sub-system. | -| re_log_encoding | Helpers for encoding and transporting Rerun log messages | +| Crate | Description | +|-----------------------|--------------------------------------------------------------------------| +| re_entity_db | In-memory storage of Rerun entities | +| re_query | Querying data in the re_chunk_store | +| re_dataframe | The Rerun public data APIs. | +| re_remote_store_types | Rerun remote store gRPC API types | +| re_types | The built-in Rerun data types, component types, and archetypes. | +| re_types_blueprint | The core traits and types that power Rerun's Blueprint sub-system. | +| re_log_encoding | Helpers for encoding and transporting Rerun log messages | ### Low-level store @@ -193,12 +194,13 @@ Update instructions: ### Build support -| Crate | Description | -|----------------------------|------------------------------------------------------------------| -| re_build_info | Information about the build. Use together with re_build_tools | -| re_build_tools | build.rs helpers for generating build info | -| re_types_builder | Generates code for Rerun's SDKs from flatbuffers definitions. | -| re_dev_tools | Various tools for Rerun development. Each tool has a subcommand. 
| +| Crate | Description | +|-------------------------------|------------------------------------------------------------------| +| re_build_info | Information about the build. Use together with re_build_tools | +| re_build_tools | build.rs helpers for generating build info | +| re_remote_store_types_builder | Generates code for Rerun remote store gRPC API | +| re_types_builder | Generates code for Rerun's SDKs from flatbuffers definitions. | +| re_dev_tools | Various tools for Rerun development. Each tool has a subcommand. | ### Utilities diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e424cfb9f79..774f572fdbe2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ ## [0.19.0](https://github.com/rerun-io/rerun/compare/0.18.2...0.19.0) - Dataframes & Video support + 📖 Release blogpost: Coming soon! diff --git a/Cargo.lock b/Cargo.lock index b035239b42ff..dda16c809997 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -630,7 +630,7 @@ dependencies = [ "polling 2.8.0", "rustix 0.37.27", "slab", - "socket2", + "socket2 0.4.9", "waker-fn", ] @@ -731,6 +731,28 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "async-task" version = "4.4.0" @@ -851,6 +873,53 @@ dependencies = [ "thiserror", ] +[[package]] +name = "axum" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +dependencies = [ + 
"async-trait", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper 1.0.1", + "tower 0.5.1", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.1", + "tower-layer", + "tower-service", +] + [[package]] name = "az" version = "1.2.1" @@ -878,12 +947,6 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -1025,9 +1088,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.4.0" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" [[package]] name = "calloop" @@ -2284,9 +2347,9 @@ dependencies = [ [[package]] name = "ewebsock" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bbed098b2bf9abcfe50eeaa01ae77a2a1da931bdcd83d23fcd7b8f941cd52c9" +checksum = "735ef60c8cd338b55f972d66c95558a71298d6f8bce1e1347d4c95b082ab2c37" dependencies = [ "document-features", "js-sys", @@ -2735,6 
+2798,25 @@ dependencies = [ "bitflags 2.6.0", ] +[[package]] +name = "h2" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap 2.1.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "1.8.2" @@ -2860,15 +2942,38 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" dependencies = [ "bytes", "fnv", "itoa", ] +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + [[package]] name = "httparse" version = "1.8.0" @@ -2887,6 +2992,59 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + 
+[[package]] +name = "hyper-timeout" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3203a961e5c83b6f5498933e78b6b263e208c197b63e9c6c53cc82ffd3f63793" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2 0.5.7", + "tokio", + "tower-service", + "tracing", +] + [[package]] name = "iana-time-zone" version = "0.1.60" @@ -3408,6 +3566,12 @@ dependencies = [ "libc", ] +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + [[package]] name = "matrixmultiply" version = "0.3.7" @@ -4125,8 +4289,8 @@ dependencies = [ "anyhow", "clap", "glam", - "prost", - "prost-build", + "prost 0.12.6", + "prost-build 0.12.6", "protoc-prebuilt", "re_build_tools", "rerun", @@ -4529,7 +4693,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", - "prost-derive", + "prost-derive 0.12.6", +] + +[[package]] +name = "prost" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +dependencies = [ + "bytes", + "prost-derive 0.13.3", ] [[package]] @@ -4546,8 +4720,29 @@ dependencies = [ "once_cell", "petgraph", "prettyplease", - "prost", - "prost-types", + "prost 0.12.6", + "prost-types 0.12.6", + "regex", + "syn 2.0.79", + "tempfile", +] + +[[package]] +name = "prost-build" +version = "0.13.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" +dependencies = [ + "bytes", + "heck 0.4.1", + "itertools 0.13.0", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost 0.13.3", + "prost-types 0.13.3", "regex", "syn 2.0.79", "tempfile", @@ -4566,13 +4761,35 @@ dependencies = [ "syn 2.0.79", ] +[[package]] +name = "prost-derive" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +dependencies = [ + "anyhow", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "prost-types" version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" dependencies = [ - "prost", + "prost 0.12.6", +] + +[[package]] +name = "prost-types" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" +dependencies = [ + "prost 0.13.3", ] [[package]] @@ -5404,6 +5621,26 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "re_remote_store_types" +version = "0.20.0-alpha.1+dev" +dependencies = [ + "prost 0.13.3", + "re_dataframe", + "re_log_types", + "thiserror", + "tonic", +] + +[[package]] +name = "re_remote_store_types_builder" +version = "0.20.0-alpha.1+dev" +dependencies = [ + "camino", + "re_log", + "tonic-build", +] + [[package]] name = "re_renderer" version = "0.20.0-alpha.1+dev" @@ -6717,23 +6954,33 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.11" +version = "0.23.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" +checksum = "415d9944693cb90382053259f89fbb077ea730ad7273047ec63b19bc9b160ba8" dependencies = 
[ "log", + "once_cell", "ring", + "rustls-pki-types", "rustls-webpki", - "sct", + "subtle", + "zeroize", ] +[[package]] +name = "rustls-pki-types" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" + [[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ "ring", + "rustls-pki-types", "untrusted", ] @@ -6770,16 +7017,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "semver" version = "1.0.17" @@ -6994,9 +7231,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" dependencies = [ "serde", ] @@ -7069,6 +7306,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "spawn_viewer" version = "0.20.0-alpha.1+dev" @@ -7144,6 +7391,12 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"fa7986063f7c0ab374407e586d7048a3d5aac94f103f751088bf398e07cd5400" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "syn" version = "1.0.109" @@ -7166,6 +7419,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + [[package]] name = "sysinfo" version = "0.30.2" @@ -7424,6 +7689,57 @@ dependencies = [ "ahash", ] +[[package]] +name = "tokio" +version = "1.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2 0.5.7", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] + +[[package]] +name = "tokio-stream" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +dependencies = [ + "bytes", + 
"futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + [[package]] name = "toml" version = "0.8.19" @@ -7470,6 +7786,96 @@ dependencies = [ "winnow 0.6.18", ] +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.22.1", + "bytes", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost 0.13.3", + "socket2 0.5.7", + "tokio", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-build" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build 0.13.3", + "prost-types 0.13.3", + "quote", + "syn 2.0.79", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 0.1.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + [[package]] name = "tracing" version = "0.1.40" @@ -7502,6 +7908,12 @@ dependencies = [ "once_cell", ] +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + [[package]] name = "ttf-parser" version = "0.19.0" @@ -7510,9 +7922,9 @@ checksum = "44dcf002ae3b32cd25400d6df128c5babec3927cd1eb7ce813cfff20eb6c3746" [[package]] name = "tungstenite" -version = "0.20.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "6e2e2ce1e47ed2994fd43b04c8f618008d4cabdd5ee34027cf14f9d918edd9c8" dependencies = [ "byteorder", "bytes", @@ -7522,11 +7934,11 @@ dependencies = [ "log", "rand", "rustls", + "rustls-pki-types", "sha1", "thiserror", - "url", "utf-8", - "webpki-roots 0.24.0", + "webpki-roots", ] [[package]] @@ -7632,20 +8044,20 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.9.1" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" +checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "flate2", "log", "once_cell", "rustls", - "rustls-webpki", + "rustls-pki-types", "serde", "serde_json", "url", - "webpki-roots 0.25.4", + "webpki-roots", ] [[package]] @@ -7748,6 +8160,15 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "want" +version = "0.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -8086,19 +8507,13 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.24.0" +version = "0.26.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" +checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" dependencies = [ - "rustls-webpki", + "rustls-pki-types", ] -[[package]] -name = "webpki-roots" -version = "0.25.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" - [[package]] name = "weezl" version = "0.1.7" @@ -8766,6 +9181,12 @@ dependencies = [ "syn 2.0.79", ] +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + [[package]] name = "zip" version = "0.6.6" diff --git a/Cargo.toml b/Cargo.toml index 0b2748769c20..ea41a9a9f7e7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,6 +37,7 @@ version = "0.20.0-alpha.1+dev" re_build_info = { path = "crates/build/re_build_info", version = "=0.20.0-alpha.1", default-features = false } re_build_tools = { path = "crates/build/re_build_tools", version = "=0.20.0-alpha.1", default-features = false } re_dev_tools = { path = "crates/build/re_dev_tools", version = "=0.20.0-alpha.1", default-features = false } +re_remote_store_types_builder = { path = "crates/build/re_remote_store_types_builder", version = "=0.20.0-alpha.1", default-features = false } re_types_builder = { path = "crates/build/re_types_builder", version = "=0.20.0-alpha.1", default-features = false } # crates/store: @@ -50,6 +51,7 @@ re_format_arrow = { path = 
"crates/store/re_format_arrow", version = "=0.20.0-al
 re_log_encoding = { path = "crates/store/re_log_encoding", version = "=0.20.0-alpha.1", default-features = false }
 re_log_types = { path = "crates/store/re_log_types", version = "=0.20.0-alpha.1", default-features = false }
 re_query = { path = "crates/store/re_query", version = "=0.20.0-alpha.1", default-features = false }
+re_remote_store_types = { path = "crates/store/re_remote_store_types", version = "=0.20.0-alpha.1", default-features = false }
 re_sdk_comms = { path = "crates/store/re_sdk_comms", version = "=0.20.0-alpha.1", default-features = false }
 re_types = { path = "crates/store/re_types", version = "=0.20.0-alpha.1", default-features = false }
 re_types_blueprint = { path = "crates/store/re_types_blueprint", version = "=0.20.0-alpha.1", default-features = false }
@@ -103,7 +105,7 @@ re_viewport_blueprint = { path = "crates/viewer/re_viewport_blueprint", version
 re_web_viewer_server = { path = "crates/viewer/re_web_viewer_server", version = "=0.20.0-alpha.1", default-features = false }
 
 # Rerun crates in other repos:
-ewebsock = "0.6.0"
+ewebsock = "0.7.0"
 re_math = "0.20.0"
 
 # If this package fails to build, install `nasm` locally, or build through `pixi`. 
@@ -224,6 +226,7 @@ pollster = "0.3" prettyplease = "0.2" proc-macro2 = { version = "1.0", default-features = false } profiling = { version = "1.0.12", default-features = false } +prost = "0.13.3" puffin = "0.19.1" puffin_http = "0.16" pyo3 = "0.22.5" @@ -263,12 +266,14 @@ tinystl = { version = "0.0.3", default-features = false } tinyvec = { version = "1.6", features = ["alloc", "rustc_1_55"] } tobj = "4.0" toml = { version = "0.8.10", default-features = false } +tonic = "0.12.3" +tonic-build = "0.12.3" tracing = { version = "0.1", default-features = false } -tungstenite = { version = "0.20", default-features = false } +tungstenite = { version = "0.23", default-features = false } type-map = "0.5" typenum = "1.15" unindent = "0.2" -ureq = "2.6" +ureq = "2.9.2" url = "2.3" uuid = "1.1" vec1 = "1.8" diff --git a/crates/build/re_remote_store_types_builder/Cargo.toml b/crates/build/re_remote_store_types_builder/Cargo.toml new file mode 100644 index 000000000000..f104efa1caa0 --- /dev/null +++ b/crates/build/re_remote_store_types_builder/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "re_remote_store_types_builder" +authors.workspace = true +edition.workspace = true +homepage.workspace = true +include.workspace = true +license.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +re_log = { workspace = true, features = ["setup"] } + +# External +camino.workspace = true +tonic-build.workspace = true + +[lints] +workspace = true diff --git a/crates/build/re_remote_store_types_builder/README.md b/crates/build/re_remote_store_types_builder/README.md new file mode 100644 index 000000000000..615f8490ad09 --- /dev/null +++ b/crates/build/re_remote_store_types_builder/README.md @@ -0,0 +1,12 @@ +# re_remote_store_types_builder + +Part of the [`rerun`](https://github.com/rerun-io/rerun) family of crates. 
+ +[![Latest version](https://img.shields.io/crates/v/re_remote_store_types_builder.svg)](https://crates.io/crates/re_remote_store_types_builder) +[![Documentation](https://docs.rs/re_remote_store_types_builder/badge.svg)](https://docs.rs/re_remote_store_types_builder) +![MIT](https://img.shields.io/badge/license-MIT-blue.svg) +![Apache](https://img.shields.io/badge/license-Apache-blue.svg) + +This crate implements Rerun's code generation for the remote store node gRPC API definition. + +You can generate the code with `pixi run codegen-rstore`. diff --git a/crates/build/re_remote_store_types_builder/src/bin/build_re_remote_store_types.rs b/crates/build/re_remote_store_types_builder/src/bin/build_re_remote_store_types.rs new file mode 100644 index 000000000000..3f6d884da31b --- /dev/null +++ b/crates/build/re_remote_store_types_builder/src/bin/build_re_remote_store_types.rs @@ -0,0 +1,43 @@ +//! This binary runs the remote store gRPC service codegen manually. +//! +//! It is easiest to call this using `pixi run codegen-rstore`, +//! which will set up the necessary tools. 
+
+#![allow(clippy::unwrap_used)]
+
+use camino::Utf8Path;
+
+const PROTOBUF_DEFINITIONS_DIR_PATH: &str = "crates/store/re_remote_store_types/proto";
+const PROTOBUF_REMOTE_STORE_V0_RELATIVE_PATH: &str = "rerun/v0/remote_store.proto";
+const RUST_V0_OUTPUT_DIR_PATH: &str = "crates/store/re_remote_store_types/src/v0";
+
+fn main() {
+    re_log::setup_logging();
+
+    let workspace_dir = Utf8Path::new(env!("CARGO_MANIFEST_DIR"))
+        .parent()
+        .and_then(|p| p.parent())
+        .and_then(|p| p.parent())
+        .unwrap();
+
+    // Check for something that only exists in root:
+    assert!(
+        workspace_dir.join("CODE_OF_CONDUCT.md").exists(),
+        "failed to find workspace root"
+    );
+
+    let definitions_dir_path = workspace_dir.join(PROTOBUF_DEFINITIONS_DIR_PATH);
+    let rust_generated_output_dir_path = workspace_dir.join(RUST_V0_OUTPUT_DIR_PATH);
+
+    re_log::info!(
+        definitions=?definitions_dir_path,
+        output=?rust_generated_output_dir_path,
+        "Running codegen for storage node types",
+    );
+
+    re_remote_store_types_builder::generate_rust_code(
+        definitions_dir_path,
+        &[PROTOBUF_REMOTE_STORE_V0_RELATIVE_PATH],
+        rust_generated_output_dir_path,
+    );
+}
diff --git a/crates/build/re_remote_store_types_builder/src/lib.rs b/crates/build/re_remote_store_types_builder/src/lib.rs
new file mode 100644
index 000000000000..f9ca751381a2
--- /dev/null
+++ b/crates/build/re_remote_store_types_builder/src/lib.rs
@@ -0,0 +1,26 @@
+//! This crate contains logic for generating remote store gRPC API types as defined in
+//! `re_remote_store_types` proto files. We are currently generating both client and server
+//! definitions in the same file.
+//!
+
+#![allow(clippy::unwrap_used)]
+
+use std::path::Path;
+
+/// Generate rust from protobuf definitions. We rely on `tonic_build` to do the heavy lifting.
+/// `tonic_build` relies on `prost` which itself relies on `protoc`.
+///
+/// Note: make sure to invoke this via `pixi run codegen-rstore` in order to use the right `protoc` version. 
+pub fn generate_rust_code(
+    definitions_dir: impl AsRef<Path>,
+    proto_paths: &[impl AsRef<Path>],
+    output_dir: impl AsRef<Path>,
+) {
+    tonic_build::configure()
+        .out_dir(output_dir.as_ref())
+        .build_client(true)
+        .build_server(true)
+        .build_transport(true)
+        .compile_protos(proto_paths, &[definitions_dir])
+        .unwrap();
+}
diff --git a/crates/store/re_dataframe/src/lib.rs b/crates/store/re_dataframe/src/lib.rs
index 4a3904b9a415..0816e6707803 100644
--- a/crates/store/re_dataframe/src/lib.rs
+++ b/crates/store/re_dataframe/src/lib.rs
@@ -16,7 +16,9 @@ pub use self::external::re_chunk_store::{
     SparseFillStrategy, TimeColumnSelector, ViewContentsSelector,
 };
 #[doc(no_inline)]
-pub use self::external::re_log_types::{EntityPathFilter, ResolvedTimeRange, TimeInt, Timeline};
+pub use self::external::re_log_types::{
+    EntityPath, EntityPathFilter, ResolvedTimeRange, TimeInt, Timeline,
+};
 #[doc(no_inline)]
 pub use self::external::re_query::Caches as QueryCache;
 
diff --git a/crates/store/re_remote_store_types/Cargo.toml b/crates/store/re_remote_store_types/Cargo.toml
new file mode 100644
index 000000000000..933745526dfb
--- /dev/null
+++ b/crates/store/re_remote_store_types/Cargo.toml
@@ -0,0 +1,22 @@
+[package]
+name = "re_remote_store_types"
+authors.workspace = true
+edition.workspace = true
+homepage.workspace = true
+include.workspace = true
+license.workspace = true
+repository.workspace = true
+rust-version.workspace = true
+version.workspace = true
+
+[dependencies]
+re_log_types.workspace = true
+re_dataframe.workspace = true
+
+# External
+prost.workspace = true
+thiserror.workspace = true
+tonic.workspace = true
+
+[lints]
+workspace = true
diff --git a/crates/store/re_remote_store_types/README.md b/crates/store/re_remote_store_types/README.md
new file mode 100644
index 000000000000..418df5c16895
--- /dev/null
+++ b/crates/store/re_remote_store_types/README.md
@@ -0,0 +1,14 @@
+# re_remote_store_types
+
+Part of the [`rerun`](https://github.com/rerun-io/rerun) family of 
crates. + +[![Latest version](https://img.shields.io/crates/v/re_remote_store_types.svg)](https://crates.io/crates/re_remote_store_types) +[![Documentation](https://docs.rs/re_remote_store_types/badge.svg)](https://docs.rs/re_remote_store_types) +![MIT](https://img.shields.io/badge/license-MIT-blue.svg) +![Apache](https://img.shields.io/badge/license-Apache-blue.svg) + +Rerun remote store node gRPC API service types (client and server). + +This crate includes both the language-agnostic definitions (protobuf) as well as the generated code. + +The code is generated with `pixi run codegen-rstore`. diff --git a/crates/store/re_remote_store_types/proto/rerun/v0/common.proto b/crates/store/re_remote_store_types/proto/rerun/v0/common.proto new file mode 100644 index 000000000000..0f5d979cd81c --- /dev/null +++ b/crates/store/re_remote_store_types/proto/rerun/v0/common.proto @@ -0,0 +1,163 @@ +syntax = "proto3"; + +package rerun.remote_store.v0; + +// unique recording identifier. At this point in time it is the same id as the ChunkStore's StoreId +message RecordingId { + string id = 1; +} + +// A recording can have multiple timelines, each is identified by a name, for example `log_tick`, `log_time`, etc. +message Timeline { + string name = 1; +} + +// A time range between start and end time points. Each 64 bit number can represent different time point data +// depending on the timeline it is associated with. Time range is inclusive for both start and end time points. 
+message TimeRange { + int64 start = 1; + int64 end = 2; +} + +// arrow IPC serialized schema +message Schema { + bytes arrow_schema = 1; +} + +message Query { + // The subset of the database that the query will run on: a set of EntityPath(s) and their + // associated Component(s) + ViewContents view_contents = 1; + + // Whether the view_contents should ignore semantically empty columns + // A semantically empty column is a column that either contains no data at all, or where all + // values are either nulls or empty arrays ([]). + bool include_semantically_empty_columns = 2; + + // Whether the view_contents should ignore columns corresponding to indicator components + // Indicator components are marker components, generally automatically inserted by Rerun, that + // helps keep track of the original context in which a piece of data was logged/sent. + bool include_indicator_columns = 3; + + // Whether the view_contents should ignore columns corresponding to Clear-related components + bool include_tombstone_columns = 4; + + // The index used to filter out _rows_ from the view contents. + // Only rows where at least 1 column contains non-null data at that index will be kept in the + // final dataset. If left unspecified, the results will only contain static data. + IndexColumnSelector filtered_index = 5; + + // The range of index values used to filter out _rows_ from the view contents + // Only rows where at least 1 of the view-contents contains non-null data within that range will be kept in + // the final dataset. + // This has no effect if filtered_index isn't set. + // This has no effect if using_index_values is set. + IndexRange filtered_index_range = 6; + + // The specific index values used to filter out _rows_ from the view contents. + // Only rows where at least 1 column contains non-null data at these specific values will be kept + // in the final dataset. + // This has no effect if filtered_index isn't set. 
+ // This has no effect if using_index_values is set. + IndexValues filtered_index_values = 7; + + // The specific index values used to sample _rows_ from the view contents. + // The final dataset will contain one row per sampled index value, regardless of whether data + // existed for that index value in the view contents. + // The semantics of the query are consistent with all other settings: the results will be + // sorted on the filtered_index, and only contain unique index values. + // + // This has no effect if filtered_index isn't set. + // If set, this overrides both filtered_index_range and filtered_index_values. + IndexValues using_index_values = 8; + + // The component column used to filter out _rows_ from the view contents. + // Only rows where this column contains non-null data will be kept in the final dataset. + ComponentColumnSelector filtered_is_not_null = 9; + + // The specific _columns_ to sample from the final view contents. + // The order of the samples will be respected in the final result. + // + // If unspecified, it means - everything. + ColumnSelection column_selection = 10; + + // Specifies how null values should be filled in the returned dataframe. 
+ SparseFillStrategy sparse_fill_strategy = 11; +} + +message ColumnSelection { + repeated ColumnSelector columns = 1; +} + +message ColumnSelector { + oneof selector_type { + ComponentColumnSelector component_column = 2; + TimeColumnSelector time_column = 3; + } +} + +message IndexColumnSelector { + // TODO(zehiko) we need to add support for other types of index selectors + Timeline timeline = 1; +} + +message IndexRange { + // TODO(zehiko) support for other ranges for other index selectors + TimeRange time_range = 1; +} + +message IndexValues { + // TODO(zehiko) we need to add support for other types of index selectors + repeated TimeInt time_points = 1; +} + +message SampledIndexValues { + repeated TimeInt sample_points = 1; +} + +// A 64-bit number describing either nanoseconds, sequence numbers or fully static data. +message TimeInt { + int64 time = 1; +} + +message ViewContents { + repeated ViewContentsPart contents = 1; +} + +message ViewContentsPart { + EntityPath path = 1; + ComponentsSet components = 2; +} + +message ComponentsSet { + repeated Component components = 1; +} + +// The unique identifier of an entity, e.g. `camera/3/points` +// See <https://www.rerun.io/docs/concepts/entity-path> for more on entity paths. +message EntityPath { + string path = 1; +} + +// Component describes semantic data that can be used by any number of rerun's archetypes. +message Component { + // component name needs to be a string as user can define their own component + string name = 1; +} + +// Used to select a time column. +message TimeColumnSelector { + Timeline timeline = 1; +} + +// Used to select a component based on its EntityPath and Component name. +message ComponentColumnSelector { + EntityPath entity_path = 1; + Component component = 2; +} + +// Specifies how null values should be filled in the returned dataframe. 
+enum SparseFillStrategy { + NONE = 0; + LATEST_AT_GLOBAL = 1; +} diff --git a/crates/store/re_remote_store_types/proto/rerun/v0/remote_store.proto b/crates/store/re_remote_store_types/proto/rerun/v0/remote_store.proto new file mode 100644 index 000000000000..55822e1fbd37 --- /dev/null +++ b/crates/store/re_remote_store_types/proto/rerun/v0/remote_store.proto @@ -0,0 +1,91 @@ +syntax = "proto3"; + +package rerun.remote_store.v0; + +import "rerun/v0/common.proto"; + +service StorageNode { + rpc ListRecordings(ListRecordingsRequest) returns (ListRecordingsResponse) {} + rpc Query(QueryRequest) returns (stream QueryResponse) {} + rpc GetRecordingMetadata(GetRecordingMetadataRequest) returns (GetRecordingMetadataResponse) {} + // TODO(zehiko) - should this be singular recording registration? Currently we can have 1 rrd => many recordings + rpc RegisterRecordings(RegisterRecordingsRequest) returns (RegisterRecordingsResponse) {} +} + +// ---------------- RegisterRecording ------------------ + +message RegisterRecordingsRequest { + string description = 1; + ObjectStorage obj_storage = 2; + // TODO(zehiko) should this be auto-discoverable? + RecordingType typ = 3; +} + +message ObjectStorage { + string bucket_name = 1; + string url = 2; +} + +message RegisterRecordingsResponse { + // Note / TODO(zehiko): this implies we read the record (for example go through entire .rrd file + // chunk by chunk) and extract the metadata. So we might want to 1/ not do this i.e. 
+ // only do it as part of explicit GetMetadata request or 2/ do it if Request has "include_metadata=true" + // or 3/ do it always + repeated RecordingMetadata metadata = 2; +} + +// ---------------- GetRecordingMetadata ----------------- + +message GetRecordingMetadataRequest { + RecordingId recording_id = 1; +} + +message GetRecordingMetadataResponse { + RecordingMetadata metadata = 1; +} + +message RecordingMetadata { + RecordingId id = 1; + Schema schema = 2; + repeated TimeMetadata time_metadata = 3; +} + +message TimeMetadata { + Timeline timeline = 1; + TimeRange time_range = 2; +} + +// ---------------- Query ----------------- + +message QueryRequest { + // unique identifier of the recording + RecordingId recording_id = 1; + // query to execute + Query query = 2; +} + +message QueryResponse { + // single record batch (encoding TBD - TODO). + bytes record_batch = 1; +} + + +// ----------------- ListRecordings ----------------- + +message ListRecordingsRequest {} + +message ListRecordingsResponse { + repeated RecordingInfo recordings = 1; +} + +message RecordingInfo { + RecordingId id = 1; + string description = 2; + string storage_url = 3; + uint64 size_bytes = 4; + RecordingType typ = 5; +} + +enum RecordingType { + RRD = 0; +} diff --git a/crates/store/re_remote_store_types/src/lib.rs b/crates/store/re_remote_store_types/src/lib.rs new file mode 100644 index 000000000000..6edddad23c91 --- /dev/null +++ b/crates/store/re_remote_store_types/src/lib.rs @@ -0,0 +1,393 @@ +//! This crate contains generated types for the remote store gRPC service API. +//! Generation is done using the `re_remote_store_types_builder` crate. +//! +//! We want clear separation between 'internal' types and gRPC types and don't want +//! to use gRPC types in the rerun viewer codebase. That's why we implement all the +//! necessary conversion code (in the form of `From` and `TryFrom` traits) in this crate. +//! + +/// Generated types for the remote store gRPC service API v0. 
+pub mod v0 { + // Ignoring all warnings for the auto-generated code. + #[allow(clippy::doc_markdown)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[allow(clippy::enum_variant_names)] + #[allow(clippy::unwrap_used)] + #[allow(clippy::wildcard_imports)] + #[allow(clippy::manual_is_variant_and)] + #[path = "../v0/rerun.remote_store.v0.rs"] + mod _v0; + + pub use self::_v0::*; + + // ==== below are all necessary transforms from internal rerun types to protobuf types ===== + + use std::collections::BTreeSet; + + #[derive(Debug, thiserror::Error)] + pub enum TypeConversionError { + #[error("missing required field: {0}")] + MissingField(&'static str), + } + + impl From for TimeRange { + fn from(time_range: re_log_types::ResolvedTimeRange) -> Self { + Self { + start: time_range.min().as_i64(), + end: time_range.max().as_i64(), + } + } + } + + impl TryFrom for re_dataframe::QueryExpression { + type Error = TypeConversionError; + + fn try_from(value: Query) -> Result { + let filtered_index = value + .filtered_index + .ok_or(TypeConversionError::MissingField("filtered_index"))? 
+ .try_into()?; + + let selection = value + .column_selection + .map(|cs| { + cs.columns + .into_iter() + .map(re_dataframe::ColumnSelector::try_from) + .collect::, _>>() + }) + .transpose()?; + + let filtered_is_not_null = value + .filtered_is_not_null + .map(re_dataframe::ComponentColumnSelector::try_from) + .transpose()?; + + Ok(Self { + view_contents: value.view_contents.map(|vc| vc.into()), + include_semantically_empty_columns: value.include_semantically_empty_columns, + include_indicator_columns: value.include_indicator_columns, + include_tombstone_columns: value.include_tombstone_columns, + filtered_index: Some(filtered_index), + filtered_index_range: value + .filtered_index_range + .map(|ir| ir.try_into()) + .transpose()?, + filtered_index_values: value + .filtered_index_values + .map(|iv| iv.time_points.into_iter().map(|v| v.into()).collect()), + using_index_values: value + .using_index_values + .map(|uiv| uiv.time_points.into_iter().map(|v| v.into()).collect()), + filtered_is_not_null, + sparse_fill_strategy: re_dataframe::SparseFillStrategy::default(), // TODO(zehiko) implement support for sparse fill strategy + selection, + }) + } + } + + impl From for re_dataframe::ViewContentsSelector { + fn from(value: ViewContents) -> Self { + value + .contents + .into_iter() + .map(|part| { + #[allow(clippy::unwrap_used)] // TODO(zehiko) + let entity_path = Into::::into(part.path.unwrap()); + let column_selector = part.components.map(|cs| { + cs.components + .into_iter() + .map(|c| re_dataframe::external::re_chunk::ComponentName::new(&c.name)) + .collect::>() + }); + (entity_path, column_selector) + }) + .collect::() + } + } + + impl From for re_log_types::EntityPath { + fn from(value: EntityPath) -> Self { + Self::from(value.path) + } + } + + impl TryFrom for re_log_types::Timeline { + type Error = TypeConversionError; + + fn try_from(value: IndexColumnSelector) -> Result { + let timeline_name = value + .timeline + 
.ok_or(TypeConversionError::MissingField("timeline"))? + .name; + + // TODO(cmc): QueryExpression::filtered_index gotta be a selector + #[allow(clippy::match_same_arms)] + let timeline = match timeline_name.as_str() { + "log_time" => Self::new_temporal(timeline_name), + "log_tick" => Self::new_sequence(timeline_name), + "frame" => Self::new_sequence(timeline_name), + "frame_nr" => Self::new_sequence(timeline_name), + _ => Self::new_temporal(timeline_name), + }; + + Ok(timeline) + } + } + + impl TryFrom for re_dataframe::IndexRange { + type Error = TypeConversionError; + + fn try_from(value: IndexRange) -> Result { + let time_range = value + .time_range + .ok_or(TypeConversionError::MissingField("time_range"))?; + + Ok(Self::new(time_range.start, time_range.end)) + } + } + + impl From for re_log_types::TimeInt { + fn from(value: TimeInt) -> Self { + Self::new_temporal(value.time) + } + } + + impl TryFrom for re_dataframe::ComponentColumnSelector { + type Error = TypeConversionError; + + fn try_from(value: ComponentColumnSelector) -> Result { + let entity_path = value + .entity_path + .ok_or(TypeConversionError::MissingField("entity_path"))? + .into(); + + let component_name = value + .component + .ok_or(TypeConversionError::MissingField("component"))? + .name; + + Ok(Self { + entity_path, + component_name, + }) + } + } + + impl TryFrom for re_dataframe::TimeColumnSelector { + type Error = TypeConversionError; + + fn try_from(value: TimeColumnSelector) -> Result { + let timeline = value + .timeline + .ok_or(TypeConversionError::MissingField("timeline"))?; + + Ok(Self { + timeline: timeline.name.into(), + }) + } + } + + impl TryFrom for re_dataframe::ColumnSelector { + type Error = TypeConversionError; + + fn try_from(value: ColumnSelector) -> Result { + match value + .selector_type + .ok_or(TypeConversionError::MissingField("selector_type"))? 
+ { + column_selector::SelectorType::ComponentColumn(component_column_selector) => { + let selector: re_dataframe::ComponentColumnSelector = + component_column_selector.try_into()?; + Ok(selector.into()) + } + column_selector::SelectorType::TimeColumn(time_column_selector) => { + let selector: re_dataframe::TimeColumnSelector = + time_column_selector.try_into()?; + + Ok(selector.into()) + } + } + } + } + + // ---- conversion from rerun's QueryExpression into protobuf Query ---- + + impl From for Query { + fn from(value: re_dataframe::QueryExpression) -> Self { + let view_contents = value + .view_contents + .map(|vc| { + vc.into_iter() + .map(|(path, components)| ViewContentsPart { + path: Some(path.into()), + components: components.map(|cs| ComponentsSet { + components: cs + .into_iter() + .map(|c| Component { + name: c.to_string(), + }) + .collect(), + }), + }) + .collect::>() + }) + .map(|cs| ViewContents { contents: cs }); + + Self { + view_contents, + include_semantically_empty_columns: value.include_semantically_empty_columns, + include_indicator_columns: value.include_indicator_columns, + include_tombstone_columns: value.include_tombstone_columns, + filtered_index: value.filtered_index.map(|timeline| IndexColumnSelector { + timeline: Some(Timeline { + name: timeline.name().to_string(), + }), + }), + filtered_index_range: value.filtered_index_range.map(|ir| IndexRange { + time_range: Some(ir.into()), + }), + filtered_index_values: value.filtered_index_values.map(|iv| IndexValues { + time_points: iv + .into_iter() + // TODO(zehiko) is this desired behavior for TimeInt::STATIC? 
+ .map(|v| TimeInt { time: v.as_i64() }) + .collect(), + }), + using_index_values: value.using_index_values.map(|uiv| IndexValues { + time_points: uiv + .into_iter() + .map(|v| TimeInt { time: v.as_i64() }) + .collect(), + }), + filtered_is_not_null: value.filtered_is_not_null.map(|cs| { + ComponentColumnSelector { + entity_path: Some(cs.entity_path.into()), + component: Some(Component { + name: cs.component_name, + }), + } + }), + column_selection: value.selection.map(|cs| ColumnSelection { + columns: cs.into_iter().map(|c| c.into()).collect(), + }), + sparse_fill_strategy: SparseFillStrategy::None.into(), // TODO(zehiko) implement + } + } + } + + impl From for EntityPath { + fn from(value: re_dataframe::EntityPath) -> Self { + Self { + path: value.to_string(), + } + } + } + + impl From for ColumnSelector { + fn from(value: re_dataframe::ColumnSelector) -> Self { + match value { + re_dataframe::ColumnSelector::Component(ccs) => Self { + selector_type: Some(column_selector::SelectorType::ComponentColumn( + ComponentColumnSelector { + entity_path: Some(ccs.entity_path.into()), + component: Some(Component { + name: ccs.component_name, + }), + }, + )), + }, + re_dataframe::ColumnSelector::Time(tcs) => Self { + selector_type: Some(column_selector::SelectorType::TimeColumn( + TimeColumnSelector { + timeline: Some(Timeline { + name: tcs.timeline.to_string(), + }), + }, + )), + }, + } + } + } +} + +#[cfg(test)] +mod tests { + + use crate::v0::{ + column_selector::SelectorType, ColumnSelection, ColumnSelector, Component, + ComponentColumnSelector, ComponentsSet, EntityPath, IndexColumnSelector, IndexRange, + IndexValues, Query, SparseFillStrategy, TimeInt, TimeRange, Timeline, ViewContents, + ViewContentsPart, + }; + + #[test] + pub fn test_query_conversion() { + let grpc_query_before = Query { + view_contents: Some(ViewContents { + contents: vec![ViewContentsPart { + path: Some(EntityPath { + path: "/somepath".to_owned(), + }), + components: Some(ComponentsSet { + 
components: vec![Component { + name: "component".to_owned(), + }], + }), + }], + }), + include_indicator_columns: false, + include_semantically_empty_columns: true, + include_tombstone_columns: true, + filtered_index: Some(IndexColumnSelector { + timeline: Some(Timeline { + name: "log_time".to_owned(), + }), + }), + filtered_index_range: Some(IndexRange { + time_range: Some(TimeRange { start: 0, end: 100 }), + }), + filtered_index_values: Some(IndexValues { + time_points: vec![ + TimeInt { time: 0 }, + TimeInt { time: 1 }, + TimeInt { time: 2 }, + ], + }), + using_index_values: Some(IndexValues { + time_points: vec![ + TimeInt { time: 3 }, + TimeInt { time: 4 }, + TimeInt { time: 5 }, + ], + }), + filtered_is_not_null: Some(ComponentColumnSelector { + entity_path: Some(EntityPath { + path: "/somepath/c".to_owned(), + }), + component: Some(Component { + name: "component".to_owned(), + }), + }), + column_selection: Some(ColumnSelection { + columns: vec![ColumnSelector { + selector_type: Some(SelectorType::ComponentColumn(ComponentColumnSelector { + entity_path: Some(EntityPath { + path: "/somepath/c".to_owned(), + }), + component: Some(Component { + name: "component".to_owned(), + }), + })), + }], + }), + sparse_fill_strategy: SparseFillStrategy::None.into(), + }; + + let query_expression_native: re_dataframe::QueryExpression = + grpc_query_before.clone().try_into().unwrap(); + let grpc_query_after = query_expression_native.into(); + + assert_eq!(grpc_query_before, grpc_query_after); + } +} diff --git a/crates/store/re_remote_store_types/src/v0/rerun.remote_store.v0.rs b/crates/store/re_remote_store_types/src/v0/rerun.remote_store.v0.rs new file mode 100644 index 000000000000..4b6342b7e00e --- /dev/null +++ b/crates/store/re_remote_store_types/src/v0/rerun.remote_store.v0.rs @@ -0,0 +1,799 @@ +// This file is @generated by prost-build. +/// unique recording identifier. 
At this point in time it is the same id as the ChunkStore's StoreId +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RecordingId { + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, +} +/// A recording can have multiple timelines, each is identified by a name, for example `log_tick`, `log_time`, etc. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Timeline { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, +} +/// A time range between start and end time points. Each 64 bit number can represent different time point data +/// depending on the timeline it is associated with. Time range is inclusive for both start and end time points. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct TimeRange { + #[prost(int64, tag = "1")] + pub start: i64, + #[prost(int64, tag = "2")] + pub end: i64, +} +/// arrow IPC serialized schema +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Schema { + #[prost(bytes = "vec", tag = "1")] + pub arrow_schema: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Query { + /// The subset of the database that the query will run on: a set of EntityPath(s) and their + /// associated Component(s) + #[prost(message, optional, tag = "1")] + pub view_contents: ::core::option::Option, + /// Whether the view_contents should ignore semantically empty columns + /// A semantically empty column is a column that either contains no data at all, or where all + /// values are either nulls or empty arrays (\[\]). + #[prost(bool, tag = "2")] + pub include_semantically_empty_columns: bool, + /// Whether the view_contents should ignore columns corresponding to indicator components + /// Indicator components are marker components, generally automatically inserted by Rerun, that + /// helps keep track of the original context in which a piece of data was logged/sent. 
+ #[prost(bool, tag = "3")] + pub include_indicator_columns: bool, + /// Whether the view_contents should ignore columns corresponding to Clear-related components + #[prost(bool, tag = "4")] + pub include_tombstone_columns: bool, + /// The index used to filter out _rows_ from the view contents. + /// Only rows where at least 1 column contains non-null data at that index will be kept in the + /// final dataset. If left unspecified, the results will only contain static data. + #[prost(message, optional, tag = "5")] + pub filtered_index: ::core::option::Option, + /// The range of index values used to filter out _rows_ from the view contents + /// Only rows where at least 1 of the view-contents contains non-null data within that range will be kept in + /// the final dataset. + /// This has no effect if filtered_index isn't set. + /// This has no effect if using_index_values is set. + #[prost(message, optional, tag = "6")] + pub filtered_index_range: ::core::option::Option, + /// The specific index values used to filter out _rows_ from the view contents. + /// Only rows where at least 1 column contains non-null data at these specific values will be kept + /// in the final dataset. + /// This has no effect if filtered_index isn't set. + /// This has no effect if using_index_values is set. + #[prost(message, optional, tag = "7")] + pub filtered_index_values: ::core::option::Option, + /// The specific index values used to sample _rows_ from the view contents. + /// The final dataset will contain one row per sampled index value, regardless of whether data + /// existed for that index value in the view contents. + /// The semantics of the query are consistent with all other settings: the results will be + /// sorted on the filtered_index, and only contain unique index values. + /// + /// This has no effect if filtered_index isn't set. + /// If set, this overrides both filtered_index_range and filtered_index_values. 
+ #[prost(message, optional, tag = "8")] + pub using_index_values: ::core::option::Option, + /// The component column used to filter out _rows_ from the view contents. + /// Only rows where this column contains non-null data be kept in the final dataset. + #[prost(message, optional, tag = "9")] + pub filtered_is_not_null: ::core::option::Option, + /// / The specific _columns_ to sample from the final view contents. + /// / The order of the samples will be respected in the final result. + /// / + /// / If unspecified, it means - everything. + #[prost(message, optional, tag = "10")] + pub column_selection: ::core::option::Option, + /// Specifies how null values should be filled in the returned dataframe. + #[prost(enumeration = "SparseFillStrategy", tag = "11")] + pub sparse_fill_strategy: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ColumnSelection { + #[prost(message, repeated, tag = "1")] + pub columns: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ColumnSelector { + #[prost(oneof = "column_selector::SelectorType", tags = "2, 3")] + pub selector_type: ::core::option::Option, +} +/// Nested message and enum types in `ColumnSelector`. 
+pub mod column_selector { + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum SelectorType { + #[prost(message, tag = "2")] + ComponentColumn(super::ComponentColumnSelector), + #[prost(message, tag = "3")] + TimeColumn(super::TimeColumnSelector), + } +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IndexColumnSelector { + /// TODO(zehiko) we need to add support for other types of index selectors + #[prost(message, optional, tag = "1")] + pub timeline: ::core::option::Option, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct IndexRange { + /// TODO(zehiko) support for other ranges for other index selectors + #[prost(message, optional, tag = "1")] + pub time_range: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IndexValues { + /// TODO(zehiko) we need to add support for other types of index selectors + #[prost(message, repeated, tag = "1")] + pub time_points: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SampledIndexValues { + #[prost(message, repeated, tag = "1")] + pub sample_points: ::prost::alloc::vec::Vec, +} +/// A 64-bit number describing either nanoseconds, sequence numbers or fully static data. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct TimeInt { + #[prost(int64, tag = "1")] + pub time: i64, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ViewContents { + #[prost(message, repeated, tag = "1")] + pub contents: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ViewContentsPart { + #[prost(message, optional, tag = "1")] + pub path: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub components: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ComponentsSet { + #[prost(message, repeated, tag = "1")] + pub components: ::prost::alloc::vec::Vec, +} +/// The unique identifier of an entity, e.g. 
`camera/3/points` +/// See <> for more on entity paths. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct EntityPath { + #[prost(string, tag = "1")] + pub path: ::prost::alloc::string::String, +} +/// Component describes semantic data that can be used by any number of rerun's archetypes. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Component { + /// component name needs to be a string as user can define their own component + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, +} +/// Used to telect a time column. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TimeColumnSelector { + #[prost(message, optional, tag = "1")] + pub timeline: ::core::option::Option, +} +/// Used to select a component based on its EntityPath and Component name. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ComponentColumnSelector { + #[prost(message, optional, tag = "1")] + pub entity_path: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub component: ::core::option::Option, +} +/// Specifies how null values should be filled in the returned dataframe. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SparseFillStrategy { + None = 0, + LatestAtGlobal = 1, +} +impl SparseFillStrategy { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::None => "NONE", + Self::LatestAtGlobal => "LATEST_AT_GLOBAL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NONE" => Some(Self::None), + "LATEST_AT_GLOBAL" => Some(Self::LatestAtGlobal), + _ => None, + } + } +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RegisterRecordingsRequest { + #[prost(string, tag = "1")] + pub description: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub obj_storage: ::core::option::Option, + /// TODO(zehiko) should this be auto-discoverable? + #[prost(enumeration = "RecordingType", tag = "3")] + pub typ: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ObjectStorage { + #[prost(string, tag = "1")] + pub bucket_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub url: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RegisterRecordingsResponse { + /// Note / TODO(zehiko): this implies we read the record (for example go through entire .rrd file + /// chunk by chunk) and extract the metadata. So we might want to 1/ not do this i.e. 
+ /// only do it as part of explicit GetMetadata request or 2/ do it if Request has "include_metadata=true" + /// or 3/ do it always + #[prost(message, repeated, tag = "2")] + pub metadata: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetRecordingMetadataRequest { + #[prost(message, optional, tag = "1")] + pub recording_id: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GetRecordingMetadataResponse { + #[prost(message, optional, tag = "1")] + pub metadata: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RecordingMetadata { + #[prost(message, optional, tag = "1")] + pub id: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub schema: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub time_metadata: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TimeMetadata { + #[prost(message, optional, tag = "1")] + pub timeline: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub time_range: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryRequest { + /// unique identifier of the recording + #[prost(message, optional, tag = "1")] + pub recording_id: ::core::option::Option, + /// query to execute + #[prost(message, optional, tag = "2")] + pub query: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryResponse { + /// single record batch (encoding TBD - TODO). 
+ #[prost(bytes = "vec", tag = "1")] + pub record_batch: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct ListRecordingsRequest {} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListRecordingsResponse { + #[prost(message, repeated, tag = "1")] + pub recordings: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RecordingInfo { + #[prost(message, optional, tag = "1")] + pub id: ::core::option::Option, + #[prost(string, tag = "2")] + pub description: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub storage_url: ::prost::alloc::string::String, + #[prost(uint64, tag = "4")] + pub size_bytes: u64, + #[prost(enumeration = "RecordingType", tag = "5")] + pub typ: i32, +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum RecordingType { + Rrd = 0, +} +impl RecordingType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Rrd => "RRD", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "RRD" => Some(Self::Rrd), + _ => None, + } + } +} +/// Generated client implementations. +pub mod storage_node_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value + )] + use tonic::codegen::http::Uri; + use tonic::codegen::*; + #[derive(Debug, Clone)] + pub struct StorageNodeClient { + inner: tonic::client::Grpc, + } + impl StorageNodeClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl StorageNodeClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> StorageNodeClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + >>::Error: + Into + std::marker::Send + std::marker::Sync, + { + StorageNodeClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + pub async fn list_recordings( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/rerun.remote_store.v0.StorageNode/ListRecordings", + ); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new( + "rerun.remote_store.v0.StorageNode", + "ListRecordings", + )); + self.inner.unary(req, path, codec).await + } + pub async fn query( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = + http::uri::PathAndQuery::from_static("/rerun.remote_store.v0.StorageNode/Query"); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new( + "rerun.remote_store.v0.StorageNode", + "Query", + )); + self.inner.server_streaming(req, path, codec).await + } + pub async fn get_recording_metadata( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/rerun.remote_store.v0.StorageNode/GetRecordingMetadata", + ); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new( + "rerun.remote_store.v0.StorageNode", + "GetRecordingMetadata", + )); + 
self.inner.unary(req, path, codec).await + } + /// TODO(zehiko) - should this be singular recording registration? Currently we can have 1 rrd => many recordings + pub async fn register_recordings( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result, tonic::Status> + { + self.inner.ready().await.map_err(|e| { + tonic::Status::unknown(format!("Service was not ready: {}", e.into())) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/rerun.remote_store.v0.StorageNode/RegisterRecordings", + ); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new( + "rerun.remote_store.v0.StorageNode", + "RegisterRecordings", + )); + self.inner.unary(req, path, codec).await + } + } +} +/// Generated server implementations. +pub mod storage_node_server { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value + )] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with StorageNodeServer. + #[async_trait] + pub trait StorageNode: std::marker::Send + std::marker::Sync + 'static { + async fn list_recordings( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + /// Server streaming response type for the Query method. + type QueryStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + std::marker::Send + + 'static; + async fn query( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + async fn get_recording_metadata( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + /// TODO(zehiko) - should this be singular recording registration? 
Currently we can have 1 rrd => many recordings + async fn register_recordings( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct StorageNodeServer { + inner: Arc, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + impl StorageNodeServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor(inner: T, interceptor: F) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for StorageNodeServer + where + T: StorageNode, + B: Body + std::marker::Send + 'static, + B::Error: Into + std::marker::Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + match req.uri().path() { + "/rerun.remote_store.v0.StorageNode/ListRecordings" => { + #[allow(non_camel_case_types)] + struct ListRecordingsSvc(pub Arc); + impl tonic::server::UnaryService + for ListRecordingsSvc + { + type Response = super::ListRecordingsResponse; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::list_recordings(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = ListRecordingsSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/rerun.remote_store.v0.StorageNode/Query" => { + #[allow(non_camel_case_types)] + struct QuerySvc(pub Arc); + impl 
tonic::server::ServerStreamingService for QuerySvc { + type Response = super::QueryResponse; + type ResponseStream = T::QueryStream; + type Future = + BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = + async move { ::query(&inner, request).await }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = QuerySvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/rerun.remote_store.v0.StorageNode/GetRecordingMetadata" => { + #[allow(non_camel_case_types)] + struct GetRecordingMetadataSvc(pub Arc); + impl + tonic::server::UnaryService + for GetRecordingMetadataSvc + { + type Response = super::GetRecordingMetadataResponse; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::get_recording_metadata(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = 
GetRecordingMetadataSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/rerun.remote_store.v0.StorageNode/RegisterRecordings" => { + #[allow(non_camel_case_types)] + struct RegisterRecordingsSvc(pub Arc); + impl + tonic::server::UnaryService + for RegisterRecordingsSvc + { + type Response = super::RegisterRecordingsResponse; + type Future = BoxFuture, tonic::Status>; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::register_recordings(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let method = RegisterRecordingsSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => Box::pin(async move { + let mut response = http::Response::new(empty_body()); + let headers = response.headers_mut(); + headers.insert( + tonic::Status::GRPC_STATUS, + (tonic::Code::Unimplemented as i32).into(), + ); + headers.insert( + http::header::CONTENT_TYPE, + tonic::metadata::GRPC_CONTENT_TYPE, + ); + Ok(response) + }), + } 
+ } + } + impl Clone for StorageNodeServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + /// Generated gRPC service name + pub const SERVICE_NAME: &str = "rerun.remote_store.v0.StorageNode"; + impl tonic::server::NamedService for StorageNodeServer { + const NAME: &'static str = SERVICE_NAME; + } +} diff --git a/crates/store/re_ws_comms/src/client.rs b/crates/store/re_ws_comms/src/client.rs index 7d793fc9648c..6fa617f90023 100644 --- a/crates/store/re_ws_comms/src/client.rs +++ b/crates/store/re_ws_comms/src/client.rs @@ -16,6 +16,7 @@ pub fn viewer_to_server( // We set a very high limit, because we should be able to trust the server. // See https://github.com/rerun-io/rerun/issues/5268 for more max_incoming_frame_size: 2 * gigs, + ..ewebsock::Options::default() }; ewebsock::ws_receive( diff --git a/crates/utils/re_log/src/lib.rs b/crates/utils/re_log/src/lib.rs index 195a984e5a3f..7befc2d8515c 100644 --- a/crates/utils/re_log/src/lib.rs +++ b/crates/utils/re_log/src/lib.rs @@ -72,6 +72,7 @@ const CRATES_AT_INFO_LEVEL: &[&str] = &[ // These are quite spammy on debug, drowning out what we care about: "h2", "hyper", + "prost_build", "ureq", // only let rustls log in debug mode: https://github.com/rerun-io/rerun/issues/3104 #[cfg(debug_assertions)] diff --git a/deny.toml b/deny.toml index d99c623c3e5d..467180881539 100644 --- a/deny.toml +++ b/deny.toml @@ -60,7 +60,6 @@ skip = [ { name = "raw-window-handle" }, # Pretty small crate; some crates still on old version { name = "redox_syscall" }, # Plenty of versions in the wild { name = "spin" }, # Old version used by rusttls - { name = "webpki-roots" }, # ureq and tungstenite are on different version 😭 ] skip-tree = [ { name 
= "async-io" }, # Old version via rfd @@ -71,6 +70,7 @@ skip-tree = [ { name = "prost-derive" }, # only used in objectron example { name = "toml_edit" }, # Old version via egui-winit, newer used by rustdoc-json { name = "windows" }, # Old version used by accesskit_windows, newer version used by wgpu + { name = "tower" }, # tonic depends on 0.4.3, but also transitively (axum) on 0.5.1 ] diff --git a/pixi.toml b/pixi.toml index 02779023fee6..8993e4633740 100644 --- a/pixi.toml +++ b/pixi.toml @@ -117,6 +117,9 @@ examples-pypi = ["examples-common", "python-pypi"] # Run the codegen. Optionally pass `--profile` argument if you want. codegen = "cargo --quiet run --package re_types_builder -- " +# Run the codegen for remote store types. +codegen-rstore = "cargo --quiet run --package re_remote_store_types_builder && pixi run -e cpp format" + # Generate the Rerun CLI manual. # NOTE:must be --all-features, otherwise we might miss some optional commands. diff --git a/scripts/lint.py b/scripts/lint.py index ece47a151c5f..504b015a6c5e 100755 --- a/scripts/lint.py +++ b/scripts/lint.py @@ -1199,6 +1199,7 @@ def main() -> None: "./.pytest_cache", "./CODE_STYLE.md", "./crates/build/re_types_builder/src/reflection.rs", # auto-generated + "./crates/store/re_remote_store_types/src/v0/rerun.remote_store.v0.rs", # auto-generated "./docs/content/reference/cli.md", # auto-generated "./examples/assets", "./examples/python/detect_and_track_objects/cache/version.txt",