From b84d1f9ed3e11ef206fc6c5f12028496836e2522 Mon Sep 17 00:00:00 2001 From: George Mulhearn <57472912+gmulhearn@users.noreply.github.com> Date: Wed, 4 Dec 2024 08:50:28 +1000 Subject: [PATCH 1/2] bump version to 0.66 (#1307) Signed-off-by: George Mulhearn Co-authored-by: George Mulhearn --- Cargo.lock | 20 +++++++++---------- Cargo.toml | 2 +- .../src/controllers/general.rs | 2 +- aries/aries_vcx/README.md | 2 +- did_core/did_methods/did_peer/README.md | 2 +- did_core/did_parser_nom/README.md | 2 +- 6 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fa3db8a79d..da0ea55b61 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -487,7 +487,7 @@ dependencies = [ [[package]] name = "aries-vcx-agent" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anoncreds_types", "aries_vcx", @@ -532,7 +532,7 @@ dependencies = [ [[package]] name = "aries_vcx" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anoncreds_types", "aries_vcx_anoncreds", @@ -580,7 +580,7 @@ dependencies = [ [[package]] name = "aries_vcx_anoncreds" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anoncreds", "anoncreds_types", @@ -598,7 +598,7 @@ dependencies = [ [[package]] name = "aries_vcx_ledger" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anoncreds_types", "aries_vcx_wallet", @@ -619,7 +619,7 @@ dependencies = [ [[package]] name = "aries_vcx_wallet" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anyhow", "aries-askar", @@ -1864,7 +1864,7 @@ dependencies = [ [[package]] name = "diddoc_legacy" -version = "0.65.0" +version = "0.66.0" dependencies = [ "display_as_json", "serde", @@ -3214,7 +3214,7 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "messages" -version = "0.65.0" +version = "0.66.0" dependencies = [ "chrono", "derive_more", @@ -4454,7 +4454,7 @@ dependencies = [ [[package]] name = "shared" -version = "0.65.0" +version = "0.66.0" dependencies = [ "bs58", 
"lazy_static", @@ -4981,7 +4981,7 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "test_utils" -version = "0.65.0" +version = "0.66.0" dependencies = [ "anoncreds_types", "aries_vcx_anoncreds", @@ -5366,7 +5366,7 @@ dependencies = [ [[package]] name = "uniffi_aries_vcx" -version = "0.65.0" +version = "0.66.0" dependencies = [ "android_logger", "aries_vcx", diff --git a/Cargo.toml b/Cargo.toml index 1ff032934e..61286e8ce8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ members = [ ] [workspace.package] -version = "0.65.0" +version = "0.66.0" authors = [ "Absa Group Limited", "Hyperledger Indy Contributors ", diff --git a/aries/agents/aath-backchannel/src/controllers/general.rs b/aries/agents/aath-backchannel/src/controllers/general.rs index 9bc394d42b..d58d6fc740 100644 --- a/aries/agents/aath-backchannel/src/controllers/general.rs +++ b/aries/agents/aath-backchannel/src/controllers/general.rs @@ -28,7 +28,7 @@ pub async fn get_status(agent: web::Data>) -> impl Responde #[get("/version")] pub async fn get_version() -> impl Responder { // Update this with aries-vcx - HttpResponse::Ok().body("0.65.0") + HttpResponse::Ok().body("0.66.0") } #[get("/did")] diff --git a/aries/aries_vcx/README.md b/aries/aries_vcx/README.md index cb9df708d1..4cfeeb6291 100644 --- a/aries/aries_vcx/README.md +++ b/aries/aries_vcx/README.md @@ -23,7 +23,7 @@ To use `aries_vcx` in your project, you need to add GitHub dependency to your `C define a version through a `tag`: ```toml -aries-vcx = { tag = "0.65.0", git = "https://github.com/hyperledger/aries-vcx" } +aries-vcx = { tag = "0.66.0", git = "https://github.com/hyperledger/aries-vcx" } ``` It's also advisable to follow these [instructions](TUTORIAL.md) to check your environment is properly configured. 
diff --git a/did_core/did_methods/did_peer/README.md b/did_core/did_methods/did_peer/README.md index a398c7767a..3ad1efa587 100644 --- a/did_core/did_methods/did_peer/README.md +++ b/did_core/did_methods/did_peer/README.md @@ -23,7 +23,7 @@ Add the Peer DID library as a dependency in your `Cargo.toml` file: ```toml [dependencies] -peer_did = { tag = "0.65.0", git = "https://github.com/hyperledger/aries-vcx" } +peer_did = { tag = "0.66.0", git = "https://github.com/hyperledger/aries-vcx" } ``` ## Demo diff --git a/did_core/did_parser_nom/README.md b/did_core/did_parser_nom/README.md index c45e476337..ad2bd20e38 100644 --- a/did_core/did_parser_nom/README.md +++ b/did_core/did_parser_nom/README.md @@ -17,5 +17,5 @@ Add the did_parser library as a dependency in your `Cargo.toml` file: ```toml [dependencies] -did_parser_nom = { tag = "0.65.0", git = "https://github.com/hyperledger/aries-vcx" } +did_parser_nom = { tag = "0.66.0", git = "https://github.com/hyperledger/aries-vcx" } ``` From 0e3bed00782ef6983dae11853d073253295f2bb0 Mon Sep 17 00:00:00 2001 From: George Mulhearn <57472912+gmulhearn@users.noreply.github.com> Date: Tue, 10 Dec 2024 08:51:11 +1000 Subject: [PATCH 2/2] (feat) Cheqd DID resolver #1300 (#1305) * try permissions in CI Signed-off-by: George Mulhearn * new crate Signed-off-by: George Mulhearn * resolution working Signed-off-by: George Mulhearn * check in the proto types and have a standalone generator helper (rather than generate at build time) Signed-off-by: George Mulhearn * regen lock Signed-off-by: George Mulhearn * cheqd did parser Signed-off-by: George Mulhearn * cheqd did url tests Signed-off-by: George Mulhearn * re-gen lock Signed-off-by: George Mulhearn * resolver system tests are working Signed-off-by: George Mulhearn * run int tests in CI Signed-off-by: George Mulhearn * clippy Signed-off-by: George Mulhearn * resolution with contexts Signed-off-by: George Mulhearn * finish some TODOs, and add doc metadata Signed-off-by: George Mulhearn 
* update readme Signed-off-by: George Mulhearn * try bumping rust ver on vdrproxy Signed-off-by: George Mulhearn * lock auto update Signed-off-by: George Mulhearn * readme updates Signed-off-by: George Mulhearn * remove some debugs Signed-off-by: George Mulhearn * more error info Signed-off-by: George Mulhearn * updated hyper in did:web. fixes clients Signed-off-by: George Mulhearn * fix up tls. works on android Signed-off-by: George Mulhearn * some cleaning Signed-off-by: George Mulhearn --------- Signed-off-by: George Mulhearn Co-authored-by: George Mulhearn --- Cargo.lock | 250 +++- Cargo.toml | 2 + README.md | 1 + .../src/msg_types/protocols/did_exchange.rs | 2 - did_core/did_doc/src/schema/mod.rs | 16 + did_core/did_doc/src/schema/utils/mod.rs | 9 + .../verification_method_type.rs | 38 +- did_core/did_methods/did_cheqd/Cargo.toml | 39 + did_core/did_methods/did_cheqd/README.md | 14 + .../did_cheqd/cheqd_proto_gen/Cargo.toml | 11 + .../proto/cheqd/did/v2/diddoc.proto | 138 ++ .../proto/cheqd/did/v2/query.proto | 87 ++ .../proto/cheqd/resource/v2/query.proto | 79 + .../proto/cheqd/resource/v2/resource.proto | 89 ++ .../base/query/v1beta1/pagination.proto | 48 + .../proto/gogoproto/gogo.proto | 144 ++ .../proto/google/api/annotations.proto | 31 + .../proto/google/api/http.proto | 371 +++++ .../proto/google/protobuf/descriptor.proto | 1307 +++++++++++++++++ .../proto/google/protobuf/timestamp.proto | 144 ++ .../did_cheqd/cheqd_proto_gen/src/main.rs | 16 + .../did_methods/did_cheqd/src/error/mod.rs | 29 + .../did_cheqd/src/error/parsing.rs | 56 + did_core/did_methods/did_cheqd/src/lib.rs | 3 + .../did_cheqd/src/proto/cheqd.did.v2.rs | 387 +++++ .../did_cheqd/src/proto/cheqd.resource.v2.rs | 344 +++++ .../src/proto/cosmos.base.query.v1beta1.rs | 55 + .../did_cheqd/src/proto/google.api.rs | 360 +++++ .../did_methods/did_cheqd/src/proto/mod.rs | 24 + .../did_cheqd/src/resolution/mod.rs | 2 + .../did_cheqd/src/resolution/resolver.rs | 241 +++ 
.../did_cheqd/src/resolution/transformer.rs | 212 +++ .../did_methods/did_cheqd/tests/resolution.rs | 92 ++ .../src/peer_did/numalgos/numalgo2/mod.rs | 1 - .../did_methods/did_resolver_web/Cargo.toml | 14 +- .../did_resolver_web/src/error/mod.rs | 6 +- .../src/resolution/resolver.rs | 22 +- .../did_resolver_web/tests/resolution.rs | 57 +- .../src/did/parsing/did_cheqd.rs | 68 + .../did_parser_nom/src/did/parsing/mod.rs | 4 + did_core/did_parser_nom/tests/did/negative.rs | 28 +- did_core/did_parser_nom/tests/did/positive.rs | 10 + .../did_parser_nom/tests/did_url/positive.rs | 23 + justfile | 2 +- 44 files changed, 4815 insertions(+), 61 deletions(-) create mode 100644 did_core/did_methods/did_cheqd/Cargo.toml create mode 100644 did_core/did_methods/did_cheqd/README.md create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/Cargo.toml create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/diddoc.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/query.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/query.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/resource.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cosmos/base/query/v1beta1/pagination.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/gogoproto/gogo.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/annotations.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/http.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/descriptor.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/timestamp.proto create mode 100644 did_core/did_methods/did_cheqd/cheqd_proto_gen/src/main.rs create mode 100644 
did_core/did_methods/did_cheqd/src/error/mod.rs create mode 100644 did_core/did_methods/did_cheqd/src/error/parsing.rs create mode 100644 did_core/did_methods/did_cheqd/src/lib.rs create mode 100644 did_core/did_methods/did_cheqd/src/proto/cheqd.did.v2.rs create mode 100644 did_core/did_methods/did_cheqd/src/proto/cheqd.resource.v2.rs create mode 100644 did_core/did_methods/did_cheqd/src/proto/cosmos.base.query.v1beta1.rs create mode 100644 did_core/did_methods/did_cheqd/src/proto/google.api.rs create mode 100644 did_core/did_methods/did_cheqd/src/proto/mod.rs create mode 100644 did_core/did_methods/did_cheqd/src/resolution/mod.rs create mode 100644 did_core/did_methods/did_cheqd/src/resolution/resolver.rs create mode 100644 did_core/did_methods/did_cheqd/src/resolution/transformer.rs create mode 100644 did_core/did_methods/did_cheqd/tests/resolution.rs create mode 100644 did_core/did_parser_nom/src/did/parsing/did_cheqd.rs diff --git a/Cargo.lock b/Cargo.lock index da0ea55b61..33bfd98d4d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -853,7 +853,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "itoa", "matchit", @@ -868,7 +868,7 @@ dependencies = [ "serde_urlencoded", "sync_wrapper 1.0.1", "tokio", - "tower", + "tower 0.5.1", "tower-layer", "tower-service", "tracing", @@ -1087,9 +1087,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" [[package]] name = "bytestring" @@ -1182,6 +1182,13 @@ dependencies = [ "zeroize", ] +[[package]] +name = "cheqd_proto_gen" +version = "0.1.0" +dependencies = [ + "tonic-build", +] + [[package]] name = "chrono" version = "0.4.38" @@ -1712,6 +1719,28 @@ 
dependencies = [ "syn 2.0.79", ] +[[package]] +name = "did_cheqd" +version = "0.1.0" +dependencies = [ + "async-trait", + "bytes", + "chrono", + "did_resolver", + "http-body-util", + "hyper-tls", + "hyper-util", + "native-tls", + "prost", + "prost-types", + "serde", + "serde_json", + "thiserror", + "tokio", + "tonic", + "url", +] + [[package]] name = "did_doc" version = "0.1.0" @@ -1854,8 +1883,10 @@ version = "0.1.0" dependencies = [ "async-trait", "did_resolver", - "hyper 0.14.30", - "hyper-tls 0.5.0", + "http-body-util", + "hyper 1.5.1", + "hyper-tls", + "hyper-util", "serde_json", "thiserror", "tokio", @@ -2194,6 +2225,12 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "flate2" version = "1.0.34" @@ -2698,9 +2735,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes", "futures-channel", @@ -2722,9 +2759,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", @@ -2749,7 +2786,7 @@ checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", "http 
1.1.0", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "rustls 0.23.13", "rustls-pki-types", @@ -2759,16 +2796,16 @@ dependencies = [ ] [[package]] -name = "hyper-tls" -version = "0.5.0" +name = "hyper-timeout" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "bytes", - "hyper 0.14.30", - "native-tls", + "hyper 1.5.1", + "hyper-util", + "pin-project-lite", "tokio", - "tokio-native-tls", + "tower-service", ] [[package]] @@ -2779,7 +2816,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "native-tls", "tokio", @@ -2789,16 +2826,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", "http-body 1.0.1", - "hyper 1.4.1", + "hyper 1.5.1", "pin-project-lite", "socket2", "tokio", @@ -3350,6 +3387,12 @@ dependencies = [ "data-encoding-macro", ] +[[package]] +name = "multimap" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" + [[package]] name = "native-tls" version = "0.2.12" @@ -3690,6 +3733,16 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.6.0", +] + [[package]] name = "phf" version = "0.11.2" @@ -3708,6 +3761,26 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.79", +] + [[package]] name = "pin-project-lite" version = "0.2.14" @@ -3827,6 +3900,16 @@ dependencies = [ "yansi", ] +[[package]] +name = "prettyplease" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +dependencies = [ + "proc-macro2", + "syn 2.0.79", +] + [[package]] name = "primeorder" version = "0.13.6" @@ -3869,6 +3952,59 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "prost" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0487d90e047de87f984913713b85c601c05609aad5b0df4b4573fbf69aa13f" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c1318b19085f08681016926435853bbf7858f9c082d0999b80550ff5d9abe15" +dependencies = [ + "bytes", + "heck 0.5.0", + "itertools", + "log", + "multimap", + "once_cell", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 2.0.79", + "tempfile", +] + +[[package]] +name = "prost-derive" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e9552f850d5f0964a4e4d0bf306459ac29323ddfbae05e35a7c0d35cb0803cc5" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.79", +] + +[[package]] +name = "prost-types" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4759aa0d3a6232fb8dbdb97b61de2c20047c68aca932c7ed76da9d788508d670" +dependencies = [ + "prost", +] + [[package]] name = "public_key" version = "0.1.0" @@ -3995,7 +4131,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "ipnet", "js-sys", "log", @@ -4033,9 +4169,9 @@ dependencies = [ "http 1.1.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-rustls", - "hyper-tls 0.6.0", + "hyper-tls", "hyper-util", "ipnet", "js-sys", @@ -4201,9 +4337,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-webpki" @@ -5174,6 +5310,66 @@ dependencies = [ "serde", ] +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-trait", + "base64 0.22.1", + "bytes", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.1", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-build" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" +dependencies = [ + "prettyplease", + 
"proc-macro2", + "prost-build", + "prost-types", + "quote", + "syn 2.0.79", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 61286e8ce8..5a604c6592 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,8 @@ members = [ "misc/simple_message_relay", "misc/display_as_json", "did_core/did_methods/did_jwk", + "did_core/did_methods/did_cheqd", + "did_core/did_methods/did_cheqd/cheqd_proto_gen", ] [workspace.package] diff --git a/README.md b/README.md index 364931b934..bf22237b99 100644 --- a/README.md +++ b/README.md @@ -23,6 +23,7 @@ The repository contains Rust crates to build - [`did_parser`](did_core/did_parser_nom) - Building and parsing [DIDs](https://w3c.github.io/did-core/) - [`did_peer`](did_core/did_methods/did_peer) - https://identity.foundation/peer-did-method-spec/ - [`did_sov`](did_core/did_methods/did_resolver_sov) - https://sovrin-foundation.github.io/sovrin/spec/did-method-spec-template.html + - [`did_cheqd`](did_core/did_methods/did_cheqd) - https://docs.cheqd.io/product/architecture/adr-list/adr-001-cheqd-did-method - [`did_web`](did_core/did_methods/did_resolver_web) - https://w3c-ccg.github.io/did-method-web/ - [`did_key`](did_core/did_methods/did_key) - https://w3c-ccg.github.io/did-method-key/ - [`did_jwk`](did_core/did_methods/did_jwk) - https://github.com/quartzjer/did-jwk/blob/main/spec.md diff --git a/aries/messages/src/msg_types/protocols/did_exchange.rs b/aries/messages/src/msg_types/protocols/did_exchange.rs index ff9eb294e1..8f5d522ef2 100644 --- 
a/aries/messages/src/msg_types/protocols/did_exchange.rs +++ b/aries/messages/src/msg_types/protocols/did_exchange.rs @@ -63,8 +63,6 @@ mod tests { #[test] fn test_protocol_didexchange_v1_1() { - let x = Protocol::from(DidExchangeTypeV1::new_v1_1()); - dbg!(x); test_utils::test_serde( Protocol::from(DidExchangeTypeV1::new_v1_1()), json!("https://didcomm.org/didexchange/1.1"), diff --git a/did_core/did_doc/src/schema/mod.rs b/did_core/did_doc/src/schema/mod.rs index bca3088b80..9ea5bd7174 100644 --- a/did_core/did_doc/src/schema/mod.rs +++ b/did_core/did_doc/src/schema/mod.rs @@ -3,3 +3,19 @@ pub mod service; pub mod types; pub mod utils; pub mod verification_method; + +/// Module of commonly used DID-related JSON-LD contexts +pub mod contexts { + pub const W3C_DID_V1: &str = "https://www.w3.org/ns/did/v1"; + pub const W3C_SUITE_ED25519_2020: &str = "https://w3id.org/security/suites/ed25519-2020/v1"; + pub const W3C_SUITE_ED25519_2018: &str = "https://w3id.org/security/suites/ed25519-2018/v1"; + pub const W3C_SUITE_JWS_2020: &str = "https://w3id.org/security/suites/jws-2020/v1"; + pub const W3C_SUITE_SECP256K1_2019: &str = "https://w3id.org/security/suites/secp256k1-2019/v1"; + pub const W3C_BBS_V1: &str = "https://w3id.org/security/bbs/v1"; + pub const W3C_PGP_V1: &str = "https://w3id.org/pgp/v1"; + pub const W3C_SUITE_X25519_2019: &str = "https://w3id.org/security/suites/x25519-2019/v1"; + pub const W3C_SUITE_X25519_2020: &str = "https://w3id.org/security/suites/x25519-2020/v1"; + pub const W3C_SUITE_SECP259K1_RECOVERY_2020: &str = + "https://w3id.org/security/suites/secp256k1recovery-2020/v2"; + pub const W3C_MULTIKEY_V1: &str = "https://w3id.org/security/multikey/v1"; +} diff --git a/did_core/did_doc/src/schema/utils/mod.rs b/did_core/did_doc/src/schema/utils/mod.rs index 6149b43584..8df0309e83 100644 --- a/did_core/did_doc/src/schema/utils/mod.rs +++ b/did_core/did_doc/src/schema/utils/mod.rs @@ -19,6 +19,15 @@ pub enum OneOrList { List(Vec), } +impl From> for 
OneOrList { + fn from(mut value: Vec) -> Self { + match value.len() { + 1 => OneOrList::One(value.remove(0)), + _ => OneOrList::List(value), + } + } +} + impl OneOrList { pub fn first(&self) -> Option { match self { diff --git a/did_core/did_doc/src/schema/verification_method/verification_method_type.rs b/did_core/did_doc/src/schema/verification_method/verification_method_type.rs index f8e715f5f0..4e30b75835 100644 --- a/did_core/did_doc/src/schema/verification_method/verification_method_type.rs +++ b/did_core/did_doc/src/schema/verification_method/verification_method_type.rs @@ -3,25 +3,58 @@ use std::fmt::Display; use public_key::KeyType; use serde::{Deserialize, Serialize}; -use crate::error::DidDocumentBuilderError; +use crate::{error::DidDocumentBuilderError, schema::contexts}; #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq)] pub enum VerificationMethodType { + /// https://w3id.org/security/suites/jws-2020/v1 JsonWebKey2020, + /// https://w3id.org/security/suites/secp256k1-2019/v1 EcdsaSecp256k1VerificationKey2019, + /// https://w3id.org/security/suites/ed25519-2018/v1 Ed25519VerificationKey2018, + /// https://w3id.org/security/suites/ed25519-2020/v1 Ed25519VerificationKey2020, + /// https://w3id.org/security/bbs/v1 Bls12381G1Key2020, + /// https://w3id.org/security/bbs/v1 Bls12381G2Key2020, + /// https://w3id.org/pgp/v1 PgpVerificationKey2021, - RsaVerificationKey2018, + /// https://w3id.org/security/suites/x25519-2019/v1 X25519KeyAgreementKey2019, + /// https://w3id.org/security/suites/x25519-2020/v1 X25519KeyAgreementKey2020, + /// https://identity.foundation/EcdsaSecp256k1RecoverySignature2020/lds-ecdsa-secp256k1-recovery2020-0.0.jsonld EcdsaSecp256k1RecoveryMethod2020, /// https://www.w3.org/TR/vc-data-integrity/#multikey + /// https://w3id.org/security/multikey/v1 Multikey, } +impl VerificationMethodType { + /// Return the JSON-LD context URL for which this type comes from + pub fn context_for_type(&self) -> &str { + match self { + 
VerificationMethodType::JsonWebKey2020 => contexts::W3C_SUITE_JWS_2020, + VerificationMethodType::EcdsaSecp256k1VerificationKey2019 => { + contexts::W3C_SUITE_SECP256K1_2019 + } + VerificationMethodType::Ed25519VerificationKey2018 => contexts::W3C_SUITE_ED25519_2018, + VerificationMethodType::Ed25519VerificationKey2020 => contexts::W3C_SUITE_ED25519_2020, + VerificationMethodType::Bls12381G1Key2020 => contexts::W3C_BBS_V1, + VerificationMethodType::Bls12381G2Key2020 => contexts::W3C_BBS_V1, + VerificationMethodType::PgpVerificationKey2021 => contexts::W3C_PGP_V1, + VerificationMethodType::X25519KeyAgreementKey2019 => contexts::W3C_SUITE_X25519_2019, + VerificationMethodType::X25519KeyAgreementKey2020 => contexts::W3C_SUITE_X25519_2020, + VerificationMethodType::EcdsaSecp256k1RecoveryMethod2020 => { + contexts::W3C_SUITE_SECP259K1_RECOVERY_2020 + } + VerificationMethodType::Multikey => contexts::W3C_MULTIKEY_V1, + } + } +} + impl Display for VerificationMethodType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -38,7 +71,6 @@ impl Display for VerificationMethodType { VerificationMethodType::Bls12381G1Key2020 => write!(f, "Bls12381G1Key2020"), VerificationMethodType::Bls12381G2Key2020 => write!(f, "Bls12381G2Key2020"), VerificationMethodType::PgpVerificationKey2021 => write!(f, "PgpVerificationKey2021"), - VerificationMethodType::RsaVerificationKey2018 => write!(f, "RsaVerificationKey2018"), VerificationMethodType::X25519KeyAgreementKey2019 => { write!(f, "X25519KeyAgreementKey2019") } diff --git a/did_core/did_methods/did_cheqd/Cargo.toml b/did_core/did_methods/did_cheqd/Cargo.toml new file mode 100644 index 0000000000..371e6724f2 --- /dev/null +++ b/did_core/did_methods/did_cheqd/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "did_cheqd" +authors.workspace = true +description.workspace = true +license.workspace = true +version = "0.1.0" +edition = "2021" + +[lib] +name = "did_cheqd" +path = "src/lib.rs" + +[dependencies] 
+did_resolver = { path = "../../did_resolver" } +tonic = { version = "0.12.3", default-features = false, features = [ + "codegen", + "prost", + "channel", +] } +prost = { version = "0.13.3", default-features = false } +prost-types = "0.13.3" +native-tls = { version = "0.2.12", features = ["alpn"] } +hyper-tls = "0.6.0" +hyper-util = { version = "0.1.10", features = ["client-legacy", "http2"] } +http-body-util = "0.1.2" +async-trait = "0.1.68" +serde_json = "1.0.96" +serde = { version = "1.0.160", features = ["derive"] } +thiserror = "1.0.40" +tokio = { version = "1.38.0" } +chrono = { version = "0.4.24", default-features = false } +url = { version = "2.3.1", default-features = false } +bytes = "1.8.0" + +[dev-dependencies] +tokio = { version = "1.38.0", default-features = false, features = [ + "macros", + "rt", +] } diff --git a/did_core/did_methods/did_cheqd/README.md b/did_core/did_methods/did_cheqd/README.md new file mode 100644 index 0000000000..3c62ecd4e6 --- /dev/null +++ b/did_core/did_methods/did_cheqd/README.md @@ -0,0 +1,14 @@ +# DID Cheqd Resolver +This crate contains a resolver for DIDs of the [did:cheqd](https://docs.cheqd.io/product/architecture/adr-list/adr-001-cheqd-did-method) method. The implementation resolves DIDs via gRPC network requests to the configured nodes. Default nodes for cheqd's `mainnet` & `testnet` can be used, or custom nodes can be opt-in by supplying a different gRPC URL configuration. + +The implementations in this crate are largely inspired from cheqd's own typescript [sdk](https://github.com/cheqd/sdk/blob/main/src/modules/did.ts). + +This crate uses gRPC types and clients generated using [tonic](https://github.com/hyperium/tonic). The generated rust code is checked-in to this repository for monitoring, [see here](./src/proto/mod.rs). 
These generated rust files are checked-in alongside the V2 cheqd proto files & dependencies, [here](./cheqd_proto_gen/proto/), which are sourced from [cheqd's Buf registry](https://buf.build/cheqd/proto/docs). + +Since the generated code & proto files are not relatively large nor overwhelming in content, they are checked-in rather than pulled and/or generated at build time. The benefit is that the contents of the files can be monitored with each update, making supply-chain attacks obvious. It also reduces the build time complexity for consumers - such as reducing requirements for any 3rd party build tools to be installed (`protobuf`). The drawback is that it introduces some more manual maintenance. + +## Crate Maintenance +If there is an update to the `.proto` files, or `tonic` has a breaking update, the checked-in files may be due for a manual update. To do so, update any proto files in the [proto dir](./cheqd_proto_gen/proto/), then re-generate the rust files by using the [cheqd-proto-gen](./cheqd_proto_gen/) binary within this directory: +``` +cargo run --bin cheqd-proto-gen +``` \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/Cargo.toml b/did_core/did_methods/did_cheqd/cheqd_proto_gen/Cargo.toml new file mode 100644 index 0000000000..4efd515880 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "cheqd_proto_gen" +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "cheqd-proto-gen" +path = "src/main.rs" + +[dependencies] +tonic-build = "0.12.3" diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/diddoc.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/diddoc.proto new file mode 100644 index 0000000000..ba08ea6f58 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/diddoc.proto @@ -0,0 +1,138 @@ +syntax = "proto3"; + +package cheqd.did.v2; + +import "gogoproto/gogo.proto"; 
+import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/cheqd/cheqd-node/x/did/types"; + +// DidDoc defines a DID Document, as defined in the DID Core specification. +// Documentation: https://www.w3.org/TR/did-core/ +message DidDoc { + // context is a list of URIs used to identify the context of the DID document. + // Default: https://www.w3.org/ns/did/v1 + repeated string context = 1; + + // id is the DID of the DID document. + // Format: did:cheqd:: + string id = 2; + + // controller is a list of DIDs that are allowed to control the DID document. + repeated string controller = 3; + + // verificationMethod is a list of verification methods that can be used to + // verify a digital signature or cryptographic proof. + repeated VerificationMethod verification_method = 4; + + // authentication is a list of verification methods that can be used to + // authenticate as the DID subject. + repeated string authentication = 5; + + // assertionMethod is a list of verification methods that can be used to + // assert statements as the DID subject. + repeated string assertion_method = 6; + + // capabilityInvocation is a list of verification methods that can be used to + // invoke capabilities as the DID subject. + repeated string capability_invocation = 7; + + // capabilityDelegation is a list of verification methods that can be used to + // delegate capabilities as the DID subject. + repeated string capability_delegation = 8; + + // keyAgreement is a list of verification methods that can be used to perform + // key agreement as the DID subject. + repeated string key_agreement = 9; + + // service is a list of services that can be used to interact with the DID subject. + repeated Service service = 10; + + // alsoKnownAs is a list of DIDs that are known to refer to the same DID subject. + repeated string also_known_as = 11; +} + +// VerificationMethod defines a verification method, as defined in the DID Core specification. 
+// Documentation: https://www.w3.org/TR/did-core/#verification-methods +message VerificationMethod { + // id is the unique identifier of the verification method. + // Format: did:cheqd::# + string id = 1; + + // type is the type of the verification method. + // Example: Ed25519VerificationKey2020 + string verification_method_type = 2 [(gogoproto.jsontag) = "type,omitempty"]; + + // controller is the DID of the controller of the verification method. + // Format: did:cheqd:: + string controller = 3; + + // verification_material is the public key of the verification method. + // Commonly used verification material types: publicJwk, publicKeyBase58, publicKeyMultibase + string verification_material = 4; +} + +// Service defines a service, as defined in the DID Core specification. +// Documentation: https://www.w3.org/TR/did-core/#services +message Service { + // id is the unique identifier of the service. + // Format: did:cheqd::# + string id = 1; + + // type is the type of the service. + // Example: LinkedResource + string service_type = 2 [(gogoproto.jsontag) = "type,omitempty"]; + + // serviceEndpoint is the endpoint of the service. + // Example: https://example.com/endpoint + repeated string service_endpoint = 3; +} + +// DidDocWithMetadata defines a DID Document with metadata, as defined in the DID Core specification. +// Contains the DID Document, as well as DID Document metadata. +message DidDocWithMetadata { + // didDocument is the DID Document. + DidDoc did_doc = 1 [(gogoproto.jsontag) = "didDocument"]; + + // didDocumentMetadata is the DID Document metadata. + Metadata metadata = 2 [(gogoproto.jsontag) = "didDocumentMetadata"]; +} + +// Metadata defines DID Document metadata, as defined in the DID Core specification. +// Documentation: https://www.w3.org/TR/did-core/#did-document-metadata-properties +message Metadata { + // created is the timestamp of the creation of the DID Document. 
+ // Format: RFC3339 + // Example: 2021-03-10T15:16:17Z + google.protobuf.Timestamp created = 1 [ + (gogoproto.nullable) = false, + (gogoproto.stdtime) = true + ]; + + // updated is the timestamp of the last update of the DID Document. + // Format: RFC3339 + // Example: 2021-03-10T15:16:17Z + google.protobuf.Timestamp updated = 2 [ + (gogoproto.nullable) = true, + (gogoproto.stdtime) = true + ]; + + // deactivated is a flag that indicates whether the DID Document is deactivated. + // Default: false + bool deactivated = 3; + + // version_id is the version identifier of the DID Document. + // Format: UUID + // Example: 123e4567-e89b-12d3-a456-426655440000 + string version_id = 4; + + // next_version_id is the version identifier of the next version of the DID Document. + // Format: UUID + // Example: 123e4567-e89b-12d3-a456-426655440000 + string next_version_id = 5 [(gogoproto.nullable) = true]; + + // previous_version_id is the version identifier of the previous version of the DID Document. 
+ // Format: UUID + // Example: 123e4567-e89b-12d3-a456-426655440000 + string previous_version_id = 6 [(gogoproto.nullable) = true]; +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/query.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/query.proto new file mode 100644 index 0000000000..0bbfdc52e2 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/did/v2/query.proto @@ -0,0 +1,87 @@ +syntax = "proto3"; +package cheqd.did.v2; +import "cheqd/did/v2/diddoc.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "google/api/annotations.proto"; +option go_package = "github.com/cheqd/cheqd-node/x/did/types"; +// Query defines the gRPC querier service for the DID module +service Query { + // Fetch latest version of a DID Document for a given DID + rpc DidDoc(QueryDidDocRequest) returns (QueryDidDocResponse) { + option (google.api.http) = {get: "/cheqd/did/v2/{id}"}; + } + // Fetch specific version of a DID Document for a given DID + rpc DidDocVersion(QueryDidDocVersionRequest) returns (QueryDidDocVersionResponse) { + option (google.api.http) = {get: "/cheqd/did/v2/{id}/version/{version}"}; + } + // Fetch list of all versions of DID Documents for a given DID + rpc AllDidDocVersionsMetadata(QueryAllDidDocVersionsMetadataRequest) returns (QueryAllDidDocVersionsMetadataResponse) { + option (google.api.http) = {get: "/cheqd/did/v2/{id}/versions"}; + } +} +// QueryDidDocRequest is the request type for the Query/DidDoc method +message QueryDidDocRequest { + // DID unique identifier of the DID Document to fetch. + // UUID-style DIDs as well as Indy-style DID are supported. 
+ // + // Format: did:cheqd:: + // + // Examples: + // - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + // - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + string id = 1; +} +// QueryDidDocResponse is the response type for the Query/DidDoc method +message QueryDidDocResponse { + // Successful resolution of the DID Document returns the following: + // - did_doc is the latest version of the DID Document + // - metadata is the DID Document metadata associated with the latest version of the DID Document + DidDocWithMetadata value = 1; +} +// QueryDidDocVersionRequest is the request type for the Query/DidDocVersion method +message QueryDidDocVersionRequest { + // DID unique identifier of the DID Document to fetch. + // UUID-style DIDs as well as Indy-style DID are supported. + // + // Format: did:cheqd:: + // + // Examples: + // - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + // - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + string id = 1; + // Unique version identifier of the DID Document to fetch. + // Returns the specified version of the DID Document. + // + // Format: + // + // Example: 93f2573c-eca9-4098-96cb-a1ec676a29ed + string version = 2; +} +// QueryDidDocVersionResponse is the response type for the Query/DidDocVersion method +message QueryDidDocVersionResponse { + // Successful resolution of the DID Document returns the following: + // - did_doc is the requested version of the DID Document + // - metadata is DID Document metadata associated with the requested version of the DID Document + DidDocWithMetadata value = 1; +} +// QueryAllDidDocVersionsMetadataRequest is the request type for the Query/AllDidDocVersionsMetadata method +message QueryAllDidDocVersionsMetadataRequest { + // DID unique identifier of the DID Document to fetch version metadata. + // UUID-style DIDs as well as Indy-style DID are supported. 
+ // + // Format: did:cheqd:: + // + // Examples: + // - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + // - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + string id = 1; + // pagination defines an optional pagination for the request. + cosmos.base.query.v1beta1.PageRequest pagination = 2; +} +// QueryAllDidDocVersionsMetadataResponse is the response type for the Query/AllDidDocVersionsMetadata method +message QueryAllDidDocVersionsMetadataResponse { + // versions is the list of all versions of the requested DID Document + repeated Metadata versions = 1; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/query.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/query.proto new file mode 100644 index 0000000000..b9a0097dd1 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/query.proto @@ -0,0 +1,79 @@ +syntax = "proto3"; +package cheqd.resource.v2; +import "cheqd/resource/v2/resource.proto"; +import "cosmos/base/query/v1beta1/pagination.proto"; +import "gogoproto/gogo.proto"; +import "google/api/annotations.proto"; +option go_package = "github.com/cheqd/cheqd-node/x/resource/types"; +// Query defines the gRPC querier service for the resource module +service Query { + // Fetch data/payload for a specific resource (without metadata) + rpc Resource(QueryResourceRequest) returns (QueryResourceResponse) { + option (google.api.http).get = "/cheqd/resource/v2/{collection_id}/resources/{id}"; + } + // Fetch only metadata for a specific resource + rpc ResourceMetadata(QueryResourceMetadataRequest) returns (QueryResourceMetadataResponse) { + option (google.api.http).get = "/cheqd/resource/v2/{collection_id}/resources/{id}/metadata"; + } + // Fetch metadata for all resources in a collection + rpc CollectionResources(QueryCollectionResourcesRequest) returns 
(QueryCollectionResourcesResponse) { + option (google.api.http).get = "/cheqd/resource/v2/{collection_id}/metadata"; + } +} +// QueryResourceRequest is the request type for the Query/Resource RPC method +message QueryResourceRequest { + // collection_id is an identifier of the DidDocument the resource belongs to. + // Format: + // + // Examples: + // - c82f2b02-bdab-4dd7-b833-3e143745d612 + // - wGHEXrZvJxR8vw5P3UWH1j + string collection_id = 1; + // id is a unique id of the resource. + // Format: + string id = 2; +} +// QueryResourceResponse is the response type for the Query/Resource RPC method +message QueryResourceResponse { + // Successful resolution of the resource returns the following: + // - resource is the requested resource + // - metadata is the resource metadata associated with the requested resource + ResourceWithMetadata resource = 1; +} +// QueryResourceMetadataRequest is the request type for the Query/ResourceMetadata RPC method +message QueryResourceMetadataRequest { + // collection_id is an identifier of the DidDocument the resource belongs to. + // Format: + // + // Examples: + // - c82f2b02-bdab-4dd7-b833-3e143745d612 + // - wGHEXrZvJxR8vw5P3UWH1j + string collection_id = 1; + // id is a unique id of the resource. + // Format: + string id = 2; +} +// QueryResourceMetadataResponse is the response type for the Query/ResourceMetadata RPC method +message QueryResourceMetadataResponse { + // resource is the requested resource metadata + Metadata resource = 1 [(gogoproto.jsontag) = "linkedResourceMetadata"]; +} +// QueryCollectionResourcesRequest is the request type for the Query/CollectionResources RPC method +message QueryCollectionResourcesRequest { + // collection_id is an identifier of the DidDocument the resource belongs to. + // Format: + // + // Examples: + // - c82f2b02-bdab-4dd7-b833-3e143745d612 + // - wGHEXrZvJxR8vw5P3UWH1j + string collection_id = 1; + // pagination defines an optional pagination for the request. 
+ cosmos.base.query.v1beta1.PageRequest pagination = 2; +} +// QueryCollectionResourcesResponse is the response type for the Query/CollectionResources RPC method +message QueryCollectionResourcesResponse { + // resources is the requested collection of resource metadata + repeated Metadata resources = 1 [(gogoproto.jsontag) = "linkedResourceMetadata"]; + // pagination defines the pagination in the response. + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/resource.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/resource.proto new file mode 100644 index 0000000000..96ab371c95 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cheqd/resource/v2/resource.proto @@ -0,0 +1,89 @@ +syntax = "proto3"; +package cheqd.resource.v2; +import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; +option go_package = "github.com/cheqd/cheqd-node/x/resource/types"; +// Resource stores the contents of a DID-Linked Resource +message Resource { + // bytes is the raw data of the Resource + bytes data = 1; +} +// Metadata stores the metadata of a DID-Linked Resource +message Metadata { + // collection_id is the ID of the collection that the Resource belongs to. Defined client-side. + // This field is the unique identifier of the DID linked to this Resource + // Format: + // + // Examples: + // - c82f2b02-bdab-4dd7-b833-3e143745d612 + // - wGHEXrZvJxR8vw5P3UWH1j + string collection_id = 1 [(gogoproto.jsontag) = "resourceCollectionId"]; + // id is the ID of the Resource. Defined client-side. + // This field is a unique identifier for this specific version of the Resource. + // Format: + string id = 2 [(gogoproto.jsontag) = "resourceId"]; + // name is a human-readable name for the Resource. Defined client-side. + // Does not change between different versions. 
+ // Example: PassportSchema, EducationTrustRegistry + string name = 3 [(gogoproto.jsontag) = "resourceName"]; + // version is a human-readable semantic version for the Resource. Defined client-side. + // Stored as a string. OPTIONAL. + // Example: 1.0.0, v2.1.0 + string version = 4 [ + (gogoproto.jsontag) = "resourceVersion", + (gogoproto.nullable) = true + ]; + // resource_type is a Resource type that identifies what the Resource is. Defined client-side. + // This is NOT the same as the resource's media type. + // Example: AnonCredsSchema, StatusList2021 + string resource_type = 5 [(gogoproto.jsontag) = "resourceType"]; + // List of alternative URIs for the SAME Resource. + repeated AlternativeUri also_known_as = 6 [ + (gogoproto.jsontag) = "resourceAlternativeUri", + (gogoproto.nullable) = true + ]; + // media_type is IANA media type of the Resource. Defined ledger-side. + // Example: application/json, image/png + string media_type = 7; + // created is the time at which the Resource was created. Defined ledger-side. + // Format: RFC3339 + // Example: 2021-01-01T00:00:00Z + google.protobuf.Timestamp created = 8 [ + (gogoproto.nullable) = false, + (gogoproto.stdtime) = true + ]; + // checksum is a SHA-256 checksum hash of the Resource. Defined ledger-side. + // Example: d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f + string checksum = 9; + // previous_version_id is the ID of the previous version of the Resource. Defined ledger-side. + // This is based on the Resource's name and Resource type to determine whether it's the same Resource. + // Format: + string previous_version_id = 10 [(gogoproto.nullable) = true]; + // next_version_id is the ID of the next version of the Resource. Defined ledger-side. + // This is based on the Resource's name and Resource type to determine whether it's the same Resource. 
+ // Format: + string next_version_id = 11 [(gogoproto.nullable) = true]; +} +// AlternativeUri are alternative URIs that can be used to access the Resource. +// By default, at least the DID URI equivalent of the Resource is populated. +message AlternativeUri { + // uri is the URI of the Resource. + // Examples: + // - did:cheqd:testnet:MjYxNzYKMjYxNzYK/resources/4600ea35-8916-4ac4-b412-55b8f49dd94e + // - https://resolver.cheqd.net/1.0/identifiers/did:cheqd:testnet:MjYxNzYKMjYxNzYK/resources/4600ea35-8916-4ac4-b412-55b8f49dd94e + // - https://example.com/example.json + // - https://gateway.ipfs.io/ipfs/bafybeihetj2ng3d74k7t754atv2s5dk76pcqtvxls6dntef3xa6rax25xe + // - ipfs://bafybeihetj2ng3d74k7t754atv2s5dk76pcqtvxls6dntef3xa6rax25xe + string uri = 1; + // description is a human-readable description of the URI. Defined client-side. + // Examples: + // - did-uri + // - http-uri + // - ipfs-uri + string description = 2; +} +// ResourceWithMetadata describes the overall structure of a DID-Linked Resource +message ResourceWithMetadata { + Resource resource = 1 [(gogoproto.jsontag) = "linkedResource"]; + Metadata metadata = 2 [(gogoproto.jsontag) = "linkedResourceMetadata"]; +} diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cosmos/base/query/v1beta1/pagination.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cosmos/base/query/v1beta1/pagination.proto new file mode 100644 index 0000000000..a532fc38a0 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/cosmos/base/query/v1beta1/pagination.proto @@ -0,0 +1,48 @@ +syntax = "proto3"; +package cosmos.base.query.v1beta1; +option go_package = "github.com/cosmos/cosmos-sdk/types/query"; +// PageRequest is to be embedded in gRPC request messages for efficient +// pagination. 
Ex: +// +// message SomeRequest { +// Foo some_parameter = 1; +// PageRequest pagination = 2; +// } +message PageRequest { + // key is a value returned in PageResponse.next_key to begin + // querying the next page most efficiently. Only one of offset or key + // should be set. + bytes key = 1; + // offset is a numeric offset that can be used when key is unavailable. + // It is less efficient than using key. Only one of offset or key should + // be set. + uint64 offset = 2; + // limit is the total number of results to be returned in the result page. + // If left empty it will default to a value to be set by each app. + uint64 limit = 3; + // count_total is set to true to indicate that the result set should include + // a count of the total number of items available for pagination in UIs. + // count_total is only respected when offset is used. It is ignored when key + // is set. + bool count_total = 4; + // reverse is set to true if results are to be returned in the descending order. + // + // Since: cosmos-sdk 0.43 + bool reverse = 5; +} +// PageResponse is to be embedded in gRPC response messages where the +// corresponding request message has used PageRequest. +// +// message SomeResponse { +// repeated Bar results = 1; +// PageResponse page = 2; +// } +message PageResponse { + // next_key is the key to be passed to PageRequest.key to + // query the next page most efficiently. It will be empty if + // there are no more results. 
+ bytes next_key = 1; + // total is total number of results available if PageRequest.count_total + // was set, its value is undefined otherwise + uint64 total = 2; +} diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/gogoproto/gogo.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/gogoproto/gogo.proto new file mode 100644 index 0000000000..8947d90aa5 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/gogoproto/gogo.proto @@ -0,0 +1,144 @@ +// Protocol Buffers for Go with Gadgets +// +// Copyright (c) 2013, The GoGo Authors. All rights reserved. +// http://github.com/gogo/protobuf +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto2"; +package gogoproto; + +import "google/protobuf/descriptor.proto"; + +option java_package = "com.google.protobuf"; +option java_outer_classname = "GoGoProtos"; +option go_package = "github.com/gogo/protobuf/gogoproto"; + +extend google.protobuf.EnumOptions { + optional bool goproto_enum_prefix = 62001; + optional bool goproto_enum_stringer = 62021; + optional bool enum_stringer = 62022; + optional string enum_customname = 62023; + optional bool enumdecl = 62024; +} + +extend google.protobuf.EnumValueOptions { + optional string enumvalue_customname = 66001; +} + +extend google.protobuf.FileOptions { + optional bool goproto_getters_all = 63001; + optional bool goproto_enum_prefix_all = 63002; + optional bool goproto_stringer_all = 63003; + optional bool verbose_equal_all = 63004; + optional bool face_all = 63005; + optional bool gostring_all = 63006; + optional bool populate_all = 63007; + optional bool stringer_all = 63008; + optional bool onlyone_all = 63009; + + optional bool equal_all = 63013; + optional bool description_all = 63014; + optional bool testgen_all = 63015; + optional bool benchgen_all = 63016; + optional bool marshaler_all = 63017; + optional bool unmarshaler_all = 63018; + optional bool stable_marshaler_all = 63019; + + optional bool sizer_all = 63020; + + optional bool goproto_enum_stringer_all = 63021; + optional bool enum_stringer_all = 63022; + + optional bool unsafe_marshaler_all = 63023; + optional bool unsafe_unmarshaler_all = 63024; + + optional bool goproto_extensions_map_all = 63025; + optional bool goproto_unrecognized_all = 63026; + optional bool gogoproto_import = 63027; + optional bool protosizer_all = 63028; + optional bool compare_all = 63029; + optional bool typedecl_all = 63030; + optional bool enumdecl_all = 63031; + + optional bool goproto_registration = 63032; + optional bool messagename_all = 63033; + + optional bool goproto_sizecache_all = 63034; + optional bool goproto_unkeyed_all = 63035; +} + +extend 
google.protobuf.MessageOptions { + optional bool goproto_getters = 64001; + optional bool goproto_stringer = 64003; + optional bool verbose_equal = 64004; + optional bool face = 64005; + optional bool gostring = 64006; + optional bool populate = 64007; + optional bool stringer = 67008; + optional bool onlyone = 64009; + + optional bool equal = 64013; + optional bool description = 64014; + optional bool testgen = 64015; + optional bool benchgen = 64016; + optional bool marshaler = 64017; + optional bool unmarshaler = 64018; + optional bool stable_marshaler = 64019; + + optional bool sizer = 64020; + + optional bool unsafe_marshaler = 64023; + optional bool unsafe_unmarshaler = 64024; + + optional bool goproto_extensions_map = 64025; + optional bool goproto_unrecognized = 64026; + + optional bool protosizer = 64028; + optional bool compare = 64029; + + optional bool typedecl = 64030; + + optional bool messagename = 64033; + + optional bool goproto_sizecache = 64034; + optional bool goproto_unkeyed = 64035; +} + +extend google.protobuf.FieldOptions { + optional bool nullable = 65001; + optional bool embed = 65002; + optional string customtype = 65003; + optional string customname = 65004; + optional string jsontag = 65005; + optional string moretags = 65006; + optional string casttype = 65007; + optional string castkey = 65008; + optional string castvalue = 65009; + + optional bool stdtime = 65010; + optional bool stdduration = 65011; + optional bool wktpointer = 65012; + +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/annotations.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/annotations.proto new file mode 100644 index 0000000000..b17b345600 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this 
file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/http.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/http.proto new file mode 100644 index 0000000000..c9b7fdc3f1 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/api/http.proto @@ -0,0 +1,371 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. 
`HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// - HTTP: `GET /v1/messages/123456` +// - gRPC: `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo` +// - gRPC: `GetMessage(message_id: "123456" revision: 2 sub: +// SubMessage(subfield: "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. 
+// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` +// - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` +// - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. 
The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// - HTTP: `GET /v1/messages/123456` +// - gRPC: `GetMessage(message_id: "123456")` +// +// - HTTP: `GET /v1/users/me/messages/123456` +// - gRPC: `GetMessage(user_id: "me" message_id: "123456")` +// +// Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They +// are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL +// query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP +// request body, all +// fields are passed via URL path and URL query parameters. 
+// +// Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. 
The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// The following example selects a gRPC method and applies an `HttpRule` to it: +// +// http: +// rules: +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. 
+// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. 
When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/descriptor.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/descriptor.proto new file mode 100644 index 0000000000..14e7db7b44 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/descriptor.proto @@ -0,0 +1,1307 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; + + // Extensions for tooling. 
+ extensions 536000000 [declaration = { + number: 536000000 + type: ".buf.descriptor.v1.FileDescriptorSetExtension" + full_name: ".buf.descriptor.v1.buf_file_descriptor_set_extension" + }]; +} + +// The full set of known editions. +enum Edition { + // A placeholder for an unknown edition value. + EDITION_UNKNOWN = 0; + + // A placeholder edition for specifying default behaviors *before* a feature + // was first introduced. This is effectively an "infinite past". + EDITION_LEGACY = 900; + + // Legacy syntax "editions". These pre-date editions, but behave much like + // distinct editions. These can't be used to specify the edition of proto + // files, but feature definitions must supply proto2/proto3 defaults for + // backwards compatibility. + EDITION_PROTO2 = 998; + EDITION_PROTO3 = 999; + + // Editions that have been released. The specific values are arbitrary and + // should not be depended on, but they will always be time-ordered for easy + // comparison. + EDITION_2023 = 1000; + EDITION_2024 = 1001; + + // Placeholder editions for testing feature resolution. These should not be + // used or relied on outside of tests. + EDITION_1_TEST_ONLY = 1; + EDITION_2_TEST_ONLY = 2; + EDITION_99997_TEST_ONLY = 99997; + EDITION_99998_TEST_ONLY = 99998; + EDITION_99999_TEST_ONLY = 99999; + + // Placeholder for specifying unbounded edition support. This should only + // ever be used by plugins that can expect to never require any changes to + // support a new edition. + EDITION_MAX = 0x7FFFFFFF; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. + repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. 
+ // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2", "proto3", and "editions". + // + // If `edition` is present, this value must be "editions". + optional string syntax = 12; + + // The edition of the proto file. + optional Edition edition = 14; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + message Declaration { + // The extension number declared within the extension range. + optional int32 number = 1; + + // The fully-qualified name of the extension field. There must be a leading + // dot in front of the full name. + optional string full_name = 2; + + // The fully-qualified type name of the extension field. Unlike + // Metadata.type, Declaration.type must have a leading dot for messages + // and enums. + optional string type = 3; + + // If true, indicates that the number is reserved in the extension range, + // and any extension field with the number will fail to compile. Set this + // when a declared extension field is deleted. + optional bool reserved = 5; + + // If true, indicates that the extension must be defined as repeated. + // Otherwise the extension must be defined as optional. + optional bool repeated = 6; + + reserved 4; // removed is_repeated + } + + // For external users: DO NOT USE. We are in the process of open sourcing + // extension declaration and executing internal cleanups before it can be + // used externally. + repeated Declaration declaration = 2 [retention = RETENTION_SOURCE]; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The verification state of the extension range. + enum VerificationState { + // All the extensions of the range must be declared. + DECLARATION = 0; + UNVERIFIED = 1; + } + + // The verification state of the range. + // TODO: flip the default to DECLARATION once all empty ranges + // are marked as UNVERIFIED. + optional VerificationState verification = 3 + [default = UNVERIFIED, retention = RETENTION_SOURCE]; + + // Clients can define custom options in extensions of this message. See above. 
+ extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported after google.protobuf. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. In Editions, the group wire format + // can be enabled via the `message_encoding` feature. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REPEATED = 3; + // The required label is only allowed in google.protobuf. In proto3 and Editions + // it's explicitly prohibited. In Editions, the `field_presence` feature + // can be used to get this behavior. + LABEL_REQUIRED = 2; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. 
first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must belong to a oneof to signal + // to old proto3 clients that presence is tracked for this field. This oneof + // is known as a "synthetic" oneof, and this field must be its sole member + // (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + // exist in the descriptor only, and do not generate any API. Synthetic oneofs + // must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. 
For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. 
+message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. 
+// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + // Controls the name of the wrapper Java class generated for the .proto file. + // That class will always contain the .proto file's getDescriptor() method as + // well as any top-level extensions defined in the .proto file. + // If java_multiple_files is disabled, then all the other classes from the + // .proto file will be nested inside the single wrapper outer class. + optional string java_outer_classname = 8; + + // If enabled, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the wrapper class + // named by java_outer_classname. However, the wrapper class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. 
+ optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // A proto2 file can set this to true to opt in to UTF-8 checking for Java, + // which will throw an exception if invalid UTF-8 is parsed from the wire or + // assigned to a string field. + // + // TODO: clarify exactly what kinds of field types this option + // applies to, and update these docs accordingly. + // + // Proto3 files already perform these checks. Setting the option explicitly to + // false has no effect: it cannot be used to opt proto3 files out of UTF-8 + // checks. + optional bool java_string_check_utf8 = 27 [default = false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. 
Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + reserved 42; // removed php_generic_services + reserved "php_generic_services"; + + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. 
When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + // Any features defined in the specific edition. + optional FeatureSet features = 50; + + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". 
+ optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + reserved 4, 5, 6; + + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // + // This should only be used as a temporary measure against broken builds due + // to the change in behavior for JSON field name conflicts. + // + // TODO This is legacy behavior we plan to remove once downstream + // teams have had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 11 [deprecated = true]; + + // Any features defined in the specific edition. 
+ optional FeatureSet features = 12; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // NOTE: ctype is deprecated. Use `features.(pb.cpp).string_type` instead. + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is only implemented to support use of + // [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + // type "bytes" in the open source release. + // TODO: make ctype actually deprecated. + optional CType ctype = 1 [/*deprecated = true,*/ default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + // The option [ctype=CORD] may be applied to a non-repeated field of type + // "bytes". It indicates that in C++, the data should be stored in a Cord + // instead of a string. For very large strings, this may reduce memory + // fragmentation. It may also allow better performance when parsing from a + // Cord, or when parsing with aliasing enabled, as the parsed Cord may then + // alias the original buffer. + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. This option is prohibited in + // Editions, but the `repeated_field_encoding` feature can be used to control + // the behavior. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field. 
The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // Note that lazy message fields are still eagerly verified to check + // ill-formed wireformat or missing required fields. 
Calling IsInitialized() + // on the outer message would fail if the inner message has missing required + // fields. Failed verification would result in parsing failure (except when + // uninitialized messages are acceptable). + optional bool lazy = 5 [default = false]; + + // unverified_lazy does no correctness checks on the byte stream. This should + // only be used where lazy with verification is prohibitive for performance + // reasons. + optional bool unverified_lazy = 15 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + // Indicate that the field value should not be printed out when using debug + // formats, e.g. when the field contains sensitive credentials. + optional bool debug_redact = 16 [default = false]; + + // If set to RETENTION_SOURCE, the option will be omitted from the binary. + enum OptionRetention { + RETENTION_UNKNOWN = 0; + RETENTION_RUNTIME = 1; + RETENTION_SOURCE = 2; + } + + optional OptionRetention retention = 17; + + // This indicates the types of entities that the field may apply to when used + // as an option. If it is unset, then the field may be freely used as an + // option on any kind of entity. + enum OptionTargetType { + TARGET_TYPE_UNKNOWN = 0; + TARGET_TYPE_FILE = 1; + TARGET_TYPE_EXTENSION_RANGE = 2; + TARGET_TYPE_MESSAGE = 3; + TARGET_TYPE_FIELD = 4; + TARGET_TYPE_ONEOF = 5; + TARGET_TYPE_ENUM = 6; + TARGET_TYPE_ENUM_ENTRY = 7; + TARGET_TYPE_SERVICE = 8; + TARGET_TYPE_METHOD = 9; + } + + repeated OptionTargetType targets = 19; + + message EditionDefault { + optional Edition edition = 3; + optional string value = 2; // Textproto value. 
+ } + repeated EditionDefault edition_defaults = 20; + + // Any features defined in the specific edition. + optional FeatureSet features = 21; + + // Information about the support window of a feature. + message FeatureSupport { + // The edition that this feature was first available in. In editions + // earlier than this one, the default assigned to EDITION_LEGACY will be + // used, and proto files will not be able to override it. + optional Edition edition_introduced = 1; + + // The edition this feature becomes deprecated in. Using this after this + // edition may trigger warnings. + optional Edition edition_deprecated = 2; + + // The deprecation warning text if this feature is used after the edition it + // was marked deprecated in. + optional string deprecation_warning = 3; + + // The edition this feature is no longer available in. In editions after + // this one, the last default assigned will be used, and proto files will + // not be able to override it. + optional Edition edition_removed = 4; + } + optional FeatureSupport feature_support = 22; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype + reserved 18; // reserve target, target_obsolete_do_not_use +} + +message OneofOptions { + // Any features defined in the specific edition. + optional FeatureSet features = 1; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // Enable the legacy handling of JSON field name conflicts. This lowercases + // and strips underscored from the fields before comparison in proto3 only. + // The new behavior takes `json_name` into account and applies to proto2 as + // well. + // TODO Remove this legacy behavior once downstream teams have + // had time to migrate. + optional bool deprecated_legacy_json_field_conflicts = 6 [deprecated = true]; + + // Any features defined in the specific edition. + optional FeatureSet features = 7; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // Any features defined in the specific edition. + optional FeatureSet features = 2; + + // Indicate that fields annotated with this enum value should not be printed + // out when using debug formats, e.g. when the field contains sensitive + // credentials. + optional bool debug_redact = 3 [default = false]; + + // Information about the support window of a feature value. + optional FieldOptions.FeatureSupport feature_support = 4; + + // The parser stores options it doesn't recognize here. See above. 
+ repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Any features defined in the specific edition. + optional FeatureSet features = 34; + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. 
+ enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // Any features defined in the specific edition. + optional FeatureSet features = 35; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + // "foo.(bar.baz).moo". + message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. 
+ optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Features + +// TODO Enums in C++ gencode (and potentially other languages) are +// not well scoped. This means that each of the feature enums below can clash +// with each other. The short names we've chosen maximize call-site +// readability, but leave us very open to this scenario. A future feature will +// be designed and implemented to handle this, hopefully before we ever hit a +// conflict here. +message FeatureSet { + enum FieldPresence { + FIELD_PRESENCE_UNKNOWN = 0; + EXPLICIT = 1; + IMPLICIT = 2; + LEGACY_REQUIRED = 3; + } + optional FieldPresence field_presence = 1 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_LEGACY, value: "EXPLICIT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "IMPLICIT" }, + edition_defaults = { edition: EDITION_2023, value: "EXPLICIT" } + ]; + + enum EnumType { + ENUM_TYPE_UNKNOWN = 0; + OPEN = 1; + CLOSED = 2; + } + optional EnumType enum_type = 2 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_LEGACY, value: "CLOSED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "OPEN" } + ]; + + enum RepeatedFieldEncoding { + REPEATED_FIELD_ENCODING_UNKNOWN = 0; + PACKED = 1; + EXPANDED = 2; + } + optional RepeatedFieldEncoding repeated_field_encoding = 3 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + 
edition_defaults = { edition: EDITION_LEGACY, value: "EXPANDED" }, + edition_defaults = { edition: EDITION_PROTO3, value: "PACKED" } + ]; + + enum Utf8Validation { + UTF8_VALIDATION_UNKNOWN = 0; + VERIFY = 2; + NONE = 3; + reserved 1; + } + optional Utf8Validation utf8_validation = 4 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_LEGACY, value: "NONE" }, + edition_defaults = { edition: EDITION_PROTO3, value: "VERIFY" } + ]; + + enum MessageEncoding { + MESSAGE_ENCODING_UNKNOWN = 0; + LENGTH_PREFIXED = 1; + DELIMITED = 2; + } + optional MessageEncoding message_encoding = 5 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_FIELD, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_LEGACY, value: "LENGTH_PREFIXED" } + ]; + + enum JsonFormat { + JSON_FORMAT_UNKNOWN = 0; + ALLOW = 1; + LEGACY_BEST_EFFORT = 2; + } + optional JsonFormat json_format = 6 [ + retention = RETENTION_RUNTIME, + targets = TARGET_TYPE_MESSAGE, + targets = TARGET_TYPE_ENUM, + targets = TARGET_TYPE_FILE, + feature_support = { + edition_introduced: EDITION_2023, + }, + edition_defaults = { edition: EDITION_LEGACY, value: "LEGACY_BEST_EFFORT" }, + edition_defaults = { edition: EDITION_PROTO3, value: "ALLOW" } + ]; + + reserved 999; + + extensions 1000 to 9994 [ + declaration = { + number: 1000, + full_name: ".pb.cpp", + type: ".pb.CppFeatures" + }, + declaration = { + number: 1001, + full_name: ".pb.java", + type: ".pb.JavaFeatures" + }, + declaration = { number: 1002, full_name: ".pb.go", type: ".pb.GoFeatures" }, + declaration = { + number: 9990, + full_name: ".pb.proto1", + type: ".pb.Proto1Features" + } + ]; + + extensions 9995 to 9999; // For internal testing + extensions 10000; // for https://github.com/bufbuild/protobuf-es +} + +// A compiled specification 
for the defaults of a set of features. These +// messages are generated from FeatureSet extensions and can be used to seed +// feature resolution. The resolution with this object becomes a simple search +// for the closest matching edition, followed by proto merges. +message FeatureSetDefaults { + // A map from every known edition with a unique set of defaults to its + // defaults. Not all editions may be contained here. For a given edition, + // the defaults at the closest matching edition ordered at or before it should + // be used. This field must be in strict ascending order by edition. + message FeatureSetEditionDefault { + optional Edition edition = 3; + + // Defaults of features that can be overridden in this edition. + optional FeatureSet overridable_features = 4; + + // Defaults of features that can't be overridden in this edition. + optional FeatureSet fixed_features = 5; + + reserved 1, 2; + reserved "features"; + } + repeated FeatureSetEditionDefault defaults = 1; + + // The minimum supported edition (inclusive) when this was constructed. + // Editions before this will not have defaults. + optional Edition minimum_edition = 4; + + // The maximum known edition (inclusive) when this was constructed. Editions + // after this will not have reliable defaults. + optional Edition maximum_edition = 5; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. 
+ // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. 
+ repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition appears. + // For example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. 
Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to moo. + // // + // // Another line attached to moo. + // optional double moo = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to moo or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } + + // Extensions for tooling. + extensions 536000000 [declaration = { + number: 536000000 + type: ".buf.descriptor.v1.SourceCodeInfoExtension" + full_name: ".buf.descriptor.v1.buf_source_code_info_extension" + }]; +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. 
+ repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified object. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + + // Represents the identified object's effect on the element in the original + // .proto file. + enum Semantic { + // There is no effect or the effect is indescribable. + NONE = 0; + // The element is set or otherwise mutated. + SET = 1; + // An alias to the element is returned. + ALIAS = 2; + } + optional Semantic semantic = 5; + } +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/timestamp.proto b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/timestamp.proto new file mode 100644 index 0000000000..d0698db680 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/proto/google/protobuf/timestamp.proto @@ -0,0 +1,144 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/timestamppb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "TimestampProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. 
+// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). +// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// Example 6: Compute Timestamp from current time in Python. 
+// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() +// ) to obtain a formatter capable of generating timestamps in this format. +// +message Timestamp { + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + int64 seconds = 1; + + // Non-negative fractions of a second at nanosecond resolution. 
Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + int32 nanos = 2; +} \ No newline at end of file diff --git a/did_core/did_methods/did_cheqd/cheqd_proto_gen/src/main.rs b/did_core/did_methods/did_cheqd/cheqd_proto_gen/src/main.rs new file mode 100644 index 0000000000..0c2126a9c7 --- /dev/null +++ b/did_core/did_methods/did_cheqd/cheqd_proto_gen/src/main.rs @@ -0,0 +1,16 @@ +//! Binary for re-generating the cheqd proto types +fn main() -> Result<(), Box> { + let crate_dir = env!("CARGO_MANIFEST_DIR").to_string(); + + tonic_build::configure() + .build_server(false) + .out_dir(crate_dir.clone() + "/../src/proto") + .compile_protos( + &[ + crate_dir.clone() + "/proto/cheqd/did/v2/query.proto", + crate_dir.clone() + "/proto/cheqd/resource/v2/query.proto", + ], + &[crate_dir + "/proto"], + )?; + Ok(()) +} diff --git a/did_core/did_methods/did_cheqd/src/error/mod.rs b/did_core/did_methods/did_cheqd/src/error/mod.rs new file mode 100644 index 0000000000..fe3d49efdc --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/error/mod.rs @@ -0,0 +1,29 @@ +use parsing::ParsingErrorSource; +use thiserror::Error; + +pub mod parsing; + +pub type DidCheqdResult = Result; + +#[derive(Error, Debug)] +#[non_exhaustive] +pub enum DidCheqdError { + #[error("DID method not supported: {0}")] + MethodNotSupported(String), + #[error("Cheqd network not supported: {0}")] + NetworkNotSupported(String), + #[error("Bad configuration: {0}")] + BadConfiguration(String), + #[error("Transport error: {0}")] + TransportError(#[from] tonic::transport::Error), + #[error("Non-success resolver response: {0}")] + NonSuccessResponse(#[from] tonic::Status), + #[error("Response from resolver is invalid: {0}")] + InvalidResponse(String), + #[error("Invalid DID Document structure resolved: {0}")] + InvalidDidDocument(String), + #[error("Parsing error: {0}")] + ParsingError(#[from] 
ParsingErrorSource), + #[error(transparent)] + Other(#[from] Box), +} diff --git a/did_core/did_methods/did_cheqd/src/error/parsing.rs b/did_core/did_methods/did_cheqd/src/error/parsing.rs new file mode 100644 index 0000000000..e2c0079ddf --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/error/parsing.rs @@ -0,0 +1,56 @@ +use did_resolver::{did_doc::schema::types::uri::UriWrapperError, did_parser_nom}; +use thiserror::Error; + +use super::DidCheqdError; + +#[derive(Error, Debug)] +pub enum ParsingErrorSource { + #[error("DID document parsing error: {0}")] + DidDocumentParsingError(#[from] did_parser_nom::ParseError), + #[error("DID document parsing URI error: {0}")] + DidDocumentParsingUriError(#[from] UriWrapperError), + #[error("JSON parsing error: {0}")] + JsonError(#[from] serde_json::Error), + #[error("Invalid URL: {0}")] + UrlParsingError(url::ParseError), + #[error("Invalid encoding: {0}")] + Utf8Error(#[from] std::string::FromUtf8Error), + #[error("Invalid encoding: {0}")] + IntConversionError(#[from] std::num::TryFromIntError), +} + +impl From for DidCheqdError { + fn from(error: did_parser_nom::ParseError) -> Self { + DidCheqdError::ParsingError(ParsingErrorSource::DidDocumentParsingError(error)) + } +} + +impl From for DidCheqdError { + fn from(error: UriWrapperError) -> Self { + DidCheqdError::ParsingError(ParsingErrorSource::DidDocumentParsingUriError(error)) + } +} + +impl From for DidCheqdError { + fn from(error: serde_json::Error) -> Self { + DidCheqdError::ParsingError(ParsingErrorSource::JsonError(error)) + } +} + +impl From for DidCheqdError { + fn from(error: url::ParseError) -> Self { + DidCheqdError::ParsingError(ParsingErrorSource::UrlParsingError(error)) + } +} + +impl From for DidCheqdError { + fn from(error: std::string::FromUtf8Error) -> Self { + DidCheqdError::ParsingError(ParsingErrorSource::Utf8Error(error)) + } +} + +impl From for DidCheqdError { + fn from(error: std::num::TryFromIntError) -> Self { + 
DidCheqdError::ParsingError(ParsingErrorSource::IntConversionError(error)) + } +} diff --git a/did_core/did_methods/did_cheqd/src/lib.rs b/did_core/did_methods/did_cheqd/src/lib.rs new file mode 100644 index 0000000000..31400edaff --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/lib.rs @@ -0,0 +1,3 @@ +pub mod error; +pub mod proto; +pub mod resolution; diff --git a/did_core/did_methods/did_cheqd/src/proto/cheqd.did.v2.rs b/did_core/did_methods/did_cheqd/src/proto/cheqd.did.v2.rs new file mode 100644 index 0000000000..58e4eefc60 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/proto/cheqd.did.v2.rs @@ -0,0 +1,387 @@ +// This file is @generated by prost-build. +/// DidDoc defines a DID Document, as defined in the DID Core specification. +/// Documentation: +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DidDoc { + /// context is a list of URIs used to identify the context of the DID document. + /// Default: + #[prost(string, repeated, tag = "1")] + pub context: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// id is the DID of the DID document. + /// Format: did:cheqd:: + #[prost(string, tag = "2")] + pub id: ::prost::alloc::string::String, + /// controller is a list of DIDs that are allowed to control the DID document. + #[prost(string, repeated, tag = "3")] + pub controller: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// verificationMethod is a list of verification methods that can be used to + /// verify a digital signature or cryptographic proof. + #[prost(message, repeated, tag = "4")] + pub verification_method: ::prost::alloc::vec::Vec, + /// authentication is a list of verification methods that can be used to + /// authenticate as the DID subject. + #[prost(string, repeated, tag = "5")] + pub authentication: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// assertionMethod is a list of verification methods that can be used to + /// assert statements as the DID subject. 
+ #[prost(string, repeated, tag = "6")] + pub assertion_method: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// capabilityInvocation is a list of verification methods that can be used to + /// invoke capabilities as the DID subject. + #[prost(string, repeated, tag = "7")] + pub capability_invocation: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// capabilityDelegation is a list of verification methods that can be used to + /// delegate capabilities as the DID subject. + #[prost(string, repeated, tag = "8")] + pub capability_delegation: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// keyAgreement is a list of verification methods that can be used to perform + /// key agreement as the DID subject. + #[prost(string, repeated, tag = "9")] + pub key_agreement: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, + /// service is a list of services that can be used to interact with the DID subject. + #[prost(message, repeated, tag = "10")] + pub service: ::prost::alloc::vec::Vec, + /// alsoKnownAs is a list of DIDs that are known to refer to the same DID subject. + #[prost(string, repeated, tag = "11")] + pub also_known_as: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// VerificationMethod defines a verification method, as defined in the DID Core specification. +/// Documentation: +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct VerificationMethod { + /// id is the unique identifier of the verification method. + /// Format: did:cheqd::# + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + /// type is the type of the verification method. + /// Example: Ed25519VerificationKey2020 + #[prost(string, tag = "2")] + pub verification_method_type: ::prost::alloc::string::String, + /// controller is the DID of the controller of the verification method. 
+ /// Format: did:cheqd:: + #[prost(string, tag = "3")] + pub controller: ::prost::alloc::string::String, + /// verification_material is the public key of the verification method. + /// Commonly used verification material types: publicJwk, publicKeyBase58, publicKeyMultibase + #[prost(string, tag = "4")] + pub verification_material: ::prost::alloc::string::String, +} +/// Service defines a service, as defined in the DID Core specification. +/// Documentation: +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Service { + /// id is the unique identifier of the service. + /// Format: did:cheqd::# + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + /// type is the type of the service. + /// Example: LinkedResource + #[prost(string, tag = "2")] + pub service_type: ::prost::alloc::string::String, + /// serviceEndpoint is the endpoint of the service. + /// Example: + #[prost(string, repeated, tag = "3")] + pub service_endpoint: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +/// DidDocWithMetadata defines a DID Document with metadata, as defined in the DID Core specification. +/// Contains the DID Document, as well as DID Document metadata. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DidDocWithMetadata { + /// didDocument is the DID Document. + #[prost(message, optional, tag = "1")] + pub did_doc: ::core::option::Option, + /// didDocumentMetadata is the DID Document metadata. + #[prost(message, optional, tag = "2")] + pub metadata: ::core::option::Option, +} +/// Metadata defines DID Document metadata, as defined in the DID Core specification. +/// Documentation: +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Metadata { + /// created is the timestamp of the creation of the DID Document. 
+ /// Format: RFC3339 + /// Example: 2021-03-10T15:16:17Z + #[prost(message, optional, tag = "1")] + pub created: ::core::option::Option<::prost_types::Timestamp>, + /// updated is the timestamp of the last update of the DID Document. + /// Format: RFC3339 + /// Example: 2021-03-10T15:16:17Z + #[prost(message, optional, tag = "2")] + pub updated: ::core::option::Option<::prost_types::Timestamp>, + /// deactivated is a flag that indicates whether the DID Document is deactivated. + /// Default: false + #[prost(bool, tag = "3")] + pub deactivated: bool, + /// version_id is the version identifier of the DID Document. + /// Format: UUID + /// Example: 123e4567-e89b-12d3-a456-426655440000 + #[prost(string, tag = "4")] + pub version_id: ::prost::alloc::string::String, + /// next_version_id is the version identifier of the next version of the DID Document. + /// Format: UUID + /// Example: 123e4567-e89b-12d3-a456-426655440000 + #[prost(string, tag = "5")] + pub next_version_id: ::prost::alloc::string::String, + /// previous_version_id is the version identifier of the previous version of the DID Document. + /// Format: UUID + /// Example: 123e4567-e89b-12d3-a456-426655440000 + #[prost(string, tag = "6")] + pub previous_version_id: ::prost::alloc::string::String, +} +/// QueryDidDocRequest is the request type for the Query/DidDoc method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDidDocRequest { + /// DID unique identifier of the DID Document to fetch. + /// UUID-style DIDs as well as Indy-style DID are supported. 
+ /// + /// Format: did:cheqd:: + /// + /// Examples: + /// - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, +} +/// QueryDidDocResponse is the response type for the Query/DidDoc method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDidDocResponse { + /// Successful resolution of the DID Document returns the following: + /// - did_doc is the latest version of the DID Document + /// - metadata is the DID Document metadata associated with the latest version of the DID Document + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, +} +/// QueryDidDocVersionRequest is the request type for the Query/DidDocVersion method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDidDocVersionRequest { + /// DID unique identifier of the DID Document to fetch. + /// UUID-style DIDs as well as Indy-style DID are supported. + /// + /// Format: did:cheqd:: + /// + /// Examples: + /// - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + /// Unique version identifier of the DID Document to fetch. + /// Returns the specified version of the DID Document.
+ /// + /// Format: + /// + /// Example: 93f2573c-eca9-4098-96cb-a1ec676a29ed + #[prost(string, tag = "2")] + pub version: ::prost::alloc::string::String, +} +/// QueryDidDocVersionResponse is the response type for the Query/DidDocVersion method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDidDocVersionResponse { + /// Successful resolution of the DID Document returns the following: + /// - did_doc is the requested version of the DID Document + /// - metadata is DID Document metadata associated with the requested version of the DID Document + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, +} +/// QueryAllDidDocVersionsMetadataRequest is the request type for the Query/AllDidDocVersionsMetadata method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllDidDocVersionsMetadataRequest { + /// DID unique identifier of the DID Document to fetch version metadata. + /// UUID-style DIDs as well as Indy-style DID are supported. + /// + /// Format: did:cheqd:: + /// + /// Examples: + /// - did:cheqd:mainnet:c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - did:cheqd:testnet:wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub id: ::prost::alloc::string::String, + /// pagination defines an optional pagination for the request. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::super::cosmos::base::query::v1beta1::PageRequest, + >, +} +/// QueryAllDidDocVersionsMetadataResponse is the response type for the Query/AllDidDocVersionsMetadata method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllDidDocVersionsMetadataResponse { + /// versions is the list of all versions of the requested DID Document + #[prost(message, repeated, tag = "1")] + pub versions: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. 
+ #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::super::cosmos::base::query::v1beta1::PageResponse, + >, +} +/// Generated client implementations. +pub mod query_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// Query defines the gRPC querier service for the DID module + #[derive(Debug, Clone)] + pub struct QueryClient { + inner: tonic::client::Grpc, + } + impl QueryClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl QueryClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + QueryClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. 
+ #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// Fetch latest version of a DID Document for a given DID + pub async fn did_doc( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cheqd.did.v2.Query/DidDoc", + ); + let mut req = request.into_request(); + req.extensions_mut().insert(GrpcMethod::new("cheqd.did.v2.Query", "DidDoc")); + self.inner.unary(req, path, codec).await + } + /// Fetch specific version of a DID Document for a given DID + pub async fn did_doc_version( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cheqd.did.v2.Query/DidDocVersion", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cheqd.did.v2.Query", "DidDocVersion")); + self.inner.unary(req, path, codec).await + } + /// Fetch list of all versions of DID 
Documents for a given DID + pub async fn all_did_doc_versions_metadata( + &mut self, + request: impl tonic::IntoRequest< + super::QueryAllDidDocVersionsMetadataRequest, + >, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cheqd.did.v2.Query/AllDidDocVersionsMetadata", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("cheqd.did.v2.Query", "AllDidDocVersionsMetadata"), + ); + self.inner.unary(req, path, codec).await + } + } +} diff --git a/did_core/did_methods/did_cheqd/src/proto/cheqd.resource.v2.rs b/did_core/did_methods/did_cheqd/src/proto/cheqd.resource.v2.rs new file mode 100644 index 0000000000..52c1b7d2e2 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/proto/cheqd.resource.v2.rs @@ -0,0 +1,344 @@ +// This file is @generated by prost-build. +/// Resource stores the contents of a DID-Linked Resource +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Resource { + /// bytes is the raw data of the Resource + #[prost(bytes = "vec", tag = "1")] + pub data: ::prost::alloc::vec::Vec, +} +/// Metadata stores the metadata of a DID-Linked Resource +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Metadata { + /// collection_id is the ID of the collection that the Resource belongs to. Defined client-side. + /// This field is the unique identifier of the DID linked to this Resource + /// Format: + /// + /// Examples: + /// - c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub collection_id: ::prost::alloc::string::String, + /// id is the ID of the Resource. Defined client-side. + /// This field is a unique identifier for this specific version of the Resource. 
+ /// Format: + #[prost(string, tag = "2")] + pub id: ::prost::alloc::string::String, + /// name is a human-readable name for the Resource. Defined client-side. + /// Does not change between different versions. + /// Example: PassportSchema, EducationTrustRegistry + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + /// version is a human-readable semantic version for the Resource. Defined client-side. + /// Stored as a string. OPTIONAL. + /// Example: 1.0.0, v2.1.0 + #[prost(string, tag = "4")] + pub version: ::prost::alloc::string::String, + /// resource_type is a Resource type that identifies what the Resource is. Defined client-side. + /// This is NOT the same as the resource's media type. + /// Example: AnonCredsSchema, StatusList2021 + #[prost(string, tag = "5")] + pub resource_type: ::prost::alloc::string::String, + /// List of alternative URIs for the SAME Resource. + #[prost(message, repeated, tag = "6")] + pub also_known_as: ::prost::alloc::vec::Vec, + /// media_type is IANA media type of the Resource. Defined ledger-side. + /// Example: application/json, image/png + #[prost(string, tag = "7")] + pub media_type: ::prost::alloc::string::String, + /// created is the time at which the Resource was created. Defined ledger-side. + /// Format: RFC3339 + /// Example: 2021-01-01T00:00:00Z + #[prost(message, optional, tag = "8")] + pub created: ::core::option::Option<::prost_types::Timestamp>, + /// checksum is a SHA-256 checksum hash of the Resource. Defined ledger-side. + /// Example: d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f + #[prost(string, tag = "9")] + pub checksum: ::prost::alloc::string::String, + /// previous_version_id is the ID of the previous version of the Resource. Defined ledger-side. + /// This is based on the Resource's name and Resource type to determine whether it's the same Resource. 
+ /// Format: + #[prost(string, tag = "10")] + pub previous_version_id: ::prost::alloc::string::String, + /// next_version_id is the ID of the next version of the Resource. Defined ledger-side. + /// This is based on the Resource's name and Resource type to determine whether it's the same Resource. + /// Format: + #[prost(string, tag = "11")] + pub next_version_id: ::prost::alloc::string::String, +} +/// AlternativeUri are alternative URIs that can be used to access the Resource. +/// By default, at least the DID URI equivalent of the Resource is populated. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlternativeUri { + /// uri is the URI of the Resource. + /// Examples: + /// - did:cheqd:testnet:MjYxNzYKMjYxNzYK/resources/4600ea35-8916-4ac4-b412-55b8f49dd94e + /// - + /// - + /// - + /// - ipfs://bafybeihetj2ng3d74k7t754atv2s5dk76pcqtvxls6dntef3xa6rax25xe + #[prost(string, tag = "1")] + pub uri: ::prost::alloc::string::String, + /// description is a human-readable description of the URI. Defined client-side. + /// Examples: + /// - did-uri + /// - http-uri + /// - ipfs-uri + #[prost(string, tag = "2")] + pub description: ::prost::alloc::string::String, +} +/// ResourceWithMetadata describes the overall structure of a DID-Linked Resource +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ResourceWithMetadata { + #[prost(message, optional, tag = "1")] + pub resource: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub metadata: ::core::option::Option, +} +/// QueryResourceRequest is the request type for the Query/Resource RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryResourceRequest { + /// collection_id is an identifier of the DidDocument the resource belongs to. + /// Format: + /// + /// Examples: + /// - c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub collection_id: ::prost::alloc::string::String, + /// id is a unique id of the resource. 
+ /// Format: + #[prost(string, tag = "2")] + pub id: ::prost::alloc::string::String, +} +/// QueryResourceResponse is the response type for the Query/Resource RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryResourceResponse { + /// Successful resolution of the resource returns the following: + /// - resource is the requested resource + /// - metadata is the resource metadata associated with the requested resource + #[prost(message, optional, tag = "1")] + pub resource: ::core::option::Option, +} +/// QueryResourceMetadataRequest is the request type for the Query/ResourceMetadata RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryResourceMetadataRequest { + /// collection_id is an identifier of the DidDocument the resource belongs to. + /// Format: + /// + /// Examples: + /// - c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub collection_id: ::prost::alloc::string::String, + /// id is a unique id of the resource. + /// Format: + #[prost(string, tag = "2")] + pub id: ::prost::alloc::string::String, +} +/// QueryResourceMetadataResponse is the response type for the Query/ResourceMetadata RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryResourceMetadataResponse { + /// resource is the requested resource metadata + #[prost(message, optional, tag = "1")] + pub resource: ::core::option::Option, +} +/// QueryCollectionResourcesRequest is the request type for the Query/CollectionResources RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryCollectionResourcesRequest { + /// collection_id is an identifier of the DidDocument the resource belongs to. + /// Format: + /// + /// Examples: + /// - c82f2b02-bdab-4dd7-b833-3e143745d612 + /// - wGHEXrZvJxR8vw5P3UWH1j + #[prost(string, tag = "1")] + pub collection_id: ::prost::alloc::string::String, + /// pagination defines an optional pagination for the request. 
+ #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::super::cosmos::base::query::v1beta1::PageRequest, + >, +} +/// QueryCollectionResourcesResponse is the response type for the Query/CollectionResources RPC method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryCollectionResourcesResponse { + /// resources is the requested collection of resource metadata + #[prost(message, repeated, tag = "1")] + pub resources: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::super::cosmos::base::query::v1beta1::PageResponse, + >, +} +/// Generated client implementations. +pub mod query_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// Query defines the gRPC querier service for the resource module + #[derive(Debug, Clone)] + pub struct QueryClient { + inner: tonic::client::Grpc, + } + impl QueryClient { + /// Attempt to create a new client by connecting to a given endpoint. 
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl QueryClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + QueryClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// Fetch data/payload for a specific resource (without metadata) + pub async fn resource( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cheqd.resource.v2.Query/Resource", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cheqd.resource.v2.Query", "Resource")); + self.inner.unary(req, path, codec).await + } + /// Fetch only metadata for a specific resource + pub async fn resource_metadata( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cheqd.resource.v2.Query/ResourceMetadata", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cheqd.resource.v2.Query", "ResourceMetadata")); + self.inner.unary(req, path, codec).await + } + /// Fetch metadata for all resources in a collection + pub async fn collection_resources( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + 
"/cheqd.resource.v2.Query/CollectionResources", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("cheqd.resource.v2.Query", "CollectionResources"), + ); + self.inner.unary(req, path, codec).await + } + } +} diff --git a/did_core/did_methods/did_cheqd/src/proto/cosmos.base.query.v1beta1.rs b/did_core/did_methods/did_cheqd/src/proto/cosmos.base.query.v1beta1.rs new file mode 100644 index 0000000000..22ce08b530 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/proto/cosmos.base.query.v1beta1.rs @@ -0,0 +1,55 @@ +// This file is @generated by prost-build. +/// PageRequest is to be embedded in gRPC request messages for efficient +/// pagination. Ex: +/// +/// message SomeRequest { +/// Foo some_parameter = 1; +/// PageRequest pagination = 2; +/// } +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PageRequest { + /// key is a value returned in PageResponse.next_key to begin + /// querying the next page most efficiently. Only one of offset or key + /// should be set. + #[prost(bytes = "vec", tag = "1")] + pub key: ::prost::alloc::vec::Vec, + /// offset is a numeric offset that can be used when key is unavailable. + /// It is less efficient than using key. Only one of offset or key should + /// be set. + #[prost(uint64, tag = "2")] + pub offset: u64, + /// limit is the total number of results to be returned in the result page. + /// If left empty it will default to a value to be set by each app. + #[prost(uint64, tag = "3")] + pub limit: u64, + /// count_total is set to true to indicate that the result set should include + /// a count of the total number of items available for pagination in UIs. + /// count_total is only respected when offset is used. It is ignored when key + /// is set. + #[prost(bool, tag = "4")] + pub count_total: bool, + /// reverse is set to true if results are to be returned in the descending order. 
+ /// + /// Since: cosmos-sdk 0.43 + #[prost(bool, tag = "5")] + pub reverse: bool, +} +/// PageResponse is to be embedded in gRPC response messages where the +/// corresponding request message has used PageRequest. +/// +/// message SomeResponse { +/// repeated Bar results = 1; +/// PageResponse page = 2; +/// } +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PageResponse { + /// next_key is the key to be passed to PageRequest.key to + /// query the next page most efficiently. It will be empty if + /// there are no more results. + #[prost(bytes = "vec", tag = "1")] + pub next_key: ::prost::alloc::vec::Vec, + /// total is total number of results available if PageRequest.count_total + /// was set, its value is undefined otherwise + #[prost(uint64, tag = "2")] + pub total: u64, +} diff --git a/did_core/did_methods/did_cheqd/src/proto/google.api.rs b/did_core/did_methods/did_cheqd/src/proto/google.api.rs new file mode 100644 index 0000000000..635ce371a6 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/proto/google.api.rs @@ -0,0 +1,360 @@ +// This file is @generated by prost-build. +/// Defines the HTTP configuration for an API service. It contains a list of +/// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +/// to one or more HTTP REST API methods. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Http { + /// A list of HTTP configuration rules that apply to individual API methods. + /// + /// **NOTE:** All service configuration rules follow "last one wins" order. + #[prost(message, repeated, tag = "1")] + pub rules: ::prost::alloc::vec::Vec, + /// When set to true, URL path parameters will be fully URI-decoded except in + /// cases of single segment matches in reserved expansion, where "%2F" will be + /// left encoded. + /// + /// The default behavior is to not decode RFC 6570 reserved characters in multi + /// segment matches. 
+ #[prost(bool, tag = "2")] + pub fully_decode_reserved_expansion: bool, +} +/// gRPC Transcoding +/// +/// gRPC Transcoding is a feature for mapping between a gRPC method and one or +/// more HTTP REST endpoints. It allows developers to build a single API service +/// that supports both gRPC APIs and REST APIs. Many systems, including [Google +/// APIs](), +/// [Cloud Endpoints](), [gRPC +/// Gateway](), +/// and [Envoy]() proxy support this feature +/// and use it for large scale production services. +/// +/// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +/// how different portions of the gRPC request message are mapped to the URL +/// path, URL query parameters, and HTTP request body. It also controls how the +/// gRPC response message is mapped to the HTTP response body. `HttpRule` is +/// typically specified as an `google.api.http` annotation on the gRPC method. +/// +/// Each mapping specifies a URL path template and an HTTP method. The path +/// template may refer to one or more fields in the gRPC request message, as long +/// as each field is a non-repeated field with a primitive (non-message) type. +/// The path template controls how fields of the request message are mapped to +/// the URL path. +/// +/// Example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get: "/v1/{name=messages/*}" +/// }; +/// } +/// } +/// message GetMessageRequest { +/// string name = 1; // Mapped to URL path. +/// } +/// message Message { +/// string text = 1; // The resource content. +/// } +/// +/// This enables an HTTP REST to gRPC mapping as below: +/// +/// - HTTP: `GET /v1/messages/123456` +/// - gRPC: `GetMessage(name: "messages/123456")` +/// +/// Any fields in the request message which are not bound by the path template +/// automatically become HTTP query parameters if there is no HTTP request body. 
+/// For example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get:"/v1/messages/{message_id}" +/// }; +/// } +/// } +/// message GetMessageRequest { +/// message SubMessage { +/// string subfield = 1; +/// } +/// string message_id = 1; // Mapped to URL path. +/// int64 revision = 2; // Mapped to URL query parameter `revision`. +/// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +/// } +/// +/// This enables a HTTP JSON to RPC mapping as below: +/// +/// - HTTP: `GET /v1/messages/123456?revision=2&sub.subfield=foo` +/// - gRPC: `GetMessage(message_id: "123456" revision: 2 sub: +/// SubMessage(subfield: "foo"))` +/// +/// Note that fields which are mapped to URL query parameters must have a +/// primitive type or a repeated primitive type or a non-repeated message type. +/// In the case of a repeated type, the parameter can be repeated in the URL +/// as `...?param=A¶m=B`. In the case of a message type, each field of the +/// message is mapped to a separate parameter, such as +/// `...?foo.a=A&foo.b=B&foo.c=C`. +/// +/// For HTTP methods that allow a request body, the `body` field +/// specifies the mapping. Consider a REST update method on the +/// message resource collection: +/// +/// service Messaging { +/// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// patch: "/v1/messages/{message_id}" +/// body: "message" +/// }; +/// } +/// } +/// message UpdateMessageRequest { +/// string message_id = 1; // mapped to the URL +/// Message message = 2; // mapped to the body +/// } +/// +/// The following HTTP JSON to RPC mapping is enabled, where the +/// representation of the JSON in the request body is determined by +/// protos JSON encoding: +/// +/// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` +/// - gRPC: `UpdateMessage(message_id: "123456" message { text: "Hi!" 
})` +/// +/// The special name `*` can be used in the body mapping to define that +/// every field not bound by the path template should be mapped to the +/// request body. This enables the following alternative definition of +/// the update method: +/// +/// service Messaging { +/// rpc UpdateMessage(Message) returns (Message) { +/// option (google.api.http) = { +/// patch: "/v1/messages/{message_id}" +/// body: "*" +/// }; +/// } +/// } +/// message Message { +/// string message_id = 1; +/// string text = 2; +/// } +/// +/// +/// The following HTTP JSON to RPC mapping is enabled: +/// +/// - HTTP: `PATCH /v1/messages/123456 { "text": "Hi!" }` +/// - gRPC: `UpdateMessage(message_id: "123456" text: "Hi!")` +/// +/// Note that when using `*` in the body mapping, it is not possible to +/// have HTTP parameters, as all fields not bound by the path end in +/// the body. This makes this option more rarely used in practice when +/// defining REST APIs. The common usage of `*` is in custom methods +/// which don't use the URL at all for transferring data. +/// +/// It is possible to define multiple HTTP methods for one RPC by using +/// the `additional_bindings` option. Example: +/// +/// service Messaging { +/// rpc GetMessage(GetMessageRequest) returns (Message) { +/// option (google.api.http) = { +/// get: "/v1/messages/{message_id}" +/// additional_bindings { +/// get: "/v1/users/{user_id}/messages/{message_id}" +/// } +/// }; +/// } +/// } +/// message GetMessageRequest { +/// string message_id = 1; +/// string user_id = 2; +/// } +/// +/// This enables the following two alternative HTTP JSON to RPC mappings: +/// +/// - HTTP: `GET /v1/messages/123456` +/// - gRPC: `GetMessage(message_id: "123456")` +/// +/// - HTTP: `GET /v1/users/me/messages/123456` +/// - gRPC: `GetMessage(user_id: "me" message_id: "123456")` +/// +/// Rules for HTTP mapping +/// +/// 1. 
Leaf request fields (recursive expansion nested messages in the request +/// message) are classified into three categories: +/// - Fields referred by the path template. They are passed via the URL path. +/// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They +/// are passed via the HTTP +/// request body. +/// - All other fields are passed via the URL query parameters, and the +/// parameter name is the field path in the request message. A repeated +/// field can be represented as multiple query parameters under the same +/// name. +/// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL +/// query parameter, all fields +/// are passed via URL path and HTTP request body. +/// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP +/// request body, all +/// fields are passed via URL path and URL query parameters. +/// +/// Path template syntax +/// +/// Template = "/" Segments \[ Verb \] ; +/// Segments = Segment { "/" Segment } ; +/// Segment = "*" | "**" | LITERAL | Variable ; +/// Variable = "{" FieldPath \[ "=" Segments \] "}" ; +/// FieldPath = IDENT { "." IDENT } ; +/// Verb = ":" LITERAL ; +/// +/// The syntax `*` matches a single URL path segment. The syntax `**` matches +/// zero or more URL path segments, which must be the last part of the URL path +/// except the `Verb`. +/// +/// The syntax `Variable` matches part of the URL path as specified by its +/// template. A variable template must not contain other variables. If a variable +/// matches a single path segment, its template may be omitted, e.g. `{var}` +/// is equivalent to `{var=*}`. +/// +/// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +/// contains any reserved character, such characters should be percent-encoded +/// before the matching. 
+/// +/// If a variable contains exactly one path segment, such as `"{var}"` or +/// `"{var=*}"`, when such a variable is expanded into a URL path on the client +/// side, all characters except `\[-_.~0-9a-zA-Z\]` are percent-encoded. The +/// server side does the reverse decoding. Such variables show up in the +/// [Discovery +/// Document]() as +/// `{var}`. +/// +/// If a variable contains multiple path segments, such as `"{var=foo/*}"` +/// or `"{var=**}"`, when such a variable is expanded into a URL path on the +/// client side, all characters except `\[-_.~/0-9a-zA-Z\]` are percent-encoded. +/// The server side does the reverse decoding, except "%2F" and "%2f" are left +/// unchanged. Such variables show up in the +/// [Discovery +/// Document]() as +/// `{+var}`. +/// +/// Using gRPC API Service Configuration +/// +/// gRPC API Service Configuration (service config) is a configuration language +/// for configuring a gRPC service to become a user-facing product. The +/// service config is simply the YAML representation of the `google.api.Service` +/// proto message. +/// +/// As an alternative to annotating your proto file, you can configure gRPC +/// transcoding in your service config YAML files. You do this by specifying a +/// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +/// effect as the proto annotation. This can be particularly useful if you +/// have a proto that is reused in multiple services. Note that any transcoding +/// specified in the service config will override any matching transcoding +/// configuration in the proto. +/// +/// The following example selects a gRPC method and applies an `HttpRule` to it: +/// +/// http: +/// rules: +/// - selector: example.v1.Messaging.GetMessage +/// get: /v1/messages/{message_id}/{sub.subfield} +/// +/// Special notes +/// +/// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +/// proto to JSON conversion must follow the [proto3 +/// specification](). 
+/// +/// While the single segment variable follows the semantics of +/// [RFC 6570]() Section 3.2.2 Simple String +/// Expansion, the multi segment variable **does not** follow RFC 6570 Section +/// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +/// does not expand special characters like `?` and `#`, which would lead +/// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +/// for multi segment variables. +/// +/// The path variables **must not** refer to any repeated or mapped field, +/// because client libraries are not capable of handling such variable expansion. +/// +/// The path variables **must not** capture the leading "/" character. The reason +/// is that the most common use case "{var}" does not capture the leading "/" +/// character. For consistency, all path variables must share the same behavior. +/// +/// Repeated message fields must not be mapped to URL query parameters, because +/// no client library can support such complicated mapping. +/// +/// If an API needs to use a JSON array for request or response body, it can map +/// the request or response body to a repeated field. However, some gRPC +/// Transcoding implementations may not support this feature. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct HttpRule { + /// Selects a method to which this rule applies. + /// + /// Refer to [selector][google.api.DocumentationRule.selector] for syntax + /// details. + #[prost(string, tag = "1")] + pub selector: ::prost::alloc::string::String, + /// The name of the request field whose value is mapped to the HTTP request + /// body, or `*` for mapping all request fields not captured by the path + /// pattern to the HTTP body, or omitted for not having any HTTP request body. + /// + /// NOTE: the referred field must be present at the top-level of the request + /// message type. + #[prost(string, tag = "7")] + pub body: ::prost::alloc::string::String, + /// Optional. 
The name of the response field whose value is mapped to the HTTP + /// response body. When omitted, the entire response message will be used + /// as the HTTP response body. + /// + /// NOTE: The referred field must be present at the top-level of the response + /// message type. + #[prost(string, tag = "12")] + pub response_body: ::prost::alloc::string::String, + /// Additional HTTP bindings for the selector. Nested bindings must + /// not contain an `additional_bindings` field themselves (that is, + /// the nesting may only be one level deep). + #[prost(message, repeated, tag = "11")] + pub additional_bindings: ::prost::alloc::vec::Vec, + /// Determines the URL pattern is matched by this rules. This pattern can be + /// used with any of the {get|put|post|delete|patch} methods. A custom method + /// can be defined using the 'custom' field. + #[prost(oneof = "http_rule::Pattern", tags = "2, 3, 4, 5, 6, 8")] + pub pattern: ::core::option::Option, +} +/// Nested message and enum types in `HttpRule`. +pub mod http_rule { + /// Determines the URL pattern is matched by this rules. This pattern can be + /// used with any of the {get|put|post|delete|patch} methods. A custom method + /// can be defined using the 'custom' field. + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Pattern { + /// Maps to HTTP GET. Used for listing and getting information about + /// resources. + #[prost(string, tag = "2")] + Get(::prost::alloc::string::String), + /// Maps to HTTP PUT. Used for replacing a resource. + #[prost(string, tag = "3")] + Put(::prost::alloc::string::String), + /// Maps to HTTP POST. Used for creating a resource or performing an action. + #[prost(string, tag = "4")] + Post(::prost::alloc::string::String), + /// Maps to HTTP DELETE. Used for deleting a resource. + #[prost(string, tag = "5")] + Delete(::prost::alloc::string::String), + /// Maps to HTTP PATCH. Used for updating a resource. 
+ #[prost(string, tag = "6")] + Patch(::prost::alloc::string::String), + /// The custom pattern is used for specifying an HTTP method that is not + /// included in the `pattern` field, such as HEAD, or "*" to leave the + /// HTTP method unspecified for this rule. The wild-card rule is useful + /// for services that provide content to Web (HTML) clients. + #[prost(message, tag = "8")] + Custom(super::CustomHttpPattern), + } +} +/// A custom pattern is used for defining custom HTTP verb. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CustomHttpPattern { + /// The name of this custom HTTP verb. + #[prost(string, tag = "1")] + pub kind: ::prost::alloc::string::String, + /// The path matched by this custom verb. + #[prost(string, tag = "2")] + pub path: ::prost::alloc::string::String, +} diff --git a/did_core/did_methods/did_cheqd/src/proto/mod.rs b/did_core/did_methods/did_cheqd/src/proto/mod.rs new file mode 100644 index 0000000000..70748162f3 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/proto/mod.rs @@ -0,0 +1,24 @@ +//! 
module structure wrapper over the generated proto types + +pub mod cheqd { + pub mod did { + pub mod v2 { + include!("cheqd.did.v2.rs"); + } + } + pub mod resource { + pub mod v2 { + include!("cheqd.resource.v2.rs"); + } + } +} + +pub mod cosmos { + pub mod base { + pub mod query { + pub mod v1beta1 { + include!("cosmos.base.query.v1beta1.rs"); + } + } + } +} diff --git a/did_core/did_methods/did_cheqd/src/resolution/mod.rs b/did_core/did_methods/did_cheqd/src/resolution/mod.rs new file mode 100644 index 0000000000..a6972f6fd1 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/resolution/mod.rs @@ -0,0 +1,2 @@ +pub mod resolver; +pub mod transformer; diff --git a/did_core/did_methods/did_cheqd/src/resolution/resolver.rs b/did_core/did_methods/did_cheqd/src/resolution/resolver.rs new file mode 100644 index 0000000000..4b063e35db --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/resolution/resolver.rs @@ -0,0 +1,241 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use bytes::Bytes; +use did_resolver::{ + did_doc::schema::did_doc::DidDocument, + did_parser_nom::Did, + error::GenericError, + shared_types::did_document_metadata::DidDocumentMetadata, + traits::resolvable::{resolution_output::DidResolutionOutput, DidResolvable}, +}; +use http_body_util::combinators::UnsyncBoxBody; +use hyper_tls::HttpsConnector; +use hyper_util::{ + client::legacy::{connect::HttpConnector, Client}, + rt::TokioExecutor, +}; +use tokio::sync::Mutex; +use tonic::{transport::Uri, Status}; + +use crate::{ + error::{DidCheqdError, DidCheqdResult}, + proto::cheqd::{ + did::v2::{query_client::QueryClient as DidQueryClient, QueryDidDocRequest}, + resource::v2::query_client::QueryClient as ResourceQueryClient, + }, +}; + +/// default namespace for the cheqd "mainnet". as it would appear in a DID. +pub const MAINNET_NAMESPACE: &str = "mainnet"; +/// default gRPC URL for the cheqd "mainnet". 
+pub const MAINNET_DEFAULT_GRPC: &str = "https://grpc.cheqd.net:443"; +/// default namespace for the cheqd "testnet". as it would appear in a DID. +pub const TESTNET_NAMESPACE: &str = "testnet"; +/// default gRPC URL for the cheqd "testnet". +pub const TESTNET_DEFAULT_GRPC: &str = "https://grpc.cheqd.network:443"; + +/// Configuration for the [DidCheqdResolver] resolver +pub struct DidCheqdResolverConfiguration { + /// Configuration for which networks are resolvable + pub networks: Vec, +} + +impl Default for DidCheqdResolverConfiguration { + fn default() -> Self { + Self { + networks: vec![ + NetworkConfiguration::mainnet(), + NetworkConfiguration::testnet(), + ], + } + } +} + +/// Configuration for a cheqd network. Defining details such as where to resolve DIDs from. +pub struct NetworkConfiguration { + /// the cheqd nodes gRPC URL + pub grpc_url: String, + /// the namespace of the network - as it would appear in a DID (did:cheqd:namespace:123) + pub namespace: String, +} + +impl NetworkConfiguration { + /// default configuration for cheqd mainnet + pub fn mainnet() -> Self { + Self { + grpc_url: String::from(MAINNET_DEFAULT_GRPC), + namespace: String::from(MAINNET_NAMESPACE), + } + } + + /// default configuration for cheqd testnet + pub fn testnet() -> Self { + Self { + grpc_url: String::from(TESTNET_DEFAULT_GRPC), + namespace: String::from(TESTNET_NAMESPACE), + } + } +} + +type HyperClient = Client, UnsyncBoxBody>; + +#[derive(Clone)] +struct CheqdGrpcClient { + did: DidQueryClient, + // FUTURE - not used yet + _resources: ResourceQueryClient, +} + +pub struct DidCheqdResolver { + networks: Vec, + network_clients: Mutex>, +} + +#[async_trait] +impl DidResolvable for DidCheqdResolver { + type DidResolutionOptions = (); + + async fn resolve( + &self, + did: &Did, + _: &Self::DidResolutionOptions, + ) -> Result { + Ok(self.resolve_did(did).await?) + } +} + +impl DidCheqdResolver { + /// Assemble a new resolver with the given config. 
+ /// + /// [DidCheqdResolverConfiguration::default] can be used if default mainnet & testnet + /// configurations are suitable. + pub fn new(configuration: DidCheqdResolverConfiguration) -> Self { + Self { + networks: configuration.networks, + network_clients: Default::default(), + } + } + + /// lazily get the client, initializing if not already + async fn client_for_network(&self, network: &str) -> DidCheqdResult { + let mut lock = self.network_clients.lock().await; + if let Some(client) = lock.get(network) { + return Ok(client.clone()); + } + + let network_config = self + .networks + .iter() + .find(|n| n.namespace == network) + .ok_or(DidCheqdError::NetworkNotSupported(network.to_owned()))?; + + let client = native_tls_hyper_client()?; + let origin: Uri = network_config.grpc_url.parse().map_err(|e| { + DidCheqdError::BadConfiguration(format!( + "GRPC URL is not a URI: {} {e}", + network_config.grpc_url + )) + })?; + + let did_client = DidQueryClient::with_origin(client.clone(), origin.clone()); + let resource_client = ResourceQueryClient::with_origin(client, origin); + + let client = CheqdGrpcClient { + did: did_client, + _resources: resource_client, + }; + + lock.insert(network.to_owned(), client.clone()); + + Ok(client) + } + + /// Resolve a cheqd DID. 
+ pub async fn resolve_did(&self, did: &Did) -> DidCheqdResult { + let method = did.method(); + if method != Some("cheqd") { + return Err(DidCheqdError::MethodNotSupported(format!("{method:?}"))); + } + + let network = did.namespace().unwrap_or(MAINNET_NAMESPACE); + let mut client = self.client_for_network(network).await?; + let did = did.did().to_owned(); + + let request = tonic::Request::new(QueryDidDocRequest { id: did }); + let response = client.did.did_doc(request).await?; + + let query_response = response.into_inner(); + let query_doc_res = query_response.value.ok_or(DidCheqdError::InvalidResponse( + "DIDDoc query did not return a value".into(), + ))?; + let query_doc = query_doc_res.did_doc.ok_or(DidCheqdError::InvalidResponse( + "DIDDoc query did not return a DIDDoc".into(), + ))?; + + let mut output_builder = DidResolutionOutput::builder(DidDocument::try_from(query_doc)?); + + if let Some(query_metadata) = query_doc_res.metadata { + // FUTURE - append linked resources to metadata + output_builder = output_builder + .did_document_metadata(DidDocumentMetadata::try_from(query_metadata)?); + } + + Ok(output_builder.build()) + } +} + +/// Assembles a hyper client which: +/// * uses native TLS +/// * supports HTTP2 only (gRPC) +fn native_tls_hyper_client() -> DidCheqdResult { + let tls = native_tls::TlsConnector::builder() + .request_alpns(&["h2"]) + .build() + .map_err(|e| { + DidCheqdError::BadConfiguration(format!("Failed to build TlsConnector: {e}")) + })?; + let mut http = HttpConnector::new(); + http.enforce_http(false); + let connector = HttpsConnector::from((http, tls.into())); + + Ok(Client::builder(TokioExecutor::new()) + .http2_only(true) + .build(connector)) +} + +#[cfg(test)] +mod unit_tests { + use super::*; + + #[tokio::test] + async fn test_resolve_fails_if_wrong_method() { + let did = "did:notcheqd:abc".parse().unwrap(); + let resolver = DidCheqdResolver::new(Default::default()); + let e = resolver.resolve_did(&did).await.unwrap_err(); + 
assert!(matches!(e, DidCheqdError::MethodNotSupported(_))); + } + + #[tokio::test] + async fn test_resolve_fails_if_no_network_config() { + let did = "did:cheqd:devnet:Ps1ysXP2Ae6GBfxNhNQNKN".parse().unwrap(); + let resolver = DidCheqdResolver::new(Default::default()); + let e = resolver.resolve_did(&did).await.unwrap_err(); + assert!(matches!(e, DidCheqdError::NetworkNotSupported(_))); + } + + #[tokio::test] + async fn test_resolve_fails_if_bad_network_uri() { + let did = "did:cheqd:devnet:Ps1ysXP2Ae6GBfxNhNQNKN".parse().unwrap(); + let config = DidCheqdResolverConfiguration { + networks: vec![NetworkConfiguration { + grpc_url: "@baduri://.".into(), + namespace: "devnet".into(), + }], + }; + + let resolver = DidCheqdResolver::new(config); + let e = resolver.resolve_did(&did).await.unwrap_err(); + assert!(matches!(e, DidCheqdError::BadConfiguration(_))); + } +} diff --git a/did_core/did_methods/did_cheqd/src/resolution/transformer.rs b/did_core/did_methods/did_cheqd/src/resolution/transformer.rs new file mode 100644 index 0000000000..7154d8dd96 --- /dev/null +++ b/did_core/did_methods/did_cheqd/src/resolution/transformer.rs @@ -0,0 +1,212 @@ +use std::str::FromStr; + +use chrono::{DateTime, Utc}; +use did_resolver::{ + did_doc::schema::{ + contexts, + did_doc::DidDocument, + service::Service, + types::uri::Uri, + utils::OneOrList, + verification_method::{PublicKeyField, VerificationMethod, VerificationMethodType}, + }, + did_parser_nom::Did, + shared_types::did_document_metadata::DidDocumentMetadata, +}; +use serde_json::json; + +use crate::{ + error::{DidCheqdError, DidCheqdResult}, + proto::cheqd::did::v2::{ + DidDoc as CheqdDidDoc, Metadata as CheqdDidDocMetadata, Service as CheqdService, + VerificationMethod as CheqdVerificationMethod, + }, +}; + +impl TryFrom for DidDocument { + type Error = DidCheqdError; + + fn try_from(value: CheqdDidDoc) -> Result { + let mut doc = DidDocument::new(value.id.parse()?); + let mut context = value.context; + + // insert 
default context + if !context.iter().any(|ctx| ctx == contexts::W3C_DID_V1) { + context.push(contexts::W3C_DID_V1.to_owned()); + } + + let controller: Vec<_> = value + .controller + .into_iter() + .map(Did::parse) + .collect::>()?; + if !controller.is_empty() { + doc.set_controller(OneOrList::from(controller)); + } + + for vm in value.verification_method { + let vm = VerificationMethod::try_from(vm)?; + let vm_ctx = vm.verification_method_type().context_for_type(); + if !context.iter().any(|ctx| ctx == vm_ctx) { + context.push(vm_ctx.to_owned()); + } + + doc.add_verification_method(vm); + } + + for vm_id in value.authentication { + doc.add_authentication_ref(vm_id.parse()?); + } + for vm_id in value.assertion_method { + doc.add_assertion_method_ref(vm_id.parse()?); + } + for vm_id in value.capability_invocation { + doc.add_capability_invocation_ref(vm_id.parse()?); + } + for vm_id in value.capability_delegation { + doc.add_capability_delegation_ref(vm_id.parse()?); + } + for vm_id in value.key_agreement { + doc.add_key_agreement_ref(vm_id.parse()?); + } + + for svc in value.service { + let svc = Service::try_from(svc)?; + doc.add_service(svc); + } + + let aka: Vec<_> = value + .also_known_as + .iter() + .map(|aka| Uri::from_str(aka)) + .collect::>()?; + doc.set_also_known_as(aka); + + // add in all contexts + doc.set_extra_field(String::from("@context"), json!(context)); + + Ok(doc) + } +} + +impl TryFrom for VerificationMethod { + type Error = DidCheqdError; + + fn try_from(value: CheqdVerificationMethod) -> Result { + let vm_type: VerificationMethodType = + serde_json::from_value(json!(value.verification_method_type))?; + + let vm_key_encoded = value.verification_material; + + let pk = match vm_type { + VerificationMethodType::Ed25519VerificationKey2020 => PublicKeyField::Multibase { + public_key_multibase: vm_key_encoded, + }, + VerificationMethodType::JsonWebKey2020 => PublicKeyField::Jwk { + public_key_jwk: serde_json::from_str(&vm_key_encoded)?, + }, + 
VerificationMethodType::Ed25519VerificationKey2018 => PublicKeyField::Base58 { + public_key_base58: vm_key_encoded, + }, + // https://w3c.github.io/vc-di-bbs/contexts/v1/ + VerificationMethodType::Bls12381G1Key2020 => PublicKeyField::Base58 { + public_key_base58: vm_key_encoded, + }, + // https://w3c.github.io/vc-di-bbs/contexts/v1/ + VerificationMethodType::Bls12381G2Key2020 => PublicKeyField::Base58 { + public_key_base58: vm_key_encoded, + }, + // https://ns.did.ai/suites/x25519-2019/v1/ + VerificationMethodType::X25519KeyAgreementKey2019 => PublicKeyField::Base58 { + public_key_base58: vm_key_encoded, + }, + // https://ns.did.ai/suites/x25519-2020/v1/ + VerificationMethodType::X25519KeyAgreementKey2020 => PublicKeyField::Multibase { + public_key_multibase: vm_key_encoded, + }, + // https://w3c.github.io/vc-data-integrity/contexts/multikey/v1.jsonld + VerificationMethodType::Multikey => PublicKeyField::Multibase { + public_key_multibase: vm_key_encoded, + }, + // https://w3id.org/pgp/v1 + VerificationMethodType::PgpVerificationKey2021 => PublicKeyField::Pgp { + public_key_pgp: vm_key_encoded, + }, + // cannot infer encoding type from vm type, as multiple are supported: https://ns.did.ai/suites/secp256k1-2019/v1/ + VerificationMethodType::EcdsaSecp256k1VerificationKey2019 => { + return Err(DidCheqdError::InvalidDidDocument( + "DidDocument uses VM type of EcdsaSecp256k1VerificationKey2019, cannot process" + .into(), + )) + } + // cannot infer encoding type from vm type: https://identity.foundation/EcdsaSecp256k1RecoverySignature2020/lds-ecdsa-secp256k1-recovery2020-0.0.jsonld + VerificationMethodType::EcdsaSecp256k1RecoveryMethod2020 => { + return Err(DidCheqdError::InvalidDidDocument( + "DidDocument uses VM type of EcdsaSecp256k1RecoveryMethod2020, cannot process" + .into(), + )) + } + }; + + let vm = VerificationMethod::builder() + .id(value.id.parse()?) + .verification_method_type(vm_type) + .controller(value.controller.parse()?) 
+ .public_key(pk) + .build(); + + Ok(vm) + } +} + +impl TryFrom for Service { + type Error = DidCheqdError; + + fn try_from(value: CheqdService) -> Result { + // TODO #1301 - fix mapping: https://github.com/hyperledger/aries-vcx/issues/1301 + let endpoint = + value + .service_endpoint + .into_iter() + .next() + .ok_or(DidCheqdError::InvalidDidDocument( + "DID Document Service is missing an endpoint".into(), + ))?; + + let svc = Service::new( + Uri::from_str(&value.id)?, + endpoint.parse()?, + serde_json::from_value(json!(value.service_type))?, + Default::default(), + ); + + Ok(svc) + } +} + +impl TryFrom for DidDocumentMetadata { + type Error = DidCheqdError; + + fn try_from(value: CheqdDidDocMetadata) -> Result { + let mut builder = DidDocumentMetadata::builder(); + if let Some(timestamp) = value.created { + builder = builder.created(prost_timestamp_to_dt(timestamp)?); + } + if let Some(timestamp) = value.updated { + builder = builder.updated(prost_timestamp_to_dt(timestamp)?); + } + builder = builder + .deactivated(value.deactivated) + .version_id(value.version_id) + .next_version_id(value.next_version_id); + + Ok(builder.build()) + } +} + +fn prost_timestamp_to_dt(mut timestamp: prost_types::Timestamp) -> DidCheqdResult> { + timestamp.normalize(); + DateTime::from_timestamp(timestamp.seconds, timestamp.nanos.try_into()?).ok_or( + DidCheqdError::Other(format!("Unknown error, bad timestamp: {timestamp:?}").into()), + ) +} diff --git a/did_core/did_methods/did_cheqd/tests/resolution.rs b/did_core/did_methods/did_cheqd/tests/resolution.rs new file mode 100644 index 0000000000..cea9a7f457 --- /dev/null +++ b/did_core/did_methods/did_cheqd/tests/resolution.rs @@ -0,0 +1,92 @@ +use did_cheqd::resolution::resolver::{DidCheqdResolver, DidCheqdResolverConfiguration}; +use did_resolver::traits::resolvable::DidResolvable; +use serde_json::json; + +#[tokio::test] +async fn test_resolve_known_mainnet_vector() { + // sample from https://dev.uniresolver.io/ + let did = 
"did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN".parse().unwrap(); + // NOTE: modifications from uni-resolver: + // make serviceEndpoints into single item (not array) + let expected_doc = json!({ + "@context": [ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/ed25519-2020/v1" + ], + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN", + "verificationMethod": [ + { + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#key1", + "type": "Ed25519VerificationKey2020", + "controller": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN", + "publicKeyMultibase": "z6Mkta7joRuvDh7UnoESdgpr9dDUMh5LvdoECDi3WGrJoscA" + } + ], + "authentication": [ + "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#key1" + ], + "service": [ + { + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#website", + "type": "LinkedDomains", + "serviceEndpoint": "https://www.cheqd.io/" + }, + { + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#non-fungible-image", + "type": "LinkedDomains", + "serviceEndpoint": "https://gateway.ipfs.io/ipfs/bafybeihetj2ng3d74k7t754atv2s5dk76pcqtvxls6dntef3xa6rax25xe" + }, + { + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#twitter", + "type": "LinkedDomains", + "serviceEndpoint": "https://twitter.com/cheqd_io" + }, + { + "id": "did:cheqd:mainnet:Ps1ysXP2Ae6GBfxNhNQNKN#linkedin", + "type": "LinkedDomains", + "serviceEndpoint": "https://www.linkedin.com/company/cheqd-identity/" + } + ] + }); + + let resolver = DidCheqdResolver::new(DidCheqdResolverConfiguration::default()); + let output = resolver.resolve(&did, &()).await.unwrap(); + let doc = output.did_document; + assert_eq!(serde_json::to_value(doc.clone()).unwrap(), expected_doc); + assert_eq!(doc, serde_json::from_value(expected_doc).unwrap()); +} + +#[tokio::test] +async fn test_resolve_known_testnet_vector() { + // sample from https://dev.uniresolver.io/ + let did = "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47" + .parse() + .unwrap(); + // NOTE: modifications from uni-resolver: + // * made controller a single 
item + let expected_doc = json!({ + "@context": [ + "https://www.w3.org/ns/did/v1", + "https://w3id.org/security/suites/ed25519-2020/v1" + ], + "id": "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47", + "controller": "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47", + "verificationMethod": [ + { + "id": "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47#key-1", + "type": "Ed25519VerificationKey2020", + "controller": "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47", + "publicKeyMultibase": "z6MkkVbyHJLLjdjU5B62DaJ4mkdMdUkttf9UqySSkA9bVTeZ" + } + ], + "authentication": [ + "did:cheqd:testnet:55dbc8bf-fba3-4117-855c-1e0dc1d3bb47#key-1" + ] + }); + + let resolver = DidCheqdResolver::new(DidCheqdResolverConfiguration::default()); + let output = resolver.resolve(&did, &()).await.unwrap(); + let doc = output.did_document; + assert_eq!(serde_json::to_value(doc.clone()).unwrap(), expected_doc); + assert_eq!(doc, serde_json::from_value(expected_doc).unwrap()); +} diff --git a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/mod.rs b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/mod.rs index 49d70f189a..bff5e7a5e9 100644 --- a/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/mod.rs +++ b/did_core/did_methods/did_peer/src/peer_did/numalgos/numalgo2/mod.rs @@ -123,7 +123,6 @@ mod test { let ddo_decoded: DidDocument = did_peer .to_did_doc_builder(PublicKeyEncoding::Multibase) .unwrap(); - dbg!(&ddo_decoded); assert_eq!(ddo_original, ddo_decoded); } diff --git a/did_core/did_methods/did_resolver_web/Cargo.toml b/did_core/did_methods/did_resolver_web/Cargo.toml index eb4c6f1ee5..16cd429759 100644 --- a/did_core/did_methods/did_resolver_web/Cargo.toml +++ b/did_core/did_methods/did_resolver_web/Cargo.toml @@ -10,10 +10,16 @@ did_resolver = { path = "../../did_resolver" } async-trait = "0.1.68" serde_json = "1.0.96" thiserror = "1.0.40" -hyper = { version = "0.14.26", features = ["client", "http2"] } -hyper-tls = 
"0.5.0" +hyper = { version = "1.5.1" } +hyper-tls = "0.6.0" +hyper-util = { version = "0.1.10", features = ["client-legacy", "http1", "http2"] } +http-body-util = "0.1.2" [dev-dependencies] -hyper = { version = "0.14.26", features = ["server"] } -tokio = { version = "1.38.0", default-features = false, features = ["macros", "rt"] } +hyper = { version = "1.5.1", features = ["server"] } +hyper-util = { version = "0.1.10", features = ["server"] } +tokio = { version = "1.38.0", default-features = false, features = [ + "macros", + "rt", +] } tokio-test = "0.4.2" diff --git a/did_core/did_methods/did_resolver_web/src/error/mod.rs b/did_core/did_methods/did_resolver_web/src/error/mod.rs index be189d6ed3..5532aa2c28 100644 --- a/did_core/did_methods/did_resolver_web/src/error/mod.rs +++ b/did_core/did_methods/did_resolver_web/src/error/mod.rs @@ -16,8 +16,10 @@ pub enum DidWebError { InvalidDid(String), #[error("Parsing error: {0}")] ParsingError(#[from] ParsingErrorSource), - #[error("URL parsing error: {0}")] - HttpError(#[from] hyper::Error), + #[error("Network error: {0}")] + NetworkError(#[from] hyper::Error), + #[error("Network error: {0}")] + NetworkClientError(#[from] hyper_util::client::legacy::Error), #[error("Non-success server response: {0}")] NonSuccessResponse(StatusCode), #[error(transparent)] diff --git a/did_core/did_methods/did_resolver_web/src/resolution/resolver.rs b/did_core/did_methods/did_resolver_web/src/resolution/resolver.rs index bbd19497d6..d7494c9bc7 100644 --- a/did_core/did_methods/did_resolver_web/src/resolution/resolver.rs +++ b/did_core/did_methods/did_resolver_web/src/resolution/resolver.rs @@ -8,12 +8,20 @@ use did_resolver::{ DidResolvable, }, }; +use http_body_util::{combinators::BoxBody, BodyExt as _}; use hyper::{ - client::{connect::Connect, HttpConnector}, + body::Bytes, http::uri::{self, Scheme}, - Body, Client, Uri, + Uri, }; use hyper_tls::HttpsConnector; +use hyper_util::{ + client::legacy::{ + connect::{Connect, HttpConnector}, 
+ Client, + }, + rt::TokioExecutor, +}; use crate::error::DidWebError; @@ -21,14 +29,15 @@ pub struct DidWebResolver where C: Connect + Send + Sync + Clone + 'static, { - client: Client, + client: Client>, scheme: Scheme, } impl DidWebResolver { pub fn http() -> DidWebResolver { DidWebResolver { - client: Client::builder().build::<_, Body>(HttpConnector::new()), + client: Client::builder(TokioExecutor::new()) + .build::<_, BoxBody>(HttpConnector::new()), scheme: Scheme::HTTP, } } @@ -37,7 +46,8 @@ impl DidWebResolver { impl DidWebResolver> { pub fn https() -> DidWebResolver> { DidWebResolver { - client: Client::builder().build::<_, Body>(HttpsConnector::new()), + client: Client::builder(TokioExecutor::new()) + .build::<_, BoxBody>(HttpsConnector::new()), scheme: Scheme::HTTPS, } } @@ -54,7 +64,7 @@ where return Err(DidWebError::NonSuccessResponse(res.status())); } - let body = hyper::body::to_bytes(res.into_body()).await?; + let body = res.into_body().collect().await?.to_bytes(); String::from_utf8(body.to_vec()).map_err(|err| err.into()) } diff --git a/did_core/did_methods/did_resolver_web/tests/resolution.rs b/did_core/did_methods/did_resolver_web/tests/resolution.rs index 76681c8cf3..65949563a3 100644 --- a/did_core/did_methods/did_resolver_web/tests/resolution.rs +++ b/did_core/did_methods/did_resolver_web/tests/resolution.rs @@ -1,4 +1,4 @@ -use std::{convert::Infallible, net::SocketAddr}; +use std::convert::Infallible; use did_resolver::{ did_doc::schema::did_doc::DidDocument, @@ -6,10 +6,17 @@ use did_resolver::{ traits::resolvable::{resolution_output::DidResolutionOutput, DidResolvable}, }; use did_resolver_web::resolution::resolver::DidWebResolver; +use http_body_util::{combinators::BoxBody, BodyExt, Full}; use hyper::{ - service::{make_service_fn, service_fn}, - Body, Request, Response, Server, + body::{Bytes, Incoming}, + service::service_fn, + Request, Response, }; +use hyper_util::{ + rt::{TokioExecutor, TokioIo}, + server::conn::auto::Builder, +}; +use 
tokio::{net::TcpListener, task::JoinSet}; use tokio_test::assert_ok; const DID_DOCUMENT: &str = r#" @@ -66,12 +73,16 @@ const DID_DOCUMENT: &str = r#" ] }"#; -async fn mock_server_handler(req: Request) -> Result, Infallible> { +async fn mock_server_handler( + req: Request, +) -> Result>, Infallible> { let response = match req.uri().path() { - "/.well-known/did.json" | "/user/alice/did.json" => Response::new(Body::from(DID_DOCUMENT)), + "/.well-known/did.json" | "/user/alice/did.json" => { + Response::new(Full::new(Bytes::from(DID_DOCUMENT)).boxed()) + } _ => Response::builder() .status(404) - .body(Body::from("Not Found")) + .body(Full::new(Bytes::from("Not Found")).boxed()) .unwrap(), }; @@ -79,14 +90,36 @@ async fn mock_server_handler(req: Request) -> Result, Infal } async fn create_mock_server(port: u16) -> String { - let make_svc = - make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(mock_server_handler)) }); - - let addr = SocketAddr::from(([127, 0, 0, 1], port)); - let server = Server::bind(&addr).serve(make_svc); + let listen_addr = format!("127.0.0.1:{port}"); + let tcp_listener = TcpListener::bind(listen_addr).await.unwrap(); tokio::spawn(async move { - server.await.unwrap(); + let mut join_set = JoinSet::new(); + loop { + let (stream, addr) = match tcp_listener.accept().await { + Ok(x) => x, + Err(e) => { + eprintln!("failed to accept connection: {e}"); + continue; + } + }; + + let serve_connection = async move { + println!("handling a request from {addr}"); + + let result = Builder::new(TokioExecutor::new()) + .serve_connection(TokioIo::new(stream), service_fn(mock_server_handler)) + .await; + + if let Err(e) = result { + eprintln!("error serving {addr}: {e}"); + } + + println!("handled a request from {addr}"); + }; + + join_set.spawn(serve_connection); + } }); "localhost".to_string() diff --git a/did_core/did_parser_nom/src/did/parsing/did_cheqd.rs b/did_core/did_parser_nom/src/did/parsing/did_cheqd.rs new file mode 100644 index 
0000000000..2223e37cc4 --- /dev/null +++ b/did_core/did_parser_nom/src/did/parsing/did_cheqd.rs @@ -0,0 +1,68 @@ +//! https://docs.cheqd.io/product/architecture/adr-list/adr-001-cheqd-did-method#syntax-for-did-cheqd-method + +use nom::{ + branch::alt, + bytes::complete::tag, + character::complete::{alphanumeric1, char, one_of}, + combinator::{cut, recognize}, + multi::count, + sequence::{delimited, terminated, tuple}, + IResult, +}; + +use super::{did_sov::parse_unqualified_sovrin_did, DidPart, HEX_DIGIT_CHARS}; + +// namespace = 1*namespace-char ":" ... +fn did_cheqd_namespace(input: &str) -> IResult<&str, &str> { + terminated(alphanumeric1, tag(":"))(input) +} + +// Parser for a single hexDigit +fn hex_digit_char(input: &str) -> IResult<&str, char> { + one_of(HEX_DIGIT_CHARS)(input) +} + +// Parser for hexOctet (2 hex digits) +fn parse_hex_octet(input: &str) -> IResult<&str, &str> { + recognize(count(hex_digit_char, 2))(input) +} + +// https://datatracker.ietf.org/doc/html/rfc4122#section-3 +fn parse_uuid(input: &str) -> IResult<&str, &str> { + recognize(tuple(( + count(parse_hex_octet, 4), // time-low + tag("-"), + count(parse_hex_octet, 2), // time mid + tag("-"), + count(parse_hex_octet, 2), // time high & version + tag("-"), + count(parse_hex_octet, 1), // clock sequence and reserved + count(parse_hex_octet, 1), // clock sequence low + tag("-"), + count(parse_hex_octet, 6), // node + )))(input) +} + +// unique-id = *id-char / UUID +// id-char = ALPHA / DIGIT +// > Note: The *id-char unique-id must be 16 bytes of Indy-style base58 encoded identifier. 
+fn parse_did_cheqd_unique_id(input: &str) -> IResult<&str, &str> { + alt(( + recognize(parse_unqualified_sovrin_did), // indy-style DID ID + recognize(parse_uuid), // UUID-style DID ID + ))(input) +} + +pub(super) fn parse_did_cheqd(input: &str) -> IResult<&str, DidPart> { + fn did_cheqd_method(input: &str) -> IResult<&str, &str> { + delimited(char(':'), tag("cheqd"), char(':'))(input) + } + let (input_left, (prefix, method, namespace, id)) = tuple(( + tag("did"), + did_cheqd_method, + cut(did_cheqd_namespace), + cut(parse_did_cheqd_unique_id), + ))(input)?; + + Ok((input_left, (prefix, method, Some(namespace), id))) +} diff --git a/did_core/did_parser_nom/src/did/parsing/mod.rs b/did_core/did_parser_nom/src/did/parsing/mod.rs index 148ef6d17d..3b5f616566 100644 --- a/did_core/did_parser_nom/src/did/parsing/mod.rs +++ b/did_core/did_parser_nom/src/did/parsing/mod.rs @@ -1,9 +1,11 @@ +mod did_cheqd; mod did_core; mod did_key; mod did_peer_4; mod did_sov; mod did_web; +use did_cheqd::parse_did_cheqd; use nom::{ branch::alt, combinator::{all_consuming, map}, @@ -22,6 +24,7 @@ type DidPart<'a> = (&'a str, &'a str, Option<&'a str>, &'a str); pub type DidRanges = (Option, Option, Option); static BASE58CHARS: &str = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"; +static HEX_DIGIT_CHARS: &str = "0123456789abcdefABCDEF"; fn to_id_range(id: &str) -> DidRanges { (None, None, Some(0..id.len())) @@ -66,6 +69,7 @@ pub fn parse_did_ranges(input: &str) -> IResult<&str, DidRanges> { map(parse_did_peer_4, to_did_ranges), map(parse_did_web, to_did_ranges), map(parse_did_key, to_did_ranges), + map(parse_did_cheqd, to_did_ranges), map(parse_qualified_sovrin_did, to_did_ranges), map(parse_qualified_did, to_did_ranges), map(parse_unqualified_sovrin_did, to_id_range), diff --git a/did_core/did_parser_nom/tests/did/negative.rs b/did_core/did_parser_nom/tests/did/negative.rs index 6a52f6a33b..85b9984979 100644 --- a/did_core/did_parser_nom/tests/did/negative.rs +++ 
b/did_core/did_parser_nom/tests/did/negative.rs @@ -18,7 +18,7 @@ test_cases_negative! { "not-a-did" no_method_specific_id: "did:example" - unqalified_invalid_len: + unqualified_invalid_len: "2ZHFFhtTD6hJqzux" indy_non_method_specific_id_char_in_namespace: "did:indy:s@vrin:7Tqg6BwSSWapxgUDm9KKgg" @@ -28,12 +28,34 @@ test_cases_negative! { "did:sov:2wJPyULfLLnYTEFYzByf" sov_invalid_char: "did:sov:2wJPyULfOLnYTEFYzByfUR" - sov_unqalified_invalid_len: + sov_unqualified_invalid_len: "2wJPyULfLLnYTEFYzByf" - sov_unqalified_invalid_char: + sov_unqualified_invalid_char: "2wJPyULfOLnYTEFYzByfUR" key_non_mb_value_char: "did:key:zWA8Ta6fesJIxeYku6cbA" key_non_base58_btc_encoded: "did:key:6MkhaXgBZDvotDkL5257faiztiGiC2QtKLGpbnnEGta2doK" + cheqd_no_namespace_did: + "did:cheqd:de9786cd-ec53-458c-857c-9342cf264f80" + cheqd_empty_namespace_did: + "did:cheqd::de9786cd-ec53-458c-857c-9342cf264f80" + cheqd_sub_namespace_did: + "did:cheqd:mainnet:foo:de9786cd-ec53-458c-857c-9342cf264f80" + cheqd_invalid_namespace_character: + "did:cheqd:m@innet:de9786cd-ec53-458c-857c-9342cf264f80" + cheqd_short_indy_style_id: + "did:cheqd:mainnet:TAwT8WVt3dz2DBAifwuS" + cheqd_long_indy_style_id: + "did:cheqd:mainnet:TAwT8WVt3dz2DBAifwuSknT" + cheqd_non_base58_indy_style_char: + "did:cheqd:mainnet:TAwT8WVt0dz2DBAifwuSkn" + cheqd_invalid_uuid_style_id_1: + "did:cheqd:mainnet:de9786cd-ec53-458c-857c-9342cf264f8" + cheqd_invalid_uuid_style_id_2: + "did:cheqd:mainnet:de9786cd-ec53-458c-857c9342cf264f80" + cheqd_invalid_uuid_style_id_3: + "did:cheqd:mainnet:de9786cd-ec53-458c-857c9342cf2-64f80" + cheqd_non_alpha_uuid_style_char: + "did:cheqd:mainnet:qe9786cd-ec53-458c-857c-9342cf264f80" } diff --git a/did_core/did_parser_nom/tests/did/positive.rs b/did_core/did_parser_nom/tests/did/positive.rs index 6a0a34fa62..9c859b47e4 100644 --- a/did_core/did_parser_nom/tests/did/positive.rs +++ b/did_core/did_parser_nom/tests/did/positive.rs @@ -81,4 +81,14 @@ test_cases_positive! 
{ Some("peer"), None, "4z84UjLJ6ugExV8TJ5gJUtZap5q67uD34LU26m1Ljo2u9PZ4xHa9XnknHLc3YMST5orPXh3LKi6qEYSHdNSgRMvassKP:z27uFkiqJVwvvn2ke5M19UCvByS79r5NppqwjiGAJzkj1EM4sf2JmiUySkANKy4YNu8M7yKjSmvPJTqbcyhPrJs9TASzDs2fWE1vFegmaRJxHRF5M9wGTPwGR1NbPkLGsvcnXum7aN2f8kX3BnhWWWp" + test_did_cheqd: + "did:cheqd:mainnet:de9786cd-ec53-458c-857c-9342cf264f80", + Some("cheqd"), + Some("mainnet"), + "de9786cd-ec53-458c-857c-9342cf264f80" + test_did_cheqd_indy_style: + "did:cheqd:testnet:TAwT8WVt3dz2DBAifwuSkn", + Some("cheqd"), + Some("testnet"), + "TAwT8WVt3dz2DBAifwuSkn" } diff --git a/did_core/did_parser_nom/tests/did_url/positive.rs b/did_core/did_parser_nom/tests/did_url/positive.rs index fea2099559..ced17a3b35 100644 --- a/did_core/did_parser_nom/tests/did_url/positive.rs +++ b/did_core/did_parser_nom/tests/did_url/positive.rs @@ -361,4 +361,27 @@ test_cases_positive! { Some("/anoncreds/v0/REV_REG_DEF/56495/npdb/TAG1"), None, HashMap::new() + test_case28: + "did:cheqd:testnet:d8ac0372-0d4b-413e-8ef5-8e8f07822b2c/resources/40829caf-b415-4b1d-91a3-b56dfb6374f4", + Some("did:cheqd:testnet:d8ac0372-0d4b-413e-8ef5-8e8f07822b2c"), + Some("cheqd"), + Some("testnet"), + Some("d8ac0372-0d4b-413e-8ef5-8e8f07822b2c"), + Some("/resources/40829caf-b415-4b1d-91a3-b56dfb6374f4"), + None, + HashMap::new() + test_case29: + "did:cheqd:mainnet:zF7rhDBfUt9d1gJPjx7s1J?resourceName=universityDegree&resourceType=anonCredsCredDef", + Some("did:cheqd:mainnet:zF7rhDBfUt9d1gJPjx7s1J"), + Some("cheqd"), + Some("mainnet"), + Some("zF7rhDBfUt9d1gJPjx7s1J"), + None, + None, + { + vec![ + ("resourceName".to_string(), "universityDegree".to_string()), + ("resourceType".to_string(), "anonCredsCredDef".to_string()), + ].into_iter().collect() + } } diff --git a/justfile b/justfile index 9cbc5c52df..609f68a7b5 100644 --- a/justfile +++ b/justfile @@ -29,4 +29,4 @@ test-integration-aries-vcx-vdrproxy test_name="": cargo test --manifest-path="aries/aries_vcx/Cargo.toml" -F vdr_proxy_ledger,anoncreds -- --ignored 
{{test_name}} test-integration-did-crate test_name="": - cargo test --examples -p did_doc -p did_parser_nom -p did_resolver -p did_resolver_registry -p did_resolver_sov -p did_resolver_web -p did_key -p did_peer -p did_jwk -F did_doc/jwk --test "*" + cargo test --examples -p did_doc -p did_parser_nom -p did_resolver -p did_resolver_registry -p did_resolver_sov -p did_resolver_web -p did_key -p did_peer -p did_jwk -p did_cheqd -F did_doc/jwk --test "*" \ No newline at end of file