diff --git a/.github/workflows/publish_docs.yml b/.github/workflows/publish_docs.yml
new file mode 100644
index 000000000..697e5d1fb
--- /dev/null
+++ b/.github/workflows/publish_docs.yml
@@ -0,0 +1,79 @@
+name: Publish docs to pages
+run-name: ${{ github.actor }} is publishing docs
+on:
+  # on a successful push
+  push:
+    # only run on pushes to these branches
+    branches:
+      - 'master'
+      - 'main'
+      - 'docs'
+    # or run on pushes to these files.
+    # paths:
+    #   - 'docs/**.md'
+  check_run:
+    types:
+      # Only run on a successfully completed check run
+      - completed
+  # if desired, we can also use cron-like rules to trigger these
+  # schedule:
+  #   - cron: 0 0 * * *
+  # Allow for manual triggering
+  workflow_dispatch:
+
+# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
+permissions:
+  contents: read
+  pages: write
+  id-token: write
+
+# Allow only one concurrent deployment. Cancel any in-progress.
+concurrency:
+  group: "pages"
+  cancel-in-progress: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    env:
+      MDBOOK_ENV: 0.4.24
+      MERMAID_ENV: 0.12.6
+      DEST_DIR: ~/.cargo/bin
+    steps:
+      # these call other published actions as steps.
+      - uses: actions/checkout@v3
+      - name: Configure rust # TODO: can we export building rust and installing mdbook as an artifact and reuse it?
+        run: |
+          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+          rustup update
+      - name: Install mdBook
+        run: |
+          curl -sSL "https://github.com/rust-lang/mdBook/releases/download/v$MDBOOK_ENV/mdbook-v$MDBOOK_ENV-x86_64-unknown-linux-gnu.tar.gz" | tar -xz --directory $DEST_DIR
+          curl -sSL "https://github.com/badboy/mdBook-mermaid/releases/download/v$MERMAID_ENV/mdbook-mermaid-v$MERMAID_ENV-x86_64-unknown-linux-gnu.tar.gz" | tar -xz --directory $DEST_DIR
+      # actually build the book
+      - name: Build the main book
+        run: cd docs && mdbook build
+      - name: Build API docs
+        run: cargo doc --all-features --workspace --no-deps
+      # bring just the built docs over into the artifact (there's a lot of build detritus)
+      - name: Copy cargo docs to API dir
+        run: mkdir -p docs/output/api && cp -r target/doc/* docs/output/api
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v1
+        with:
+          path: ./docs/output
+
+  # write the pages
+  deploy:
+    needs: build
+    permissions:
+      pages: write
+      id-token: write
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v2
diff --git a/.gitignore b/.gitignore
index d46fed15d..d68dbd2ab 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,5 +33,9 @@ requirements.txt
 test-requirements.txt
 venv
 
+# docs
+docs/output
+docs/old
+
 # Local configs
 *.local.toml
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 57d29245a..9f36eade5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -53,7 +53,7 @@ of `<type>: <subject>` where `type` must be one of:
 * **test**: Adding missing tests
 * **chore**: Changes to the build process or auxiliary tools and libraries such as documentation
   generation
-* **breaks**: Contains a *BREAKING_CHANGE* to the existing execution environment.
+* **breaks**: Contains a **BREAKING_CHANGE** to the existing execution environment.
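As an illustrative aside (a hypothetical subject line, not one taken from this change), a commit following the `<type>: <subject>` convention above might read:

``` text
chore: add mdBook build and GitHub Pages publishing workflow
```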
### Subject diff --git a/Cargo.lock b/Cargo.lock index 87f5c2fbd..2c8b6f419 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,7 +10,7 @@ dependencies = [ "base64 0.20.0", "erased-serde", "http 0.2.9", - "hyper 0.14.26", + "hyper 0.14.27", "hyper-alpn", "openssl", "serde", @@ -38,7 +38,7 @@ dependencies = [ "parking_lot 0.12.1", "pin-project-lite 0.2.9", "smallvec 1.10.0", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-util", ] @@ -54,7 +54,7 @@ dependencies = [ "futures-sink", "memchr", "pin-project-lite 0.2.9", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-util", "tracing", ] @@ -94,7 +94,7 @@ dependencies = [ "encoding_rs", "flate2", "futures-core", - "h2 0.3.19", + "h2 0.3.20", "http 0.2.9", "httparse", "httpdate", @@ -102,12 +102,12 @@ dependencies = [ "language-tags", "local-channel", "mime", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite 0.2.9", "rand 0.8.5", "sha1", "smallvec 1.10.0", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-util", "tracing", "zstd", @@ -135,7 +135,7 @@ dependencies = [ "serde_urlencoded 0.7.1", "slab", "socket2", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -169,7 +169,7 @@ checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" dependencies = [ "actix-macros", "futures-core", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -183,10 +183,10 @@ dependencies = [ "actix-utils", "futures-core", "futures-util", - "mio 0.8.6", + "mio 0.8.8", "num_cpus", "socket2", - "tokio 1.28.1", + "tokio 1.28.2", "tracing", ] @@ -221,7 +221,7 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded 0.7.1", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -288,8 +288,8 @@ dependencies = [ "serde_urlencoded 0.7.1", "smallvec 1.10.0", "socket2", - "time 0.3.21", - "url 2.3.1", + "time 0.3.22", + "url 2.4.0", ] [[package]] @@ -299,7 +299,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2262160a7ae29e3415554a3f1fc04c764b1540c116aa524683208078b7a75bc9" dependencies = [ "actix-router", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -314,7 +314,7 @@ dependencies = [ "actix-http", "actix-web", "futures-core", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -323,7 +323,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d44b8fee1ced9671ba043476deddef739dd0959bf77030b26b738cc591737a7" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -360,7 +360,7 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -372,16 +372,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if 1.0.0", - "getrandom 0.2.9", + "getrandom 0.2.10", "once_cell", "version_check", ] [[package]] name = "aho-corasick" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] @@ -401,6 +401,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -461,9 +467,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -472,9 +478,9 @@ version = "0.1.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -546,7 +552,7 @@ dependencies = [ "slog-scope", "slog-stdlog", "slog-term", - "uuid 1.3.3", + "uuid 1.4.0", ] [[package]] @@ -564,8 +570,8 @@ dependencies = [ "serde_json", "slog", "slog-scope", - "tokio 1.28.1", - "uuid 1.3.3", + "tokio 1.28.2", + "uuid 1.4.0", ] [[package]] @@ -584,7 +590,7 @@ dependencies = [ "serde_derive", "slog", "slog-scope", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -609,8 +615,8 @@ dependencies = [ "reqwest 0.11.18", "serde_json", "slog-scope", - "tokio 1.28.1", - "uuid 1.3.3", + "tokio 1.28.2", + "uuid 1.4.0", ] [[package]] @@ -635,7 +641,7 @@ dependencies = [ "slog-scope", "strum", "thiserror", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -655,8 +661,8 @@ dependencies = [ "reqwest 0.11.18", "slog-scope", "thiserror", - "tokio 1.28.1", - "uuid 1.3.3", + "tokio 1.28.2", + "uuid 1.4.0", ] [[package]] @@ -708,9 +714,9 @@ dependencies = [ "slog-term", "tempfile", "thiserror", - "tokio 1.28.1", - "url 2.3.1", - "uuid 1.3.3", + "tokio 1.28.2", + "url 2.4.0", + "uuid 1.4.0", "validator", "validator_derive", "yup-oauth2", @@ -766,7 +772,7 @@ dependencies = [ "tokio-openssl", "tokio-tungstenite", "tungstenite", - "uuid 1.3.3", + "uuid 1.4.0", "woothee", ] @@ -790,7 +796,7 @@ dependencies = [ "gethostname", "hex", "httparse", - "hyper 0.14.26", + "hyper 0.14.27", "lazy_static", "log", "mockall", @@ -819,11 +825,11 @@ dependencies = [ "tempfile", "thiserror", "tokio 0.2.25", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-core", "tungstenite", - "url 2.3.1", - "uuid 1.3.3", + "url 2.4.0", + "uuid 1.4.0", "woothee", ] @@ -847,18 +853,18 @@ dependencies = [ "derive_more", "futures-core", "futures-util", - "h2 0.3.19", + "h2 0.3.20", "http 0.2.9", "itoa 1.0.6", "log", "mime", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite 0.2.9", "rand 0.8.5", "serde", "serde_json", "serde_urlencoded 0.7.1", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -1077,13 +1083,13 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.24" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ + "android-tzdata", "iana-time-zone", "js-sys", - "num-integer", "num-traits", "serde", "time 0.1.45", @@ -1158,8 +1164,8 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" dependencies = [ - "percent-encoding 2.2.0", - "time 0.3.21", + "percent-encoding 2.3.0", + "time 0.3.22", "version_check", ] @@ -1199,9 +1205,9 @@ checksum = 
"e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" dependencies = [ "libc", ] @@ -1222,7 +1228,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.15", + "crossbeam-utils 0.8.16", ] [[package]] @@ -1294,9 +1300,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if 1.0.0", ] @@ -1391,7 +1397,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" dependencies = [ "serde", - "uuid 1.3.3", + "uuid 1.4.0", ] [[package]] @@ -1401,7 +1407,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "rustc_version 0.4.0", "syn 1.0.109", @@ -1604,7 +1610,7 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", "synstructure", @@ -1633,7 +1639,7 @@ checksum = "3364d69f691f3903b1a71605fa04f40a7c2d259f0f0512347e36d19a63debf1f" dependencies = [ "base64 0.21.2", "byteorder", - "getrandom 0.2.9", + "getrandom 0.2.10", "openssl", "zeroize", ] @@ -1698,11 +1704,11 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", ] [[package]] @@ -1827,7 +1833,7 @@ checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" dependencies = [ "futures-channel", "futures-task", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -1836,9 +1842,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1923,9 +1929,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if 1.0.0", "libc", @@ -1934,9 +1940,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.2" +version = "0.27.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "h2" @@ -1958,9 +1964,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d357c7ae988e7d2182f7d7871d0b963962420b0678b0997ce7de72001aeab782" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" dependencies = [ "bytes 1.4.0", "fnv", @@ -1970,7 +1976,7 @@ dependencies = [ "http 0.2.9", "indexmap", "slab", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-util", "tracing", ] @@ -2155,15 +2161,15 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.26" +version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ "bytes 1.4.0", "futures-channel", "futures-core", "futures-util", - "h2 0.3.19", + "h2 0.3.20", "http 0.2.9", "http-body 0.4.5", "httparse", @@ -2171,22 +2177,22 @@ dependencies = [ "itoa 1.0.6", "pin-project-lite 0.2.9", "socket2", - "tokio 1.28.1", + "tokio 1.28.2", "tower-service", "tracing", - "want 0.3.0", + "want 0.3.1", ] [[package]] name = "hyper-alpn" -version = "0.4.0" -source = "git+https://github.com/WalletConnect/hyper-alpn#0030fd02266f761db7770412b4efa051af76c23d" +version = "0.4.1" +source = "git+https://github.com/WalletConnect/hyper-alpn#9761c744b8ba274dfaea04613bb4c39c1a97c141" dependencies = [ - "hyper 0.14.26", + "hyper 0.14.27", "log", "rustls 0.20.8", "rustls-pemfile", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-rustls 0.23.4", "webpki-roots", ] @@ -2199,11 +2205,11 @@ checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" dependencies = [ "ct-logs", "futures-util", - "hyper 0.14.26", + "hyper 0.14.27", "log", "rustls 0.19.1", "rustls-native-certs 0.5.0", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-rustls 0.22.0", "webpki 0.21.4", ] @@ -2215,12 +2221,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0646026eb1b3eea4cd9ba47912ea5ce9cc07713d105b1a14698f4e6433d348b7" dependencies = [ "http 0.2.9", - "hyper 0.14.26", + "hyper 0.14.27", "log", - "rustls 0.21.1", - "rustls-native-certs 0.6.2", - "tokio 1.28.1", - "tokio-rustls 0.24.0", + "rustls 0.21.2", + "rustls-native-certs 0.6.3", + "tokio 1.28.2", + "tokio-rustls 0.24.1", ] [[package]] @@ -2243,17 +2249,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.4.0", - "hyper 0.14.26", + "hyper 0.14.27", "native-tls", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-native-tls", ] [[package]] name = "iana-time-zone" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2302,9 +2308,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2366,9 +2372,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" @@ -2414,9 +2420,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -2470,9 +2476,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.144" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "linked-hash-map" @@ -2515,9 +2521,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg 1.1.0", "scopeguard", @@ -2525,12 +2531,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if 1.0.0", -] +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "match_cfg" @@ -2643,14 +2646,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -2719,7 +2722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if 1.0.0", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -2773,9 +2776,9 @@ dependencies = [ [[package]] name = "net2" -version = "0.2.38" +version = "0.2.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" dependencies = [ "cfg-if 0.1.10", "libc", @@ -2849,18 +2852,18 @@ dependencies = [ [[package]] name = "object" -version = "0.30.3" +version = "0.30.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +checksum = 
"03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "opaque-debug" @@ -2895,9 +2898,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -2963,7 +2966,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", - "lock_api 0.4.9", + "lock_api 0.4.10", "parking_lot_core 0.8.6", ] @@ -2973,8 +2976,8 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.9", - "parking_lot_core 0.9.7", + "lock_api 0.4.10", + "parking_lot_core 0.9.8", ] [[package]] @@ -3008,15 +3011,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.3.5", "smallvec 1.10.0", - "windows-sys 0.45.0", + "windows-targets", ] [[package]] @@ -3048,15 +3051,15 @@ checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" [[package]] name = "percent-encoding" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70" +checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" dependencies = [ "thiserror", "ucd-trie", @@ -3064,9 +3067,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb" +checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" dependencies = [ "pest", "pest_generator", @@ -3074,26 +3077,26 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e" +checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "pest_meta" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411" +checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" dependencies = [ "once_cell", "pest", - "sha2 0.10.6", + "sha2 0.10.7", ] [[package]] @@ -3173,7 +3176,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", "version_check", @@ -3185,7 +3188,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "version_check", ] @@ -3201,9 +3204,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.59" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" +checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" dependencies = [ "unicode-ident", ] @@ -3215,7 +3218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95b4ce31ff0a27d93c8de1849cf58162283752f065a90d508f1105fa6c9a213f" dependencies = [ "idna 0.2.3", - "url 2.3.1", + "url 2.4.0", ] [[package]] @@ -3233,7 +3236,7 @@ version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", ] [[package]] @@ -3352,7 +3355,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", ] [[package]] @@ -3476,16 +3479,16 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "redox_syscall 0.2.16", "thiserror", ] [[package]] name = "regex" -version = "1.8.3" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick", "memchr", @@ -3549,10 +3552,10 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.19", + "h2 0.3.20", "http 0.2.9", "http-body 0.4.5", - "hyper 0.14.26", + "hyper 0.14.27", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -3560,15 +3563,15 @@ dependencies = [ "mime", "native-tls", "once_cell", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite 0.2.9", "serde", "serde_json", "serde_urlencoded 0.7.1", - "tokio 1.28.1", + "tokio 1.28.2", "tokio-native-tls", "tower-service", - "url 2.3.1", + "url 2.4.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3639,7 +3642,7 @@ dependencies = [ "crc32fast", "futures 0.3.28", "http 0.2.9", - "hyper 0.14.26", + "hyper 0.14.27", "hyper-rustls 0.22.1", "lazy_static", "log", @@ -3648,7 +3651,7 @@ dependencies = [ "rustc_version 0.4.0", "serde", "serde_json", - "tokio 1.28.1", + "tokio 1.28.2", "xml-rs", ] @@ -3682,11 +3685,11 @@ dependencies = [ "chrono", "dirs-next", "futures 0.3.28", - "hyper 0.14.26", + 
"hyper 0.14.27", "serde", "serde_json", "shlex 1.1.0", - "tokio 1.28.1", + "tokio 1.28.2", "zeroize", ] @@ -3733,7 +3736,7 @@ dependencies = [ "hyper 0.12.36", "log", "md5", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "rusoto_credential 0.42.0", "rustc_version 0.2.3", "serde", @@ -3756,16 +3759,16 @@ dependencies = [ "hex", "hmac 0.11.0", "http 0.2.9", - "hyper 0.14.26", + "hyper 0.14.27", "log", "md-5", - "percent-encoding 2.2.0", + "percent-encoding 2.3.0", "pin-project-lite 0.2.9", "rusoto_credential 0.47.0", "rustc_version 0.4.0", "serde", "sha2 0.9.9", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -3777,7 +3780,7 @@ dependencies = [ "base64 0.13.1", "blake2b_simd", "constant_time_eq", - "crossbeam-utils 0.8.15", + "crossbeam-utils 0.8.16", ] [[package]] @@ -3816,9 +3819,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "b96e891d04aa506a6d1f318d2771bcb1c7dfda84e126660ace067c9b474bb2c0" dependencies = [ "bitflags", "errno", @@ -3855,9 +3858,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.1" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c911ba11bc8433e811ce56fde130ccf32f5127cab0e0194e9c68c5a5b671791e" +checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" dependencies = [ "log", "ring", @@ -3879,9 +3882,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -4026,7 +4029,7 @@ dependencies = [ "sentry-core 0.30.0", "sentry-debug-images", "sentry-panic", - "tokio 1.28.1", + "tokio 1.28.2", "ureq", ] @@ -4122,14 +4125,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10d8587b12c0b8211bb3066979ee57af6e8657e23cf439dc6c8581fd86de24e8" dependencies = [ "debugid", - "getrandom 0.2.9", + "getrandom 0.2.10", "hex", "serde", "serde_json", "thiserror", - "time 0.3.21", - "url 2.3.1", - "uuid 1.3.3", + "time 0.3.22", + "url 2.4.0", + "uuid 1.4.0", ] [[package]] @@ -4139,34 +4142,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "360ee3270f7a4a1eee6c667f7d38360b995431598a73b740dfe420da548d9cc9" dependencies = [ "debugid", - "getrandom 0.2.9", + "getrandom 0.2.10", "hex", "serde", "serde_json", "thiserror", - "time 0.3.21", - "url 2.3.1", - "uuid 1.3.3", + "time 0.3.22", + "url 2.4.0", + "uuid 1.4.0", ] [[package]] name = "serde" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + 
"syn 2.0.22", ] [[package]] @@ -4192,9 +4195,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" dependencies = [ "itoa 1.0.6", "ryu", @@ -4275,9 +4278,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if 1.0.0", "cpufeatures", @@ -4330,7 +4333,7 @@ dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.21", + "time 0.3.22", ] [[package]] @@ -4422,7 +4425,7 @@ dependencies = [ "slog", "term", "thread_local", - "time 0.3.21", + "time 0.3.22", ] [[package]] @@ -4498,7 +4501,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "rustversion", "syn 1.0.109", @@ -4533,18 +4536,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.18" +version = "2.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "unicode-ident", ] @@ -4555,7 +4558,7 @@ version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", "unicode-xid 0.2.4", @@ -4569,15 +4572,16 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "tempfile" -version = "3.5.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" dependencies = [ + "autocfg 1.1.0", "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -4621,9 +4625,9 @@ version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -4649,9 +4653,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" +checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd" dependencies = [ "itoa 1.0.6", "libc", @@ -4728,14 +4732,14 @@ 
dependencies = [ [[package]] name = "tokio" -version = "1.28.1" +version = "1.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0aa32867d44e6f2ce3385e89dceb990188b8bb0fb25b0cf576647a6f98ac5105" +checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2" dependencies = [ "autocfg 1.1.0", "bytes 1.4.0", "libc", - "mio 0.8.6", + "mio 0.8.8", "num_cpus", "parking_lot 0.12.1", "pin-project-lite 0.2.9", @@ -4834,7 +4838,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e44da00bfc73a25f814cd8d7e57a68a5c31b74b3152a0a1d1f590c97ed06265a" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4845,9 +4849,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -4857,7 +4861,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" dependencies = [ "native-tls", - "tokio 1.28.1", + "tokio 1.28.2", ] [[package]] @@ -4916,7 +4920,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" dependencies = [ "rustls 0.19.1", - "tokio 1.28.1", + "tokio 1.28.2", "webpki 0.21.4", ] @@ -4927,18 +4931,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ "rustls 0.20.8", - "tokio 1.28.1", + "tokio 1.28.2", "webpki 0.22.0", ] [[package]] name = "tokio-rustls" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0d409377ff5b1e3ca6437aa86c1eb7d40c134bfec254e44c830defa92669db5" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.1", - "tokio 1.28.1", + "rustls 0.21.2", + "tokio 1.28.2", ] [[package]] @@ -5065,7 +5069,7 @@ dependencies = [ "futures-core", "futures-sink", "pin-project-lite 0.2.9", - "tokio 1.28.1", + "tokio 1.28.2", "tracing", ] @@ -5135,7 +5139,7 @@ dependencies = [ "log", "rand 0.7.3", "sha-1", - "url 2.3.1", + "url 2.4.0", "utf-8", ] @@ -5216,15 +5220,15 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "ureq" -version = "2.6.2" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "338b31dd1314f68f3aabf3ed57ab922df95ffcd902476ca7ba3c4ce7b908c46d" +checksum = "0b11c96ac7ee530603dcdf68ed1557050f374ce55a5a07193ebf8cbc9f8927e9" dependencies = [ - "base64 0.13.1", + "base64 0.21.2", "log", "native-tls", "once_cell", - "url 2.3.1", + "url 2.4.0", ] [[package]] @@ -5240,13 +5244,13 @@ dependencies = [ [[package]] name = "url" -version = "2.3.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", - "idna 0.3.0", - "percent-encoding 2.2.0", + "idna 0.4.0", + "percent-encoding 2.3.0", "serde", ] @@ -5267,27 +5271,27 @@ dependencies = [ [[package]] name = "uuid" -version = "1.3.3" +version = "1.4.0" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345444e32442451b267fc254ae85a209c64be56d2890e601a0c37ff0c3c5ecd2" +checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "serde", ] [[package]] name = "validator" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ad5bf234c7d3ad1042e5252b7eddb2c4669ee23f32c7dd0e9b7705f07ef591" +checksum = "b92f40481c04ff1f4f61f304d61793c7b56ff76ac1469f1beb199b1445b253bd" dependencies = [ - "idna 0.2.3", + "idna 0.4.0", "lazy_static", "regex", "serde", "serde_derive", "serde_json", - "url 2.3.1", + "url 2.4.0", ] [[package]] @@ -5299,7 +5303,7 @@ dependencies = [ "if_chain", "lazy_static", "proc-macro-error", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", "regex", "syn 1.0.109", @@ -5312,7 +5316,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "111abfe30072511849c5910134e8baf8dc05de4c0e5903d681cbd5c9c4d611e3" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "syn 1.0.109", ] @@ -5341,11 +5345,10 @@ dependencies = [ [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] @@ -5369,9 +5372,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -5379,24 +5382,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.36" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -5406,9 +5409,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote 1.0.28", "wasm-bindgen-macro-support", @@ -5416,22 +5419,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = 
"54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-timer" @@ -5450,9 +5453,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -5536,7 +5539,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] @@ -5554,37 +5557,13 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets", ] [[package]] @@ -5726,9 +5705,9 @@ dependencies = [ [[package]] name = "xml-rs" -version = "0.8.13" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d8f380ae16a37b30e6a2cf67040608071384b1450c189e61bea3ff57cde922d" +checksum = "52839dc911083a8ef63efa4d039d1f58b5e409f923e44c80828f206f66e5541c" [[package]] name = "yaml-rust" @@ -5750,20 +5729,20 @@ dependencies = [ "base64 0.13.1", "futures 0.3.28", "http 0.2.9", - "hyper 0.14.26", + "hyper 0.14.27", "hyper-rustls 0.24.0", "itertools", "log", - "percent-encoding 2.2.0", - "rustls 0.21.1", + "percent-encoding 2.3.0", + "rustls 0.21.2", "rustls-pemfile", "seahash", "serde", "serde_json", - "time 0.3.21", - "tokio 1.28.1", + "time 0.3.22", + "tokio 1.28.2", "tower-service", - "url 2.3.1", + "url 2.4.0", ] [[package]] @@ -5781,9 +5760,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.59", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 8931b7c2d..ac021ddfa 100644 --- 
a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -5,7 +5,7 @@ //! defined here. The `derive(Deserialize)` and `derive(Serialize)` annotations //! are used to generate the ability to serialize these structures to JSON, //! using the `serde` crate. More docs for serde can be found at -//! https://serde.rs +//! use std::collections::HashMap; use std::str::FromStr; diff --git a/autoconnect/autoconnect-settings/src/lib.rs b/autoconnect/autoconnect-settings/src/lib.rs index 138f8c935..902662741 100644 --- a/autoconnect/autoconnect-settings/src/lib.rs +++ b/autoconnect/autoconnect-settings/src/lib.rs @@ -39,7 +39,8 @@ fn include_port(scheme: &str, port: u16) -> bool { } /// The Applications settings, read from CLI, Environment or settings file, for the -/// autoconnect application. These are later converted to [crate::options::AppState]. +/// autoconnect application. These are later converted to +/// [autoconnect::autoconnect-settings::AppState]. #[derive(Clone, Debug, Deserialize)] #[serde(default)] pub struct Settings { @@ -98,7 +99,7 @@ pub struct Settings { /// trigger a user reset because the user may have been offline way too long. pub msg_limit: u32, /// Maximum number of pending notifications for individual UserAgent handlers. - /// (if a given [RegisteredClient] receives more than this number, the calling + /// (if a given [autoconnect-common::RegisteredClient] receives more than this number, the calling /// thread will lock.) pub max_pending_notification_queue: u32, } diff --git a/autoendpoint/src/headers/crypto_key.rs b/autoendpoint/src/headers/crypto_key.rs index f6e58ecfc..74e8e7215 100644 --- a/autoendpoint/src/headers/crypto_key.rs +++ b/autoendpoint/src/headers/crypto_key.rs @@ -2,7 +2,7 @@ use crate::headers::util::split_key_value; use std::collections::HashMap; /// Parses the Crypto-Key header (and similar headers) described by -/// http://tools.ietf.org/html/draft-ietf-httpbis-encryption-encoding-00#section-4 +/// `http://tools.ietf.org/html/draft-ietf-httpbis-encryption-encoding-00#section-4` pub struct CryptoKeyHeader { /// The sections (comma separated) and their items (key-value semicolon separated) sections: Vec>, diff --git a/autopush-common/src/db/mod.rs b/autopush-common/src/db/mod.rs index 3016ef126..88cf8ed77 100644 --- a/autopush-common/src/db/mod.rs +++ b/autopush-common/src/db/mod.rs @@ -75,8 +75,8 @@ pub struct DbSettings { /// A JSON formatted dictionary containing Database settings that /// are specific to the type of Data storage specified in the `dsn` /// See the respective settings structures for - /// [crate::db::bigtable::BigTableDbSettings], [crate::db::dynamodb::DynamoDbSettings], - /// [crate::db::postgres::PostgresDbSettings] + /// [crate::db::dynamodb::DynamoDbSettings] + /// pub db_settings: String, } //TODO: add `From for DbSettings`? 
@@ -189,7 +189,7 @@ pub struct NotificationRecord { #[serde(skip_serializing_if = "Option::is_none")] timestamp: Option, /// DynamoDB expiration timestamp per - /// https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/TTL.html + /// expiry: u64, /// TTL value provided by application server for the message #[serde(skip_serializing_if = "Option::is_none")] diff --git a/autopush/src/db/commands.rs b/autopush/src/db/commands.rs index ab0bb95e4..22e2225e5 100644 --- a/autopush/src/db/commands.rs +++ b/autopush/src/db/commands.rs @@ -499,7 +499,7 @@ fn handle_user_result( Ok(user) } -/// Like Result::ok, convert from Result to Option but applying a +/// Like `Result::ok`, convert from `Result` to `Option` but applying a /// function to the Err value fn ok_or_inspect(result: StdResult, op: F) -> Option where diff --git a/autopush/src/server/mod.rs b/autopush/src/server/mod.rs index e698e2f08..3d81ce1d1 100644 --- a/autopush/src/server/mod.rs +++ b/autopush/src/server/mod.rs @@ -238,7 +238,7 @@ pub struct Server { impl Server { /// Creates a new server handle used by Megaphone and other services. /// - /// This will spawn a new server with the [`state`](autopush_rs::server::AppState) specified, spinning up a + /// This will spawn a new server with the [`state`](autoconnect-settings::AppState) specified, spinning up a /// separate thread for the tokio reactor. The returned ShutdownHandles can /// be used to interact with it (e.g. shut it down). fn start(app_state: &Arc) -> Result> { diff --git a/autopush/src/server/protocol.rs b/autopush/src/server/protocol.rs index 4533e4c6d..c2a4a0d84 100644 --- a/autopush/src/server/protocol.rs +++ b/autopush/src/server/protocol.rs @@ -5,7 +5,7 @@ //! defined here. The `derive(Deserialize)` and `derive(Serialize)` annotations //! are used to generate the ability to serialize these structures to JSON, //! using the `serde` crate. More docs for serde can be found at -//! https://serde.rs +//! use std::collections::HashMap; use std::str::FromStr; diff --git a/docs/book.toml b/docs/book.toml new file mode 100644 index 000000000..7841c230f --- /dev/null +++ b/docs/book.toml @@ -0,0 +1,14 @@ +[book] +title = "Mozilla Autopush Server" +authors = ["JR Conlin", "Philip Jenvey"] +description = "Detailed description and operation of the Mozilla WebPush Server named Autopush" +language = "en" + +[rust] +edition = "2021" + +[build] +build-dir = "output" + +[output.html] +additional-css = ["style.css"] diff --git a/docs/make_book.sh b/docs/make_book.sh new file mode 100644 index 000000000..51eeea87a --- /dev/null +++ b/docs/make_book.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Generate the mdBook version of the document +echo Generating the cargo docs +cargo doc --all-features --workspace --no-deps +echo Generating mdbook +mdbook build +echo Generate the API docs +mkdir -p output/api +cargo doc --all-features --workspace --no-deps +cp -r ../target/doc/* output/api diff --git a/docs/notes.md b/docs/notes.md new file mode 100644 index 000000000..23e839809 --- /dev/null +++ b/docs/notes.md @@ -0,0 +1,60 @@ +# Documentation Notes and comments + +## mdbook + +To build the documentation, install mdbook + +```bash +cargo install mdbook +``` + +then run + +```bash +mdbook build +``` + +This will generate the html files into the `./output` directory. You can also run + +```bash +mdbook serve +``` + +which will serve those files on `http://localhost:3000` + +### Integration with rustdoc + +`mdbook` does not cleanly integrate with `rustdoc` at this time. 
It's possible (via some fun GitHub Actions) to build the docs and include them in the deployed pages.
+
+## Building Pages using GitHub Actions
+
+### Setup
+
+GitHub Actions allows various CI-like steps to run. Currently, there is [publish_docs.yml](../.github/workflows/publish_docs.yml).
+It has two "jobs": one to do the build, another to deploy the built artifact to GitHub Pages.
+
+Under the repo settings, be sure to set:
+
+* Actions
+  * General
+    _Actions permissions_
+    ◉ **Allow $USER, and select non-$USER, actions and reusable workflows**
+      ☑ Allow actions created by GitHub
+      ☑ Allow actions by Marketplace verified creators
+    _Artifact and log retention_
+    (can use default)
+    _Fork pull request workflows from outside collaborators_
+    ◉ **Require approval for first-time contributors**
+    _Workflow permissions_
+    ◉ Read and write permissions
+      ☑ Allow GitHub Actions to create and approve pull requests
+  * Runners
+    No settings needed
+
+* Pages
+  **Build and deployment**
+  Source: GitHub Actions
+
+### Running
+
+You specify triggers within the `.github/workflows` document. Currently `publish_docs.yml` triggers on pushes to the `master`, `main`, or `docs` branches, on completed check runs, and via manual `workflow_dispatch` runs.
diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md
new file mode 100644
index 000000000..f581e8848
--- /dev/null
+++ b/docs/src/SUMMARY.md
@@ -0,0 +1,25 @@
+# Summary
+
+* [Introduction](index.md)
+* [General Architecture](architecture.md)
+  * [Legacy table rotation](table_rotation.md)
+* [Install](install.md)
+  * [Apple Push Notification (APNs) guide](apns.md)
+  * [Google Firebase Cloud Messaging (FCM) guide](fcm.md)
+* [Running](running.md)
+
+## Developing
+
+* [Style](style.md)
+* [Testing](testing.md)
+* [Release Process](releasing.md)
+
+## Reference
+
+* [HTTP Endpoints for Notifications](http.md)
+* [Error codes](errors.md)
+* [Glossary](glossary.md)
+* [Why Rust?](rust.md)
diff --git a/docs/src/adm.md b/docs/src/adm.md
new file mode 100644
index 000000000..7ede63a1e
--- /dev/null
+++ b/docs/src/adm.md
@@ -0,0 +1,89 @@
+# Configuring the Amazon Device Messaging Bridge
+
+
ADM is no longer supported by Autopush. This section is obsolete. +
+
+[ADM](https://developer.amazon.com/docs/adm/overview.html) requires
+credentials that are provided on the [Amazon Developer
+portal](https://developer.amazon.com/myapps.html) page. _**Note**_, this is
+different from the **Amazon Web Services** page.
+
+If you've not already done so, create a new App under the **Apps &
+Services** tab. You will need to create an app so that you can associate
+a Security Profile to it.
+
+Device Messaging can be created by generating a new **Security Profile**
+(located under the **Security Profiles** sub-tab). If specifying for
+Android or Kindle, you will need to provide the Java Package name you've
+used to identify the application (e.g. `org.mozilla.services.admpushdemo`).
+
+You will need to provide the MD5 Signature and SHA256 Signature for the
+package's Certificate.
+
+## Getting the Key Signatures
+
+Amazon provides [some instructions]()
+for getting the signature values of the `CERT.RSA` file. Be aware that Android and ADM
+are both moving targets and some information may no longer be correct.
+
+I was able to use `keytool` to fetch
+the SHA256 signature, but had to get the MD5 signature from inside
+**Android Studio** by looking under the **Gradle** tab, then under the
+Project (root):
+
+``` text
+> Task
+  > android
+    * signingReport
+```
+
+You do not need the SHA1 key provided in the signingReport output.
+
+Once the fields have been provided, an API Key will be generated. This is
+a long JWT that must be stored in a file named `api_key.txt` located in the `/assets` directory. The file should only contain
+the key. Extra white space, comments, or other data will cause the key
+to fail to be read.
+
+This file **MUST** be included with any client application that uses the
+ADM bridge. Please note that the only way to test ADM messaging features
+is to side load the application on a FireTV or Kindle device.
+
+## Configuring the server
+
+The server requires the **Client ID** and **Client Secret** from the ADM
+Security Profile page. Since a given server may need to talk to
+different applications using different profiles, the server can be
+configured to use one of several profiles.
+
+The `autopush_endpoint.ini` file may
+contain the `adm_creds` option. This is a
+JSON structure similar to the APNS configuration. The configuration can
+specify one or more "profiles". Each profile contains a "client_id" and
+"client_secret".
+
+For example, let's say that we want to have a "dev" (for developers) and
+a "stage" (for testing). We could specify the profiles as:
+
+``` json
+{
+    "dev": {
+        "client_id": "amzn1.application.0e7299...",
+        "client_secret": "559dac53757a571d2fee78e5fcb2..."
+    },
+    "stage": {
+        "client_id": "amzn1.application.0e7300...",
+        "client_secret": "589dcc53957a971d2fee78e5fee4..."
+    }
+}
+```
+
+For the configuration, we'd collapse this to one line, e.g.
+
+``` text
+adm_creds={"dev":{"client_id":"amzn1.application.0e7299...","client_secret":"559dac53757a571d2fee78e5fcb2..."},"stage":{"client_id":"amzn1.application.0e7300...","client_secret":"589dcc53957a971d2fee78e5fee4..."}}
+```
+
+Much like other systems, a sender invokes the profile by using it in the
+Registration URL, e.g.,
to register a new endpoint using the `dev` profile: + +> `https://push.service.mozilla.org/v1/adm/dev/registration/` diff --git a/docs/src/apns.md b/docs/src/apns.md new file mode 100644 index 000000000..43d13ec2c --- /dev/null +++ b/docs/src/apns.md @@ -0,0 +1,108 @@ +# Configuring for the APNS bridge + +APNS requires a current [Apple Developer License](https://developer.apple.com/programs/) for the platform or +platforms you wish to bridge to (e.g. iOS, desktop, etc.). Once that +license has been acquired, you will need to create and export a valid +`.p12` type key file. For this document, we +will concentrate on creating an iOS certificate. + +## Create the App ID + +First, you will need an Application ID. If you do not already have an +application, you will need to [create an application +ID](https://developer.apple.com/account/ios/identifier/bundle/create). +For an App ID to use Push Notifications, it must be created as an +**Explicit App ID**. Please be sure that under "**App Services**" you +select **Push Notifications**. Once these values are set, click on +\[Continue\]. + +Confirm that the app settings are as you desire and click \[Register\], +or click \[Back\] and correct them. **Push Notifications** should appear +as "Configurable". + +## Create the Certificate + +Then [Create a new +certificate](https://developer.apple.com/account/ios/certificate/create). +Select "Apple Push Notification service SSL" for either Development or +Production, depending on intended usage of the certificate. +"Development", in this case, means a certificate that will not be used +by an application released for general public use, but instead only for +personal or team development. This is also known as a "Sandbox" +application and will require setting the "use_sandbox" flag. Once the +preferred option is selected, click \[Continue\]. + +Select the App ID that matches the Application that will use Push +Notifications. Several Application IDs may be present, be sure to match +the correct App ID. This will be the App ID which will act as the +recipient bridge for Push Notifications. Select \[Continue\]. + +Follow the on-screen instructions to generate a **CSR file**, click +\[Continue\], and upload the CSR. + +Download the newly created +*iOSTeam_Provisioning_Profile\_.mobileprovision* keyset, and import it +into your **KeyChain Access** app. + +## Exporting the .p12 key set + +In **KeyChain Access**, for the **login** keychain, in the +**Certificates** category, you should find an **Apple Push Services: +\*your AppID**\* certificate. Right click on this certificate and select +*Export "Apple Push Services:"...*. Provide the file with a reasonably +unique name, such as `Push_Production_APNS_Keys.p12`, so that you can +find it easily later. You may wish to secure these keys with a password. + +## Converting .p12 to PEM + +You will need to convert the .p12 file to PEM format. **openssl** can +perform these steps for you. A simple script you could use might be: + +``` bash +#!/bin/bash +echo Converting $1 to PEM +openssl pkcs12 -in $1 -out $1_cert.pem -clcerts -nokeys +openssl pkcs12 -in $1 -out $1_key.pem -nocerts -nodes +``` + +This will divide the p12 key into two components that can be read by the +autopush application. + +## Sending the APNS message + +The APNS post message contains JSON formatted data similar to the +following: + +``` json +{ + "aps": { + "content-available": 1 + }, + "key": "value", + ... +} +``` + +*aps* is reserved as a sub-dictionary. All other *key*: **value** slots +are open. 
+ +In addition, you must specify the following headers: + + - *apns-id*: A lowercase, dash formatted UUID for this message. + - *apns-priority*: Either **10** for Immediate delivery or **5** for + delayable delivery. + - *apns-topic*: The bundle ID for the recipient application. This must + match the bundle ID of the AppID used to create the *"Apple Push + Services:..."* certificate. It usually has the format of `com.example.ApplicationName`. + - *apns-expiration*: The timestamp for when this message should expire + in UTC based seconds. A zero ("0") means immediate expiration. + +## Handling APNS responses + +APNS returns a status code and an optional JSON block describing the +error. A list of [these responses are provided in the APNS +documentation](https://developer.apple.com/library/content/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html) +> _**Note**_, Apple may change the document locaiton without warning. you may +be able to search using +[DeviceTokenNotForTopic](https://developer.apple.com/search/?q=DeviceTokenNotForTopic&type=Guides) +or similar error messages. diff --git a/docs/src/architecture.md b/docs/src/architecture.md new file mode 100644 index 000000000..1858419ff --- /dev/null +++ b/docs/src/architecture.md @@ -0,0 +1,248 @@ +# Architecture + +![image](assets/push_architecture.svg) + +## Overview + +For Autopush, we will focus on the section in the above diagram in the +*Autopush* square. + +Autopush consists of two types of server daemons: + +`autoconnect` (connection node) - _Run a connection node. These handle large amounts of Firefox user agents using the Websocket protocol._ + +`autoendpoint` (endpoint node) - _Run an endpoint node. These provide a `WebPush` HTTP API for `Application Servers ` to HTTP POST messages to endpoints._ + +To have a running Push Service for Firefox, both of these server daemons +must be running and communicating with the same Storage system and tables. + +Endpoint nodes handle all `Notification` POST requests, looking up in +storage to see what Push server the UAID is connected to. The Endpoint +nodes then attempt delivery to the appropriate connection node. If the +UAID is not online, the message may be stored in Storage in the +appropriate message table. + +Push connection nodes accept websocket connections (this can easily be +HTTP/2 for WebPush), and deliver notifications to connected clients. +They check Storage for missed notifications as necessary. + +There will be many more Push servers to handle the connection node, +while more Endpoint nodes can be handled as needed for notification +throughput. + +## Cryptography + +The HTTP endpoint URL's generated by the connection nodes contain +encrypted information, the `UAID` and `Subscription` to send the message +to. This means that they both must have the same `CRYPTO_KEY` supplied +to each. + +See `autopush_common::endpoint::make_endpoint(...)` for the endpoint +URL generator. + +If you are only running Autopush locally, you can skip to `running` as +later topics in this document apply only to developing or production +scale deployments of Autopush. + +## Storage Tables + +Autopush uses a key / value data storage system. It can either use +AWS DynamoDB, Google Cloud Bigtable, or a specific combination of the +two. + +### DynamoDB + +For DynamoDB, Autopush uses a single router and messages table. +On startup, Autopush will create these tables. 
+For more information on DynamoDB tables, see + + +### Google Bigtable + +For Bigtable, Autopush presumes +that the table `autopush` has already been allocated, and that the following Cell Families +have been created: + +* `message` with a garbage collection policy set to max age of 1 second +* `router` with a garbage collection policy set to max versions of 1 +* `message_topic` with a garbage collection policy set to max versions of 1 or max age of 1 second + +the following BASH script may be a useful example. It presumes that the [google-cloud-sdk](https://cloud.google.com/cli) has already been installed and initialized. + +```bash +PROJECT=test &&\ +INSTANCE=test &&\ +DATABASE=autopush &&\ +MESSAGE=message &&\ +TOPIC=message_topic &&\ +ROUTER=router &&\ +cbt -project $PROJECT -instance $INSTANCE createtable $DATABASE && \ +cbt -project $PROJECT -instance $INSTANCE createfamily $DATABASE $MESSAGE && \ +cbt -project $PROJECT -instance $INSTANCE createfamily $DATABASE $TOPIC && \ +cbt -project $PROJECT -instance $INSTANCE createfamily $DATABASE $ROUTER && \ +cbt -project $PROJECT -instance $INSTANCE setgcpolicy $DATABASE $MESSAGE maxage=1s && \ +cbt -project $PROJECT -instance $INSTANCE setgcpolicy $DATABASE $TOPIC maxversions=1 or maxage=1s && \ +cbt -project $PROJECT -instance $INSTANCE setgcpolicy $DATABASE $ROUTER maxversions=1 +``` + +Please note, this document will refer to the `message` table and the `router` table for +legacy reasons. Please consider these to be the same as the `message` and `router` cell +families. + +### Router Table Schema + +The router table contains info about how to send out the incoming message. + +#### DynamoDB + +The router table stores metadata for a given `UAID` as well as which +month table should be used for clients with a `router_type` of +`webpush`. + +For "[Bridging](install.md#configuring-for-third-party-bridge-services)", additional bridge-specific data may be stored in the +router record for a `UAID`. + +| | | +|--------------|----------------------------------------------------------------------------------| +| uaid | **partition key** - `UAID` | +| router_type | Router Type (See [`autoendpoint::extractors::routers::RouterType`]) | +| node_id | Hostname of the connection node the client is connected to. | +| connected_at | Precise time (in milliseconds) the client connected to the node. | +| last_connect | **global secondary index** - year-month-hour that the client has last connected. | +| curmonth | Message table name to use for storing `WebPush` messages. | + +Autopush uses an optimistic deletion policy for `node_id` to avoid +delete calls when not needed. During a delivery attempt, the endpoint +will check the `node_id` for the corresponding `UAID`. If the client is +not connected, it will clear the `node_id` record for that `UAID` in the +router table. + +If an endpoint node discovers during a delivery attempt that the +`node_id` on record does not have the client connected, it will clear +the `node_id` record for that `UAID` in the router table. + +The `last_connect` has a secondary global index on it to allow for +maintenance scripts to locate and purge stale client records and +messages. + +Clients with a `router_type` of `webpush` drain stored messages from the +message table named `curmonth` after completing their initial handshake. +If the `curmonth` entry is not the current month then it updates it to +store new messages in the latest message table after stored message +retrieval. 
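+
+As a rough, hypothetical illustration (the table name, endpoint URL, and
+`UAID` below are example values only, assuming the local DynamoDB server
+described in the install docs), a router record could be inspected with
+the AWS CLI:
+
+```bash
+# Example values only: "router" table, local DynamoDB endpoint, made-up UAID.
+aws dynamodb get-item \
+  --endpoint-url http://127.0.0.1:8000 \
+  --table-name router \
+  --key '{"uaid": {"S": "01234567-0123-abcd-0123-0123456789ab"}}'
+# A returned item carries the attributes described above, e.g.
+# router_type, node_id, connected_at, last_connect, and curmonth.
+```
+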
+ +#### Bigtable + +The `Router` table is identified by entries with just the `UAID`, containing cells +that are of the `router` family. These values are similar to the ones listed above. + +| | | +|--------------|----------------------------------------------------------------------------------| +| Key | `UAID` | +| router_type | Router Type (See [`autoendpoint::extractors::routers::RouterType`]) | +| node_id | Hostname of the connection node the client is connected to. | +| connected_at | Precise time (in milliseconds) the client connected to the node. | +| last_connect | year-month-hour that the client has last connected. | + +### Message Table Schema + +The message table stores messages for users while they're offline or +unable to get immediate message delivery. + +#### DynamoDB + +| | | +|---------------|---------------------------------------------------------------------------------------------------------------------------------------| +| uaid | **partition key** - `UAID` | +| chidmessageid | **sort key** - `CHID` + `Message-ID`. | +| chids | Set of `CHID` that are valid for a given user. This entry is only present in the item when `chidmessageid` is a space. | +| data | Payload of the message, provided in the Notification body. | +| headers | HTTP headers for the Notification. | +| ttl | Time-To-Live for the Notification. | +| timestamp | Time (in seconds) that the message was saved. | +| updateid | UUID generated when the message is stored to track if the message is updated between a client reading it and attempting to delete it. | + +The subscribed channels are stored as `chids` in a record stored with a +blank space set for `chidmessageid`. Before storing or delivering a +`Notification` a lookup is done against these `chids`. + +#### Bigtable + +| | | +|---------------|---------------------------------------------------------------------------------------------------------------------------------------| +| Key | `UAID`#`CHID`#`Message-ID` | +| data | Payload of the message, provided in the Notification body. | +| headers | HTTP headers for the Notification. | +| ttl | Time-To-Live for the Notification. | +| timestamp | Time (in seconds) that the message was saved. | +| updateid | UUID generated when the message is stored to track if the message is updated between a client reading it and attempting to delete it. | + +Autopush used a [table rotation system](table_rotation.md), which is now legacy. You may see some references to this as we continue to remove it. + +## Push Characteristics + +* When the Push server has sent a client a notification, no further + notifications will be accepted for delivery (except in one edge + case). In this state, the Push server will reply to the Endpoint + with a 503 to indicate it cannot currently deliver the notification. + Once the Push server has received ACKs for all sent notifications, + new notifications can flow again, and a check of storage will be + done if the Push server had to reply with a 503. The Endpoint will + put the Notification in storage in this case. +* (Edge Case) Multiple notifications can be sent at once, if a + notification comes in during a Storage check, but before it has + completed. +* If a connected client is able to accept a notification, then the + Endpoint will deliver the message to the client completely bypassing + Storage. This Notification will be referred to as a Direct + Notification vs. a Stored Notification. 
+* (_DynamoDb_) Provisioned Write Throughput for the Router table determines how + many connections per second can be accepted across the entire + cluster. +* (_DynamoDb_) Provisioned Read Throughput for the Router table **and** Provisioned + Write throughput for the Storage table determine maximum possible + notifications per second that can be handled. In theory notification + throughput can be higher than Provisioned Write Throughput on the + Storage as connected clients will frequently not require using + Storage at all. Read's to the Router table are still needed for + every notification, whether Storage is hit or not. +* (_DynamoDb_) Provisioned Read Throughput on for the Storage table is an important + factor in maximum notification throughput, as many slow clients may + require frequent Storage checks. +* If a client is reconnecting, their Router record will be old. Router + records have the node_id cleared optimistically by Endpoints when + the Endpoint discovers it cannot deliver the notification to the + Push node on file. If the conditional delete fails, it implies that + the client has during this period managed to connect somewhere + again. It's entirely possible that the client has reconnected and + checked storage before the Endpoint stored the Notification, as a + result the Endpoint must read the Router table again, and attempt to + tell the node_id for that client to check storage. Further action + isn't required, since any more reconnects in this period will have + seen the stored notification. + +### Push Endpoint Length + +The Endpoint URL may seem excessively long. This may seem needless and +confusing since the URL consists of the unique User Agent Identifier +(UAID) and the Subscription Channel Identifier (CHID). Both of these are +class 4 Universally Unique Identifiers (UUID) meaning that an endpoint +contains 256 bits of entropy (2 \* 128 bits). When used in string +format, these UUIDs are always in lower case, dashed format (e.g. +`01234567-0123-abcd-0123-0123456789ab`). + +Unfortunately, since the endpoint contains an identifier that can be +easily traced back to a specific device, and therefore a specific user, +there is the risk that a user might inadvertently disclose personal +information via their metadata. To prevent this, the server obscures the +UAID and CHID pair to prevent casual determination. + +As an example, it is possible for a user to get a Push endpoint for two +different accounts from the same User Agent. If the UAID were disclosed, +then a site may be able to associate a single user to both of those +accounts. In addition, there are reasons that storing the UAID and CHID +in the URL makes operating the server more efficient. + +Naturally, we're always looking at ways to improve and reduce the length +of the URL. This is why it's important to store the entire length of the +endpoint URL, rather than try and optimize in some manner. diff --git a/docs/src/assets/push_architecture.svg b/docs/src/assets/push_architecture.svg new file mode 100644 index 000000000..54aace3e2 --- /dev/null +++ b/docs/src/assets/push_architecture.svg @@ -0,0 +1,2 @@ + +
[push_architecture.svg: Push architecture diagram. Text labels include: Autopush, connection nodes, endpoint nodes, Application Servers, Developer Dashboard, Developers, push.services.m.c, updates.push.services.m.c, developer.services.m.c, WS, WSS, HTTP, HTTPS, direct delivery, S3 Put Event, Recent Logged Message Metadata, Push Messages API, Message/Router Tables, Push Processor, firehose or other ops transit, Logs.]
\ No newline at end of file diff --git a/docs/src/errors.md b/docs/src/errors.md new file mode 100644 index 000000000..682bf2e04 --- /dev/null +++ b/docs/src/errors.md @@ -0,0 +1,107 @@ +# Error Codes + +Autopush uses error codes based on [HTTP response +codes](https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html). An +error response will contain a JSON body including an additional error +information (see `error_resp`). + +Unless otherwise specified, all calls return one the following error +statuses: + +* 20x - **Success** - The message was accepted for transmission to the + client. Please note that the message may still be rejected by the + User Agent if there is an error with the message's encryption. + +* 301 - **Moved + \`Location:\`** if `{client_token}` is invalid (Bridge API + Only) - Bridged services (ones that run over third party services + like GCM and APNS), may require a new URL be used. Please stop using + the old URL immediately and instead use the new URL provided. + +* 400 - **Bad Parameters** -- One or more of the parameters specified + is invalid. See the following sub-errors indicated by `errno` + + - errno 101 - Missing necessary crypto keys - One or more required + crypto key elements are missing from this transaction. Refer to + the [appropriate + specification](https://datatracker.ietf.org/doc/draft-ietf-httpbis-encryption-encoding/) + for the requested content-type. + + - errno 108 - Router type is invalid - The URL contains an invalid + router type, which may be from URL corruption or an unsupported + bridge. Refer to `bridge_api`. + + - errno 110 - Invalid crypto keys specified - One or more of the + crytpo key elements are invalid. Refer to the [appropriate + specification](https://datatracker.ietf.org/doc/draft-ietf-httpbis-encryption-encoding/) + for the requested content-type. + + - errno 111 - Missing Required Header - A required crypto element + header is missing. Refer to the [appropriate + specification](https://datatracker.ietf.org/doc/draft-ietf-httpbis-encryption-encoding/) + for the requested content-type. + + - Missing TTL Header - Include the Time To Live header + ([IETF WebPush protocol + §6.2](https://tools.ietf.org/html/draft-ietf-webpush-protocol#section-6.2)) + - Missing Crypto Headers - Include the appropriate + encryption headers ([WebPush Encryption + §3.2](https://webpush-wg.github.io/webpush-encryption/#rfc.section.3.2) + and [WebPush VAPID + §4](https://tools.ietf.org/html/draft-ietf-webpush-vapid-02#section-4)) + + - errno 112 - Invalid TTL header value - The Time To Live "TTL" + header contains an invalid or unreadable value. Please change to + a number of seconds that this message should live, between 0 + (message should be dropped immediately if user is unavailable) + and 2592000 (hold for delivery within the next approximately 30 + days). + + - errno 113 - Invalid Topic header value - The Topic header + contains an invalid or unreadable value. Please use only ASCII + alphanumeric values \[A-Za-z0-9\] and a maximum length of 32 + bytes.. + +* 401 - **Bad Authorization** - `Authorization` header is invalid or missing. + See the [VAPID + specification](https://datatracker.ietf.org/doc/draft-ietf-webpush-vapid/). + + - errno 109 - Invalid authentication + +* 404 - **Endpoint Not Found** - The URL specified is invalid and + should not be used again. + + - errno 102 - Invalid URL endpoint + +* 410 - **Endpoint Not Valid** - The URL specified is no longer valid + and should no longer be used. 
A User has become permanently + unavailable at this URL. + + - errno 103 - Expired URL endpoint + - errno 105 - Endpoint became unavailable during request + - errno 106 - Invalid subscription + +* 413 - **Payload too large** - The body of the message to send is too + large. The max data that can be sent is 4028 characters. Please + reduce the size of the message. + + - errno 104 - Data payload too large + +* 500 - **Unknown server error** - An internal error occurred within + the Push Server. + + - errno 999 - Unknown error + +* 502 - **Bad Gateway** - The Push Service received an invalid + response from an upstream Bridge service. + + - errno 900 - Internal Bridge misconfiguration + - errno 901 - Invalid authentication + - errno 902 - An error occurred while establishing a connection + - errno 903 - The request timed out + +* 503 - **Server temporarily unavaliable.** - The Push Service is + currently unavailable. See the error number "errno" value to see if + retries are available. + + - errno 201 - Use exponential back-off for retries + - errno 202 - Immediate retry ok diff --git a/docs/src/fcm.md b/docs/src/fcm.md new file mode 100644 index 000000000..976c4e496 --- /dev/null +++ b/docs/src/fcm.md @@ -0,0 +1,3 @@ +# Google GCM/FCM + + \ No newline at end of file diff --git a/docs/src/glossary.md b/docs/src/glossary.md new file mode 100644 index 000000000..a320c4afd --- /dev/null +++ b/docs/src/glossary.md @@ -0,0 +1,52 @@ +# Glossary + +
+
+**AppServer**
+A third-party Application Server that delivers notifications to client
+applications via Push.
+
+**Bridging**
+Using a third party or proprietary network in order to deliver Push
+notifications to an App. This may be preferred for mobile devices, where
+such a network may improve battery life, among other benefits.
+
+**Channel**
+A unique route between an `AppServer` and the Application. May also be
+referred to as a `Subscription`.
+
+**CHID**
+The Channel Subscription ID. Push assigns each subscription (or channel)
+a unique identifier.
+
+**Message-ID**
+A unique message ID. Each message for a given subscription is given a
+unique identifier that is returned to the `AppServer` in the `Location`
+header.
+
+**Notification**
+A message sent to an endpoint node intended for delivery to an HTTP
+endpoint. Autopush stores these in the message tables.
+
+**Router Type**
+Every `UAID` that connects has a router type. This indicates the type of
+routing to use when dispatching notifications. For most clients, this
+value will be `webpush`. Clients using `Bridging` will use either
+`gcm`, `fcm`, or `apns`.
+
+**Subscription**
+A unique route between an `AppServer` and the Application. May also be
+referred to as a `Channel`.
+
+**UAID**
+The Push User Agent Registration ID. Push assigns each remote recipient
+(Firefox client) a unique identifier. These may occasionally be reset by
+the Push Service or the client.
+
+**WebPush**
+An IETF standard for communication between Push Services, the clients,
+and application servers.
+
+See:
+
diff --git a/docs/src/http.md b/docs/src/http.md new file mode 100644 index 000000000..9293d4eff --- /dev/null +++ b/docs/src/http.md @@ -0,0 +1,480 @@ +# HTTP Endpoints for Notifications + +Autopush exposes three HTTP endpoints: + +`/wpush/...` + +This is tied to the Endpoint Handler +`~autopush.web.webpush.WebPushHandler` This endpoint is returned by the +Push registration process and is used by the `AppServer` to send Push +alerts to the Application. See `send`. + +`/m/...` + +This is tied to `~autopush.web.message.MessageHandler`. This endpoint +allows a message that has not yet been delivered to be deleted. See +`cancel`. + +`/v1/.../.../registration/...` + +This is tied to the `reg_calls` Handlers. This endpoint is used by +devices that wish to use `bridging` protocols to register new channels. + +*NOTE*: This is not intended to be used by app developers. Please see +the [Web Push API on +MDN](https://developer.mozilla.org/en-US/docs/Web/API/Push_API) for how +to use WebPush. See `bridge_api`. + +--- + + + +# Push Service HTTP API + +The following section describes how remote servers can send Push +Notifications to apps running on remote User Agents. + +## Lexicon + + **{UAID}** +_The Push User Agent Registration ID_ + +Push assigns each remote recipient a unique identifier. {UAID}s are +UUIDs in lower case, undashed format. (e.g. +'01234567abcdabcdabcd01234567abcd') This value is assigned during +**Registration** + +**{CHID}** +_The `Channel` Subscription ID_ + +Push assigns a unique identifier for each subscription for a given +{UAID}. Like {UAID}s, {CHID}s are UUIDs, but in lower case, dashed +format( e.g. '01234567-abcd-abcd-abcd-0123456789ab'). The User Agent +usually creates this value and passes it as part of the **Channel +Subscription**. If no value is supplied, the server will create and +return one. + +**{message-id}** +_The unique Message ID_ + +Push assigns each message for a given Channel Subscription a unique +identifier. This value is assigned during **Send Notification**. + +## Response + +The responses will be JSON formatted objects. In addition, API calls +will return valid HTTP error codes (see `errors` sub-section for +descriptions of specific errors). + +For non-success responses, an extended error code object will be +returned with the following format: + +``` json +{ + "code": 404, // matches the HTTP status code + "errno": 103, // stable application-level error number + "error": "Not Found", // string representation of the status + "message": "No message found" // optional additional error information +} +``` + +See [Errors](errors.md) for a list of the errors, causes, and potential resolutions. + +## Calls + +### Send Notification + +Send a notification to the given endpoint identified by its `push_endpoint`. Please note, the Push endpoint +URL (which is what is used to send notifications) should be considered +"opaque". We reserve the right to change any portion of the Push URL in +future provisioned URLs. + +The `Topic` HTTP header allows new messages +to replace previously sent, unreceived subscription updates. See +`topic`. + +**Call:** + +If the client is using webpush style data delivery, then the body in +its entirety will be regarded as the data payload for the message per +[the WebPush +spec](https://tools.ietf.org/html/draft-thomson-webpush-http2-02#section-5). + +> _**Note**_ +> Some bridged connections require data transcription and may limit the +> length of data that can be sent. 
For instance, using a GCM/FCM bridge +> will require that the data be converted to base64. This means that +> data may be limited to only 2744 bytes instead of the normal 4096 +> bytes. +> + +**Reply:** + +``` json +{"message-id": {message-id}} +``` + +**Return Codes:** + +- statuscode 404 + Push subscription is invalid. + +- statuscode 202 + Message stored for delivery to client at a later time. + +- statuscode 200 + Message delivered to node client is connected to. + +### Message Topics + +Message topics allow newer message content to replace previously sent, +unread messages. This prevents the UA from displaying multiple messages +upon reconnect. [A blog +post](https://hacks.mozilla.org/2016/11/mozilla-push-server-now-supports-topics/) +provides an example of how to use Topics, but a summary is provided +here. + +To specify a Topic, include a `Topic` HTTP +header along with your `send`. The topic can be any 32 byte +alpha-numeric string (including "\_" and "-"). + +Example topics might be `MailMessages`, +`Current_Score`, or `20170814-1400_Meeting_Reminder` + +For example: + +``` bash +curl -X POST \ + https://push.services.mozilla.com/wpush/abc123... \ + -H "TTL: 86400" \ + -H "Topic: new_mail" \ + -H "Authorization: Vapid AbCd..." \ + ... +``` + +Would create or replace a message that is valid for the next 24 hours +that has the topic of `new_mail`. The body +of this might contain the number of unread messages. If a new message +arrives, the Application Server could send a second message with a body +containing a revised message count. + +Later, when the User reconnects, she will only see a single notification +containing the latest notification, with the most recent new mail +message count. + +### Cancel Notification + +Delete the message given the `message_id`. + +**Call:** + +**Parameters:** + +> None + +**Reply:** + +``` json +{} +``` + +**Return Codes:** + +See [errors](#error-codes). + +--- + + +# Push Service Bridge HTTP Interface + +Push allows for remote devices to perform some functions using an HTTP +interface. This is mostly used by devices that are bridging via an +external protocol like +[GCM](https://developers.google.com/cloud-messaging/)/[FCM](https://firebase.google.com/docs/cloud-messaging/) +or +[APNs](https://developer.apple.com/library/ios/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/Introduction.html#//apple_ref/doc/uid/TP40008196-CH1-SW1). +All message bodies must be UTF-8 encoded. + +API methods requiring Authorization must provide the Authorization +header containing the registration secret. The registration secret is +returned as "secret" in the registration response. + +## Lexicon + +For the following call definitions: + +**{type}** +_The bridge type._ + + Allowed bridges are `gcm` (Google Cloud + Messaging), `fcm` (Firebase Cloud + Messaging), and `apns` (Apple Push + Notification system) + +**{app_id}** +_The bridge specific application identifier_ + +Each bridge may require a unique token that addresses the remote +application For GCM/FCM, this is the `SenderID` (or 'project number') and is +pre-negotiated outside of the push service. You can find this number +using the [Google developer +console](https://console.developers.google.com/iam-admin/settings/project). +For APNS, this value is the "platform" or "channel" of development (e.g. +"firefox", "beta", "gecko", etc.) For our examples, we will use a client +token of "33clienttoken33". 
+ +**{instance_id}** +_The bridge specific private identifier token_ + +Each bridge requires a unique token that addresses the application on a +given user's device. This is the "[Registration +Token](https://firebase.google.com/docs/cloud-messaging/android/client#sample-register)" +for GCM/FCM or "[Device +Token](https://developer.apple.com/library/ios/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/Chapters/IPhoneOSClientImp.html#//apple_ref/doc/uid/TP40008194-CH103-SW2)" +for APNS. This is usually the product of the application registering the +{instance_id} with the native bridge via the user agent. For our +examples, we will use an instance ID of "11-instance-id-11". + +**{secret}** +_The registration secret from the Registration call._ + +Most calls to the HTTP interface require a Authorization header. The +Authorization header is a simple bearer token, which has been provided +by the **Registration** call and is preceded by the scheme name +"Bearer". For our examples, we will use a registration secret of +"00secret00". + +An example of the Authorization header would be: + +```html + Authorization: Bearer 00secret00 +``` + +## Calls + +### Registration + +Request a new UAID registration, Channel ID, and set a bridge type and +3rd party bridge instance ID token for this connection. (See +`~autopush.web.registration.NewRegistrationHandler`) + +*NOTE*: This call is designed for devices to register endpoints to be +used by bridge protocols. Please see [Web Push +API](https://developer.mozilla.org/en-US/docs/Web/API/Push_API) for how +to use Web Push in your application. + +**Call:** + +This call requires no Authorization header. + +**Parameters:** + +`{"token":{instance_id}}` + +> _**Note**_ +> +> If additional information is required for the bridge, it may be +> included in the parameters as JSON elements. Currently, no additional +> information is required. +> + +**Reply:** + +``` json +`{"uaid": {UAID}, "secret": {secret}, +"endpoint": "https://updates-push...", "channelID": {CHID}}` +``` + +example: + +``` http + POST /v1/fcm/33clienttoken33/registration + + {"token": "11-instance-id-11"} +``` + +``` json + {"uaid": "01234567-0000-1111-2222-0123456789ab", + "secret": "00secret00", + "endpoint": "https://updates-push.services.mozaws.net/push/...", + "channelID": "00000000-0000-1111-2222-0123456789ab"} +``` + +**Return Codes:** + +See `errors`. + +### Token updates + +Update the current bridge token value. Note, this is a **\*PUT**\* call, +since we are updating existing information. (See +`~autopush.web.registration.UaidRegistrationHandler`) + +**Call:** + +```html + Authorization: Bearer {secret} +``` + +**Parameters:** + +```{"token": {instance_id}}``` + +> _**Note**_ +> +> +> If additional information is required for the bridge, it may be +> included in the parameters as JSON elements. Currently, no additional +> information is required. +> + +**Reply:** + +``` json +{} +``` + +example: + +``` http + PUT /v1/fcm/33clienttoken33/registration/abcdef012345 + Authorization: Bearer 00secret00 + + {"token": "22-instance-id-22"} +``` + +``` json +{} +``` + +**Return Codes:** + +See `errors`. + +### Channel Subscription + +Acquire a new ChannelID for a given UAID. 
(See +`~autopush.web.registration.SubRegistrationHandler`) + +**Call:** + +```html + Authorization: Bearer {secret} +``` + +**Parameters:** + +`{}` + +**Reply:** + +``` json +{"channelID": {CHID}, "endpoint": "https://updates-push..."} +``` + +example: + +``` http + POST /v1/fcm/33clienttoken33/registration/abcdef012345/subscription + Authorization: Bearer 00secret00 + + {} +``` + +``` json + {"channelID": "01234567-0000-1111-2222-0123456789ab", + "endpoint": "https://updates-push.services.mozaws.net/push/..."} +``` + +**Return Codes:** + +See `errors`. + +### Unregister UAID (and all associated ChannelID subscriptions) + +Indicate that the UAID, and by extension all associated subscriptions, +is no longer valid. (See +`~autopush.web.registration.UaidRegistrationHandler`) + +**Call:** + +```html + Authorization: Bearer {secret} +``` + +**Parameters:** + +`{}` + +**Reply:** + +``` json +{} +``` + +**Return Codes:** + +See `errors`. + +### Unsubscribe Channel + +Remove a given ChannelID subscription from a UAID. (See: +`~autopush.web.registration.ChannelRegistrationHandler`) + +**Call:** + +```html + Authorization: Bearer {secret} +``` + +**Parameters:** + +`{}` + +**Reply:** + +``` json +{} +``` + +**Return Codes:** + +See `errors`. + +### Get Known Channels for a UAID + +Fetch the known ChannelIDs for a given bridged endpoint. This is useful +to check link status. If no channelIDs are present for a given UAID, an +empty set of channelIDs will be returned. (See: +`~autopush.web.registration.UaidRegistrationHandler`) + +**Call:** + +`Authorization: Bearer {secret}` + +**Parameters:** + +`{}` + +**Reply:** + +``` json +{"uaid": {UAID}, "channelIDs": [{ChannelID}, ...]} +``` + +example: + +``` http +GET /v1/gcm/33clienttoken33/registration/abcdef012345/ +Authorization: Bearer 00secret00 +{} +``` + +``` json + {"uaid": "abcdef012345", + "channelIDS": ["01234567-0000-1111-2222-0123456789ab", "76543210-0000-1111-2222-0123456789ab"]} +``` + +**Return Codes:** + +See `errors`. diff --git a/docs/src/index.md b/docs/src/index.md new file mode 100644 index 000000000..8f8320221 --- /dev/null +++ b/docs/src/index.md @@ -0,0 +1,136 @@ +# Autopush + +Mozilla Push server and Push Endpoint utilizing Rust, Actix, and +a key/value data store. + +This is the fourth generation of Push server built in Mozilla Services, and is built to support the the [W3C Push +spec](http://w3c.github.io/push-api/index.html). + +For how to read and respond to **autopush error codes**, see +[Errors](errors.md). + +For an overview of the Mozilla Push Service and where autopush fits in, +see the [Mozilla Push Service architecture +diagram](architecture.md#architecture). +This push service uses [websockets](https://developer.mozilla.org/en-US/docs/Web/API/WebSockets_API) to talk to Firefox, with a Push +endpoint that implements the [`WebPush`](https://www.rfc-editor.org/rfc/rfc8030.html) standard for its `http` API. + +## Autopush APIs + +For developers writing mobile applications in Mozilla, or web developers using Push on the web with Firefox. + +
+ +* [HTTP Endpoints for Notifications](http.md) +* [Push Service HTTP API](http.md#push-service-http-api) + * [Lexicon](http.md#lexicon) + * [Response](http.md#response) + * [Error Codes](http.md#error-codes) + * [Calls](http.md#calls) +* [Push Service Bridge HTTP Interface](http.md#push-service-bridge-http-interface) + * [Lexicon](http.md#id3) + * [Calls](http.md#id4) + +
+ +## Running Autopush + +If you just want to run autopush, for testing Push locally with Firefox, +or to deploy autopush to a production environment for Firefox. + +
+ +* [Architecture](architecture.md) + * [Overview](architecture.md#overview) + * [Cryptography](architecture.md#cryptography) + * [Storage Tables](architecture.md#storage) + * [Push Characteristics](architecture.md#push-characteristics) +* [Running Autopush](running.md) + * [Overview](running.md#overview) + * [Setup](running.md#setup) + * [Start Autopush](running.md#start-autopush) + * [Configuration](running.md#configuration) + +
+ +## Developing Autopush + +For developers wishing to work with the latest autopush source code, +it's recommended that you first familiarize yourself with +`running Autopush ` before proceeding. + +
+ +* [Installing](install.md) + * [System Requirements](install.md#requirements) + * [Check-out the Autopush Repository](install.md#check-out) + * [Build environment](install.md#build-env) + * [Scripts](install.md#scripts) + * [Building Documentation](install.md#building-documentation) + * [Using a Local Storage Server](install.md#local-storage) +* [Testing](test.md) + * [Testing Configuration](test.md#testing-configuration) + * [Running Tests](test.md#running-tests) + * [Firefox Testing](test.md#firefox-testing) +* [Release Process](release.md) + * [Versions](release.md#versions) + * [Dev Releases](release.md#dev-releases) + * [Stage/Production Releases](release.md#stage-production-releases) +* [Coding Style Guide](style.md) + * [Exceptions](style.md#exceptions) + +
+ +## Source Code + +All source code is available on [github under +autopush](https://github.com/mozilla-services/autopush-rs). + + +* Code Documentation + +We are using [rust](https://rust-lang.org) for a number of optimizations +and speed improvements. These efforts are ongoing and may be subject to +change. Unfortunately, this also means that formal documentation is not +yet available. You are, of course, welcome to review the code located in +`./autopush-rs`. + +## Changelog + +[Changelog](https://github.com/mozilla-services/autopush-rs/blob/master/CHANGELOG.md) + + +## Bugs/Support + +Bugs should be reported on the [autopush github issue +tracker](https://github.com/mozilla-services/autopush-rs/issues). + +## autopush Endpoints + +autopush is automatically deployed from master to a dev environment for +testing, a stage environment for tagged releases, and the production +environment used by Firefox/FirefoxOS. + +### dev + +* Websocket: +* Endpoint: + +### stage + +* Websocket: +* Endpoint: + +### production + +* Websocket: +* Endpoint: + +## Reference + +* [Glossary](glossary.md) +* [Why rust?](rust.md) + +## License + +`autopush` is offered under the Mozilla Public License 2.0. diff --git a/docs/src/install.md b/docs/src/install.md new file mode 100644 index 000000000..e889ff4d6 --- /dev/null +++ b/docs/src/install.md @@ -0,0 +1,237 @@ +# Installing + +## System Requirements + +Autopush requires the following to be installed. Since each system has +different methods and package names, it's best to search for each +package. + +- Rust 1.66 (or later) + +* build-essential (a meta package that includes): + - autoconf + - automake + - gcc + - make + +* (for integration testing) python3 and the python3 development (header files) + +* libffi development + +* openssl development + +* python3 virtualenv + +* git + +For instance, if installing on a Fedora or RHEL-like Linux (e.g. an +Amazon EC2 instance): + +``` bash +$ sudo yum install autoconf automake gcc make libffi-devel \ +openssl-devel pypy pypy-devel python3-virtualenv git -y +``` + +Or a Debian based system (like Ubuntu): + +``` bash +$ sudo apt-get install build-essential libffi-dev \ +libssl-dev pypy-dev python3-virtualenv git --assume-yes +``` + +## Check-out the Autopush Repository + +You should now be able to check-out the autopush repository. + +``` bash +$ git clone https://github.com/mozilla-services/autopush-rs.git +``` + +Alternatively, if you're planning on submitting a patch/pull-request to +autopush then fork the repo and follow the **Github Workflow** documented +in [Mozilla Push Service - Code +Development](http://mozilla-push-service.readthedocs.io/en/latest/development/#code-development). + +## Rust and Cargo + +You can install Rust and Cargo (if not already present on your computer) by following the steps at [rustup.rs](https://rustup.rs), or by installing Rust from your systems package management system. Please note, that currently we require a minimum of rust 1.68. 
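+
+For example, if you take the [rustup.rs](https://rustup.rs) route, the
+installer can be run directly from the shell (this is the standard rustup
+bootstrap command; you may prefer to download and review the script first):
+
+```bash
+# Fetch and run the rustup installer, then load cargo into the current shell.
+curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
+source "$HOME/.cargo/env"
+```
+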
+ +You can find what version of rust you are running using + +```bash +rustc --version +``` + +You can update to the latest version of rust by using + +```bash +rustup update +``` + +You can build all applications by running + +```bash +cargo build +``` + +## Scripts + +After installation of autopush the following command line utilities are +available in the virtualenv `bin/` directory: + +| | | +|-----------------------|-----------------------------------| +| `autopush` | Runs a Connection Node | +| `autoendpoint` | Runs an Endpoint Node | +| `endpoint_diagnostic` | Runs Endpoint diagnostics | +| `autokey` | Endpoint encryption key generator | + +If you are planning on using DynamoDB as your storage, you will need to have a [boto config +file](http://boto3.readthedocs.io/en/docs/guide/quickstart.html#configuration) +file or `AWS` environment keys setup. + +If you are planning on using Google Cloud Bigtable, you will need to configure +your `GOOGLE_APPLICATION_CREDENTIALS`. See [How Application Default Credentials works](https://cloud.google.com/docs/authentication/application-default-credentials) + +## Building Documentation + +To build the documentation, you will need additional packages installed: + +``` bash +cargo install mdbook +``` + +You can then build the documentation: + + +``` bash +cd docs +make html +``` + + + +# Local Storage emulation + +Local storage can be useful for development and testing. It is not advised to use emulated storage for any form of production environment, as there are strong restrictions on the emulators as well as no guarantee of data resiliance. + +Specifying storage is done via two main environment variables / configuration settings. + +**db_dsn** +This specifies the URL to the storage system to use. See following sections for details. + +**db_settings** +This is a serialized JSON dictionary containing the storage specific settings. + +## Using a Local DynamoDB Server + +Amazon supplies a [Local DynamoDB Java +server](http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Tools.DynamoDBLocal.html) +to use for local testing that implements the complete DynamoDB API. This +is used for automated unit testing on Travis and can be used to run +autopush locally for testing. + +You will need the Java JDK 6.x or newer. + +To setup the server locally: + +``` bash +mkdir ddb +curl -sSL http://dynamodb-local.s3-website-us-west-2.amazonaws.com/dynamodb_local_latest.tar.gz | tar xzvC ddb/ +java -Djava.library.path=./ddb/DynamoDBLocal_lib -jar ./ddb/DynamoDBLocal.jar -sharedDb -inMemory +``` + +An example [boto config +file](http://boto3.readthedocs.io/en/docs/guide/quickstart.html#configuration) +is provided in `automock/boto.cfg` that directs autopush to your local +DynamoDB instance. + +The `db_dsn` to use for this would be the same as what is specified in the `AWS_LOCAL_DYNAMODB` environment variable. e.g. `http://127.0.0.1:8000` + +The `db_settings` contains a JSON dictionary indicating the names of the message and router table (remember to escape these values for whatever system you are using): + +```json +{"message_table":"message","router_table":"router"} +``` + +## Using Google Bigtable Emulator locally + +Google supplies [a Bigtable emulator](https://cloud.google.com/sdk/gcloud/reference/beta/emulators) as part of their free [SDK](https://cloud.google.com/sdk). 
Install the [Cloud CLI](https://cloud.google.com/sdk/docs/install), per their instructions, and then start the Bigtable emulator by running + +```bash +gcloud beta emulators bigtable start +``` + +By default, the emulator is started on port 8086. When using the emulator, you will +need to set an environment variable that contains the address to use. + +```bash +export BIGTABLE_EMULATOR_HOST=localhost:8086 +``` + +Bigtable is memory only and does not maintain information between restarts. This +means that you will need to create the table, column families, and policies. + +You can initialize these using the `cbt` command from the SDK: + +```bash +cbt -project test -instance test createtable autopush && \ +cbt -project test -instance test createfamily autopush message && \ +cbt -project test -instance test createfamily autopush message_topic && \ +cbt -project test -instance test createfamily autopush router && \ +cbt -project test -instance test setgcpolicy autopush message maxage=1s && \ +cbt -project test -instance test setgcpolicy autopush router maxversions=1 && \ +cbt -project test -instance test setgcpolicy autopush message_topic maxversions=1 +``` + +The `db_dsn` to access this data store with Autopendpoint would be: +`grpc://localhost:8086` + +The `db_setings` contains a JSON dictionary indicating the names of the message and router families, as well as the path to the table name. + +For example, if we were to use the values from the initializion script above (remember to escape these values for whatever sysetm you are using): + +```json +{"message_family":"message","message_topic_family":"message_topic","router_family":"router","table_name":"projects/test/instances/test/tables/autopush"} +``` + +## Using the "Dual" storage configuration + +Dual is a temporary system to be used to transition user data from one system to another. The "primary" system is read/write, while the "secondary" is read only, and is only read when a value is not found in the "primary" storage. + +Dual's DSN Is `dual`. All connection information is stored in the `db_settings` parameter. (Remember to escape these values for whatever system you are using): + +```json +{"primary":{"db_settings":"{\"message_family\":\"message\",\"router_family\":\"router\",\"table_name\":\"projects/test/instances/test/tables/autopush\"}","dsn":"grpc://localhost:8086"},"secondary":{"db_settings":"{\"message_table\":\"test_message\",\"router_table\":\"test_router\"}","dsn":"http://localhost:8000/"}} +``` + +## Configuring for Third Party Bridge services: + +Working with mobile devices can present many challenges. One very significant one +deals with how mobile devices save battery very aggressively. Using your +mobile devices CPU and radio both require considerable battery power. This means +that maintaining something like a constant connection to a remote server, or +regularly "pinging" a server can cause your device to wake, spin up the CPU and +use the radio to connect to local wifi or cellular networks. This may cause your +application to be quickly flagged by the operating system and either aggressively +deactivated, or be flagged for removal. + +Fortunately, the major mobile OS providers offer a way to send messages to devices +on their networks. These systems operate similarly to the way Push works, but +have their own special considerations. In addition, we want to make sure that +messages remain encrypted while passing through these systems. 
The benefit of +using these sorts of systems is that message delivery is effectively "free", +and apps that use these systems are not flagged for removal. + +Setting up the client portion of these systems is outside the scope of this +document, however the providers of these networks have great documentation that +can help get you started. + +As a bit of shorthand, we refer to these proprietary mobile messaging systems as +"bridge" systems, since they act as a metaphorical bridge between our servers and +our applications. + +How we connect and use these systems is described in the following documents: + +* [Apple Push Notification service (APNs)](apns.md) +* [Google's Fire Cloud Messaging service (FCM)](fcm.md) \ No newline at end of file diff --git a/docs/src/releasing.md b/docs/src/releasing.md new file mode 100644 index 000000000..ba1a8a060 --- /dev/null +++ b/docs/src/releasing.md @@ -0,0 +1,114 @@ +Release Process {#releasing} +=============== + +
+**NOTE**: This page is outdated; however, the "Release Steps" are still a useful checklist.
+
+ +Autopush has a regular 2-3 week release to production depending on +developer and QA availability. The developer creating a release should +handle all aspects of the following process as they\'re done closely in +order and time. + +Versions +-------- + +Autopush uses a `{major}.{minor}.{patch}` version scheme, new `{major}` +versions are only issued if backwards compatibility is affected. Patch +versions are used if a critical bug occurs after production deployment +that requires a bug fix immediately. + +Dev Releases +------------ + +When changes are committed to the `master` branch, an operations Jenkins +instance will build and deploy the code automatically to the dev +environment. + +The development environment can be verified at its endpoint/wss +endpoints: + +* Websocket: +* Endpoint: + +Stage/Production Releases +------------------------- + +### Pre-Requisites + +To create a release, you will need appropriate access to the autopush +GitHub repository with push permission. + +You will also need [clog](https://github.com/clog-tool/clog-cli) +installed to create the `CHANGELOG.md` update. + +### Release Steps + +In these steps, the `{version}` refers to the full version of the +release. + +i.e. If a new minor version is being released after `1.21.0`, the +`{version}` would be `1.22.0`. + +1. Switch to the `master` branch of autopush. +2. `git pull` to ensure the local copy is completely up-to-date. +3. `git diff origin/master` to ensure there are no local staged or + uncommited changes. +4. Run `tox` locally to ensure no artifacts or other local changes that + might break tests have been introduced. +5. Change to the release branch. + + If this is a new major/minor release, + `git checkout -b release/{major}.{minor}` to create a new release + branch. + + If this is a new patch release, you will first need to ensure you + have the minor release branch checked out, then: + + > 1. `git checkout release/{major}.{minor}` + > 2. `git pull` to ensure the branch is up-to-date. + > 3. `git merge master` to merge the new changes into the release + > branch. + + **Note that the release branch does not include a \`\`{patch}\`\` + component**. + +6. Edit `autopush/__init__.py` so that the version number reflects the + desired release version. +7. Run `clog --setversion {version}`, verify changes were properly + accounted for in `CHANGELOG.md`. +8. `git add CHANGELOG.md autopush/__init__.py` to add the two changes + to the new release commit. +9. `git commit -m "chore: tag {version}"` to commit the new version and + record of changes. +10. `git tag -s -m "chore: tag {version}" {version}` to create a signed + tag of the current HEAD commit for release. +11. `git push --set-upstream origin release/{major}.{minor}` to push the + commits to a new origin release branch. +12. `git push --tags origin release/{major}.{minor}` to push the tags to + the release branch. +13. Submit a pull request on github to merge the release branch to + master. +14. Go to the [autopush releases + page](https://github.com/mozilla-services/autopush/releases), you + should see the new tag with no release information under it. +15. Click the `Draft a new release` button. +16. Enter the tag for `Tag version`. +17. Copy/paste the changes from `CHANGELOG.md` into the release + description omitting the top 2 lines (the a name HTML and the + version) of the file. + + Keep these changes handy, you\'ll need them again shortly. + +18. Once the release branch pull request is approved and merged, click + `Publish Release`. +19. 
File a bug for stage deployment in Bugzilla, in the `Cloud Services`
+    product, under the `Operations: Deployment Requests` component. It
+    should be titled `Please deploy autopush {major}.{minor} to STAGE`
+    and include the changes in the Description along with any additional
+    instructions to operations regarding deployment changes and special
+    test cases if needed for QA to verify.
+
+At this point, QA will take over, verify stage, and create a production
+deployment Bugzilla ticket. QA will also schedule production deployment
+for the release.
diff --git a/docs/src/running.md b/docs/src/running.md
new file mode 100644
index 000000000..6768363ae
--- /dev/null
+++ b/docs/src/running.md
@@ -0,0 +1,215 @@
+# Running Autopush
+
+## Overview
+
+To run Autopush, you will need to run at least one connection node, one
+endpoint node, and a local DynamoDB server or AWS DynamoDB. The prior
+section on Autopush architecture documented these components and their
+relation to each other.
+
+The recommended way to run the latest development or tagged Autopush
+release is to use [docker](https://www.docker.com/). Autopush has
+[docker](https://www.docker.com/) images built automatically for every
+tagged release and when code is merged to master.
+
+If you want to run the latest Autopush code from source then you should
+follow the `developing` instructions.
+
+The instructions below assume that you want to run Autopush with a local
+DynamoDB server for testing or local verification. The docker containers
+can be run on separate hosts as well, or with AWS DynamoDB instead.
+
+## Setup
+
+*#TODO* rebuild the docker-compose.yaml files based on the syncstorage ones.
+
+- [ ] rebuild docker-compose.yaml
+  - [ ] selectable for dynamodb, bigtable, dual
+  - [ ] initialize tables
+  - [ ]
+- [ ] define steps here
+
+These instructions will yield a locally running Autopush setup with the +connection node listening on localhost port `8080`, with the endpoint +node listening on localhost port `8082`. Make sure these ports are +available on localhost before running, or change the configuration to +have the Autopush daemons use other ports. + +1. Install [docker](https://www.docker.com/) + +2. Install [docker-compose](https://docs.docker.com/compose/) + +3. Create a directory for your docker and Autopush configuration: + + > ``` bash + > $ mkdir autopush-config + > $ cd autopush-config + > ``` + +4. Fetch the latest `docker-compose.yml` file: + + > ``` bash + > $ curl -O https://raw.githubusercontent.com/mozilla-services/autopush/master/docker-compose.yml + > ``` + +> _**Note**_: The docker images used take approximately 1.5 GB of disk-space, make +sure you have appropriate free-space before proceeding. + +
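+
+Optionally, before continuing, you can sanity-check the file you just
+downloaded and pre-fetch the images (a small sketch; both commands assume
+you are still in the `autopush-config` directory created above):
+
+```bash
+# Validate docker-compose.yml and print the resolved configuration.
+docker-compose config
+# Pre-download the container images referenced by docker-compose.yml.
+docker-compose pull
+```
+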
+ +### Generate a Crypto-Key + +As the `cryptography` section notes, you will need a `CRYPTO_KEY` to run +both of the Autopush daemons. To generate one with the docker image: + +``` bash +$ docker run -t -i mozilla-services/autopush-rs autokey +CRYPTO_KEY="hkclU1V37Dnp-0DMF9HLe_40Nnr8kDTYVbo2yxuylzk=" +``` + +Store the key for later use (including any trailing `=`). + +## Start Autopush + +Once you've completed the setup and have a crypto key, you can run a +local Autopush with a single command: + +``` bash +$ CRYPTO_KEY="hkclU1V37Dnp-0DMF9HLe_40Nnr8kDTYVbo2yxuylzk=" docker-compose up +``` + +[docker-compose](https://docs.docker.com/compose/) will start up three +containers, two for each Autopush daemon, and a third for storage. + +By default, the following services will be exposed: + +`ws://localhost:8080/` - websocket server + +`http://localhost:8082/` - HTTP Endpoint Server (See [the HTTP API](http.md)) + +You could set the `CRYPTO_KEY` as an environment variable if you are +using Docker. If you are running these programs "stand-alone" or outside +of docker-compose, you may setup a more thorough configuration using +config files as documented below. + +> _**Note**_: The load-tester can be run against it or you can run Firefox with the +local Autopush per the `test-with-firefox` docs. + +## Configuration + +Autopush can be configured in three ways; by option flags, by +environment variables, and by configuration files. Autopush uses three +configuration files. These files use standard `ini` formatting similar to the following: + +``` cfg +# A comment description +;a_disabled_option +;another_disabled_option=default_value +option=value +``` + +Options can either have values or act as boolean flags. If the option is +a flag it is either True if enabled, or False if disabled. The +configuration files are usually richly commented, and you're encouraged +to read them to learn how to set up your installation of autopush. + +*Note*: any line that does not begin with a `\#` or `;` is +considered an option line. if an unexpected option is present in a +configuration file, the application will fail to start. + +Configuration files can be located in: + +* in the /etc/ directory +* in the configs subdirectory +* in the $HOME or current directory (prefixed by a period '.') + +The three configuration files are: + +* *autopush_connection.ini* - contains options for use by the + websocket handler. This file's path can be specified by the + `--config-connection` option. +* *autopush_shared.ini* - contains options shared between the + connection and endpoint handler. This file's path can be specified + by the `--config-shared` option. +* *autopush_endpoint.ini* - contains options for the HTTP handlers + This file's path can be specified by the `--config-endpoint` option. + +### Sample Configurations + +Three sample configurations, a base config, and a config for each +Autopush daemon can be found at + + +These can be downloaded and modified as desired. + +### Config Files with Docker + +To use a configuration file with [docker](https://www.docker.com/), +ensure the config files are accessible to the user running +[docker-compose](https://docs.docker.com/compose/). Then you will need +to update the `docker-compose.yml` to use the config files and make them +available to the appropriate docker containers. 
+
+Mounting a config file into a docker container is fairly simple. For
+instance, to mount a local file `autopush_connection.ini` into a
+container as `/etc/autopush_connection.ini`, update the `autopush`
+section of the `docker-compose.yml` to be:
+
+``` yaml
+volumes:
+  - ./boto-compose.cfg:/etc/boto.cfg:ro
+  - ./autopush_connection.ini:/etc/autopush_connection.ini
+```
+
+Autopush automatically searches for a configuration file at this
+location, so nothing else is needed.
+
+*Note*: The `docker-compose.yml` file provides a number of overrides as
+environment variables, such as `CRYPTO_KEY`. If these values are not
+defined, they are submitted as `""`, which will prevent values from
+being read from the config files. In the case of `CRYPTO_KEY`, a new,
+random key is automatically generated, which will result in existing
+endpoints no longer being valid. It is recommended that, for docker
+based images, you **always** supply a `CRYPTO_KEY` as part of the run
+command.
+
+### Notes on GCM/FCM support
+
+*Note*: GCM is no longer supported by Google. Some legacy users can
+still use GCM, but it is strongly recommended that applications use FCM.
+
+Autopush is capable of routing messages over Firebase Cloud Messaging
+for Android devices. You will need to set up a valid
+[FCM](https://firebase.google.com/docs/cloud-messaging/) account. Once
+you have an account, open the Google Developer Console:
+
+* Create a new project. Record the Project Number as "SENDER_ID". You
+  will need this value for your Android application.
+
+* In the `.autopush_endpoint` server config file:
+
+  > - add `fcm_enabled` to enable FCM routing.
+  >
+  > - add `fcm_creds`. This is a json block with the following
+  >   format:
+  >
+  >   {"**app id**": {"projectid": "**project id name**", "auth":
+  >   "**path to Private Key File**"}, ...}
+
+where:
+
+**app_id**: the URL identifier to be used when registering endpoints.
+(e.g. if "reference_test" is chosen here, registration requests should
+go to `https://updates.push.services.mozilla.com/v1/fcm/reference_test/registration`)
+
+**project id name**: the name of the **Project ID** as specified on the
+Project Settings \> General page.
+
+**path to Private Key File**: path to the Private Key file provided by
+the Settings \> Service accounts \> Firebase Admin SDK page. *NOTE*:
+This is **not** the "google-services.json" config file.
+
+Additional notes on using the FCM bridge are available [on the
+wiki](https://github.com/mozilla-services/autopush/wiki/Bridging-Via-GCM).
diff --git a/docs/src/rust.md b/docs/src/rust.md
new file mode 100644
index 000000000..d3b45c07e
--- /dev/null
+++ b/docs/src/rust.md
@@ -0,0 +1,40 @@
+# Migrating to Rust
+
+Progress never comes from resting. One of the significant considerations
+of running a service that needs to communicate with hundreds of millions
+of clients is cost. We are forced to continually evaluate and optimize.
+When a lower cost option is presented, we seriously consider it.
+
+There is some risk, of course, so rapid change is avoided and testing is
+strongly encouraged. As of early 2018, the decision was made to move the
+costlier elements of the server to Rust. The Rust-based application is
+at [autopush-rs](https://github.com/mozilla-services/autopush-rs).
+
+## Why Rust?
+
+Rust is a strongly typed, memory-efficient language. It has matured
+rapidly and offers structure that vastly reduces the memory requirements
+for running connections.
+As a bonus, it's also forced us to handle
+potential bugs, making the service more reliable.
+
+The current Python environment we use (PyPy) continues to improve as
+well, but does not offer the sort of improvements that Rust does when it
+comes to handling socket connections.
+
+To that end, we're continuing to use PyPy for the endpoint connection
+management for the time being.
+
+## When is the switch going to happen?
+
+As of the end of June 2018, our Rust handler is in testing. We expect to
+deploy it soon, but since this deployment should not impact external
+users, we're not rushing to deploy just to hit an arbitrary milestone.
+It will be deployed when all parties have determined it's ready.
+
+## What will happen to autopush?
+
+Currently, the plan is to maintain autopush so long as it's in
+production use, since we plan on continuing to have it handle endpoints
+for some period even after autopush-rs has been deployed to production
+and is handling connections. However, we do reserve the right to archive
+this repo at some future date.
diff --git a/docs/src/style.md b/docs/src/style.md
new file mode 100644
index 000000000..83ca24694
--- /dev/null
+++ b/docs/src/style.md
@@ -0,0 +1,7 @@
+# Coding Style Guide
+
+Autopush uses Rust styling guides based on
+`cargo fmt` and `cargo clippy`.
+
+## Exceptions
+
diff --git a/docs/src/table_rotation.md b/docs/src/table_rotation.md
new file mode 100644
index 000000000..937368ed3
--- /dev/null
+++ b/docs/src/table_rotation.md
@@ -0,0 +1,101 @@
+# DynamoDB Message Table Rotation (legacy)
+
+As of version 1.45.0, message table rotation can be disabled. This is
+because DynamoDB now provides automatic entry expiration, which is
+controlled in our data by the "expiry" field. (_**Note**_: field
+expiration is only available in full DynamoDB, and is not replicated
+with the mock DynamoDB API provided for development.) Table rotation is
+disabled by setting the `no_table_rotation` flag in the
+`autopush_shared.ini` configuration file.
+
+If table rotation is disabled, the last message table used will become
+'frozen' and will be used for all future messages. While this may not be
+aesthetically pleasing, it's more efficient than copying data to a new,
+generic table. If preferred, the service can be shut down, previous
+tables dropped, the current table renamed, and the service brought up
+again.
+
+**Message Table Rotation information**
+
+To avoid costly table scans, autopush used a rotating message and
+router table. Clients that hadn't connected in 30-60 days would have
+their router and message table entries dropped and needed to
+re-register. Tables were post-fixed with the year/month they were meant
+for, e.g.:
+
+    messages_2015_02
+
+Tables must have been created and had their read/write units properly
+allocated by a separate process in advance of the month switch-over, as
+autopush nodes would assume the tables already existed. Scripts [were
+provided](https://github.com/mozilla-services/autopush/blob/master/maintenance.py)
+that could be run weekly to ensure all necessary tables were
+present and tables old enough were dropped.
+
+Within a few days of the new month, the load on the prior month's table
+would fall as clients transitioned to the new table. The read/write
+units on the prior month's table could then be lowered.
+
+## DynamoDB Rotating Message Table Interaction Rules (legacy)
+
+Due to the complexity of having notifications spread across two tables,
+several rules are used to avoid losing messages during the month
+transition.
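+
+The rules below all hinge on which month-postfixed table a client is
+"on". As a quick illustration of the naming convention described above,
+here is a minimal sketch of deriving the current and previous table
+names (GNU `date` is assumed; the `messages_` prefix follows the
+`messages_2015_02` example):
+
+``` bash
+# Derive current and previous table names using the messages_YYYY_MM
+# postfix convention described above.
+current_table="messages_$(date +%Y_%m)"
+previous_table="messages_$(date --date='1 month ago' +%Y_%m)"
+echo "current:  $current_table"
+echo "previous: $previous_table"
+```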
+
+The logic for connection nodes is more complex, since only the
+connection node knows when the client connects and how many messages it
+has read through.
+
+When table rotation is allowed, the router table uses the `curmonth`
+field to indicate the last month the client has read notifications
+through. This is independent of `last_connect`, since it is possible
+for a client to connect, fail to read its notifications, then reconnect.
+This field is updated for a new month when the client connects **after**
+it has ack'd all the notifications out of the last month.
+
+To avoid issues with time synchronization, the node the client is
+connected to acts as the source of truth for when the month has flipped
+over. Clients are only moved to the new table on connect, and only after
+reading/acking all the notifications for the prior month.
+
+#### Rules for Endpoints
+
+1. Check the router table to see the current_month the client is on.
+
+2. Read the chan list entry from the appropriate month's message table
+   to see if it's a valid channel.
+
+   If it's valid, move to step 3.
+
+3. Store the notification in the current month's table if valid. (_**Note**_
+   that this step does not copy the blank entry of valid channels.)
+
+#### Rules for Connection Nodes
+
+After identification:
+
+1. Check to see if the current_month matches the current month. If it
+   does, then proceed normally using the current month's message table.
+
+   If the connection node month does not match the stored current_month
+   in the client's router table entry, proceed to step 2.
+
+2. Read notifications from the prior month and send them to the client.
+
+   Once all ACKs are received for all the notifications for that month,
+   proceed to step 3.
+
+3. Copy the blank message entry of valid channels to the new month's
+   message table.
+
+4. Update the router table for the current_month.
+
+During switchover, new commands from the client are accepted only after
+the router table update.
+
+Handling of edge cases:
+
+* The connection node gets more notifications during step 3, enough to
+  buffer, such that the endpoint starts storing them in the previous
+  current_month. In this case, the connection node will check the old
+  table, then the new table, to ensure it doesn't lose messages during
+  the switch.
+* The connection node dies, or the client disconnects, during step 3/4.
+  Not a problem, as the reconnect will pick it up at the right spot.
diff --git a/docs/src/testing.md b/docs/src/testing.md
new file mode 100644
index 000000000..8b1b71964
--- /dev/null
+++ b/docs/src/testing.md
@@ -0,0 +1,80 @@
+# Testing
+
+## Testing Configuration
+
+When testing, it's important to reduce the number of potential conflicts
+as much as possible. To that end, it's advised to have as clean a
+testing environment as possible before running tests.
+
+This includes:
+
+* Making sure notifications are not globally blocked by your browser.
+* Making sure "Do Not Disturb" or similar "distraction free" mode is
+  disabled on your OS.
+* Running a "fresh" Firefox profile (start `firefox -P` to display the
+  profile picker) which does not have extra extensions or optional
+  plug-ins running.
+
+You may find it useful to run Firefox in a Virtual Machine (like
+VirtualBox or VMware), but this is not required.
+
+In addition, it may be useful to open the Firefox Browser Console
+(Ctrl+Shift+J) as well as the Firefox Web Console (Ctrl+Shift+K). Both
+are located under the **Web Developer** sub-menu.
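+
+If you want a disposable profile rather than picking one interactively,
+you can point Firefox at a temporary profile directory. A minimal sketch
+(the directory is arbitrary; `--no-remote` keeps the test instance
+separate from any already-running Firefox):
+
+``` bash
+# Launch Firefox with a throwaway profile for push testing.
+mkdir -p /tmp/autopush-test-profile
+firefox --no-remote --profile /tmp/autopush-test-profile &
+```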
+
+## Running Tests
+
+If you plan on doing development and testing, you will need to install
+some additional packages.
+
+``` bash
+$ bin/pip install -r test-requirements.txt
+```
+
+Once the Makefile has been run, you can run `make test` to run the test
+suite.
+
+*Note*: Failures may occur if a `.boto` file exists in your home
+directory. This file should be moved elsewhere before running the tests.
+
+### Disabling Integration Tests
+
+`make test` runs the `tox` program, which can be difficult to break for
+debugging purposes. The following bash script has been useful for
+running tests outside of tox:
+
+``` bash
+#! /bin/bash
+mv autopush/tests/test_integration.py{,.hold}
+mv autopush/tests/test_logging.py{,.hold}
+bin/nosetests -sv autopush
+mv autopush/tests/test_integration.py{.hold,}
+mv autopush/tests/test_logging.py{.hold,}
+```
+
+This script prevents the integration and logging tests from running.
+
+## Firefox Testing
+
+To test a locally running Autopush with Firefox, you will need to edit
+several config variables in Firefox.
+
+1. Open a New Tab.
+2. Go to `about:config` in the Location bar, hit Enter, and accept the
+   disclaimer if it's shown.
+3. Search for `dom.push.serverURL` and make a note of the existing value
+   (you can right-click the preference and choose `Reset` to restore
+   the default).
+4. Double-click the entry and change it to `ws://localhost:8080/`.
+5. Right-click in the page and choose `New -> Boolean`, name it
+   `dom.push.testing.allowInsecureServerURL` and set it to `true`.
+
+You should then restart Firefox to begin using your local Autopush.
+
+### Debugging
+
+On Android, you can set `dom.push.debug` to enable debug logging of Push
+via `adb logcat`.
+
+For desktop use, you can set `dom.push.loglevel` to `"debug"`. This will
+log all push messages to the Browser Console (Tools \> Web Developer \>
+Browser Console).
diff --git a/docs/style.css b/docs/style.css
new file mode 100644
index 000000000..492582b5f
--- /dev/null
+++ b/docs/style.css
@@ -0,0 +1 @@
+h5 {font-weight:bold;}
\ No newline at end of file