From 20c446bc29847fdf611fd47b70fa920697a4a2f2 Mon Sep 17 00:00:00 2001 From: Ethan Donowitz Date: Fri, 4 Feb 2022 20:18:57 +0000 Subject: [PATCH] feat: add BrowserID support to Tokenserver Closes #1215 --- .circleci/config.yml | 2 +- Cargo.lock | 329 +++++++++--- Cargo.toml | 9 +- Dockerfile | 5 +- Makefile | 4 +- config/local.example.toml | 4 +- docker-compose.e2e.mysql.yaml | 7 +- docker-compose.e2e.spanner.yaml | 7 +- docker-compose.mysql.yaml | 8 + docker-compose.spanner.yaml | 7 + src/settings.rs | 24 +- src/tokenserver/README.md | 20 +- src/tokenserver/auth/browserid.rs | 479 +++++++++++++++++ src/tokenserver/auth/mod.rs | 158 ++++++ src/tokenserver/auth/oauth.rs | 113 ++++ src/tokenserver/auth/secrets.py | 24 + src/tokenserver/{ => auth}/verify.py | 0 src/tokenserver/db/models.rs | 52 +- src/tokenserver/error.rs | 9 + src/tokenserver/extractors.rs | 492 ++++++++++++------ src/tokenserver/handlers.rs | 54 +- src/tokenserver/mod.rs | 46 +- src/tokenserver/settings.rs | 40 +- src/tokenserver/support.rs | 270 ---------- tools/integration_tests/run.py | 15 +- .../tokenserver/mock_fxa_server.py | 75 +++ tools/integration_tests/tokenserver/run.py | 10 +- .../tokenserver/test_authorization.py | 364 ++++++------- .../tokenserver/test_browserid.py | 476 +++++++++++++++++ .../integration_tests/tokenserver/test_e2e.py | 131 ++++- .../tokenserver/test_misc.py | 108 ++-- .../tokenserver/test_node_assignment.py | 31 +- .../tokenserver/test_support.py | 82 ++- tools/tokenserver/test_database.py | 44 +- 34 files changed, 2493 insertions(+), 1006 deletions(-) create mode 100644 src/tokenserver/auth/browserid.rs create mode 100644 src/tokenserver/auth/mod.rs create mode 100644 src/tokenserver/auth/oauth.rs create mode 100644 src/tokenserver/auth/secrets.py rename src/tokenserver/{ => auth}/verify.py (100%) delete mode 100644 src/tokenserver/support.rs create mode 100644 tools/integration_tests/tokenserver/mock_fxa_server.py create mode 100644 tools/integration_tests/tokenserver/test_browserid.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 59fc699a94..d28b2359c1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -56,7 +56,7 @@ commands: steps: - run: name: cargo build - command: cargo build --features tokenserver_test_mode + command: cargo build setup-gcp-grpc: steps: - run: diff --git a/Cargo.lock b/Cargo.lock index 626ea4a822..7a22dd4aea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,8 +14,8 @@ dependencies = [ "futures-sink", "log", "pin-project 0.4.29", - "tokio", - "tokio-util", + "tokio 0.2.25", + "tokio-util 0.3.1", ] [[package]] @@ -77,7 +77,7 @@ dependencies = [ "futures-core", "futures-util", "fxhash", - "h2", + "h2 0.2.7", "http", "httparse", "indexmap", @@ -133,7 +133,7 @@ dependencies = [ "futures-channel", "futures-util", "smallvec", - "tokio", + "tokio 0.2.25", ] [[package]] @@ -149,7 +149,7 @@ dependencies = [ "futures-channel", "futures-util", "log", - "mio", + "mio 0.6.23", "mio-uds", "num_cpus", "slab", @@ -345,6 +345,16 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" +[[package]] +name = "assert-json-diff" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f1c3703dd33532d7f0ca049168930e9099ecac238e23cf932f3a69c42f06da" +dependencies = [ + "serde 1.0.135", + "serde_json", +] + [[package]] name = "async-trait" version = "0.1.52" @@ -438,7 +448,7 @@ checksum = 
"374bba43fc924d90393ee7768e6f75d223a98307a488fe5bc34b66c3e96932a6" dependencies = [ "async-trait", "futures 0.3.19", - "tokio", + "tokio 0.2.25", ] [[package]] @@ -617,6 +627,17 @@ dependencies = [ "cc", ] +[[package]] +name = "colored" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd" +dependencies = [ + "atty", + "lazy_static", + "winapi 0.3.9", +] + [[package]] name = "config" version = "0.10.1" @@ -799,7 +820,7 @@ dependencies = [ "crossbeam-queue", "num_cpus", "serde 1.0.135", - "tokio", + "tokio 0.2.25", ] [[package]] @@ -869,6 +890,12 @@ dependencies = [ "migrations_macros", ] +[[package]] +name = "difference" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" + [[package]] name = "digest" version = "0.9.0" @@ -917,6 +944,12 @@ dependencies = [ "strsim", ] +[[package]] +name = "dyn-clone" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee2626afccd7561a06cf1367e2950c4718ea04565e20fb5029b6c7d8ad09abcf" + [[package]] name = "either" version = "1.6.1" @@ -1260,12 +1293,31 @@ dependencies = [ "http", "indexmap", "slab", - "tokio", - "tokio-util", + "tokio 0.2.25", + "tokio-util 0.3.1", "tracing", "tracing-futures", ] +[[package]] +name = "h2" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f1f717ddc7b2ba36df7e871fd88db79326551d3d6f1fc406fbfd28b582ff8e" +dependencies = [ + "bytes 1.1.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio 1.16.1", + "tokio-util 0.6.9", + "tracing", +] + [[package]] name = "hashbrown" version = "0.11.2" @@ -1363,6 +1415,17 @@ dependencies = [ "http", ] +[[package]] +name = "http-body" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +dependencies = [ + "bytes 1.1.0", + "http", + "pin-project-lite 0.2.8", +] + [[package]] name = "httparse" version = "1.5.1" @@ -1375,6 +1438,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + [[package]] name = "humantime" version = "2.1.0" @@ -1391,15 +1460,39 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.2.7", "http", - "http-body", + "http-body 0.3.1", "httparse", - "httpdate", + "httpdate 0.3.2", "itoa 0.4.8", "pin-project 1.0.10", "socket2 0.3.19", - "tokio", + "tokio 0.2.25", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper" +version = "0.14.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" +dependencies = [ + "bytes 1.1.0", + "futures-channel", + "futures-core", + "futures-util", + "h2 0.3.11", + "http", + "http-body 0.4.4", + "httparse", + "httpdate 1.0.2", + "itoa 0.4.8", + "pin-project-lite 0.2.8", + "socket2 0.4.3", + "tokio 1.16.1", "tower-service", "tracing", "want", @@ -1412,12 +1505,25 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d979acc56dcb5b8dddba3917601745e877576475aa046df3226eabdecef78eed" dependencies = [ "bytes 0.5.6", - "hyper", + "hyper 0.13.10", "native-tls", - "tokio", + "tokio 0.2.25", "tokio-tls", ] +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes 1.1.0", + "hyper 0.14.16", + "native-tls", + "tokio 1.16.1", + "tokio-native-tls", +] + [[package]] name = "idna" version = "0.1.5" @@ -1550,20 +1656,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jsonwebtoken" -version = "7.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afabcc15e437a6484fc4f12d0fd63068fe457bf93f1c148d3d9649c60b103f32" -dependencies = [ - "base64 0.12.3", - "pem", - "ring", - "serde 1.0.135", - "serde_json", - "simple_asn1", -] - [[package]] name = "kernel32-sys" version = "0.2.2" @@ -1750,12 +1842,25 @@ dependencies = [ "kernel32-sys", "libc", "log", - "miow", + "miow 0.2.2", "net2", "slab", "winapi 0.2.8", ] +[[package]] +name = "mio" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +dependencies = [ + "libc", + "log", + "miow 0.3.7", + "ntapi", + "winapi 0.3.9", +] + [[package]] name = "mio-uds" version = "0.6.8" @@ -1764,7 +1869,7 @@ checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0" dependencies = [ "iovec", "libc", - "mio", + "mio 0.6.23", ] [[package]] @@ -1779,6 +1884,33 @@ dependencies = [ "ws2_32-sys", ] +[[package]] +name = "miow" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "mockito" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d10030163d67f681db11810bc486df3149e6d91c8b4f3f96fa8b62b546c2cef8" +dependencies = [ + "assert-json-diff", + "colored", + "difference", + "httparse", + "lazy_static", + "log", + "rand 0.8.4", + "regex", + "serde_json", + "serde_urlencoded", +] + [[package]] name = "mysqlclient-sys" version = "0.2.4" @@ -1830,14 +1962,12 @@ dependencies = [ ] [[package]] -name = "num-bigint" -version = "0.2.6" +name = "ntapi" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" dependencies = [ - "autocfg", - "num-integer", - "num-traits 0.2.14", + "winapi 0.3.9", ] [[package]] @@ -1991,17 +2121,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" -[[package]] -name = "pem" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd56cbd21fea48d0c440b41cd69c589faacade08c992d9a54e471b79d0fd13eb" -dependencies = [ - "base64 0.13.0", - "once_cell", - "regex", -] - [[package]] name = "percent-encoding" version = "1.0.1" @@ -2350,9 +2469,9 @@ dependencies = [ "futures-core", "futures-util", "http", - "http-body", - "hyper", - "hyper-tls", + "http-body 0.3.1", + "hyper 0.13.10", + "hyper-tls 0.4.3", "ipnet", "js-sys", "lazy_static", @@ 
-2365,7 +2484,7 @@ dependencies = [ "serde 1.0.135", "serde_json", "serde_urlencoded", - "tokio", + "tokio 0.2.25", "tokio-tls", "url 2.2.2", "wasm-bindgen", @@ -2374,6 +2493,42 @@ dependencies = [ "winreg 0.7.0", ] +[[package]] +name = "reqwest" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f242f1488a539a79bac6dbe7c8609ae43b7914b7736210f239a37cccb32525" +dependencies = [ + "base64 0.13.0", + "bytes 1.1.0", + "encoding_rs", + "futures-core", + "futures-util", + "h2 0.3.11", + "http", + "http-body 0.4.4", + "hyper 0.14.16", + "hyper-tls 0.5.0", + "ipnet", + "js-sys", + "lazy_static", + "log", + "mime", + "native-tls", + "percent-encoding 2.1.0", + "pin-project-lite 0.2.8", + "serde 1.0.135", + "serde_json", + "serde_urlencoded", + "tokio 1.16.1", + "tokio-native-tls", + "url 2.2.2", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg 0.7.0", +] + [[package]] name = "resolv-conf" version = "0.7.0" @@ -2532,8 +2687,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebd0927ec4a785fc4328abe9089afbe074b3874983b3373fc328a73a9f8310cb" dependencies = [ "curl", - "httpdate", - "reqwest", + "httpdate 0.3.2", + "reqwest 0.10.10", "sentry-backtrace", "sentry-contexts", "sentry-core", @@ -2734,17 +2889,6 @@ dependencies = [ "libc", ] -[[package]] -name = "simple_asn1" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "692ca13de57ce0613a363c8c2f1de925adebc81b04c923ac60c5488bb44abe4b" -dependencies = [ - "chrono", - "num-bigint", - "num-traits 0.2.14", -] - [[package]] name = "sized-chunks" version = "0.5.3" @@ -2986,6 +3130,7 @@ dependencies = [ "diesel_logger", "diesel_migrations", "docopt", + "dyn-clone", "env_logger", "futures 0.3.19", "google-cloud-rust-raw", @@ -2996,15 +3141,16 @@ dependencies = [ "hmac", "hostname", "http", - "jsonwebtoken", "lazy_static", "log", "mime", + "mockito", "num_cpus", "protobuf", "pyo3", "rand 0.8.4", "regex", + "reqwest 0.11.9", "scheduled-thread-pool", "sentry", "sentry-backtrace", @@ -3021,7 +3167,7 @@ dependencies = [ "slog-term", "thiserror", "time 0.3.6", - "tokio", + "tokio 0.2.25", "url 2.2.2", "urlencoding", "uuid", @@ -3206,7 +3352,7 @@ dependencies = [ "lazy_static", "libc", "memchr", - "mio", + "mio 0.6.23", "mio-uds", "num_cpus", "pin-project-lite 0.1.12", @@ -3216,6 +3362,21 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "tokio" +version = "1.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a" +dependencies = [ + "bytes 1.1.0", + "libc", + "memchr", + "mio 0.7.14", + "num_cpus", + "pin-project-lite 0.2.8", + "winapi 0.3.9", +] + [[package]] name = "tokio-macros" version = "0.2.6" @@ -3227,6 +3388,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" +dependencies = [ + "native-tls", + "tokio 1.16.1", +] + [[package]] name = "tokio-tls" version = "0.3.1" @@ -3234,7 +3405,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a70f4fcd7b3b24fb194f837560168208f669ca8cb70d0c4b862944452396343" dependencies = [ "native-tls", - "tokio", + "tokio 0.2.25", ] [[package]] @@ -3248,7 +3419,21 @@ dependencies = [ "futures-sink", "log", "pin-project-lite 0.1.12", - "tokio", + "tokio 0.2.25", +] + +[[package]] 
+name = "tokio-util" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +dependencies = [ + "bytes 1.1.0", + "futures-core", + "futures-sink", + "log", + "pin-project-lite 0.2.8", + "tokio 1.16.1", ] [[package]] @@ -3313,7 +3498,7 @@ dependencies = [ "rand 0.7.3", "smallvec", "thiserror", - "tokio", + "tokio 0.2.25", "url 2.2.2", ] @@ -3332,7 +3517,7 @@ dependencies = [ "resolv-conf", "smallvec", "thiserror", - "tokio", + "tokio 0.2.25", "trust-dns-proto", ] diff --git a/Cargo.toml b/Cargo.toml index 8c2ec4a895..50c0148711 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,6 +36,7 @@ diesel = { version = "1.4", features = ["mysql", "r2d2"] } diesel_logger = "0.1.1" diesel_migrations = { version = "1.4.0", features = ["mysql"] } docopt = "1.1.0" +dyn-clone = "1.0.4" env_logger = "0.9" futures = { version = "0.3", features = ["compat"] } google-cloud-rust-raw = "0.11.0" @@ -44,7 +45,6 @@ google-cloud-rust-raw = "0.11.0" # `cargo build --features grpcio/openssl ...` grpcio = { version = "0.9" } lazy_static = "1.4.0" -pyo3 = { version = "0.14", features = ["auto-initialize"] } hawk = "3.2" hex = "0.4.3" hostname = "0.3.1" @@ -53,11 +53,14 @@ hmac = "0.11" http = "0.2.5" log = { version = "0.4", features = ["max_level_debug", "release_max_level_info"] } mime = "0.3" +mockito = "0.30.0" num_cpus = "1" # must match what's used by googleapis-raw protobuf = "2.20.0" +pyo3 = { version = "0.14", features = ["auto-initialize"] } rand = "0.8" regex = "1.4" +reqwest = { version = "0.11.9", features = ["blocking", "json"] } # pin to 0.19: https://github.com/getsentry/sentry-rust/issues/277 sentry = { version = "0.19", features = ["with_curl_transport"] }# pin to 0.19 until on-prem sentry server is updated sentry-backtrace = "0.19" @@ -84,12 +87,8 @@ validator = "0.14" validator_derive = "0.14" woothee = "0.11" -[dev-dependencies] -jsonwebtoken = "7.2.0" - [features] no_auth = [] -tokenserver_test_mode = [] [[bin]] name = "purge_ttl" diff --git a/Dockerfile b/Dockerfile index a5f4d99927..c454c01d7c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,8 +11,8 @@ RUN apt-get -q update && \ RUN \ cargo --version && \ rustc --version && \ - cargo install --features tokenserver_test_mode --path . --locked --root /app && \ - cargo install --features tokenserver_test_mode --path . --bin purge_ttl --locked --root /app + cargo install --path . --locked --root /app --features grpcio/openssl && \ + cargo install --path . 
--bin purge_ttl --locked --root /app --features grpcio/openssl FROM debian:buster-slim WORKDIR /app @@ -43,6 +43,7 @@ COPY --from=builder /app/scripts/prepare-spanner.sh /app/scripts/prepare-spanner COPY --from=builder /app/src/db/spanner/schema.ddl /app/schema.ddl RUN chmod +x /app/scripts/prepare-spanner.sh +RUN pip3 install -r /app/tools/integration_tests/requirements.txt USER app:app diff --git a/Makefile b/Makefile index 8737881e5c..5179b824b9 100644 --- a/Makefile +++ b/Makefile @@ -36,10 +36,10 @@ docker_stop_spanner: docker-compose -f docker-compose.spanner.yaml down run: - RUST_LOG=debug RUST_BACKTRACE=full cargo run --features tokenserver_test_mode -- --config config/local.toml + RUST_LOG=debug RUST_BACKTRACE=full cargo run --features grpcio/openssl -- --config config/local.toml run_spanner: GOOGLE_APPLICATION_CREDENTIALS=$(PATH_TO_SYNC_SPANNER_KEYS) GRPC_DEFAULT_SSL_ROOTS_FILE_PATH=$(PATH_TO_GRPC_CERT) make run test: - SYNC_DATABASE_URL=$(SYNC_DATABASE_URL) SYNC_TOKENSERVER__DATABASE_URL=$(SYNC_TOKENSERVER__DATABASE_URL) RUST_TEST_THREADS=1 cargo test --features tokenserver_test_mode + SYNC_DATABASE_URL=$(SYNC_DATABASE_URL) SYNC_TOKENSERVER__DATABASE_URL=$(SYNC_TOKENSERVER__DATABASE_URL) RUST_TEST_THREADS=1 cargo test --features grpcio/openssl diff --git a/config/local.example.toml b/config/local.example.toml index a1bfbef179..7e2a687cdb 100644 --- a/config/local.example.toml +++ b/config/local.example.toml @@ -22,7 +22,9 @@ tokenserver.enabled = true tokenserver.fxa_email_domain = "api-accounts.stage.mozaws.net" tokenserver.fxa_metrics_hash_secret = "INSERT_SECRET_KEY_HERE" tokenserver.fxa_oauth_server_url = "https://oauth.stage.mozaws.net" -tokenserver.test_mode_enabled = false +tokenserver.fxa_browserid_audience = "https://token.stage.mozaws.net" +tokenserver.fxa_browserid_issuer = "https://api-accounts.stage.mozaws.net" +tokenserver.fxa_browserid_server_url = "https://verifier.stage.mozaws.net/v2" # cors settings # cors_allowed_origin = "localhost" diff --git a/docker-compose.e2e.mysql.yaml b/docker-compose.e2e.mysql.yaml index c64b68391b..031cbb2801 100644 --- a/docker-compose.e2e.mysql.yaml +++ b/docker-compose.e2e.mysql.yaml @@ -17,21 +17,24 @@ services: " e2e-tests: depends_on: + - mock-fxa-server - syncstorage-rs image: app:build privileged: true user: root environment: + MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 SYNC_HOST: 0.0.0.0 SYNC_MASTER_SECRET: secret0 SYNC_DATABASE_URL: mysql://test:test@sync-db:3306/syncstorage SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver SYNC_TOKENSERVER__ENABLED: "true" + SYNC_TOKENSERVER__FXA_BROWSERID_AUDIENCE: "https://token.stage.mozaws.net/" + SYNC_TOKENSERVER__FXA_BROWSERID_ISSUER: "api-accounts.stage.mozaws.net" SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 - SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL: https://oauth.stage.mozaws.net TOKENSERVER_HOST: http://localhost:8000 entrypoint: > /bin/sh -c " - sleep 28; pip3 install -r /app/tools/integration_tests/requirements.txt && python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' + python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' " diff --git a/docker-compose.e2e.spanner.yaml b/docker-compose.e2e.spanner.yaml index c435ab19c5..5aacb7b962 100644 --- a/docker-compose.e2e.spanner.yaml +++ b/docker-compose.e2e.spanner.yaml @@ -17,22 +17,25 @@ services: " e2e-tests: depends_on: + - mock-fxa-server - syncstorage-rs image: app:build 
privileged: true user: root environment: + MOCK_FXA_SERVER_URL: http://mock-fxa-server:6000 SYNC_HOST: 0.0.0.0 SYNC_MASTER_SECRET: secret0 SYNC_DATABASE_URL: spanner://projects/test-project/instances/test-instance/databases/test-database SYNC_SPANNER_EMULATOR_HOST: sync-db:9010 SYNC_TOKENSERVER__DATABASE_URL: mysql://test:test@tokenserver-db:3306/tokenserver SYNC_TOKENSERVER__ENABLED: "true" + SYNC_TOKENSERVER__FXA_BROWSERID_AUDIENCE: "https://token.stage.mozaws.net/" + SYNC_TOKENSERVER__FXA_BROWSERID_ISSUER: "api-accounts.stage.mozaws.net" SYNC_TOKENSERVER__FXA_EMAIL_DOMAIN: api-accounts.stage.mozaws.net SYNC_TOKENSERVER__FXA_METRICS_HASH_SECRET: secret0 - SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL: https://oauth.stage.mozaws.net TOKENSERVER_HOST: http://localhost:8000 entrypoint: > /bin/sh -c " - sleep 28; pip3 install -r /app/tools/integration_tests/requirements.txt && python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' + python3 /app/tools/integration_tests/run.py 'http://localhost:8000#secret0' " diff --git a/docker-compose.mysql.yaml b/docker-compose.mysql.yaml index 2e041c3086..47835d924d 100644 --- a/docker-compose.mysql.yaml +++ b/docker-compose.mysql.yaml @@ -28,6 +28,14 @@ services: MYSQL_USER: test MYSQL_PASSWORD: test + mock-fxa-server: + image: app:build + restart: "no" + entrypoint: "python3 /app/tools/integration_tests/tokenserver/mock_fxa_server.py" + environment: + MOCK_FXA_SERVER_HOST: 0.0.0.0 + MOCK_FXA_SERVER_PORT: 6000 + syncstorage-rs: image: ${SYNCSTORAGE_RS_IMAGE:-syncstorage-rs:latest} restart: always diff --git a/docker-compose.spanner.yaml b/docker-compose.spanner.yaml index 903d464b85..5317d91138 100644 --- a/docker-compose.spanner.yaml +++ b/docker-compose.spanner.yaml @@ -28,6 +28,13 @@ services: MYSQL_DATABASE: tokenserver MYSQL_USER: test MYSQL_PASSWORD: test + mock-fxa-server: + image: app:build + restart: "no" + entrypoint: "python3 /app/tools/integration_tests/tokenserver/mock_fxa_server.py" + environment: + MOCK_FXA_SERVER_HOST: 0.0.0.0 + MOCK_FXA_SERVER_PORT: 6000 syncstorage-rs: image: ${SYNCSTORAGE_RS_IMAGE:-syncstorage-rs:latest} restart: always diff --git a/src/settings.rs b/src/settings.rs index ee4b16ddd2..5246bdc8f7 100644 --- a/src/settings.rs +++ b/src/settings.rs @@ -189,9 +189,29 @@ impl Settings { "mysql://root@127.0.0.1/tokenserver", )?; s.set_default("tokenserver.enabled", false)?; - s.set_default("tokenserver.fxa_email_domain", "test.com")?; + s.set_default( + "tokenserver.fxa_browserid_audience", + "https://token.stage.mozaws.net", + )?; + s.set_default( + "tokenserver.fxa_browserid_issuer", + "api-accounts.stage.mozaws.net", + )?; + s.set_default( + "tokenserver.fxa_browserid_server_url", + "https://verifier.stage.mozaws.net/v2", + )?; + s.set_default("tokenserver.fxa_browserid_request_timeout", 10)?; + s.set_default( + "tokenserver.fxa_email_domain", + "api-accounts.stage.mozaws.net", + )?; s.set_default("tokenserver.fxa_metrics_hash_secret", "secret")?; - s.set_default("tokenserver.test_mode_enabled", false)?; + s.set_default( + "tokenserver.fxa_oauth_server_url", + "https://oauth.stage.mozaws.net", + )?; + s.set_default("tokenserver.fxa_oauth_request_timeout", 10)?; s.set_default("tokenserver.node_type", "spanner")?; s.set_default("tokenserver.statsd_label", "syncstorage.tokenserver")?; diff --git a/src/tokenserver/README.md b/src/tokenserver/README.md index 5af0abef5d..7d55267233 100644 --- a/src/tokenserver/README.md +++ b/src/tokenserver/README.md @@ -24,30 +24,12 @@ This functionality was previously provided 
by a [Python service](https://github. ## Configuration -You can find example settings for Tokenserver in [config/local.example.toml](../../config/local.example.toml). The available settings are: - -| Option | Default value | Description | -| --- | --- | --- | -| `disable_syncstorage` | `false` | whether to disable the Sync Storage endpoints (see [Disabling Syncstorage](#disabling-syncstorage) for more information) | -| `tokenserver.database_url` | `"mysql://root@127.0.0.1/tokenserver_rs"` | database DSN | -| `tokenserver.database_pool_max_size` | `None` | the maximum number of connections in the database pool | -| `tokenserver.database_pool_min_idle` | `None` | the minimum number of idle database connections to maintain at all times | -| `tokenserver.database_pool_connection_timeout` | `Some(30)` | the timeout (in seconds) when waiting for an available connection | -| `tokenserver.fxa_metrics_hash_secret` | `"secret"` | the secret used to hash users' FxA UIDs | -| `tokenserver.fxa_email_domain` | `"api.accounts.firefox.com"` | the email domain used to contruct the FxA email address from the user's FxA UID | -| `tokenserver.fxa_oauth_server_url` | `None` | the URL of the FxA OAuth server to be used to verify user's OAuth tokens | -| `tokenserver.test_mode_enabled` | `false` | whether to enable Tokenserver's [test mode](#test-mode) | +You can find example settings for Tokenserver in [config/local.example.toml](../../config/local.example.toml). The available settings are described in doc comments [here](../../src/tokenserver/settings.rs). ### Disabling Syncstorage Tokenserver can be run as a standalone service by disabling the Sync Storage endpoints. This can be done simply by setting the `disable_syncstorage` setting to `true`. **Note that the Sync Storage settings must still be set even when those endpoints are disabled.** -### Test Mode - -When Tokenserver's "test mode" is enabled, OAuth tokens are unpacked without being verified by FxA. Essentially, this allows one to "forge" an OAuth token as though it were created by FxA. This can be useful to test new functionality during development or to run integration tests. - -**NOTE:** This should **never** be run in production. - ### Connecting to Firefox 1. Visit `about:config` in Firefox diff --git a/src/tokenserver/auth/browserid.rs b/src/tokenserver/auth/browserid.rs new file mode 100644 index 0000000000..ac15431bf3 --- /dev/null +++ b/src/tokenserver/auth/browserid.rs @@ -0,0 +1,479 @@ +use actix_web::{web, Error}; +use async_trait::async_trait; +use reqwest::{blocking::Client as ReqwestClient, StatusCode}; +use serde::{de::Deserializer, Deserialize, Serialize}; + +use super::VerifyToken; +use crate::tokenserver::{ + error::{ErrorLocation, TokenserverError}, + settings::Settings, +}; + +use core::time::Duration; +use std::convert::TryFrom; + +/// The information extracted from a valid BrowserID assertion. +#[derive(Clone, Debug, Default, Deserialize, PartialEq)] +pub struct VerifyOutput { + pub device_id: Option, + pub email: String, + pub generation: Option, + pub keys_changed_at: Option, +} + +/// The verifier used to verify BrowserID assertions. 
+#[derive(Clone)] +pub struct RemoteVerifier { + audience: String, + issuer: String, + fxa_verifier_url: String, + // Note that we do not need to use an Arc here, since Reqwest's blocking client uses an Arc + // internally + request_client: ReqwestClient, +} + +impl TryFrom<&Settings> for RemoteVerifier { + type Error = Error; + + fn try_from(settings: &Settings) -> Result { + Ok(Self { + audience: settings.fxa_browserid_audience.clone(), + issuer: settings.fxa_browserid_issuer.clone(), + request_client: ReqwestClient::builder() + .timeout(Duration::new(settings.fxa_browserid_request_timeout, 0)) + .build() + .map_err(|_| Error::from(()))?, + fxa_verifier_url: settings.fxa_browserid_server_url.clone(), + }) + } +} + +#[async_trait] +impl VerifyToken for RemoteVerifier { + type Output = VerifyOutput; + + /// Verifies a BrowserID assertion. Returns `VerifyOutput` for valid assertions and a + /// `TokenserverError` for invalid assertions. + async fn verify(&self, assertion: String) -> Result { + let verifier = self.clone(); + + web::block(move || { + let response = verifier + .request_client + .post(&verifier.fxa_verifier_url) + .json(&VerifyRequest { + assertion, + audience: verifier.audience.clone(), + trusted_issuers: [verifier.issuer.clone()], + }) + .send() + .map_err(|e| { + if e.is_connect() { + // If we are unable to reach the FxA server, report a 503 to the client + TokenserverError::resource_unavailable() + } else { + // If any other error occurs during the request, report a 401 to the client + TokenserverError::invalid_credentials("Unauthorized") + } + })?; + + // If FxA responds with an HTTP status other than 200, report a 503 to the client + if response.status() != StatusCode::OK { + return Err(TokenserverError::resource_unavailable()); + } + + // If FxA responds with an invalid response body, report a 503 to the client + let response_body = response + .json::() + .map_err(|_| TokenserverError::resource_unavailable())?; + + match response_body { + VerifyResponse::Failure { + reason: Some(reason), + } if reason.contains("expired") || reason.contains("issued later than") => { + Err(TokenserverError { + status: "invalid-timestamp", + location: ErrorLocation::Body, + ..Default::default() + }) + } + VerifyResponse::Failure { .. } => { + Err(TokenserverError::invalid_credentials("Unauthorized")) + } + VerifyResponse::Okay { issuer, .. } if issuer != verifier.issuer => { + Err(TokenserverError::invalid_credentials("Unauthorized")) + } + VerifyResponse::Okay { + idp_claims: Some(claims), + .. + } if !claims.token_verified() => { + Err(TokenserverError::invalid_credentials("Unauthorized")) + } + VerifyResponse::Okay { + email, + idp_claims: Some(claims), + .. + } => Ok(VerifyOutput { + device_id: claims.device_id.clone(), + email, + generation: claims.generation()?, + keys_changed_at: claims.keys_changed_at()?, + }), + VerifyResponse::Okay { email, .. } => Ok(VerifyOutput { + device_id: None, + email, + generation: None, + keys_changed_at: None, + }), + } + }).await.map_err(Into::into) + } +} + +/// The request sent to the FxA BrowserID verifier for token verification. +#[derive(Serialize)] +struct VerifyRequest { + assertion: String, + audience: String, + #[serde(rename(serialize = "trustedIssuers"))] + trusted_issuers: [String; 1], +} + +/// The response returned by the FxA BrowserID verifier for a token verification request. 
+#[derive(Deserialize, Serialize)]
+#[serde(tag = "status", rename_all = "lowercase")]
+enum VerifyResponse {
+    Okay {
+        email: String,
+        #[serde(rename = "idpClaims")]
+        idp_claims: Option<IdpClaims>,
+        issuer: String,
+    },
+    Failure {
+        reason: Option<String>,
+    },
+}
+
+/// The claims extracted from a valid BrowserID assertion.
+#[derive(Deserialize, Serialize)]
+struct IdpClaims {
+    #[serde(rename = "fxa-deviceId")]
+    pub device_id: Option<String>,
+    #[serde(
+        default,
+        rename = "fxa-generation",
+        deserialize_with = "strict_deserialize"
+    )]
+    generation: Option<Option<i64>>,
+    #[serde(
+        default,
+        rename = "fxa-keysChangedAt",
+        deserialize_with = "strict_deserialize"
+    )]
+    keys_changed_at: Option<Option<i64>>,
+    #[serde(
+        default,
+        rename = "fxa-tokenVerified",
+        deserialize_with = "strict_deserialize"
+    )]
+    token_verified: Option<Option<bool>>,
+}
+
+impl IdpClaims {
+    fn generation(&self) -> Result<Option<i64>, TokenserverError> {
+        match self.generation {
+            // If the fxa-generation claim is present, return its value. If it's missing, return None.
+            Some(Some(_)) | None => Ok(self.generation.flatten()),
+            // If the fxa-generation claim is null, return an error.
+            Some(None) => Err(TokenserverError::invalid_generation()),
+        }
+    }
+
+    fn keys_changed_at(&self) -> Result<Option<i64>, TokenserverError> {
+        match self.keys_changed_at {
+            // If the fxa-keysChangedAt claim is present, return its value. If it's missing, return None.
+            Some(Some(_)) | None => Ok(self.keys_changed_at.flatten()),
+            // If the fxa-keysChangedAt claim is null, return an error.
+            Some(None) => Err(TokenserverError {
+                description: "invalid keysChangedAt",
+                status: "invalid-credentials",
+                location: ErrorLocation::Body,
+                ..Default::default()
+            }),
+        }
+    }
+
+    fn token_verified(&self) -> bool {
+        match self.token_verified {
+            // If the fxa-tokenVerified claim is true or missing, return true.
+            Some(Some(true)) | None => true,
+            // If the fxa-tokenVerified claim is false or null, return false.
+ Some(Some(false)) | Some(None) => false, + } + } +} + +// Approach inspired by: https://github.com/serde-rs/serde/issues/984#issuecomment-314143738 +fn strict_deserialize<'de, T, D>(deserializer: D) -> Result, D::Error> +where + T: Deserialize<'de>, + D: Deserializer<'de>, +{ + Deserialize::deserialize(deserializer).map(Some) +} + +#[cfg(test)] +mod tests { + use super::*; + + use mockito::{self, Mock}; + + #[actix_rt::test] + async fn test_browserid_verifier_success() { + let body = r#"{ + "status": "okay", + "email": "test@example.com", + "audience": "https://test.com", + "issuer": "accounts.firefox.com", + "idpClaims": { + "fxa-deviceId": "test_device_id", + "fxa-generation": 1234, + "fxa-keysChangedAt": 5678 + } + }"#; + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body(body) + .create(); + let verifier = RemoteVerifier::try_from(&Settings { + fxa_browserid_audience: "https://test.com".to_owned(), + fxa_browserid_issuer: "accounts.firefox.com".to_owned(), + fxa_browserid_server_url: format!("{}/v2", mockito::server_url()), + ..Default::default() + }).unwrap(); + + let result = verifier.verify("test".to_owned()).await.unwrap(); + mock.assert(); + + let expected_result = VerifyOutput { + device_id: Some("test_device_id".to_owned()), + email: "test@example.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(5678), + }; + + assert_eq!(expected_result, result); + } + + #[actix_rt::test] + async fn test_browserid_verifier_failure_cases() { + const AUDIENCE: &str = "https://test.com"; + + let verifier = RemoteVerifier::try_from(&Settings { + fxa_browserid_audience: AUDIENCE.to_owned(), + fxa_browserid_server_url: format!("{}/v2", mockito::server_url()), + ..Default::default() + }).unwrap(); + let assertion = "test"; + + // Verifier returns 500 + { + let mock = mockito::mock("POST", "/v2") + .with_status(500) + .with_header("content-type", "application/json") + .create(); + + let error = verifier.verify(assertion.to_owned()).await.unwrap_err(); + mock.assert(); + + let expected_error = TokenserverError::resource_unavailable(); + assert_eq!(expected_error, error); + } + + // "Server Error" in body + { + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body("
<h1>Server Error</h1>
") + .create(); + + let error = verifier.verify(assertion.to_owned()).await.unwrap_err(); + mock.assert(); + + let expected_error = TokenserverError::resource_unavailable(); + assert_eq!(expected_error, error); + } + + // {"status": "error"} + { + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body("{\"status\": \"error\"}") + .create(); + + let error = verifier.verify(assertion.to_owned()).await.unwrap_err(); + mock.assert(); + + let expected_error = TokenserverError::resource_unavailable(); + assert_eq!(expected_error, error); + } + + // {"status": "potato"} in body + { + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body("{\"status\": \"potato\"}") + .create(); + + let error = verifier.verify(assertion.to_owned()).await.unwrap_err(); + mock.assert(); + + let expected_error = TokenserverError::resource_unavailable(); + assert_eq!(expected_error, error); + } + + // {"status": "error"} in body with random reason + { + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body("{\"status\": \"failure\", \"reason\": \"something broke\"}") + .create(); + + let error = verifier.verify(assertion.to_owned()).await.unwrap_err(); + mock.assert(); + + let expected_error = TokenserverError::invalid_credentials("Unauthorized"); + assert_eq!(expected_error, error); + } + } + + #[actix_rt::test] + async fn test_browserid_verifier_rejects_unissuers() { + const AUDIENCE: &str = "https://test.com"; + const ISSUER: &str = "accounts.firefox.com"; + + fn mock(issuer: &'static str) -> Mock { + let body = format!( + r#"{{ + "status": "okay", + "email": "test@example.com", + "audience": "https://testmytoken.com", + "issuer": "{}" + }}"#, + issuer + ); + + mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body(body) + .create() + } + + let expected_error = TokenserverError::invalid_credentials("Unauthorized"); + let verifier = RemoteVerifier::try_from(&Settings { + fxa_browserid_audience: AUDIENCE.to_owned(), + fxa_browserid_issuer: ISSUER.to_owned(), + fxa_browserid_server_url: format!("{}/v2", mockito::server_url()), + ..Default::default() + }).unwrap(); + let assertion = "test".to_owned(); + + { + let mock = mock("login.persona.org"); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + { + let mock = mock(ISSUER); + let result = verifier.verify(assertion.clone()).await.unwrap(); + let expected_result = VerifyOutput { + device_id: None, + email: "test@example.com".to_owned(), + generation: None, + keys_changed_at: None, + }; + + mock.assert(); + assert_eq!(expected_result, result); + } + + { + let mock = mock("accounts.firefox.org"); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + { + let mock = mock("http://accounts.firefox.com"); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + { + let mock = mock("accounts.firefox.co"); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + let expected_error = TokenserverError::resource_unavailable(); + + { + let body = r#"{{ + "status": "okay", + "email": "test@example.com", + "audience": "https://testmytoken.com", + "issuer": 42 + }}"#; + let mock = 
mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body(body) + .create(); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + { + let body = r#"{{ + "status": "okay", + "email": "test@example.com", + "audience": "https://testmytoken.com", + "issuer": null + }}"#; + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body(body) + .create(); + let error = verifier.verify(assertion.clone()).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + + { + let body = r#"{{ + "status": "okay", + "email": "test@example.com", + "audience": "https://testmytoken.com" + }}"#; + let mock = mockito::mock("POST", "/v2") + .with_header("content-type", "application/json") + .with_body(body) + .create(); + let error = verifier.verify(assertion).await.unwrap_err(); + + mock.assert(); + assert_eq!(expected_error, error); + } + } +} diff --git a/src/tokenserver/auth/mod.rs b/src/tokenserver/auth/mod.rs new file mode 100644 index 0000000000..36fb2552f9 --- /dev/null +++ b/src/tokenserver/auth/mod.rs @@ -0,0 +1,158 @@ +pub mod browserid; +pub mod oauth; + +use actix_web::Error; +use async_trait::async_trait; +use dyn_clone::{self, DynClone}; +use pyo3::{ + prelude::{IntoPy, PyErr, PyModule, PyObject, Python}, + types::IntoPyDict, +}; + +use super::error::TokenserverError; +use crate::error::{ApiError, ApiErrorKind}; + +/// The plaintext needed to build a token. +#[derive(Clone)] +pub struct MakeTokenPlaintext { + pub node: String, + pub fxa_kid: String, + pub fxa_uid: String, + pub hashed_device_id: String, + pub hashed_fxa_uid: String, + pub expires: u64, + pub uid: i64, +} + +impl IntoPy for MakeTokenPlaintext { + fn into_py(self, py: Python<'_>) -> PyObject { + let dict = [ + ("node", self.node), + ("fxa_kid", self.fxa_kid), + ("fxa_uid", self.fxa_uid), + ("hashed_device_id", self.hashed_device_id), + ("hashed_fxa_uid", self.hashed_fxa_uid), + ] + .into_py_dict(py); + + // These need to be set separately since they aren't strings, and + // Rust doesn't support heterogeneous arrays + dict.set_item("expires", self.expires).unwrap(); + dict.set_item("uid", self.uid).unwrap(); + + dict.into() + } +} + +/// An adapter to the tokenlib Python library. +pub struct Tokenlib; + +impl Tokenlib { + /// Builds the token and derived secret to be returned by Tokenserver. + pub fn get_token_and_derived_secret( + plaintext: MakeTokenPlaintext, + shared_secret: &str, + ) -> Result<(String, String), Error> { + Python::with_gil(|py| { + // `import tokenlib` + let module = PyModule::import(py, "tokenlib").map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + })?; + // `kwargs = { 'secret': shared_secret }` + let kwargs = [("secret", shared_secret)].into_py_dict(py); + // `token = tokenlib.make_token(plaintext, **kwargs)` + let token = module + .getattr("make_token")? + .call((plaintext,), Some(kwargs)) + .map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + }) + .and_then(|x| x.extract())?; + // `derived_secret = tokenlib.get_derived_secret(token, **kwargs)` + let derived_secret = module + .getattr("get_derived_secret")? 
+ .call((&token,), Some(kwargs)) + .map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + }) + .and_then(|x| x.extract())?; + // `return (token, derived_secret)` + Ok((token, derived_secret)) + }) + .map_err(pyerr_to_actix_error) + } +} + +pub fn derive_node_secrets(secrets: Vec<&str>, node: &str) -> Result, Error> { + const FILENAME: &str = "secrets.py"; + + Python::with_gil(|py| { + let code = include_str!("secrets.py"); + let module = PyModule::from_code(py, code, FILENAME, FILENAME)?; + + module + .getattr("derive_secrets")? + .call((secrets, node), None) + .map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + }) + .and_then(|x| x.extract()) + }) + .map_err(pyerr_to_actix_error) +} + +/// Implementers of this trait can be used to verify tokens for Tokenserver. +#[async_trait] +pub trait VerifyToken: DynClone + Sync + Send { + type Output: Clone; + + /// Verifies the given token. This function is async because token verification often involves + /// making a request to a remote server. + async fn verify(&self, token: String) -> Result; +} + +dyn_clone::clone_trait_object!( VerifyToken); + +/// A mock verifier to be used for testing purposes. +#[derive(Clone, Default)] +pub struct MockVerifier { + pub valid: bool, + pub verify_output: T, +} + +#[async_trait] +impl VerifyToken for MockVerifier { + type Output = T; + + async fn verify(&self, _token: String) -> Result { + self.valid + .then(|| self.verify_output.clone()) + .ok_or_else(|| TokenserverError::invalid_credentials("Unauthorized")) + } +} + +fn pyerr_to_actix_error(e: PyErr) -> Error { + let api_error: ApiError = ApiErrorKind::Internal(e.to_string()).into(); + api_error.into() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_derive_secret_success() { + let secrets = vec!["deadbeefdeadbeefdeadbeefdeadbeef"]; + let node = "https://node"; + let derived_secrets = derive_node_secrets(secrets, node).unwrap(); + + assert_eq!( + derived_secrets, + vec!["a227eb0deb5fb4fd8002166f555c9071".to_owned()] + ); + } +} diff --git a/src/tokenserver/auth/oauth.rs b/src/tokenserver/auth/oauth.rs new file mode 100644 index 0000000000..ae5ed7594c --- /dev/null +++ b/src/tokenserver/auth/oauth.rs @@ -0,0 +1,113 @@ +use actix_web::{web, Error}; +use async_trait::async_trait; +use pyo3::{ + prelude::{Py, PyAny, PyErr, PyModule, Python}, + types::{IntoPyDict, PyString}, +}; +use serde::{Deserialize, Serialize}; +use serde_json; +use tokio::time; + +use super::VerifyToken; +use crate::tokenserver::{error::TokenserverError, settings::Settings}; + +use core::time::Duration; +use std::convert::TryFrom; + +/// The information extracted from a valid OAuth token. +#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] +pub struct VerifyOutput { + #[serde(rename = "user")] + pub fxa_uid: String, + pub generation: Option, +} + +/// The verifier used to verify OAuth tokens. 
+#[derive(Clone)] +pub struct RemoteVerifier { + // Note that we do not need to use an Arc here, since Py is already a reference-counted + // pointer + inner: Py, + timeout: u64, +} + +impl RemoteVerifier { + const FILENAME: &'static str = "verify.py"; +} + +impl TryFrom<&Settings> for RemoteVerifier { + type Error = Error; + + fn try_from(settings: &Settings) -> Result { + let inner: Py = Python::with_gil::<_, Result, PyErr>>(|py| { + let code = include_str!("verify.py"); + let module = PyModule::from_code(py, code, Self::FILENAME, Self::FILENAME)?; + let kwargs = [("server_url", &settings.fxa_oauth_server_url)].into_py_dict(py); + let object: Py = module + .getattr("FxaOAuthClient")? + .call((), Some(kwargs)) + .map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + })? + .into(); + + Ok(object) + }) + .map_err(super::pyerr_to_actix_error)?; + + Ok(Self { + inner, + timeout: settings.fxa_oauth_request_timeout, + }) + } +} + +#[async_trait] +impl VerifyToken for RemoteVerifier { + type Output = VerifyOutput; + + /// Verifies an OAuth token. Returns `VerifyOutput` for valid tokens and a `TokenserverError` + /// for invalid tokens. + async fn verify(&self, token: String) -> Result { + let verifier = self.clone(); + + let fut = web::block(move || { + let maybe_verify_output_string = Python::with_gil(|py| { + let client = verifier.inner.as_ref(py); + // `client.verify_token(token)` + let result: &PyAny = client + .getattr("verify_token")? + .call((token,), None) + .map_err(|e| { + e.print_and_set_sys_last_vars(py); + e + })?; + + if result.is_none() { + Ok(None) + } else { + let verify_output_python_string = result.downcast::()?; + verify_output_python_string.extract::().map(Some) + } + }) + .map_err(|_| TokenserverError::invalid_credentials("Unauthorized"))?; + + match maybe_verify_output_string { + Some(verify_output_string) => { + serde_json::from_str::(&verify_output_string) + .map_err(|_| TokenserverError::invalid_credentials("Unauthorized")) + } + None => Err(TokenserverError::invalid_credentials("Unauthorized")), + } + }); + + // The PyFxA OAuth client does not offer a way to set a request timeout, so we set one here + // by timing out the future if the verification process blocks this thread for longer + // than the specified number of seconds. + time::timeout(Duration::new(self.timeout, 0), fut) + .await + .map_err(|_| TokenserverError::resource_unavailable())? + .map_err(Into::into) + } +} diff --git a/src/tokenserver/auth/secrets.py b/src/tokenserver/auth/secrets.py new file mode 100644 index 0000000000..b7a9eb6c62 --- /dev/null +++ b/src/tokenserver/auth/secrets.py @@ -0,0 +1,24 @@ +import binascii +import hashlib +from tokenlib import HKDF + +# Namespace prefix for HKDF "info" parameter. +HKDF_INFO_NODE_SECRET = b"services.mozilla.com/mozsvc/v1/node_secret/" + + +def derive_secrets(master_secrets, node): + hkdf_params = { + "salt": None, + "info": HKDF_INFO_NODE_SECRET + node.encode("utf-8"), + "hashmod": hashlib.sha256, + } + node_secrets = [] + for master_secret in master_secrets: + # We want each hex-encoded derived secret to be the same + # size as its (presumably hex-encoded) master secret. 
+ size = len(master_secret) // 2 + + node_secret = HKDF(master_secret.encode("utf-8"), size=size, + **hkdf_params) + node_secrets.append(binascii.b2a_hex(node_secret).decode()) + return node_secrets diff --git a/src/tokenserver/verify.py b/src/tokenserver/auth/verify.py similarity index 100% rename from src/tokenserver/verify.py rename to src/tokenserver/auth/verify.py diff --git a/src/tokenserver/db/models.rs b/src/tokenserver/db/models.rs index 8dcb8fe101..a8bb10d651 100644 --- a/src/tokenserver/db/models.rs +++ b/src/tokenserver/db/models.rs @@ -1080,7 +1080,7 @@ mod tests { service_id, email: email1.to_owned(), generation: 1, - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), created_at: 2, node_id, keys_changed_at: Some(3), @@ -1108,7 +1108,7 @@ mod tests { service_id, email: email1.to_owned(), generation: 1, - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), replaced_at: None, node_id, keys_changed_at: Some(3), @@ -1198,7 +1198,7 @@ mod tests { service_id, generation: 1234, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, })?; @@ -1251,7 +1251,7 @@ mod tests { service_id, generation: 1234, email: "test1@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, })?; @@ -1260,7 +1260,7 @@ mod tests { service_id, generation: 1234, email: "test2@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, })?; @@ -1303,7 +1303,7 @@ mod tests { service_id, generation: 1234, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }); @@ -1344,7 +1344,7 @@ mod tests { service_id, generation: 1234, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }); @@ -1384,7 +1384,7 @@ mod tests { service_id, generation: 1234, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, })?; @@ -1407,7 +1407,7 @@ mod tests { email: "test@test.com".to_owned(), service_id, generation: 1235, - client_state: "626262".to_owned(), + client_state: "bbbb".to_owned(), keys_changed_at: Some(1235), capacity_release_rate: None, }) @@ -1457,7 +1457,7 @@ mod tests { service_id, generation: MAX_GENERATION, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1468,7 +1468,7 @@ mod tests { service_id, generation: 1234, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1527,7 +1527,7 @@ mod tests { service_id, generation: 1234, email: "test1@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1538,7 +1538,7 @@ mod tests { service_id, generation: 1234, email: "test2@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ 
-1549,7 +1549,7 @@ mod tests { service_id, generation: 1234, email: "test3@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1560,7 +1560,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1679,7 +1679,7 @@ mod tests { service_id, generation: 1234, email: "test1@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1696,7 +1696,7 @@ mod tests { service_id, generation: 1234, email: "test2@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1715,7 +1715,7 @@ mod tests { service_id, generation: 1234, email: "test3@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1732,7 +1732,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1750,7 +1750,7 @@ mod tests { service_id, generation: 1234, email: "test5@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1767,7 +1767,7 @@ mod tests { service_id, generation: 1234, email: "test6@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1785,7 +1785,7 @@ mod tests { service_id, generation: 1234, email: "test7@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1832,7 +1832,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1851,7 +1851,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1894,7 +1894,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) @@ -1910,7 +1910,7 @@ mod tests { service_id, generation: 1234, email: "test4@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: Some(1234), capacity_release_rate: None, }) diff --git a/src/tokenserver/error.rs b/src/tokenserver/error.rs index 7ad0721eb6..9babde5c0f 100644 --- a/src/tokenserver/error.rs +++ b/src/tokenserver/error.rs @@ -84,6 +84,15 @@ impl TokenserverError { } } + pub fn resource_unavailable() -> Self { + Self { + location: ErrorLocation::Body, + description: "Resource is not available", + http_status: StatusCode::SERVICE_UNAVAILABLE, + ..Default::default() + } + } + pub fn unsupported(description: &'static str, name: String) -> Self { Self { status: "error", diff --git 
a/src/tokenserver/extractors.rs b/src/tokenserver/extractors.rs index c8d436250d..c8166be3f2 100644 --- a/src/tokenserver/extractors.rs +++ b/src/tokenserver/extractors.rs @@ -3,28 +3,29 @@ //! Handles ensuring the header's, body, and query parameters are correct, extraction to //! relevant types, and failing correctly with the appropriate errors if issues arise. +use core::fmt::Debug; use std::sync::Arc; use actix_web::{ dev::Payload, http::StatusCode, - web::{self, Data, Query}, + web::{Data, Query}, Error, FromRequest, HttpRequest, }; -use actix_web_httpauth::extractors::bearer::BearerAuth; use futures::future::LocalBoxFuture; +use hex; use hmac::{Hmac, Mac, NewMac}; use lazy_static::lazy_static; use regex::Regex; use serde::Deserialize; use sha2::Sha256; -use super::db::{models::Db, params, pool::DbPool, results}; -use super::error::{ErrorLocation, TokenserverError}; -use super::support::TokenData; -use super::{LogItemsMutator, NodeType, ServerState}; -use crate::server::metrics::Metrics; -use crate::settings::Secrets; +use super::{ + db::{models::Db, params, pool::DbPool, results}, + error::{ErrorLocation, TokenserverError}, + LogItemsMutator, NodeType, ServerState, +}; +use crate::{server::metrics::Metrics, settings::Secrets}; lazy_static! { static ref CLIENT_STATE_REGEX: Regex = Regex::new("^[a-zA-Z0-9._-]{1,32}$").unwrap(); @@ -37,11 +38,7 @@ const SYNC_SERVICE_NAME: &str = "sync-1.5"; #[derive(Debug, Default, PartialEq)] pub struct TokenserverRequest { pub user: results::GetOrCreateUser, - pub fxa_uid: String, - pub email: String, - pub generation: Option, - pub keys_changed_at: i64, - pub client_state: String, + pub auth_data: AuthData, pub shared_secret: String, pub hashed_fxa_uid: String, pub hashed_device_id: String, @@ -72,62 +69,73 @@ impl TokenserverRequest { /// of the FxA server may not have been sending all the expected fields, and /// that some clients do not report the `generation` timestamp. fn validate(&self) -> Result<(), TokenserverError> { + /// Returns true only if both arguments are Some and opta > optb. + fn gt(opta: Option, optb: Option) -> bool { + opta.is_some() && optb.is_some() && opta > optb + } + + /// Returns true only if both arguments are Some and opta < optb. + fn lt(opta: Option, optb: Option) -> bool { + opta.is_some() && optb.is_some() && opta < optb + } + + /// Returns true only if both arguments are Some and opta <= optb. + fn le(opta: Option, optb: Option) -> bool { + opta.is_some() && optb.is_some() && opta <= optb + } + // If the caller reports a generation number, then a change // in keys should correspond to a change in generation number. // Unfortunately a previous version of the server that didn't // have `keys_changed_at` support may have already seen and // written the new value of `generation`. The best we can do // here is enforce that `keys_changed_at` <= `generation`. - if let (Some(generation), Some(user_keys_changed_at)) = - (self.generation, self.user.keys_changed_at) + if gt(self.auth_data.keys_changed_at, self.user.keys_changed_at) + && lt(self.auth_data.generation, self.auth_data.keys_changed_at) { - if self.keys_changed_at > user_keys_changed_at && generation < self.keys_changed_at { - return Err(TokenserverError::invalid_keys_changed_at()); - } + return Err(TokenserverError::invalid_keys_changed_at()); } // The client state on the request must not have been used in the past. 
- if self.user.old_client_states.contains(&self.client_state) { + if self + .user + .old_client_states + .contains(&self.auth_data.client_state) + { let error_message = "Unacceptable client-state value stale value"; return Err(TokenserverError::invalid_client_state(error_message)); } // If the client state on the request differs from the most recently-used client state, it must // be accompanied by a valid change in generation (if the client reports a generation). - if let Some(generation) = self.generation { - if self.client_state != self.user.client_state && generation <= self.user.generation { - let error_message = - "Unacceptable client-state value new value with no generation change"; - return Err(TokenserverError::invalid_client_state(error_message)); - } + if self.auth_data.client_state != self.user.client_state + && le(self.auth_data.generation, Some(self.user.generation)) + { + let error_message = + "Unacceptable client-state value new value with no generation change"; + return Err(TokenserverError::invalid_client_state(error_message)); } // If the client state on the request differs from the most recently-used client state, it must // be accompanied by a valid change in keys_changed_at - if let Some(user_keys_changed_at) = self.user.keys_changed_at { - if self.client_state != self.user.client_state - && self.keys_changed_at <= user_keys_changed_at - { - let error_message = - "Unacceptable client-state value new value with no keys_changed_at change"; - return Err(TokenserverError::invalid_client_state(error_message)); - } + if self.auth_data.client_state != self.user.client_state + && le(self.auth_data.keys_changed_at, self.user.keys_changed_at) + { + let error_message = + "Unacceptable client-state value new value with no keys_changed_at change"; + return Err(TokenserverError::invalid_client_state(error_message)); } // The generation on the request cannot be earlier than the generation stored on the user // record. - if let Some(generation) = self.generation { - if self.user.generation > generation { - return Err(TokenserverError::invalid_generation()); - } + if gt(Some(self.user.generation), self.auth_data.generation) { + return Err(TokenserverError::invalid_generation()); } // The keys_changed_at on the request cannot be earlier than the keys_changed_at stored on // the user record. - if let Some(user_keys_changed_at) = self.user.keys_changed_at { - if user_keys_changed_at > self.keys_changed_at { - return Err(TokenserverError::invalid_keys_changed_at()); - } + if gt(self.user.keys_changed_at, self.auth_data.keys_changed_at) { + return Err(TokenserverError::invalid_keys_changed_at()); } Ok(()) @@ -144,25 +152,35 @@ impl FromRequest for TokenserverRequest { Box::pin(async move { let mut log_items_mutator = LogItemsMutator::from(&req); - let token_data = TokenData::extract(&req).await?; + let auth_data = AuthData::extract(&req).await?; // XXX: Tokenserver state will no longer be an Option once the Tokenserver // code is rolled out, so we will eventually be able to remove this unwrap(). 
let state = get_server_state(&req)?.as_ref().as_ref().unwrap(); let shared_secret = get_secret(&req)?; let fxa_metrics_hash_secret = &state.fxa_metrics_hash_secret.as_bytes(); - let key_id = KeyId::extract(&req).await?; - let fxa_uid = token_data.user; + + // To preserve anonymity, compute a hash of the FxA UID to be used for reporting + // metrics let hashed_fxa_uid = { - let hashed_fxa_uid_full = fxa_metrics_hash(&fxa_uid, fxa_metrics_hash_secret); + let hashed_fxa_uid_full = + fxa_metrics_hash(&auth_data.fxa_uid, fxa_metrics_hash_secret); log_items_mutator.insert("uid".to_owned(), hashed_fxa_uid_full.clone()); hashed_fxa_uid_full[0..32].to_owned() }; log_items_mutator.insert("metrics_uid".to_owned(), hashed_fxa_uid.clone()); + + // To preserve anonymity, compute a hash of the FxA device ID to be used for reporting + // metrics. Only requests using BrowserID will have a device ID, so use "none" as + // a placeholder for OAuth requests. let hashed_device_id = { - let device_id = "none".to_string(); + let device_id = auth_data + .device_id + .clone() + .unwrap_or_else(|| "none".to_owned()); hash_device_id(&hashed_fxa_uid, &device_id, fxa_metrics_hash_secret) }; + let db = >::extract(&req).await?; let service_id = { let path = req.match_info(); @@ -202,14 +220,13 @@ impl FromRequest for TokenserverRequest { .into()); } }; - let email = format!("{}@{}", fxa_uid, state.fxa_email_domain); let user = db .get_or_create_user(params::GetOrCreateUser { service_id, - email: email.clone(), - generation: token_data.generation.unwrap_or(0), - client_state: key_id.client_state.clone(), - keys_changed_at: Some(key_id.keys_changed_at), + email: auth_data.email.clone(), + generation: auth_data.generation.unwrap_or(0), + client_state: auth_data.client_state.clone(), + keys_changed_at: auth_data.keys_changed_at, capacity_release_rate: state.node_capacity_release_rate, }) .await?; @@ -232,11 +249,7 @@ impl FromRequest for TokenserverRequest { let tokenserver_request = TokenserverRequest { user, - fxa_uid, - email, - generation: token_data.generation, - keys_changed_at: key_id.keys_changed_at, - client_state: key_id.client_state, + auth_data, shared_secret, hashed_fxa_uid, hashed_device_id, @@ -297,7 +310,14 @@ impl FromRequest for Box { } } -impl FromRequest for TokenData { +/// An authentication token as parsed from the `Authorization` header. Both BrowserID assertions +/// and OAuth tokens are opaque to Tokenserver and must be verified via FxA. 
+pub enum Token { + BrowserIdAssertion(String), + OAuthToken(String), +} + +impl FromRequest for Token { type Config = (); type Error = Error; type Future = LocalBoxFuture<'static, Result>; @@ -314,39 +334,104 @@ impl FromRequest for TokenData { .to_str() .map_err(|_| TokenserverError::unauthorized("Unauthorized"))?; - // The request must use Bearer auth - if let Some((auth_type, _)) = authorization_header.split_once(" ") { - if auth_type.to_ascii_lowercase() != "bearer" { - return Err(TokenserverError::unauthorized("Unsupported").into()); + if let Some((auth_type, token)) = authorization_header.split_once(" ") { + let auth_type = auth_type.to_ascii_lowercase(); + + if auth_type == "bearer" { + Ok(Token::OAuthToken(token.to_owned())) + } else if auth_type == "browserid" { + Ok(Token::BrowserIdAssertion(token.to_owned())) + } else { + // The request must use a Bearer token or BrowserID token + Err(TokenserverError::unauthorized("Unsupported").into()) } + } else { + // Headers that are not of the format "[AUTH TYPE] [TOKEN]" are invalid + Err(TokenserverError::unauthorized("Unauthorized").into()) } + }) + } +} - let auth = BearerAuth::extract(&req) - .await - .map_err(|_| TokenserverError::invalid_credentials("Unsupported"))?; +/// The data extracted from the authentication token. +#[derive(Debug, Default, PartialEq)] +pub struct AuthData { + pub client_state: String, + pub device_id: Option, + pub email: String, + pub fxa_uid: String, + pub generation: Option, + pub keys_changed_at: Option, +} + +impl FromRequest for AuthData { + type Config = (); + type Error = Error; + type Future = LocalBoxFuture<'static, Result>; + + fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future { + let req = req.clone(); + + Box::pin(async move { // XXX: The Tokenserver state will no longer be an Option once the Tokenserver // code is rolled out, so we will eventually be able to remove this unwrap(). let state = get_server_state(&req)?.as_ref().as_ref().unwrap(); - let oauth_verifier = state.oauth_verifier.clone(); + let token = Token::extract(&req).await?; let mut metrics = Metrics::extract(&req).await?; - metrics.start_timer("tokenserver.oauth_token_verification", None); - - web::block(move || oauth_verifier.verify_token(auth.token())) - .await - .map_err(TokenserverError::from) - .map_err(Into::into) + metrics.start_timer("tokenserver.token_verification", None); + + match token { + Token::BrowserIdAssertion(assertion) => { + let verify_output = state.browserid_verifier.clone().verify(assertion).await?; + + // For requests using BrowserID, the client state is embedded in the + // X-Client-State header, and the generation and keys_changed_at are extracted + // from the assertion as part of the verification process. + let XClientStateHeader(client_state) = + XClientStateHeader::extract(&req).await?; + let (fxa_uid, _) = verify_output + .email + .split_once('@') + .unwrap_or((&verify_output.email, "")); + + Ok(AuthData { + client_state: client_state.unwrap_or_else(|| "".to_owned()), + device_id: verify_output.device_id, + email: verify_output.email.clone(), + fxa_uid: fxa_uid.to_owned(), + generation: verify_output.generation, + keys_changed_at: verify_output.keys_changed_at, + }) + } + Token::OAuthToken(token) => { + let verify_output = state.oauth_verifier.verify(token).await?; + + // For requests using OAuth, the keys_changed_at and client state are embedded + // in the X-KeyID header. 
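To make the X-KeyID handling below concrete, here is a minimal sketch (using the same `base64` and `hex` crates as the patch, but not taken from it) of how a header value such as `0000000001234-qqo` becomes the hex client state stored in the database. This is also why the test fixtures switch from `YWFh`/`616161` (the ASCII string "aaa") to `qqo`/`aaaa` (the raw bytes 0xAA 0xAA).

    let x_key_id = "0000000001234-qqo";
    let (keys_changed_at, encoded_client_state) = x_key_id.split_once('-').unwrap();

    // The client state travels as unpadded URL-safe base64 and is stored as hex.
    let client_state_bytes =
        base64::decode_config(encoded_client_state, base64::URL_SAFE_NO_PAD).unwrap();

    assert_eq!(keys_changed_at.parse::<i64>().unwrap(), 1234);
    assert_eq!(hex::encode(client_state_bytes), "aaaa");
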
+ let key_id = KeyId::extract(&req).await?; + let fxa_uid = verify_output.fxa_uid; + let email = format!("{}@{}", fxa_uid, state.fxa_email_domain); + + Ok(AuthData { + client_state: key_id.client_state, + email, + device_id: None, + fxa_uid, + generation: verify_output.generation, + keys_changed_at: Some(key_id.keys_changed_at), + }) + } + } }) } } -#[derive(Clone, Debug, PartialEq)] -struct KeyId { - client_state: String, - keys_changed_at: i64, -} +/// The value extracted from the X-Client-State header if it was present. The value in this header +/// consists of the raw client state bytes encoded as a hexadecimal string. +struct XClientStateHeader(Option); -impl FromRequest for KeyId { +impl FromRequest for XClientStateHeader { type Config = (); type Error = Error; type Future = LocalBoxFuture<'static, Result>; @@ -374,27 +459,61 @@ impl FromRequest for KeyId { } } + Ok(Self(maybe_x_client_state.map(ToOwned::to_owned))) + }) + } +} + +// The key ID, as extracted from the X-KeyID header. The X-KeyID header is of the format +// `[keys_changed_at]-[base64-encoded client state]` (e.g. `00000000000001234-qqo`) +#[derive(Clone, Debug, PartialEq)] +struct KeyId { + client_state: String, + keys_changed_at: i64, +} + +impl FromRequest for KeyId { + type Config = (); + type Error = Error; + type Future = LocalBoxFuture<'static, Result>; + + fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future { + let req = req.clone(); + + Box::pin(async move { + let headers = req.headers(); + + // The X-KeyID header must be present for requests using OAuth let x_key_id = headers .get("X-KeyID") .ok_or_else(|| TokenserverError::invalid_key_id("Missing X-KeyID header"))? .to_str() .map_err(|_| TokenserverError::invalid_key_id("Invalid X-KeyID header"))?; + // The X-KeyID header is of the format `[keys_changed_at]-[base64-encoded client state]` (e.g. `00000000000001234-qqo`) let (keys_changed_at_string, encoded_client_state) = x_key_id .split_once("-") .ok_or_else(|| TokenserverError::invalid_credentials("Unauthorized"))?; let client_state = { - let client_state_bytes = - base64::decode_config(encoded_client_state, base64::URL_SAFE_NO_PAD) - .map_err(|_| TokenserverError::invalid_credentials("Unauthorized"))?; - - let client_state = hex::encode(client_state_bytes); + // The client state in the X-KeyID header consists of the raw client state bytes + // encoded as URL-safe base64 with the padding removed. We convert it to hex + // because we store the client state as hex in the database. + let client_state_hex = { + let bytes = + base64::decode_config(encoded_client_state, base64::URL_SAFE_NO_PAD) + .map_err(|_| TokenserverError::invalid_credentials("Unauthorized"))?; + + hex::encode(bytes) + }; + // The client state from the X-Client-State header is already properly encoded as + // hex + let XClientStateHeader(x_client_state) = XClientStateHeader::extract(&req).await?; // If there's a client state value in the X-Client-State header, verify that it matches // the value in X-KeyID. 
- if let Some(x_client_state) = maybe_x_client_state { - if x_client_state != client_state { + if let Some(x_client_state) = x_client_state { + if x_client_state != client_state_hex { return Err(TokenserverError { status: "invalid-client-state", location: ErrorLocation::Body, @@ -404,7 +523,7 @@ impl FromRequest for KeyId { } } - client_state + client_state_hex }; let keys_changed_at = keys_changed_at_string @@ -474,7 +593,9 @@ mod tests { use crate::server::metrics; use crate::settings::{Secrets, ServerLimits, Settings}; use crate::tokenserver::{ - db::mock::MockDbPool as MockTokenserverPool, MockOAuthVerifier, ServerState, + auth::{browserid, oauth, MockVerifier}, + db::mock::MockDbPool as MockTokenserverPool, + ServerState, }; use std::sync::Arc; @@ -488,26 +609,26 @@ mod tests { #[actix_rt::test] async fn test_valid_tokenserver_request() { let fxa_uid = "test123"; - let verifier = { - let token_data = TokenData { - user: fxa_uid.to_owned(), - client_id: "client id".to_owned(), - scope: vec!["scope".to_owned()], + let oauth_verifier = { + let verify_output = oauth::VerifyOutput { + fxa_uid: fxa_uid.to_owned(), generation: Some(1234), - profile_changed_at: Some(1234), }; let valid = true; - MockOAuthVerifier { valid, token_data } + MockVerifier { + valid, + verify_output, + } }; - let state = make_state(verifier); + let state = make_state(oauth_verifier, MockVerifier::default()); let req = TestRequest::default() .data(Some(state)) .data(Arc::clone(&SECRETS)) .header("authorization", "Bearer fake_token") .header("accept", "application/json,text/plain:q=0.5") - .header("x-keyid", "0000000001234-YWFh") + .header("x-keyid", "0000000001234-qqo") .param("application", "sync") .param("version", "1.5") .uri("/1.0/sync/1.5?duration=100") @@ -520,11 +641,14 @@ mod tests { .unwrap(); let expected_tokenserver_request = TokenserverRequest { user: results::GetOrCreateUser::default(), - fxa_uid: fxa_uid.to_owned(), - email: "test123@test.com".to_owned(), - generation: Some(1234), - keys_changed_at: 1234, - client_state: "616161".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: fxa_uid.to_owned(), + email: "test123@test.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(1234), + client_state: "aaaa".to_owned(), + }, shared_secret: "Ted Koppel is a robot".to_owned(), hashed_fxa_uid: "4d00ecae64b98dd7dc7dea68d0dd615d".to_owned(), hashed_device_id: "3a41cccbdd666ebc4199f1f9d1249d44".to_owned(), @@ -539,26 +663,26 @@ mod tests { #[actix_rt::test] async fn test_invalid_auth_token() { let fxa_uid = "test123"; - let verifier = { - let token_data = TokenData { - user: fxa_uid.to_owned(), - client_id: "client id".to_owned(), - scope: vec!["scope".to_owned()], + let oauth_verifier = { + let verify_output = oauth::VerifyOutput { + fxa_uid: fxa_uid.to_owned(), generation: Some(1234), - profile_changed_at: None, }; let valid = false; - MockOAuthVerifier { valid, token_data } + MockVerifier { + valid, + verify_output, + } }; - let state = make_state(verifier); + let state = make_state(oauth_verifier, MockVerifier::default()); let request = TestRequest::default() .data(Some(state)) .data(Arc::clone(&SECRETS)) .header("authorization", "Bearer fake_token") .header("accept", "application/json,text/plain:q=0.5") - .header("x-keyid", "0000000001234-YWFh") + .header("x-keyid", "0000000001234-qqo") .param("application", "sync") .param("version", "1.5") .method(Method::GET) @@ -580,25 +704,25 @@ mod tests { async fn test_application_and_version() { fn build_request() -> TestRequest { let 
fxa_uid = "test123"; - let verifier = { - let token_data = TokenData { - user: fxa_uid.to_owned(), - client_id: "client id".to_owned(), - scope: vec!["scope".to_owned()], + let oauth_verifier = { + let verify_output = oauth::VerifyOutput { + fxa_uid: fxa_uid.to_owned(), generation: Some(1234), - profile_changed_at: None, }; let valid = true; - MockOAuthVerifier { valid, token_data } + MockVerifier { + valid, + verify_output, + } }; TestRequest::default() - .data(Some(make_state(verifier))) + .data(Some(make_state(oauth_verifier, MockVerifier::default()))) .data(Arc::clone(&SECRETS)) .header("authorization", "Bearer fake_token") .header("accept", "application/json,text/plain:q=0.5") - .header("x-keyid", "0000000001234-YWFh") + .header("x-keyid", "0000000001234-qqo") .method(Method::GET) } @@ -677,23 +801,23 @@ mod tests { async fn test_key_id() { fn build_request() -> TestRequest { let fxa_uid = "test123"; - let verifier = { + let oauth_verifier = { let start = SystemTime::now(); let current_time = start.duration_since(UNIX_EPOCH).unwrap(); - let token_data = TokenData { - user: fxa_uid.to_owned(), - client_id: "client id".to_owned(), - scope: vec!["scope".to_owned()], + let verify_output = oauth::VerifyOutput { + fxa_uid: fxa_uid.to_owned(), generation: Some(current_time.as_secs() as i64), - profile_changed_at: Some(current_time.as_secs() as i64), }; let valid = true; - MockOAuthVerifier { valid, token_data } + MockVerifier { + valid, + verify_output, + } }; TestRequest::default() - .data(Some(make_state(verifier))) + .data(Some(make_state(oauth_verifier, MockVerifier::default()))) .header("authorization", "Bearer fake_token") .header("accept", "application/json,text/plain:q=0.5") .param("application", "sync") @@ -767,7 +891,7 @@ mod tests { // X-KeyID header with non-integral keys_changed_at { let request = build_request() - .header("x-keyid", "notanumber-YWFh") + .header("x-keyid", "notanumber-qqo") .to_http_request(); let response: HttpResponse = KeyId::extract(&request).await.unwrap_err().into(); assert_eq!(response.status(), StatusCode::UNAUTHORIZED); @@ -780,8 +904,8 @@ mod tests { // X-KeyID header with client state that does not match that in the X-Client-State header { let request = build_request() - .header("x-keyid", "0000000001234-YWFh") - .header("x-client-state", "626262") + .header("x-keyid", "0000000001234-qqo") + .header("x-client-state", "bbbb") .to_http_request(); let response: HttpResponse = KeyId::extract(&request).await.unwrap_err().into(); assert_eq!(response.status(), StatusCode::UNAUTHORIZED); @@ -798,12 +922,12 @@ mod tests { // Valid X-KeyID header with matching X-Client-State header { let request = build_request() - .header("x-keyid", "0000000001234-YWFh") - .header("x-client-state", "616161") + .header("x-keyid", "0000000001234-qqo") + .header("x-client-state", "aaaa") .to_http_request(); let key_id = KeyId::extract(&request).await.unwrap(); let expected_key_id = KeyId { - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: 1234, }; @@ -813,11 +937,11 @@ mod tests { // Valid X-KeyID header with no X-Client-State header { let request = build_request() - .header("x-keyid", "0000000001234-YWFh") + .header("x-keyid", "0000000001234-qqo") .to_http_request(); let key_id = KeyId::extract(&request).await.unwrap(); let expected_key_id = KeyId { - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), keys_changed_at: 1234, }; @@ -833,7 +957,7 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: 
"test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), @@ -842,11 +966,14 @@ mod tests { first_seen_at: 1234, old_client_states: vec![], }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1233), - keys_changed_at: 1234, - client_state: "616161".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1233), + keys_changed_at: Some(1234), + client_state: "aaaa".to_owned(), + }, shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -867,7 +994,7 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), @@ -876,11 +1003,14 @@ mod tests { replaced_at: None, old_client_states: vec![], }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1234), - keys_changed_at: 1233, - client_state: "616161".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(1233), + client_state: "aaaa".to_owned(), + }, shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -900,7 +1030,7 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), @@ -909,11 +1039,14 @@ mod tests { replaced_at: None, old_client_states: vec![], }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1234), - keys_changed_at: 1235, - client_state: "616161".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(1235), + client_state: "aaaa".to_owned(), + }, shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -934,20 +1067,23 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), created_at: 1234, first_seen_at: 1234, replaced_at: None, - old_client_states: vec!["626262".to_owned()], + old_client_states: vec!["bbbb".to_owned()], + }, + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(1234), + client_state: "bbbb".to_owned(), }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1234), - keys_changed_at: 1234, - client_state: "626262".to_owned(), shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -968,7 +1104,7 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), @@ -977,11 +1113,14 @@ mod tests { replaced_at: None, 
old_client_states: vec![], }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1234), - keys_changed_at: 1234, - client_state: "626262".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1234), + keys_changed_at: Some(1234), + client_state: "bbbb".to_owned(), + }, shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -1002,7 +1141,7 @@ mod tests { user: results::GetOrCreateUser { uid: 1, email: "test@test.com".to_owned(), - client_state: "616161".to_owned(), + client_state: "aaaa".to_owned(), generation: 1234, node: "node".to_owned(), keys_changed_at: Some(1234), @@ -1011,11 +1150,14 @@ mod tests { replaced_at: None, old_client_states: vec![], }, - fxa_uid: "test".to_owned(), - email: "test@test.com".to_owned(), - generation: Some(1235), - keys_changed_at: 1234, - client_state: "626262".to_owned(), + auth_data: AuthData { + device_id: None, + fxa_uid: "test".to_owned(), + email: "test@test.com".to_owned(), + generation: Some(1235), + keys_changed_at: Some(1234), + client_state: "bbbb".to_owned(), + }, shared_secret: "secret".to_owned(), hashed_fxa_uid: "abcdef".to_owned(), hashed_device_id: "abcdef".to_owned(), @@ -1034,13 +1176,17 @@ mod tests { String::from_utf8(block_on(test::read_body(sresponse)).to_vec()).unwrap() } - fn make_state(verifier: MockOAuthVerifier) -> ServerState { + fn make_state( + oauth_verifier: MockVerifier, + browserid_verifier: MockVerifier, + ) -> ServerState { let settings = Settings::default(); ServerState { fxa_email_domain: "test.com".to_owned(), fxa_metrics_hash_secret: "".to_owned(), - oauth_verifier: Box::new(verifier), + browserid_verifier: Box::new(browserid_verifier), + oauth_verifier: Box::new(oauth_verifier), db_pool: Box::new(MockTokenserverPool::new()), node_capacity_release_rate: None, node_type: NodeType::default(), diff --git a/src/tokenserver/handlers.rs b/src/tokenserver/handlers.rs index 71fe15a000..be63c58f1f 100644 --- a/src/tokenserver/handlers.rs +++ b/src/tokenserver/handlers.rs @@ -8,13 +8,17 @@ use actix_web::{http::StatusCode, Error, HttpResponse}; use serde::Serialize; use serde_json::Value; -use super::db::models::Db; -use super::db::params::{GetNodeId, PostUser, PutUser, ReplaceUsers}; -use super::error::TokenserverError; -use super::extractors::TokenserverRequest; -use super::support::Tokenlib; -use super::NodeType; -use crate::tokenserver::support::MakeTokenPlaintext; +use super::{ + auth::{MakeTokenPlaintext, Tokenlib}, + db::{ + models::Db, + params::{GetNodeId, PostUser, PutUser, ReplaceUsers}, + }, + error::TokenserverError, + extractors::TokenserverRequest, + NodeType, +}; +use crate::server::metrics::Metrics; #[derive(Debug, Serialize)] pub struct TokenserverResult { @@ -31,11 +35,15 @@ pub struct TokenserverResult { pub async fn get_tokenserver_result( req: TokenserverRequest, db: Box, + mut metrics: Metrics ) -> Result { let updates = update_user(&req, db).await?; let (token, derived_secret) = { let token_plaintext = get_token_plaintext(&req, &updates)?; + + metrics.start_timer("tokenserver.token_creation", None); + // Get the token and secret Tokenlib::get_token_and_derived_secret(token_plaintext, &req.shared_secret)? 
}; @@ -43,7 +51,7 @@ pub async fn get_tokenserver_result( id: token, key: derived_secret, uid: updates.uid, - api_endpoint: format!("{:}/1.5/{:}", req.user.node, req.user.uid), + api_endpoint: format!("{:}/1.5/{:}", req.user.node, updates.uid), duration: req.duration, hashed_fxa_uid: req.hashed_fxa_uid, hashalg: "sha256", @@ -60,7 +68,7 @@ fn get_token_plaintext( let fxa_kid = { // If decoding the hex bytes fails, it means we did something wrong when we stored the // client state in the database - let client_state = hex::decode(req.client_state.clone()).map_err(|_| { + let client_state = hex::decode(req.auth_data.client_state.clone()).map_err(|_| { error!("⚠️ Failed to decode client state hex"); TokenserverError::internal_error() @@ -81,7 +89,7 @@ fn get_token_plaintext( Ok(MakeTokenPlaintext { node: req.user.node.to_owned(), fxa_kid, - fxa_uid: req.fxa_uid.clone(), + fxa_uid: req.auth_data.fxa_uid.clone(), hashed_device_id: req.hashed_device_id.clone(), hashed_fxa_uid: req.hashed_fxa_uid.clone(), expires, @@ -97,29 +105,23 @@ struct UserUpdates { async fn update_user(req: &TokenserverRequest, db: Box) -> Result { // If the keys_changed_at in the request is larger than that stored on the user record, // update to the value in the request. - let keys_changed_at = if let Some(user_keys_changed_at) = req.user.keys_changed_at { - cmp::max(req.keys_changed_at, user_keys_changed_at) - } else { - req.keys_changed_at - }; + let keys_changed_at = + cmp::max(req.auth_data.keys_changed_at, req.user.keys_changed_at).unwrap_or(0); - let generation = if let Some(generation) = req.generation { + let generation = if let Some(generation) = req.auth_data.generation { // If there's a generation on the request, choose the larger of that and the generation // already stored on the user record. cmp::max(generation, req.user.generation) - } else if req.keys_changed_at > req.user.generation { + } else { // If there's not a generation on the request and the keys_changed_at on the request is // larger than the generation stored on the user record, set the user's generation to be // the keys_changed_at on the request. - req.keys_changed_at - } else { - // As a fallback, set the user's generation to be 0. - 0 + cmp::max(req.auth_data.keys_changed_at, Some(req.user.generation)).unwrap_or(0) }; // If the client state changed, we need to mark the current user as "replaced" and create a // new user record. Otherwise, we can update the user in place. 
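A quick illustrative sketch (not from the patch) of the `Option` ordering that the `cmp::max(..).unwrap_or(0)` calls above rely on: `Option<i64>` orders `None` below every `Some`, so a missing timestamp never wins the max, and the `unwrap_or(0)` fallback only applies when neither the request nor the user record supplies a value.

    use std::cmp;

    assert_eq!(cmp::max(Some(1234), None), Some(1234));       // stored value wins over a missing one
    assert_eq!(cmp::max(Some(1234), Some(1235)), Some(1235)); // larger timestamp wins
    assert_eq!(cmp::max(None::<i64>, None).unwrap_or(0), 0);  // nothing reported or stored
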
- if req.client_state != req.user.client_state { + if req.auth_data.client_state != req.user.client_state { let timestamp = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() @@ -128,9 +130,9 @@ async fn update_user(req: &TokenserverRequest, db: Box) -> Result) -> Result) -> Result, pub fxa_email_domain: String, pub fxa_metrics_hash_secret: String, - pub oauth_verifier: Box, + pub oauth_verifier: Box>, + pub browserid_verifier: Box>, pub node_capacity_release_rate: Option, pub node_type: NodeType, pub service_id: Option, @@ -39,23 +41,8 @@ pub struct ServerState { impl ServerState { pub fn from_settings(settings: &Settings, metrics: StatsdClient) -> Result { - let oauth_verifier: Box = if settings.test_mode_enabled { - #[cfg(feature = "tokenserver_test_mode")] - let oauth_verifier = Box::new(TestModeOAuthVerifier); - - #[cfg(not(feature = "tokenserver_test_mode"))] - let oauth_verifier = Box::new( - OAuthVerifier::new(settings.fxa_oauth_server_url.as_deref()) - .expect("failed to create Tokenserver OAuth verifier"), - ); - - oauth_verifier - } else { - Box::new( - OAuthVerifier::new(settings.fxa_oauth_server_url.as_deref()) - .expect("failed to create Tokenserver OAuth verifier"), - ) - }; + let oauth_verifier = Box::new(oauth::RemoteVerifier::try_from(settings).expect("failed to create Tokenserver OAuth verifier")); + let browserid_verifier = Box::new(browserid::RemoteVerifier::try_from(settings).expect("failed to create Tokenserver BrowserID verifier")); let use_test_transactions = false; TokenserverPool::new(settings, &Metrics::from(&metrics), use_test_transactions) @@ -74,6 +61,7 @@ impl ServerState { fxa_email_domain: settings.fxa_email_domain.clone(), fxa_metrics_hash_secret: settings.fxa_metrics_hash_secret.clone(), oauth_verifier, + browserid_verifier, db_pool: Box::new(db_pool), node_capacity_release_rate: settings.node_capacity_release_rate, node_type: settings.node_type, diff --git a/src/tokenserver/settings.rs b/src/tokenserver/settings.rs index 4745270af4..85c0ab93aa 100644 --- a/src/tokenserver/settings.rs +++ b/src/tokenserver/settings.rs @@ -4,39 +4,41 @@ use super::NodeType; #[derive(Clone, Debug, Deserialize)] pub struct Settings { + /// The URL of the Tokenserver MySQL database. pub database_url: String, - + /// The max size of the database connection pool. pub database_pool_max_size: Option, - // NOTE: Not supported by deadpool! + /// The minimum number of database connections to be maintained at any given time. pub database_pool_min_idle: Option, - /// Pool timeout when waiting for a slot to become available, in seconds pub database_pool_connection_timeout: Option, - // XXX: This is a temporary setting used to enable Tokenserver-related features. In // the future, Tokenserver will always be enabled, and this setting will be // removed. + /// Whether or not to enable Tokenserver. pub enabled: bool, - + /// The secret to be used when computing the hash for a Tokenserver user's metrics UID. pub fxa_metrics_hash_secret: String, - /// The email domain for users' FxA accounts. This should be set according to the /// desired FxA environment (production or stage). pub fxa_email_domain: String, - - /// The URL of the FxA server used for verifying Tokenserver OAuth tokens. - pub fxa_oauth_server_url: Option, - - /// When test mode is enabled, OAuth tokens are unpacked without being verified. - pub test_mode_enabled: bool, - + /// The URL of the FxA server used for verifying OAuth tokens. 
+ pub fxa_oauth_server_url: String, + /// The timeout to be used when making requests to the FxA OAuth verification server. + pub fxa_oauth_request_timeout: u64, + /// The issuer expected in the BrowserID verification response. + pub fxa_browserid_issuer: String, + /// The audience to be sent to the FxA BrowserID verification server. + pub fxa_browserid_audience: String, + /// The URL of the FxA server used for verifying BrowserID assertions. + pub fxa_browserid_server_url: String, + /// The timeout to be used when making requests to the FxA BrowserID verification server. + pub fxa_browserid_request_timeout: u64, /// The rate at which capacity should be released from nodes that are at capacity. pub node_capacity_release_rate: Option, - /// The type of the storage nodes used by this instance of Tokenserver. pub node_type: NodeType, - /// The label to be used when reporting Metrics. pub statsd_label: String, } @@ -51,8 +53,12 @@ impl Default for Settings { enabled: false, fxa_email_domain: "api.accounts.firefox.com".to_owned(), fxa_metrics_hash_secret: "secret".to_owned(), - fxa_oauth_server_url: None, - test_mode_enabled: false, + fxa_oauth_server_url: "https://oauth.stage.mozaws.net".to_owned(), + fxa_oauth_request_timeout: 10, + fxa_browserid_audience: "https://token.stage.mozaws.net".to_owned(), + fxa_browserid_issuer: "api-accounts.stage.mozaws.net".to_owned(), + fxa_browserid_server_url: "https://verifier.stage.mozaws.net/v2".to_owned(), + fxa_browserid_request_timeout: 10, node_capacity_release_rate: None, node_type: NodeType::Spanner, statsd_label: "syncstorage.tokenserver".to_owned(), diff --git a/src/tokenserver/support.rs b/src/tokenserver/support.rs deleted file mode 100644 index ef3f815bbd..0000000000 --- a/src/tokenserver/support.rs +++ /dev/null @@ -1,270 +0,0 @@ -use actix_web::Error; -use pyo3::prelude::{IntoPy, Py, PyAny, PyErr, PyModule, PyObject, Python}; -use pyo3::types::{IntoPyDict, PyString}; -use serde::{Deserialize, Serialize}; - -use super::error::TokenserverError; -use crate::error::{ApiError, ApiErrorKind}; - -/// The plaintext needed to build a token. -#[derive(Clone)] -pub struct MakeTokenPlaintext { - pub node: String, - pub fxa_kid: String, - pub fxa_uid: String, - pub hashed_device_id: String, - pub hashed_fxa_uid: String, - pub expires: u64, - pub uid: i64, -} - -impl IntoPy for MakeTokenPlaintext { - fn into_py(self, py: Python<'_>) -> PyObject { - let dict = [ - ("node", self.node), - ("fxa_kid", self.fxa_kid), - ("fxa_uid", self.fxa_uid), - ("hashed_device_id", self.hashed_device_id), - ("hashed_fxa_uid", self.hashed_fxa_uid), - ] - .into_py_dict(py); - - // These need to be set separately since they aren't strings, and - // Rust doesn't support heterogeneous arrays - dict.set_item("expires", self.expires).unwrap(); - dict.set_item("uid", self.uid).unwrap(); - - dict.into() - } -} - -/// An adapter to the tokenlib Python library. -pub struct Tokenlib; - -impl Tokenlib { - /// Builds the token and derived secret to be returned by Tokenserver. - pub fn get_token_and_derived_secret( - plaintext: MakeTokenPlaintext, - shared_secret: &str, - ) -> Result<(String, String), Error> { - Python::with_gil(|py| { - let module = PyModule::import(py, "tokenlib").map_err(|e| { - e.print_and_set_sys_last_vars(py); - e - })?; - let kwargs = [("secret", shared_secret)].into_py_dict(py); - let token = module - .getattr("make_token")? 
- .call((plaintext,), Some(kwargs)) - .map_err(|e| { - e.print_and_set_sys_last_vars(py); - e - }) - .and_then(|x| x.extract())?; - let derived_secret = module - .getattr("get_derived_secret")? - .call((&token,), Some(kwargs)) - .map_err(|e| { - e.print_and_set_sys_last_vars(py); - e - }) - .and_then(|x| x.extract())?; - - Ok((token, derived_secret)) - }) - .map_err(pyerr_to_actix_error) - } -} - -#[derive(Clone, Debug, Default, Deserialize, PartialEq, Serialize)] -pub struct TokenData { - pub user: String, - pub client_id: String, - pub scope: Vec, - pub generation: Option, - pub profile_changed_at: Option, -} - -/// Implementers of this trait can be used to verify OAuth tokens for Tokenserver. -pub trait VerifyToken: Sync + Send { - fn verify_token(&self, token: &str) -> Result; - fn box_clone(&self) -> Box; -} - -impl Clone for Box { - fn clone(&self) -> Box { - self.box_clone() - } -} - -#[derive(Clone)] -/// An adapter to the PyFxA Python library. -pub struct OAuthVerifier { - inner: Py, -} - -impl OAuthVerifier { - const FILENAME: &'static str = "verify.py"; - - pub fn new(fxa_oauth_server_url: Option<&str>) -> Result { - let inner: Py = Python::with_gil::<_, Result, PyErr>>(|py| { - let code = include_str!("verify.py"); - let module = PyModule::from_code(py, code, Self::FILENAME, Self::FILENAME)?; - let kwargs = fxa_oauth_server_url.map(|url| [("server_url", url)].into_py_dict(py)); - let object: Py = module - .getattr("FxaOAuthClient")? - .call((), kwargs) - .map_err(|e| { - e.print_and_set_sys_last_vars(py); - e - })? - .into(); - - Ok(object) - }) - .map_err(pyerr_to_actix_error)?; - - Ok(Self { inner }) - } -} - -impl VerifyToken for OAuthVerifier { - /// Verifies an OAuth token. Returns `TokenData` for valid tokens and an `Error` for invalid - /// tokens. - fn verify_token(&self, token: &str) -> Result { - let maybe_token_data_string = Python::with_gil(|py| { - let client = self.inner.as_ref(py); - let result: &PyAny = client - .getattr("verify_token")? 
- .call((token,), None) - .map_err(|e| { - e.print_and_set_sys_last_vars(py); - e - })?; - - if result.is_none() { - Ok(None) - } else { - let token_data_python_string = result.downcast::()?; - token_data_python_string.extract::().map(Some) - } - }) - .map_err(|_| TokenserverError::invalid_credentials("Unauthorized"))?; - - match maybe_token_data_string { - Some(token_data_string) => serde_json::from_str(&token_data_string) - .map_err(|_| TokenserverError::invalid_credentials("Unauthorized")), - None => Err(TokenserverError::invalid_credentials("Unauthorized")), - } - } - - fn box_clone(&self) -> Box { - Box::new(self.clone()) - } -} - -#[derive(Deserialize, Serialize)] -struct JwtPayload { - client_id: String, - scope: String, - sub: String, - #[serde(rename(serialize = "fxa-generation", deserialize = "fxa-generation"))] - fxa_generation: Option, - #[serde(rename( - serialize = "fxa-profileChangedAt", - deserialize = "fxa-profileChangedAt" - ))] - fxa_profile_changed_at: Option, -} - -#[derive(Clone)] -pub struct TestModeOAuthVerifier; - -impl VerifyToken for TestModeOAuthVerifier { - fn verify_token(&self, token: &str) -> Result { - let token_components: Vec<&str> = token.split('.').collect(); - - if token_components.len() != 3 { - return Err(TokenserverError::invalid_credentials("Invalid JWT")); - } - - let payload_bytes = base64::decode_config(token_components[1], base64::URL_SAFE_NO_PAD) - .map_err(|_| TokenserverError::invalid_credentials("Invalid JWT base64"))?; - let payload_string = String::from_utf8(payload_bytes) - .map_err(|_| TokenserverError::invalid_credentials("JWT payload not a valid string"))?; - let payload: JwtPayload = serde_json::from_str(&payload_string) - .map_err(|_| TokenserverError::invalid_credentials("Invalid JWT payload"))?; - - Ok(TokenData { - user: payload.sub, - client_id: payload.client_id, - scope: payload.scope.split(' ').map(String::from).collect(), - generation: payload.fxa_generation, - profile_changed_at: payload.fxa_profile_changed_at, - }) - } - - fn box_clone(&self) -> Box { - Box::new(self.clone()) - } -} - -/// A mock OAuth verifier to be used for testing purposes. 
-#[derive(Clone, Default)] -pub struct MockOAuthVerifier { - pub valid: bool, - pub token_data: TokenData, -} - -impl VerifyToken for MockOAuthVerifier { - fn verify_token(&self, _token: &str) -> Result { - self.valid - .then(|| self.token_data.clone()) - .ok_or_else(|| TokenserverError::invalid_credentials("Unauthorized")) - } - - fn box_clone(&self) -> Box { - Box::new(self.clone()) - } -} - -fn pyerr_to_actix_error(e: PyErr) -> Error { - let api_error: ApiError = ApiErrorKind::Internal(e.to_string()).into(); - api_error.into() -} - -#[cfg(test)] -mod tests { - use super::*; - - use jsonwebtoken::{EncodingKey, Header}; - - #[test] - fn test_test_mode_oauth_verifier() { - let test_mode_oauth_verifier = TestModeOAuthVerifier; - let claims = JwtPayload { - sub: "test user".to_owned(), - client_id: "test client ID".to_owned(), - scope: "test1 test2".to_owned(), - fxa_generation: Some(1234), - fxa_profile_changed_at: Some(5678), - }; - - let token = jsonwebtoken::encode( - &Header::default(), - &claims, - &EncodingKey::from_secret("secret".as_ref()), - ) - .unwrap(); - let decoded_claims = test_mode_oauth_verifier.verify_token(&token).unwrap(); - let expected_claims = TokenData { - user: "test user".to_owned(), - client_id: "test client ID".to_owned(), - scope: vec!["test1".to_owned(), "test2".to_owned()], - generation: Some(1234), - profile_changed_at: Some(5678), - }; - - assert_eq!(expected_claims, decoded_claims); - } -} diff --git a/tools/integration_tests/run.py b/tools/integration_tests/run.py index 80447d990b..c87fba4311 100644 --- a/tools/integration_tests/run.py +++ b/tools/integration_tests/run.py @@ -50,16 +50,25 @@ def start_server(): os.environ.setdefault("SYNC_MASTER_SECRET", "secret0") os.environ.setdefault("SYNC_CORS_MAX_AGE", "555") os.environ.setdefault("SYNC_CORS_ALLOWED_ORIGIN", "localhost") - os.environ['SYNC_TOKENSERVER__TEST_MODE_ENABLED'] = 'true' + mock_fxa_server_url = os.environ["MOCK_FXA_SERVER_URL"] + url = "%s/v2" % mock_fxa_server_url + os.environ["SYNC_TOKENSERVER__FXA_BROWSERID_SERVER_URL"] = url + os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = mock_fxa_server_url the_server_subprocess = start_server() try: res = 0 res |= run_live_functional_tests(TestStorage, sys.argv) - res |= run_local_tests() + os.environ["TOKENSERVER_AUTH_METHOD"] = "oauth" + res |= run_local_tests(include_browserid_specific_tests=False) + os.environ["TOKENSERVER_AUTH_METHOD"] = "browserid" + res |= run_local_tests(include_browserid_specific_tests=True) finally: terminate_process(the_server_subprocess) - os.environ['SYNC_TOKENSERVER__TEST_MODE_ENABLED'] = 'false' + os.environ["SYNC_TOKENSERVER__FXA_BROWSERID_SERVER_URL"] = \ + "https://verifier.stage.mozaws.net/v2" + os.environ["SYNC_TOKENSERVER__FXA_OAUTH_SERVER_URL"] = \ + "https://oauth.stage.mozaws.net" the_server_subprocess = start_server() try: res |= run_end_to_end_tests() diff --git a/tools/integration_tests/tokenserver/mock_fxa_server.py b/tools/integration_tests/tokenserver/mock_fxa_server.py new file mode 100644 index 0000000000..ac3c9935a0 --- /dev/null +++ b/tools/integration_tests/tokenserver/mock_fxa_server.py @@ -0,0 +1,75 @@ +from wsgiref.simple_server import make_server as _make_server +from pyramid.config import Configurator +from pyramid.response import Response +from pyramid.view import view_config +import json +import os + + +@view_config(route_name='mock_oauth_verify', renderer='json') +def _mock_oauth_verify(request): + body = json.loads(request.json_body['token']) + + return Response(json=body['body'], 
content_type='application/json', + status=body['status']) + + +# The PyFxA OAuth client makes a request to the FxA OAuth server for its +# current public RSA key. While the client allows us to pass in a JWK to +# prevent this request from happening, mocking the endpoint is simpler. +@view_config(route_name='mock_oauth_jwk', renderer='json') +def _mock_oauth_jwk(request): + return {'keys': [{'fake': 'RSA key'}]} + + +@view_config(route_name='mock_verify', renderer='json') +def _mock_browserid_verify(request): + body = json.loads(request.json_body['assertion']) + + return Response(json=body['body'], content_type='application/json', + status=body['status']) + + +# This endpoint is used by the legacy Tokenserver during startup. We mock it +# here so the unit tests can be run against the legacy Tokenserver. +@view_config(route_name='mock_config', renderer='json') +def _mock_config(request): + return { + "browserid": { + "issuer": "api-accounts.stage.mozaws.net", + "verificationUrl": "https://verifier.stage.mozaws.net/v2" + }, + "contentUrl": "https://accounts.stage.mozaws.net" + } + + +def make_server(host, port): + with Configurator() as config: + config.add_route('mock_oauth_verify', '/v1/verify') + config.add_view(_mock_oauth_verify, route_name='mock_oauth_verify', + renderer='json') + + config.add_route('mock_oauth_jwk', '/v1/jwks') + config.add_view(_mock_oauth_jwk, route_name='mock_oauth_jwk', + renderer='json') + + config.add_route('mock_browserid_verify', '/v2') + config.add_view(_mock_browserid_verify, + route_name='mock_browserid_verify', + renderer='json') + + config.add_route('mock_config', '/config') + config.add_view(_mock_config, route_name='mock_config', + renderer='json') + app = config.make_wsgi_app() + + return _make_server(host, port, app) + + +if __name__ == '__main__': + host = os.environ.get('MOCK_FXA_SERVER_HOST', 'localhost') + port = os.environ.get('MOCK_FXA_SERVER_PORT', 6000) + + with make_server(host, int(port)) as httpd: + print("Running mock FxA server on %s:%s" % (host, port)) + httpd.serve_forever() diff --git a/tools/integration_tests/tokenserver/run.py b/tools/integration_tests/tokenserver/run.py index a05d1b61bf..294f3a9f3f 100644 --- a/tools/integration_tests/tokenserver/run.py +++ b/tools/integration_tests/tokenserver/run.py @@ -4,13 +4,19 @@ import unittest from tokenserver.test_authorization import TestAuthorization +from tokenserver.test_browserid import TestBrowserId from tokenserver.test_e2e import TestE2e from tokenserver.test_misc import TestMisc from tokenserver.test_node_assignment import TestNodeAssignment -def run_local_tests(): - return run_tests([TestAuthorization, TestMisc, TestNodeAssignment]) +def run_local_tests(include_browserid_specific_tests=True): + if include_browserid_specific_tests: + test_classes = [TestAuthorization, TestBrowserId, TestMisc, + TestNodeAssignment] + return run_tests(test_classes) + else: + return run_tests([TestAuthorization, TestMisc, TestNodeAssignment]) def run_end_to_end_tests(): diff --git a/tools/integration_tests/tokenserver/test_authorization.py b/tools/integration_tests/tokenserver/test_authorization.py index 5b10caf0bb..d4ac08f431 100644 --- a/tools/integration_tests/tokenserver/test_authorization.py +++ b/tools/integration_tests/tokenserver/test_authorization.py @@ -45,30 +45,30 @@ def test_no_auth(self): self.assertEqual(res.json, expected_error_response) def test_invalid_client_state_in_key_id(self): - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-state!' 
- } - res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + if self.AUTH_METHOD == "oauth": + headers = self._build_auth_headers(keys_changed_at=1234, + client_state='aaaa') + headers['X-KeyID'] = headers['X-KeyID'].replace('qqo', 'state!') + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) - expected_error_response = { - 'status': 'invalid-credentials', - 'errors': [ - { - 'location': 'body', - 'name': '', - 'description': 'Unauthorized' - } - ] - } - self.assertEqual(res.json, expected_error_response) + expected_error_response = { + 'status': 'invalid-credentials', + 'errors': [ + { + 'location': 'body', + 'name': '', + 'description': 'Unauthorized' + } + ] + } + self.assertEqual(res.json, expected_error_response) def test_invalid_client_state_in_x_client_state(self): - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh', - 'X-Client-State': 'state!' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') + headers['X-Client-State'] = 'state!' + res = self.app.get('/1.0/sync/1.5', headers=headers, status=400) expected_error_response = { @@ -87,11 +87,9 @@ def test_keys_changed_at_less_than_equal_to_generation(self): self._add_user(generation=1232, keys_changed_at=1234) # If keys_changed_at changes, that change must be less than or equal # the new generation - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1236-YWFh' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1236, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-keysChangedAt', @@ -107,40 +105,31 @@ def test_keys_changed_at_less_than_equal_to_generation(self): # If the keys_changed_at on the request matches that currently stored # on the user record, it does not need to be less than or equal to the # generation on the request - oauth_token = self._forge_oauth_token(generation=1233) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1233, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) # A request with no generation is acceptable - oauth_token = self._forge_oauth_token(generation=None) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YWFh' - } + headers = self._build_auth_headers(generation=None, + keys_changed_at=1235, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) # A request with a keys_changed_at less than the new generation # is acceptable - oauth_token = self._forge_oauth_token(generation=1236) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YWFh' - } + headers = self._build_auth_headers(generation=1236, + keys_changed_at=1235, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) def test_disallow_reusing_old_client_state(self): # Add a user record that has already been replaced - self._add_user(client_state='616161', replaced_at=1200) + self._add_user(client_state='aaaa', replaced_at=1200) # Add the most up-to-date user record - self._add_user(client_state='626262') + self._add_user(client_state='bbbb') # A request cannot use a client state associated with a replaced user - oauth_token = self._forge_oauth_token() - # (Note that YWFh is base64 for 'aaa', which is 0x616161 in hex) - 
headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-client-state', @@ -155,30 +144,25 @@ def test_disallow_reusing_old_client_state(self): } self.assertEqual(res.json, expected_error_response) # Using the last-seen client state is okay - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YmJi' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='bbbb') res1 = self.app.get('/1.0/sync/1.5', headers=headers) # Using a new client state (with an updated generation and # keys_changed_at) is okay - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-Y2Nj' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1235, + client_state='cccc') res2 = self.app.get('/1.0/sync/1.5', headers=headers) # This results in the creation of a new user record self.assertNotEqual(res1.json['uid'], res2.json['uid']) def test_generation_change_must_accompany_client_state_change(self): - self._add_user(generation=1234, client_state='616161') + self._add_user(generation=1234, client_state='aaaa') # A request with a new client state must also contain a new generation - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YmJi' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-client-state', @@ -193,20 +177,16 @@ def test_generation_change_must_accompany_client_state_change(self): } self.assertEqual(res.json, expected_error_response) # A request with no generation is acceptable - oauth_token = self._forge_oauth_token(generation=None) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YmJi' - } + headers = self._build_auth_headers(generation=None, + keys_changed_at=1235, + client_state='bbbb') self.app.get('/1.0/sync/1.5', headers=headers) # We can't use a generation of 1235 when setting a new client state # because the generation was set to be equal to the keys_changed_at # in the previous request, which was 1235 - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-Y2Nj' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1235, + client_state='cccc') expected_error_response = { 'status': 'invalid-client-state', 'errors': [ @@ -222,23 +202,19 @@ def test_generation_change_must_accompany_client_state_change(self): self.assertEqual(res.json, expected_error_response) # A change in client state is acceptable only with a change in # generation (if it is present) - oauth_token = self._forge_oauth_token(generation=1236) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1236-Y2Nj' - } + headers = self._build_auth_headers(generation=1236, + keys_changed_at=1236, + client_state='cccc') self.app.get('/1.0/sync/1.5', headers=headers) def test_keys_changed_at_change_must_accompany_client_state_change(self): self._add_user(generation=1234, keys_changed_at=1234, - client_state='616161') + client_state='aaaa') # A request 
with a new client state must also contain a new # keys_changed_at - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YmJi' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1234, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-client-state', @@ -253,22 +229,18 @@ def test_keys_changed_at_change_must_accompany_client_state_change(self): } self.assertEqual(res.json, expected_error_response) # A request with a new keys_changed_at is acceptable - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YmJi' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1235, + client_state='bbbb') self.app.get('/1.0/sync/1.5', headers=headers) def test_generation_must_not_be_less_than_last_seen_value(self): uid = self._add_user(generation=1234) # The generation in the request cannot be less than the generation # currently stored on the user record - oauth_token = self._forge_oauth_token(generation=1233) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1233, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-generation', @@ -282,46 +254,40 @@ def test_generation_must_not_be_less_than_last_seen_value(self): } self.assertEqual(res.json, expected_error_response) # A request with no generation is acceptable - oauth_token = self._forge_oauth_token(generation=None) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=None, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) # A request with a generation equal to the last-seen generation is # acceptable - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) # A request with a generation greater than the last-seen generation is # acceptable - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # This should not result in the creation of a new user self.assertEqual(res.json['uid'], uid) def test_fxa_kid_change(self): self._add_user(generation=1234, keys_changed_at=None, - client_state='616161') + client_state='aaaa') # An OAuth client shows up, setting keys_changed_at. # (The value matches generation number above, beause in this scenario # FxA hasn't been updated to track and report keysChangedAt yet). 
- oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh', - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) token0 = self.unsafelyParseToken(res.json['id']) # Reject keys_changed_at lower than the value previously seen - headers['X-KeyID'] = '1233-YWFh' + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1233, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-keysChangedAt', @@ -336,30 +302,32 @@ def test_fxa_kid_change(self): self.assertEqual(res.json, expected_error_response) # Reject greater keys_changed_at with no corresponding update to # generation - headers['X-KeyID'] = '2345-YmJi' + headers = self._build_auth_headers(generation=1234, + keys_changed_at=2345, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) self.assertEqual(res.json, expected_error_response) # Accept equal keys_changed_at - headers['X-KeyID'] = '1234-YWFh' + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) # Accept greater keys_changed_at with new generation - headers['X-KeyID'] = '2345-YmJi' - oauth_token = self._forge_oauth_token(generation=2345) - headers['Authorization'] = 'Bearer %s' % oauth_token + headers = self._build_auth_headers(generation=2345, + keys_changed_at=2345, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers) token = self.unsafelyParseToken(res.json['id']) - self.assertEqual(token['fxa_kid'], '0000000002345-YmJi') + self.assertEqual(token['fxa_kid'], '0000000002345-u7s') self.assertNotEqual(token['uid'], token0['uid']) self.assertEqual(token['node'], token0['node']) def test_client_specified_duration(self): self._add_user(generation=1234, keys_changed_at=1234, - client_state='616161') - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh', - } + client_state='aaaa') + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # It's ok to request a shorter-duration token. res = self.app.get('/1.0/sync/1.5?duration=12', headers=headers) self.assertEquals(res.json['duration'], 12) @@ -377,15 +345,13 @@ def test_client_specified_duration(self): # https://github.com/mozilla-services/tokenserver/pull/176 def test_kid_change_during_gradual_tokenserver_rollout(self): # Let's start with a user already in the db, with no keys_changed_at. - uid = self._add_user(generation=1234, client_state='616161', + uid = self._add_user(generation=1234, client_state='aaaa', keys_changed_at=None) user1 = self._get_user(uid) # User hits updated tokenserver node, writing keys_changed_at to db. - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1200-YWFh', - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1200, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # That should not have triggered a node re-assignment. 
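For reference, the `fxa_kid` value asserted in `test_fxa_kid_change` above (`0000000002345-u7s`) follows the same encoding convention in the other direction: the hex client state from the database is decoded back to raw bytes and re-encoded as unpadded URL-safe base64 behind a zero-padded `keys_changed_at`. A hedged Rust sketch follows; the exact formatting call used by the server is an assumption, only the resulting shape is what the test asserts.

    let client_state_bytes = hex::decode("bbbb").unwrap(); // [0xBB, 0xBB]
    let fxa_kid = format!(
        "{:0>13}-{}",
        2345,
        base64::encode_config(client_state_bytes, base64::URL_SAFE_NO_PAD)
    );
    assert_eq!(fxa_kid, "0000000002345-u7s");
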
user2 = self._get_user(res.json['uid']) @@ -395,18 +361,16 @@ def test_kid_change_during_gradual_tokenserver_rollout(self): self.assertEqual(user2['generation'], 1234) self.assertEqual(user2['keys_changed_at'], 1200) # User does a password reset on their Firefox Account. - oauth_token = self._forge_oauth_token(generation=2345) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '2345-YmJi', - } + headers = self._build_auth_headers(generation=2345, + keys_changed_at=2345, + client_state='bbbb') # They sync again, but hit a tokenserver node that isn't updated yet. # This would trigger the allocation of a new user, so we simulate this # by adding a new user. We set keys_changed_at to be the last-used # value, since we are simulating a server that doesn't pay attention # to keys_changed_at. uid = self._add_user(generation=2345, keys_changed_at=1200, - client_state='626262') + client_state='bbbb') user2 = self._get_user(uid) self.assertNotEqual(user1['uid'], user2['uid']) self.assertEqual(user1['nodeid'], user2['nodeid']) @@ -430,26 +394,22 @@ def test_update_client_state(self): seen_uids = set((uid,)) orig_node = user1['nodeid'] # Changing client_state allocates a new user, resulting in a new uid - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YmJi' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers) user2 = self._get_user(res.json['uid']) self.assertTrue(user2['uid'] not in seen_uids) self.assertEqual(user2['nodeid'], orig_node) self.assertEqual(user2['generation'], 1234) self.assertEqual(user2['keys_changed_at'], 1234) - self.assertEqual(user2['client_state'], '626262') + self.assertEqual(user2['client_state'], 'bbbb') seen_uids.add(user2['uid']) # We can change the client state even if no generation is present on # the request - oauth_token = self._forge_oauth_token(generation=None) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-Y2Nj' - } + headers = self._build_auth_headers(generation=None, + keys_changed_at=1235, + client_state='cccc') res = self.app.get('/1.0/sync/1.5', headers=headers) user3 = self._get_user(res.json['uid']) self.assertTrue(user3['uid'] not in seen_uids) @@ -458,14 +418,12 @@ def test_update_client_state(self): # request, generation is set to be the same as keys_changed_at self.assertEqual(user3['generation'], 1235) self.assertEqual(user3['keys_changed_at'], 1235) - self.assertEqual(user3['client_state'], '636363') + self.assertEqual(user3['client_state'], 'cccc') seen_uids.add(user3['uid']) # We cannot change client_state without a change in keys_changed_at - oauth_token = self._forge_oauth_token(generation=None) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-ZGRk' - } + headers = self._build_auth_headers(generation=None, + keys_changed_at=1235, + client_state='dddd') res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-client-state', @@ -480,11 +438,9 @@ def test_update_client_state(self): } self.assertEqual(expected_error_response, res.json) # We cannot use a previously-used client_state - oauth_token = self._forge_oauth_token(generation=1236) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1236-YmJi' - } + headers = self._build_auth_headers(generation=1236, + keys_changed_at=1236, + client_state='bbbb') res = 
self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { 'status': 'invalid-client-state', @@ -502,12 +458,10 @@ def test_update_client_state(self): def test_set_generation_from_no_generation(self): # Add a user that has no generation set uid = self._add_user(generation=0, keys_changed_at=None, - client_state='616161') - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + client_state='aaaa') + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # Send a request to set the generation self.app.get('/1.0/sync/1.5', headers=headers) user = self._get_user(uid) @@ -517,12 +471,10 @@ def test_set_generation_from_no_generation(self): def test_set_keys_changed_at_from_no_keys_changed_at(self): # Add a user that has no keys_changed_at set uid = self._add_user(generation=1234, keys_changed_at=None, - client_state='616161') - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + client_state='aaaa') + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # Send a request to set the keys_changed_at self.app.get('/1.0/sync/1.5', headers=headers) user = self._get_user(uid) @@ -530,43 +482,25 @@ def test_set_keys_changed_at_from_no_keys_changed_at(self): self.assertEqual(user['keys_changed_at'], 1234) def test_x_client_state_must_have_same_client_state_as_key_id(self): - self._add_user(client_state='616161') - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh', - 'X-Client-State': '626262' - } - # If present, the X-Client-State header must have the same client - # state as the X-KeyID header - res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) - expected_error_response = { - 'errors': [ - { - 'description': 'Unauthorized', - 'location': 'body', - 'name': '' - } - ], - 'status': 'invalid-client-state' - } - self.assertEqual(res.json, expected_error_response) - headers['X-Client-State'] = '616161' - res = self.app.get('/1.0/sync/1.5', headers=headers) - - def test_x_key_id_header_required(self): - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token() - } - # A request without an X-KeyID header should fail - res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) - expected_error_response = { - 'errors': [ - { - 'description': 'Missing X-KeyID header', - 'location': 'header', - 'name': '' - } - ], - 'status': 'invalid-key-id' - } - self.assertEqual(res.json, expected_error_response) + if self.AUTH_METHOD == "oauth": + self._add_user(client_state='aaaa') + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') + headers['X-Client-State'] = 'bbbb' + # If present, the X-Client-State header must have the same client + # state as the X-KeyID header + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-client-state' + } + self.assertEqual(res.json, expected_error_response) + headers['X-Client-State'] = 'aaaa' + res = self.app.get('/1.0/sync/1.5', headers=headers) diff --git a/tools/integration_tests/tokenserver/test_browserid.py b/tools/integration_tests/tokenserver/test_browserid.py new file mode 100644 index 
0000000000..0e3b0941a9 --- /dev/null +++ b/tools/integration_tests/tokenserver/test_browserid.py @@ -0,0 +1,476 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. +import json +import unittest +from tokenserver.test_support import TestCase + + +class TestBrowserId(TestCase, unittest.TestCase): + def setUp(self): + super(TestBrowserId, self).setUp() + + def tearDown(self): + super(TestBrowserId, self).tearDown() + + def _build_browserid_fxa_error_response(self, reason, status=200): + body = { + 'body': { + 'status': 'failure' + }, + 'status': status + } + + if reason: + body['body']['reason'] = reason + + return { + 'Authorization': 'BrowserID %s' % json.dumps(body), + 'X-Client-State': 'aaaa' + } + + def test_fxa_returns_status_not_ok(self): + expected_error_response = { + 'status': 'error', + 'errors': [ + { + 'location': 'body', + 'description': 'Resource is not available', + 'name': '' + } + ] + } + # If FxA returns any status code other than 200, the client gets a 503 + headers = self._build_browserid_headers(client_state='aaaa', + status=500) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + headers = self._build_browserid_headers(client_state='aaaa', + status=404) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + headers = self._build_browserid_headers(client_state='aaaa', + status=401) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + headers = self._build_browserid_headers(client_state='aaaa', + status=201) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + def test_fxa_returns_invalid_response(self): + # Craft a response that contains invalid JSON + token = json.dumps({ + 'body': {'test': True}, + 'status': 200, + }).replace('true', '') + headers = { + 'Authorization': 'BrowserID %s' % token, + 'X-Client-State': 'aaaa' + } + expected_error_response = { + 'status': 'error', + 'errors': [ + { + 'location': 'body', + 'description': 'Resource is not available', + 'name': '' + } + ] + } + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + def test_expired_token(self): + expected_error_response = { + 'status': 'invalid-timestamp', + 'errors': [ + { + 'location': 'body', + 'description': 'Unauthorized', + 'name': '' + } + ] + } + # If the FxA response includes "expired" in the reason message, + # the client gets a 401 and a message indicating an invalid timestamp + headers = self._build_browserid_fxa_error_response('assertion expired') + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + # If the FxA response includes "issued later than" in the reason + # message, the client gets a 401 and a message indicating an invalid + # timestamp + headers = self._build_browserid_fxa_error_response('issued later than') + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_other_reason_message(self): + expected_error_response = { + 'status': 'invalid-credentials', + 'errors': [ + { + 'location': 'body', + 'description': 'Unauthorized', + 
'name': '' + } + ] + } + # If the FxA response includes a reason that doesn't indicate an + # invalid timestamp, a generic error is returned + headers = self._build_browserid_fxa_error_response('invalid') + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_missing_reason_message(self): + expected_error_response = { + 'status': 'invalid-credentials', + 'errors': [ + { + 'location': 'body', + 'description': 'Unauthorized', + 'name': '' + } + ] + } + # If the FxA response includes no reason, a generic error is returned + headers = self._build_browserid_fxa_error_response(None) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_issuer_mismatch(self): + expected_error_response = { + 'status': 'invalid-credentials', + 'errors': [ + { + 'location': 'body', + 'description': 'Unauthorized', + 'name': '' + } + ] + } + # If the issuer in the response doesn't match the issuer on + # Tokenserver, a 401 is returned + invalid_issuer = 'invalid.com' + headers = self._build_browserid_headers(client_state='aaaa', + issuer=invalid_issuer) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_fxa_error_response_not_ok(self): + expected_error_response = { + 'status': 'error', + 'errors': [ + { + 'location': 'body', + 'description': 'Resource is not available', + 'name': '' + } + ] + } + # If an FxA error response returns a status other than 200, the client + # gets a 503 error + headers = self._build_browserid_fxa_error_response('bad token', + status=401) + res = self.app.get('/1.0/sync/1.5', headers=headers, status=503) + self.assertEqual(res.json, expected_error_response) + + def test_no_idp_claims(self): + # A response from FxA that does not include idpClaims is still valid + headers = self._build_browserid_headers(client_state='aaaa') + self.app.get('/1.0/sync/1.5', headers=headers, status=200) + + def test_partial_idp_claims(self): + # A response from FxA that includes a partially-filled idpClaims + # object is still valid + headers = self._build_browserid_headers(user='test1', + client_state='aaaa', + generation=1234) + self.app.get('/1.0/sync/1.5', headers=headers, status=200) + + headers = self._build_browserid_headers(user='test2', + client_state='aaaa', + keys_changed_at=1234) + self.app.get('/1.0/sync/1.5', headers=headers, status=200) + + headers = self._build_browserid_headers(user='test3', + client_state='aaaa', + device_id='id') + self.app.get('/1.0/sync/1.5', headers=headers, status=200) + + def test_unverified_token(self): + headers = self._build_browserid_headers(client_state='aaaa', + token_verified=None) + # Assertion should not be rejected if fxa-tokenVerified is unset + self.app.get("/1.0/sync/1.5", headers=headers, status=200) + # Assertion should not be rejected if fxa-tokenVerified is true + headers = self._build_browserid_headers(client_state='aaaa', + token_verified=True) + self.app.get("/1.0/sync/1.5", headers=headers, status=200) + # Assertion should be rejected if fxa-tokenVerified is false + headers = self._build_browserid_headers(client_state='aaaa', + token_verified=False) + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-credentials' + } + self.assertEqual(res.json, 
expected_error_response) + # Assertion should be rejected if fxa-tokenVerified is null + headers['Authorization'] = headers['Authorization'].replace('false', + 'null') + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_credentials_from_oauth_and_browserid(self): + # Send initial credentials via oauth. + oauth_headers = self._build_oauth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') + res1 = self.app.get("/1.0/sync/1.5", headers=oauth_headers) + # Send the same credentials via BrowserID + browserid_headers = self._build_browserid_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') + res2 = self.app.get("/1.0/sync/1.5", headers=browserid_headers) + # They should get the same node assignment. + self.assertEqual(res1.json["uid"], res2.json["uid"]) + self.assertEqual(res1.json["api_endpoint"], res2.json["api_endpoint"]) + # Earlier generation number via BrowserID -> invalid-generation + browserid_headers = self._build_browserid_headers(generation=1233, + keys_changed_at=1234, + client_state='aaaa') + res = self.app.get("/1.0/sync/1.5", headers=browserid_headers, + status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-generation' + } + self.assertEqual(res.json, expected_error_response) + # Earlier keys_changed_at via BrowserID is not accepted. + browserid_headers = self._build_browserid_headers(generation=1234, + keys_changed_at=1233, + client_state='aaaa') + res = self.app.get("/1.0/sync/1.5", headers=browserid_headers, + status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-keysChangedAt' + } + self.assertEqual(res.json, expected_error_response) + # Earlier generation number via OAuth -> invalid-generation + oauth_headers = self._build_oauth_headers(generation=1233, + keys_changed_at=1234, + client_state='aaaa') + res = self.app.get("/1.0/sync/1.5", headers=oauth_headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-generation' + } + self.assertEqual(res.json, expected_error_response) + # Earlier keys_changed_at via OAuth is not accepted. + oauth_headers = self._build_oauth_headers(generation=1234, + keys_changed_at=1233, + client_state='aaaa') + res = self.app.get("/1.0/sync/1.5", headers=oauth_headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-keysChangedAt' + } + self.assertEqual(res.json, expected_error_response) + # Change client-state via BrowserID. + browserid_headers = self._build_browserid_headers(generation=1235, + keys_changed_at=1235, + client_state='bbbb') + res1 = self.app.get("/1.0/sync/1.5", headers=browserid_headers) + # Previous OAuth creds are rejected due to keys_changed_at update. 
+ oauth_headers = self._build_oauth_headers(generation=1235, + keys_changed_at=1234, + client_state='bbbb') + res = self.app.get("/1.0/sync/1.5", headers=oauth_headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-keysChangedAt' + } + self.assertEqual(res.json, expected_error_response) + # Previous OAuth creds are rejected due to generation update. + oauth_headers = self._build_oauth_headers(generation=1234, + keys_changed_at=1235, + client_state='bbbb') + res = self.app.get("/1.0/sync/1.5", headers=oauth_headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-generation' + } + self.assertEqual(res.json, expected_error_response) + # Updated OAuth creds are accepted. + oauth_headers = self._build_oauth_headers(generation=1235, + keys_changed_at=1235, + client_state='bbbb') + res2 = self.app.get("/1.0/sync/1.5", headers=oauth_headers) + # They should again get the same node assignment. + self.assertEqual(res1.json["uid"], res2.json["uid"]) + self.assertEqual(res1.json["api_endpoint"], + res2.json["api_endpoint"]) + + def test_null_idp_claims(self): + headers = self._build_browserid_headers(generation=1234, + client_state='aaaa') + headers['Authorization'] = headers['Authorization'].replace('1234', + 'null') + # A null fxa-generation claim results in a 401 + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-generation' + } + self.assertEqual(res.json, expected_error_response) + # A null fxa-keysChangedAt claim results in a 401 + headers = self._build_browserid_headers(keys_changed_at=1234, + client_state='aaaa') + headers['Authorization'] = headers['Authorization'].replace('1234', + 'null') + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'invalid keysChangedAt', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-credentials' + } + self.assertEqual(res.json, expected_error_response) + # A null fxa-tokenVerified claim results in a 401 + headers = self._build_browserid_headers(token_verified=True, + client_state='aaaa') + headers['Authorization'] = headers['Authorization'].replace('true', + 'null') + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-credentials' + } + self.assertEqual(res.json, expected_error_response) + headers = self._build_browserid_headers(device_id="device id", + client_state='aaaa') + headers['Authorization'] = \ + headers['Authorization'].replace('"device id"', 'null') + # A null fxa-deviceId claim is acceptable + self.app.get("/1.0/sync/1.5", headers=headers) + + def test_uid_and_kid(self): + browserid_headers = self._build_browserid_headers(user='testuser', + generation=1234, + keys_changed_at=1233, + client_state='aaaa') + res = self.app.get("/1.0/sync/1.5", headers=browserid_headers) + token = self.unsafelyParseToken(res.json["id"]) + self.assertEqual(token["uid"], res.json["uid"]) + self.assertEqual(token["fxa_uid"], "testuser") + self.assertEqual(token["fxa_kid"], "0000000001233-qqo") + self.assertNotEqual(token["hashed_fxa_uid"], 
token["fxa_uid"]) + self.assertEqual(token["hashed_fxa_uid"], res.json["hashed_fxa_uid"]) + self.assertIn("hashed_device_id", token) + + def test_generation_number_change(self): + headers = self._build_browserid_headers(client_state="aaaa") + # Start with no generation number. + res1 = self.app.get("/1.0/sync/1.5", headers=headers) + # Now send an explicit generation number. + # The node assignment should not change. + headers = self._build_browserid_headers(generation=1234, + client_state="aaaa") + res2 = self.app.get("/1.0/sync/1.5", headers=headers) + self.assertEqual(res1.json["uid"], res2.json["uid"]) + self.assertEqual(res1.json["api_endpoint"], res2.json["api_endpoint"]) + # Clients that don't report generation number are still allowed. + headers = self._build_browserid_headers(client_state="aaaa") + res2 = self.app.get("/1.0/sync/1.5", headers=headers) + self.assertEqual(res1.json["uid"], res2.json["uid"]) + headers = self._build_browserid_headers(device_id="nonsense", + client_state="aaaa") + headers['Authorization'] = \ + headers['Authorization'].replace("fxa-deviceId", "nonsense") + res2 = self.app.get("/1.0/sync/1.5", headers=headers) + self.assertEqual(res1.json["uid"], res2.json["uid"]) + # But previous generation numbers get an invalid-generation response. + headers = self._build_browserid_headers(generation=1233, + client_state="aaaa") + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + self.assertEqual(res.json["status"], "invalid-generation") + # Equal generation numbers are accepted. + headers = self._build_browserid_headers(generation=1234, + client_state="aaaa") + res2 = self.app.get("/1.0/sync/1.5", headers=headers) + self.assertEqual(res1.json["uid"], res2.json["uid"]) + self.assertEqual(res1.json["api_endpoint"], res2.json["api_endpoint"]) + # Later generation numbers are accepted. + # Again, the node assignment should not change. + headers = self._build_browserid_headers(generation=1235, + client_state="aaaa") + res2 = self.app.get("/1.0/sync/1.5", headers=headers) + self.assertEqual(res1.json["uid"], res2.json["uid"]) + self.assertEqual(res1.json["api_endpoint"], res2.json["api_endpoint"]) + # And that should lock out the previous generation number + headers = self._build_browserid_headers(generation=1234, + client_state="aaaa") + res = self.app.get("/1.0/sync/1.5", headers=headers, status=401) + self.assertEqual(res.json["status"], "invalid-generation") diff --git a/tools/integration_tests/tokenserver/test_e2e.py b/tools/integration_tests/tokenserver/test_e2e.py index fd0b62f030..14d6a0ab9c 100644 --- a/tools/integration_tests/tokenserver/test_e2e.py +++ b/tools/integration_tests/tokenserver/test_e2e.py @@ -14,7 +14,6 @@ from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.backends import default_backend -from fxa.tools.bearer import get_bearer_token from fxa.core import Client from fxa.oauth import Client as OAuthClient from fxa.tests.utils import TestEmailAccount @@ -24,6 +23,7 @@ # This is the client ID used for Firefox Desktop. The FxA team confirmed that # this is the proper client ID to be using for these integration tests. 
+BROWSERID_AUDIENCE = "https://token.stage.mozaws.net" CLIENT_ID = '5882386c6d801776' DEFAULT_TOKEN_DURATION = 300 FXA_ACCOUNT_STAGE_HOST = 'https://api-accounts.stage.mozaws.net' @@ -63,6 +63,8 @@ def setUpClass(cls): cls.session.verify_email_code(m['headers']['x-verify-code']) # Create an OAuth token to be used for the end-to-end tests cls.oauth_token = cls.oauth_client.authorize_token(cls.session, SCOPE) + cls.browserid_assertion = \ + cls.session.get_identity_assertion(BROWSERID_AUDIENCE) @classmethod def tearDownClass(cls): @@ -75,15 +77,13 @@ def _generate_password(): return ''.join(random.choice(PASSWORD_CHARACTERS) for i in r) - def _get_token_with_bad_scope(self): + def _get_oauth_token_with_bad_scope(self): bad_scope = 'bad_scope' + return self.oauth_client.authorize_token(self.session, bad_scope) - return get_bearer_token(TestE2e.acct.email, - TestE2e.fxa_password, - scopes=[bad_scope], - account_server_url=FXA_ACCOUNT_STAGE_HOST, - oauth_server_url=FXA_OAUTH_STAGE_HOST, - client_id=CLIENT_ID) + def _get_browserid_assertion_with_bad_audience(self): + bad_audience = 'badaudience.com' + return self.session.get_identity_assertion(bad_audience) def _get_bad_token(self): key = rsa.generate_private_key(backend=default_backend(), @@ -103,15 +103,17 @@ def _get_bad_token(self): return jwt.encode(claims, private_key, algorithm='RS256') + def _extract_keys_changed_at_from_assertion(self, assertion): + token = assertion.split('~')[-2] + claims = jwt.decode(token, options={"verify_signature": False}) + + return claims['fxa-keysChangedAt'] + @classmethod def _change_password(cls): new_password = cls._generate_password() cls.session.change_password(cls.fxa_password, new_password) cls.fxa_password = new_password - # Refresh the session - cls.session = cls.client.login(cls.acct.email, cls.fxa_password) - # Refresh the OAuth token - cls.oauth_token = cls.oauth_client.authorize_token(cls.session, SCOPE) # Adapted from the original Tokenserver: # https://github.com/mozilla-services/tokenserver/blob/master/tokenserver/util.py#L24 @@ -121,11 +123,11 @@ def _fxa_metrics_hash(self, value): hasher.update(value.encode('utf-8')) return hasher.hexdigest() - def test_unauthorized_error_status(self): + def test_unauthorized_oauth_error_status(self): # Totally busted auth -> generic error. 
headers = { 'Authorization': 'Unsupported-Auth-Scheme IHACKYOU', - 'X-KeyID': '1234-YWFh' + 'X-KeyID': '1234-qqo' } res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) expected_error_response = { @@ -142,7 +144,7 @@ def test_unauthorized_error_status(self): token = self._get_bad_token() headers = { 'Authorization': 'Bearer %s' % token, - 'X-KeyID': '1234-YWFh' + 'X-KeyID': '1234-qqo' } # Bad token -> 'invalid-credentials' res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) @@ -158,19 +160,47 @@ def test_unauthorized_error_status(self): } self.assertEqual(res.json, expected_error_response) # Untrusted scopes -> 'invalid-credentials' - token = self._get_token_with_bad_scope() + token = self._get_oauth_token_with_bad_scope() headers = { 'Authorization': 'Bearer %s' % token, - 'X-KeyID': '1234-YWFh' + 'X-KeyID': '1234-qqo' + } + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + self.assertEqual(res.json, expected_error_response) + + def test_unauthorized_browserid_error_status(self): + assertion = self._get_bad_token() + headers = { + 'Authorization': 'BrowserID %s' % assertion, + 'X-Client-State': 'aaaa', + } + # Bad assertion -> 'invalid-credentials' + res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) + expected_error_response = { + 'errors': [ + { + 'description': 'Unauthorized', + 'location': 'body', + 'name': '' + } + ], + 'status': 'invalid-credentials' + } + self.assertEqual(res.json, expected_error_response) + # Bad audience -> 'invalid-credentials' + assertion = self._get_browserid_assertion_with_bad_audience() + headers = { + 'Authorization': 'BrowserID %s' % assertion, + 'X-Client-State': 'aaaa', } res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) self.assertEqual(res.json, expected_error_response) - def test_valid_request(self): + def test_valid_oauth_request(self): oauth_token = self.oauth_token headers = { 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' + 'X-KeyID': '1234-qqo' } # Send a valid request, allocating a new user res = self.app.get('/1.0/sync/1.5', headers=headers) @@ -206,3 +236,66 @@ def test_valid_request(self): self.assertEqual(res.json['hashed_fxa_uid'], self._fxa_metrics_hash(fxa_uid)[:32]) self.assertEqual(res.json['node_type'], 'spanner') + + token = self.unsafelyParseToken(res.json['id']) + self.assertIn('hashed_device_id', token) + self.assertEqual(token["uid"], res.json["uid"]) + self.assertEqual(token["fxa_uid"], fxa_uid) + self.assertEqual(token["fxa_kid"], "0000000001234-qqo") + self.assertNotEqual(token["hashed_fxa_uid"], token["fxa_uid"]) + self.assertEqual(token["hashed_fxa_uid"], res.json["hashed_fxa_uid"]) + self.assertIn("hashed_device_id", token) + + def test_valid_browserid_request(self): + assertion = self.browserid_assertion + headers = { + 'Authorization': 'BrowserID %s' % assertion, + 'X-Client-State': 'aaaa' + } + # Send a valid request, allocating a new user + res = self.app.get('/1.0/sync/1.5', headers=headers) + fxa_uid = self.session.uid + # Retrieve the user from the database + user = self._get_user(res.json['uid']) + # First, let's verify that the token we received is valid. To do this, + # we can unpack the hawk header ID into the payload and its signature + # and then construct a tokenlib token to compute the signature + # ourselves. To obtain a matching signature, we use the same secret as + # is used by Tokenserver. 
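# (Concretely: the decoded hawk id is <JSON payload><32-byte HMAC-SHA256 tag>,
#  so the unpacking below takes the last 32 bytes as the signature and
#  everything before them as the payload.)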
+ raw = urlsafe_b64decode(res.json['id']) + payload = raw[:-32] + signature = raw[-32:] + payload_dict = json.loads(payload.decode('utf-8')) + + signing_secret = self.TOKEN_SIGNING_SECRET + expected_token = tokenlib.make_token(payload_dict, + secret=signing_secret) + expected_signature = urlsafe_b64decode(expected_token)[-32:] + # Using the #compare_digest method here is not strictly necessary, as + # this is not a security-sensitive situation, but it's good practice + self.assertTrue(hmac.compare_digest(expected_signature, signature)) + # Check that the given key is a secret derived from the hawk ID + expected_secret = tokenlib.get_derived_secret( + res.json['id'], secret=signing_secret) + self.assertEqual(res.json['key'], expected_secret) + # Check to make sure the remainder of the fields are valid + self.assertEqual(res.json['uid'], user['uid']) + self.assertEqual(res.json['api_endpoint'], + '%s/1.5/%s' % (self.NODE_URL, user['uid'])) + self.assertEqual(res.json['duration'], DEFAULT_TOKEN_DURATION) + self.assertEqual(res.json['hashalg'], 'sha256') + self.assertEqual(res.json['hashed_fxa_uid'], + self._fxa_metrics_hash(fxa_uid)[:32]) + self.assertEqual(res.json['node_type'], 'spanner') + + token = self.unsafelyParseToken(res.json['id']) + self.assertIn('hashed_device_id', token) + self.assertEqual(token["uid"], res.json["uid"]) + self.assertEqual(token["fxa_uid"], fxa_uid) + assertion = self.browserid_assertion + keys_changed_at = \ + self._extract_keys_changed_at_from_assertion(assertion) + self.assertEqual(token["fxa_kid"], "%s-qqo" % str(keys_changed_at)) + self.assertNotEqual(token["hashed_fxa_uid"], token["fxa_uid"]) + self.assertEqual(token["hashed_fxa_uid"], res.json["hashed_fxa_uid"]) + self.assertIn("hashed_device_id", token) diff --git a/tools/integration_tests/tokenserver/test_misc.py b/tools/integration_tests/tokenserver/test_misc.py index 605f535a4d..38c1bdb7a1 100644 --- a/tools/integration_tests/tokenserver/test_misc.py +++ b/tools/integration_tests/tokenserver/test_misc.py @@ -16,10 +16,9 @@ def tearDown(self): super(TestMisc, self).tearDown() def test_unknown_app(self): - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/xXx/token', headers=headers, status=404) expected_error_response = { 'errors': [ @@ -34,10 +33,9 @@ def test_unknown_app(self): self.assertEqual(res.json, expected_error_response) def test_unknown_version(self): - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.2', headers=headers, status=404) expected_error_response = { 'errors': [ @@ -53,10 +51,9 @@ def test_unknown_version(self): def test_valid_app(self): self._add_user() - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) self.assertIn('https://example.com/1.5', res.json['api_endpoint']) self.assertIn('duration', res.json) @@ -70,11 +67,9 @@ def test_current_user_is_the_most_up_to_date(self): uid = self._add_user(generation=1236, created_at=1233) # Users are sorted by (generation, created_at), so the fourth user # record is considered to be 
the current user - oauth_token = self._forge_oauth_token(generation=1236) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1236, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) self.assertEqual(res.json['uid'], uid) @@ -87,11 +82,9 @@ def test_user_creation_when_most_current_user_is_replaced(self): seen_uids = [uid1, uid2, uid3] # Because the current user (the one with uid3) has been replaced, a new # user record is created - oauth_token = self._forge_oauth_token(generation=1237) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1237-YWFh' - } + headers = self._build_auth_headers(generation=1237, + keys_changed_at=1237, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) self.assertNotIn(res.json['uid'], seen_uids) @@ -101,11 +94,9 @@ def test_old_users_marked_as_replaced_in_race_recovery(self): uid2 = self._add_user(generation=1235, created_at=1235) uid3 = self._add_user(generation=1236, created_at=1240) # Make a request - oauth_token = self._forge_oauth_token(generation=1236) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1236-YWFh' - } + headers = self._build_auth_headers(generation=1236, + keys_changed_at=1236, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # uid3 is associated with the current user self.assertEqual(res.json['uid'], uid3) @@ -119,14 +110,12 @@ def test_old_users_marked_as_replaced_in_race_recovery(self): def test_user_updates_with_new_client_state(self): # Start with a single user in the database uid = self._add_user(generation=1234, keys_changed_at=1234, - client_state='616161') + client_state='aaaa') # Send a request, updating the generation, keys_changed_at, and # client_state - oauth_token = self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YmJi' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1235, + client_state='bbbb') res = self.app.get('/1.0/sync/1.5', headers=headers) # A new user should have been created self.assertEqual(self._count_users(), 2) @@ -136,12 +125,12 @@ def test_user_updates_with_new_client_state(self): user = self._get_user(res.json['uid']) self.assertEqual(user['generation'], 1235) self.assertEqual(user['keys_changed_at'], 1235) - self.assertEqual(user['client_state'], '626262') + self.assertEqual(user['client_state'], 'bbbb') # The old user record should not have the updated values user = self._get_user(uid) self.assertEqual(user['generation'], 1234) self.assertEqual(user['keys_changed_at'], 1234) - self.assertEqual(user['client_state'], '616161') + self.assertEqual(user['client_state'], 'aaaa') # Get all the replaced users email = 'test@%s' % self.FXA_EMAIL_DOMAIN replaced_users = self._get_replaced_users(self.service_id, @@ -153,18 +142,16 @@ def test_user_updates_with_new_client_state(self): replaced_user = replaced_users[0] self.assertEqual(replaced_user['generation'], 1234) self.assertEqual(replaced_user['keys_changed_at'], 1234) - self.assertEqual(replaced_user['client_state'], '616161') + self.assertEqual(replaced_user['client_state'], 'aaaa') def test_user_updates_with_same_client_state(self): # Start with a single user in the database uid = self._add_user(generation=1234, keys_changed_at=1234) # Send a request, updating the generation and keys_changed_at but not # the client state - oauth_token = 
self._forge_oauth_token(generation=1235) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1235-YWFh' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1235, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # A new user should not have been created self.assertEqual(self._count_users(), 1) @@ -177,11 +164,9 @@ def test_user_updates_with_same_client_state(self): def test_retired_users_can_make_requests(self): # Add a retired user to the database self._add_user(generation=MAX_GENERATION) - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1235, + keys_changed_at=1234, + client_state='aaaa') # Retired users cannot make requests with a generation smaller than # the max generation res = self.app.get('/1.0/sync/1.5', headers=headers, status=401) @@ -198,18 +183,17 @@ def test_retired_users_can_make_requests(self): self.assertEqual(res.json, expected_error_response) # Retired users can make requests with a generation number equal to # the max generation - oauth_token = self._forge_oauth_token(generation=MAX_GENERATION) - headers['Authorization'] = 'Bearer %s' % oauth_token + headers = self._build_auth_headers(generation=MAX_GENERATION, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers) def test_replaced_users_can_make_requests(self): # Add a replaced user to the database self._add_user(generation=1234, created_at=1234, replaced_at=1234) - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # Replaced users can make requests self.app.get('/1.0/sync/1.5', headers=headers) @@ -217,14 +201,10 @@ def test_retired_users_with_no_node_cannot_make_requests(self): # Add a retired user to the database invalid_node_id = self.NODE_ID + 1 self._add_user(generation=MAX_GENERATION, nodeid=invalid_node_id) - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } # Retired users without a node cannot make requests - oauth_token = self._forge_oauth_token(generation=MAX_GENERATION) - headers['Authorization'] = 'Bearer %s' % oauth_token + headers = self._build_auth_headers(generation=MAX_GENERATION, + keys_changed_at=1234, + client_state='aaaa') self.app.get('/1.0/sync/1.5', headers=headers, status=500) def test_replaced_users_with_no_node_can_make_requests(self): @@ -232,11 +212,9 @@ def test_replaced_users_with_no_node_can_make_requests(self): invalid_node_id = self.NODE_ID + 1 self._add_user(created_at=1234, replaced_at=1234, nodeid=invalid_node_id) - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # Replaced users without a node can make requests res = self.app.get('/1.0/sync/1.5', headers=headers) user = self._get_user(res.json['uid']) diff --git a/tools/integration_tests/tokenserver/test_node_assignment.py b/tools/integration_tests/tokenserver/test_node_assignment.py index f6a0c78300..f21eb3f8af 100644 --- a/tools/integration_tests/tokenserver/test_node_assignment.py +++ 
b/tools/integration_tests/tokenserver/test_node_assignment.py @@ -19,11 +19,9 @@ def test_user_creation(self): self._add_node(available=1, node='https://node2') self._add_node(available=5, node='https://node3') # Send a request from an unseen user - oauth_token = self._forge_oauth_token(generation=1234) - headers = { - 'Authorization': 'Bearer %s' % oauth_token, - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # Ensure a single user was created self.assertEqual(self._count_users(), 1) @@ -31,7 +29,7 @@ def test_user_creation(self): user1 = self._get_user(res.json['uid']) self.assertEqual(user1['generation'], 1234) self.assertEqual(user1['keys_changed_at'], 1234) - self.assertEqual(user1['client_state'], '616161') + self.assertEqual(user1['client_state'], 'aaaa') self.assertEqual(user1['nodeid'], self.NODE_ID) self.assertEqual(user1['service'], self.service_id) # Ensure the 'available' and 'current_load' counts on the node @@ -59,10 +57,9 @@ def test_new_user_allocation(self): node='https://node4') self._add_node(available=97, current_load=3, capacity=100, node='https://node5') - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # The user should have been allocated to the least-loaded node # (computed as current_load / capacity) that has backoff and downed @@ -90,10 +87,9 @@ def test_successfully_releasing_node_capacity(self): node='https://node4', backoff=1) node_id5 = self._add_node(available=0, current_load=60, capacity=61, node='https://node5', downed=1) - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') res = self.app.get('/1.0/sync/1.5', headers=headers) # Since every node has no available spots, capacity is added to each # node according to the equation @@ -130,10 +126,9 @@ def test_unsuccessfully_releasing_node_capacity(self): node='https://node2') self._add_node(available=0, current_load=80, capacity=80, node='https://node3') - headers = { - 'Authorization': 'Bearer %s' % self._forge_oauth_token(), - 'X-KeyID': '1234-YWFh' - } + headers = self._build_auth_headers(generation=1234, + keys_changed_at=1234, + client_state='aaaa') # All of these nodes are completely full, and no capacity can be # released self.app.get('/1.0/sync/1.5', headers=headers, status=503) diff --git a/tools/integration_tests/tokenserver/test_support.py b/tools/integration_tests/tokenserver/test_support.py index 85b7a300e1..95614514f0 100644 --- a/tools/integration_tests/tokenserver/test_support.py +++ b/tools/integration_tests/tokenserver/test_support.py @@ -2,6 +2,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this file, # You can obtain one at http://mozilla.org/MPL/2.0/. 
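# A note on the helpers that follow: apart from the protocol-specific modules,
# every test above builds credentials through self._build_auth_headers(...),
# and the TestCase mixin below binds that name to either _build_oauth_headers
# or _build_browserid_headers based on the TOKENSERVER_AUTH_METHOD environment
# variable (default 'oauth'). Since AUTH_METHOD is read when the class body is
# executed, one process runs the suite as a single protocol; covering both
# flows means running the suite once per method. A distilled sketch of the
# dispatch (the class name and method bodies here are illustrative stubs):
import os


class AuthDispatchSketch:
    AUTH_METHOD = os.environ.get('TOKENSERVER_AUTH_METHOD', 'oauth')

    def setUp(self):
        if self.AUTH_METHOD == 'browserid':
            self._build_auth_headers = self._build_browserid_headers
        else:
            self._build_auth_headers = self._build_oauth_headers

    def _build_oauth_headers(self, **claims):
        return {'Authorization': 'Bearer <mock>', 'X-KeyID': '<kid>'}

    def _build_browserid_headers(self, **claims):
        return {'Authorization': 'BrowserID <mock>', 'X-Client-State': '<cs>'}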
from base64 import urlsafe_b64encode as b64encode +import binascii import json import os import math @@ -12,8 +13,12 @@ from tokenlib.utils import decode_token_bytes from webtest import TestApp +DEFAULT_OAUTH_SCOPE = 'https://identity.mozilla.com/apps/oldsync' + class TestCase: + AUTH_METHOD = os.environ.get('TOKENSERVER_AUTH_METHOD', 'oauth') + BROWSERID_ISSUER = os.environ['SYNC_TOKENSERVER__FXA_BROWSERID_ISSUER'] FXA_EMAIL_DOMAIN = 'api-accounts.stage.mozaws.net' FXA_METRICS_HASH_SECRET = 'secret0' NODE_ID = 800 @@ -24,7 +29,7 @@ class TestCase: def setUp(self): engine = create_engine(os.environ['SYNC_TOKENSERVER__DATABASE_URL']) self.database = engine. \ - execution_options(isolation_level="AUTOCOMMIT"). \ + execution_options(isolation_level='AUTOCOMMIT'). \ connect() host_url = urlparse.urlparse(self.TOKENSERVER_HOST) @@ -35,6 +40,12 @@ def setUp(self): 'REMOTE_ADDR': '127.0.0.1', 'SCRIPT_NAME': host_url.path, }) + + if self.AUTH_METHOD == 'browserid': + self._build_auth_headers = self._build_browserid_headers + else: + self._build_auth_headers = self._build_oauth_headers + # Start each test with a blank slate. cursor = self._execute_sql(('DELETE FROM users'), ()) cursor.close() @@ -60,20 +71,65 @@ def tearDown(self): self.database.close() - def _forge_oauth_token(self, generation=None, sub='test', scope='scope'): + def _build_oauth_headers(self, generation=None, user='test', + keys_changed_at=None, client_state=None, + status=200): + claims = { + 'user': user, + 'generation': generation, + 'client_id': 'fake client id', + 'scope': [DEFAULT_OAUTH_SCOPE], + } + body = { + 'body': claims, + 'status': status + } + + headers = {} + headers['Authorization'] = 'Bearer %s' % json.dumps(body) + client_state = binascii.unhexlify(client_state) + client_state = b64encode(client_state).strip(b'=').decode('utf-8') + headers['X-KeyID'] = '%s-%s' % (keys_changed_at, client_state) + + return headers + + def _build_browserid_headers(self, generation=None, user='test', + keys_changed_at=None, client_state=None, + issuer=BROWSERID_ISSUER, device_id=None, + token_verified=None, status=200): claims = { - 'fxa-generation': generation, - 'sub': sub, - 'client_id': 'client ID', - 'scope': scope, - 'fxa-profileChangedAt': None + 'status': 'okay', + 'email': '%s@%s' % (user, self.FXA_EMAIL_DOMAIN), + 'issuer': issuer } - header = b64encode(b'{}').strip(b'=').decode('utf-8') - claims = b64encode(json.dumps(claims).encode('utf-8')) \ - .strip(b'=').decode('utf-8') - signature = b64encode(b'signature').strip(b'=').decode('utf-8') - return '%s.%s.%s' % (header, claims, signature) + if device_id or generation or keys_changed_at or \ + token_verified is not None: + idp_claims = {} + + if device_id: + idp_claims['fxa-deviceId'] = device_id + + if generation: + idp_claims['fxa-generation'] = generation + + if keys_changed_at: + idp_claims['fxa-keysChangedAt'] = keys_changed_at + + if token_verified is not None: + idp_claims['fxa-tokenVerified'] = token_verified + + claims['idpClaims'] = idp_claims + + body = { + 'body': claims, + 'status': status, + } + + return { + 'Authorization': 'BrowserID %s' % json.dumps(body), + 'X-Client-State': client_state + } def _add_node(self, capacity=100, available=100, node=NODE_URL, id=None, current_load=0, backoff=0, downed=0): @@ -126,7 +182,7 @@ def _add_service(self, service_name, pattern): return self._last_insert_id() - def _add_user(self, email=None, generation=1234, client_state='616161', + def _add_user(self, email=None, generation=1234, client_state='aaaa', created_at=None, 
nodeid=NODE_ID, keys_changed_at=1234, replaced_at=None): query = ''' diff --git a/tools/tokenserver/test_database.py b/tools/tokenserver/test_database.py index 83d1dae920..065b7a8d4d 100644 --- a/tools/tokenserver/test_database.py +++ b/tools/tokenserver/test_database.py @@ -112,50 +112,50 @@ def test_update_client_state(self): orig_node = user['node'] # Changing client-state allocates a new userid. - self.database.update_user(user, client_state='aaa') + self.database.update_user(user, client_state='aaaa') self.assertTrue(user['uid'] not in seen_uids) self.assertEqual(user['node'], orig_node) self.assertEqual(user['generation'], 0) - self.assertEqual(user['client_state'], 'aaa') + self.assertEqual(user['client_state'], 'aaaa') self.assertEqual(set(user['old_client_states']), set(('',))) user = self.database.get_user('test1@example.com') self.assertTrue(user['uid'] not in seen_uids) self.assertEqual(user['node'], orig_node) self.assertEqual(user['generation'], 0) - self.assertEqual(user['client_state'], 'aaa') + self.assertEqual(user['client_state'], 'aaaa') self.assertEqual(set(user['old_client_states']), set(('',))) seen_uids.add(user['uid']) # It's possible to change client-state and generation at once. self.database.update_user(user, - client_state='bbb', generation=12) + client_state='bbbb', generation=12) self.assertTrue(user['uid'] not in seen_uids) self.assertEqual(user['node'], orig_node) self.assertEqual(user['generation'], 12) - self.assertEqual(user['client_state'], 'bbb') - self.assertEqual(set(user['old_client_states']), set(('', 'aaa'))) + self.assertEqual(user['client_state'], 'bbbb') + self.assertEqual(set(user['old_client_states']), set(('', 'aaaa'))) user = self.database.get_user('test1@example.com') self.assertTrue(user['uid'] not in seen_uids) self.assertEqual(user['node'], orig_node) self.assertEqual(user['generation'], 12) - self.assertEqual(user['client_state'], 'bbb') - self.assertEqual(set(user['old_client_states']), set(('', 'aaa'))) + self.assertEqual(user['client_state'], 'bbbb') + self.assertEqual(set(user['old_client_states']), set(('', 'aaaa'))) # You can't got back to an old client_state. orig_uid = user['uid'] with self.assertRaises(Exception): self.database.update_user(user, - client_state='aaa') + client_state='aaaa') user = self.database.get_user('test1@example.com') self.assertEqual(user['uid'], orig_uid) self.assertEqual(user['node'], orig_node) self.assertEqual(user['generation'], 12) - self.assertEqual(user['client_state'], 'bbb') - self.assertEqual(set(user['old_client_states']), set(('', 'aaa'))) + self.assertEqual(user['client_state'], 'bbbb') + self.assertEqual(set(user['old_client_states']), set(('', 'aaaa'))) def test_user_retirement(self): self.database.allocate_user('test@mozilla.com') @@ -173,17 +173,17 @@ def test_cleanup_of_old_records(self): # users are created with the same timestamp, it can lead to a # situation where two active user records exist for a single email. 
time.sleep(0.1) - self.database.update_user(user1, client_state='a') + self.database.update_user(user1, client_state='aaaa') time.sleep(0.1) - self.database.update_user(user1, client_state='b') + self.database.update_user(user1, client_state='bbbb') time.sleep(0.1) - self.database.update_user(user1, client_state='c') + self.database.update_user(user1, client_state='cccc') time.sleep(0.1) break_time = time.time() time.sleep(0.1) - self.database.update_user(user1, client_state='d') + self.database.update_user(user1, client_state='dddd') time.sleep(0.1) - self.database.update_user(user1, client_state='e') + self.database.update_user(user1, client_state='eeee') time.sleep(0.1) records = list(self.database.get_user_records(email1)) self.assertEqual(len(records), 6) @@ -191,9 +191,9 @@ def test_cleanup_of_old_records(self): email2 = 'test2@mozilla.com' user2 = self.database.allocate_user(email2) time.sleep(0.1) - self.database.update_user(user2, client_state='a') + self.database.update_user(user2, client_state='aaaa') time.sleep(0.1) - self.database.update_user(user2, client_state='b') + self.database.update_user(user2, client_state='bbbb') time.sleep(0.1) records = list(self.database.get_user_records(email2)) self.assertEqual(len(records), 3) @@ -223,7 +223,7 @@ def test_node_reassignment_when_records_are_replaced(self): self.database.allocate_user('test@mozilla.com', generation=42, keys_changed_at=12, - client_state='aaa') + client_state='aaaa') user1 = self.database.get_user('test@mozilla.com') self.database.replace_user_records('test@mozilla.com') user2 = self.database.get_user('test@mozilla.com') @@ -236,7 +236,7 @@ def test_node_reassignment_when_records_are_replaced(self): def test_node_reassignment_not_done_for_retired_users(self): self.database.allocate_user('test@mozilla.com', - generation=42, client_state='aaa') + generation=42, client_state='aaaa') user1 = self.database.get_user('test@mozilla.com') self.database.retire_user('test@mozilla.com') user2 = self.database.get_user('test@mozilla.com') @@ -438,7 +438,7 @@ def test_count_users(self): user = self.database.allocate_user('rfkelly@mozilla.com') self.assertEqual(self.database.count_users(), 2) # Updating a user doesn't change the count. - self.database.update_user(user, client_state='aaa') + self.database.update_user(user, client_state='aaaa') self.assertEqual(self.database.count_users(), 2) # Looking back in time doesn't count newer users. self.assertEqual(self.database.count_users(old_timestamp), 1) @@ -454,7 +454,7 @@ def test_first_seen_at(self): self.assertEqual(user1['first_seen_at'], user0['first_seen_at']) # It should stay consistent if we re-allocate the user's node. time.sleep(0.1) - self.database.update_user(user1, client_state='aaa') + self.database.update_user(user1, client_state='aaaa') user2 = self.database.get_user(EMAIL) self.assertNotEqual(user2['uid'], user0['uid']) self.assertEqual(user2['first_seen_at'], user0['first_seen_at'])