From a8b64b743985bb4b8a1ec91eb9826c60f0eb27b3 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Mon, 23 Sep 2024 16:46:42 -0700 Subject: [PATCH 01/25] feat: Add database tracking and report for Push Reliability This PR introduces tracking throughput for the database. It also introduces the PushReliability reporting skeleton. This will be fleshed out with full reporting later. Closes: #SYNC-4324 --- .../autoconnect-common/src/protocol.rs | 2 + .../autoconnect-settings/src/app_state.rs | 1 + autoconnect/autoconnect-web/Cargo.toml | 2 + autoconnect/autoconnect-ws/Cargo.toml | 2 + .../autoconnect-ws-sm/Cargo.toml | 2 + autoendpoint/src/extractors/notification.rs | 7 ++ autoendpoint/src/extractors/subscription.rs | 19 +++-- autoendpoint/src/routers/common.rs | 8 +- autoendpoint/src/server.rs | 6 +- autoendpoint/src/settings.rs | 31 +++++--- autopush-common/Cargo.toml | 2 +- .../src/db/bigtable/bigtable_client/mod.rs | 17 +++++ autopush-common/src/db/mod.rs | 5 ++ autopush-common/src/notification.rs | 2 + autopush-common/src/reliability.rs | 74 +++++++++++++++++++ scripts/convert_pem_to_x962.py | 2 +- .../integration/test_integration_all_rust.py | 41 +++++++++- 17 files changed, 195 insertions(+), 28 deletions(-) create mode 100644 autopush-common/src/reliability.rs diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 7936a5d60..1707e193a 100644 --- a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -85,6 +85,8 @@ pub struct ClientAck { #[serde(rename = "channelID")] pub channel_id: Uuid, pub version: String, + #[serde(default)] + pub reliability_id: Option, } #[derive(Debug, Serialize)] diff --git a/autoconnect/autoconnect-settings/src/app_state.rs b/autoconnect/autoconnect-settings/src/app_state.rs index 0d7656eca..8383c6ee8 100644 --- a/autoconnect/autoconnect-settings/src/app_state.rs +++ b/autoconnect/autoconnect-settings/src/app_state.rs @@ -68,6 +68,7 @@ impl AppState { db_settings: settings.db_settings.clone(), }; let storage_type = StorageType::from_dsn(&db_settings.dsn); + #[allow(unused)] let db: Box = match storage_type { #[cfg(feature = "bigtable")] diff --git a/autoconnect/autoconnect-web/Cargo.toml b/autoconnect/autoconnect-web/Cargo.toml index beebbd533..08f5f4ba8 100644 --- a/autoconnect/autoconnect-web/Cargo.toml +++ b/autoconnect/autoconnect-web/Cargo.toml @@ -34,3 +34,5 @@ ctor.workspace = true tokio.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] diff --git a/autoconnect/autoconnect-ws/Cargo.toml b/autoconnect/autoconnect-ws/Cargo.toml index b6b08accf..194d79742 100644 --- a/autoconnect/autoconnect-ws/Cargo.toml +++ b/autoconnect/autoconnect-ws/Cargo.toml @@ -33,3 +33,5 @@ async-stream = "0.3" ctor.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml b/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml index 2e3dbfa30..bc76baf62 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml @@ -30,3 +30,5 @@ tokio.workspace = true serde_json.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] diff --git a/autoendpoint/src/extractors/notification.rs b/autoendpoint/src/extractors/notification.rs index ad86a9660..4e483bbb1 100644 --- a/autoendpoint/src/extractors/notification.rs 
+++ b/autoendpoint/src/extractors/notification.rs @@ -103,6 +103,7 @@ impl From for autopush_common::notification::Notification { timestamp: notification.timestamp, data: notification.data, sortkey_timestamp, + reliability_id: notification.subscription.reliability_id, headers: { let headers: HashMap = notification.headers.into(); if headers.is_empty() { @@ -171,6 +172,12 @@ impl Notification { map.insert("ttl", serde_json::to_value(self.headers.ttl).unwrap()); map.insert("topic", serde_json::to_value(&self.headers.topic).unwrap()); map.insert("timestamp", serde_json::to_value(self.timestamp).unwrap()); + if let Some(reliability_id) = &self.subscription.reliability_id { + map.insert( + "reliability_id", + serde_json::to_value(reliability_id).unwrap(), + ); + } if let Some(data) = &self.data { map.insert("data", serde_json::to_value(data).unwrap()); diff --git a/autoendpoint/src/extractors/subscription.rs b/autoendpoint/src/extractors/subscription.rs index 9251e71d5..c5a3df124 100644 --- a/autoendpoint/src/extractors/subscription.rs +++ b/autoendpoint/src/extractors/subscription.rs @@ -40,7 +40,7 @@ pub struct Subscription { /// (This should ONLY be applied for messages that match known /// Mozilla provided VAPID public keys.) /// - pub tracking_id: Option, + pub reliability_id: Option, } impl FromRequest for Subscription { @@ -75,11 +75,13 @@ impl FromRequest for Subscription { .transpose()?; trace!("raw vapid: {:?}", &vapid); - let trackable = if let Some(vapid) = &vapid { - app_state.reliability.is_trackable(vapid) - } else { - false - }; + let reliability_id: Option = vapid.clone().and_then(|v| { + app_state + .vapid_tracker + .is_trackable(&v) + .then(|| app_state.vapid_tracker.get_id(req.headers())) + }); + debug!("🔍 Assigning Reliability: {:?}", reliability_id); // Capturing the vapid sub right now will cause too much cardinality. Instead, // let's just capture if we have a valid VAPID, as well as what sort of bad sub @@ -134,14 +136,11 @@ impl FromRequest for Subscription { .incr(&format!("updates.vapid.draft{:02}", vapid.vapid.version()))?; } - let tracking_id = - trackable.then(|| app_state.reliability.get_tracking_id(req.headers())); - Ok(Subscription { user, channel_id, vapid, - tracking_id, + reliability_id, }) } .boxed_local() diff --git a/autoendpoint/src/routers/common.rs b/autoendpoint/src/routers/common.rs index e09ffd373..0cca3ac2d 100644 --- a/autoendpoint/src/routers/common.rs +++ b/autoendpoint/src/routers/common.rs @@ -21,6 +21,12 @@ pub fn build_message_data(notification: &Notification) -> ApiResult, #[cfg(feature = "stub")] pub stub_router: Arc, - pub reliability: Arc, + pub vapid_tracker: Arc, } pub struct Server; @@ -109,7 +109,7 @@ impl Server { ) .await?, ); - let reliability = Arc::new(VapidTracker(settings.tracking_keys())); + let vapid_tracker = Arc::new(VapidTracker(settings.tracking_keys())); #[cfg(feature = "stub")] let stub_router = Arc::new(StubRouter::new(settings.stub.clone())?); let app_state = AppState { @@ -122,7 +122,7 @@ impl Server { apns_router, #[cfg(feature = "stub")] stub_router, - reliability, + vapid_tracker, }; spawn_pool_periodic_reporter( diff --git a/autoendpoint/src/settings.rs b/autoendpoint/src/settings.rs index e519ac3ef..ec6e539bc 100644 --- a/autoendpoint/src/settings.rs +++ b/autoendpoint/src/settings.rs @@ -175,9 +175,11 @@ impl Settings { // public key, but that may not always be true. 
pub fn tracking_keys(&self) -> Vec { let keys = &self.tracking_keys.replace(['"', ' '], ""); - Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") - .map(|v| v.to_owned()) - .collect() + let reply = Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") + .map(|v| v.to_owned().replace("=", "")) + .collect(); + trace!("🔍 keys: {:?}", reply); + reply } /// Get the URL for this endpoint server @@ -199,11 +201,20 @@ impl VapidTracker { pub fn is_trackable(&self, vapid: &VapidHeaderWithKey) -> bool { // ideally, [Settings.with_env_and_config_file()] does the work of pre-populating // the Settings.tracking_vapid_pubs cache, but we can't rely on that. - self.0.contains(&vapid.public_key) + let key = vapid.public_key.replace('=', ""); + let result = self.0.contains(&key); + debug!("🔍 Checking {key} {}", { + if result { + "Match!" + } else { + "no match" + } + }); + result } /// Extract the message Id from the headers (if present), otherwise just make one up. - pub fn get_tracking_id(&self, headers: &HeaderMap) -> String { + pub fn get_id(&self, headers: &HeaderMap) -> String { headers .get("X-MessageId") .and_then(|v| @@ -310,7 +321,7 @@ mod tests { #[test] fn test_tracking_keys() -> ApiResult<()> { let settings = Settings{ - tracking_keys: r#"["BLMymkOqvT6OZ1o9etCqV4jGPkvOXNz5FdBjsAR9zR5oeCV1x5CBKuSLTlHon-H_boHTzMtMoNHsAGDlDB6X7vI"]"#.to_owned(), + tracking_keys: r#"["BLMymkOqvT6OZ1o9etCqV4jGPkvOXNz5FdBjsAR9zR5oeCV1x5CBKuSLTlHon-H_boHTzMtMoNHsAGDlDB6X7"]"#.to_owned(), ..Default::default() }; @@ -320,7 +331,7 @@ mod tests { token: "".to_owned(), version_data: crate::headers::vapid::VapidVersionData::Version1, }, - public_key: "BLMymkOqvT6OZ1o9etCqV4jGPkvOXNz5FdBjsAR9zR5oeCV1x5CBKuSLTlHon-H_boHTzMtMoNHsAGDlDB6X7vI".to_owned() + public_key: "BLMymkOqvT6OZ1o9etCqV4jGPkvOXNz5FdBjsAR9zR5oeCV1x5CBKuSLTlHon-H_boHTzMtMoNHsAGDlDB6X7==".to_owned() }; let key_set = settings.tracking_keys(); @@ -333,12 +344,12 @@ mod tests { } #[test] - fn test_tracking_id() -> ApiResult<()> { + fn test_reliability_id() -> ApiResult<()> { let mut headers = HeaderMap::new(); let keys = Vec::new(); let reliability = VapidTracker(keys); - let key = reliability.get_tracking_id(&headers); + let key = reliability.get_id(&headers); assert!(!key.is_empty()); headers.insert( @@ -346,7 +357,7 @@ mod tests { HeaderValue::from_static("123foobar456"), ); - let key = reliability.get_tracking_id(&headers); + let key = reliability.get_id(&headers); assert_eq!(key, "123foobar456".to_owned()); Ok(()) diff --git a/autopush-common/Cargo.toml b/autopush-common/Cargo.toml index 2164633cd..c68bd79d1 100644 --- a/autopush-common/Cargo.toml +++ b/autopush-common/Cargo.toml @@ -63,7 +63,7 @@ form_urlencoded = { version = "1.2", optional = true } [dev-dependencies] mockito = "0.31" tempfile = "3.2.0" -tokio = { workspace=true, features = ["macros"] } +tokio = { workspace = true, features = ["macros"] } actix-rt = "2.8" [features] diff --git a/autopush-common/src/db/bigtable/bigtable_client/mod.rs b/autopush-common/src/db/bigtable/bigtable_client/mod.rs index e36699327..85695e719 100644 --- a/autopush-common/src/db/bigtable/bigtable_client/mod.rs +++ b/autopush-common/src/db/bigtable/bigtable_client/mod.rs @@ -721,6 +721,7 @@ impl BigTableClientImpl { ) })?; + // Create from the known, required fields. 
let mut notif = Notification { channel_id: range_key.channel_id, topic: range_key.topic, @@ -731,6 +732,7 @@ impl BigTableClientImpl { ..Default::default() }; + // Backfill the Optional fields if let Some(cell) = row.take_cell("data") { notif.data = Some(to_string(cell.value, "data")?); } @@ -740,6 +742,10 @@ impl BigTableClientImpl { .map_err(|e| DbError::Serialization(e.to_string()))?, ); } + if let Some(cell) = row.take_cell("reliability_id") { + trace!("🚣 Is reliable"); + notif.reliability_id = Some(to_string(cell.value, "reliability_id")?); + } trace!("🚣 Deserialized message row: {:?}", ¬if); Ok(notif) @@ -1171,6 +1177,15 @@ impl DbClient for BigTableClientImpl { ..Default::default() }); } + + if let Some(reliability_id) = message.reliability_id { + cells.push(cell::Cell { + qualifier: "reliability_id".to_owned(), + value: reliability_id.into_bytes(), + timestamp: expiry, + ..Default::default() + }); + } row.add_cells(family, cells); trace!("🉑 Adding row"); self.write_row(row).await?; @@ -1287,6 +1302,8 @@ impl DbClient for BigTableClientImpl { ); let messages = self.rows_to_notifications(rows)?; + + // Sadly, we can't do this lower in the database calls because of async issues. // Note: Bigtable always returns a timestamp of None. // Under Bigtable `current_timestamp` is instead initially read // from [get_user]. diff --git a/autopush-common/src/db/mod.rs b/autopush-common/src/db/mod.rs index 32992d8e4..77d13915b 100644 --- a/autopush-common/src/db/mod.rs +++ b/autopush-common/src/db/mod.rs @@ -249,6 +249,10 @@ pub struct NotificationRecord { /// value before sending it to storage or a connection node. #[serde(skip_serializing_if = "Option::is_none")] updateid: Option, + /// Internal Push Reliability tracking id. (Applied only to subscription updates generated + /// by Mozilla owned and consumed messages, like SendTab updates.) + #[serde(skip_serializing_if = "Option::is_none")] + reliability_id: Option, } impl NotificationRecord { @@ -337,6 +341,7 @@ impl NotificationRecord { data: self.data, headers: self.headers.map(|m| m.into()), sortkey_timestamp: key.sortkey_timestamp, + reliability_id: None, }) } diff --git a/autopush-common/src/notification.rs b/autopush-common/src/notification.rs index 33ef0ee32..ecbb7f423 100644 --- a/autopush-common/src/notification.rs +++ b/autopush-common/src/notification.rs @@ -27,6 +27,8 @@ pub struct Notification { pub sortkey_timestamp: Option, #[serde(skip_serializing_if = "Option::is_none")] pub headers: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub reliability_id: Option, } pub const TOPIC_NOTIFICATION_PREFIX: &str = "01"; diff --git a/autopush-common/src/reliability.rs b/autopush-common/src/reliability.rs new file mode 100644 index 000000000..53b5eb031 --- /dev/null +++ b/autopush-common/src/reliability.rs @@ -0,0 +1,74 @@ +/// Push Reliability Recorder +use crate::errors::{ApcError, ApcErrorKind}; + +/// The various states that a message may transit on the way from reception to delivery. +#[derive(Debug)] +pub enum PushReliabilityStates { + RECEIVED, + STORED, + RETRIEVED, + TRANSMITTED, + ACCEPTED, + DELIVERED, +} + +// TODO: Differentiate between "transmitted via webpush" and "transmitted via bridge"? 
+impl std::fmt::Display for PushReliabilityStates { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + Self::RECEIVED => "received", + Self::STORED => "stored", + Self::RETRIEVED => "retrieved", + Self::TRANSMITTED => "transmitted", + Self::ACCEPTED => "accepted", + Self::DELIVERED => "delivered", + }) + } +} + +impl std::str::FromStr for PushReliabilityStates { + type Err = ApcError; + + fn from_str(s: &str) -> Result { + Ok(match s.to_lowercase().as_str() { + "recieved" => Self::RECEIVED, + "stored" => Self::STORED, + "retrieved" => Self::RETRIEVED, + "transmitted" => Self::TRANSMITTED, + "accepted" => Self::ACCEPTED, + "delivered" => Self::DELIVERED, + _ => { + return Err(ApcErrorKind::GeneralError("Unknown tracker state".to_owned()).into()); + } + }) + } +} + +#[derive(Debug, Clone)] +pub struct PushReliability { + record_host: String, + record_port: u32, +} + +impl PushReliability { + // Do the magic to make a report instance, whatever that will be. + pub fn new(host: String, port: u32) -> Self { + Self { + record_host: host, + record_port: port, + } + } + + // Handle errors internally. + pub async fn record(&self, reliability_id: &Option, state: PushReliabilityStates) { + if reliability_id.is_none() { + return; + } + // TODO: Record this to the reporting system. + // NO-OP things for now. + let _ = self.record_host; + let _ = self.record_port; + let _ = reliability_id; + let _ = state; + } +} diff --git a/scripts/convert_pem_to_x962.py b/scripts/convert_pem_to_x962.py index bf261215e..ffc2cd2c8 100644 --- a/scripts/convert_pem_to_x962.py +++ b/scripts/convert_pem_to_x962.py @@ -4,7 +4,7 @@ Autopush will try to scan for known VAPID public keys to track. These keys are specified in the header as x962 formatted strings. X962 is effectively "raw" format and contains the two longs that are the coordinates for the -public key. +public key prefixed with a '\04` byte. 
""" import base64 diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index a9fa0a46d..740ca4f91 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -16,7 +16,7 @@ import uuid from queue import Empty, Queue from threading import Event, Thread -from typing import Any, AsyncGenerator, Generator +from typing import Any, AsyncGenerator, Generator, cast from urllib.parse import urlparse import ecdsa @@ -48,6 +48,8 @@ MSG_LIMIT = 20 CRYPTO_KEY = os.environ.get("CRYPTO_KEY") or Fernet.generate_key().decode("utf-8") +TRACKING_KEY = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p) +TRACKING_PUB_KEY = cast(ecdsa.VerifyingKey, TRACKING_KEY.get_verifying_key()) CONNECTION_PORT = 9150 ENDPOINT_PORT = 9160 ROUTER_PORT = 9170 @@ -184,6 +186,10 @@ def base64url_encode(value: bytes | str) -> str: message_table_name=MESSAGE_TABLE, human_logs="true", crypto_keys="[{}]".format(CRYPTO_KEY), + # convert to x692 format + tracking_keys="[{}]".format( + base64.urlsafe_b64encode((b"\4" + TRACKING_PUB_KEY.to_string())).decode() + ), ) @@ -213,7 +219,7 @@ def _get_vapid( payload["aud"] = endpoint if not key: key = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p) - vk: ecdsa.VerifyingKey = key.get_verifying_key() + vk: ecdsa.VerifyingKey = cast(ecdsa.VerifyingKey, key.get_verifying_key()) auth: str = jws.sign(payload, key, algorithm="ES256").strip("=") crypto_key: str = base64url_encode((b"\4" + vk.to_string())) return {"auth": auth, "crypto-key": crypto_key, "key": key} @@ -808,6 +814,37 @@ async def test_basic_delivery_with_vapid( assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) assert result["messageType"] == "notification" + # The key we used should not have been registered, so no tracking should + # be occurring. + log.debug(f"🔍 Reliability: {result.get("reliability_id")}") + assert result.get("reliability_id") is None + + +async def test_basic_delivery_with_tracked_vapid( + registered_test_client: AsyncPushTestClient, + vapid_payload: dict[str, int | str], +) -> None: + """Test delivery of a basic push message with a VAPID header.""" + uuid_data: str = str(uuid.uuid4()) + vapid_info = _get_vapid(key=TRACKING_KEY, payload=vapid_payload) + # quick sanity check to ensure that the keys match. + # (ideally, this should dump as x962, but DER is good enough.) + assert vapid_info["key"].get_verifying_key().to_der() == TRACKING_PUB_KEY.to_der() + + # let's do an offline submit so we can validate the reliability_id survives storage. + await registered_test_client.disconnect() + await registered_test_client.send_notification(data=uuid_data, vapid=vapid_info) + await registered_test_client.connect() + await registered_test_client.hello() + result = await registered_test_client.get_notification() + + # the following presumes that only `salt` is padded. 
+ clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") + assert result["headers"]["encryption"] == clean_header + assert result["data"] == base64url_encode(uuid_data) + assert result["messageType"] == "notification" + log.debug(f"🔍 reliability {result["reliability_id"]}") + assert result["reliability_id"] is not None async def test_basic_delivery_with_invalid_vapid( From 14bd4fbb8f76410036f3cd36056387b8b6654593 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 25 Sep 2024 16:07:53 -0700 Subject: [PATCH 02/25] f lint --- tests/integration/test_integration_all_rust.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 740ca4f91..66dc3e3ab 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -829,7 +829,11 @@ async def test_basic_delivery_with_tracked_vapid( vapid_info = _get_vapid(key=TRACKING_KEY, payload=vapid_payload) # quick sanity check to ensure that the keys match. # (ideally, this should dump as x962, but DER is good enough.) - assert vapid_info["key"].get_verifying_key().to_der() == TRACKING_PUB_KEY.to_der() + key = cast( + ecdsa.VerifyingKey, cast(ecdsa.SigningKey, vapid_info["key"]).get_verifying_key() + ).to_der() + + assert key == TRACKING_PUB_KEY.to_der() # let's do an offline submit so we can validate the reliability_id survives storage. await registered_test_client.disconnect() From e914b14c78a0f624df5e2f2f28edd7f57a0e4ef5 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 25 Sep 2024 16:29:18 -0700 Subject: [PATCH 03/25] f remove extra from pending pr --- .../src/db/bigtable/bigtable_client/mod.rs | 1 - autopush-common/src/reliability.rs | 74 ------------------- 2 files changed, 75 deletions(-) delete mode 100644 autopush-common/src/reliability.rs diff --git a/autopush-common/src/db/bigtable/bigtable_client/mod.rs b/autopush-common/src/db/bigtable/bigtable_client/mod.rs index 85695e719..221f0dcdb 100644 --- a/autopush-common/src/db/bigtable/bigtable_client/mod.rs +++ b/autopush-common/src/db/bigtable/bigtable_client/mod.rs @@ -1303,7 +1303,6 @@ impl DbClient for BigTableClientImpl { let messages = self.rows_to_notifications(rows)?; - // Sadly, we can't do this lower in the database calls because of async issues. // Note: Bigtable always returns a timestamp of None. // Under Bigtable `current_timestamp` is instead initially read // from [get_user]. diff --git a/autopush-common/src/reliability.rs b/autopush-common/src/reliability.rs deleted file mode 100644 index 53b5eb031..000000000 --- a/autopush-common/src/reliability.rs +++ /dev/null @@ -1,74 +0,0 @@ -/// Push Reliability Recorder -use crate::errors::{ApcError, ApcErrorKind}; - -/// The various states that a message may transit on the way from reception to delivery. -#[derive(Debug)] -pub enum PushReliabilityStates { - RECEIVED, - STORED, - RETRIEVED, - TRANSMITTED, - ACCEPTED, - DELIVERED, -} - -// TODO: Differentiate between "transmitted via webpush" and "transmitted via bridge"? 
-impl std::fmt::Display for PushReliabilityStates { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(match self { - Self::RECEIVED => "received", - Self::STORED => "stored", - Self::RETRIEVED => "retrieved", - Self::TRANSMITTED => "transmitted", - Self::ACCEPTED => "accepted", - Self::DELIVERED => "delivered", - }) - } -} - -impl std::str::FromStr for PushReliabilityStates { - type Err = ApcError; - - fn from_str(s: &str) -> Result { - Ok(match s.to_lowercase().as_str() { - "recieved" => Self::RECEIVED, - "stored" => Self::STORED, - "retrieved" => Self::RETRIEVED, - "transmitted" => Self::TRANSMITTED, - "accepted" => Self::ACCEPTED, - "delivered" => Self::DELIVERED, - _ => { - return Err(ApcErrorKind::GeneralError("Unknown tracker state".to_owned()).into()); - } - }) - } -} - -#[derive(Debug, Clone)] -pub struct PushReliability { - record_host: String, - record_port: u32, -} - -impl PushReliability { - // Do the magic to make a report instance, whatever that will be. - pub fn new(host: String, port: u32) -> Self { - Self { - record_host: host, - record_port: port, - } - } - - // Handle errors internally. - pub async fn record(&self, reliability_id: &Option, state: PushReliabilityStates) { - if reliability_id.is_none() { - return; - } - // TODO: Record this to the reporting system. - // NO-OP things for now. - let _ = self.record_host; - let _ = self.record_port; - let _ = reliability_id; - let _ = state; - } -} From e9ff9f56d35bbf37a68af4649719cf79665e1923 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 2 Oct 2024 10:46:25 -0700 Subject: [PATCH 04/25] f r's --- .../autoconnect-common/src/protocol.rs | 6 +++++ .../src/identified/on_client_msg.rs | 1 + autoendpoint/src/error.rs | 7 +++++- autoendpoint/src/extractors/notification.rs | 25 ++++++++----------- autoendpoint/src/extractors/subscription.rs | 2 +- autoendpoint/src/routers/common.rs | 2 +- autoendpoint/src/settings.rs | 6 ++--- 7 files changed, 29 insertions(+), 20 deletions(-) diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 1707e193a..07eafe735 100644 --- a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -80,11 +80,17 @@ impl FromStr for ClientMessage { } } +/// Returned ACKnowledgement of the received message by the User Agent. +/// This is the payload for the `messageType:ack` packet. +/// #[derive(Debug, Deserialize)] pub struct ClientAck { + // The channel_id which received messages #[serde(rename = "channelID")] pub channel_id: Uuid, + // The corresponding version number for the message. pub version: String, + // The Reliability ID for the messages (if present) #[serde(default)] pub reliability_id: Option, } diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_client_msg.rs b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_client_msg.rs index 784626ea7..ea277cdf1 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_client_msg.rs +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_client_msg.rs @@ -216,6 +216,7 @@ impl WebPushClient { // Get the stored notification record. let n = &self.ack_state.unacked_stored_notifs[pos]; debug!("✅ Ack notif: {:?}", &n); + // TODO: Record "ack'd" reliability_id, if present. // Only force delete Topic messages, since they don't have a timestamp. // Other messages persist in the database, to be, eventually, cleaned up by their // TTL. 
We will need to update the `CurrentTimestamp` field for the channel diff --git a/autoendpoint/src/error.rs b/autoendpoint/src/error.rs index 8881193b1..75f77421c 100644 --- a/autoendpoint/src/error.rs +++ b/autoendpoint/src/error.rs @@ -70,6 +70,9 @@ pub enum ApiErrorKind { #[error(transparent)] Jwt(#[from] jsonwebtoken::errors::Error), + #[error(transparent)] + Serde(#[from] serde_json::Error), + #[error("Error while validating token")] TokenHashValidation(#[source] openssl::error::ErrorStack), @@ -143,6 +146,7 @@ impl ApiErrorKind { ApiErrorKind::VapidError(_) | ApiErrorKind::Jwt(_) + | ApiErrorKind::Serde(_) | ApiErrorKind::TokenHashValidation(_) | ApiErrorKind::InvalidAuthentication | ApiErrorKind::InvalidLocalAuth(_) => StatusCode::UNAUTHORIZED, @@ -179,7 +183,7 @@ impl ApiErrorKind { ApiErrorKind::InvalidMessageId => "invalid_message_id", ApiErrorKind::VapidError(_) => "vapid_error", - ApiErrorKind::Jwt(_) => "jwt", + ApiErrorKind::Jwt(_) | ApiErrorKind::Serde(_) => "jwt", ApiErrorKind::TokenHashValidation(_) => "token_hash_validation", ApiErrorKind::InvalidAuthentication => "invalid_authentication", ApiErrorKind::InvalidLocalAuth(_) => "invalid_local_auth", @@ -251,6 +255,7 @@ impl ApiErrorKind { ApiErrorKind::VapidError(_) | ApiErrorKind::TokenHashValidation(_) | ApiErrorKind::Jwt(_) + | ApiErrorKind::Serde(_) | ApiErrorKind::InvalidAuthentication | ApiErrorKind::InvalidLocalAuth(_) => Some(109), diff --git a/autoendpoint/src/extractors/notification.rs b/autoendpoint/src/extractors/notification.rs index 4e483bbb1..f29143c8e 100644 --- a/autoendpoint/src/extractors/notification.rs +++ b/autoendpoint/src/extractors/notification.rs @@ -1,4 +1,4 @@ -use crate::error::{ApiError, ApiErrorKind}; +use crate::error::{ApiError, ApiErrorKind, ApiResult}; use crate::extractors::{ message_id::MessageId, notification_headers::NotificationHeaders, subscription::Subscription, }; @@ -161,31 +161,28 @@ impl Notification { /// fields are still required when delivering to the connection server, so /// we can't simply convert this notification type to that one and serialize /// via serde. 
- pub fn serialize_for_delivery(&self) -> HashMap<&'static str, serde_json::Value> { + pub fn serialize_for_delivery(&self) -> ApiResult> { let mut map = HashMap::new(); map.insert( "channelID", - serde_json::to_value(self.subscription.channel_id).unwrap(), + serde_json::to_value(self.subscription.channel_id)?, ); - map.insert("version", serde_json::to_value(&self.message_id).unwrap()); - map.insert("ttl", serde_json::to_value(self.headers.ttl).unwrap()); - map.insert("topic", serde_json::to_value(&self.headers.topic).unwrap()); - map.insert("timestamp", serde_json::to_value(self.timestamp).unwrap()); + map.insert("version", serde_json::to_value(&self.message_id)?); + map.insert("ttl", serde_json::to_value(self.headers.ttl)?); + map.insert("topic", serde_json::to_value(&self.headers.topic)?); + map.insert("timestamp", serde_json::to_value(self.timestamp)?); if let Some(reliability_id) = &self.subscription.reliability_id { - map.insert( - "reliability_id", - serde_json::to_value(reliability_id).unwrap(), - ); + map.insert("reliability_id", serde_json::to_value(reliability_id)?); } if let Some(data) = &self.data { - map.insert("data", serde_json::to_value(data).unwrap()); + map.insert("data", serde_json::to_value(data)?); let headers: HashMap<_, _> = self.headers.clone().into(); - map.insert("headers", serde_json::to_value(headers).unwrap()); + map.insert("headers", serde_json::to_value(headers)?); } - map + Ok(map) } } diff --git a/autoendpoint/src/extractors/subscription.rs b/autoendpoint/src/extractors/subscription.rs index fdfa91ef5..4dee41517 100644 --- a/autoendpoint/src/extractors/subscription.rs +++ b/autoendpoint/src/extractors/subscription.rs @@ -81,7 +81,7 @@ impl FromRequest for Subscription { .is_trackable(&v) .then(|| app_state.vapid_tracker.get_id(req.headers())) }); - debug!("🔍 Assigning Reliability: {:?}", reliability_id); + debug!("🔍 Assigning Reliability ID: {:?}", reliability_id); // Capturing the vapid sub right now will cause too much cardinality. 
Instead, // let's just capture if we have a valid VAPID, as well as what sort of bad sub diff --git a/autoendpoint/src/routers/common.rs b/autoendpoint/src/routers/common.rs index 0cca3ac2d..34f68282d 100644 --- a/autoendpoint/src/routers/common.rs +++ b/autoendpoint/src/routers/common.rs @@ -23,7 +23,7 @@ pub fn build_message_data(notification: &Notification) -> ApiResult Vec { let keys = &self.tracking_keys.replace(['"', ' '], ""); - let reply = Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") + let result = Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") .map(|v| v.to_owned().replace("=", "")) .collect(); - trace!("🔍 keys: {:?}", reply); - reply + trace!("🔍 tracking_keys: {:?}", result); + result } /// Get the URL for this endpoint server From c9a3512a7e2ac42029abece26b8683458a69f472 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 2 Oct 2024 13:25:03 -0700 Subject: [PATCH 05/25] f r's --- autoendpoint/src/routers/webpush.rs | 2 +- tests/integration/test_integration_all_rust.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 3d2646c46..609132607 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -179,7 +179,7 @@ impl WebPushRouter { node_id: &str, ) -> Result { let url = format!("{}/push/{}", node_id, notification.subscription.user.uaid); - let notification = notification.serialize_for_delivery(); + let notification = notification.serialize_for_delivery().unwrap(); self.http.put(&url).json(¬ification).send().await } diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 66dc3e3ab..5af17eaf8 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -1122,6 +1122,7 @@ async def test_ttl_0_connected(registered_test_client: AsyncPushTestClient) -> N """Test that a message with a TTL=0 is delivered to a client that is actively connected.""" uuid_data: str = str(uuid.uuid4()) result = await registered_test_client.send_notification(data=uuid_data, ttl=0) + log.info(result); assert result is not None # the following presumes that only `salt` is padded. clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") From f8c7ee924e74a509879cdc79d1a3d11fa46efed7 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 2 Oct 2024 14:08:20 -0700 Subject: [PATCH 06/25] f lint --- tests/integration/test_integration_all_rust.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 5af17eaf8..66dc3e3ab 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -1122,7 +1122,6 @@ async def test_ttl_0_connected(registered_test_client: AsyncPushTestClient) -> N """Test that a message with a TTL=0 is delivered to a client that is actively connected.""" uuid_data: str = str(uuid.uuid4()) result = await registered_test_client.send_notification(data=uuid_data, ttl=0) - log.info(result); assert result is not None # the following presumes that only `salt` is padded. 
clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") From 1dd624f80156abce5d36f56db91959f7d1403363 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Tue, 8 Oct 2024 16:28:28 -0700 Subject: [PATCH 07/25] f post test --- tests/integration/async_push_test_client.py | 1 + .../integration/test_integration_all_rust.py | 26 +++++++++---------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/tests/integration/async_push_test_client.py b/tests/integration/async_push_test_client.py index 6b3a41437..714446ec1 100644 --- a/tests/integration/async_push_test_client.py +++ b/tests/integration/async_push_test_client.py @@ -28,6 +28,7 @@ class ClientMessageType(Enum): ACK = "ack" NACK = "nack" PING = "ping" + NOTIFICATION = "notification" class AsyncPushTestClient: diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 66dc3e3ab..170c30fe5 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -740,7 +740,7 @@ async def test_basic_delivery(registered_test_client: AsyncPushTestClient) -> No clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(bytes(uuid_data, "utf-8")) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_topic_basic_delivery(registered_test_client: AsyncPushTestClient) -> None: @@ -751,7 +751,7 @@ async def test_topic_basic_delivery(registered_test_client: AsyncPushTestClient) clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_topic_replacement_delivery( @@ -771,7 +771,7 @@ async def test_topic_replacement_delivery( clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data_2) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result = await registered_test_client.get_notification() assert result is None @@ -789,7 +789,7 @@ async def test_topic_no_delivery_on_reconnect(registered_test_client: AsyncPushT clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value await registered_test_client.ack(result["channelID"], result["version"]) await registered_test_client.disconnect() await registered_test_client.connect() @@ -813,7 +813,7 @@ async def test_basic_delivery_with_vapid( clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value # The key we used should not have been registered, so no tracking should # be occurring. 
log.debug(f"🔍 Reliability: {result.get("reliability_id")}") @@ -846,8 +846,8 @@ async def test_basic_delivery_with_tracked_vapid( clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" - log.debug(f"🔍 reliability {result["reliability_id"]}") + assert result["messageType"] == ClientMessageType.NOTIFICATION.value + log.debug(f"🔍 reliability {result}") assert result["reliability_id"] is not None @@ -1045,7 +1045,7 @@ async def test_multiple_delivery_with_single_ack( result = await registered_test_client.get_notification(timeout=0.5) assert result != {} assert result["data"] == base64url_encode(uuid_data_1) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result2 = await registered_test_client.get_notification() assert result2 != {} assert result2["data"] == base64url_encode(uuid_data_2) @@ -1127,7 +1127,7 @@ async def test_ttl_0_connected(registered_test_client: AsyncPushTestClient) -> N clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_ttl_0_not_connected(registered_test_client: AsyncPushTestClient) -> None: @@ -1182,7 +1182,7 @@ async def test_ttl_batch_expired_and_good_one(registered_test_client: AsyncPushT clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data_2) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result = await registered_test_client.get_notification(timeout=0.5) assert result is None @@ -1243,7 +1243,7 @@ async def test_empty_message_without_crypto_headers( """Test that a message without crypto headers, and does not have data, is accepted.""" result = await registered_test_client.send_notification(use_header=False) assert result is not None - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result assert "data" not in result await registered_test_client.ack(result["channelID"], result["version"]) @@ -1267,14 +1267,14 @@ async def test_empty_message_with_crypto_headers( """ result = await registered_test_client.send_notification() assert result is not None - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result assert "data" not in result result2 = await registered_test_client.send_notification() # We shouldn't store headers for blank messages. 
assert result2 is not None - assert result2["messageType"] == "notification" + assert result2["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result2 assert "data" not in result2 From 829103955d6da72f5c00f28b77c022668ea8b62f Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 9 Oct 2024 10:04:32 -0700 Subject: [PATCH 08/25] f r's --- .../integration/test_integration_all_rust.py | 32 +++++++++---------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 170c30fe5..4501f59b3 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -127,7 +127,7 @@ def base64url_encode(value: bytes | str) -> str: MOCK_SERVER_PORT: Any = get_free_port() MOCK_MP_SERVICES: dict = {} -MOCK_MP_TOKEN: str = "Bearer {}".format(uuid.uuid4().hex) +MOCK_MP_TOKEN: str = f"Bearer {uuid.uuid4().hex}" MOCK_MP_POLLED: Event = Event() MOCK_SENTRY_QUEUE: Queue = Queue() @@ -144,7 +144,7 @@ def base64url_encode(value: bytes | str) -> str: endpoint_scheme="http", router_tablename=ROUTER_TABLE, message_tablename=MESSAGE_TABLE, - crypto_key="[{}]".format(CRYPTO_KEY), + crypto_key=f"[{CRYPTO_KEY}]", auto_ping_interval=30.0, auto_ping_timeout=10.0, close_handshake_timeout=5, @@ -185,11 +185,9 @@ def base64url_encode(value: bytes | str) -> str: router_table_name=ROUTER_TABLE, message_table_name=MESSAGE_TABLE, human_logs="true", - crypto_keys="[{}]".format(CRYPTO_KEY), + crypto_keys=f"[{CRYPTO_KEY}]", # convert to x692 format - tracking_keys="[{}]".format( - base64.urlsafe_b64encode((b"\4" + TRACKING_PUB_KEY.to_string())).decode() - ), + tracking_keys=f"[{base64.urlsafe_b64encode((b"\4" + TRACKING_PUB_KEY.to_string())).decode()}]", ) @@ -204,10 +202,10 @@ def _get_vapid( global CONNECTION_CONFIG if endpoint is None: - endpoint = "{}://{}:{}".format( - CONNECTION_CONFIG.get("endpoint_scheme"), - CONNECTION_CONFIG.get("endpoint_hostname"), - CONNECTION_CONFIG.get("endpoint_port"), + endpoint = ( + f"{CONNECTION_CONFIG.get("endpoint_scheme")}://" + f"{CONNECTION_CONFIG.get("endpoint_hostname")}:" + f"{CONNECTION_CONFIG.get("endpoint_port")}" ) if not payload: payload = { @@ -333,11 +331,11 @@ def get_rust_binary_path(binary) -> str: """ global STRICT_LOG_COUNTS - rust_bin: str = root_dir + "/target/release/{}".format(binary) + rust_bin: str = root_dir + f"/target/release/{binary}" possible_paths: list[str] = [ - "/target/debug/{}".format(binary), - "/{0}/target/release/{0}".format(binary), - "/{0}/target/debug/{0}".format(binary), + f"/target/debug/{binary}", + f"/{binary}/target/release/{binary}", + f"/{binary}/target/debug/{binary}", ] while possible_paths and not os.path.exists(rust_bin): # pragma: nocover rust_bin = root_dir + possible_paths.pop(0) @@ -353,7 +351,7 @@ def write_config_to_env(config, prefix) -> None: """Write configurations to application read environment variables.""" for key, val in config.items(): new_key = prefix + key - log.debug("✍ config {} => {}".format(new_key, val)) + log.debug(f"✍ config {new_key} => {val}") os.environ[new_key.upper()] = str(val) @@ -472,7 +470,7 @@ def setup_megaphone_server(connection_binary) -> None: else: write_config_to_env(MEGAPHONE_CONFIG, CONNECTION_SETTINGS_PREFIX) cmd = [connection_binary] - log.debug("🐍🟢 Starting Megaphone server: {}".format(" ".join(cmd))) + log.debug(f"🐍🟢 Starting Megaphone server: {' '.join(cmd)}") CN_MP_SERVER = subprocess.Popen(cmd, shell=True, 
env=os.environ) # nosec @@ -501,7 +499,7 @@ def setup_endpoint_server() -> None: # Run autoendpoint cmd = [get_rust_binary_path("autoendpoint")] - log.debug("🐍🟢 Starting Endpoint server: {}".format(" ".join(cmd))) + log.debug(f"🐍🟢 Starting Endpoint server: {' '.join(cmd)}") EP_SERVER = subprocess.Popen( cmd, shell=True, From 6c2c7d443cd2a9942b4f7b37cc2f7e87bffa3482 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Tue, 8 Oct 2024 13:15:49 -0700 Subject: [PATCH 09/25] feat: Add Redis-based reliability reporting This adds a feature flag `reliable_report` that optionally enables Push message reliability reporting. The report is done in two parts. The first part uses a Redis-like storage system to note message states. This will require a regularly run "cleanup" script to sweep for expired messages and adjust the current counts, as well as log those states to some sequential, logging-friendly storage (e.g. common logging or streamed to a file). The cleanup script should be a singleton to prevent possible race conditions. The second component will write a record of the state transition times for tracked messages to a storage system that is indexed by the tracking_id. This will allow for more "in depth" analysis by external tooling. The idea is that reporting will be composed of two parts: one part which shows the active states of messages (with a log of prior states to show trends over time), and an optional "in-depth" record that could be used to show things like length of time in storage, overall success rates, survivability rates, etc. This patch also: * fixes a few typos * changes several methods that should consume Notifications so that they actually consume them * converts from `tracking_id` to `reliability_id` * converts instances of the specialized `Metrics` to generic Cadence (to make calls more consistent) * adds a `RELIABLE_REPORT` flag to testing.
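For illustration only, a minimal sketch of what the singleton cleanup sweep could look like, assuming a hypothetical Redis layout (a sorted set of `state#reliability_id` members scored by expiry time, plus a hash of per-state counters). The names `EXPIRY_SET`, `COUNTS_HASH`, and `sweep_expired` are illustrative and are not the schema actually used by this patch:

```python
# Hypothetical sketch of the cleanup sweep described above; key names and the
# member encoding ("state#reliability_id") are assumptions, not the real schema.
import logging
import time

import redis

log = logging.getLogger("reliability_cron")

EXPIRY_SET = "state_expiry"    # sorted set: member -> expiry timestamp (score)
COUNTS_HASH = "state_counts"   # hash: state name -> current count


def sweep_expired(client: redis.Redis) -> int:
    """Remove expired tracked messages and adjust the per-state counters."""
    now = int(time.time())
    expired = client.zrangebyscore(EXPIRY_SET, 0, now)
    if not expired:
        return 0
    pipeline = client.pipeline()
    for member in expired:
        state, _, rel_id = member.decode().partition("#")
        # Log the terminal state so it can be streamed to sequential storage.
        log.info("expired reliability_id=%s state=%s", rel_id, state)
        pipeline.hincrby(COUNTS_HASH, state, -1)
        pipeline.zrem(EXPIRY_SET, member)
    pipeline.execute()
    return len(expired)


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    sweep_expired(redis.Redis(host="localhost", port=6379))
```

Running this as a single cron-style process (rather than inside each autoendpoint instance) is what keeps two sweepers from decrementing the same counter twice, which is why the commit message calls for the cleanup script to be a singleton.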
Closes: SYNC-4327 --- Cargo.lock | 34 +++ autoconnect/Cargo.toml | 6 + .../autoconnect-common/src/protocol.rs | 2 + autoconnect/autoconnect-settings/Cargo.toml | 1 + .../autoconnect-settings/src/app_state.rs | 15 ++ autoconnect/autoconnect-settings/src/lib.rs | 4 + autoconnect/autoconnect-web/Cargo.toml | 6 + autoconnect/autoconnect-web/src/routes.rs | 49 +++- autoconnect/autoconnect-ws/Cargo.toml | 6 + .../autoconnect-ws-sm/Cargo.toml | 3 + .../autoconnect-ws-sm/src/identified/mod.rs | 5 + .../src/identified/on_server_notif.rs | 49 +++- autoendpoint/Cargo.toml | 2 + autoendpoint/src/extractors/notification.rs | 57 ++++- autoendpoint/src/extractors/routers.rs | 2 + autoendpoint/src/extractors/subscription.rs | 112 ++++----- autoendpoint/src/headers/vapid.rs | 35 ++- autoendpoint/src/routers/apns/router.rs | 44 +++- autoendpoint/src/routers/common.rs | 11 +- autoendpoint/src/routers/fcm/router.rs | 38 +++- autoendpoint/src/routers/mod.rs | 2 +- autoendpoint/src/routers/stub/router.rs | 2 +- autoendpoint/src/routers/webpush.rs | 125 +++++++++-- autoendpoint/src/routes/mod.rs | 2 + autoendpoint/src/routes/reliability.rs | 28 +++ autoendpoint/src/routes/webpush.rs | 4 +- autoendpoint/src/server.rs | 36 ++- autoendpoint/src/settings.rs | 21 +- autopush-common/Cargo.toml | 3 + .../src/db/bigtable/bigtable_client/mod.rs | 73 ++++++ autopush-common/src/db/client.rs | 8 + autopush-common/src/db/mock.rs | 9 + autopush-common/src/db/mod.rs | 7 + autopush-common/src/lib.rs | 2 + autopush-common/src/notification.rs | 4 + autopush-common/src/reliability.rs | 180 +++++++++++++++ scripts/reliablity_cron.py | 212 ++++++++++++++++++ tests/integration/async_push_test_client.py | 1 + .../integration/test_integration_all_rust.py | 81 +++++-- tests/load/locustfiles/stored.py | 2 +- 40 files changed, 1128 insertions(+), 155 deletions(-) create mode 100644 autoendpoint/src/routes/reliability.rs create mode 100644 autopush-common/src/reliability.rs create mode 100644 scripts/reliablity_cron.py diff --git a/Cargo.lock b/Cargo.lock index 5bb92def2..1ad2ca53c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -806,6 +806,7 @@ dependencies = [ "openssl", "protobuf", "rand 0.8.5", + "redis", "regex", "reqwest 0.12.8", "sentry", @@ -1101,6 +1102,16 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "config" version = "0.14.0" @@ -2957,6 +2968,23 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "redis" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92f61607c4c4442b575fbc3f31a5dd4e5dd69cfea8f6afec5b83e24f61c126ab" +dependencies = [ + "arc-swap", + "combine", + "itoa", + "num-bigint", + "percent-encoding", + "ryu", + "sha1_smol", + "socket2", + "url", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -3538,6 +3566,12 @@ dependencies = [ "digest", ] +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + [[package]] name = "sha2" version = "0.10.8" diff --git a/autoconnect/Cargo.toml b/autoconnect/Cargo.toml index c0ba4ba27..d5bee9d3e 100644 --- a/autoconnect/Cargo.toml +++ b/autoconnect/Cargo.toml @@ -57,3 +57,9 @@ default = ["bigtable"] bigtable = 
["autopush_common/bigtable", "autoconnect_settings/bigtable"] emulator = ["bigtable"] log_vapid = [] +reliable_report = [ + "autoconnect_settings/reliable_report", + "autoconnect_web/reliable_report", + "autoconnect_ws/reliable_report", + "autopush_common/reliable_report", +] diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 7936a5d60..1707e193a 100644 --- a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -85,6 +85,8 @@ pub struct ClientAck { #[serde(rename = "channelID")] pub channel_id: Uuid, pub version: String, + #[serde(default)] + pub reliability_id: Option, } #[derive(Debug, Serialize)] diff --git a/autoconnect/autoconnect-settings/Cargo.toml b/autoconnect/autoconnect-settings/Cargo.toml index 32c04aeb0..5b1e10239 100644 --- a/autoconnect/autoconnect-settings/Cargo.toml +++ b/autoconnect/autoconnect-settings/Cargo.toml @@ -25,3 +25,4 @@ autopush_common.workspace = true # specify the default via the calling crate, in order to simplify default chains. bigtable = ["autopush_common/bigtable"] emulator = ["bigtable"] +reliable_report = ["autopush_common/reliable_report"] diff --git a/autoconnect/autoconnect-settings/src/app_state.rs b/autoconnect/autoconnect-settings/src/app_state.rs index 0d7656eca..5cb85cfa8 100644 --- a/autoconnect/autoconnect-settings/src/app_state.rs +++ b/autoconnect/autoconnect-settings/src/app_state.rs @@ -12,6 +12,8 @@ use autoconnect_common::{ registry::ClientRegistry, }; use autopush_common::db::{client::DbClient, DbSettings, StorageType}; +#[cfg(feature = "reliable_report")] +use autopush_common::reliability::PushReliability; use crate::{Settings, ENV_PREFIX}; @@ -32,6 +34,9 @@ pub struct AppState { pub settings: Settings, pub router_url: String, pub endpoint_url: String, + + #[cfg(feature = "reliable_report")] + pub reliability: Arc, } impl AppState { @@ -68,6 +73,7 @@ impl AppState { db_settings: settings.db_settings.clone(), }; let storage_type = StorageType::from_dsn(&db_settings.dsn); + #[allow(unused)] let db: Box = match storage_type { #[cfg(feature = "bigtable")] @@ -83,6 +89,13 @@ impl AppState { ENV_PREFIX.to_uppercase() ), }; + + #[cfg(feature = "reliable_report")] + let reliability = Arc::new( + PushReliability::new(&settings.reliability_dsn, &Some(db.clone())).map_err(|e| { + ConfigError::Message(format!("Could not start Reliability connection: {:?}", e)) + })?, + ); let http = reqwest::Client::builder() .timeout(Duration::from_secs(1)) .build() @@ -102,6 +115,8 @@ impl AppState { settings, router_url, endpoint_url, + #[cfg(feature = "reliable_report")] + reliability, }) } diff --git a/autoconnect/autoconnect-settings/src/lib.rs b/autoconnect/autoconnect-settings/src/lib.rs index 6ac34f8ee..dbb4b377b 100644 --- a/autoconnect/autoconnect-settings/src/lib.rs +++ b/autoconnect/autoconnect-settings/src/lib.rs @@ -108,6 +108,8 @@ pub struct Settings { /// /// By default, the number of available physical CPUs is used as the worker count. 
pub actix_workers: Option, + #[cfg(feature = "reliable_report")] + pub reliability_dsn: Option, } impl Default for Settings { @@ -139,6 +141,8 @@ impl Default for Settings { msg_limit: 150, actix_max_connections: None, actix_workers: None, + #[cfg(feature = "reliable_report")] + reliability_dsn: None, } } } diff --git a/autoconnect/autoconnect-web/Cargo.toml b/autoconnect/autoconnect-web/Cargo.toml index beebbd533..2df9d5aba 100644 --- a/autoconnect/autoconnect-web/Cargo.toml +++ b/autoconnect/autoconnect-web/Cargo.toml @@ -34,3 +34,9 @@ ctor.workspace = true tokio.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] +reliable_report = [ + "autopush_common/reliable_report", + "autoconnect_ws/reliable_report", +] diff --git a/autoconnect/autoconnect-web/src/routes.rs b/autoconnect/autoconnect-web/src/routes.rs index fbb59ef05..a435cdce4 100644 --- a/autoconnect/autoconnect-web/src/routes.rs +++ b/autoconnect/autoconnect-web/src/routes.rs @@ -21,16 +21,59 @@ pub async fn push_route( notif: web::Json, app_state: web::Data, ) -> HttpResponse { + #[allow(unused_mut)] // Needed for "reliable_report" + let mut notif = notif.into_inner(); + #[cfg(feature = "reliable_report")] + { + notif.reliable_state = app_state + .reliability + .record( + ¬if.reliability_id, + autopush_common::reliability::PushReliabilityState::IntAccepted, + ¬if.reliable_state, + Some(notif.timestamp + notif.ttl), + ) + .await; + } trace!( - "⏩ push_route, uaid: {} channel_id: {}", + "⏩ in push_route, uaid: {} channel_id: {}", uaid, - notif.channel_id + notif.channel_id, ); + #[cfg(feature = "reliable_report")] + let expiry = { + // Set "transmitted" a bit early since we can't do this inside of `notify`. + notif.reliable_state = app_state + .reliability + .record( + ¬if.reliability_id, + autopush_common::reliability::PushReliabilityState::Transmitted, + ¬if.reliable_state, + Some(notif.timestamp + notif.ttl), + ) + .await; + Some(notif.timestamp + notif.ttl) + }; + // Attempt to send the notification to the UA using WebSocket protocol, or store on failure. let result = app_state .clients - .notify(uaid.into_inner(), notif.into_inner()) + .notify(uaid.into_inner(), notif.clone()) .await; if result.is_ok() { + #[cfg(feature = "reliable_report")] + { + // Set "transmitted" a bit early since we can't do this inside of `notify`. 
+ notif.reliable_state = app_state + .reliability + .record( + ¬if.reliability_id, + autopush_common::reliability::PushReliabilityState::Accepted, + ¬if.reliable_state, + expiry, + ) + .await; + } + HttpResponse::Ok().finish() } else { HttpResponse::NotFound().body("Client not available") diff --git a/autoconnect/autoconnect-ws/Cargo.toml b/autoconnect/autoconnect-ws/Cargo.toml index b6b08accf..d4561dcaa 100644 --- a/autoconnect/autoconnect-ws/Cargo.toml +++ b/autoconnect/autoconnect-ws/Cargo.toml @@ -33,3 +33,9 @@ async-stream = "0.3" ctor.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] +reliable_report = [ + "autopush_common/reliable_report", + "autoconnect_ws_sm/reliable_report", +] diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml b/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml index 2e3dbfa30..0f02874e9 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/Cargo.toml @@ -30,3 +30,6 @@ tokio.workspace = true serde_json.workspace = true autoconnect_common = { workspace = true, features = ["test-support"] } + +[features] +reliable_report = [] diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/mod.rs b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/mod.rs index d7488a248..5e99efd04 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/mod.rs +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/mod.rs @@ -138,6 +138,11 @@ impl WebPushClient { &self.app_state.settings } + #[cfg(feature = "reliable_report")] + pub fn app_reliability(&self) -> &autopush_common::reliability::PushReliability { + &self.app_state.reliability + } + /// Connect this `WebPushClient` to the `ClientRegistry` /// /// Returning a `Stream` of `ServerNotification`s from the `ClientRegistry` diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs index 7e6b6f62f..406d49c44 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs @@ -163,6 +163,31 @@ impl WebPushClient { Ok(smsgs) } + #[cfg(feature = "reliable_report")] + /// Record and transition the state for trackable messages. + async fn record_state( + &self, + messages: &mut Vec, + state: autopush_common::reliability::PushReliabilityState, + ) { + // *Note* because `.map()` is sync + // we can't call the async func without additional hoops. + // I'm guessing that there's a more elegant way to do this, but this works for now. + for message in messages { + let expiry = message.timestamp + message.ttl; + message.reliable_state = self + .app_state + .reliability + .record( + &message.reliability_id, + state, + &message.reliable_state, + Some(expiry), + ) + .await; + } + } + /// Read a chunk (max count 10 returned) of Notifications from storage /// /// This alternates between reading Topic Notifications and Timestamp @@ -186,10 +211,20 @@ impl WebPushClient { let topic_resp = if self.flags.include_topic { trace!("🗄️ WebPushClient::do_check_storage: fetch_topic_messages"); // Get the most recent max 11 messages. - self.app_state + #[allow(unused_mut)] + let mut messages = self + .app_state .db .fetch_topic_messages(&self.uaid, 11) - .await? 
+ .await?; + #[cfg(feature = "reliable_report")] + // Since we pulled these from storage, mark them as "retrieved" + self.record_state( + &mut messages.messages, + autopush_common::reliability::PushReliabilityState::Retreived, + ) + .await; + messages } else { Default::default() }; @@ -226,7 +261,8 @@ impl WebPushClient { "🗄️ WebPushClient::do_check_storage: fetch_timestamp_messages timestamp: {:?}", timestamp ); - let timestamp_resp = self + #[allow(unused_mut)] + let mut timestamp_resp = self .app_state .db .fetch_timestamp_messages(&self.uaid, timestamp, 10) @@ -244,6 +280,13 @@ impl WebPushClient { ) .with_tag("topic", "false") .send(); + #[cfg(feature = "reliable_report")] + // Since we pulled these from storage, mark them as "retrieved" + self.record_state( + &mut timestamp_resp.messages, + autopush_common::reliability::PushReliabilityState::Retreived, + ) + .await; } Ok(CheckStorageResponse { diff --git a/autoendpoint/Cargo.toml b/autoendpoint/Cargo.toml index 94586a0d4..bcd364cbe 100644 --- a/autoendpoint/Cargo.toml +++ b/autoendpoint/Cargo.toml @@ -82,3 +82,5 @@ emulator = ["bigtable"] stub = [] # Verbosely log vapid assertions (NOT ADVISED FOR WIDE PRODUCTION USE) log_vapid = [] + +reliable_report = ["autopush_common/reliable_report"] diff --git a/autoendpoint/src/extractors/notification.rs b/autoendpoint/src/extractors/notification.rs index ad86a9660..4999c3959 100644 --- a/autoendpoint/src/extractors/notification.rs +++ b/autoendpoint/src/extractors/notification.rs @@ -26,6 +26,14 @@ pub struct Notification { pub sort_key_timestamp: u64, /// The encrypted notification body pub data: Option, + #[cfg(feature = "reliable_report")] + /// The current state the message was in (if tracked) + pub reliable_state: Option, + #[cfg(feature = "reliable_report")] + /// The UTC expiration timestamp for this message + pub expiry: Option, + #[cfg(feature = "reliable_report")] + pub reliability_id: Option, } impl FromRequest for Notification { @@ -68,6 +76,30 @@ impl FromRequest for Notification { sort_key_timestamp, ); + #[cfg(feature = "reliable_report")] + let (expiry, current_state) = { + let expiry = if subscription.reliability_id.is_some() { + Some(timestamp + headers.ttl as u64) + } else { + None + }; + + // Brand new notification, so record it as "Received" + let current_state = app_state + .reliability + .record( + &subscription.reliability_id, + autopush_common::reliability::PushReliabilityState::Received, + &None, + expiry, + ) + .await; + (expiry, current_state) + }; + + #[cfg(feature = "reliable_report")] + let reliability_id = subscription.reliability_id.clone(); + // Record the encoding if we have an encrypted payload if let Some(encoding) = &headers.encoding { if data.is_some() { @@ -85,6 +117,12 @@ impl FromRequest for Notification { timestamp, sort_key_timestamp, data, + #[cfg(feature = "reliable_report")] + reliable_state: current_state, + #[cfg(feature = "reliable_report")] + expiry, + #[cfg(feature = "reliable_report")] + reliability_id, }) } .boxed_local() @@ -103,6 +141,7 @@ impl From for autopush_common::notification::Notification { timestamp: notification.timestamp, data: notification.data, sortkey_timestamp, + reliability_id: notification.subscription.reliability_id, headers: { let headers: HashMap = notification.headers.into(); if headers.is_empty() { @@ -111,6 +150,8 @@ impl From for autopush_common::notification::Notification { Some(headers) } }, + #[cfg(feature = "reliable_report")] + reliable_state: notification.reliable_state, } } } @@ -171,7 +212,21 @@ impl 
Notification { map.insert("ttl", serde_json::to_value(self.headers.ttl).unwrap()); map.insert("topic", serde_json::to_value(&self.headers.topic).unwrap()); map.insert("timestamp", serde_json::to_value(self.timestamp).unwrap()); - + #[cfg(feature = "reliable_report")] + { + if let Some(reliability_id) = self.subscription.reliability_id.clone() { + map.insert( + "reliability_id", + serde_json::to_value(reliability_id).unwrap(), + ); + } + if let Some(reliable_state) = self.reliable_state { + map.insert( + "reliable_state", + serde_json::to_value(reliable_state.to_string()).unwrap(), + ); + } + } if let Some(data) = &self.data { map.insert("data", serde_json::to_value(data).unwrap()); diff --git a/autoendpoint/src/extractors/routers.rs b/autoendpoint/src/extractors/routers.rs index c4c012a29..5ef4e3b00 100644 --- a/autoendpoint/src/extractors/routers.rs +++ b/autoendpoint/src/extractors/routers.rs @@ -79,6 +79,8 @@ impl FromRequest for Routers { metrics: app_state.metrics.clone(), http: app_state.http.clone(), endpoint_url: app_state.settings.endpoint_url(), + #[cfg(feature = "reliable_report")] + reliability: app_state.reliability.clone(), }, fcm: app_state.fcm_router.clone(), apns: app_state.apns_router.clone(), diff --git a/autoendpoint/src/extractors/subscription.rs b/autoendpoint/src/extractors/subscription.rs index a48602c4d..5ad545a69 100644 --- a/autoendpoint/src/extractors/subscription.rs +++ b/autoendpoint/src/extractors/subscription.rs @@ -7,6 +7,7 @@ use autopush_common::{ tags::Tags, util::{b64_decode_std, b64_decode_url}, }; + use cadence::{CountedExt, StatsdClient}; use futures::{future::LocalBoxFuture, FutureExt}; use jsonwebtoken::{Algorithm, DecodingKey, Validation}; @@ -22,7 +23,6 @@ use crate::headers::{ crypto_key::CryptoKeyHeader, vapid::{VapidClaims, VapidError, VapidHeader, VapidHeaderWithKey, VapidVersionData}, }; -use crate::metrics::Metrics; use crate::server::AppState; use crate::settings::Settings; @@ -38,7 +38,7 @@ pub struct Subscription { /// (This should ONLY be applied for messages that match known /// Mozilla provided VAPID public keys.) /// - pub tracking_id: Option, + pub reliability_id: Option, } impl FromRequest for Subscription { @@ -54,7 +54,6 @@ impl FromRequest for Subscription { trace!("🔐 Token info: {:?}", &token_info); let app_state: Data = Data::extract(&req).await.expect("No server state found"); - let metrics = Metrics::from(&app_state); // Decrypt the token let token = app_state @@ -71,18 +70,34 @@ impl FromRequest for Subscription { let vapid: Option = parse_vapid(&token_info, &app_state.metrics)? .map(|vapid| extract_public_key(vapid, &token_info)) .transpose()?; - - trace!("raw vapid: {:?}", &vapid); - let trackable = if let Some(vapid) = &vapid { - app_state.reliability.is_trackable(vapid) + // Validate the VAPID JWT token, fetch the claims, and record the version + let vapid = if let Some(with_key) = vapid { + // Validate the VAPID JWT token and record the version + validate_vapid_jwt(&with_key, &app_state.settings, &app_state.metrics)?; + app_state.metrics.incr(&format!( + "updates.vapid.draft{:02}", + with_key.vapid.version() + ))?; + Some(with_key) } else { - false + None }; + trace!("🔐 raw vapid: {:?}", &vapid); + let reliability_id = vapid + .as_ref() + .map(|v| { + app_state + .reliability_filter + .is_trackable(v) + .then(|| app_state.reliability_filter.get_id(req.headers())) + }) + .unwrap_or_default(); + trace!("🔍 track_id: {:?}", reliability_id); // Capturing the vapid sub right now will cause too much cardinality. 
Instead, // let's just capture if we have a valid VAPID, as well as what sort of bad sub // values we get. - if let Some(ref header) = vapid { + if let Some(ref header) = &vapid { let sub = header .vapid .sub() @@ -91,13 +106,15 @@ impl FromRequest for Subscription { let mut tags = Tags::default(); tags.tags .insert("error".to_owned(), e.as_metric().to_owned()); - metrics - .clone() - .incr_with_tags("notification.auth.error", Some(tags)); + app_state + .metrics + .incr_with_tags("notification.auth.error") + .with_tag("error", e.as_metric()) + .send(); }) .unwrap_or_default(); // For now, record that we had a good (?) VAPID sub, - metrics.clone().incr("notification.auth.ok"); + app_state.metrics.incr("notification.auth.ok")?; info!("VAPID sub: {:?}", sub) }; @@ -123,23 +140,11 @@ impl FromRequest for Subscription { trace!("user: {:?}", &user); validate_user(&user, &channel_id, &app_state).await?; - // Validate the VAPID JWT token and record the version - if let Some(vapid) = &vapid { - validate_vapid_jwt(vapid, &app_state.settings, &metrics)?; - - app_state - .metrics - .incr(&format!("updates.vapid.draft{:02}", vapid.vapid.version()))?; - } - - let tracking_id = - trackable.then(|| app_state.reliability.get_tracking_id(req.headers())); - Ok(Subscription { user, channel_id, vapid, - tracking_id, + reliability_id: reliability_id.clone(), }) } .boxed_local() @@ -285,8 +290,8 @@ fn term_to_label(term: &str) -> String { fn validate_vapid_jwt( vapid: &VapidHeaderWithKey, settings: &Settings, - metrics: &Metrics, -) -> ApiResult<()> { + metrics: &StatsdClient, +) -> ApiResult { let VapidHeaderWithKey { vapid, public_key } = vapid; let public_key = decode_public_key(public_key)?; @@ -305,20 +310,18 @@ fn validate_vapid_jwt( Err(e) => match e.kind() { // NOTE: This will fail if `exp` is specified as anything instead of a numeric or if a required field is empty jsonwebtoken::errors::ErrorKind::Json(e) => { - let mut tags = Tags::default(); - tags.tags.insert( - "error".to_owned(), - match e.classify() { - serde_json::error::Category::Io => "IO_ERROR", - serde_json::error::Category::Syntax => "SYNTAX_ERROR", - serde_json::error::Category::Data => "DATA_ERROR", - serde_json::error::Category::Eof => "EOF_ERROR", - } - .to_owned(), - ); metrics - .clone() - .incr_with_tags("notification.auth.bad_vapid.json", Some(tags)); + .incr_with_tags("notification.auth.bad_vapid.json") + .with_tag( + "error", + match e.classify() { + serde_json::error::Category::Io => "IO_ERROR", + serde_json::error::Category::Syntax => "SYNTAX_ERROR", + serde_json::error::Category::Data => "DATA_ERROR", + serde_json::error::Category::Eof => "EOF_ERROR", + }, + ) + .send(); if e.is_data() { debug!("VAPID data warning: {:?}", e); return Err(VapidError::InvalidVapid( @@ -337,7 +340,6 @@ fn validate_vapid_jwt( // Attempt to match up the majority of ErrorKind variants. // The third-party errors all defer to the source, so we can // use that to differentiate for actual errors. - let mut tags = Tags::default(); let label = if e.source().is_none() { // These two have the most cardinality, so we need to handle // them separately. @@ -356,10 +358,10 @@ fn validate_vapid_jwt( // If you need to dig into these, there's always the logs. 
"Other".to_owned() }; - tags.tags.insert("error".to_owned(), label); metrics - .clone() - .incr_with_tags("notification.auth.bad_vapid.other", Some(tags)); + .incr_with_tags("notification.auth.bad_vapid.other") + .with_tag("error", &label) + .send(); error!("Bad Aud: Unexpected VAPID error: {:?}", &e); return Err(e.into()); } @@ -388,7 +390,7 @@ fn validate_vapid_jwt( return Err(VapidError::FutureExpirationToken.into()); } - Ok(()) + Ok(token_data.claims) } #[cfg(test)] @@ -465,7 +467,7 @@ pub mod tests { VapidClaims::default_exp() - 100, public_key, ); - let result = validate_vapid_jwt(&header, &test_settings, &Metrics::noop()); + let result = validate_vapid_jwt(&header, &test_settings, &Metrics::sink()); assert!(result.is_ok()); } @@ -483,7 +485,7 @@ pub mod tests { PUB_KEY.to_owned(), ); assert!(matches!( - validate_vapid_jwt(&header, &test_settings, &Metrics::noop()) + validate_vapid_jwt(&header, &test_settings, &Metrics::sink()) .unwrap_err() .kind, ApiErrorKind::VapidError(VapidError::InvalidAudience) @@ -503,7 +505,7 @@ pub mod tests { VapidClaims::default_exp() - 100, PUB_KEY.to_owned(), ); - let result = validate_vapid_jwt(&header, &test_settings, &Metrics::noop()); + let result = validate_vapid_jwt(&header, &test_settings, &Metrics::sink()); assert!(result.is_ok()); } @@ -537,7 +539,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - let vv = validate_vapid_jwt(&header, &test_settings, &Metrics::noop()) + let vv = validate_vapid_jwt(&header, &test_settings, &Metrics::sink()) .unwrap_err() .kind; assert!(matches![ @@ -580,7 +582,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::noop()).is_ok()); + assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::sink()).is_ok()); // try standard form with no padding let header = VapidHeaderWithKey { public_key: public_key_standard.trim_end_matches('=').to_owned(), @@ -590,7 +592,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::noop()).is_ok()); + assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::sink()).is_ok()); // try URL safe form with padding let header = VapidHeaderWithKey { public_key: public_key_url_safe.clone(), @@ -600,7 +602,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::noop()).is_ok()); + assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::sink()).is_ok()); // try URL safe form without padding let header = VapidHeaderWithKey { public_key: public_key_url_safe.trim_end_matches('=').to_owned(), @@ -610,7 +612,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::noop()).is_ok()); + assert!(validate_vapid_jwt(&header, &test_settings, &Metrics::sink()).is_ok()); } #[test] @@ -643,7 +645,7 @@ pub mod tests { version_data: VapidVersionData::Version1, }, }; - let vv = validate_vapid_jwt(&header, &test_settings, &Metrics::noop()) + let vv = validate_vapid_jwt(&header, &test_settings, &Metrics::sink()) .unwrap_err() .kind; assert!(matches![ diff --git a/autoendpoint/src/headers/vapid.rs b/autoendpoint/src/headers/vapid.rs index 349e9fc0e..c4f707f41 100644 --- a/autoendpoint/src/headers/vapid.rs +++ b/autoendpoint/src/headers/vapid.rs @@ -1,9 +1,9 @@ +use core::str; use std::collections::HashMap; use std::fmt; use base64::Engine; use serde::{Deserialize, Serialize}; 
-use serde_json::Value; use thiserror::Error; use crate::headers::util::split_key_value; @@ -128,6 +128,8 @@ impl VapidHeader { (data, VapidVersionData::Version1) }; + // Validate the JWT here + Ok(Self { scheme, token, @@ -143,26 +145,12 @@ impl VapidHeader { } } + /// Extract the `sub` from the VAPID claims, if validly specified. pub fn sub(&self) -> Result { - let data: HashMap = serde_json::from_str(&self.token).map_err(|e| { - warn!("🔐 Vapid: {:?}", e); - VapidError::SubInvalid - })?; - - if let Some(sub_candiate) = data.get("sub") { - if let Some(sub) = sub_candiate.as_str() { - if !sub.starts_with("mailto:") || !sub.starts_with("https://") { - info!("🔐 Vapid: Bad Format {:?}", sub); - return Err(VapidError::SubBadFormat); - } - if sub.is_empty() { - info!("🔐 Empty Vapid sub"); - return Err(VapidError::SubEmpty); - } - info!("🔐 Vapid: sub: {:?}", sub); - return Ok(sub.to_owned()); - } + if let Ok(claims) = self.claims() { + return Ok(claims.sub); } + Err(VapidError::SubMissing) } @@ -259,4 +247,13 @@ mod tests { }) ) } + + #[test] + fn extract_sub() { + let header = VapidHeader::parse(VALID_HEADER).unwrap(); + assert_eq!( + header.sub().unwrap(), + "mailto:admin@example.com".to_string() + ); + } } diff --git a/autoendpoint/src/routers/apns/router.rs b/autoendpoint/src/routers/apns/router.rs index 48c497a43..75020fe86 100644 --- a/autoendpoint/src/routers/apns/router.rs +++ b/autoendpoint/src/routers/apns/router.rs @@ -1,4 +1,6 @@ use autopush_common::db::client::DbClient; +#[cfg(feature = "reliable_report")] +use autopush_common::reliability::{PushReliability, PushReliabilityState}; use crate::error::{ApiError, ApiResult}; use crate::extractors::notification::Notification; @@ -34,6 +36,8 @@ pub struct ApnsRouter { endpoint_url: Url, metrics: Arc, db: Box, + #[cfg(feature = "reliable_report")] + reliability: Arc, } struct ApnsClientData { @@ -115,6 +119,7 @@ impl ApnsRouter { endpoint_url: Url, metrics: Arc, db: Box, + #[cfg(feature = "reliable_report")] reliability: Arc, ) -> Result { let channels = settings.channels()?; @@ -130,6 +135,8 @@ impl ApnsRouter { endpoint_url, metrics, db, + #[cfg(feature = "reliable_report")] + reliability, }) } @@ -397,7 +404,7 @@ impl Router for ApnsRouter { Ok(router_data) } - async fn route_notification(&self, notification: &Notification) -> ApiResult { + async fn route_notification(&self, notification: Notification) -> ApiResult { debug!( "Sending APNS notification to UAID {}", notification.subscription.user.uaid @@ -420,7 +427,7 @@ impl Router for ApnsRouter { .and_then(Value::as_str) .ok_or(ApnsError::NoReleaseChannel)?; let aps_json = router_data.get("aps").cloned(); - let mut message_data = build_message_data(notification)?; + let mut message_data = build_message_data(¬ification)?; message_data.insert("ver", notification.message_id.clone()); // Get client and build payload @@ -473,9 +480,24 @@ impl Router for ApnsRouter { .await); } + #[cfg(feature = "reliable_report")] + { + // Record that we've sent the message out to APNS. + // We can't set the state here because the notification isn't + // mutable, but we are also essentially consuming the + // notification nothing else should modify it. 
+ self.reliability + .record( + ¬ification.subscription.reliability_id, + PushReliabilityState::Transmitted, + ¬ification.reliable_state, + notification.expiry, + ) + .await; + } // Sent successfully, update metrics and make response trace!("APNS request was successful"); - incr_success_metrics(&self.metrics, "apns", channel, notification); + incr_success_metrics(&self.metrics, "apns", channel, ¬ification); Ok(RouterResponse::success( self.endpoint_url @@ -501,6 +523,8 @@ mod tests { use async_trait::async_trait; use autopush_common::db::client::DbClient; use autopush_common::db::mock::MockDbClient; + #[cfg(feature = "reliable_report")] + use autopush_common::reliability::PushReliability; use cadence::StatsdClient; use mockall::predicate; use std::collections::HashMap; @@ -562,6 +586,8 @@ mod tests { endpoint_url: Url::parse("http://localhost:8080/").unwrap(), metrics: Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), db, + #[cfg(feature = "reliable_report")] + reliability: Arc::new(PushReliability::new(&None, &None).unwrap()), } } @@ -607,7 +633,7 @@ mod tests { let router = make_router(client, db); let notification = make_notification(default_router_data(), None, RouterType::APNS); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_ok(), "result = {result:?}"); assert_eq!( result.unwrap(), @@ -646,7 +672,7 @@ mod tests { let data = "test-data".to_string(); let notification = make_notification(default_router_data(), Some(data), RouterType::APNS); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_ok(), "result = {result:?}"); assert_eq!( result.unwrap(), @@ -668,7 +694,7 @@ mod tests { ); let notification = make_notification(router_data, None, RouterType::APNS); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( @@ -701,7 +727,7 @@ mod tests { .return_once(|_| Ok(())); let router = make_router(client, db.into_boxed_arc()); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( @@ -729,7 +755,7 @@ mod tests { let router = make_router(client, db); let notification = make_notification(default_router_data(), None, RouterType::APNS); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( @@ -762,7 +788,7 @@ mod tests { ); let notification = make_notification(router_data, None, RouterType::APNS); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( diff --git a/autoendpoint/src/routers/common.rs b/autoendpoint/src/routers/common.rs index e09ffd373..c9d1c441d 100644 --- a/autoendpoint/src/routers/common.rs +++ b/autoendpoint/src/routers/common.rs @@ -21,6 +21,9 @@ pub fn build_message_data(notification: &Notification) -> ApiResult, /// A map from application ID to an authenticated FCM client clients: HashMap, + #[cfg(feature = "reliable_report")] + reliability: Arc, } impl FcmRouter { @@ -34,6 +38,7 @@ impl FcmRouter { http: reqwest::Client, metrics: Arc, db: Box, + #[cfg(feature = "reliable_report")] reliability: Arc, ) -> Result { let server_credentials 
= settings.credentials()?; let clients = Self::create_clients(&settings, server_credentials, http.clone()) @@ -45,6 +50,8 @@ impl FcmRouter { metrics, db, clients, + #[cfg(feature = "reliable_report")] + reliability, }) } @@ -137,7 +144,7 @@ impl Router for FcmRouter { Ok(router_data) } - async fn route_notification(&self, notification: &Notification) -> ApiResult { + async fn route_notification(&self, notification: Notification) -> ApiResult { debug!( "Sending FCM notification to UAID {}", notification.subscription.user.uaid @@ -162,7 +169,7 @@ impl Router for FcmRouter { .get(&app_id) .ok_or_else(|| FcmError::InvalidAppId(app_id.clone()))?; - let message_data = build_message_data(notification)?; + let message_data = build_message_data(¬ification)?; let platform = "fcmv1"; trace!("Sending message to {platform}: [{:?}]", &app_id); if let Err(e) = client.send(message_data, routing_token, ttl).await { @@ -177,7 +184,20 @@ impl Router for FcmRouter { ) .await); }; - incr_success_metrics(&self.metrics, platform, &app_id, notification); + incr_success_metrics(&self.metrics, platform, &app_id, ¬ification); + #[cfg(feature = "reliable_report")] + // Record that we've sent the message out to FCM. + // We can't set the state here because the notification isn't + // mutable, but we are also essentially consuming the + // notification nothing else should modify it. + self.reliability + .record( + ¬ification.subscription.reliability_id, + PushReliabilityState::Transmitted, + ¬ification.reliable_state, + notification.expiry, + ) + .await; // Sent successfully, update metrics and make response trace!("Send request was successful"); @@ -207,6 +227,8 @@ mod tests { use crate::routers::{Router, RouterResponse}; use autopush_common::db::client::DbClient; use autopush_common::db::mock::MockDbClient; + #[cfg(feature = "reliable_report")] + use autopush_common::reliability::PushReliability; use std::sync::Arc; use cadence::StatsdClient; @@ -245,6 +267,8 @@ mod tests { reqwest::Client::new(), Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), db, + #[cfg(feature = "reliable_report")] + Arc::new(PushReliability::new(&None, &None).unwrap()), ) .await .unwrap() @@ -291,7 +315,7 @@ mod tests { .create(); let notification = make_notification(default_router_data(), None, RouterType::FCM); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_ok(), "result = {result:?}"); assert_eq!( result.unwrap(), @@ -335,7 +359,7 @@ mod tests { let data = "test-data".to_string(); let notification = make_notification(default_router_data(), Some(data), RouterType::FCM); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_ok(), "result = {result:?}"); assert_eq!( result.unwrap(), @@ -366,7 +390,7 @@ mod tests { ); let notification = make_notification(router_data, None, RouterType::FCM); - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( @@ -405,7 +429,7 @@ mod tests { .create_async() .await; - let result = router.route_notification(¬ification).await; + let result = router.route_notification(notification).await; assert!(result.is_err()); assert!( matches!( diff --git a/autoendpoint/src/routers/mod.rs b/autoendpoint/src/routers/mod.rs index 766bd8dc9..7b93c939c 100644 --- a/autoendpoint/src/routers/mod.rs +++ 
b/autoendpoint/src/routers/mod.rs @@ -35,7 +35,7 @@ pub trait Router { ) -> Result, RouterError>; /// Route a notification to the user - async fn route_notification(&self, notification: &Notification) -> ApiResult; + async fn route_notification(&self, notification: Notification) -> ApiResult; } /// The response returned when a router routes a notification diff --git a/autoendpoint/src/routers/stub/router.rs b/autoendpoint/src/routers/stub/router.rs index 94fbf9f2c..fcc6b5392 100644 --- a/autoendpoint/src/routers/stub/router.rs +++ b/autoendpoint/src/routers/stub/router.rs @@ -89,7 +89,7 @@ impl Router for StubRouter { Ok(router_data) } - async fn route_notification(&self, notification: &Notification) -> ApiResult { + async fn route_notification(&self, notification: Notification) -> ApiResult { debug!( "Sending Test notification to UAID {}", notification.subscription.user.uaid diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 3d2646c46..ebfe028c4 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -1,4 +1,6 @@ use async_trait::async_trait; +#[cfg(feature = "reliable_report")] +use autopush_common::reliability::PushReliability; use cadence::{Counted, CountedExt, StatsdClient, Timed}; use reqwest::{Response, StatusCode}; use serde_json::Value; @@ -24,6 +26,8 @@ pub struct WebPushRouter { pub metrics: Arc, pub http: reqwest::Client, pub endpoint_url: Url, + #[cfg(feature = "reliable_report")] + pub reliability: Arc, } #[async_trait(?Send)] @@ -37,12 +41,17 @@ impl Router for WebPushRouter { Ok(HashMap::new()) } - async fn route_notification(&self, notification: &Notification) -> ApiResult { + async fn route_notification( + &self, + mut notification: Notification, + ) -> ApiResult { // The notification contains the original subscription information - let user = ¬ification.subscription.user; + let user = ¬ification.subscription.user.clone(); + // A clone of the notification used only for the responses + // The canonical Notification is consumed by the various functions. debug!( - "✉ Routing WebPush notification to UAID {}", - notification.subscription.user.uaid + "✉ Routing WebPush notification to UAID {} :: {:?}", + notification.subscription.user.uaid, notification.subscription.reliability_id, ); trace!("✉ Notification = {:?}", notification); @@ -53,16 +62,30 @@ impl Router for WebPushRouter { &node_id ); - // Try to send the notification to the node - match self.send_notification(notification, node_id).await { + #[cfg(feature = "reliable_report")] + let (revert_state, mut notification) = { + let revert_state = notification.reliable_state; + // The node will set the next state (if successful) + notification.reliable_state = self + .reliability + .record( + ¬ification.reliability_id, + autopush_common::reliability::PushReliabilityState::IntTransmitted, + ¬ification.reliable_state, + notification.expiry, + ) + .await; + + (revert_state, notification.clone()) + }; + match self.send_notification(¬ification, node_id).await { Ok(response) => { // The node might be busy, make sure it accepted the notification if response.status() == 200 { // The node has received the notification trace!("✉ Node received notification"); - return Ok(self.make_delivered_response(notification)); + return Ok(self.make_delivered_response(¬ification)); } - trace!( "✉ Node did not receive the notification, response = {:?}", response @@ -79,6 +102,26 @@ impl Router for WebPushRouter { self.remove_node_id(user, node_id).await? 
} } + + #[cfg(feature = "reliable_report")] + // Couldn't send the message! So revert to the prior state if we have one + if let Some(revert_state) = revert_state { + trace!( + "🔎 Revert {:?} from {:?} to {:?}", + ¬ification.reliability_id, + ¬ification.reliable_state, + revert_state + ); + notification.reliable_state = self + .reliability + .record( + ¬ification.reliability_id, + revert_state, + ¬ification.reliable_state, + notification.expiry, + ) + .await; + } } if notification.headers.ttl == 0 { @@ -92,12 +135,24 @@ impl Router for WebPushRouter { // TODO: include `internal` if meta is set. .with_tag("topic", &topic) .send(); - return Ok(self.make_delivered_response(notification)); + #[cfg(feature = "reliable_report")] + { + notification.reliable_state = self + .reliability + .record( + ¬ification.reliability_id, + autopush_common::reliability::PushReliabilityState::Expired, + ¬ification.reliable_state, + notification.expiry, + ) + .await; + } + return Ok(self.make_delivered_response(¬ification)); } // Save notification, node is not present or busy trace!("✉ Node is not present or busy, storing notification"); - self.store_notification(notification).await?; + self.store_notification(&mut notification).await?; // Retrieve the user data again, they may have reconnected or the node // is no longer busy. @@ -113,7 +168,7 @@ impl Router for WebPushRouter { Err(e) => { // Database error, but we already stored the message so it's ok debug!("✉ Database error while re-fetching user: {}", e); - return Ok(self.make_stored_response(notification)); + return Ok(self.make_stored_response(¬ification)); } }; @@ -123,7 +178,7 @@ impl Router for WebPushRouter { // The user is not connected to a node, nothing more to do None => { trace!("✉ User is not connected to a node, returning stored response"); - return Ok(self.make_stored_response(notification)); + return Ok(self.make_stored_response(¬ification)); } }; @@ -144,17 +199,17 @@ impl Router for WebPushRouter { .with_tag("app_id", "direct") .send(); - Ok(self.make_delivered_response(notification)) + Ok(self.make_delivered_response(¬ification)) } else { trace!("✉ Node has not delivered the message, returning stored response"); - Ok(self.make_stored_response(notification)) + Ok(self.make_stored_response(¬ification)) } } Err(error) => { // Can't communicate with the node, attempt to stop using it debug!("✉ Error while triggering notification check: {}", error); self.remove_node_id(&user, node_id).await?; - Ok(self.make_stored_response(notification)) + Ok(self.make_stored_response(¬ification)) } } } @@ -172,16 +227,22 @@ impl WebPushRouter { err } - /// Send the notification to the node + /// Consume and send the notification to the node async fn send_notification( &self, notification: &Notification, node_id: &str, ) -> Result { let url = format!("{}/push/{}", node_id, notification.subscription.user.uaid); - let notification = notification.serialize_for_delivery(); - self.http.put(&url).json(¬ification).send().await + let notification_out = notification.serialize_for_delivery(); + + trace!( + "⏩ out: Notification: {}, channel_id: {}", + ¬ification.subscription.user.uaid, + ¬ification.subscription.channel_id, + ); + self.http.put(&url).json(¬ification_out).send().await } /// Notify the node to check for notifications for the user @@ -196,8 +257,9 @@ impl WebPushRouter { } /// Store a notification in the database - async fn store_notification(&self, notification: &Notification) -> ApiResult<()> { - self.db + async fn store_notification(&self, notification: 
&mut Notification) -> ApiResult<()> { + let result = self + .db .save_message( ¬ification.subscription.user.uaid, notification.clone().into(), @@ -207,16 +269,29 @@ impl WebPushRouter { self.handle_error( ApiErrorKind::Router(RouterError::SaveDb( e, - // try to extract the `sub` from the VAPID clamis. + // try to extract the `sub` from the VAPID claims. notification .subscription .vapid .as_ref() - .map(|vapid| vapid.vapid.claims().map(|c| c.sub).unwrap_or_default()), + .map(|vapid| vapid.vapid.sub().unwrap_or_default()), )), notification.subscription.vapid.clone(), ) - }) + }); + #[cfg(feature = "reliable_report")] + { + notification.reliable_state = self + .reliability + .record( + ¬ification.subscription.reliability_id, + autopush_common::reliability::PushReliabilityState::Stored, + ¬ification.reliable_state, + notification.expiry, + ) + .await; + } + result } /// Remove the node ID from a user. This is done if the user is no longer @@ -289,6 +364,8 @@ mod test { use crate::extractors::subscription::tests::{make_vapid, PUB_KEY}; use crate::headers::vapid::VapidClaims; use autopush_common::errors::ReportableError; + #[cfg(feature = "reliable_report")] + use autopush_common::reliability::PushReliability; use super::*; use autopush_common::db::mock::MockDbClient; @@ -299,6 +376,8 @@ mod test { metrics: Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), http: reqwest::Client::new(), endpoint_url: Url::parse("http://localhost:8080/").unwrap(), + #[cfg(feature = "reliable_report")] + reliability: Arc::new(PushReliability::new(&None, &None).unwrap()), } } diff --git a/autoendpoint/src/routes/mod.rs b/autoendpoint/src/routes/mod.rs index 76e10aeda..0cd8ad202 100644 --- a/autoendpoint/src/routes/mod.rs +++ b/autoendpoint/src/routes/mod.rs @@ -1,3 +1,5 @@ pub mod health; pub mod registration; +#[cfg(feature = "reliable_report")] +pub mod reliability; pub mod webpush; diff --git a/autoendpoint/src/routes/reliability.rs b/autoendpoint/src/routes/reliability.rs new file mode 100644 index 000000000..27bc4824a --- /dev/null +++ b/autoendpoint/src/routes/reliability.rs @@ -0,0 +1,28 @@ +use actix_web::{web::Data, HttpResponse}; +use serde_json::json; + +use crate::server::AppState; + +pub async fn report_handler(app_state: Data) -> HttpResponse { + let reliability = app_state.reliability.clone(); + match reliability.report().await { + Ok(Some(v)) => { + debug!("🔍 Reporting {:?}", &v); + HttpResponse::Ok() + .content_type("application/json") + .body(json!(v).to_string()) + } + Ok(None) => { + debug!("🔍 Reporting, but nothing to report"); + HttpResponse::Ok() + .content_type("application/json") + .body(json!({"error": "No data"}).to_string()) + } + Err(e) => { + debug!("🔍🟥 Reporting, Error {:?}", &e); + HttpResponse::InternalServerError() + .content_type("application/json") + .body(json!({"error": e.to_string()}).to_string()) + } + } +} diff --git a/autoendpoint/src/routes/webpush.rs b/autoendpoint/src/routes/webpush.rs index 44f0a2d8b..161bcd55c 100644 --- a/autoendpoint/src/routes/webpush.rs +++ b/autoendpoint/src/routes/webpush.rs @@ -9,12 +9,12 @@ use actix_web::web::Data; use actix_web::HttpResponse; /// Handle the `POST /wpush/{api_version}/{token}` and `POST /wpush/{token}` routes +/// This is the endpoint for all incoming Push subscription updates. 
pub async fn webpush_route( notification: Notification, routers: Routers, _app_state: Data, ) -> ApiResult { - // TODO: sentry::configure_scope(|scope| { scope.set_extra( "uaid", @@ -25,7 +25,7 @@ pub async fn webpush_route( RouterType::from_str(¬ification.subscription.user.router_type) .map_err(|_| ApiErrorKind::InvalidRouterType)?, ); - Ok(router.route_notification(¬ification).await?.into()) + Ok(router.route_notification(notification).await?.into()) } /// Handle the `DELETE /m/{message_id}` route diff --git a/autoendpoint/src/server.rs b/autoendpoint/src/server.rs index bebbc9a8e..5321857b5 100644 --- a/autoendpoint/src/server.rs +++ b/autoendpoint/src/server.rs @@ -13,11 +13,14 @@ use serde_json::json; #[cfg(feature = "bigtable")] use autopush_common::db::bigtable::BigTableClientImpl; +#[cfg(feature = "reliable_report")] +use autopush_common::reliability::PushReliability; use autopush_common::{ db::{client::DbClient, spawn_pool_periodic_reporter, DbSettings, StorageType}, middleware::sentry::SentryWrapper, }; +use crate::error::{ApiError, ApiErrorKind, ApiResult}; use crate::metrics; #[cfg(feature = "stub")] use crate::routers::stub::router::StubRouter; @@ -31,10 +34,7 @@ use crate::routes::{ webpush::{delete_notification_route, webpush_route}, }; use crate::settings::Settings; -use crate::{ - error::{ApiError, ApiErrorKind, ApiResult}, - settings::VapidTracker, -}; +use crate::settings::VapidTracker; #[derive(Clone)] pub struct AppState { @@ -48,7 +48,9 @@ pub struct AppState { pub apns_router: Arc, #[cfg(feature = "stub")] pub stub_router: Arc, - pub reliability: Arc, + #[cfg(feature = "reliable_report")] + pub reliability: Arc, + pub reliability_filter: VapidTracker, } pub struct Server; @@ -59,6 +61,7 @@ impl Server { let bind_address = format!("{}:{}", settings.host, settings.port); let fernet = settings.make_fernet(); let endpoint_url = settings.endpoint_url(); + let reliability_filter = VapidTracker(settings.tracking_keys()); let db_settings = DbSettings { dsn: settings.db_dsn.clone(), db_settings: if settings.db_settings.is_empty() { @@ -85,6 +88,12 @@ impl Server { .into()); } }; + #[cfg(feature = "reliable_report")] + let reliability = Arc::new( + PushReliability::new(&settings.reliability_dsn, &Some(db.clone())).map_err(|e| { + ApiErrorKind::General(format!("Could not initialize Reliability Report: {:?}", e)) + })?, + ); let http = reqwest::ClientBuilder::new() .connect_timeout(Duration::from_millis(settings.connection_timeout_millis)) .timeout(Duration::from_millis(settings.request_timeout_millis)) @@ -97,6 +106,8 @@ impl Server { http.clone(), metrics.clone(), db.clone(), + #[cfg(feature = "reliable_report")] + reliability.clone(), ) .await?, ); @@ -106,10 +117,11 @@ impl Server { endpoint_url.clone(), metrics.clone(), db.clone(), + #[cfg(feature = "reliable_report")] + reliability.clone(), ) .await?, ); - let reliability = Arc::new(VapidTracker(settings.tracking_keys())); #[cfg(feature = "stub")] let stub_router = Arc::new(StubRouter::new(settings.stub.clone())?); let app_state = AppState { @@ -122,7 +134,9 @@ impl Server { apns_router, #[cfg(feature = "stub")] stub_router, + #[cfg(feature = "reliable_report")] reliability, + reliability_filter, }; spawn_pool_periodic_reporter( @@ -143,7 +157,7 @@ impl Server { actix_web::http::Method::PUT, ]) .max_age(3600); - App::new() + let app = App::new() // Actix 4 recommends wrapping structures wtih web::Data (internally an Arc) .app_data(Data::new(app_state.clone())) // Extractor configuration @@ -196,7 +210,13 @@ impl Server { 
// Dockerflow .service(web::resource("/__heartbeat__").route(web::get().to(health_route))) .service(web::resource("/__lbheartbeat__").route(web::get().to(lb_heartbeat_route))) - .service(web::resource("/__version__").route(web::get().to(version_route))) + .service(web::resource("/__version__").route(web::get().to(version_route))); + #[cfg(feature = "reliable_report")] + let app = app.service( + web::resource("/__milestones__") + .route(web::get().to(crate::routes::reliability::report_handler)), + ); + app }) .bind(bind_address)? .run(); diff --git a/autoendpoint/src/settings.rs b/autoendpoint/src/settings.rs index 02dc4ba13..9f681eb87 100644 --- a/autoendpoint/src/settings.rs +++ b/autoendpoint/src/settings.rs @@ -55,6 +55,8 @@ pub struct Settings { pub apns: ApnsSettings, #[cfg(feature = "stub")] pub stub: StubSettings, + #[cfg(feature = "reliable_report")] + pub reliability_dsn: Option, } impl Default for Settings { @@ -86,6 +88,8 @@ impl Default for Settings { apns: ApnsSettings::default(), #[cfg(feature = "stub")] stub: StubSettings::default(), + #[cfg(feature = "reliable_report")] + reliability_dsn: None, } } } @@ -170,7 +174,7 @@ impl Settings { pub fn tracking_keys(&self) -> Vec { let keys = &self.tracking_keys.replace(['"', ' '], ""); Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") - .map(|v| v.to_owned()) + .map(|v| v.replace('=', "").to_owned()) .collect() } @@ -193,11 +197,16 @@ impl VapidTracker { pub fn is_trackable(&self, vapid: &VapidHeaderWithKey) -> bool { // ideally, [Settings.with_env_and_config_file()] does the work of pre-populating // the Settings.tracking_vapid_pubs cache, but we can't rely on that. - self.0.contains(&vapid.public_key) + trace!("🔍 Looking for {} in {:?}", &vapid.public_key, &self.0); + let result = self.0.contains(&vapid.public_key); + if result { + trace!("🔍 🟢Trackable!!"); + } + result } /// Extract the message Id from the headers (if present), otherwise just make one up. - pub fn get_tracking_id(&self, headers: &HeaderMap) -> String { + pub fn get_id(&self, headers: &HeaderMap) -> String { headers .get("X-MessageId") .and_then(|v| @@ -327,12 +336,12 @@ mod tests { } #[test] - fn test_tracking_id() -> ApiResult<()> { + fn test_reliability_id() -> ApiResult<()> { let mut headers = HeaderMap::new(); let keys = Vec::new(); let reliability = VapidTracker(keys); - let key = reliability.get_tracking_id(&headers); + let key = reliability.get_id(&headers); assert!(!key.is_empty()); headers.insert( @@ -340,7 +349,7 @@ mod tests { HeaderValue::from_static("123foobar456"), ); - let key = reliability.get_tracking_id(&headers); + let key = reliability.get_id(&headers); assert_eq!(key, "123foobar456".to_owned()); Ok(()) diff --git a/autopush-common/Cargo.toml b/autopush-common/Cargo.toml index 053681e2d..339f067da 100644 --- a/autopush-common/Cargo.toml +++ b/autopush-common/Cargo.toml @@ -60,6 +60,8 @@ grpcio-sys = { version = "=0.13.0", optional = true } protobuf = { version = "=2.28.0", optional = true } # grpcio does not support protobuf 3+ form_urlencoded = { version = "1.2", optional = true } +redis = { version = "0.27", optional = true } + [dev-dependencies] mockito = "0.31" tempfile = "3.2.0" @@ -80,3 +82,4 @@ bigtable = [ emulator = [ "bigtable", ] # used for testing big table, requires an external bigtable emulator running. 
+reliable_report = ["dep:redis"] diff --git a/autopush-common/src/db/bigtable/bigtable_client/mod.rs b/autopush-common/src/db/bigtable/bigtable_client/mod.rs index 2e84aaffa..9a9f5c5ec 100644 --- a/autopush-common/src/db/bigtable/bigtable_client/mod.rs +++ b/autopush-common/src/db/bigtable/bigtable_client/mod.rs @@ -51,6 +51,10 @@ pub type FamilyId = String; const ROUTER_FAMILY: &str = "router"; const MESSAGE_FAMILY: &str = "message"; // The default family for messages const MESSAGE_TOPIC_FAMILY: &str = "message_topic"; +#[cfg(feature = "reliable_report")] +const RELIABLE_LOG_FAMILY: &str = "reliability"; +#[cfg(feature = "reliable_report")] +const RELIABLE_LOG_TTL: u64 = crate::db::MAX_NOTIFICATION_TTL * 2; pub(crate) const RETRY_COUNT: usize = 5; @@ -720,6 +724,7 @@ impl BigTableClientImpl { ) })?; + // Create from the known, required fields. let mut notif = Notification { channel_id: range_key.channel_id, topic: range_key.topic, @@ -730,9 +735,30 @@ impl BigTableClientImpl { ..Default::default() }; + // Backfill the Optional fields if let Some(cell) = row.take_cell("data") { notif.data = Some(to_string(cell.value, "data")?); } + #[cfg(feature = "reliable_report")] + { + if let Some(cell) = row.take_cell("reliability_id") { + notif.reliability_id = Some(to_string(cell.value, "reliability_id")?); + } + if let Some(cell) = row.take_cell("reliable_state") { + notif.reliable_state = Some( + crate::reliability::PushReliabilityState::from_str(&to_string( + cell.value, + "reliable_state", + )?) + .map_err(|e| { + DbError::DeserializeString(format!( + "Could not parse reliable_state {:?}", + e + )) + })?, + ); + } + } if let Some(cell) = row.take_cell("headers") { notif.headers = Some( serde_json::from_str::>(&to_string(cell.value, "headers")?) @@ -1174,6 +1200,26 @@ impl DbClient for BigTableClientImpl { }); } } + #[cfg(feature = "reliable_report")] + { + if let Some(reliability_id) = message.reliability_id { + trace!("🔍 FOUND RELIABILITY ID: {}", reliability_id); + cells.push(cell::Cell { + qualifier: "reliability_id".to_owned(), + value: reliability_id.into_bytes(), + timestamp: expiry, + ..Default::default() + }); + } + if let Some(reliable_state) = message.reliable_state { + cells.push(cell::Cell { + qualifier: "reliable_state".to_owned(), + value: reliable_state.to_string().into_bytes(), + timestamp: expiry, + ..Default::default() + }); + } + } if let Some(data) = message.data { cells.push(cell::Cell { qualifier: "data".to_owned(), @@ -1301,6 +1347,7 @@ impl DbClient for BigTableClientImpl { ); let messages = self.rows_to_notifications(rows)?; + // Note: Bigtable always returns a timestamp of None. // Under Bigtable `current_timestamp` is instead initially read // from [get_user]. @@ -1399,6 +1446,32 @@ impl DbClient for BigTableClientImpl { Ok(true) } + #[cfg(feature = "reliable_report")] + async fn log_report( + &self, + reliability_id: &str, + new_state: crate::reliability::PushReliabilityState, + ) -> DbResult<()> { + let row_key = reliability_id.to_owned(); + + let mut row = Row::new(row_key); + let expiry = SystemTime::now() + Duration::from_secs(RELIABLE_LOG_TTL); + + // Log the latest transition time for this id. 
+ let cells: Vec = vec![cell::Cell { + qualifier: new_state.to_string(), + value: crate::util::ms_since_epoch().to_be_bytes().to_vec(), + timestamp: expiry, + ..Default::default() + }]; + + row.add_cells(RELIABLE_LOG_FAMILY, cells); + + self.write_row(row).await?; + + Ok(()) + } + fn box_clone(&self) -> Box { Box::new(self.clone()) } diff --git a/autopush-common/src/db/client.rs b/autopush-common/src/db/client.rs index f447b26ea..9ef91ef2b 100644 --- a/autopush-common/src/db/client.rs +++ b/autopush-common/src/db/client.rs @@ -107,6 +107,14 @@ pub trait DbClient: Send + Sync { None } + /// Record the Reliability Report to long term storage. + #[cfg(feature = "reliable_report")] + async fn log_report( + &self, + reliability_id: &str, + state: crate::reliability::PushReliabilityState, + ) -> DbResult<()>; + fn box_clone(&self) -> Box; } diff --git a/autopush-common/src/db/mock.rs b/autopush-common/src/db/mock.rs index a3d5db7b6..882a1bde0 100644 --- a/autopush-common/src/db/mock.rs +++ b/autopush-common/src/db/mock.rs @@ -95,6 +95,15 @@ impl DbClient for Arc { Arc::as_ref(self).remove_message(uaid, sort_key).await } + #[cfg(feature = "reliable_report")] + async fn log_report( + &self, + reliability_id: &str, + state: crate::reliability::PushReliabilityState, + ) -> DbResult<()> { + Arc::as_ref(self).log_report(reliability_id, state).await + } + async fn router_table_exists(&self) -> DbResult { Arc::as_ref(self).router_table_exists().await } diff --git a/autopush-common/src/db/mod.rs b/autopush-common/src/db/mod.rs index 3b1d2f11b..e265d68ea 100644 --- a/autopush-common/src/db/mod.rs +++ b/autopush-common/src/db/mod.rs @@ -245,6 +245,10 @@ pub struct NotificationRecord { /// value before sending it to storage or a connection node. #[serde(skip_serializing_if = "Option::is_none")] updateid: Option, + /// Internal Push Reliability tracking id. (Applied only to subscription updates generated + /// by Mozilla owned and consumed messages, like SendTab updates.) 
+ #[serde(skip_serializing_if = "Option::is_none")] + reliability_id: Option, } impl NotificationRecord { @@ -333,6 +337,9 @@ impl NotificationRecord { data: self.data, headers: self.headers.map(|m| m.into()), sortkey_timestamp: key.sortkey_timestamp, + reliability_id: None, + #[cfg(feature = "reliable_report")] + reliable_state: None, }) } diff --git a/autopush-common/src/lib.rs b/autopush-common/src/lib.rs index 8656e814c..80918bca9 100644 --- a/autopush-common/src/lib.rs +++ b/autopush-common/src/lib.rs @@ -13,6 +13,8 @@ pub mod logging; pub mod metrics; pub mod middleware; pub mod notification; +#[cfg(feature = "reliable_report")] +pub mod reliability; pub mod sentry; pub mod tags; pub mod test_support; diff --git a/autopush-common/src/notification.rs b/autopush-common/src/notification.rs index 33ef0ee32..29b89a057 100644 --- a/autopush-common/src/notification.rs +++ b/autopush-common/src/notification.rs @@ -27,6 +27,10 @@ pub struct Notification { pub sortkey_timestamp: Option, #[serde(skip_serializing_if = "Option::is_none")] pub headers: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub reliability_id: Option, + #[cfg(feature = "reliable_report")] + pub reliable_state: Option, } pub const TOPIC_NOTIFICATION_PREFIX: &str = "01"; diff --git a/autopush-common/src/reliability.rs b/autopush-common/src/reliability.rs new file mode 100644 index 000000000..986531bec --- /dev/null +++ b/autopush-common/src/reliability.rs @@ -0,0 +1,180 @@ +/// Push Reliability Recorder +/// +/// This allows us to track messages from select, known parties (currently, just +/// mozilla generated and consumed) so that we can identify potential trouble spots +/// and where messages expire early. Message expiration can lead to message loss +use std::collections::HashMap; +use std::sync::Arc; + +use redis::Commands; + +use crate::db::client::DbClient; +use crate::errors::{ApcError, ApcErrorKind, Result}; + +pub const COUNTS: &str = "state_counts"; +pub const EXPIRY: &str = "expiry"; + +/// The various states that a message may transit on the way from reception to delivery. +#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Deserialize)] +pub enum PushReliabilityState { + #[serde(rename = "received")] + Received, // Subscription was received by the Push Server + #[serde(rename = "stored")] + Stored, // Subscription was stored because it could not be delivered immediately + #[serde(rename = "retreived")] + Retreived, // Subscription was taken from storage for delivery + #[serde(rename = "transmitted_webpush")] + IntTransmitted, // Subscription was handed off between autoendpoint and autoconnect + #[serde(rename = "accepted_webpush")] + IntAccepted, // Subscription was accepted by autoconnect from autopendpoint + #[serde(rename = "transmitted")] + Transmitted, // Subscription was handed off for delivery to the UA + #[serde(rename = "accepted")] + Accepted, // Subscription was accepted for delivery by the UA + #[serde(rename = "delivered")] + Delivered, // Subscription was provided to the WebApp recipient by the UA + #[serde(rename = "expired")] + Expired, // Subscription expired naturally (e.g. TTL=0) +} + +// TODO: Differentiate between "transmitted via webpush" and "transmitted via bridge"? 
+impl std::fmt::Display for PushReliabilityState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + Self::Received => "received", + Self::Stored => "stored", + Self::Retreived => "retrieved", + Self::Transmitted => "transmitted", + Self::IntTransmitted => "transmitted_webpush", + Self::IntAccepted => "accepted_webpush", + Self::Accepted => "accepted", + Self::Delivered => "delivered", + Self::Expired => "expired", + }) + } +} + +impl std::str::FromStr for PushReliabilityState { + type Err = ApcError; + + fn from_str(s: &str) -> std::result::Result { + Ok(match s.to_lowercase().as_str() { + "received" => Self::Received, + "stored" => Self::Stored, + "retrieved" => Self::Retreived, + "transmitted" => Self::Transmitted, + "accepted" => Self::Accepted, + "transmitted_webpush" => Self::IntTransmitted, + "accepted_webpush" => Self::IntAccepted, + "delivered" => Self::Delivered, + "expired" => Self::Expired, + _ => { + return Err( + ApcErrorKind::GeneralError(format!("Unknown tracker state \"{}\"", s)).into(), + ); + } + }) + } +} + +impl serde::Serialize for PushReliabilityState { + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +#[derive(Default, Clone)] +pub struct PushReliability { + client: Option>, + db: Option>, +} + +impl PushReliability { + // Do the magic to make a report instance, whatever that will be. + pub fn new(reliability_dsn: &Option, db: &Option>) -> Result { + if reliability_dsn.is_none() { + debug!("🔍 No reliability DSN declared."); + return Ok(Self::default()); + }; + + let client = if let Some(dsn) = reliability_dsn { + let rclient = redis::Client::open(dsn.clone()).map_err(|e| { + ApcErrorKind::GeneralError(format!("Could not connect to redis server: {:?}", e)) + })?; + Some(Arc::new(rclient)) + } else { + None + }; + + Ok(Self { + client, + db: db.clone(), + }) + } + + // Record the record state change to storage. + pub async fn record( + &self, + reliability_id: &Option, + new: PushReliabilityState, + old: &Option, + expr: Option, + ) -> Option { + if reliability_id.is_none() { + return None; + } + let id = reliability_id.clone().unwrap(); + if let Some(client) = &self.client { + debug!( + "🔍 {} from {} to {}", + id, + old.map(|v| v.to_string()) + .unwrap_or_else(|| "None".to_owned()), + new + ); + if let Ok(mut con) = client.get_connection() { + let mut pipeline = redis::Pipeline::new(); + let pipeline = pipeline.hincr(COUNTS, new.to_string(), 1); + let pipeline = if let Some(old) = old { + pipeline + .hincr(COUNTS, old.to_string(), -1) + .zrem(EXPIRY, format!("{}#{}", &old, id)) + } else { + pipeline + }; + // Errors are not fatal, and should not impact message flow, but + // we should record them somewhere. + let _ = pipeline + .zadd(EXPIRY, format!("{}#{}", new, id), expr.unwrap_or_default()) + .exec(&mut con) + .inspect_err(|e| { + warn!("🔍 Failed to write to storage: {:?}", e); + }); + } + }; + if let Some(db) = &self.db { + // Errors are not fatal, and should not impact message flow, but + // we should record them somewhere. + let _ = db.log_report(&id, new).await.inspect_err(|e| { + warn!("🔍 Unable to record reliability state: {:?}", e); + }); + } + Some(new) + } + + // Return a snapshot of milestone states + // This will probably not be called directly, but useful for debugging. 
+ pub async fn report(&self) -> Result>> { + if let Some(client) = &self.client { + if let Ok(mut conn) = client.get_connection() { + return Ok(Some(conn.hgetall(COUNTS).map_err(|e| { + ApcErrorKind::GeneralError(format!("Could not read report {:?}", e)) + })?)); + } + } + Ok(None) + } +} diff --git a/scripts/reliablity_cron.py b/scripts/reliablity_cron.py new file mode 100644 index 000000000..061b21ad3 --- /dev/null +++ b/scripts/reliablity_cron.py @@ -0,0 +1,212 @@ +#! python3 + +""" +This program reaps expired records and adjusts counts. + +Currently, this is desined to run on a cron, however it +can be adapted to include it's own timing loop. +""" + +import argparse +import asyncio +import json +import logging +import os +import time +import pdb + +from typing import cast + +import redis +import toml + +from google.cloud.bigtable.data import ( + BigtableDataClientAsync, + RowMutationEntry, + SetCell, +) + + +class Counter: + """Manage Redis-like storage counts + + Current milestone counts are managed in a Redis-like storage system. + There are two parts required, one is the active milestone count (as + an HINCR). The other is a ZHash that contains the expiration + timestamp for records. + Our 'garbage collection' goes through the ZHash looking for expired + records and removes them while decrementing the associated HINCR count, + indicating that the record expired "in place". + + We also update the Bigtable message log indicating that a message + failed to be delivered. + """ + + def __init__(self, log: logging.Logger, settings): + try: + import pdb + + pdb.set_trace() + self.redis = redis.Redis.from_url(settings.reliability_dsn) + self.bigtable = BigtableDataClientAsync( + project=settings.bigtable["project"] + ) + self.log = log + self.settings = settings + except Exception as e: + log.error(e) + + async def gc(self) -> dict[str, int | float]: + """Prune expired elements, decrementing counters and logging result""" + start = time.time() + # The table of counts + counts = self.settings.count_table + # The table of expirations + expiry = self.settings.expiry_table + # the BigTable reliability family + log_family = self.settings.log_family + + # Fetch the candidates to purge. + pdb.set_trace() + mutations = list() + purged = cast( + list[bytes], self.redis.zrange(expiry, -1, int(start), byscore=True) + ) + # Fix up the counts + with self.redis.pipeline() as pipeline: + for key in purged: + # clean up the counts. + parts = key.split(b"#", 2) + state = parts[0] + self.log.debug(f"🪦 decr {state.decode()}") + pipeline.hincrby(counts, state.decode(), -1) + pipeline.zrem(expiry, key) + # and add the log info. 
+ mutations.append( + RowMutationEntry( + key, SetCell(log_family, "expired", int(start * 1000)) + ) + ) + mutations.append( + RowMutationEntry( + key, + SetCell( + log_family, + "error", + "expired", + ), + ) + ) + if len(purged) > 0: + # make the changes to redis, + pipeline.execute() + # then add the bigtable logs + table = self.bigtable.get_table( + self.settings.bigtable.get("instance"), + self.settings.bigtable.get("table"), + ) + await table.bulk_mutate_rows(mutations) + + result = { + "trimmed": len(purged), + "time": int(start * 1000) - (time.time() * 1000), + } + if len(purged): + self.log.info( + f"🪦 Trimmed {result.get("trimmed")} in {result.get("time")}ms" + ) + return result + + +def config(env_args: os._Environ = os.environ) -> argparse.Namespace: + """Read the configuration from the args and environment.""" + parser = argparse.ArgumentParser( + description="Manage Autopush Reliability Tracking Redis data." + ) + parser.add_argument("-c", "--config", help="configuration_file", action="append") + parser.add_argument( + "--reliability_dsn", + "-r", + help="DSN to connect to the Redis like service.", + default=env_args.get( + "AUTOCONNECT_RELIABILITY_DSN", env_args.get("AUTOEND_RELIABILITY_DSN") + ), + ) + parser.add_argument( + "--db_dsn", + "-b", + help="User Agent ID", + default=env_args.get("AUTOCONNECT_DB_DSN", env_args.get("AUTOEND_DB_DSN")), + ) + parser.add_argument( + "--db_settings", + "-s", + help="User Agent ID", + default=env_args.get( + "AUTOCONNECT_DB_SETTINGS", env_args.get("AUTOEND_DB_SETTINGS") + ), + ) + parser.add_argument( + "--count_table", + help="Name of Redis table of milestone counts", + default=env_args.get("AUTOTRACK_COUNTS", "state_counts"), + ) + parser.add_argument( + "--expiry_table", + help="Name of Redis table of milestone expirations", + default=env_args.get("AUTOTRACK_EXPIRY", "expiry"), + ) + parser.add_argument( + "--log_family", + help="Name of Bigtable log family", + default=env_args.get("AUTOTRACK_EXPIRY", "reliability"), + ) + args = parser.parse_args() + + # if we have a config file, read from that and then reload. + if args.config is not None: + for filename in args.config: + with open(filename, "r") as f: + args = parser.set_defaults(**toml.load(f)) + args = parser.parse_args() + + # fixup the bigtable settings so that they're easier for this script to deal with. + if args.db_settings is not None: + bt_settings = json.loads(args.db_settings) + parts = bt_settings.get("table_name").split("/") + for i in range(0, len(parts), 2): + # remember: the `tablename` dsn uses plurals for + # `projects`, `instances`, & `tables` + bt_settings[parts[i].rstrip("s")] = parts[i + 1] + args.bigtable = bt_settings + + return args + + +def init_logs(): + """Initialize logging (based on `PYTHON_LOG` environ)""" + level = getattr(logging, os.environ.get("PYTHON_LOG", "INFO").upper(), None) + logging.basicConfig(level=level) + log = logging.getLogger("autotrack") + return log + + +async def amain(log, settings): + """Async main loop""" + counter = Counter(log, settings) + _result = await counter.gc() + # TODO: adjust timing loop based on result time. + # Ideally, this would have a loop that it runs on that becomes tighter the more items + # were purged, and adjusts based on the time it took to run. 
+ return + + +def main(): + """Configure and start async main loop""" + log = init_logs() + log.info("Starting up...") + asyncio.run(amain(log, config())) + + +if __name__ == "__main__": + main() diff --git a/tests/integration/async_push_test_client.py b/tests/integration/async_push_test_client.py index 6b3a41437..714446ec1 100644 --- a/tests/integration/async_push_test_client.py +++ b/tests/integration/async_push_test_client.py @@ -28,6 +28,7 @@ class ClientMessageType(Enum): ACK = "ack" NACK = "nack" PING = "ping" + NOTIFICATION = "notification" class AsyncPushTestClient: diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index a9fa0a46d..876ba2056 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -16,7 +16,7 @@ import uuid from queue import Empty, Queue from threading import Event, Thread -from typing import Any, AsyncGenerator, Generator +from typing import Any, AsyncGenerator, Generator, cast from urllib.parse import urlparse import ecdsa @@ -25,6 +25,7 @@ import pytest import uvicorn import websockets + from cryptography.fernet import Fernet from fastapi import FastAPI, Request from jose import jws @@ -48,6 +49,7 @@ MSG_LIMIT = 20 CRYPTO_KEY = os.environ.get("CRYPTO_KEY") or Fernet.generate_key().decode("utf-8") +TRACKING_KEY = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p) CONNECTION_PORT = 9150 ENDPOINT_PORT = 9160 ROUTER_PORT = 9170 @@ -152,8 +154,16 @@ def base64url_encode(value: bytes | str) -> str: # new autoconnect db_dsn=os.environ.get("DB_DSN", "grpc://localhost:8086"), db_settings=get_db_settings(), + tracking_keys="[{}]".format( + base64.urlsafe_b64encode( + cast(ecdsa.VerifyingKey, TRACKING_KEY.get_verifying_key()).to_string() + ).decode() + ), ) +if os.environ.get("RELIABLE_REPORT") is not None: + CONNECTION_CONFIG["reliability_dsn"] = "redis://localhost:6379" + """Connection Megaphone Config: For local test debugging, set `AUTOPUSH_MP_CONFIG=_url_` to override creation of the local server. 
@@ -184,8 +194,19 @@ def base64url_encode(value: bytes | str) -> str: message_table_name=MESSAGE_TABLE, human_logs="true", crypto_keys="[{}]".format(CRYPTO_KEY), + # convert to x692 format + tracking_keys=f"[{ + base64.urlsafe_b64encode(( + b'\4' + cast( + ecdsa.VerifyingKey, + TRACKING_KEY.get_verifying_key() + ).to_string())).decode()}]", ) +if os.environ.get("RELIABLE_REPORT") is not None: + CONNECTION_CONFIG["reliability_dsn"] = "redis://localhost:6379" + ENDPOINT_CONFIG["reliability_dsn"] = "redis://localhost:6379" + def _get_vapid( key: ecdsa.SigningKey | None = None, @@ -213,7 +234,7 @@ def _get_vapid( payload["aud"] = endpoint if not key: key = ecdsa.SigningKey.generate(curve=ecdsa.NIST256p) - vk: ecdsa.VerifyingKey = key.get_verifying_key() + vk: ecdsa.VerifyingKey = cast(ecdsa.VerifyingKey, key.get_verifying_key()) auth: str = jws.sign(payload, key, algorithm="ES256").strip("=") crypto_key: str = base64url_encode((b"\4" + vk.to_string())) return {"auth": auth, "crypto-key": crypto_key, "key": key} @@ -734,7 +755,7 @@ async def test_basic_delivery(registered_test_client: AsyncPushTestClient) -> No clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(bytes(uuid_data, "utf-8")) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_topic_basic_delivery(registered_test_client: AsyncPushTestClient) -> None: @@ -745,7 +766,7 @@ async def test_topic_basic_delivery(registered_test_client: AsyncPushTestClient) clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_topic_replacement_delivery( @@ -765,7 +786,7 @@ async def test_topic_replacement_delivery( clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data_2) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result = await registered_test_client.get_notification() assert result is None @@ -783,7 +804,7 @@ async def test_topic_no_delivery_on_reconnect(registered_test_client: AsyncPushT clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value await registered_test_client.ack(result["channelID"], result["version"]) await registered_test_client.disconnect() await registered_test_client.connect() @@ -801,13 +822,45 @@ async def test_basic_delivery_with_vapid( ) -> None: """Test delivery of a basic push message with a VAPID header.""" uuid_data: str = str(uuid.uuid4()) + # Since we are not explicity setting the TRACKING_KEY, we should not + # track this message. vapid_info = _get_vapid(payload=vapid_payload) result = await registered_test_client.send_notification(data=uuid_data, vapid=vapid_info) # the following presumes that only `salt` is padded. 
clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value + if os.environ.get("RELIABLE_REPORT") is not None: + assert result.get("reliability_id") is None, "Tracking unknown message" + + +async def test_basic_delivery_with_tracked_vapid( + registered_test_client: AsyncPushTestClient, + vapid_payload: dict[str, int | str], +) -> None: + """Test delivery of a basic push message with a VAPID header.""" + if os.environ.get("RELIABLE_REPORT") is None: + pytest.skip("RELIABLE_REPORT not set, skipping test.") + # TODO: connect to test redis server and redis.flushall() + uuid_data: str = str(uuid.uuid4()) + vapid_info = _get_vapid(key=TRACKING_KEY, payload=vapid_payload) + result = await registered_test_client.send_notification(data=uuid_data, vapid=vapid_info) + # the following presumes that only `salt` is padded. + clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") + assert result["headers"]["encryption"] == clean_header + assert result["data"] == base64url_encode(uuid_data) + assert result["messageType"] == ClientMessageType.NOTIFICATION.value + assert result.get("reliability_id") is not None, "missing reliability_id" + if os.environ.get("RELIABLE_REPORT") is not None: + endpoint = registered_test_client.get_host_client_endpoint() + async with httpx.AsyncClient() as httpx_client: + resp = await httpx_client.get(f"{endpoint}/__milestones__", timeout=5) + log.debug(f"🔍 Milestones: {resp.text}") + jresp = json.loads(resp.text) + assert jresp["accepted"] == 1 + for other in ["accepted_webpush", "received", "transmitted_webpush", "transmitted"]: + assert jresp[other] == 0, f"reliablity state '{other}' was not 0" async def test_basic_delivery_with_invalid_vapid( @@ -1004,7 +1057,7 @@ async def test_multiple_delivery_with_single_ack( result = await registered_test_client.get_notification(timeout=0.5) assert result != {} assert result["data"] == base64url_encode(uuid_data_1) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result2 = await registered_test_client.get_notification() assert result2 != {} assert result2["data"] == base64url_encode(uuid_data_2) @@ -1086,7 +1139,7 @@ async def test_ttl_0_connected(registered_test_client: AsyncPushTestClient) -> N clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value async def test_ttl_0_not_connected(registered_test_client: AsyncPushTestClient) -> None: @@ -1141,7 +1194,7 @@ async def test_ttl_batch_expired_and_good_one(registered_test_client: AsyncPushT clean_header = registered_test_client._crypto_key.replace('"', "").rstrip("=") assert result["headers"]["encryption"] == clean_header assert result["data"] == base64url_encode(uuid_data_2) - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value result = await registered_test_client.get_notification(timeout=0.5) assert result is None @@ -1202,7 +1255,7 @@ async def test_empty_message_without_crypto_headers( """Test that a message without crypto headers, and 
does not have data, is accepted.""" result = await registered_test_client.send_notification(use_header=False) assert result is not None - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result assert "data" not in result await registered_test_client.ack(result["channelID"], result["version"]) @@ -1226,14 +1279,14 @@ async def test_empty_message_with_crypto_headers( """ result = await registered_test_client.send_notification() assert result is not None - assert result["messageType"] == "notification" + assert result["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result assert "data" not in result result2 = await registered_test_client.send_notification() # We shouldn't store headers for blank messages. assert result2 is not None - assert result2["messageType"] == "notification" + assert result2["messageType"] == ClientMessageType.NOTIFICATION.value assert "headers" not in result2 assert "data" not in result2 @@ -1259,8 +1312,6 @@ async def test_big_message(registered_test_client: AsyncPushTestClient) -> None: block that was 5624 bytes long. We'll skip the binary bit for a 4216 block of "text" we then b64 encode to send. """ - import base64 - bulk = "".join( random.choice(string.ascii_letters + string.digits + string.punctuation) for _ in range(0, 4216) diff --git a/tests/load/locustfiles/stored.py b/tests/load/locustfiles/stored.py index 7febf8af3..f2f12b400 100644 --- a/tests/load/locustfiles/stored.py +++ b/tests/load/locustfiles/stored.py @@ -230,7 +230,7 @@ def recv_message(self) -> None: assert self.ws data = self.ws.recv() if not isinstance(data, str): - logger.error("recv_message unexpectedly recieved bytes") + logger.error("recv_message unexpectedly received bytes") data = str(data) self.on_ws_message(self.ws, data) From 3a0beef0a174eee6753fc7a65674572b3bf81838 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Thu, 17 Oct 2024 10:43:52 -0700 Subject: [PATCH 10/25] f isort --- tests/integration/test_integration_all_rust.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 876ba2056..6464af4ba 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -25,7 +25,6 @@ import pytest import uvicorn import websockets - from cryptography.fernet import Fernet from fastapi import FastAPI, Request from jose import jws From efb1cf523bdebc8705cbfade7c9fbf809a2b5d6a Mon Sep 17 00:00:00 2001 From: jrconlin Date: Thu, 17 Oct 2024 14:18:05 -0700 Subject: [PATCH 11/25] f add documentation * alter `setup_bt` to include reliability family * alter config.yml for eventual integration test changes --- .circleci/config.yml | 4 ++++ docs/src/reliability.md | 29 +++++++++++++++++++++++++++++ scripts/setup_bt.sh | 4 ++++ 3 files changed, 37 insertions(+) create mode 100644 docs/src/reliability.md diff --git a/.circleci/config.yml b/.circleci/config.yml index 7cd0f4b44..0c7b03389 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -165,6 +165,10 @@ jobs: username: $DOCKER_USER password: $DOCKER_PASS command: gcloud beta emulators bigtable start --host-port=localhost:8086 + # - image: redis/redis-stack-server + # auth: + # username: $DOCKER_USER + # password: $DOCKER_PASS resource_class: large environment: BIGTABLE_EMULATOR_HOST: localhost:8086 diff --git a/docs/src/reliability.md b/docs/src/reliability.md new file mode 
100644 index 000000000..6bab56650 --- /dev/null +++ b/docs/src/reliability.md @@ -0,0 +1,29 @@ +# Push Reliability Tracing + +AutoPush does a lot to ensure that messages and subscriptions stay private. There are many reasons for this, aside from simply respecting user privacy, which include the high cost of data capture and retention. Tracking this information can easily increase costs by thousands of dollars a month. + +Traditionally, AutoPush has relied on "external" tracking to determine how reliably it can deliver a message. "External" in this case means a Mozilla-internal group that tracks how "successful" a Send Tab to Device is. There are some complications with this, as that group has a different qualification for "success" than AutoPush does (delivery within a very short timeframe). AutoPush should be able to determine if there are any points of loss internally without incurring the sizable costs of full tracking. + +This can be done via sampling, but it's important to only sample push messages that have agreed to this. Fortunately, we can use those same Mozilla-generated and Mozilla-consumed push notification messages for "Send Tab". (The action of sending a tab is done by the user, but the use of the AutoPush system is an internal detail. It would be like tracking a dollar bill through a vending machine. We don't know or care who put the bill into the machine, nor do we care what they purchased. We are only watching to make sure the bill doesn't get stuck or fall out somewhere.) + +## Configuration + +In order to start the process, the AutoEndpoint configuration file gets a list of VAPID public keys to look for. The key format is not a standard PEM format, so the `convert_pem_to_x962.py` script is used to convert the public key format into something that is easier to scan for. + +That key is registered using the `autoendpoint.tracking_keys` configuration setting. + +Push Reliability requires a Redis-like memory storage system to manage the various milestone transactions. Milestones are tracked using two internal stores: the first is a Hash Incrementor (HINCR) "state_counts" table, which records the count of messages at a given state; the second is an "expiry" table, which records the expiration timestamp for the subscription. + +Push Reliability also includes a Bigtable `reliability` column family, which is used to create a long-term record that can be used for more "in depth" analysis of a given message's path. + +## Operation + +If an incoming subscription is validated and contains a matching VAPID public key, then a `reliability_id` is assigned to the message. This ID is a random UUID that is attached to the message. All tracking is tied to this ID alone, and only performed if the `reliable_report` feature is enabled in the code. + +Various milestones have been set along a given message's path. These are defined in `autopush_common::reliability::PushReliabilityState`. When a message transitions from one state to the next, the new `state_count` is incremented, and the old `state_count` is decremented. In addition, the "expiry" table, which is a scored hash table, records the message expiration. The "expiry" table uses a combination of "`state`#`reliability_id`" as the key, with the expiration timestamp as the value. This table is scanned by the separate `scripts/reliability_cron.py` script, which looks for expired keys, decrements their counts, and logs the expired status to Bigtable. This script is required because there are no known data stores that provide this function automatically (recommendations are very welcome). The use of an outside script ensures that only a single application decrements values and logs data, preventing possible race conditions.
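To make the data model concrete, here is a minimal sketch of those two Redis structures using the `redis-py` client. The table names ("state_counts", "expiry") and the "`state`#`reliability_id`" member format are the ones described above; the connection URL, the `record_transition` helper, and the sample UUID are illustrative only, since the production implementation is the Rust `autopush_common::reliability` module.

```python
# Illustration only (not the production Rust code): recording one milestone
# transition in the Redis structures described above, using redis-py.
import time

import redis

client = redis.Redis.from_url("redis://localhost:6379")


def record_transition(reliability_id: str, old: str | None, new: str, expiry_ts: int) -> None:
    """Move one tracked message from `old` to `new`, keeping counts and expiry in sync."""
    pipe = client.pipeline()
    pipe.hincrby("state_counts", new, 1)  # increment the new milestone count
    pipe.zadd("expiry", {f"{new}#{reliability_id}": expiry_ts})  # score = expiration timestamp
    if old is not None:
        pipe.hincrby("state_counts", old, -1)  # decrement the old milestone count
        pipe.zrem("expiry", f"{old}#{reliability_id}")
    pipe.execute()  # apply all of the steps in one round trip


# Example: a message moves from "received" to "stored" and expires in an hour.
record_transition("3b241101-e2bb-4255-8caf-4136c566a962", "received", "stored", int(time.time()) + 3600)
```

With this layout, the garbage collector only has to range-scan the "expiry" table by score (e.g., `ZRANGEBYSCORE` up to the current time) to find members whose expiration is in the past, decrement the matching counts, and log the expired entries.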
+ +"Live" counts can be displayed using any tool that can read the Redis data store, or the AutoEndpoint `/__milestones__` endpoint can be queried. Historical tracking is not part of this, and should be done using external tooling and the `reliability_cron.py` script. + +## Data Retention + +Push subscriptions have a maximum lifetime of 30 days. The Redis Reliability tracking information will last as long as the message TTL. The Bigtable Reliability Log information will be retained for twice the maximum subscription lifetime, or 60 days, to allow for some longer-term trend analysis. (Note: this reliability information does not include any record of the user, subscription provider, or message content. It only includes the milestone and the timestamp when the message crossed that milestone.) diff --git a/scripts/setup_bt.sh b/scripts/setup_bt.sh index 938bb53c6..a7b375fc2 100755 --- a/scripts/setup_bt.sh +++ b/scripts/setup_bt.sh @@ -12,11 +12,15 @@ TABLE_NAME=${TABLE_NAME:-"autopush"} MESSAGE_FAMILY=${MESSAGE_FAMILY:-"message"} MESSAGE_TOPIC_FAMILY=${MESSAGE_TOPIC_FAMILY:-"message_topic"} ROUTER_FAMILY=${ROUTER_FAMILY:-"router"} +RELIABILITY_FAMILY=${RELIABILITY_FAMILY:-"reliability"} cbt -project $PROJECT -instance $INSTANCE createtable $TABLE_NAME cbt -project $PROJECT -instance $INSTANCE createfamily $TABLE_NAME $MESSAGE_FAMILY cbt -project $PROJECT -instance $INSTANCE createfamily $TABLE_NAME $MESSAGE_TOPIC_FAMILY cbt -project $PROJECT -instance $INSTANCE createfamily $TABLE_NAME $ROUTER_FAMILY +cbt -project $PROJECT -instance $INSTANCE createfamily $TABLE_NAME $RELIABILITY_FAMILY + cbt -project $PROJECT -instance $INSTANCE setgcpolicy $TABLE_NAME $MESSAGE_FAMILY "maxage=1s or maxversions=1" cbt -project $PROJECT -instance $INSTANCE setgcpolicy $TABLE_NAME $MESSAGE_TOPIC_FAMILY "maxage=1s or maxversions=1" cbt -project $PROJECT -instance $INSTANCE setgcpolicy $TABLE_NAME $ROUTER_FAMILY maxversions=1 +cbt -project $PROJECT -instance $INSTANCE setgcpolicy $TABLE_NAME $RELIABILITY_FAMILY "maxage=60d or maxversions=1" From 1c7e26aa2a10034ccf6673d381295b1cc0f88154 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Fri, 18 Oct 2024 15:37:25 -0700 Subject: [PATCH 12/25] f r's stop unwrapping things. --- autoconnect/autoconnect-common/src/protocol.rs | 3 --- autoendpoint/src/error.rs | 13 ++++++++++--- autoendpoint/src/extractors/subscription.rs | 6 +++--- autoendpoint/src/routers/webpush.rs | 16 +++++++++------- 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 07eafe735..d02d623da 100644 --- a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -90,9 +90,6 @@ pub struct ClientAck { pub channel_id: Uuid, // The corresponding version number for the message.
pub version: String, - // The Reliability ID for the messages (if present) - #[serde(default)] - pub reliability_id: Option, } #[derive(Debug, Serialize)] diff --git a/autoendpoint/src/error.rs b/autoendpoint/src/error.rs index 75f77421c..3aec6c2c3 100644 --- a/autoendpoint/src/error.rs +++ b/autoendpoint/src/error.rs @@ -73,6 +73,9 @@ pub enum ApiErrorKind { #[error(transparent)] Serde(#[from] serde_json::Error), + #[error(transparent)] + ReqwestError(#[from] reqwest::Error), + #[error("Error while validating token")] TokenHashValidation(#[source] openssl::error::ErrorStack), @@ -165,7 +168,8 @@ impl ApiErrorKind { | ApiErrorKind::Io(_) | ApiErrorKind::Metrics(_) | ApiErrorKind::EndpointUrl(_) - | ApiErrorKind::RegistrationSecretHash(_) => StatusCode::INTERNAL_SERVER_ERROR, + | ApiErrorKind::RegistrationSecretHash(_) + | ApiErrorKind::ReqwestError(_) => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -203,6 +207,7 @@ impl ApiErrorKind { ApiErrorKind::Conditional(_) => "conditional", ApiErrorKind::EndpointUrl(e) => return e.metric_label(), ApiErrorKind::RegistrationSecretHash(_) => "registration_secret_hash", + ApiErrorKind::ReqwestError(_) => "reqwest", }) } @@ -225,7 +230,8 @@ impl ApiErrorKind { // Ignore oversized payload. ApiErrorKind::PayloadError(_) | ApiErrorKind::Validation(_) | - ApiErrorKind::Conditional(_) => false, + ApiErrorKind::Conditional(_) | + ApiErrorKind::ReqwestError(_) => false, _ => true, } } @@ -274,7 +280,8 @@ impl ApiErrorKind { | ApiErrorKind::InvalidRouterToken | ApiErrorKind::RegistrationSecretHash(_) | ApiErrorKind::EndpointUrl(_) - | ApiErrorKind::InvalidMessageId => None, + | ApiErrorKind::InvalidMessageId + | ApiErrorKind::ReqwestError(_) => None, } } } diff --git a/autoendpoint/src/extractors/subscription.rs b/autoendpoint/src/extractors/subscription.rs index a037f91cc..c1d5f9b0e 100644 --- a/autoendpoint/src/extractors/subscription.rs +++ b/autoendpoint/src/extractors/subscription.rs @@ -73,13 +73,13 @@ impl FromRequest for Subscription { .transpose()?; trace!("raw vapid: {:?}", &vapid); - let reliability_id: Option = vapid.clone().and_then(|v| { + let reliability_id: Option = vapid.as_ref().and_then(|v| { app_state .vapid_tracker - .is_trackable(&v) + .is_trackable(v) .then(|| app_state.vapid_tracker.get_id(req.headers())) }); - debug!("🔍 Assigning Reliability ID: {:?}", reliability_id); + debug!("🔍 Assigning Reliability ID: {reliability_id:?}"); // Capturing the vapid sub right now will cause too much cardinality. Instead, // let's just capture if we have a valid VAPID, as well as what sort of bad sub diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 609132607..2857405f7 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -69,12 +69,14 @@ impl Router for WebPushRouter { ); } Err(error) => { - if error.is_timeout() { - self.metrics.incr("error.node.timeout")?; + if let ApiErrorKind::ReqwestError(error) = &error.kind { + if error.is_timeout() { + self.metrics.incr("error.node.timeout")?; + }; + if error.is_connect() { + self.metrics.incr("error.node.connect")?; + }; }; - if error.is_connect() { - self.metrics.incr("error.node.connect")?; - } debug!("✉ Error while sending webpush notification: {}", error); self.remove_node_id(user, node_id).await? 
} @@ -177,11 +179,11 @@ impl WebPushRouter { &self, notification: &Notification, node_id: &str, - ) -> Result { + ) -> ApiResult { let url = format!("{}/push/{}", node_id, notification.subscription.user.uaid); let notification = notification.serialize_for_delivery().unwrap(); - self.http.put(&url).json(¬ification).send().await + Ok(self.http.put(&url).json(¬ification).send().await?) } /// Notify the node to check for notifications for the user From 07e7db31303e53bb25bf943292a4de9c48815eb0 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Fri, 18 Oct 2024 16:44:33 -0700 Subject: [PATCH 13/25] f r's --- autoendpoint/src/routers/webpush.rs | 2 +- autoendpoint/src/settings.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 2857405f7..ac14b31e3 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -181,7 +181,7 @@ impl WebPushRouter { node_id: &str, ) -> ApiResult { let url = format!("{}/push/{}", node_id, notification.subscription.user.uaid); - let notification = notification.serialize_for_delivery().unwrap(); + let notification = notification.serialize_for_delivery()?; Ok(self.http.put(&url).json(¬ification).send().await?) } diff --git a/autoendpoint/src/settings.rs b/autoendpoint/src/settings.rs index 10ba97f20..9d5997185 100644 --- a/autoendpoint/src/settings.rs +++ b/autoendpoint/src/settings.rs @@ -172,7 +172,7 @@ impl Settings { let result = Self::read_list_from_str(keys, "Invalid AUTOEND_TRACKING_KEYS") .map(|v| v.to_owned().replace("=", "")) .collect(); - trace!("🔍 tracking_keys: {:?}", result); + trace!("🔍 tracking_keys: {result:?}"); result } From 853268c44af2cf45f1f94ca2a55d7db527bc5c83 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Mon, 21 Oct 2024 14:33:53 -0700 Subject: [PATCH 14/25] f post merge fix --- autoendpoint/src/routers/webpush.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 477c2dedb..550530d11 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -237,12 +237,13 @@ impl WebPushRouter { ) -> ApiResult { let url = format!("{}/push/{}", node_id, notification.subscription.user.uaid); - let notification_out = notification.serialize_for_delivery(); + let notification_out = notification.serialize_for_delivery()?; trace!( - "⏩ out: Notification: {}, channel_id: {}", + "⏩ out: Notification: {}, channel_id: {} :: {:?}", ¬ification.subscription.user.uaid, ¬ification.subscription.channel_id, + ¬ification_out, ); Ok(self.http.put(&url).json(¬ification_out).send().await?) } From 28bd9218704c3c066536da63a8d951d494f3b82c Mon Sep 17 00:00:00 2001 From: jrconlin Date: Tue, 22 Oct 2024 16:08:11 -0700 Subject: [PATCH 15/25] f add metric logging to reliability_cron.py --- scripts/reliablity_cron.py | 81 +++++++++++++++++++++++++++++++------- 1 file changed, 66 insertions(+), 15 deletions(-) diff --git a/scripts/reliablity_cron.py b/scripts/reliablity_cron.py index 061b21ad3..8f8cdfb37 100644 --- a/scripts/reliablity_cron.py +++ b/scripts/reliablity_cron.py @@ -3,8 +3,8 @@ """ This program reaps expired records and adjusts counts. -Currently, this is desined to run on a cron, however it -can be adapted to include it's own timing loop. +Specifying "--nap=0" will cause this app to only run once. 
+ """ import argparse @@ -12,14 +12,13 @@ import json import logging import os -import time import pdb - +import time from typing import cast import redis +import statsd import toml - from google.cloud.bigtable.data import ( BigtableDataClientAsync, RowMutationEntry, @@ -42,7 +41,7 @@ class Counter: failed to be delivered. """ - def __init__(self, log: logging.Logger, settings): + def __init__(self, log: logging.Logger, settings: argparse.Namespace): try: import pdb @@ -117,6 +116,19 @@ async def gc(self) -> dict[str, int | float]: ) return result + def counts(self) -> dict[str, int]: + """Return the current milestone counts (this should happen shortly after a gc)""" + return cast(dict[str, int], self.redis.hgetall(self.settings.count_table)) + + +def record_metrics( + log: logging.Logger, settings: argparse.Namespace, counts: dict[str, int] +): + """Record the counts to metrics""" + log.info(f"📈 Recording metrics: {counts}") + for label, count in counts.items(): + cast(statsd.StatsClient, settings.metric).gauge(label, count) + def config(env_args: os._Environ = os.environ) -> argparse.Namespace: """Read the configuration from the args and environment.""" @@ -129,21 +141,21 @@ def config(env_args: os._Environ = os.environ) -> argparse.Namespace: "-r", help="DSN to connect to the Redis like service.", default=env_args.get( - "AUTOCONNECT_RELIABILITY_DSN", env_args.get("AUTOEND_RELIABILITY_DSN") + "AUTOEND_RELIABILITY_DSN", env_args.get("AUTOCONNECT_RELIABILITY_DSN") ), ) parser.add_argument( "--db_dsn", "-b", help="User Agent ID", - default=env_args.get("AUTOCONNECT_DB_DSN", env_args.get("AUTOEND_DB_DSN")), + default=env_args.get("AUTOEND_DB_DSN", env_args.get("AUTOCONNECT_DB_DSN")), ) parser.add_argument( "--db_settings", "-s", help="User Agent ID", default=env_args.get( - "AUTOCONNECT_DB_SETTINGS", env_args.get("AUTOEND_DB_SETTINGS") + "AUTOEND_DB_SETTINGS", env_args.get("AUTOCONNECT_DB_SETTINGS") ), ) parser.add_argument( @@ -161,6 +173,33 @@ def config(env_args: os._Environ = os.environ) -> argparse.Namespace: help="Name of Bigtable log family", default=env_args.get("AUTOTRACK_EXPIRY", "reliability"), ) + parser.add_argument( + "--statsd_host", + help="Metric host name", + default=env_args.get( + "AUTOEND_STATSD_HOST", env_args.get("AUTOCONNECT_STATSD_HOST") + ), + ) + parser.add_argument( + "--statsd_port", + help="Metric host port", + default=env_args.get( + "AUTOEND_STATSD_HOST", env_args.get("AUTOCONNECT_STATSD_HOST", 8125) + ), + ) + parser.add_argument( + "--statsd_label", + help="Metric root namespace", + default=env_args.get( + "AUTOEND_STATSD_LABEL", + env_args.get("AUTOCONNECT_STATSD_LABEL", "autotrack"), + ), + ) + parser.add_argument( + "--nap", + help="seconds to nap between each gc cycle (smaller number is more accurate measurements)", + default=60, + ) args = parser.parse_args() # if we have a config file, read from that and then reload. 
@@ -180,6 +219,13 @@ def config(env_args: os._Environ = os.environ) -> argparse.Namespace: bt_settings[parts[i].rstrip("s")] = parts[i + 1] args.bigtable = bt_settings + if args.statsd_host or args.statsd_port: + args.metrics = statsd.StatsClient( + args.statsd_host, args.statsd_port, prefix=args.statsd_label + ) + else: + args.metrics = None + return args @@ -191,14 +237,19 @@ def init_logs(): return log -async def amain(log, settings): +async def amain(log: logging.Logger, settings: argparse.Namespace): """Async main loop""" counter = Counter(log, settings) - _result = await counter.gc() - # TODO: adjust timing loop based on result time. - # Ideally, this would have a loop that it runs on that becomes tighter the more items - # were purged, and adjusts based on the time it took to run. - return + while True: + _result = await counter.gc() + record_metrics(log, settings, counter.counts()) + # TODO: adjust timing loop based on result time. + # Ideally, this would have a loop that it runs on that + # becomes tighter the more items were purged, and adjusts + # based on the time it took to run. + if settings.nap == 0: + return + time.sleep(settings.nap) def main(): From d9824f33e4ec3496fa8054fb591df445b9c7f6c9 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Fri, 22 Nov 2024 10:12:43 -0800 Subject: [PATCH 16/25] f r's --- .../autoconnect-common/src/protocol.rs | 2 - .../autoconnect-settings/src/app_state.rs | 2 +- autoconnect/autoconnect-settings/src/lib.rs | 3 + autoconnect/autoconnect-web/src/routes.rs | 31 ++++----- .../src/identified/on_server_notif.rs | 7 +- autoendpoint/src/extractors/notification.rs | 4 +- autoendpoint/src/extractors/subscription.rs | 21 ++---- autoendpoint/src/routers/apns/router.rs | 8 ++- autoendpoint/src/routers/fcm/router.rs | 6 +- autoendpoint/src/routers/webpush.rs | 10 +-- autoendpoint/src/server.rs | 2 +- autoendpoint/src/settings.rs | 5 ++ .../src/db/bigtable/bigtable_client/mod.rs | 4 +- autopush-common/src/db/client.rs | 2 +- autopush-common/src/db/mock.rs | 2 +- autopush-common/src/notification.rs | 2 +- autopush-common/src/reliability.rs | 67 ++++++++++--------- scripts/reliablity_cron.py | 5 -- .../integration/test_integration_all_rust.py | 3 +- 19 files changed, 87 insertions(+), 99 deletions(-) diff --git a/autoconnect/autoconnect-common/src/protocol.rs b/autoconnect/autoconnect-common/src/protocol.rs index 3f6207b71..d02d623da 100644 --- a/autoconnect/autoconnect-common/src/protocol.rs +++ b/autoconnect/autoconnect-common/src/protocol.rs @@ -90,8 +90,6 @@ pub struct ClientAck { pub channel_id: Uuid, // The corresponding version number for the message. 
pub version: String, - #[serde(default)] - pub reliability_id: Option, } #[derive(Debug, Serialize)] diff --git a/autoconnect/autoconnect-settings/src/app_state.rs b/autoconnect/autoconnect-settings/src/app_state.rs index 5cb85cfa8..7999ac604 100644 --- a/autoconnect/autoconnect-settings/src/app_state.rs +++ b/autoconnect/autoconnect-settings/src/app_state.rs @@ -92,7 +92,7 @@ impl AppState { #[cfg(feature = "reliable_report")] let reliability = Arc::new( - PushReliability::new(&settings.reliability_dsn, &Some(db.clone())).map_err(|e| { + PushReliability::new(&settings.reliability_dsn, db.clone()).map_err(|e| { ConfigError::Message(format!("Could not start Reliability connection: {:?}", e)) })?, ); diff --git a/autoconnect/autoconnect-settings/src/lib.rs b/autoconnect/autoconnect-settings/src/lib.rs index dbb4b377b..a03b47b4e 100644 --- a/autoconnect/autoconnect-settings/src/lib.rs +++ b/autoconnect/autoconnect-settings/src/lib.rs @@ -109,6 +109,9 @@ pub struct Settings { /// By default, the number of available physical CPUs is used as the worker count. pub actix_workers: Option, #[cfg(feature = "reliable_report")] + /// The DNS for the reliability data store. This is normally a Redis compatible + /// storage system. See [Connection Parameters](https://docs.rs/redis/latest/redis/#connection-parameters) + /// for details. pub reliability_dsn: Option, } diff --git a/autoconnect/autoconnect-web/src/routes.rs b/autoconnect/autoconnect-web/src/routes.rs index a435cdce4..b935a3484 100644 --- a/autoconnect/autoconnect-web/src/routes.rs +++ b/autoconnect/autoconnect-web/src/routes.rs @@ -21,38 +21,35 @@ pub async fn push_route( notif: web::Json, app_state: web::Data, ) -> HttpResponse { - #[allow(unused_mut)] // Needed for "reliable_report" - let mut notif = notif.into_inner(); + trace!( + "⏩ in push_route, uaid: {} channel_id: {}", + uaid, + notif.channel_id, + ); #[cfg(feature = "reliable_report")] - { + let (mut notif, expiry) = { + let mut notif = notif.into_inner(); + let expiry = Some(notif.timestamp + notif.ttl); notif.reliable_state = app_state .reliability .record( ¬if.reliability_id, - autopush_common::reliability::PushReliabilityState::IntAccepted, + autopush_common::reliability::ReliabilityState::IntAccepted, ¬if.reliable_state, - Some(notif.timestamp + notif.ttl), + expiry, ) .await; - } - trace!( - "⏩ in push_route, uaid: {} channel_id: {}", - uaid, - notif.channel_id, - ); - #[cfg(feature = "reliable_report")] - let expiry = { // Set "transmitted" a bit early since we can't do this inside of `notify`. notif.reliable_state = app_state .reliability .record( ¬if.reliability_id, - autopush_common::reliability::PushReliabilityState::Transmitted, + autopush_common::reliability::ReliabilityState::Transmitted, ¬if.reliable_state, - Some(notif.timestamp + notif.ttl), + expiry, ) .await; - Some(notif.timestamp + notif.ttl) + (notif, expiry) }; // Attempt to send the notification to the UA using WebSocket protocol, or store on failure. 
let result = app_state @@ -67,7 +64,7 @@ pub async fn push_route( .reliability .record( ¬if.reliability_id, - autopush_common::reliability::PushReliabilityState::Accepted, + autopush_common::reliability::ReliabilityState::Accepted, ¬if.reliable_state, expiry, ) diff --git a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs index 406d49c44..cf5108678 100644 --- a/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs +++ b/autoconnect/autoconnect-ws/autoconnect-ws-sm/src/identified/on_server_notif.rs @@ -168,11 +168,10 @@ impl WebPushClient { async fn record_state( &self, messages: &mut Vec, - state: autopush_common::reliability::PushReliabilityState, + state: autopush_common::reliability::ReliabilityState, ) { // *Note* because `.map()` is sync // we can't call the async func without additional hoops. - // I'm guessing that there's a more elegant way to do this, but this works for now. for message in messages { let expiry = message.timestamp + message.ttl; message.reliable_state = self @@ -221,7 +220,7 @@ impl WebPushClient { // Since we pulled these from storage, mark them as "retrieved" self.record_state( &mut messages.messages, - autopush_common::reliability::PushReliabilityState::Retreived, + autopush_common::reliability::ReliabilityState::Retrieved, ) .await; messages @@ -284,7 +283,7 @@ impl WebPushClient { // Since we pulled these from storage, mark them as "retrieved" self.record_state( &mut timestamp_resp.messages, - autopush_common::reliability::PushReliabilityState::Retreived, + autopush_common::reliability::ReliabilityState::Retrieved, ) .await; } diff --git a/autoendpoint/src/extractors/notification.rs b/autoendpoint/src/extractors/notification.rs index 5c9b38552..3969d6a9d 100644 --- a/autoendpoint/src/extractors/notification.rs +++ b/autoendpoint/src/extractors/notification.rs @@ -28,7 +28,7 @@ pub struct Notification { pub data: Option, #[cfg(feature = "reliable_report")] /// The current state the message was in (if tracked) - pub reliable_state: Option, + pub reliable_state: Option, #[cfg(feature = "reliable_report")] /// The UTC expiration timestamp for this message pub expiry: Option, @@ -89,7 +89,7 @@ impl FromRequest for Notification { .reliability .record( &subscription.reliability_id, - autopush_common::reliability::PushReliabilityState::Received, + autopush_common::reliability::ReliabilityState::Received, &None, expiry, ) diff --git a/autoendpoint/src/extractors/subscription.rs b/autoendpoint/src/extractors/subscription.rs index 0dccc463c..ae45676df 100644 --- a/autoendpoint/src/extractors/subscription.rs +++ b/autoendpoint/src/extractors/subscription.rs @@ -72,36 +72,23 @@ impl FromRequest for Subscription { .transpose()?; trace!("raw vapid: {:?}", &vapid); // Validate the VAPID JWT token, fetch the claims, and record the version - let vapid = if let Some(with_key) = vapid { + if let Some(with_key) = vapid.clone() { // Validate the VAPID JWT token and record the version validate_vapid_jwt(&with_key, &app_state.settings, &app_state.metrics)?; app_state.metrics.incr(&format!( "updates.vapid.draft{:02}", with_key.vapid.version() ))?; - Some(with_key) - } else { - None }; - + // If this is a known VAPID key, create a reliability_id from + // either the content of the vapid assertions, or the request + // header value, or just make one up. 
let reliability_id: Option = vapid.as_ref().and_then(|v| { app_state .reliability_filter .is_trackable(v) .then(|| app_state.reliability_filter.get_id(req.headers())) }); - debug!("🔍 Assigning Reliability ID: {reliability_id:?}"); - - trace!("🔐 raw vapid: {:?}", &vapid); - let reliability_id = vapid - .as_ref() - .map(|v| { - app_state - .reliability_filter - .is_trackable(v) - .then(|| app_state.reliability_filter.get_id(req.headers())) - }) - .unwrap_or_default(); trace!("🔍 track_id: {:?}", reliability_id); // Capturing the vapid sub right now will cause too much cardinality. Instead, // let's just capture if we have a valid VAPID, as well as what sort of bad sub diff --git a/autoendpoint/src/routers/apns/router.rs b/autoendpoint/src/routers/apns/router.rs index 75020fe86..dba454cf7 100644 --- a/autoendpoint/src/routers/apns/router.rs +++ b/autoendpoint/src/routers/apns/router.rs @@ -1,6 +1,6 @@ use autopush_common::db::client::DbClient; #[cfg(feature = "reliable_report")] -use autopush_common::reliability::{PushReliability, PushReliabilityState}; +use autopush_common::reliability::{PushReliability, ReliabilityState}; use crate::error::{ApiError, ApiResult}; use crate::extractors::notification::Notification; @@ -489,7 +489,7 @@ impl Router for ApnsRouter { self.reliability .record( ¬ification.subscription.reliability_id, - PushReliabilityState::Transmitted, + ReliabilityState::Transmitted, ¬ification.reliable_state, notification.expiry, ) @@ -587,7 +587,9 @@ mod tests { metrics: Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), db, #[cfg(feature = "reliable_report")] - reliability: Arc::new(PushReliability::new(&None, &None).unwrap()), + reliability: Arc::new( + PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap(), + ), } } diff --git a/autoendpoint/src/routers/fcm/router.rs b/autoendpoint/src/routers/fcm/router.rs index ed1d6ce5d..deccaf6d4 100644 --- a/autoendpoint/src/routers/fcm/router.rs +++ b/autoendpoint/src/routers/fcm/router.rs @@ -1,5 +1,5 @@ #[cfg(feature = "reliable_report")] -use autopush_common::reliability::{PushReliability, PushReliabilityState}; +use autopush_common::reliability::{PushReliability, ReliabilityState}; use autopush_common::{db::client::DbClient, MAX_NOTIFICATION_TTL}; use crate::error::ApiResult; @@ -193,7 +193,7 @@ impl Router for FcmRouter { self.reliability .record( ¬ification.subscription.reliability_id, - PushReliabilityState::Transmitted, + ReliabilityState::Transmitted, ¬ification.reliable_state, notification.expiry, ) @@ -268,7 +268,7 @@ mod tests { Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), db, #[cfg(feature = "reliable_report")] - Arc::new(PushReliability::new(&None, &None).unwrap()), + Arc::new(PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap()), ) .await .unwrap() diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 550530d11..495acee32 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -70,7 +70,7 @@ impl Router for WebPushRouter { .reliability .record( ¬ification.reliability_id, - autopush_common::reliability::PushReliabilityState::IntTransmitted, + autopush_common::reliability::ReliabilityState::IntTransmitted, ¬ification.reliable_state, notification.expiry, ) @@ -143,7 +143,7 @@ impl Router for WebPushRouter { .reliability .record( ¬ification.reliability_id, - autopush_common::reliability::PushReliabilityState::Expired, + autopush_common::reliability::ReliabilityState::Expired, 
¬ification.reliable_state, notification.expiry, ) @@ -288,7 +288,7 @@ impl WebPushRouter { .reliability .record( ¬ification.subscription.reliability_id, - autopush_common::reliability::PushReliabilityState::Stored, + autopush_common::reliability::ReliabilityState::Stored, ¬ification.reliable_state, notification.expiry, ) @@ -380,7 +380,9 @@ mod test { http: reqwest::Client::new(), endpoint_url: Url::parse("http://localhost:8080/").unwrap(), #[cfg(feature = "reliable_report")] - reliability: Arc::new(PushReliability::new(&None, &None).unwrap()), + reliability: Arc::new( + PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap(), + ), } } diff --git a/autoendpoint/src/server.rs b/autoendpoint/src/server.rs index 5321857b5..25c2b5702 100644 --- a/autoendpoint/src/server.rs +++ b/autoendpoint/src/server.rs @@ -90,7 +90,7 @@ impl Server { }; #[cfg(feature = "reliable_report")] let reliability = Arc::new( - PushReliability::new(&settings.reliability_dsn, &Some(db.clone())).map_err(|e| { + PushReliability::new(&settings.reliability_dsn, db.clone()).map_err(|e| { ApiErrorKind::General(format!("Could not initialize Reliability Report: {:?}", e)) })?, ); diff --git a/autoendpoint/src/settings.rs b/autoendpoint/src/settings.rs index 9baea439f..b374fb803 100644 --- a/autoendpoint/src/settings.rs +++ b/autoendpoint/src/settings.rs @@ -54,8 +54,13 @@ pub struct Settings { pub fcm: FcmSettings, pub apns: ApnsSettings, #[cfg(feature = "stub")] + /// "Stub" is a predictable Mock bridge that allows us to "send" data and return an expected + /// result. pub stub: StubSettings, #[cfg(feature = "reliable_report")] + /// The DNS for the reliability data store. This is normally a Redis compatible + /// storage system. See [Connection Parameters](https://docs.rs/redis/latest/redis/#connection-parameters) + /// for details. pub reliability_dsn: Option, } diff --git a/autopush-common/src/db/bigtable/bigtable_client/mod.rs b/autopush-common/src/db/bigtable/bigtable_client/mod.rs index d9f3d487c..2c4a05423 100644 --- a/autopush-common/src/db/bigtable/bigtable_client/mod.rs +++ b/autopush-common/src/db/bigtable/bigtable_client/mod.rs @@ -746,7 +746,7 @@ impl BigTableClientImpl { } if let Some(cell) = row.take_cell("reliable_state") { notif.reliable_state = Some( - crate::reliability::PushReliabilityState::from_str(&to_string( + crate::reliability::ReliabilityState::from_str(&to_string( cell.value, "reliable_state", )?) 
@@ -1455,7 +1455,7 @@ impl DbClient for BigTableClientImpl { async fn log_report( &self, reliability_id: &str, - new_state: crate::reliability::PushReliabilityState, + new_state: crate::reliability::ReliabilityState, ) -> DbResult<()> { let row_key = reliability_id.to_owned(); diff --git a/autopush-common/src/db/client.rs b/autopush-common/src/db/client.rs index 9ef91ef2b..bac32d32a 100644 --- a/autopush-common/src/db/client.rs +++ b/autopush-common/src/db/client.rs @@ -112,7 +112,7 @@ pub trait DbClient: Send + Sync { async fn log_report( &self, reliability_id: &str, - state: crate::reliability::PushReliabilityState, + state: crate::reliability::ReliabilityState, ) -> DbResult<()>; fn box_clone(&self) -> Box; diff --git a/autopush-common/src/db/mock.rs b/autopush-common/src/db/mock.rs index 882a1bde0..cdad4ddd2 100644 --- a/autopush-common/src/db/mock.rs +++ b/autopush-common/src/db/mock.rs @@ -99,7 +99,7 @@ impl DbClient for Arc { async fn log_report( &self, reliability_id: &str, - state: crate::reliability::PushReliabilityState, + state: crate::reliability::ReliabilityState, ) -> DbResult<()> { Arc::as_ref(self).log_report(reliability_id, state).await } diff --git a/autopush-common/src/notification.rs b/autopush-common/src/notification.rs index 29b89a057..3198a4fb0 100644 --- a/autopush-common/src/notification.rs +++ b/autopush-common/src/notification.rs @@ -30,7 +30,7 @@ pub struct Notification { #[serde(skip_serializing_if = "Option::is_none")] pub reliability_id: Option, #[cfg(feature = "reliable_report")] - pub reliable_state: Option, + pub reliable_state: Option, } pub const TOPIC_NOTIFICATION_PREFIX: &str = "01"; diff --git a/autopush-common/src/reliability.rs b/autopush-common/src/reliability.rs index 986531bec..b3e04d164 100644 --- a/autopush-common/src/reliability.rs +++ b/autopush-common/src/reliability.rs @@ -15,35 +15,36 @@ pub const COUNTS: &str = "state_counts"; pub const EXPIRY: &str = "expiry"; /// The various states that a message may transit on the way from reception to delivery. +// Note: "Message" in this context refers to the Subscription Update. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Deserialize)] -pub enum PushReliabilityState { +pub enum ReliabilityState { #[serde(rename = "received")] - Received, // Subscription was received by the Push Server + Received, // Message was received by the Push Server #[serde(rename = "stored")] - Stored, // Subscription was stored because it could not be delivered immediately - #[serde(rename = "retreived")] - Retreived, // Subscription was taken from storage for delivery + Stored, // Message was stored because it could not be delivered immediately + #[serde(rename = "retrieved")] + Retrieved, // Message was taken from storage for delivery #[serde(rename = "transmitted_webpush")] - IntTransmitted, // Subscription was handed off between autoendpoint and autoconnect + IntTransmitted, // Message was handed off between autoendpoint and autoconnect #[serde(rename = "accepted_webpush")] - IntAccepted, // Subscription was accepted by autoconnect from autopendpoint + IntAccepted, // Message was accepted by autoconnect from autopendpoint #[serde(rename = "transmitted")] - Transmitted, // Subscription was handed off for delivery to the UA + Transmitted, // Message was handed off for delivery to the UA #[serde(rename = "accepted")] - Accepted, // Subscription was accepted for delivery by the UA + Accepted, // Message was accepted for delivery by the UA #[serde(rename = "delivered")] - Delivered, // Subscription was provided to the WebApp recipient by the UA + Delivered, // Message was provided to the WebApp recipient by the UA #[serde(rename = "expired")] - Expired, // Subscription expired naturally (e.g. TTL=0) + Expired, // Message expired naturally (e.g. TTL=0) } // TODO: Differentiate between "transmitted via webpush" and "transmitted via bridge"? -impl std::fmt::Display for PushReliabilityState { +impl std::fmt::Display for ReliabilityState { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str(match self { Self::Received => "received", Self::Stored => "stored", - Self::Retreived => "retrieved", + Self::Retrieved => "retrieved", Self::Transmitted => "transmitted", Self::IntTransmitted => "transmitted_webpush", Self::IntAccepted => "accepted_webpush", @@ -54,14 +55,14 @@ impl std::fmt::Display for PushReliabilityState { } } -impl std::str::FromStr for PushReliabilityState { +impl std::str::FromStr for ReliabilityState { type Err = ApcError; fn from_str(s: &str) -> std::result::Result { Ok(match s.to_lowercase().as_str() { "received" => Self::Received, "stored" => Self::Stored, - "retrieved" => Self::Retreived, + "retrieved" => Self::Retrieved, "transmitted" => Self::Transmitted, "accepted" => Self::Accepted, "transmitted_webpush" => Self::IntTransmitted, @@ -77,7 +78,7 @@ impl std::str::FromStr for PushReliabilityState { } } -impl serde::Serialize for PushReliabilityState { +impl serde::Serialize for ReliabilityState { fn serialize(&self, serializer: S) -> std::result::Result where S: serde::Serializer, @@ -86,18 +87,21 @@ impl serde::Serialize for PushReliabilityState { } } -#[derive(Default, Clone)] +#[derive(Clone)] pub struct PushReliability { client: Option>, - db: Option>, + db: Box, } impl PushReliability { // Do the magic to make a report instance, whatever that will be. 
- pub fn new(reliability_dsn: &Option, db: &Option>) -> Result { + pub fn new(reliability_dsn: &Option, db: Box) -> Result { if reliability_dsn.is_none() { debug!("🔍 No reliability DSN declared."); - return Ok(Self::default()); + return Ok(Self { + client: None, + db: db.clone(), + }); }; let client = if let Some(dsn) = reliability_dsn { @@ -119,14 +123,13 @@ impl PushReliability { pub async fn record( &self, reliability_id: &Option, - new: PushReliabilityState, - old: &Option, + new: ReliabilityState, + old: &Option, expr: Option, - ) -> Option { - if reliability_id.is_none() { + ) -> Option { + let Some(id) = reliability_id else { return None; - } - let id = reliability_id.clone().unwrap(); + }; if let Some(client) = &self.client { debug!( "🔍 {} from {} to {}", @@ -155,13 +158,11 @@ impl PushReliability { }); } }; - if let Some(db) = &self.db { - // Errors are not fatal, and should not impact message flow, but - // we should record them somewhere. - let _ = db.log_report(&id, new).await.inspect_err(|e| { - warn!("🔍 Unable to record reliability state: {:?}", e); - }); - } + // Errors are not fatal, and should not impact message flow, but + // we should record them somewhere. + let _ = self.db.log_report(id, new).await.inspect_err(|e| { + warn!("🔍 Unable to record reliability state: {:?}", e); + }); Some(new) } diff --git a/scripts/reliablity_cron.py b/scripts/reliablity_cron.py index 8f8cdfb37..a2d7f0ece 100644 --- a/scripts/reliablity_cron.py +++ b/scripts/reliablity_cron.py @@ -12,7 +12,6 @@ import json import logging import os -import pdb import time from typing import cast @@ -43,9 +42,6 @@ class Counter: def __init__(self, log: logging.Logger, settings: argparse.Namespace): try: - import pdb - - pdb.set_trace() self.redis = redis.Redis.from_url(settings.reliability_dsn) self.bigtable = BigtableDataClientAsync( project=settings.bigtable["project"] @@ -66,7 +62,6 @@ async def gc(self) -> dict[str, int | float]: log_family = self.settings.log_family # Fetch the candidates to purge. 
- pdb.set_trace() mutations = list() purged = cast( list[bytes], self.redis.zrange(expiry, -1, int(start), byscore=True) diff --git a/tests/integration/test_integration_all_rust.py b/tests/integration/test_integration_all_rust.py index 623be8074..dd2bd1c14 100644 --- a/tests/integration/test_integration_all_rust.py +++ b/tests/integration/test_integration_all_rust.py @@ -827,13 +827,12 @@ async def test_basic_delivery_with_vapid( assert result.get("reliability_id") is None, "Tracking unknown message" +@pytest.mark.reliable_report async def test_basic_delivery_with_tracked_vapid( registered_test_client: AsyncPushTestClient, vapid_payload: dict[str, int | str], ) -> None: """Test delivery of a basic push message with a VAPID header.""" - if os.environ.get("RELIABLE_REPORT") is None: - pytest.skip("RELIABLE_REPORT not set, skipping test.") # TODO: connect to test redis server and redis.flushall() uuid_data: str = str(uuid.uuid4()) vapid_info = _get_vapid(key=TRACKING_KEY, payload=vapid_payload) From 3de403dff3438a623d735f526dbcf94c1b33f9dd Mon Sep 17 00:00:00 2001 From: jrconlin Date: Fri, 22 Nov 2024 12:41:44 -0800 Subject: [PATCH 17/25] f fix tests --- Cargo.lock | 888 ++++++++++++++++-------- Makefile | 16 +- autoendpoint/src/headers/vapid.rs | 2 +- autoendpoint/src/routers/apns/router.rs | 6 +- autoendpoint/src/routers/fcm/router.rs | 4 +- autoendpoint/src/routers/webpush.rs | 9 +- 6 files changed, 615 insertions(+), 310 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4af99d0e8..7df0e9609 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,9 +10,9 @@ checksum = "f279fc8b1f1a64138f0f4b9cda9be488ae35bc2f8556c7ffe60730f1c07d005a" dependencies = [ "base64 0.21.7", "erased-serde", - "http 1.1.0", + "http 1.2.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-rustls 0.26.0", "hyper-util", "openssl", @@ -153,7 +153,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -244,7 +244,7 @@ dependencies = [ "actix-utils", "futures-core", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "impl-more", "pin-project-lite", "tokio", @@ -313,7 +313,7 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -338,14 +338,14 @@ checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] name = "addr2line" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] @@ -404,6 +404,12 @@ dependencies = [ "alloc-no-stdlib", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "android-tzdata" version = "0.1.1" @@ -430,9 +436,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ 
-445,43 +451,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.89" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "arc-swap" @@ -489,6 +495,12 @@ version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" +[[package]] +name = "arraydeque" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d902e3d592a523def97af8f317b08ce16b7ab854c1985a0c671e6f15cebc236" + [[package]] name = "assert-json-diff" version = "2.0.2" @@ -518,7 +530,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -529,7 +541,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -579,14 +591,14 @@ dependencies = [ "docopt", "env_logger 0.11.5", "fernet", - "futures 0.3.30", + "futures 0.3.31", "futures-locks", "futures-util", "hex", "lazy_static", "log", "mozsvc-common", - "reqwest 0.12.8", + "reqwest 0.12.9", "sentry", "sentry-actix", "sentry-core", @@ -609,10 +621,10 @@ dependencies = [ "actix-web", "autopush_common", "cadence", - "futures 0.3.30", + "futures 0.3.31", "futures-locks", - "hyper 1.4.1", - "reqwest 0.12.8", + "hyper 1.5.1", + "reqwest 0.12.9", "sentry", "serde", "serde_derive", @@ -634,7 +646,7 @@ dependencies = [ "fernet", "lazy_static", "mozsvc-common", - "reqwest 0.12.8", + "reqwest 0.12.9", "serde", "serde_derive", "serde_json", @@ -663,7 +675,7 @@ dependencies = [ "cadence", "ctor", "futures-util", - "reqwest 0.12.8", + "reqwest 0.12.9", "serde_json", "slog-scope", "thiserror", @@ -687,7 +699,7 @@ dependencies = [ "autopush_common", "backtrace", "ctor", - "futures 0.3.30", + "futures 0.3.31", "mockall", "sentry", "serde_json", @@ -710,9 +722,9 @@ dependencies = [ 
"backtrace", "cadence", "ctor", - "futures 0.3.30", + "futures 0.3.31", "mockall", - "reqwest 0.12.8", + "reqwest 0.12.9", "sentry", "serde_json", "slog-scope", @@ -741,18 +753,18 @@ dependencies = [ "deadpool", "docopt", "fernet", - "futures 0.3.30", + "futures 0.3.31", "futures-util", "hex", "jsonwebtoken", "lazy_static", "log", "mockall", - "mockito 1.5.0", + "mockito 1.6.1", "openssl", "rand 0.8.5", "regex", - "reqwest 0.12.8", + "reqwest 0.12.9", "sentry", "sentry-actix", "sentry-core", @@ -792,7 +804,7 @@ dependencies = [ "derive_builder", "fernet", "form_urlencoded", - "futures 0.3.30", + "futures 0.3.31", "futures-util", "gethostname", "google-cloud-rust-raw", @@ -809,7 +821,7 @@ dependencies = [ "rand 0.8.5", "redis", "regex", - "reqwest 0.12.8", + "reqwest 0.12.9", "sentry", "sentry-backtrace", "serde", @@ -990,15 +1002,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.7.2" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "bytestring" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d80203ea6b29df88012294f62733de21cfeab47f17b41af3a38bc30a03ee72" +checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f" dependencies = [ "bytes", ] @@ -1014,9 +1026,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.24" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938" +checksum = "f34d93e62b03caf570cccc334cbc6c2fceca82f39211051345108adcba3eebdc" dependencies = [ "jobserver", "libc", @@ -1080,18 +1092,18 @@ dependencies = [ [[package]] name = "cmake" -version = "0.1.51" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" +checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" dependencies = [ "cc", ] [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "colored" @@ -1115,14 +1127,13 @@ dependencies = [ [[package]] name = "config" -version = "0.14.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7328b20597b53c2454f0b1919720c25c7339051c02b72b7e05409e00b14132be" +checksum = "68578f196d2a33ff61b27fae256c3164f65e36382648e30666dde05b8cc9dfdf" dependencies = [ "async-trait", "convert_case 0.6.0", "json5", - "lazy_static", "nom", "pathdiff", "ron", @@ -1130,7 +1141,7 @@ dependencies = [ "serde", "serde_json", "toml", - "yaml-rust", + "yaml-rust2", ] [[package]] @@ -1197,9 +1208,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] 
@@ -1246,12 +1257,12 @@ dependencies = [ [[package]] name = "ctor" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -1275,7 +1286,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -1286,7 +1297,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -1332,33 +1343,33 @@ dependencies = [ [[package]] name = "derive_builder" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd33f37ee6a119146a1781d3356a7c26028f83d779b2e04ecd45fdc75c76877b" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" dependencies = [ "derive_builder_macro", ] [[package]] name = "derive_builder_core" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] name = "derive_builder_macro" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -1371,7 +1382,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -1405,6 +1416,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "dlv-list" version = "0.5.2" @@ -1440,9 +1462,9 @@ checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] @@ -1500,19 +1522,19 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "fastrand" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "fernet" @@ -1541,9 +1563,9 @@ dependencies = 
[ [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -1593,9 +1615,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -1608,9 +1630,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -1618,15 +1640,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -1635,9 +1657,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-locks" @@ -1652,32 +1674,32 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ 
"futures 0.1.31", "futures-channel", @@ -1738,9 +1760,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -1754,7 +1776,7 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "864a48916c62ddbd1dc289be6d041d8ca61160c9c6169298e5cf3da11baf8370" dependencies = [ - "futures 0.3.30", + "futures 0.3.31", "grpcio", "protobuf", ] @@ -1811,16 +1833,16 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.1.0", + "http 1.2.0", "indexmap", "slab", "tokio", @@ -1830,15 +1852,28 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.13.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] [[package]] name = "hashbrown" -version = "0.15.0" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "hashlink" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +dependencies = [ + "hashbrown 0.14.5", +] [[package]] name = "heck" @@ -1917,9 +1952,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1944,7 +1979,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -1955,7 +1990,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] @@ -1980,9 +2015,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.30" +version = "0.14.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes", "futures-channel", @@ -2004,15 +2039,15 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.6", - "http 1.1.0", + "h2 0.4.7", + "http 1.2.0", "http-body 1.0.1", "httparse", "httpdate", @@ -2031,7 +2066,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "log", "rustls 0.21.12", "rustls-native-certs", @@ -2046,8 +2081,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.4.1", + "http 1.2.0", + "hyper 1.5.1", "hyper-util", "rustls 0.22.4", "rustls-pki-types", @@ -2064,10 +2099,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.4.1", + "http 1.2.0", + "hyper 1.5.1", "hyper-util", - "rustls 0.23.13", + "rustls 0.23.19", "rustls-pki-types", "tokio", "tokio-rustls 0.26.0", @@ -2081,7 +2116,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.30", + "hyper 0.14.31", "native-tls", "tokio", "tokio-native-tls", @@ -2095,7 +2130,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "native-tls", "tokio", @@ -2105,16 +2140,16 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", - "hyper 1.4.1", + "hyper 1.5.1", "pin-project-lite", "socket2", "tokio", @@ -2145,6 +2180,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -2161,20 +2314,41 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + [[package]] name = "impl-more" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" +checksum = "aae21c3177a27788957044151cc2800043d127acaa460a47ebb9b84dfa2c6aa0" [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", ] [[package]] @@ -2218,11 +2392,20 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = 
"d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" @@ -2235,10 +2418,11 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.70" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" +checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -2288,15 +2472,15 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.159" +version = "0.2.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "561d97a539a36e26a9a5fad1ea11a3039a67714694aaa379433e580854bc3dc5" +checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" [[package]] name = "libloading" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", "windows-targets 0.52.6", @@ -2324,18 +2508,18 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - [[package]] name = "linux-raw-sys" version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "local-channel" version = "0.1.5" @@ -2404,11 +2588,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi 0.3.9", "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", @@ -2439,7 +2622,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -2462,18 +2645,18 @@ dependencies = [ [[package]] name = "mockito" -version = "1.5.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b34bd91b9e5c5b06338d392463e1318d683cf82ec3d3af4014609be6e2108d" +checksum = "652cd6d169a36eaf9d1e6bce1a221130439a966d7f27858af66a33a66e9c4ee2" dependencies = [ "assert-json-diff", "bytes", "colored", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-util", "log", "rand 0.8.5", @@ -2577,27 +2760,24 @@ dependencies = [ [[package]] name = "object" -version = "0.36.4" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.20.1" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1" -dependencies = [ - "portable-atomic", -] +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "openssl" -version = "0.10.66" +version = "0.10.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" +checksum = "6174bc48f102d208783c2c84bf931bb75927a617866870de8a4ea85597f871f5" dependencies = [ "bitflags 2.6.0", "cfg-if", @@ -2616,7 +2796,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -2627,9 +2807,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.103" +version = "0.9.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" +checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" dependencies = [ "cc", "libc", @@ -2639,19 +2819,19 @@ dependencies = [ [[package]] name = "ordered-multimap" -version = "0.6.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ed8acf08e98e744e5384c8bc63ceb0364e68a6854187221c18df61c4797690e" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" dependencies = [ "dlv-list", - "hashbrown 0.13.2", + "hashbrown 0.14.5", ] [[package]] name = "os_info" -version = "3.8.2" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" +checksum = "e5ca711d8b83edbb00b44d504503cd247c9c0bd8b0fa2694f2a1a3d8165379ce" dependencies = [ "log", "serde", @@ -2714,9 +2894,9 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pathdiff" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "peeking_take_while" @@ -2742,9 +2922,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.13" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdbef9d1d47087a895abd220ed25eb4ad973a5e26f6a4367b038c25e28dfc2d9" +checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" dependencies = [ "memchr", "thiserror", @@ -2753,9 +2933,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.13" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3a6e3394ec80feb3b6393c725571754c6188490265c61aaf260810d6b95aa0" +checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" dependencies = [ "pest", "pest_generator", @@ -2763,22 +2943,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.13" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94429506bde1ca69d1b5601962c73f4172ab4726571a59ea95931218cb0e930e" +checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] 
[[package]] name = "pest_meta" -version = "2.7.13" +version = "2.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac8a071862e93690b6e34e9a5fb8e33ff3734473ac0245b27232222c4906a33f" +checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" dependencies = [ "once_cell", "pest", @@ -2787,9 +2967,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -2803,12 +2983,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" -[[package]] -name = "portable-atomic" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2" - [[package]] name = "powerfmt" version = "0.2.0" @@ -2876,9 +3050,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.86" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -2971,12 +3145,13 @@ dependencies = [ [[package]] name = "redis" -version = "0.27.3" +version = "0.27.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92f61607c4c4442b575fbc3f31a5dd4e5dd69cfea8f6afec5b83e24f61c126ab" +checksum = "09d8f99a4090c89cc489a94833c901ead69bfbf3877b4867d5482e321ee875bc" dependencies = [ "arc-swap", "combine", + "itertools 0.13.0", "itoa", "num-bigint", "percent-encoding", @@ -3017,9 +3192,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -3029,9 +3204,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -3064,7 +3239,7 @@ dependencies = [ "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -3092,9 +3267,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f713147fbe92361e52392c73b8c9e48c04c6625bce969ef54dc901e58e042a7b" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ "base64 0.22.1", "bytes", @@ -3102,11 +3277,11 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.6", - "http 1.1.0", + "h2 0.4.7", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.4.1", + "hyper 1.5.1", "hyper-rustls 0.27.3", "hyper-tls 0.6.0", "hyper-util", @@ -3122,7 +3297,7 @@ dependencies = [ 
"serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper 1.0.2", "system-configuration 0.6.1", "tokio", "tokio-native-tls", @@ -3163,9 +3338,9 @@ dependencies = [ [[package]] name = "rust-ini" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e2a3bcec1f113553ef1c88aae6c020a369d03d55b58de9869a0908930385091" +checksum = "3e0698206bcb8882bf2a9ecb4c1e7785db57ff052297085a6efd4fe42302068a" dependencies = [ "cfg-if", "ordered-multimap", @@ -3194,9 +3369,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags 2.6.0", "errno", @@ -3233,9 +3408,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.13" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ "once_cell", "rustls-pki-types", @@ -3276,9 +3451,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e696e35370c65c9c541198af4543ccd580cf17fc25d8e05c5a242b202488c55" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" [[package]] name = "rustls-webpki" @@ -3303,9 +3478,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" @@ -3324,9 +3499,9 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.24" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9aaafd5a2b6e3d657ff009d82fbd630b6bd54dd4eb06f21693925cdf80f9b8b" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -3368,9 +3543,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -3391,7 +3566,7 @@ dependencies = [ "httpdate", "log", "native-tls", - "reqwest 0.12.8", + "reqwest 0.12.9", "sentry-backtrace", "sentry-contexts", "sentry-core", @@ -3505,29 +3680,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" +checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.210" +version = "1.0.215" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" +checksum = 
"ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] name = "serde_json" -version = "1.0.128" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -3717,9 +3892,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3731,6 +3906,12 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "strsim" version = "0.8.0" @@ -3768,7 +3949,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -3789,9 +3970,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.79" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", @@ -3806,13 +3987,24 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" dependencies = [ "futures-core", ] +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "system-configuration" version = "0.5.1" @@ -3863,9 +4055,9 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "tempfile" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -3911,22 +4103,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.64" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -3941,9 +4133,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -3964,9 +4156,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", @@ -3981,6 +4173,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -3998,9 +4200,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.40.0" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", @@ -4022,7 +4224,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -4062,7 +4264,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.23.13", + "rustls 0.23.19", "rustls-pki-types", "tokio", ] @@ -4122,9 +4324,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -4133,9 +4335,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -4143,9 +4345,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "tracing-core", ] @@ -4185,9 +4387,9 @@ checksum = 
"5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -4218,9 +4420,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "2.10.1" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a" +checksum = "3193f92e105038f98ae68af40c008e3c94f2f046926e0f95e6c835dc6459bac8" dependencies = [ "base64 0.22.1", "log", @@ -4231,16 +4433,28 @@ dependencies = [ [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", - "idna", + "idna 1.0.3", "percent-encoding", "serde", ] +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -4249,9 +4463,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" +checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom 0.2.15", "serde", @@ -4263,7 +4477,7 @@ version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db79c75af171630a3148bd3e6d7c4f42b6a9a014c2945bc5ed0020cbb8d9478e" dependencies = [ - "idna", + "idna 0.5.0", "once_cell", "regex", "serde", @@ -4283,7 +4497,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", ] [[package]] @@ -4343,9 +4557,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.93" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" +checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c" dependencies = [ "cfg-if", "once_cell", @@ -4354,36 +4568,37 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.93" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" +checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.43" +version = "0.4.47" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" +checksum = "9dfaf8f50e5f293737ee323940c7d8b08a66a95a419223d9f41610ca08b0833d" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.93" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" +checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4391,22 +4606,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.93" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" +checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.93" +version = "0.2.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" +checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49" [[package]] name = "wasm-timer" @@ -4414,7 +4629,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" dependencies = [ - "futures 0.3.30", + "futures 0.3.31", "js-sys", "parking_lot 0.11.2", "pin-utils", @@ -4425,9 +4640,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.70" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" +checksum = "a98bc3c33f0fe7e59ad7cd041b89034fa82a7c2d4365ca538dda6cdaf513863c" dependencies = [ "js-sys", "wasm-bindgen", @@ -4435,9 +4650,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.6" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ "rustls-pki-types", ] @@ -4712,12 +4927,50 @@ dependencies = [ ] [[package]] -name = "yaml-rust" -version = "0.4.5" +name = "write16" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "yaml-rust2" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8902160c4e6f2fb145dbe9d6760a75e3c9522d8bf796ed7047c85919ac7115f8" +dependencies = [ + "arraydeque", + "encoding_rs", + "hashlink", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" dependencies = [ - "linked-hash-map", + 
"serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", + "synstructure", ] [[package]] @@ -4729,11 +4982,11 @@ dependencies = [ "anyhow", "async-trait", "base64 0.21.7", - "futures 0.3.30", + "futures 0.3.31", "http 0.2.12", - "hyper 0.14.30", + "hyper 0.14.31", "hyper-rustls 0.24.2", - "itertools", + "itertools 0.12.1", "log", "percent-encoding", "rustls 0.22.4", @@ -4765,7 +5018,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", + "synstructure", ] [[package]] @@ -4785,7 +5059,29 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.79", + "syn 2.0.90", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", ] [[package]] diff --git a/Makefile b/Makefile index 7c7762390..7a854dcdc 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,19 @@ SHELL := /bin/sh CARGO = cargo TESTS_DIR := tests TEST_RESULTS_DIR ?= workspace/test-results -PYTEST_ARGS ?= $(if $(SKIP_SENTRY),-m "not sentry") $(if $(TEST_STUB),,-m "not stub") # Stub tests do not work in CI +# Markers are "clever", and work by including multiple markers with " and ". +# This uses makefile magic to construct the set of items. +# technically `$(eval )` would work here, but I've not had it work consistently. +# The following is taken from [the makefile manual](https://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions) +NOOP := +SPACE :=$(NOOP) $(NOOP) +PYTEST_AND :=" and " +# specify the tests to NOT run +# NOTE: passing `-m not` to pytest will cause an error be sure at least one of these is set. 
+NEG_PYTEST_ARGS ?= $(if $(SKIP_SENTRY),sentry) $(if $(TEST_STUB),,stub) $(if $(TEST_RELIABILITY),,reliable_report) # Stub tests do not work in CI +# Now compose the pytest args by gluing the above items together +NEG_PYTEST_ARGS := "not $(subst $(SPACE), and not ,$(strip $(NEG_PYTEST_ARGS)))" +PYTEST_ARGS := -m $(NEG_PYTEST_ARGS) INTEGRATION_TEST_FILE := $(TESTS_DIR)/integration/test_integration_all_rust.py NOTIFICATION_TEST_DIR := $(TESTS_DIR)/notification LOAD_TEST_DIR := $(TESTS_DIR)/load @@ -38,7 +50,7 @@ integration-test-legacy: integration-test: $(POETRY) -V $(POETRY) install --without dev,load,notification --no-root - $(POETRY) run pytest $(INTEGRATION_TEST_FILE) \ + $(POETRY) run pytest $(INTEGRATION_TEST_FILE) \ --junit-xml=$(TEST_RESULTS_DIR)/integration_test_results.xml \ -v $(PYTEST_ARGS) diff --git a/autoendpoint/src/headers/vapid.rs b/autoendpoint/src/headers/vapid.rs index 858696775..a255c98bd 100644 --- a/autoendpoint/src/headers/vapid.rs +++ b/autoendpoint/src/headers/vapid.rs @@ -276,7 +276,7 @@ mod tests { fn extract_sub() { let header = VapidHeader::parse(VALID_HEADER).unwrap(); assert_eq!( - header.sub().unwrap(), + header.insecure_sub().unwrap(), "mailto:admin@example.com".to_string() ); } diff --git a/autoendpoint/src/routers/apns/router.rs b/autoendpoint/src/routers/apns/router.rs index dba454cf7..a8d5308e5 100644 --- a/autoendpoint/src/routers/apns/router.rs +++ b/autoendpoint/src/routers/apns/router.rs @@ -585,11 +585,9 @@ mod tests { settings: ApnsSettings::default(), endpoint_url: Url::parse("http://localhost:8080/").unwrap(), metrics: Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), - db, + db: db.clone(), #[cfg(feature = "reliable_report")] - reliability: Arc::new( - PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap(), - ), + reliability: Arc::new(PushReliability::new(&None, db.clone()).unwrap()), } } diff --git a/autoendpoint/src/routers/fcm/router.rs b/autoendpoint/src/routers/fcm/router.rs index 8539bc827..8ebce076b 100644 --- a/autoendpoint/src/routers/fcm/router.rs +++ b/autoendpoint/src/routers/fcm/router.rs @@ -266,9 +266,9 @@ mod tests { Url::parse("http://localhost:8080/").unwrap(), reqwest::Client::new(), Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), - db, + db.clone(), #[cfg(feature = "reliable_report")] - Arc::new(PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap()), + Arc::new(PushReliability::new(&None, db.clone()).unwrap()), ) .await .unwrap() diff --git a/autoendpoint/src/routers/webpush.rs b/autoendpoint/src/routers/webpush.rs index 1fc44914a..9f820147d 100644 --- a/autoendpoint/src/routers/webpush.rs +++ b/autoendpoint/src/routers/webpush.rs @@ -380,20 +380,19 @@ mod test { fn make_router(db: Box) -> WebPushRouter { WebPushRouter { - db, + db: db.box_clone(), metrics: Arc::new(StatsdClient::from_sink("autopush", cadence::NopMetricSink)), http: reqwest::Client::new(), endpoint_url: Url::parse("http://localhost:8080/").unwrap(), #[cfg(feature = "reliable_report")] - reliability: Arc::new( - PushReliability::new(&None, Box::new(MockDbClient::new())).unwrap(), - ), + reliability: Arc::new(PushReliability::new(&None, db).unwrap()), } } #[tokio::test] async fn pass_extras() { - let router = make_router(Box::new(MockDbClient::new())); + let db = MockDbClient::new().into_boxed_arc(); + let router = make_router(db); let sub = "foo@example.com"; let vapid = make_vapid( sub, From 51d7f7f402ae201e9e0f755ab058056eaca23323 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Fri, 6 
Dec 2024 10:49:23 -0800
Subject: [PATCH 18/25] f r's

---
 Makefile | 14 ++++++++------
 docs/src/testing.md | 10 ++++++----
 tests/pyproject.toml | 7 ++++---
 3 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/Makefile b/Makefile
index 7a854dcdc..ea2b0f6d5 100644
--- a/Makefile
+++ b/Makefile
@@ -9,12 +9,14 @@ TEST_RESULTS_DIR ?= workspace/test-results
 NOOP :=
 SPACE :=$(NOOP) $(NOOP)
 PYTEST_AND :=" and "
-# specify the tests to NOT run
-# NOTE: passing `-m not` to pytest will cause an error; be sure at least one of these is set.
-NEG_PYTEST_ARGS ?= $(if $(SKIP_SENTRY),sentry) $(if $(TEST_STUB),,stub) $(if $(TEST_RELIABILITY),,reliable_report) # Stub tests do not work in CI
-# Now compose the pytest args by gluing the above items together
-NEG_PYTEST_ARGS := "not $(subst $(SPACE), and not ,$(strip $(NEG_PYTEST_ARGS)))"
-PYTEST_ARGS := -m $(NEG_PYTEST_ARGS)
+# Run sentry UNLESS "SKIP_SENTRY" is specified.
+PYTEST_MARKERS := $(if $(SKIP_SENTRY),not sentry,)
+# Do not run "stub" unless "TEST_STUB" specified (Stub does not work in CI currently)
+PYTEST_MARKERS := $(if $(TEST_STUB),,$(if $(strip $(PYTEST_MARKERS)),and ,)not stub)
+# Do not run "push reliability" unless "TEST_RELIABILITY" specified (This is a feature in progress)
+PYTEST_MARKERS := $(if $(TEST_RELIABILITY),,$(if $(strip $(PYTEST_MARKERS)),and ,)not reliable_report)
+# Compile the pytest arguments if any have been specified.
+PYTEST_ARGS := $(if $(strip $(PYTEST_MARKERS)), -m "$(strip $(PYTEST_MARKERS))",)
 INTEGRATION_TEST_FILE := $(TESTS_DIR)/integration/test_integration_all_rust.py
 NOTIFICATION_TEST_DIR := $(TESTS_DIR)/notification
 LOAD_TEST_DIR := $(TESTS_DIR)/load
diff --git a/docs/src/testing.md b/docs/src/testing.md
index df36980d3..048aaa6d0 100644
--- a/docs/src/testing.md
+++ b/docs/src/testing.md
@@ -87,11 +87,13 @@ The test output is then emitted in your terminal instance. This includes the nam
 The integration tests make use of [pytest markers][pytest_markers] for filtering tests. These can be used with the `-m` pytest option, or can be used through the following environment variables and `integration-test` make command.
+Please note that when specifying multiple markers, you need to combine them using `and`. For example, to exclude both _sentry_ and _stub_ you would specify the marker as `-m "not sentry and not stub"`. As of pytest version 8.3.3, the `-m` CLI arguments do not stack.
 
-| ENVIRONMENT VARIABLE | RELATED MARKER | DESCRIPTION |
-|----------------------|----------------|-------------------------------------------------------------------|
-| SKIP_SENTRY | sentry | If set will exclude all tests marked with `sentry` from execution |
-| TEST_STUB | stub | If set will include all tests marked with `stub` in execution |
+| ENVIRONMENT VARIABLE | RELATED MARKER | DESCRIPTION |
+|----------------------|-----------------|--------------------------------------------------------------------------|
+| SKIP_SENTRY | sentry | If set will exclude all tests marked with `sentry` from execution |
+| TEST_STUB | stub | If set will include all tests marked with `stub` in execution |
+| TEST_RELIABILITY | reliable_report | If set will include all tests marked with `reliable_report` in execution |
 
 Integration tests in CI will be triggered automatically whenever a commit is pushed to a branch as a part of the CI PR workflow.
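The markers documented above are ordinary pytest markers, registered in `tests/pyproject.toml` (next diff). The following is a rough, hypothetical sketch only — the test name and body are illustrative and are not part of this patch set — showing how a test opts into the new `reliable_report` group, and why a marker expression such as `-m "not reliable_report"` (what the Makefile produces when `TEST_RELIABILITY` is unset) deselects it:

import pytest


@pytest.mark.reliable_report
def test_reliability_report_example():
    # Hypothetical test gated behind the `reliable_report` marker.
    # `pytest -m "not reliable_report"` (the default when TEST_RELIABILITY is
    # unset) deselects this test; dropping that clause, or passing
    # `-m reliable_report`, selects it again.
    assert True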
diff --git a/tests/pyproject.toml b/tests/pyproject.toml
index 7db34a580..430b5cd5e 100644
--- a/tests/pyproject.toml
+++ b/tests/pyproject.toml
@@ -40,7 +40,7 @@ add-select = ["D212"]
 # D203 as it conflicts with D211 https://github.com/PyCQA/pydocstyle/issues/141
 # D205 1 blank line required between summary line and description, awkward spacing
 # D400 First line should end with a period, doesn't work when sentence spans 2 lines
-add-ignore = ["D105","D107","D203", "D205", "D400"]
+add-ignore = ["D105", "D107", "D203", "D205", "D400"]
 
 [tool.poetry]
 name = "tests"
@@ -75,7 +75,7 @@ pytest-order = "^1.3.0"
 python-jose = "^3.3.0"
 httpx = "^0.27.2"
 fastapi = "^0.111.1"
-uvicorn = {extras = ["standard"], version="^0.29.0"}
+uvicorn = { extras = ["standard"], version = "^0.29.0" }
 
 [tool.poetry.group.load.dependencies]
 locust = "^2.32.1"
@@ -99,5 +99,6 @@ asyncio_mode = "auto"
 # Pytest marker documentation: https://docs.pytest.org/en/7.1.x/example/markers.html
 markers = [
     "stub: mark a test for the stub system",
-    "sentry: mark a test for the sentry integration"
+    "sentry: mark a test for the sentry integration",
+    "reliable_report: enable tests for reliability reporting",
 ]

From b86ef6b0eed77f770c8e12dabc83ba8769063b34 Mon Sep 17 00:00:00 2001
From: jrconlin
Date: Fri, 6 Dec 2024 14:48:35 -0800
Subject: [PATCH 19/25] f autocomplete

---
 Makefile | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Makefile b/Makefile
index ea2b0f6d5..e644c1d6b 100644
--- a/Makefile
+++ b/Makefile
@@ -12,9 +12,9 @@ PYTEST_AND :=" and "
 # Run sentry UNLESS "SKIP_SENTRY" is specified.
 PYTEST_MARKERS := $(if $(SKIP_SENTRY),not sentry,)
 # Do not run "stub" unless "TEST_STUB" specified (Stub does not work in CI currently)
-PYTEST_MARKERS := $(if $(TEST_STUB),,$(if $(strip $(PYTEST_MARKERS)),and ,)not stub)
+PYTEST_MARKERS += $(if $(TEST_STUB),,$(if $(strip $(PYTEST_MARKERS)),and ,)not stub)
 # Do not run "push reliability" unless "TEST_RELIABILITY" specified (This is a feature in progress)
-PYTEST_MARKERS := $(if $(TEST_RELIABILITY),,$(if $(strip $(PYTEST_MARKERS)),and ,)not reliable_report)
+PYTEST_MARKERS += $(if $(TEST_RELIABILITY),,$(if $(strip $(PYTEST_MARKERS)),and ,)not reliable_report)
 # Compile the pytest arguments if any have been specified.
 PYTEST_ARGS := $(if $(strip $(PYTEST_MARKERS)), -m "$(strip $(PYTEST_MARKERS))",)
 INTEGRATION_TEST_FILE := $(TESTS_DIR)/integration/test_integration_all_rust.py
 NOTIFICATION_TEST_DIR := $(TESTS_DIR)/notification
 LOAD_TEST_DIR := $(TESTS_DIR)/load

From 1f15f74aa5f329ca422a8792980ca8048dc0ea1e Mon Sep 17 00:00:00 2001
From: jrconlin
Date: Mon, 16 Dec 2024 16:09:15 -0800
Subject: [PATCH 20/25] f r's

---
 Makefile | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/Makefile b/Makefile
index e644c1d6b..c26c6bec4 100644
--- a/Makefile
+++ b/Makefile
@@ -4,11 +4,6 @@ TESTS_DIR := tests
 TEST_RESULTS_DIR ?= workspace/test-results
 # Markers are "clever", and work by including multiple markers with " and ".
 # This uses makefile magic to construct the set of items.
-# technically `$(eval )` would work here, but I've not had it work consistently.
-# The following is taken from [the makefile manual](https://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions)
-NOOP :=
-SPACE :=$(NOOP) $(NOOP)
-PYTEST_AND :=" and "
 # Run sentry UNLESS "SKIP_SENTRY" is specified.
PYTEST_MARKERS := $(if $(SKIP_SENTRY),not sentry,) # Do not run "stub" unless "TEST_STUB" specified (Stub does not work in CI currently) From 1bf49d236bd15815df1cf064a1ccf2cfa2d5a1dc Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 18 Dec 2024 13:02:18 -0800 Subject: [PATCH 21/25] f fix up pymarks --- Makefile | 11 +++++++++-- tests/integration/Dockerfile | 24 ++++++++++++------------ 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/Makefile b/Makefile index 0f886d35c..c1749bc92 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,14 @@ SHELL := /bin/sh CARGO = cargo TESTS_DIR := tests TEST_RESULTS_DIR ?= workspace/test-results -PYTEST_ARGS ?= $(if $(SKIP_SENTRY),-m "not sentry") $(if $(TEST_STUB),,-m "not stub") # Stub tests do not work in CI +# Run sentry UNLESS "SKIP_SENTRY" is specified. +PYTEST_MARKERS := $(if $(SKIP_SENTRY),not sentry,) +# Do not run "stub" unless "TEST_STUB" specified (Stub does not work in CI currently) +PYTEST_MARKERS += $(if $(TEST_STUB),,$(if $(strip $(PYTEST_MARKERS)),and ,)not stub) +# Do not run "push reliability" unless "TEST_RELIABILITY" specified (This is a feature in progress) +PYTEST_MARKERS += $(if $(TEST_RELIABILITY),,$(if $(strip $(PYTEST_MARKERS)),and ,)not reliable_report) +# Compile the pytest arguments if any have been specified. +PYTEST_ARGS := $(if $(strip $(PYTEST_MARKERS)), -m "$(strip $(PYTEST_MARKERS))",) INTEGRATION_TEST_DIR := $(TESTS_DIR)/integration INTEGRATION_TEST_FILE := $(INTEGRATION_TEST_DIR)/test_integration_all_rust.py NOTIFICATION_TEST_DIR := $(TESTS_DIR)/notification @@ -31,7 +38,7 @@ upgrade: integration-test: $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml build - $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml run -it --name integration-tests tests + PYTEST_ARGS='$(PYTEST_ARGS)' $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml run -it --name integration-tests tests docker cp integration-tests:/code/integration_test_results.xml $(INTEGRATION_TEST_DIR) integration-test-clean: diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index ead3c23ae..fe5b54d7e 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -11,23 +11,23 @@ ENV PYTHONUNBUFFERED=1 ENV PATH=$PATH:/root/.cargo/bin ENV PYTHON_VENV=/venv -ENV PYTEST_ARGS="" +ENV PYTEST_ARGS=${PYTEST_ARGS} ENV RUST_LOG="autopush=debug,autopush_common=debug,autoendpoint=debug,autoconnect=debug,slog_mozlog_json=info,warn" ENV DB_DSN=grpc://localhost:8086 # Add gcc since there are no wheels for some packages for arm64/aarch64 # (g++/make for gevent on pypy) RUN apt-get update && apt install -y --no-install-recommends \ - git \ - gpg \ - build-essential \ - python3-dev \ - curl \ - libstdc++6 \ - libstdc++-12-dev \ - libssl-dev \ - pkg-config \ - cmake + git \ + gpg \ + build-essential \ + python3-dev \ + curl \ + libstdc++6 \ + libstdc++-12-dev \ + libssl-dev \ + pkg-config \ + cmake RUN python -m venv ${PYTHON_VENV} ENV PATH="${PYTHON_VENV}/bin:${PATH}" @@ -59,4 +59,4 @@ RUN cargo build --features=emulator RUN chmod +x scripts/setup_bt.sh -CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --junit-xml=integration_test_results.xml -v -m 'not stub' ${PYTEST_ARGS}"] +CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --junit-xml=integration_test_results.xml -v ${PYTEST_ARGS}"] From 9c7b13cda3ed7adb69b4bcca2e6609bf40fa9c62 Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 
18 Dec 2024 14:56:57 -0800 Subject: [PATCH 22/25] f switch to pytest.ini --- Makefile | 21 +++++++++------------ docs/src/testing.md | 7 ++++--- tests/integration/Dockerfile | 4 ++-- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index c1749bc92..c62b00374 100644 --- a/Makefile +++ b/Makefile @@ -2,14 +2,11 @@ SHELL := /bin/sh CARGO = cargo TESTS_DIR := tests TEST_RESULTS_DIR ?= workspace/test-results -# Run sentry UNLESS "SKIP_SENTRY" is specified. -PYTEST_MARKERS := $(if $(SKIP_SENTRY),not sentry,) -# Do not run "stub" unless "TEST_STUB" specified (Stub does not work in CI currently) -PYTEST_MARKERS += $(if $(TEST_STUB),,$(if $(strip $(PYTEST_MARKERS)),and ,)not stub) -# Do not run "push reliability" unless "TEST_RELIABILITY" specified (This is a feature in progress) -PYTEST_MARKERS += $(if $(TEST_RELIABILITY),,$(if $(strip $(PYTEST_MARKERS)),and ,)not reliable_report) -# Compile the pytest arguments if any have been specified. -PYTEST_ARGS := $(if $(strip $(PYTEST_MARKERS)), -m "$(strip $(PYTEST_MARKERS))",) +# NOTE: Do not be clever. +# The integration tests (and a few others) use pytest markers to control +# the tests that are being run. These markers are set and defined within +# the `./tests/pytest.ini`. That is the single source of truth. +PYTEST_ARGS := ${PYTEST_ARGS} INTEGRATION_TEST_DIR := $(TESTS_DIR)/integration INTEGRATION_TEST_FILE := $(INTEGRATION_TEST_DIR)/test_integration_all_rust.py NOTIFICATION_TEST_DIR := $(TESTS_DIR)/notification @@ -36,23 +33,23 @@ upgrade: $(CARGO) upgrade $(CARGO) update -integration-test: +integration-test: ## pytest markers are stored in `tests/pytest.ini` $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml build - PYTEST_ARGS='$(PYTEST_ARGS)' $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml run -it --name integration-tests tests + $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml run -it --name integration-tests tests docker cp integration-tests:/code/integration_test_results.xml $(INTEGRATION_TEST_DIR) integration-test-clean: $(DOCKER_COMPOSE) -f $(INTEGRATION_TEST_DIR)/docker-compose.yml down docker rm integration-tests -integration-test-legacy: +integration-test-legacy: ## pytest markers are stored in `tests/pytest.ini` $(POETRY) -V $(POETRY) install --without dev,load,notification --no-root $(POETRY) run pytest $(INTEGRATION_TEST_FILE) \ --junit-xml=$(TEST_RESULTS_DIR)/integration_test_legacy_results.xml \ -v $(PYTEST_ARGS) -integration-test-local: +integration-test-local: ## pytest markers are stored in `tests/pytest.ini` $(POETRY) -V $(POETRY) install --without dev,load,notification --no-root $(POETRY) run pytest $(INTEGRATION_TEST_FILE) \ diff --git a/docs/src/testing.md b/docs/src/testing.md index 9d1deb0f3..10d182cf8 100644 --- a/docs/src/testing.md +++ b/docs/src/testing.md @@ -85,13 +85,14 @@ $ pyenv activate push-312 ### Running Integration Tests To run the integration tests, simply run `make integration-tests-local` from your terminal at the root of the project. -You can alter the verbosity and logging output by adding command line flags to the `PYTEST_ARGS ?=` variable in the root project Makefile. For example, for greater verbosity and stdout printing, add `-vv -s`. +You can alter the verbosity and logging output by adding command line flags to the `PYTEST_ARGS ?=` variable in the root project Makefile. For example, for greater verbosity and stdout printing, add `-vv -s`.
(Note: This may be unreliable due to several hand-offs during the +make / docker build process. For reliability, you may wish to modify the `tests/pytest.ini` file to include the options.) The test output is then emitted in your terminal instance. This includes the name of the tests, whether they pass or fail and any exceptions that are triggered during the test run. The integration tests make use of [pytest markers][pytest_markers] for filtering tests. These can be used with the `-m` pytest option specified in the `tests/pytest.ini` file, or can be used through the following environment variables and `integration-test-local` make command. | ENVIRONMENT VARIABLE | RELATED MARKER | DESCRIPTION | |----------------------|-----------------|--------------------------------------------------------------------------| diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index fe5b54d7e..97d7457d0 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -11,7 +11,6 @@ ENV PYTHONUNBUFFERED=1 ENV PATH=$PATH:/root/.cargo/bin ENV PYTHON_VENV=/venv -ENV PYTEST_ARGS=${PYTEST_ARGS} ENV RUST_LOG="autopush=debug,autopush_common=debug,autoendpoint=debug,autoconnect=debug,slog_mozlog_json=info,warn" ENV DB_DSN=grpc://localhost:8086 @@ -59,4 +58,5 @@ RUN cargo build --features=emulator RUN chmod +x scripts/setup_bt.sh -CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --junit-xml=integration_test_results.xml -v ${PYTEST_ARGS}"] +# Pytest markers are defined in the `pytest.ini` file. +CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=tests/pytest.ini --junit-xml=integration_test_results.xml -v"] From 2243e8af11034a2d90b6894b0316a881640dd90d Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 18 Dec 2024 15:52:51 -0800 Subject: [PATCH 23/25] f force the config file --- tests/integration/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index 97d7457d0..e45f82464 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -58,5 +58,5 @@ RUN cargo build --features=emulator RUN chmod +x scripts/setup_bt.sh -# Pytest markers are defined in the `pytest.ini` file.
+CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=tests/pytest.ini --junit-xml=integration_test_results.xml -v"] From 15c5e46723ea64222ba94d1f7fab0d4f1b03301e Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 18 Dec 2024 16:41:44 -0800 Subject: [PATCH 24/25] f move pytest to integration root --- tests/integration/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index e45f82464..785ad9636 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -41,7 +41,7 @@ RUN rustc --version ENV POETRY_VIRTUALENVS_CREATE=false \ POETRY_VERSION=1.7.0 RUN python -m pip install --no-cache-dir --quiet poetry -COPY ./tests/pyproject.toml ./tests/poetry.lock ./ +COPY ./tests/pyproject.toml ./tests/poetry.lock ./tests/pytest.ini ./ RUN poetry install --only=integration --no-interaction --no-ansi # Setup cloud big table @@ -59,4 +59,4 @@ RUN cargo build --features=emulator RUN chmod +x scripts/setup_bt.sh # Pytest markers are defined in the `pytest.ini` file. -CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=tests/pytest.ini --junit-xml=integration_test_results.xml -v"] +CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=pytest.ini --junit-xml=integration_test_results.xml -v"] From e7939f61c682ed093880fb882708a052cb4e77bb Mon Sep 17 00:00:00 2001 From: jrconlin Date: Wed, 18 Dec 2024 17:18:23 -0800 Subject: [PATCH 25/25] f use the one in /code? --- tests/integration/Dockerfile | 4 ++-- tests/notification/Dockerfile | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/integration/Dockerfile b/tests/integration/Dockerfile index 785ad9636..427f3e8ea 100644 --- a/tests/integration/Dockerfile +++ b/tests/integration/Dockerfile @@ -41,7 +41,7 @@ RUN rustc --version ENV POETRY_VIRTUALENVS_CREATE=false \ POETRY_VERSION=1.7.0 RUN python -m pip install --no-cache-dir --quiet poetry -COPY ./tests/pyproject.toml ./tests/poetry.lock ./tests/pytest.ini ./ +COPY ./tests/pyproject.toml ./tests/poetry.lock ./ RUN poetry install --only=integration --no-interaction --no-ansi # Setup cloud big table @@ -59,4 +59,4 @@ RUN cargo build --features=emulator RUN chmod +x scripts/setup_bt.sh # Pytest markers are defined in the `pytest.ini` file. -CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=pytest.ini --junit-xml=integration_test_results.xml -v"] +CMD ["sh", "-c", "./scripts/setup_bt.sh && poetry run pytest tests/integration/test_integration_all_rust.py --config-file=/code/pytest.ini --junit-xml=integration_test_results.xml -v"] diff --git a/tests/notification/Dockerfile b/tests/notification/Dockerfile index cebc46c37..dd91d95b3 100644 --- a/tests/notification/Dockerfile +++ b/tests/notification/Dockerfile @@ -59,6 +59,7 @@ WORKDIR /code ADD notification/ /code ADD ../poetry.lock /code ADD ../pyproject.toml /code +ADD ../pytest.ini /code RUN poetry install --only=notification