From f7cc854f6ed62fdd072a8cb0e4c07c61aa168142 Mon Sep 17 00:00:00 2001 From: Yuki Kishimoto Date: Tue, 19 Nov 2024 16:34:08 +0100 Subject: [PATCH 1/2] database: add `NostrEventsDatabase` trait Signed-off-by: Yuki Kishimoto --- CHANGELOG.md | 1 + bindings/nostr-sdk-ffi/src/client/builder.rs | 10 +- bindings/nostr-sdk-ffi/src/database/events.rs | 10 +- bindings/nostr-sdk-ffi/src/database/mod.rs | 29 ++- bindings/nostr-sdk-ffi/src/error.rs | 4 +- bindings/nostr-sdk-ffi/src/pool/mod.rs | 7 +- bindings/nostr-sdk-ffi/src/profile.rs | 10 +- bindings/nostr-sdk-ffi/src/relay/mod.rs | 5 +- bindings/nostr-sdk-js/src/client/builder.rs | 4 +- bindings/nostr-sdk-js/src/database/mod.rs | 30 +-- bindings/nostr-sdk-js/src/pool/mod.rs | 4 +- bindings/nostr-sdk-js/src/profile.rs | 2 +- crates/nostr-database/examples/helper.rs | 71 ------ crates/nostr-database/examples/memory.rs | 78 ------ .../src/{ => collections}/events.rs | 2 +- .../src/{util.rs => collections/mod.rs} | 3 + .../src/{ => collections}/tree.rs | 0 .../nostr-database/src/{ => events}/helper.rs | 6 +- crates/nostr-database/src/events/mod.rs | 241 ++++++++++++++++++ crates/nostr-database/src/lib.rs | 229 +---------------- crates/nostr-database/src/memory.rs | 31 ++- crates/nostr-database/src/profile.rs | 2 +- crates/nostr-indexeddb/src/lib.rs | 39 ++- crates/nostr-lmdb/src/lib.rs | 13 +- crates/nostr-ndb/src/lib.rs | 11 +- crates/nostr-relay-builder/src/builder.rs | 6 +- .../nostr-relay-builder/src/local/internal.rs | 3 +- crates/nostr-relay-pool/src/pool/inner.rs | 5 +- crates/nostr-relay-pool/src/pool/mod.rs | 5 +- crates/nostr-relay-pool/src/relay/inner.rs | 4 +- crates/nostr-relay-pool/src/relay/mod.rs | 8 +- crates/nostr-sdk/src/client/builder.rs | 4 +- crates/nostr-sdk/src/client/mod.rs | 4 +- crates/nostr-sdk/src/lib.rs | 2 - crates/nostr-sdk/src/prelude.rs | 2 +- 35 files changed, 407 insertions(+), 478 deletions(-) delete mode 100644 crates/nostr-database/examples/helper.rs delete mode 100644 crates/nostr-database/examples/memory.rs rename crates/nostr-database/src/{ => collections}/events.rs (98%) rename crates/nostr-database/src/{util.rs => collections/mod.rs} (94%) rename crates/nostr-database/src/{ => collections}/tree.rs (100%) rename crates/nostr-database/src/{ => events}/helper.rs (99%) create mode 100644 crates/nostr-database/src/events/mod.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index c578e9d4f..275573288 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -84,6 +84,7 @@ * nostr: add `NostrSigner::backend` ([Yuki Kishimoto]) * nostr: add `EventBuilder::private_msg` ([Yuki Kishimoto]) * nostr: add `EventBuilder::tag` and `EventBuilder::tags` ([Yuki Kishimoto]) +* database: add `NostrEventsDatabase` trait ([Yuki Kishimoto]) * pool: add relay reconnection and disconnection unit tests ([Yuki Kishimoto]) * sdk: allow to specify relay pool notification channel size in `Options` ([Yuki Kishimoto]) * relay-builder: add `RelayTestOptions` ([Yuki Kishimoto]) diff --git a/bindings/nostr-sdk-ffi/src/client/builder.rs b/bindings/nostr-sdk-ffi/src/client/builder.rs index e60caf22c..450a8199a 100644 --- a/bindings/nostr-sdk-ffi/src/client/builder.rs +++ b/bindings/nostr-sdk-ffi/src/client/builder.rs @@ -5,8 +5,6 @@ use std::ops::Deref; use std::sync::Arc; -use nostr_sdk::database::DynNostrDatabase; -use nostr_sdk::zapper::DynNostrZapper; use uniffi::Object; use super::zapper::NostrZapper; @@ -42,16 +40,14 @@ impl ClientBuilder { } pub fn zapper(self: Arc, zapper: &NostrZapper) -> Self { - let zapper: Arc = zapper.deref().clone(); let mut 
builder = unwrap_or_clone_arc(self); - builder.inner = builder.inner.zapper(zapper); + builder.inner = builder.inner.zapper(zapper.deref().clone()); builder } - pub fn database(self: Arc, database: Arc) -> Self { - let database: Arc = database.as_ref().into(); + pub fn database(self: Arc, database: &NostrDatabase) -> Self { let mut builder = unwrap_or_clone_arc(self); - builder.inner = builder.inner.database(database); + builder.inner = builder.inner.database(database.deref().clone()); builder } diff --git a/bindings/nostr-sdk-ffi/src/database/events.rs b/bindings/nostr-sdk-ffi/src/database/events.rs index e258a3dd5..b36f0bdf3 100644 --- a/bindings/nostr-sdk-ffi/src/database/events.rs +++ b/bindings/nostr-sdk-ffi/src/database/events.rs @@ -5,24 +5,24 @@ use std::ops::Deref; use std::sync::Arc; -use nostr_sdk::database; +use nostr_sdk::prelude; use uniffi::Object; use crate::protocol::Event; #[derive(Clone, Object)] pub struct Events { - inner: database::Events, + inner: prelude::Events, } -impl From for Events { - fn from(inner: database::Events) -> Self { +impl From for Events { + fn from(inner: prelude::Events) -> Self { Self { inner } } } impl Deref for Events { - type Target = database::Events; + type Target = prelude::Events; fn deref(&self) -> &Self::Target { &self.inner diff --git a/bindings/nostr-sdk-ffi/src/database/mod.rs b/bindings/nostr-sdk-ffi/src/database/mod.rs index f65d5d32d..7a24eb0b1 100644 --- a/bindings/nostr-sdk-ffi/src/database/mod.rs +++ b/bindings/nostr-sdk-ffi/src/database/mod.rs @@ -5,7 +5,7 @@ use std::ops::Deref; use std::sync::Arc; -use nostr_sdk::database::{DynNostrDatabase, IntoNostrDatabase, NostrDatabaseExt}; +use nostr_sdk::prelude::{self, IntoNostrDatabase, NostrEventsDatabaseExt}; #[cfg(feature = "ndb")] use nostr_sdk::NdbDatabase; #[cfg(feature = "lmdb")] @@ -16,23 +16,24 @@ pub mod events; use self::events::Events; use crate::error::Result; -use crate::profile::Profile; -use crate::protocol::{Event, EventId, Filter, PublicKey}; +use crate::protocol::{Event, EventId, Filter, Metadata, PublicKey}; #[derive(Object)] pub struct NostrDatabase { - inner: Arc, + inner: Arc, } -impl From> for NostrDatabase { - fn from(inner: Arc) -> Self { - Self { inner } +impl Deref for NostrDatabase { + type Target = Arc; + + fn deref(&self) -> &Self::Target { + &self.inner } } -impl From<&NostrDatabase> for Arc { - fn from(db: &NostrDatabase) -> Self { - db.inner.clone() +impl From> for NostrDatabase { + fn from(inner: Arc) -> Self { + Self { inner } } } @@ -112,7 +113,11 @@ impl NostrDatabase { Ok(self.inner.wipe().await?) } - pub async fn profile(&self, public_key: &PublicKey) -> Result> { - Ok(Arc::new(self.inner.profile(**public_key).await?.into())) + pub async fn metadata(&self, public_key: &PublicKey) -> Result>> { + Ok(self + .inner + .metadata(**public_key) + .await? 
+ .map(|m| Arc::new(m.into()))) } } diff --git a/bindings/nostr-sdk-ffi/src/error.rs b/bindings/nostr-sdk-ffi/src/error.rs index 086901cf2..0145069c3 100644 --- a/bindings/nostr-sdk-ffi/src/error.rs +++ b/bindings/nostr-sdk-ffi/src/error.rs @@ -249,8 +249,8 @@ impl From for NostrSdkError { } } -impl From for NostrSdkError { - fn from(e: nostr_sdk::database::DatabaseError) -> NostrSdkError { +impl From for NostrSdkError { + fn from(e: nostr_sdk::prelude::DatabaseError) -> NostrSdkError { Self::Generic(e.to_string()) } } diff --git a/bindings/nostr-sdk-ffi/src/pool/mod.rs b/bindings/nostr-sdk-ffi/src/pool/mod.rs index 5cec875f5..fdb08d70f 100644 --- a/bindings/nostr-sdk-ffi/src/pool/mod.rs +++ b/bindings/nostr-sdk-ffi/src/pool/mod.rs @@ -7,7 +7,6 @@ use std::ops::Deref; use std::sync::Arc; use std::time::Duration; -use nostr_sdk::database::DynNostrDatabase; use nostr_sdk::{RelayPoolOptions, SubscriptionId}; use uniffi::Object; @@ -46,9 +45,11 @@ impl RelayPool { /// Create new `RelayPool` with `custom` database #[uniffi::constructor] pub fn with_database(database: &NostrDatabase) -> Self { - let database: Arc = database.into(); Self { - inner: nostr_sdk::RelayPool::with_database(RelayPoolOptions::default(), database), + inner: nostr_sdk::RelayPool::with_database( + RelayPoolOptions::default(), + database.deref().clone(), + ), } } diff --git a/bindings/nostr-sdk-ffi/src/profile.rs b/bindings/nostr-sdk-ffi/src/profile.rs index bea4eeb74..b0158b631 100644 --- a/bindings/nostr-sdk-ffi/src/profile.rs +++ b/bindings/nostr-sdk-ffi/src/profile.rs @@ -5,7 +5,7 @@ use std::ops::Deref; use std::sync::Arc; -use nostr_sdk::database; +use nostr_sdk::prelude; use uniffi::Object; use crate::protocol::{Metadata, PublicKey}; @@ -13,11 +13,11 @@ use crate::protocol::{Metadata, PublicKey}; #[derive(Debug, PartialEq, Eq, Hash, Object)] #[uniffi::export(Debug, Eq, Hash)] pub struct Profile { - inner: database::Profile, + inner: prelude::Profile, } -impl From for Profile { - fn from(inner: database::Profile) -> Self { +impl From for Profile { + fn from(inner: prelude::Profile) -> Self { Self { inner } } } @@ -28,7 +28,7 @@ impl Profile { #[uniffi::constructor] pub fn new(public_key: &PublicKey, metadata: Arc) -> Self { Self { - inner: database::Profile::new(**public_key, metadata.as_ref().deref().clone()), + inner: prelude::Profile::new(**public_key, metadata.as_ref().deref().clone()), } } diff --git a/bindings/nostr-sdk-ffi/src/relay/mod.rs b/bindings/nostr-sdk-ffi/src/relay/mod.rs index cfb7ac69e..193f3273c 100644 --- a/bindings/nostr-sdk-ffi/src/relay/mod.rs +++ b/bindings/nostr-sdk-ffi/src/relay/mod.rs @@ -7,7 +7,6 @@ use std::ops::Deref; use std::sync::Arc; use std::time::Duration; -use nostr_sdk::database::DynNostrDatabase; use nostr_sdk::{pool, FilterOptions, SubscriptionId, Url}; use uniffi::{Object, Record}; @@ -124,10 +123,8 @@ impl Relay { #[uniffi::constructor] pub fn custom(url: String, database: &NostrDatabase, opts: &RelayOptions) -> Result { let url: Url = Url::parse(&url)?; - let database: Arc = database.into(); - let opts = opts.deref().clone(); Ok(Self { - inner: nostr_sdk::Relay::custom(url, database, opts), + inner: nostr_sdk::Relay::custom(url, database.deref().clone(), opts.deref().clone()), }) } diff --git a/bindings/nostr-sdk-js/src/client/builder.rs b/bindings/nostr-sdk-js/src/client/builder.rs index 142187be0..4dbaf7203 100644 --- a/bindings/nostr-sdk-js/src/client/builder.rs +++ b/bindings/nostr-sdk-js/src/client/builder.rs @@ -3,7 +3,6 @@ // Distributed under the MIT software license 
use std::ops::Deref; -use std::sync::Arc; use nostr_sdk::prelude::*; use wasm_bindgen::prelude::*; @@ -43,8 +42,7 @@ impl JsClientBuilder { } pub fn database(self, database: &JsNostrDatabase) -> Self { - let database: Arc = database.into(); - self.inner.database(database).into() + self.inner.database(database.deref().clone()).into() } pub fn opts(self, opts: &JsOptions) -> Self { diff --git a/bindings/nostr-sdk-js/src/database/mod.rs b/bindings/nostr-sdk-js/src/database/mod.rs index d71acbb94..cf43f9c71 100644 --- a/bindings/nostr-sdk-js/src/database/mod.rs +++ b/bindings/nostr-sdk-js/src/database/mod.rs @@ -6,35 +6,35 @@ use std::ops::Deref; use std::sync::Arc; use js_sys::Array; -use nostr_sdk::database::{DynNostrDatabase, IntoNostrDatabase, NostrDatabaseExt}; -use nostr_sdk::WebDatabase; +use nostr_sdk::prelude::*; use wasm_bindgen::prelude::*; pub mod events; pub use self::events::JsEvents; use crate::error::{into_err, Result}; -use crate::profile::JsProfile; use crate::protocol::event::{JsEvent, JsEventId}; use crate::protocol::key::JsPublicKey; -use crate::protocol::types::JsFilter; +use crate::protocol::types::{JsFilter, JsMetadata}; use crate::JsStringArray; /// Nostr Database #[wasm_bindgen(js_name = NostrDatabase)] pub struct JsNostrDatabase { - inner: Arc, + inner: Arc, } -impl From> for JsNostrDatabase { - fn from(inner: Arc) -> Self { - Self { inner } +impl Deref for JsNostrDatabase { + type Target = Arc; + + fn deref(&self) -> &Self::Target { + &self.inner } } -impl From<&JsNostrDatabase> for Arc { - fn from(db: &JsNostrDatabase) -> Self { - db.inner.clone() +impl From> for JsNostrDatabase { + fn from(inner: Arc) -> Self { + Self { inner } } } @@ -42,7 +42,7 @@ impl From<&JsNostrDatabase> for Arc { impl JsNostrDatabase { /// Open/Create database with **unlimited** capacity pub async fn indexeddb(name: &str) -> Result { - let db = Arc::new(WebDatabase::open(name).await.map_err(into_err)?); + let db = WebDatabase::open(name).await.map_err(into_err)?; Ok(Self { inner: db.into_nostr_database(), }) @@ -114,12 +114,12 @@ impl JsNostrDatabase { self.inner.wipe().await.map_err(into_err) } - pub async fn profile(&self, public_key: &JsPublicKey) -> Result { + pub async fn metadata(&self, public_key: &JsPublicKey) -> Result> { Ok(self .inner - .profile(**public_key) + .metadata(**public_key) .await .map_err(into_err)? 
- .into()) + .map(|m| m.into())) } } diff --git a/bindings/nostr-sdk-js/src/pool/mod.rs b/bindings/nostr-sdk-js/src/pool/mod.rs index 6e90a93f6..c4f692061 100644 --- a/bindings/nostr-sdk-js/src/pool/mod.rs +++ b/bindings/nostr-sdk-js/src/pool/mod.rs @@ -3,7 +3,6 @@ // Distributed under the MIT software license use std::ops::Deref; -use std::sync::Arc; use js_sys::Array; use nostr_sdk::prelude::*; @@ -46,9 +45,8 @@ impl JsRelayPool { /// Create new `RelayPool` with `custom` database #[wasm_bindgen(js_name = withDatabase)] pub fn with_database(database: &JsNostrDatabase) -> Self { - let database: Arc = database.into(); Self { - inner: RelayPool::with_database(RelayPoolOptions::default(), database), + inner: RelayPool::with_database(RelayPoolOptions::default(), database.deref().clone()), } } diff --git a/bindings/nostr-sdk-js/src/profile.rs b/bindings/nostr-sdk-js/src/profile.rs index 0ba2b97c5..52bd22f28 100644 --- a/bindings/nostr-sdk-js/src/profile.rs +++ b/bindings/nostr-sdk-js/src/profile.rs @@ -4,7 +4,7 @@ use std::ops::Deref; -use nostr_sdk::database::Profile; +use nostr_sdk::prelude::*; use wasm_bindgen::prelude::*; use crate::protocol::key::JsPublicKey; diff --git a/crates/nostr-database/examples/helper.rs b/crates/nostr-database/examples/helper.rs deleted file mode 100644 index ff9d23f3b..000000000 --- a/crates/nostr-database/examples/helper.rs +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright (c) 2022-2023 Yuki Kishimoto -// Copyright (c) 2023-2024 Rust Nostr Developers -// Distributed under the MIT software license - -use std::time::Duration; - -use nostr::prelude::*; -use nostr_database::DatabaseHelper; -use tracing_subscriber::fmt::format::FmtSpan; - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt::fmt() - .with_span_events(FmtSpan::CLOSE) - .init(); - - let secret_key = - SecretKey::from_bech32("nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99") - .unwrap(); - let keys_a = Keys::new(secret_key); - - let secret_key = - SecretKey::from_bech32("nsec1ufnus6pju578ste3v90xd5m2decpuzpql2295m3sknqcjzyys9ls0qlc85") - .unwrap(); - let keys_b = Keys::new(secret_key); - - let helper = DatabaseHelper::unbounded(); - - for i in 0..100_000 { - let event = EventBuilder::text_note(format!("Event #{i}")) - .sign_with_keys(&keys_a) - .unwrap(); - helper.index_event(&event).await; - - let event = EventBuilder::text_note(format!("Reply to event #{i}")) - .tags([Tag::event(event.id), Tag::public_key(event.pubkey)]) - .sign_with_keys(&keys_b) - .unwrap(); - helper.index_event(&event).await; - } - - for i in 0..1000 { - let metadata = Metadata::new().name(format!("Name #{i}")); - let event = EventBuilder::metadata(&metadata) - .sign_with_keys(&keys_a) - .unwrap(); - helper.index_event(&event).await; - } - - for i in 0..500_000 { - let event = EventBuilder::new(Kind::Custom(123), "Custom with d tag") - .tag(Tag::identifier(format!("myid{i}"))) - .sign_with_keys(&keys_a) - .unwrap(); - helper.index_event(&event).await; - } - - let events = helper - .query(vec![Filter::new() - .kinds(vec![Kind::Metadata, Kind::Custom(123), Kind::TextNote]) - .limit(20) - //.kind(Kind::Custom(123)) - //.identifier("myid5000") - .author(keys_a.public_key())]) - .await; - println!("Got {} events", events.len()); - - loop { - tokio::time::sleep(Duration::from_secs(60)).await; - } -} diff --git a/crates/nostr-database/examples/memory.rs b/crates/nostr-database/examples/memory.rs deleted file mode 100644 index bd2ba0d68..000000000 --- a/crates/nostr-database/examples/memory.rs +++ /dev/null @@ -1,78 +0,0 
@@ -// Copyright (c) 2022-2023 Yuki Kishimoto -// Copyright (c) 2023-2024 Rust Nostr Developers -// Distributed under the MIT software license - -use std::time::Duration; - -use nostr::prelude::*; -use nostr::{EventBuilder, Filter, Keys, Kind, Metadata, Tag}; -use nostr_database::memory::MemoryDatabase; -use nostr_database::{MemoryDatabaseOptions, NostrDatabase}; -use tracing_subscriber::fmt::format::FmtSpan; - -#[tokio::main] -async fn main() { - tracing_subscriber::fmt::fmt() - .with_span_events(FmtSpan::CLOSE) - .init(); - - let secret_key = - SecretKey::from_bech32("nsec1j4c6269y9w0q2er2xjw8sv2ehyrtfxq3jwgdlxj6qfn8z4gjsq5qfvfk99") - .unwrap(); - let keys_a = Keys::new(secret_key); - - let secret_key = - SecretKey::from_bech32("nsec1ufnus6pju578ste3v90xd5m2decpuzpql2295m3sknqcjzyys9ls0qlc85") - .unwrap(); - let keys_b = Keys::new(secret_key); - - let opts = MemoryDatabaseOptions { - events: true, - ..Default::default() - }; - let database = MemoryDatabase::with_opts(opts); - - for i in 0..100_000 { - let event = EventBuilder::text_note(format!("Event #{i}")) - .sign_with_keys(&keys_a) - .unwrap(); - database.save_event(&event).await.unwrap(); - - let event = EventBuilder::text_note(format!("Reply to event #{i}")) - .tags([Tag::event(event.id), Tag::public_key(event.pubkey)]) - .sign_with_keys(&keys_b) - .unwrap(); - database.save_event(&event).await.unwrap(); - } - - for i in 0..10 { - let metadata = Metadata::new().name(format!("Name #{i}")); - let event = EventBuilder::metadata(&metadata) - .sign_with_keys(&keys_a) - .unwrap(); - database.save_event(&event).await.unwrap(); - } - - for i in 0..500_000 { - let event = EventBuilder::new(Kind::Custom(123), "Custom with d tag") - .tag(Tag::identifier(format!("myid{i}"))) - .sign_with_keys(&keys_a) - .unwrap(); - database.save_event(&event).await.unwrap(); - } - - let events = database - .query(vec![Filter::new() - .kinds(vec![Kind::Metadata, Kind::Custom(123), Kind::TextNote]) - .limit(20) - //.kind(Kind::Custom(123)) - //.identifier("myid5000") - .author(keys_a.public_key())]) - .await - .unwrap(); - println!("Got {} events", events.len()); - - loop { - tokio::time::sleep(Duration::from_secs(60)).await; - } -} diff --git a/crates/nostr-database/src/events.rs b/crates/nostr-database/src/collections/events.rs similarity index 98% rename from crates/nostr-database/src/events.rs rename to crates/nostr-database/src/collections/events.rs index f0cb120f5..bb6c796c0 100644 --- a/crates/nostr-database/src/events.rs +++ b/crates/nostr-database/src/collections/events.rs @@ -8,7 +8,7 @@ use std::hash::{Hash, Hasher}; use nostr::{Event, Filter}; -use crate::tree::{BTreeCappedSet, Capacity, OverCapacityPolicy}; +use super::tree::{BTreeCappedSet, Capacity, OverCapacityPolicy}; // Lookup ID: EVENT_ORD_IMPL const POLICY: OverCapacityPolicy = OverCapacityPolicy::Last; diff --git a/crates/nostr-database/src/util.rs b/crates/nostr-database/src/collections/mod.rs similarity index 94% rename from crates/nostr-database/src/util.rs rename to crates/nostr-database/src/collections/mod.rs index 5c2bc8708..3ef1c877d 100644 --- a/crates/nostr-database/src/util.rs +++ b/crates/nostr-database/src/collections/mod.rs @@ -7,6 +7,9 @@ use std::num::NonZeroUsize; use lru::LruCache; +pub mod events; +pub mod tree; + pub(crate) fn new_lru_cache(size: Option) -> LruCache where K: Hash + Eq, diff --git a/crates/nostr-database/src/tree.rs b/crates/nostr-database/src/collections/tree.rs similarity index 100% rename from crates/nostr-database/src/tree.rs rename to 
crates/nostr-database/src/collections/tree.rs diff --git a/crates/nostr-database/src/helper.rs b/crates/nostr-database/src/events/helper.rs similarity index 99% rename from crates/nostr-database/src/helper.rs rename to crates/nostr-database/src/events/helper.rs index 095712c0b..c69bc4822 100644 --- a/crates/nostr-database/src/helper.rs +++ b/crates/nostr-database/src/events/helper.rs @@ -2,7 +2,9 @@ // Copyright (c) 2023-2024 Rust Nostr Developers // Distributed under the MIT software license -//! Nostr Database Helper +//! Nostr Event Store Helper +//! +//! Used for the in-memory database. use std::collections::{BTreeSet, HashMap, HashSet}; use std::iter; @@ -13,7 +15,7 @@ use nostr::nips::nip01::Coordinate; use nostr::{Alphabet, Event, EventId, Filter, Kind, PublicKey, SingleLetterTag, Timestamp}; use tokio::sync::{OwnedRwLockReadGuard, RwLock}; -use crate::tree::{BTreeCappedSet, Capacity, InsertResult, OverCapacityPolicy}; +use crate::collections::tree::{BTreeCappedSet, Capacity, InsertResult, OverCapacityPolicy}; use crate::Events; type DatabaseEvent = Arc; diff --git a/crates/nostr-database/src/events/mod.rs b/crates/nostr-database/src/events/mod.rs new file mode 100644 index 000000000..58e15e96b --- /dev/null +++ b/crates/nostr-database/src/events/mod.rs @@ -0,0 +1,241 @@ +// Copyright (c) 2022-2023 Yuki Kishimoto +// Copyright (c) 2023-2024 Rust Nostr Developers +// Distributed under the MIT software license + +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::fmt; +use std::sync::Arc; + +use async_trait::async_trait; +use nostr::prelude::*; + +pub mod helper; + +use crate::{DatabaseError, Events, Profile}; + +/// Database event status +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum DatabaseEventStatus { + /// The event is saved into the database + Saved, + /// The event is marked as deleted + Deleted, + /// The event doesn't exist + NotExistent, +} + +#[doc(hidden)] +pub trait IntoNostrEventsDatabase { + fn into_database(self) -> Arc; +} + +impl IntoNostrEventsDatabase for Arc { + fn into_database(self) -> Arc { + self + } +} + +impl IntoNostrEventsDatabase for T +where + T: NostrEventsDatabase + Sized + 'static, +{ + fn into_database(self) -> Arc { + Arc::new(self) + } +} + +impl IntoNostrEventsDatabase for Arc +where + T: NostrEventsDatabase + 'static, +{ + fn into_database(self) -> Arc { + self + } +} + +/// Nostr Events Database +/// +/// Store for the nostr events. +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +pub trait NostrEventsDatabase: fmt::Debug + Send + Sync { + /// Save [`Event`] into store + /// + /// Return `true` if event was successfully saved into database. + /// + /// **This method assume that [`Event`] was already verified** + async fn save_event(&self, event: &Event) -> Result; + + /// Check event status by ID + /// + /// Check if the event is saved, deleted or not existent. 
+ async fn check_id(&self, event_id: &EventId) -> Result; + + /// Check if [`Coordinate`] has been deleted before a certain [`Timestamp`] + async fn has_coordinate_been_deleted( + &self, + coordinate: &Coordinate, + timestamp: &Timestamp, + ) -> Result; + + /// Set [`EventId`] as seen by relay + /// + /// Useful for NIP65 (aka gossip) + async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), DatabaseError>; + + /// Get list of relays that have seen the [`EventId`] + async fn event_seen_on_relays( + &self, + event_id: &EventId, + ) -> Result>, DatabaseError>; + + /// Get [`Event`] by [`EventId`] + async fn event_by_id(&self, event_id: &EventId) -> Result, DatabaseError>; + + /// Count number of [`Event`] found by filters + /// + /// Use `Filter::new()` or `Filter::default()` to count all events. + async fn count(&self, filters: Vec) -> Result; + + /// Query store with filters + async fn query(&self, filters: Vec) -> Result; + + /// Get `negentropy` items + async fn negentropy_items( + &self, + filter: Filter, + ) -> Result, DatabaseError> { + let events: Events = self.query(vec![filter]).await?; + Ok(events.into_iter().map(|e| (e.id, e.created_at)).collect()) + } + + /// Delete all events that match the [Filter] + async fn delete(&self, filter: Filter) -> Result<(), DatabaseError>; +} + +/// Nostr Event Store Extension +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +pub trait NostrEventsDatabaseExt: NostrEventsDatabase { + /// Get public key metadata + #[tracing::instrument(skip_all, level = "trace")] + async fn metadata(&self, public_key: PublicKey) -> Result, DatabaseError> { + let filter = Filter::new() + .author(public_key) + .kind(Kind::Metadata) + .limit(1); + let events: Events = self.query(vec![filter]).await?; + match events.first() { + Some(event) => Ok(Some( + Metadata::from_json(&event.content).map_err(DatabaseError::backend)?, + )), + None => Ok(None), + } + } + + /// Get contact list public keys + #[tracing::instrument(skip_all, level = "trace")] + async fn contacts_public_keys( + &self, + public_key: PublicKey, + ) -> Result, DatabaseError> { + let filter = Filter::new() + .author(public_key) + .kind(Kind::ContactList) + .limit(1); + let events: Events = self.query(vec![filter]).await?; + match events.first() { + Some(event) => Ok(event.tags.public_keys().copied().collect()), + None => Ok(HashSet::new()), + } + } + + /// Get contact list with metadata of [`PublicKey`] + #[tracing::instrument(skip_all, level = "trace")] + async fn contacts(&self, public_key: PublicKey) -> Result, DatabaseError> { + let filter = Filter::new() + .author(public_key) + .kind(Kind::ContactList) + .limit(1); + let events: Events = self.query(vec![filter]).await?; + match events.first() { + Some(event) => { + // Get contacts metadata + let filter = Filter::new() + .authors(event.tags.public_keys().copied()) + .kind(Kind::Metadata); + let mut contacts: HashSet = self + .query(vec![filter]) + .await? 
+ .into_iter() + .map(|e| { + let metadata: Metadata = + Metadata::from_json(&e.content).unwrap_or_default(); + Profile::new(e.pubkey, metadata) + }) + .collect(); + + // Extend with missing public keys + contacts.extend(event.tags.public_keys().copied().map(Profile::from)); + + Ok(contacts.into_iter().collect()) + } + None => Ok(BTreeSet::new()), + } + } + + /// Get relays list for [PublicKey] + /// + /// + #[tracing::instrument(skip_all, level = "trace")] + async fn relay_list( + &self, + public_key: PublicKey, + ) -> Result>, DatabaseError> { + // Query + let filter: Filter = Filter::default() + .author(public_key) + .kind(Kind::RelayList) + .limit(1); + let events: Events = self.query(vec![filter]).await?; + + // Extract relay list (NIP65) + match events.first() { + Some(event) => Ok(nip65::extract_relay_list(event) + .map(|(u, m)| (u.clone(), *m)) + .collect()), + None => Ok(HashMap::new()), + } + } + + /// Get relays list for public keys + /// + /// + #[tracing::instrument(skip_all, level = "trace")] + async fn relay_lists( + &self, + public_keys: I, + ) -> Result>>, DatabaseError> + where + I: IntoIterator + Send, + { + // Query + let filter: Filter = Filter::default().authors(public_keys).kind(Kind::RelayList); + let events: Events = self.query(vec![filter]).await?; + + let mut map = HashMap::with_capacity(events.len()); + + for event in events.into_iter() { + map.insert( + event.pubkey, + nip65::extract_owned_relay_list(event).collect(), + ); + } + + Ok(map) + } +} + +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +impl NostrEventsDatabaseExt for T {} diff --git a/crates/nostr-database/src/lib.rs b/crates/nostr-database/src/lib.rs index 54e2f5052..7e9bf64aa 100644 --- a/crates/nostr-database/src/lib.rs +++ b/crates/nostr-database/src/lib.rs @@ -9,32 +9,28 @@ #![warn(clippy::large_futures)] #![allow(clippy::mutable_key_type)] // TODO: remove when possible. Needed to suppress false positive for `BTreeSet` -use core::fmt; -use std::collections::{BTreeSet, HashMap, HashSet}; use std::sync::Arc; pub use async_trait::async_trait; pub use nostr; -use nostr::nips::nip01::Coordinate; -use nostr::nips::nip65::{self, RelayMetadata}; -use nostr::{Event, EventId, Filter, JsonUtil, Kind, Metadata, PublicKey, Timestamp, Url}; +mod collections; mod error; mod events; #[cfg(feature = "flatbuf")] pub mod flatbuffers; -pub mod helper; pub mod memory; pub mod prelude; pub mod profile; -mod tree; -mod util; +pub use self::collections::events::Events; pub use self::error::DatabaseError; -pub use self::events::Events; +pub use self::events::helper::{DatabaseEventResult, DatabaseHelper}; +pub use self::events::{ + DatabaseEventStatus, IntoNostrEventsDatabase, NostrEventsDatabase, NostrEventsDatabaseExt, +}; #[cfg(feature = "flatbuf")] pub use self::flatbuffers::{FlatBufferBuilder, FlatBufferDecode, FlatBufferEncode}; -pub use self::helper::{DatabaseEventResult, DatabaseHelper}; pub use self::memory::{MemoryDatabase, MemoryDatabaseOptions}; pub use self::profile::Profile; @@ -64,17 +60,13 @@ impl Backend { } } -/// A type-erased [`NostrDatabase`]. -pub type DynNostrDatabase = dyn NostrDatabase; - -/// A type that can be type-erased into `Arc`. 
+#[doc(hidden)] pub trait IntoNostrDatabase { - #[doc(hidden)] - fn into_nostr_database(self) -> Arc; + fn into_nostr_database(self) -> Arc; } -impl IntoNostrDatabase for Arc { - fn into_nostr_database(self) -> Arc { +impl IntoNostrDatabase for Arc { + fn into_nostr_database(self) -> Arc { self } } @@ -83,7 +75,7 @@ impl IntoNostrDatabase for T where T: NostrDatabase + Sized + 'static, { - fn into_nostr_database(self) -> Arc { + fn into_nostr_database(self) -> Arc { Arc::new(self) } } @@ -92,217 +84,22 @@ impl IntoNostrDatabase for Arc where T: NostrDatabase + 'static, { - fn into_nostr_database(self) -> Arc { + fn into_nostr_database(self) -> Arc { self } } -/// Database event status -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum DatabaseEventStatus { - /// The event is saved into database - Saved, - /// The event is marked as deleted - Deleted, - /// The event not exists - NotExistent, -} - /// Nostr Database #[cfg_attr(target_arch = "wasm32", async_trait(?Send))] #[cfg_attr(not(target_arch = "wasm32"), async_trait)] -pub trait NostrDatabase: fmt::Debug + Send + Sync { +pub trait NostrDatabase: NostrEventsDatabase { /// Name of the backend database used fn backend(&self) -> Backend; - /// Save [`Event`] into store - /// - /// Return `true` if event was successfully saved into database. - /// - /// **This method assume that [`Event`] was already verified** - async fn save_event(&self, event: &Event) -> Result; - - /// Check event status by ID - /// - /// Check if the event is saved, deleted or not existent. - async fn check_id(&self, event_id: &EventId) -> Result; - - /// Check if [`Coordinate`] has been deleted before a certain [`Timestamp`] - async fn has_coordinate_been_deleted( - &self, - coordinate: &Coordinate, - timestamp: &Timestamp, - ) -> Result; - - /// Set [`EventId`] as seen by relay - /// - /// Useful for NIP65 (aka gossip) - async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), DatabaseError>; - - /// Get list of relays that have seen the [`EventId`] - async fn event_seen_on_relays( - &self, - event_id: &EventId, - ) -> Result>, DatabaseError>; - - /// Get [`Event`] by [`EventId`] - async fn event_by_id(&self, event_id: &EventId) -> Result, DatabaseError>; - - /// Count number of [`Event`] found by filters - /// - /// Use `Filter::new()` or `Filter::default()` to count all events. 
- async fn count(&self, filters: Vec) -> Result; - - /// Query store with filters - async fn query(&self, filters: Vec) -> Result; - - /// Get `negentropy` items - async fn negentropy_items( - &self, - filter: Filter, - ) -> Result, DatabaseError> { - let events: Events = self.query(vec![filter]).await?; - Ok(events.into_iter().map(|e| (e.id, e.created_at)).collect()) - } - - /// Delete all events that match the [Filter] - async fn delete(&self, filter: Filter) -> Result<(), DatabaseError>; - /// Wipe all data async fn wipe(&self) -> Result<(), DatabaseError>; } -/// Nostr Database Extension -#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait)] -pub trait NostrDatabaseExt: NostrDatabase { - /// Get profile metadata - #[tracing::instrument(skip_all, level = "trace")] - async fn profile(&self, public_key: PublicKey) -> Result { - let filter = Filter::new() - .author(public_key) - .kind(Kind::Metadata) - .limit(1); - let events: Events = self.query(vec![filter]).await?; - match events.first() { - Some(event) => match Metadata::from_json(&event.content) { - Ok(metadata) => Ok(Profile::new(public_key, metadata)), - Err(e) => { - tracing::error!("Impossible to deserialize profile metadata: {e}"); - Ok(Profile::from(public_key)) - } - }, - None => Ok(Profile::from(public_key)), - } - } - - /// Get contact list public keys - #[tracing::instrument(skip_all, level = "trace")] - async fn contacts_public_keys( - &self, - public_key: PublicKey, - ) -> Result, DatabaseError> { - let filter = Filter::new() - .author(public_key) - .kind(Kind::ContactList) - .limit(1); - let events: Events = self.query(vec![filter]).await?; - match events.first() { - Some(event) => Ok(event.tags.public_keys().copied().collect()), - None => Ok(Vec::new()), - } - } - - /// Get contact list with metadata of [`PublicKey`] - #[tracing::instrument(skip_all, level = "trace")] - async fn contacts(&self, public_key: PublicKey) -> Result, DatabaseError> { - let filter = Filter::new() - .author(public_key) - .kind(Kind::ContactList) - .limit(1); - let events: Events = self.query(vec![filter]).await?; - match events.first() { - Some(event) => { - // Get contacts metadata - let filter = Filter::new() - .authors(event.tags.public_keys().copied()) - .kind(Kind::Metadata); - let mut contacts: HashSet = self - .query(vec![filter]) - .await? 
- .into_iter() - .map(|e| { - let metadata: Metadata = - Metadata::from_json(&e.content).unwrap_or_default(); - Profile::new(e.pubkey, metadata) - }) - .collect(); - - // Extend with missing public keys - contacts.extend(event.tags.public_keys().copied().map(Profile::from)); - - Ok(contacts.into_iter().collect()) - } - None => Ok(BTreeSet::new()), - } - } - - /// Get relays list for [PublicKey] - /// - /// - #[tracing::instrument(skip_all, level = "trace")] - async fn relay_list( - &self, - public_key: PublicKey, - ) -> Result>, DatabaseError> { - // Query - let filter: Filter = Filter::default() - .author(public_key) - .kind(Kind::RelayList) - .limit(1); - let events: Events = self.query(vec![filter]).await?; - - // Extract relay list (NIP65) - match events.first() { - Some(event) => Ok(nip65::extract_relay_list(event) - .map(|(u, m)| (u.clone(), *m)) - .collect()), - None => Ok(HashMap::new()), - } - } - - /// Get relays list for public keys - /// - /// - #[tracing::instrument(skip_all, level = "trace")] - async fn relay_lists( - &self, - public_keys: I, - ) -> Result>>, DatabaseError> - where - I: IntoIterator + Send, - { - // Query - let filter: Filter = Filter::default().authors(public_keys).kind(Kind::RelayList); - let events: Events = self.query(vec![filter]).await?; - - let mut map = HashMap::with_capacity(events.len()); - - for event in events.into_iter() { - map.insert( - event.pubkey, - nip65::extract_owned_relay_list(event).collect(), - ); - } - - Ok(map) - } -} - -#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait)] -impl NostrDatabaseExt for T {} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/nostr-database/src/memory.rs b/crates/nostr-database/src/memory.rs index decc80fba..2c7a17d89 100644 --- a/crates/nostr-database/src/memory.rs +++ b/crates/nostr-database/src/memory.rs @@ -14,9 +14,10 @@ use nostr::nips::nip01::Coordinate; use nostr::{Event, EventId, Filter, Timestamp, Url}; use tokio::sync::Mutex; +use crate::collections::new_lru_cache; use crate::{ - util, Backend, DatabaseError, DatabaseEventResult, DatabaseEventStatus, DatabaseHelper, Events, - NostrDatabase, + Backend, DatabaseError, DatabaseEventResult, DatabaseEventStatus, DatabaseHelper, Events, + NostrDatabase, NostrEventsDatabase, }; /// Database options @@ -70,7 +71,7 @@ impl MemoryDatabase { pub fn with_opts(opts: MemoryDatabaseOptions) -> Self { Self { opts, - seen_event_ids: Arc::new(Mutex::new(util::new_lru_cache(opts.max_events))), + seen_event_ids: Arc::new(Mutex::new(new_lru_cache(opts.max_events))), helper: match opts.max_events { Some(max) => DatabaseHelper::bounded(max), None => DatabaseHelper::unbounded(), @@ -110,6 +111,20 @@ impl NostrDatabase for MemoryDatabase { Backend::Memory } + async fn wipe(&self) -> Result<(), DatabaseError> { + // Clear helper + self.helper.clear().await; + + // Clear + let mut seen_event_ids = self.seen_event_ids.lock().await; + seen_event_ids.clear(); + Ok(()) + } +} + +#[cfg_attr(target_arch = "wasm32", async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait)] +impl NostrEventsDatabase for MemoryDatabase { async fn save_event(&self, event: &Event) -> Result { if self.opts.events { let DatabaseEventResult { to_store, .. 
} = self.helper.index_event(event).await; @@ -192,14 +207,4 @@ impl NostrDatabase for MemoryDatabase { self.helper.delete(filter).await; Ok(()) } - - async fn wipe(&self) -> Result<(), DatabaseError> { - // Clear helper - self.helper.clear().await; - - // Clear - let mut seen_event_ids = self.seen_event_ids.lock().await; - seen_event_ids.clear(); - Ok(()) - } } diff --git a/crates/nostr-database/src/profile.rs b/crates/nostr-database/src/profile.rs index 27cd7b898..36ce62d86 100644 --- a/crates/nostr-database/src/profile.rs +++ b/crates/nostr-database/src/profile.rs @@ -7,7 +7,7 @@ use core::cmp::Ordering; use core::hash::{Hash, Hasher}; -use crate::{Metadata, PublicKey}; +use nostr::{Metadata, PublicKey}; /// Profile #[derive(Debug, Clone)] diff --git a/crates/nostr-indexeddb/src/lib.rs b/crates/nostr-indexeddb/src/lib.rs index 1f906324f..1f457b255 100644 --- a/crates/nostr-indexeddb/src/lib.rs +++ b/crates/nostr-indexeddb/src/lib.rs @@ -337,11 +337,45 @@ macro_rules! impl_nostr_database { }; } +// Small hack to have the following macro invocation act as the appropriate +// trait impl block on wasm, but still be compiled on non-wasm as a regular +// impl block otherwise. +// +// The trait impl doesn't compile on non-wasm due to unfulfilled trait bounds, +// this hack allows us to still have most of rust-analyzer's IDE functionality +// within the impl block without having to set it up to check things against +// the wasm target (which would disable many other parts of the codebase). +#[cfg(target_arch = "wasm32")] +macro_rules! impl_nostr_events_database { + ({ $($body:tt)* }) => { + #[async_trait(?Send)] + impl NostrEventsDatabase for WebDatabase { + $($body)* + } + }; +} + +#[cfg(not(target_arch = "wasm32"))] +macro_rules! impl_nostr_events_database { + ({ $($body:tt)* }) => { + impl WebDatabase { + $($body)* + } + }; +} + impl_nostr_database!({ fn backend(&self) -> Backend { Backend::IndexedDB } + #[inline] + async fn wipe(&self) -> Result<(), DatabaseError> { + self._wipe().await.map_err(DatabaseError::backend) + } +}); + +impl_nostr_events_database!({ #[inline] #[tracing::instrument(skip_all, level = "trace")] async fn save_event(&self, event: &Event) -> Result { @@ -477,11 +511,6 @@ impl_nostr_database!({ async fn delete(&self, filter: Filter) -> Result<(), DatabaseError> { self._delete(filter).await.map_err(DatabaseError::backend) } - - #[inline] - async fn wipe(&self) -> Result<(), DatabaseError> { - self._wipe().await.map_err(DatabaseError::backend) - } }); #[inline(always)] diff --git a/crates/nostr-lmdb/src/lib.rs b/crates/nostr-lmdb/src/lib.rs index 1ee846f5e..9dada9b5c 100644 --- a/crates/nostr-lmdb/src/lib.rs +++ b/crates/nostr-lmdb/src/lib.rs @@ -53,6 +53,14 @@ impl NostrDatabase for NostrLMDB { Backend::LMDB } + #[inline] + async fn wipe(&self) -> Result<(), DatabaseError> { + self.db.wipe().await.map_err(DatabaseError::backend) + } +} + +#[async_trait] +impl NostrEventsDatabase for NostrLMDB { #[inline] #[tracing::instrument(skip_all, level = "trace")] async fn save_event(&self, event: &Event) -> Result { @@ -146,11 +154,6 @@ impl NostrDatabase for NostrLMDB { async fn delete(&self, filter: Filter) -> Result<(), DatabaseError> { self.db.delete(filter).await.map_err(DatabaseError::backend) } - - #[inline] - async fn wipe(&self) -> Result<(), DatabaseError> { - self.db.wipe().await.map_err(DatabaseError::backend) - } } #[cfg(test)] diff --git a/crates/nostr-ndb/src/lib.rs b/crates/nostr-ndb/src/lib.rs index 4887acd01..e076ae3b8 100644 --- a/crates/nostr-ndb/src/lib.rs +++ 
b/crates/nostr-ndb/src/lib.rs @@ -82,6 +82,13 @@ impl NostrDatabase for NdbDatabase { Backend::LMDB } + async fn wipe(&self) -> Result<(), DatabaseError> { + Err(DatabaseError::NotSupported) + } +} + +#[async_trait] +impl NostrEventsDatabase for NdbDatabase { #[tracing::instrument(skip_all, level = "trace")] async fn save_event(&self, event: &Event) -> Result { let msg = RelayMessage::event(SubscriptionId::new("ndb"), event.clone()); @@ -169,10 +176,6 @@ impl NostrDatabase for NdbDatabase { async fn delete(&self, _filter: Filter) -> Result<(), DatabaseError> { Err(DatabaseError::NotSupported) } - - async fn wipe(&self) -> Result<(), DatabaseError> { - Err(DatabaseError::NotSupported) - } } #[inline(always)] diff --git a/crates/nostr-relay-builder/src/builder.rs b/crates/nostr-relay-builder/src/builder.rs index d46825bc7..6aea3aca5 100644 --- a/crates/nostr-relay-builder/src/builder.rs +++ b/crates/nostr-relay-builder/src/builder.rs @@ -108,7 +108,7 @@ pub struct RelayBuilder { /// Port pub(crate) port: Option, /// Database - pub(crate) database: Arc, + pub(crate) database: Arc, /// Mode pub(crate) mode: RelayBuilderMode, /// Rate limit @@ -163,9 +163,9 @@ impl RelayBuilder { #[inline] pub fn database(mut self, database: D) -> Self where - D: IntoNostrDatabase, + D: IntoNostrEventsDatabase, { - self.database = database.into_nostr_database(); + self.database = database.into_database(); self } diff --git a/crates/nostr-relay-builder/src/local/internal.rs b/crates/nostr-relay-builder/src/local/internal.rs index d97a812e3..1ac08b46e 100644 --- a/crates/nostr-relay-builder/src/local/internal.rs +++ b/crates/nostr-relay-builder/src/local/internal.rs @@ -10,7 +10,6 @@ use async_utility::futures_util::stream::{self, SplitSink}; use async_utility::futures_util::{SinkExt, StreamExt}; use async_wsocket::native::{self, Message, WebSocketStream}; use atomic_destructor::AtomicDestroyer; -use nostr::prelude::*; use nostr_database::prelude::*; use tokio::net::{TcpListener, TcpStream}; use tokio::sync::{broadcast, Semaphore}; @@ -25,7 +24,7 @@ type WsTx = SplitSink, Message>; #[derive(Debug, Clone)] pub(super) struct InternalLocalRelay { addr: SocketAddr, - database: Arc, + database: Arc, shutdown: broadcast::Sender<()>, /// Channel to notify new event received /// diff --git a/crates/nostr-relay-pool/src/pool/inner.rs b/crates/nostr-relay-pool/src/pool/inner.rs index 06b35b111..9acb41de4 100644 --- a/crates/nostr-relay-pool/src/pool/inner.rs +++ b/crates/nostr-relay-pool/src/pool/inner.rs @@ -13,8 +13,7 @@ use std::time::Duration; use async_utility::futures_util::{future, StreamExt}; use async_utility::thread; use atomic_destructor::AtomicDestroyer; -use nostr::prelude::*; -use nostr_database::{DynNostrDatabase, Events, IntoNostrDatabase}; +use nostr_database::prelude::*; use tokio::sync::{broadcast, mpsc, Mutex, RwLock, RwLockReadGuard}; use tokio_stream::wrappers::ReceiverStream; @@ -29,7 +28,7 @@ type Relays = HashMap; #[derive(Debug, Clone)] pub struct InnerRelayPool { - pub(super) database: Arc, + pub(super) database: Arc, relays: Arc>, notification_sender: broadcast::Sender, subscriptions: Arc>>>, diff --git a/crates/nostr-relay-pool/src/pool/mod.rs b/crates/nostr-relay-pool/src/pool/mod.rs index 747f5de84..a48fc5504 100644 --- a/crates/nostr-relay-pool/src/pool/mod.rs +++ b/crates/nostr-relay-pool/src/pool/mod.rs @@ -10,8 +10,7 @@ use std::sync::Arc; use std::time::Duration; use atomic_destructor::{AtomicDestructor, StealthClone}; -use nostr::prelude::*; -use nostr_database::{DynNostrDatabase, 
Events, IntoNostrDatabase, MemoryDatabase}; +use nostr_database::prelude::*; use tokio::sync::broadcast; pub use tokio_stream::wrappers::ReceiverStream; @@ -123,7 +122,7 @@ impl RelayPool { /// Get database #[inline] - pub fn database(&self) -> &Arc { + pub fn database(&self) -> &Arc { &self.inner.database } diff --git a/crates/nostr-relay-pool/src/relay/inner.rs b/crates/nostr-relay-pool/src/relay/inner.rs index 9608b637d..ee16320c3 100644 --- a/crates/nostr-relay-pool/src/relay/inner.rs +++ b/crates/nostr-relay-pool/src/relay/inner.rs @@ -154,7 +154,7 @@ pub(crate) struct InnerRelay { pub(super) flags: AtomicRelayServiceFlags, pub(super) stats: RelayConnectionStats, pub(super) filtering: RelayFiltering, - database: Arc, + database: Arc, channels: Arc, pub(super) internal_notification_sender: broadcast::Sender, external_notification_sender: OnceCell>, @@ -177,7 +177,7 @@ impl AtomicDestroyer for InnerRelay { impl InnerRelay { pub fn new( url: Url, - database: Arc, + database: Arc, filtering: RelayFiltering, opts: RelayOptions, ) -> Self { diff --git a/crates/nostr-relay-pool/src/relay/mod.rs b/crates/nostr-relay-pool/src/relay/mod.rs index e5bf8c7ee..04e9ad22b 100644 --- a/crates/nostr-relay-pool/src/relay/mod.rs +++ b/crates/nostr-relay-pool/src/relay/mod.rs @@ -140,7 +140,11 @@ impl Relay { /// Create new `Relay` with **custom** `database` and/or `options` #[inline] - pub fn custom(url: Url, database: Arc, opts: RelayOptions) -> Self { + pub fn custom(url: Url, database: T, opts: RelayOptions) -> Self + where + T: IntoNostrDatabase, + { + let database: Arc = database.into_nostr_database(); let filtering: RelayFiltering = RelayFiltering::new(opts.filtering_mode); Self::internal_custom(url, database, filtering, opts) } @@ -148,7 +152,7 @@ impl Relay { #[inline] pub(crate) fn internal_custom( url: Url, - database: Arc, + database: Arc, filtering: RelayFiltering, opts: RelayOptions, ) -> Self { diff --git a/crates/nostr-sdk/src/client/builder.rs b/crates/nostr-sdk/src/client/builder.rs index 3e9b43c9f..dd42cef59 100644 --- a/crates/nostr-sdk/src/client/builder.rs +++ b/crates/nostr-sdk/src/client/builder.rs @@ -8,7 +8,7 @@ use std::sync::Arc; use nostr::signer::{IntoNostrSigner, NostrSigner}; use nostr_database::memory::MemoryDatabase; -use nostr_database::{DynNostrDatabase, IntoNostrDatabase}; +use nostr_database::{IntoNostrDatabase, NostrDatabase}; #[cfg(feature = "nip57")] use nostr_zapper::{DynNostrZapper, IntoNostrZapper}; @@ -23,7 +23,7 @@ pub struct ClientBuilder { #[cfg(feature = "nip57")] pub zapper: Option>, /// Database - pub database: Arc, + pub database: Arc, /// Client options pub opts: Options, } diff --git a/crates/nostr-sdk/src/client/mod.rs b/crates/nostr-sdk/src/client/mod.rs index b72ad6f46..a7292ab48 100644 --- a/crates/nostr-sdk/src/client/mod.rs +++ b/crates/nostr-sdk/src/client/mod.rs @@ -12,7 +12,7 @@ use std::time::Duration; use atomic_destructor::StealthClone; use nostr::prelude::*; -use nostr_database::DynNostrDatabase; +use nostr_database::prelude::*; use nostr_relay_pool::prelude::*; #[cfg(feature = "nip57")] use nostr_zapper::{DynNostrZapper, IntoNostrZapper, ZapperError}; @@ -269,7 +269,7 @@ impl Client { /// Get database #[inline] - pub fn database(&self) -> &Arc { + pub fn database(&self) -> &Arc { self.pool.database() } diff --git a/crates/nostr-sdk/src/lib.rs b/crates/nostr-sdk/src/lib.rs index dd12aa84a..8778d3bd2 100644 --- a/crates/nostr-sdk/src/lib.rs +++ b/crates/nostr-sdk/src/lib.rs @@ -19,8 +19,6 @@ pub use async_utility; #[doc(hidden)] pub use 
nostr::{self, *}; #[doc(hidden)] -pub use nostr_database::{self as database, NostrDatabase, NostrDatabaseExt, Profile}; -#[doc(hidden)] #[cfg(all(target_arch = "wasm32", feature = "indexeddb"))] pub use nostr_indexeddb::WebDatabase; #[doc(hidden)] diff --git a/crates/nostr-sdk/src/prelude.rs b/crates/nostr-sdk/src/prelude.rs index 76afa2ac0..07420a970 100644 --- a/crates/nostr-sdk/src/prelude.rs +++ b/crates/nostr-sdk/src/prelude.rs @@ -10,7 +10,7 @@ // External crates pub use nostr::prelude::*; -pub use nostr_database::*; +pub use nostr_database::prelude::*; pub use nostr_relay_pool::prelude::*; #[cfg(feature = "nip57")] pub use nostr_zapper::prelude::*; From 893b2a5714ebe93ff118f582f7cab78be4b43888 Mon Sep 17 00:00:00 2001 From: Yuki Kishimoto Date: Tue, 19 Nov 2024 17:05:51 +0100 Subject: [PATCH 2/2] contrib: update list of crates to check Signed-off-by: Yuki Kishimoto --- contrib/scripts/check-crates.sh | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/contrib/scripts/check-crates.sh b/contrib/scripts/check-crates.sh index 001720c45..00e94faff 100755 --- a/contrib/scripts/check-crates.sh +++ b/contrib/scripts/check-crates.sh @@ -38,13 +38,14 @@ buildargs=( "-p nostr --no-default-features --features alloc,all-nips" # alloc + all-nips "-p nostr-database" "-p nostr-lmdb" + "-p nostr-indexeddb --target wasm32-unknown-unknown" + "-p nostr-ndb" "-p nostr-relay-pool" "-p nostr-relay-builder" "-p nostr-connect" + "-p nwc" "-p nostr-sdk" # No default features "-p nostr-sdk --features all-nips" - "-p nostr-sdk --features lmdb" - "-p nostr-sdk --features ndb" "-p nostr-sdk --features tor" "-p nostr-cli" )
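
For context, a minimal usage sketch of the trait split introduced in PATCH 1/2 (illustrative only, not part of the diffs above). It assumes a tokio runtime is available and that `nostr_database::prelude` re-exports `MemoryDatabase`, `MemoryDatabaseOptions`, `NostrEventsDatabase` and `NostrEventsDatabaseExt`, as suggested by the re-export changes in `crates/nostr-database/src/lib.rs`; the trait and method names come from the new `events/mod.rs`, everything else is an assumption.

use nostr::prelude::*;
use nostr_database::prelude::*;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let keys = Keys::generate();

    // In-memory backend with event storage enabled.
    let database = MemoryDatabase::with_opts(MemoryDatabaseOptions {
        events: true,
        ..Default::default()
    });

    // `save_event` now lives on the `NostrEventsDatabase` trait.
    let metadata = Metadata::new().name("example");
    let event = EventBuilder::metadata(&metadata).sign_with_keys(&keys)?;
    database.save_event(&event).await?;

    // `metadata` is provided by the `NostrEventsDatabaseExt` extension trait
    // (previously `NostrDatabaseExt::profile`).
    let stored = database.metadata(keys.public_key()).await?;
    println!("stored metadata: {stored:?}");

    Ok(())
}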