#![warn(missing_docs)]
#![warn(rustdoc::bare_urls)]
#![warn(clippy::large_futures)]
#![allow(clippy::mutable_key_type)] use core::fmt;
use std::collections::{BTreeSet, HashMap, HashSet};
use std::sync::Arc;
pub use async_trait::async_trait;
pub use nostr;
use nostr::nips::nip01::Coordinate;
use nostr::nips::nip65::{self, RelayMetadata};
use nostr::{Event, EventId, Filter, JsonUtil, Kind, Metadata, PublicKey, Timestamp, Url};
mod error;
#[cfg(feature = "flatbuf")]
pub mod flatbuffers;
pub mod helper;
pub mod memory;
pub mod prelude;
pub mod profile;
mod tree;
mod util;
pub use self::error::DatabaseError;
#[cfg(feature = "flatbuf")]
pub use self::flatbuffers::{FlatBufferBuilder, FlatBufferDecode, FlatBufferEncode};
pub use self::helper::{DatabaseEventResult, DatabaseHelper};
pub use self::memory::{MemoryDatabase, MemoryDatabaseOptions};
pub use self::profile::Profile;
/// Storage backend powering a [`NostrDatabase`] implementation.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Backend {
    /// In-memory storage (not persisted across restarts)
    Memory,
    /// RocksDB storage
    RocksDB,
    /// LMDB storage
    LMDB,
    /// SQLite storage
    SQLite,
    /// Browser IndexedDB storage
    IndexedDB,
    /// Custom backend, identified by name
    Custom(String),
}
/// Type-erased [`NostrDatabase`] trait object.
pub type DynNostrDatabase = dyn NostrDatabase;
/// Conversion into a shared, type-erased database handle (`Arc<DynNostrDatabase>`).
///
/// Implemented for concrete databases, `Arc`-wrapped databases and
/// already type-erased handles, so APIs can accept any of them uniformly.
pub trait IntoNostrDatabase {
    #[doc(hidden)]
    fn into_nostr_database(self) -> Arc<DynNostrDatabase>;
}
// Identity conversion: an already type-erased handle is returned as-is,
// without a new allocation or refcount churn.
impl IntoNostrDatabase for Arc<DynNostrDatabase> {
    fn into_nostr_database(self) -> Arc<DynNostrDatabase> {
        self
    }
}
// Any owned concrete database is moved into a fresh `Arc` and unsized to
// the trait object. The `Sized` bound keeps this impl from overlapping with
// the `Arc<DynNostrDatabase>` impl above (trait objects are !Sized).
impl<T> IntoNostrDatabase for T
where
    T: NostrDatabase + Sized + 'static,
{
    fn into_nostr_database(self) -> Arc<DynNostrDatabase> {
        Arc::new(self)
    }
}
// An `Arc` around a concrete database is unsize-coerced to
// `Arc<dyn NostrDatabase>` — no clone of the inner value, only the
// pointer metadata changes.
impl<T> IntoNostrDatabase for Arc<T>
where
    T: NostrDatabase + 'static,
{
    fn into_nostr_database(self) -> Arc<DynNostrDatabase> {
        self
    }
}
/// Status of an event id as known to the database (see [`NostrDatabase::check_id`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum DatabaseEventStatus {
    /// The event is stored in the database
    Saved,
    /// The event is known but marked as deleted
    Deleted,
    /// The event is unknown to the database
    NotExistent,
}
/// Nostr events database abstraction.
///
/// Implementors store, look up and query [`Event`]s; the blanket-implemented
/// [`NostrDatabaseExt`] builds higher-level helpers on top of [`NostrDatabase::query`].
#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
pub trait NostrDatabase: fmt::Debug + Send + Sync {
    /// The storage [`Backend`] this database uses.
    fn backend(&self) -> Backend;

    /// Save an [`Event`] into the store.
    ///
    /// Returns whether the event was saved (presumably `false` for
    /// duplicates/rejected events — confirm against implementors).
    async fn save_event(&self, event: &Event) -> Result<bool, DatabaseError>;

    /// Check the [`DatabaseEventStatus`] of an event id (saved, deleted or unknown).
    async fn check_id(&self, event_id: &EventId) -> Result<DatabaseEventStatus, DatabaseError>;

    /// Check if a coordinate-addressed event ([`Coordinate`]) has been deleted,
    /// evaluated against `timestamp` (exact ordering semantics are defined by
    /// implementors — NOTE(review): confirm inclusive/exclusive bound).
    async fn has_coordinate_been_deleted(
        &self,
        coordinate: &Coordinate,
        timestamp: &Timestamp,
    ) -> Result<bool, DatabaseError>;

    /// Record that `event_id` was seen on `relay_url`.
    async fn event_id_seen(&self, event_id: EventId, relay_url: Url) -> Result<(), DatabaseError>;

    /// Relays on which the event was seen.
    ///
    /// Returns `None` when nothing is recorded for the event
    /// (NOTE(review): confirm implementors distinguish this from an empty set).
    async fn event_seen_on_relays(
        &self,
        event_id: &EventId,
    ) -> Result<Option<HashSet<Url>>, DatabaseError>;

    /// Get an [`Event`] by its id, if stored.
    async fn event_by_id(&self, event_id: &EventId) -> Result<Option<Event>, DatabaseError>;

    /// Count the stored events matching `filters`.
    async fn count(&self, filters: Vec<Filter>) -> Result<usize, DatabaseError>;

    /// Query stored events matching `filters`.
    async fn query(&self, filters: Vec<Filter>) -> Result<Vec<Event>, DatabaseError>;

    /// `(EventId, Timestamp)` pairs for events matching `filter`, as needed
    /// for negentropy set reconciliation.
    ///
    /// Default implementation runs a full [`NostrDatabase::query`] and projects
    /// each event to its id and creation time; backends may override with a
    /// cheaper index scan.
    async fn negentropy_items(
        &self,
        filter: Filter,
    ) -> Result<Vec<(EventId, Timestamp)>, DatabaseError> {
        let events: Vec<Event> = self.query(vec![filter]).await?;
        Ok(events.into_iter().map(|e| (e.id, e.created_at)).collect())
    }

    /// Delete all stored events matching `filter`.
    async fn delete(&self, filter: Filter) -> Result<(), DatabaseError>;

    /// Wipe all stored data.
    async fn wipe(&self) -> Result<(), DatabaseError>;
}
/// Convenience queries built on top of [`NostrDatabase::query`].
///
/// Blanket-implemented for every [`NostrDatabase`], so these helpers are
/// available on any database handle.
#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
pub trait NostrDatabaseExt: NostrDatabase {
    /// Get the [`Profile`] (kind 0 metadata) of `public_key`.
    ///
    /// Falls back to a bare profile when no metadata event is stored or when
    /// its content fails to deserialize (the error is logged, not propagated).
    #[tracing::instrument(skip_all, level = "trace")]
    async fn profile(&self, public_key: PublicKey) -> Result<Profile, DatabaseError> {
        let filter = Filter::new()
            .author(public_key)
            .kind(Kind::Metadata)
            .limit(1);
        let events: Vec<Event> = self.query(vec![filter]).await?;
        match events.first() {
            Some(event) => match Metadata::from_json(&event.content) {
                Ok(metadata) => Ok(Profile::new(public_key, metadata)),
                Err(e) => {
                    tracing::error!("Impossible to deserialize profile metadata: {e}");
                    Ok(Profile::from(public_key))
                }
            },
            None => Ok(Profile::from(public_key)),
        }
    }

    /// Public keys referenced by the stored contact list (kind 3) of `public_key`.
    ///
    /// Returns an empty list when no contact list is stored.
    #[tracing::instrument(skip_all, level = "trace")]
    async fn contacts_public_keys(
        &self,
        public_key: PublicKey,
    ) -> Result<Vec<PublicKey>, DatabaseError> {
        let filter = Filter::new()
            .author(public_key)
            .kind(Kind::ContactList)
            .limit(1);
        let events: Vec<Event> = self.query(vec![filter]).await?;
        match events.first() {
            Some(event) => Ok(event.public_keys().copied().collect()),
            None => Ok(Vec::new()),
        }
    }

    /// [`Profile`]s for every contact in the stored contact list of `public_key`.
    ///
    /// Contacts with stored kind-0 metadata get it attached; the `extend` then
    /// adds a bare profile for contacts without metadata — entries already in
    /// the set are kept, which assumes `Profile` equality/hashing is keyed on
    /// the public key (NOTE(review): confirm against `Profile`).
    #[tracing::instrument(skip_all, level = "trace")]
    async fn contacts(&self, public_key: PublicKey) -> Result<BTreeSet<Profile>, DatabaseError> {
        let filter = Filter::new()
            .author(public_key)
            .kind(Kind::ContactList)
            .limit(1);
        let events: Vec<Event> = self.query(vec![filter]).await?;
        match events.first() {
            Some(event) => {
                let filter = Filter::new()
                    .authors(event.public_keys().copied())
                    .kind(Kind::Metadata);
                let mut contacts: HashSet<Profile> = self
                    .query(vec![filter])
                    .await?
                    .into_iter()
                    .map(|e| {
                        let metadata: Metadata =
                            Metadata::from_json(&e.content).unwrap_or_default();
                        Profile::new(e.pubkey, metadata)
                    })
                    .collect();
                contacts.extend(event.public_keys().copied().map(Profile::from));
                Ok(contacts.into_iter().collect())
            }
            None => Ok(BTreeSet::new()),
        }
    }

    /// Relay list (NIP-65, kind 10002) of `public_key`, as a map of relay URL
    /// to optional read/write [`RelayMetadata`].
    ///
    /// Returns an empty map when no relay list event is stored.
    #[tracing::instrument(skip_all, level = "trace")]
    async fn relay_list(
        &self,
        public_key: PublicKey,
    ) -> Result<HashMap<Url, Option<RelayMetadata>>, DatabaseError> {
        // `Filter::new()` for consistency with the other helpers in this trait.
        let filter: Filter = Filter::new()
            .author(public_key)
            .kind(Kind::RelayList)
            .limit(1);
        let events: Vec<Event> = self.query(vec![filter]).await?;
        match events.first() {
            Some(event) => Ok(nip65::extract_relay_list(event)
                .map(|(u, m)| (u.clone(), *m))
                .collect()),
            None => Ok(HashMap::new()),
        }
    }

    /// Relay lists (NIP-65) of multiple public keys in a single query.
    ///
    /// Keys without a stored relay list are absent from the returned map.
    #[tracing::instrument(skip_all, level = "trace")]
    async fn relay_lists<I>(
        &self,
        public_keys: I,
    ) -> Result<HashMap<PublicKey, HashMap<Url, Option<RelayMetadata>>>, DatabaseError>
    where
        I: IntoIterator<Item = PublicKey> + Send,
    {
        // `Filter::new()` for consistency with the other helpers in this trait.
        let filter: Filter = Filter::new().authors(public_keys).kind(Kind::RelayList);
        let events: Vec<Event> = self.query(vec![filter]).await?;
        let mut map = HashMap::with_capacity(events.len());
        for event in events.into_iter() {
            map.insert(
                event.pubkey,
                nip65::extract_owned_relay_list(event).collect(),
            );
        }
        Ok(map)
    }
}
// Blanket implementation: every `NostrDatabase` — including unsized trait
// objects (`?Sized`, so `DynNostrDatabase` qualifies) — gets the extension
// methods for free.
#[cfg_attr(target_arch = "wasm32", async_trait(?Send))]
#[cfg_attr(not(target_arch = "wasm32"), async_trait)]
impl<T: NostrDatabase + ?Sized> NostrDatabaseExt for T {}