Skip to main content

FtsBackend

Trait FtsBackend 

Source
pub trait FtsBackend {
    type Error: Display;

    // Required methods
    fn read_postings(&self, tid: u64, collection: &str, term: &str) -> Result<Vec<Posting>, Self::Error>;
    fn write_postings(&self, tid: u64, collection: &str, term: &str, postings: &[Posting]) -> Result<(), Self::Error>;
    fn remove_postings(&self, tid: u64, collection: &str, term: &str) -> Result<(), Self::Error>;
    fn read_doc_length(&self, tid: u64, collection: &str, doc_id: Surrogate) -> Result<Option<u32>, Self::Error>;
    fn write_doc_length(&self, tid: u64, collection: &str, doc_id: Surrogate, length: u32) -> Result<(), Self::Error>;
    fn remove_doc_length(&self, tid: u64, collection: &str, doc_id: Surrogate) -> Result<(), Self::Error>;
    fn collection_terms(&self, tid: u64, collection: &str) -> Result<Vec<String>, Self::Error>;
    fn collection_stats(&self, tid: u64, collection: &str) -> Result<(u32, u64), Self::Error>;
    fn increment_stats(&self, tid: u64, collection: &str, doc_len: u32) -> Result<(), Self::Error>;
    fn decrement_stats(&self, tid: u64, collection: &str, doc_len: u32) -> Result<(), Self::Error>;
    fn read_meta(&self, tid: u64, collection: &str, subkey: &str) -> Result<Option<Vec<u8>>, Self::Error>;
    fn write_meta(&self, tid: u64, collection: &str, subkey: &str, value: &[u8]) -> Result<(), Self::Error>;
    fn write_segment(&self, tid: u64, collection: &str, segment_id: &str, data: &[u8]) -> Result<(), Self::Error>;
    fn read_segment(&self, tid: u64, collection: &str, segment_id: &str) -> Result<Option<Vec<u8>>, Self::Error>;
    fn list_segments(&self, tid: u64, collection: &str) -> Result<Vec<String>, Self::Error>;
    fn remove_segment(&self, tid: u64, collection: &str, segment_id: &str) -> Result<(), Self::Error>;
    fn purge_collection(&self, tid: u64, collection: &str) -> Result<usize, Self::Error>;
    fn purge_tenant(&self, tid: u64) -> Result<usize, Self::Error>;
}
Expand description

Storage backend abstraction for the full-text search engine.

Origin implements this with redb (persistent). Lite implements it with an in-memory HashMap. All scoring, BMW, compression, and analysis logic works identically over any backend.

Every tenant-partitioned method takes tid: u64 as a first-class parameter. Backends are required to isolate tenants structurally — no tenant boundary may depend on lexical-prefix ordering of a composed string key.

Write methods take &self (not &mut self) because:

  • Redb provides transactional isolation internally — concurrent writes are safe through redb’s MVCC.
  • MemoryBackend uses interior mutability (RefCell) to match the same trait signature, keeping the trait uniform.

Required Associated Types§

Source

type Error: Display

Error type for backend operations.

Required Methods§

Source

fn read_postings( &self, tid: u64, collection: &str, term: &str, ) -> Result<Vec<Posting>, Self::Error>

Read the posting list for a term in a collection.

Source

fn write_postings( &self, tid: u64, collection: &str, term: &str, postings: &[Posting], ) -> Result<(), Self::Error>

Write/replace the posting list for a term in a collection.

Source

fn remove_postings( &self, tid: u64, collection: &str, term: &str, ) -> Result<(), Self::Error>

Remove a term’s posting list entirely.

Source

fn read_doc_length( &self, tid: u64, collection: &str, doc_id: Surrogate, ) -> Result<Option<u32>, Self::Error>

Read the document length (token count) for a document.

Source

fn write_doc_length( &self, tid: u64, collection: &str, doc_id: Surrogate, length: u32, ) -> Result<(), Self::Error>

Write/replace the document length for a document.

Source

fn remove_doc_length( &self, tid: u64, collection: &str, doc_id: Surrogate, ) -> Result<(), Self::Error>

Remove a document’s length entry.

Source

fn collection_terms( &self, tid: u64, collection: &str, ) -> Result<Vec<String>, Self::Error>

Get all term names in a collection (for fuzzy matching).

Source

fn collection_stats( &self, tid: u64, collection: &str, ) -> Result<(u32, u64), Self::Error>

Get total document count and sum of all document lengths for a collection. Returns (doc_count, total_token_sum).

Implementations should maintain these incrementally for O(1) lookup.

Source

fn increment_stats( &self, tid: u64, collection: &str, doc_len: u32, ) -> Result<(), Self::Error>

Increment collection stats after indexing a document. doc_len is the number of tokens in the newly indexed document.

Source

fn decrement_stats( &self, tid: u64, collection: &str, doc_len: u32, ) -> Result<(), Self::Error>

Decrement collection stats after removing a document. doc_len is the token count of the removed document.

Source

fn read_meta( &self, tid: u64, collection: &str, subkey: &str, ) -> Result<Option<Vec<u8>>, Self::Error>

Read a metadata blob by sub-key (e.g., “docmap”, “fieldnorms”, “analyzer”, “language”).

Source

fn write_meta( &self, tid: u64, collection: &str, subkey: &str, value: &[u8], ) -> Result<(), Self::Error>

Write a metadata blob by sub-key.

Source

fn write_segment( &self, tid: u64, collection: &str, segment_id: &str, data: &[u8], ) -> Result<(), Self::Error>

Write a segment blob. segment_id is a stable per-collection identifier (e.g., "L{level}:{id:016x}").

Source

fn read_segment( &self, tid: u64, collection: &str, segment_id: &str, ) -> Result<Option<Vec<u8>>, Self::Error>

Read a segment blob. Returns None if not found.

Source

fn list_segments( &self, tid: u64, collection: &str, ) -> Result<Vec<String>, Self::Error>

List all segment ids for a collection.

Source

fn remove_segment( &self, tid: u64, collection: &str, segment_id: &str, ) -> Result<(), Self::Error>

Remove a segment blob.

Source

fn purge_collection( &self, tid: u64, collection: &str, ) -> Result<usize, Self::Error>

Remove all entries for a collection. Returns count of removed entries.

Source

fn purge_tenant(&self, tid: u64) -> Result<usize, Self::Error>

Remove all entries for a tenant across every collection. Returns count of removed entries. Implementations MUST use a structural drop (e.g., tuple range (tid, ..)..(tid+1, ..)) rather than a lexical-prefix scan.

Implementors§