1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
//! # semstore
//!
//! Local semantic search for Rust applications — store text, search by meaning.
//! No cloud required. Embeddings run on-device via ONNX (BGE-Small, ~23 MB).
//!
//! ## Features
//!
//! | Feature | Default | Description |
//! |---------|---------|-------------|
//! | `default-embedder` | ✓ | Bundles BGE-Small-EN-v1.5 via fastembed |
//! | `bundled-sqlite` | ✓ | Statically links SQLite (no system lib needed) |
//!
//! Disable `default-embedder` to bring your own model via the [`Embedder`] trait.
//!
//! ## Quick start
//!
//! ```no_run
//! use semstore::SemanticIndex;
//! use serde_json::json;
//!
//! let mut idx = SemanticIndex::open("./index.db")?;
//!
//! idx.insert("Rust ownership prevents data races at compile time",
//!            json!({ "lang": "rust" }))?;
//! idx.insert("Python uses reference counting for memory management",
//!            json!({ "lang": "python" }))?;
//!
//! let results = idx.search("memory safety", 3)?;
//! for r in &results {
//!     println!("[{:.2}] {}", r.score, r.content);
//! }
//! # Ok::<(), semstore::Error>(())
//! ```
//!
//! ## Custom embedder
//!
//! ```no_run
//! use semstore::{Embedder, Error, SemanticIndex};
//! use serde_json::json;
//!
//! struct OpenAiEmbedder { /* your fields */ }
//!
//! impl Embedder for OpenAiEmbedder {
//!     fn embed(&self, text: &str) -> Result<Vec<f32>, Error> {
//!         // call OpenAI /v1/embeddings …
//!         Ok(vec![0.0; 1536])
//!     }
//!     fn dimensions(&self) -> usize { 1536 }
//! }
//!
//! let mut idx = SemanticIndex::builder()
//!     .embedder(OpenAiEmbedder { /* … */ })
//!     .path("./index.db")
//!     .build()?;
//! # Ok::<(), semstore::Error>(())
//! ```
pub use Embedder;
pub use ;
pub use ;
pub use BgeEmbedder;