1use std::path::{Path, PathBuf};
42
43use mentedb_context::{AssemblyConfig, ContextAssembler, ContextWindow, ScoredMemory};
44use mentedb_core::edge::EdgeType;
45use mentedb_core::error::MenteResult;
46use mentedb_core::types::MemoryId;
47use mentedb_core::{MemoryEdge, MemoryNode, MenteError};
48use mentedb_embedding::provider::EmbeddingProvider;
49use mentedb_graph::GraphManager;
50use mentedb_index::IndexManager;
51use mentedb_query::{Mql, QueryPlan};
52use mentedb_storage::StorageEngine;
53use tracing::{debug, info};
54
55pub use mentedb_cognitive as cognitive;
58pub use mentedb_context as context;
60pub use mentedb_core as core;
62pub use mentedb_graph as graph;
64pub use mentedb_index as index;
66pub use mentedb_query as query;
68pub use mentedb_storage as storage;
70
71pub mod prelude {
73 pub use mentedb_core::edge::EdgeType;
74 pub use mentedb_core::error::MenteResult;
75 pub use mentedb_core::memory::MemoryType;
76 pub use mentedb_core::types::*;
77 pub use mentedb_core::{MemoryEdge, MemoryNode, MemoryTier, MenteError};
78
79 pub use crate::MenteDb;
80}
81
82use mentedb_storage::PageId;
83use std::collections::HashMap;
85
/// The embedded MenteDB database handle.
///
/// Ties together the page-based storage engine, the secondary indexes,
/// the relationship graph, and an optional embedding provider behind a
/// single owner. Construct with [`MenteDb::open`] or
/// [`MenteDb::open_with_embedder`].
pub struct MenteDb {
    /// Page-based persistence backend for memory nodes.
    storage: StorageEngine,
    /// Vector / tag / temporal indexes queried via `hybrid_search`.
    index: IndexManager,
    /// Relationship graph between memories (edges such as `Supersedes`).
    graph: GraphManager,
    /// Maps a memory id to the storage page holding its serialized node;
    /// rebuilt from a full storage scan on `open`.
    page_map: HashMap<MemoryId, PageId>,
    /// Expected embedding length; 0 means no embedder is attached and
    /// dimension validation in `store` is skipped.
    embedding_dim: usize,
    /// Root directory of the database on disk (indexes/graph are saved
    /// under subdirectories of this path).
    path: PathBuf,
    /// Optional text-embedding provider used by `embed_text`.
    embedder: Option<Box<dyn EmbeddingProvider>>,
}
103
104impl MenteDb {
105 pub fn open(path: &Path) -> MenteResult<Self> {
107 info!("Opening MenteDB at {}", path.display());
108 let mut storage = StorageEngine::open(path)?;
109
110 let index_dir = path.join("indexes");
111 let graph_dir = path.join("graph");
112
113 let index = if index_dir.join("hnsw.json").exists() {
114 debug!("Loading indexes from {}", index_dir.display());
115 IndexManager::load(&index_dir)?
116 } else {
117 IndexManager::default()
118 };
119
120 let graph = if graph_dir.join("graph.json").exists() {
121 debug!("Loading graph from {}", graph_dir.display());
122 GraphManager::load(&graph_dir)?
123 } else {
124 GraphManager::new()
125 };
126
127 let entries = storage.scan_all_memories();
129 let mut page_map = HashMap::new();
130 for (memory_id, page_id) in &entries {
131 page_map.insert(*memory_id, *page_id);
132 }
133 if !page_map.is_empty() {
134 info!(memories = page_map.len(), "rebuilt page map from storage");
135 }
136
137 Ok(Self {
138 storage,
139 index,
140 graph,
141 page_map,
142 embedding_dim: 0,
143 path: path.to_path_buf(),
144 embedder: None,
145 })
146 }
147
148 pub fn open_with_embedder(
150 path: &Path,
151 embedder: Box<dyn EmbeddingProvider>,
152 ) -> MenteResult<Self> {
153 let mut db = Self::open(path)?;
154 db.embedding_dim = embedder.dimensions();
155 db.embedder = Some(embedder);
156 Ok(db)
157 }
158
159 pub fn set_embedder(&mut self, embedder: Box<dyn EmbeddingProvider>) {
161 self.embedding_dim = embedder.dimensions();
162 self.embedder = Some(embedder);
163 }
164
165 pub fn embed_text(&self, text: &str) -> MenteResult<Option<Vec<f32>>> {
168 match &self.embedder {
169 Some(e) => Ok(Some(e.embed(text)?)),
170 None => Ok(None),
171 }
172 }
173
174 pub fn store(&mut self, node: MemoryNode) -> MenteResult<()> {
179 let id = node.id;
180 debug!("Storing memory {}", id);
181
182 if self.embedding_dim > 0
184 && !node.embedding.is_empty()
185 && node.embedding.len() != self.embedding_dim
186 {
187 return Err(MenteError::EmbeddingDimensionMismatch {
188 got: node.embedding.len(),
189 expected: self.embedding_dim,
190 });
191 }
192
193 let page_id = self.storage.store_memory(&node)?;
194 self.page_map.insert(id, page_id);
195 self.index.index_memory(&node);
196 self.graph.add_memory(id);
197
198 Ok(())
199 }
200
201 pub fn recall(&mut self, query: &str) -> MenteResult<ContextWindow> {
207 debug!("Recalling with query: {}", query);
208 let plan = Mql::parse(query)?;
209
210 let scored = self.execute_plan(&plan)?;
211 let config = AssemblyConfig::default();
212 let window = ContextAssembler::assemble(scored, vec![], &config);
213 Ok(window)
214 }
215
216 pub fn recall_similar(
222 &mut self,
223 embedding: &[f32],
224 k: usize,
225 ) -> MenteResult<Vec<(MemoryId, f32)>> {
226 debug!("Recall similar, k={}", k);
227 let results = self.index.hybrid_search(embedding, None, None, k * 3);
229 let graph = self.graph.graph();
230 let filtered: Vec<(MemoryId, f32)> = results
231 .into_iter()
232 .filter(|(id, _)| {
233 let incoming = graph.incoming(*id);
234 !incoming.iter().any(|(_, e)| {
235 e.edge_type == EdgeType::Supersedes || e.edge_type == EdgeType::Contradicts
236 })
237 })
238 .take(k)
239 .collect();
240 Ok(filtered)
241 }
242
243 pub fn relate(&mut self, edge: MemoryEdge) -> MenteResult<()> {
245 debug!("Relating {} -> {}", edge.source, edge.target);
246 self.graph.add_relationship(&edge)?;
247 Ok(())
248 }
249
250 pub fn get_memory(&mut self, id: MemoryId) -> MenteResult<MemoryNode> {
252 let page_id = self
253 .page_map
254 .get(&id)
255 .copied()
256 .ok_or(MenteError::MemoryNotFound(id))?;
257 self.storage.load_memory(page_id)
258 }
259
260 pub fn memory_ids(&self) -> Vec<MemoryId> {
262 self.page_map.keys().copied().collect()
263 }
264
265 pub fn memory_count(&self) -> usize {
267 self.page_map.len()
268 }
269
270 pub fn forget(&mut self, id: MemoryId) -> MenteResult<()> {
272 debug!("Forgetting memory {}", id);
273
274 if let Some(&page_id) = self.page_map.get(&id)
276 && let Ok(node) = self.storage.load_memory(page_id)
277 {
278 self.index.remove_memory(id, &node);
279 }
280
281 self.graph.remove_memory(id);
282 self.page_map.remove(&id);
283 Ok(())
284 }
285
286 pub fn graph(&self) -> &GraphManager {
288 &self.graph
289 }
290
291 pub fn close(&mut self) -> MenteResult<()> {
293 info!("Closing MenteDB");
294 self.flush()?;
295 self.storage.close()?;
296 Ok(())
297 }
298
299 pub fn flush(&mut self) -> MenteResult<()> {
304 debug!("Flushing MenteDB to disk");
305 self.index.save(&self.path.join("indexes"))?;
306 self.graph.save(&self.path.join("graph"))?;
307 self.storage.checkpoint()?;
308 Ok(())
309 }
310
311 fn execute_plan(&mut self, plan: &QueryPlan) -> MenteResult<Vec<ScoredMemory>> {
313 match plan {
314 QueryPlan::VectorSearch { query, k, .. } => {
315 let hits = self.index.hybrid_search(query, None, None, *k);
316 self.load_scored_memories(&hits)
317 }
318 QueryPlan::TagScan { tags, limit, .. } => {
319 let tag_refs: Vec<&str> = tags.iter().map(|s| s.as_str()).collect();
320 let k = limit.unwrap_or(10);
321 let hits = self.index.hybrid_search(&[], Some(&tag_refs), None, k);
323 self.load_scored_memories(&hits)
324 }
325 QueryPlan::TemporalScan { start, end, .. } => {
326 let hits = self
327 .index
328 .hybrid_search(&[], None, Some((*start, *end)), 100);
329 self.load_scored_memories(&hits)
330 }
331 QueryPlan::GraphTraversal { start, depth, .. } => {
332 let (ids, _edges) = self.graph.get_context_subgraph(*start, *depth);
333 let scored: Vec<ScoredMemory> = ids
334 .iter()
335 .filter_map(|id| {
336 self.page_map.get(id).and_then(|&pid| {
337 self.storage.load_memory(pid).ok().map(|node| ScoredMemory {
338 memory: node,
339 score: 1.0,
340 })
341 })
342 })
343 .collect();
344 Ok(scored)
345 }
346 QueryPlan::PointLookup { id } => {
347 let page_id = self
348 .page_map
349 .get(id)
350 .ok_or(MenteError::MemoryNotFound(*id))?;
351 let node = self.storage.load_memory(*page_id)?;
352 Ok(vec![ScoredMemory {
353 memory: node,
354 score: 1.0,
355 }])
356 }
357 _ => Ok(vec![]),
358 }
359 }
360
361 fn load_scored_memories(&mut self, hits: &[(MemoryId, f32)]) -> MenteResult<Vec<ScoredMemory>> {
363 let mut scored = Vec::with_capacity(hits.len());
364 for &(id, score) in hits {
365 if let Some(&page_id) = self.page_map.get(&id)
366 && let Ok(node) = self.storage.load_memory(page_id)
367 {
368 scored.push(ScoredMemory {
369 memory: node,
370 score,
371 });
372 }
373 }
374 Ok(scored)
375 }
376}