llm_memory_graph/engine/mod.rs
//! Core engine for the memory graph

mod async_memory_graph;

pub use async_memory_graph::AsyncMemoryGraph;

use crate::error::{Error, Result};
use crate::storage::{SledBackend, StorageBackend};
use crate::types::{
    AgentNode, Config, ConversationSession, Edge, EdgeType, Node, NodeId, PromptMetadata,
    PromptNode, PromptTemplate, ResponseMetadata, ResponseNode, SessionId, TemplateId, TokenUsage,
    ToolInvocation,
};
use parking_lot::RwLock;
use std::collections::HashMap;
use std::sync::Arc;

/// Main interface for interacting with the memory graph
///
/// `MemoryGraph` provides a thread-safe, high-level API for managing conversation
/// sessions, prompts, responses, and their relationships in a graph structure.
///
/// # Examples
///
/// ```no_run
/// use llm_memory_graph::{MemoryGraph, Config};
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let config = Config::new("./data/my_graph.db");
/// let graph = MemoryGraph::open(config)?;
///
/// let session = graph.create_session()?;
/// let prompt_id = graph.add_prompt(session.id, "What is Rust?".to_string(), None)?;
/// # Ok(())
/// # }
/// ```
pub struct MemoryGraph {
    backend: Arc<dyn StorageBackend>,
    sessions: Arc<RwLock<HashMap<SessionId, ConversationSession>>>,
}

impl MemoryGraph {
    /// Open or create a memory graph with the given configuration
    ///
    /// This will create the database directory if it doesn't exist and initialize
    /// all necessary storage trees.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The database path is invalid or inaccessible
    /// - Storage initialization fails
    /// - Existing data is corrupted
    ///
    /// # Examples
    ///
    /// ```no_run
    /// use llm_memory_graph::{MemoryGraph, Config};
    ///
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let config = Config::new("./data/graph.db");
    /// let graph = MemoryGraph::open(config)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn open(config: Config) -> Result<Self> {
        let backend = SledBackend::open(&config.path)?;

        Ok(Self {
            backend: Arc::new(backend),
            sessions: Arc::new(RwLock::new(HashMap::new())),
        })
    }

    /// Create a new conversation session
    ///
    /// Sessions are used to group related prompts and responses together.
    /// Each session has a unique ID and can store custom metadata.
    ///
    /// # Errors
    ///
    /// Returns an error if the session cannot be persisted to storage.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// let session = graph.create_session()?;
    /// println!("Created session: {}", session.id);
    /// # Ok(())
    /// # }
    /// ```
    pub fn create_session(&self) -> Result<ConversationSession> {
        let session = ConversationSession::new();
        self.backend.store_node(&Node::Session(session.clone()))?;

        // Cache the session
        self.sessions.write().insert(session.id, session.clone());

        Ok(session)
    }

    /// Create a session with custom metadata
    ///
    /// # Errors
    ///
    /// Returns an error if the session cannot be persisted to storage.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # use std::collections::HashMap;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// let mut metadata = HashMap::new();
    /// metadata.insert("user_id".to_string(), "123".to_string());
    /// let session = graph.create_session_with_metadata(metadata)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn create_session_with_metadata(
        &self,
        metadata: HashMap<String, String>,
    ) -> Result<ConversationSession> {
        let session = ConversationSession::with_metadata(metadata);
        self.backend.store_node(&Node::Session(session.clone()))?;

        // Cache the session
        self.sessions.write().insert(session.id, session.clone());

        Ok(session)
    }

    /// Get a session by ID
    ///
    /// This will first check the in-memory cache, then fall back to storage.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The session doesn't exist
    /// - Storage retrieval fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let created_session = graph.create_session()?;
    /// let session = graph.get_session(created_session.id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_session(&self, session_id: SessionId) -> Result<ConversationSession> {
        // Check cache first
        if let Some(session) = self.sessions.read().get(&session_id) {
            return Ok(session.clone());
        }

        // Fall back to storage
        let nodes = self.backend.get_session_nodes(&session_id)?;
        for node in nodes {
            if let Node::Session(session) = node {
                if session.id == session_id {
                    // Update cache
                    self.sessions.write().insert(session_id, session.clone());
                    return Ok(session);
                }
            }
        }

        Err(Error::SessionNotFound(session_id.to_string()))
    }

    /// Add a prompt to a session
    ///
    /// This creates a new prompt node and automatically creates edges linking it
    /// to the session and to the previous prompt if one exists.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The session doesn't exist
    /// - Storage operations fail
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// let prompt_id = graph.add_prompt(
    ///     session.id,
    ///     "Explain quantum entanglement".to_string(),
    ///     None,
    /// )?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn add_prompt(
        &self,
        session_id: SessionId,
        content: String,
        metadata: Option<PromptMetadata>,
    ) -> Result<NodeId> {
        // Verify session exists
        self.get_session(session_id)?;

        let prompt = if let Some(meta) = metadata {
            PromptNode::with_metadata(session_id, content, meta)
        } else {
            PromptNode::new(session_id, content)
        };

        let prompt_id = prompt.id;
        self.backend.store_node(&Node::Prompt(prompt.clone()))?;

        // Create edge from prompt to session
        let session_nodes = self.backend.get_session_nodes(&session_id)?;
        if let Some(session_node) = session_nodes.iter().find(|n| matches!(n, Node::Session(_))) {
            let edge = Edge::new(prompt_id, session_node.id(), EdgeType::PartOf);
            self.backend.store_edge(&edge)?;
        }

        // Find the previous prompt in this session and create a Follows edge
        let session_prompts: Vec<_> = session_nodes
            .into_iter()
            .filter_map(|n| {
                if let Node::Prompt(p) = n {
                    Some(p)
                } else {
                    None
                }
            })
            .collect();

        if !session_prompts.is_empty() {
            // Get the most recent prompt (excluding the one we just added)
            let mut previous_prompts: Vec<_> = session_prompts
                .into_iter()
                .filter(|p| p.id != prompt_id)
                .collect();
            previous_prompts.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));

            if let Some(prev_prompt) = previous_prompts.first() {
                let edge = Edge::new(prompt_id, prev_prompt.id, EdgeType::Follows);
                self.backend.store_edge(&edge)?;
            }
        }

        Ok(prompt_id)
    }

    /// Add a response to a prompt
    ///
    /// This creates a response node and a RespondsTo edge linking it to the prompt.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The prompt doesn't exist
    /// - Storage operations fail
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, TokenUsage};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// let usage = TokenUsage::new(10, 20);
    /// let response_id = graph.add_response(
    ///     prompt_id,
    ///     "Quantum entanglement is...".to_string(),
    ///     usage,
    ///     None,
    /// )?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn add_response(
        &self,
        prompt_id: NodeId,
        content: String,
        usage: TokenUsage,
        metadata: Option<ResponseMetadata>,
    ) -> Result<NodeId> {
        // Verify prompt exists
        self.get_node(prompt_id)?;

        let response = if let Some(meta) = metadata {
            ResponseNode::with_metadata(prompt_id, content, usage, meta)
        } else {
            ResponseNode::new(prompt_id, content, usage)
        };

        let response_id = response.id;
        self.backend.store_node(&Node::Response(response.clone()))?;

        // Create edge from response to prompt
        let edge = Edge::new(response_id, prompt_id, EdgeType::RespondsTo);
        self.backend.store_edge(&edge)?;

        Ok(response_id)
    }

    /// Add a tool invocation node to the graph
    ///
    /// This creates a tool invocation record and automatically creates an Invokes edge
    /// from the response to the tool invocation.
    ///
    /// # Arguments
    ///
    /// * `tool` - The tool invocation to add
    ///
    /// # Returns
    ///
    /// The node ID of the created tool invocation
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use llm_memory_graph::*;
    /// # fn main() -> Result<()> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let response_id = graph.add_response(prompt_id, "Response".to_string(), TokenUsage::new(10, 20), None)?;
    /// let params = serde_json::json!({"operation": "add", "a": 2, "b": 3});
    /// let tool = ToolInvocation::new(response_id, "calculator".to_string(), params);
    /// let tool_id = graph.add_tool_invocation(tool)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn add_tool_invocation(&self, tool: ToolInvocation) -> Result<NodeId> {
        let tool_id = tool.id;
        let response_id = tool.response_id;

        // Store the tool invocation node
        self.backend.store_node(&Node::ToolInvocation(tool))?;

        // Create Invokes edge from response to tool
        let edge = Edge::new(response_id, tool_id, EdgeType::Invokes);
        self.backend.store_edge(&edge)?;

        Ok(tool_id)
    }

    /// Update an existing tool invocation with results
    ///
    /// This method updates a tool invocation's status, result, and duration after execution.
    ///
    /// # Arguments
    ///
    /// * `tool_id` - The ID of the tool invocation to update
    /// * `success` - Whether the tool execution was successful
    /// * `result_or_error` - The JSON-serialized result (if successful) or the error message (if failed)
    /// * `duration_ms` - Execution duration in milliseconds
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use llm_memory_graph::*;
    /// # fn main() -> Result<()> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let response_id = graph.add_response(prompt_id, "Response".to_string(), TokenUsage::new(10, 20), None)?;
    /// # let params = serde_json::json!({"operation": "add", "a": 2, "b": 3});
    /// # let tool = ToolInvocation::new(response_id, "calculator".to_string(), params);
    /// # let tool_id = graph.add_tool_invocation(tool)?;
    /// // Mark tool invocation as successful
    /// let result = serde_json::json!({"result": 5});
    /// graph.update_tool_invocation(tool_id, true, serde_json::to_string(&result)?, 150)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn update_tool_invocation(
        &self,
        tool_id: NodeId,
        success: bool,
        result_or_error: String,
        duration_ms: u64,
    ) -> Result<()> {
        // Get the tool invocation node
        let node = self
            .backend
            .get_node(&tool_id)?
            .ok_or_else(|| Error::NodeNotFound(tool_id.to_string()))?;

        if let Node::ToolInvocation(mut tool) = node {
            if success {
                let result: serde_json::Value = serde_json::from_str(&result_or_error)?;
                tool.mark_success(result, duration_ms);
            } else {
                tool.mark_failed(result_or_error, duration_ms);
            }

            // Update the node in storage
            self.backend.store_node(&Node::ToolInvocation(tool))?;
            Ok(())
        } else {
            Err(Error::InvalidNodeType {
                expected: "ToolInvocation".to_string(),
                actual: format!("{:?}", node.node_type()),
            })
        }
    }

    /// Get all tool invocations for a specific response
    ///
    /// # Arguments
    ///
    /// * `response_id` - The response node ID
    ///
    /// # Returns
    ///
    /// A vector of tool invocation nodes
    ///
    /// # Example
    ///
    /// ```no_run
    /// # use llm_memory_graph::*;
    /// # fn main() -> Result<()> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let response_id = graph.add_response(prompt_id, "Response".to_string(), TokenUsage::new(10, 20), None)?;
    /// let tools = graph.get_response_tools(response_id)?;
    /// println!("Response invoked {} tools", tools.len());
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_response_tools(&self, response_id: NodeId) -> Result<Vec<ToolInvocation>> {
        let edges = self.backend.get_outgoing_edges(&response_id)?;

        let mut tools = Vec::new();
        for edge in edges {
            if edge.edge_type == EdgeType::Invokes {
                if let Some(Node::ToolInvocation(tool)) = self.backend.get_node(&edge.to)? {
                    tools.push(tool);
                }
            }
        }

        Ok(tools)
    }

    /// Create and register an agent in the graph
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// let agent = AgentNode::new(
    ///     "Researcher".to_string(),
    ///     "research".to_string(),
    ///     vec!["web_search".to_string(), "summarize".to_string()],
    /// );
    /// let agent_id = graph.add_agent(agent)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn add_agent(&self, agent: AgentNode) -> Result<NodeId> {
        let node_id = agent.node_id;
        self.backend.store_node(&Node::Agent(agent))?;
        Ok(node_id)
    }

    /// Update an existing agent's data
    ///
    /// # Errors
    ///
    /// Returns an error if the storage update fails. Note that this method does
    /// not check that the agent already exists; it simply overwrites the node
    /// stored under the agent's ID.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let agent = AgentNode::new("Test".to_string(), "test".to_string(), vec![]);
    /// # let node_id = graph.add_agent(agent)?;
    /// let node = graph.get_node(node_id)?;
    /// if let llm_memory_graph::types::Node::Agent(mut agent) = node {
    ///     agent.update_metrics(true, 250, 150);
    ///     graph.update_agent(agent)?;
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn update_agent(&self, agent: AgentNode) -> Result<()> {
        self.backend.store_node(&Node::Agent(agent))?;
        Ok(())
    }

    /// Assign an agent to handle a prompt
    ///
    /// Creates a HandledBy edge from the prompt to the agent.
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let agent = AgentNode::new("Test".to_string(), "test".to_string(), vec![]);
    /// # let agent_node_id = graph.add_agent(agent)?;
    /// graph.assign_agent_to_prompt(prompt_id, agent_node_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn assign_agent_to_prompt(&self, prompt_id: NodeId, agent_node_id: NodeId) -> Result<()> {
        let edge = Edge::new(prompt_id, agent_node_id, EdgeType::HandledBy);
        self.backend.store_edge(&edge)?;
        Ok(())
    }

    /// Create a transfer from a response to an agent
    ///
    /// Creates a TransfersTo edge indicating agent handoff.
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode, TokenUsage};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let response_id = graph.add_response(prompt_id, "Test".to_string(), TokenUsage::new(10, 10), None)?;
    /// # let agent = AgentNode::new("Test".to_string(), "test".to_string(), vec![]);
    /// # let agent_node_id = graph.add_agent(agent)?;
    /// graph.transfer_to_agent(response_id, agent_node_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn transfer_to_agent(&self, response_id: NodeId, agent_node_id: NodeId) -> Result<()> {
        let edge = Edge::new(response_id, agent_node_id, EdgeType::TransfersTo);
        self.backend.store_edge(&edge)?;
        Ok(())
    }

    /// Get the agent assigned to handle a prompt
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - No agent is assigned
    /// - Storage retrieval fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let agent = AgentNode::new("Test".to_string(), "test".to_string(), vec![]);
    /// # let agent_id = graph.add_agent(agent)?;
    /// # graph.assign_agent_to_prompt(prompt_id, agent_id)?;
    /// let agent = graph.get_prompt_agent(prompt_id)?;
    /// println!("Handled by: {}", agent.name);
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_prompt_agent(&self, prompt_id: NodeId) -> Result<AgentNode> {
        let edges = self.backend.get_outgoing_edges(&prompt_id)?;
        for edge in edges {
            if edge.edge_type == EdgeType::HandledBy {
                if let Some(Node::Agent(agent)) = self.backend.get_node(&edge.to)? {
                    return Ok(agent);
                }
            }
        }
        Err(Error::TraversalError(
            "No agent assigned to this prompt".to_string(),
        ))
    }

    /// Get all agents a response was transferred to
    ///
    /// # Errors
    ///
    /// Returns an error if storage retrieval fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, AgentNode, TokenUsage};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// # let response_id = graph.add_response(prompt_id, "Test".to_string(), TokenUsage::new(10, 10), None)?;
    /// let agents = graph.get_agent_handoffs(response_id)?;
    /// for agent in agents {
    ///     println!("Transferred to: {}", agent.name);
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_agent_handoffs(&self, response_id: NodeId) -> Result<Vec<AgentNode>> {
        let edges = self.backend.get_outgoing_edges(&response_id)?;
        let mut agents = Vec::new();
        for edge in edges {
            if edge.edge_type == EdgeType::TransfersTo {
                if let Some(Node::Agent(agent)) = self.backend.get_node(&edge.to)? {
                    agents.push(agent);
                }
            }
        }
        Ok(agents)
    }

    /// Get a node by its ID
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The node doesn't exist
    /// - Storage retrieval fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// let node = graph.get_node(prompt_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_node(&self, node_id: NodeId) -> Result<Node> {
        self.backend
            .get_node(&node_id)?
            .ok_or_else(|| Error::NodeNotFound(node_id.to_string()))
    }

    /// Add a custom edge between two nodes
    ///
    /// # Errors
    ///
    /// Returns an error if storage operations fail.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, EdgeType};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt1 = graph.add_prompt(session.id, "Test1".to_string(), None)?;
    /// # let prompt2 = graph.add_prompt(session.id, "Test2".to_string(), None)?;
    /// graph.add_edge(prompt1, prompt2, EdgeType::Follows)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn add_edge(&self, from: NodeId, to: NodeId, edge_type: EdgeType) -> Result<()> {
        let edge = Edge::new(from, to, edge_type);
        self.backend.store_edge(&edge)?;
        Ok(())
    }

    /// Get all edges originating from a node
    ///
    /// # Errors
    ///
    /// Returns an error if storage retrieval fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// let edges = graph.get_outgoing_edges(prompt_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_outgoing_edges(&self, node_id: NodeId) -> Result<Vec<Edge>> {
        self.backend.get_outgoing_edges(&node_id)
    }

    /// Get all edges pointing to a node
    ///
    /// # Errors
    ///
    /// Returns an error if storage retrieval fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let prompt_id = graph.add_prompt(session.id, "Test".to_string(), None)?;
    /// let edges = graph.get_incoming_edges(prompt_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_incoming_edges(&self, node_id: NodeId) -> Result<Vec<Edge>> {
        self.backend.get_incoming_edges(&node_id)
    }

    /// Get all nodes in a session
    ///
    /// # Errors
    ///
    /// Returns an error if storage retrieval fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// let nodes = graph.get_session_nodes(session.id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_session_nodes(&self, session_id: SessionId) -> Result<Vec<Node>> {
        self.backend.get_session_nodes(&session_id)
    }

    /// Flush all pending writes to disk
    ///
    /// # Errors
    ///
    /// Returns an error if the flush operation fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// graph.flush()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn flush(&self) -> Result<()> {
        self.backend.flush()
    }

    /// Get storage statistics
    ///
    /// Returns information about node count, edge count, storage size, etc.
    ///
    /// # Errors
    ///
    /// Returns an error if statistics cannot be retrieved.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// let stats = graph.stats()?;
    /// println!("Nodes: {}, Edges: {}", stats.node_count, stats.edge_count);
    /// # Ok(())
    /// # }
    /// ```
    pub fn stats(&self) -> Result<crate::storage::StorageStats> {
        self.backend.stats()
    }

    // ===== Template Management Methods =====

    /// Create and store a new prompt template
    ///
    /// Templates are versioned prompt structures that can be instantiated with variables.
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate, VariableSpec};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// let variables = vec![
    ///     VariableSpec::new(
    ///         "user_input".to_string(),
    ///         "String".to_string(),
    ///         true,
    ///         "User's question".to_string(),
    ///     ),
    /// ];
    /// let template = PromptTemplate::new(
    ///     "Question Answering".to_string(),
    ///     "Answer this question: {{user_input}}".to_string(),
    ///     variables,
    /// );
    /// let template_id = graph.create_template(template)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn create_template(&self, template: PromptTemplate) -> Result<TemplateId> {
        let template_id = template.id;
        self.backend.store_node(&Node::Template(template))?;
        Ok(template_id)
    }

    /// Get a template by its ID
    ///
    /// Note: looking a template up by `TemplateId` requires a template index that
    /// the storage backend does not provide yet, so this method currently always
    /// returns an error. Use `get_template_by_node_id` instead.
    ///
    /// # Errors
    ///
    /// Always returns `Error::NodeNotFound` until template indexing is implemented.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate, VariableSpec};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let template = PromptTemplate::new("Test".to_string(), "{{x}}".to_string(), vec![]);
    /// # let template_id = graph.create_template(template)?;
    /// // Lookup by TemplateId is not yet supported and returns an error.
    /// assert!(graph.get_template(template_id).is_err());
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_template(&self, _template_id: TemplateId) -> Result<PromptTemplate> {
        // Templates are keyed by their node ID in storage, not by `TemplateId`.
        // Resolving a `TemplateId` would require either iterating all nodes or
        // maintaining a dedicated template index in the storage backend (TODO).
        Err(Error::NodeNotFound(
            "Template lookup by TemplateId not yet fully implemented - use get_template_by_node_id instead".to_string()
        ))
    }

    /// Get a template by its node ID
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The node doesn't exist or is not a template
    /// - Storage retrieval fails
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let template = PromptTemplate::new("Test".to_string(), "{{x}}".to_string(), vec![]);
    /// # let node_id = template.node_id;
    /// # graph.create_template(template)?;
    /// let template = graph.get_template_by_node_id(node_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn get_template_by_node_id(&self, node_id: NodeId) -> Result<PromptTemplate> {
        let node = self
            .backend
            .get_node(&node_id)?
            .ok_or_else(|| Error::NodeNotFound(format!("Node {} not found", node_id)))?;

        match node {
            Node::Template(template) => Ok(template),
            _ => Err(Error::ValidationError(format!(
                "Node {} is not a template",
                node_id
            ))),
        }
    }

    /// Update an existing template
    ///
    /// This will store the updated template data. Note that the template's
    /// version should be bumped appropriately before calling this method.
    ///
    /// # Errors
    ///
    /// Returns an error if storage update fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate, VersionLevel};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let template = PromptTemplate::new("Test".to_string(), "{{x}}".to_string(), vec![]);
    /// # let node_id = template.node_id;
    /// # graph.create_template(template)?;
    /// let mut template = graph.get_template_by_node_id(node_id)?;
    /// template.record_usage();
    /// template.bump_version(VersionLevel::Patch);
    /// graph.update_template(template)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn update_template(&self, template: PromptTemplate) -> Result<()> {
        self.backend.store_node(&Node::Template(template))?;
        Ok(())
    }

    /// Link a prompt to the template it was instantiated from
    ///
    /// Creates an Instantiates edge from the prompt to the template.
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate, VariableSpec};
    /// # use std::collections::HashMap;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let session = graph.create_session()?;
    /// # let template = PromptTemplate::new("Test".to_string(), "Hello {{name}}".to_string(), vec![]);
    /// # let template_node_id = template.node_id;
    /// # graph.create_template(template.clone())?;
    /// let mut values = HashMap::new();
    /// values.insert("name".to_string(), "World".to_string());
    /// let prompt_text = template.instantiate(&values)?;
    /// let prompt_id = graph.add_prompt(session.id, prompt_text, None)?;
    /// graph.link_prompt_to_template(prompt_id, template_node_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn link_prompt_to_template(
        &self,
        prompt_id: NodeId,
        template_node_id: NodeId,
    ) -> Result<()> {
        let edge = Edge::new(prompt_id, template_node_id, EdgeType::Instantiates);
        self.backend.store_edge(&edge)?;
        Ok(())
    }

    /// Create a new template that inherits from a parent template
    ///
    /// This creates the new template and automatically establishes an Inherits edge.
    ///
    /// # Errors
    ///
    /// Returns an error if storage fails.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// # use llm_memory_graph::{MemoryGraph, Config, PromptTemplate, VariableSpec};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// # let graph = MemoryGraph::open(Config::default())?;
    /// # let parent = PromptTemplate::new("Parent".to_string(), "Base: {{x}}".to_string(), vec![]);
    /// # let parent_id = parent.id;
    /// # let parent_node_id = parent.node_id;
    /// # graph.create_template(parent)?;
    /// let child = PromptTemplate::from_parent(
    ///     parent_id,
    ///     "Child Template".to_string(),
    ///     "Extended: {{x}} with {{y}}".to_string(),
    ///     vec![],
    /// );
    /// let child_id = graph.create_template_from_parent(child, parent_node_id)?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn create_template_from_parent(
        &self,
        template: PromptTemplate,
        parent_node_id: NodeId,
    ) -> Result<TemplateId> {
        let template_id = template.id;
        let template_node_id = template.node_id;

        // Store the new template
        self.backend.store_node(&Node::Template(template))?;

        // Create Inherits edge from child to parent
        let edge = Edge::new(template_node_id, parent_node_id, EdgeType::Inherits);
        self.backend.store_edge(&edge)?;

        Ok(template_id)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn test_create_graph() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let stats = graph.stats().unwrap();
        assert_eq!(stats.node_count, 0);
    }

    #[test]
    fn test_create_session() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let retrieved = graph.get_session(session.id).unwrap();

        assert_eq!(session.id, retrieved.id);
    }

    #[test]
    fn test_add_prompt() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let prompt_id = graph
            .add_prompt(session.id, "Test prompt".to_string(), None)
            .unwrap();

        let node = graph.get_node(prompt_id).unwrap();
        assert!(matches!(node, Node::Prompt(_)));
    }

    #[test]
    fn test_add_response() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let prompt_id = graph
            .add_prompt(session.id, "Test prompt".to_string(), None)
            .unwrap();

        let usage = TokenUsage::new(10, 20);
        let response_id = graph
            .add_response(prompt_id, "Test response".to_string(), usage, None)
            .unwrap();

        let node = graph.get_node(response_id).unwrap();
        assert!(matches!(node, Node::Response(_)));
    }

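    // A hedged sketch of a tool-invocation round trip, mirroring the doc examples
    // for `add_tool_invocation`, `get_response_tools`, and `update_tool_invocation`
    // above; it only asserts on behaviour those docs describe.
    #[test]
    fn test_tool_invocation_roundtrip() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let prompt_id = graph
            .add_prompt(session.id, "Add two numbers".to_string(), None)
            .unwrap();
        let response_id = graph
            .add_response(prompt_id, "Calling calculator".to_string(), TokenUsage::new(5, 5), None)
            .unwrap();

        let params = serde_json::json!({"operation": "add", "a": 2, "b": 3});
        let tool = ToolInvocation::new(response_id, "calculator".to_string(), params);
        let tool_id = graph.add_tool_invocation(tool).unwrap();

        // The Invokes edge should make the tool reachable from the response.
        let tools = graph.get_response_tools(response_id).unwrap();
        assert_eq!(tools.len(), 1);
        assert_eq!(tools[0].id, tool_id);

        // Record a successful execution and confirm the node is still a ToolInvocation.
        let result = serde_json::json!({"result": 5});
        graph
            .update_tool_invocation(tool_id, true, serde_json::to_string(&result).unwrap(), 150)
            .unwrap();
        let node = graph.get_node(tool_id).unwrap();
        assert!(matches!(node, Node::ToolInvocation(_)));
    }
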
    #[test]
    fn test_conversation_chain() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();

        // Add first prompt
        let prompt1 = graph
            .add_prompt(session.id, "First prompt".to_string(), None)
            .unwrap();
        let usage1 = TokenUsage::new(5, 10);
        let _response1 = graph
            .add_response(prompt1, "First response".to_string(), usage1, None)
            .unwrap();

        // Add second prompt
        let prompt2 = graph
            .add_prompt(session.id, "Second prompt".to_string(), None)
            .unwrap();
        let usage2 = TokenUsage::new(6, 12);
        let _response2 = graph
            .add_response(prompt2, "Second response".to_string(), usage2, None)
            .unwrap();

        // Verify session has all nodes
        let nodes = graph.get_session_nodes(session.id).unwrap();
        assert!(nodes.len() >= 4); // session + 2 prompts + 2 responses

        // Verify edges exist
        let outgoing = graph.get_outgoing_edges(prompt2).unwrap();
        assert!(!outgoing.is_empty());
    }

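    // A hedged sketch of agent assignment and handoff, following the doc examples
    // for `add_agent`, `assign_agent_to_prompt`, `transfer_to_agent`, and
    // `get_prompt_agent`; the `AgentNode::new(name, role, capabilities)` signature
    // is taken from those examples.
    #[test]
    fn test_agent_assignment_and_handoff() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let prompt_id = graph
            .add_prompt(session.id, "Research this topic".to_string(), None)
            .unwrap();
        let response_id = graph
            .add_response(prompt_id, "Delegating".to_string(), TokenUsage::new(5, 5), None)
            .unwrap();

        let agent = AgentNode::new(
            "Researcher".to_string(),
            "research".to_string(),
            vec!["web_search".to_string()],
        );
        let agent_node_id = graph.add_agent(agent).unwrap();

        // Assign the agent to the prompt and hand the response off to it.
        graph.assign_agent_to_prompt(prompt_id, agent_node_id).unwrap();
        graph.transfer_to_agent(response_id, agent_node_id).unwrap();

        let assigned = graph.get_prompt_agent(prompt_id).unwrap();
        assert_eq!(assigned.name, "Researcher");

        let handoffs = graph.get_agent_handoffs(response_id).unwrap();
        assert_eq!(handoffs.len(), 1);
    }
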
    #[test]
    fn test_session_not_found() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let fake_session = SessionId::new();
        let result = graph.get_session(fake_session);

        assert!(result.is_err());
        assert!(matches!(result.unwrap_err(), Error::SessionNotFound(_)));
    }

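    // A hedged sketch covering the generic edge helpers, mirroring the doc example
    // for `add_edge`; it only inspects the `edge_type` and `to` fields that the
    // engine code above already relies on.
    #[test]
    fn test_custom_edge_lookup() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let session = graph.create_session().unwrap();
        let prompt1 = graph
            .add_prompt(session.id, "First".to_string(), None)
            .unwrap();
        let prompt2 = graph
            .add_prompt(session.id, "Second".to_string(), None)
            .unwrap();

        graph.add_edge(prompt2, prompt1, EdgeType::Follows).unwrap();

        // The edge should be visible from both endpoints.
        let outgoing = graph.get_outgoing_edges(prompt2).unwrap();
        assert!(outgoing
            .iter()
            .any(|e| e.edge_type == EdgeType::Follows && e.to == prompt1));
        let incoming = graph.get_incoming_edges(prompt1).unwrap();
        assert!(incoming.iter().any(|e| e.edge_type == EdgeType::Follows));
    }
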
    #[test]
    fn test_node_not_found() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let fake_node = NodeId::new();
        let result = graph.get_node(fake_node);

        assert!(result.is_err());
        assert!(matches!(result.unwrap_err(), Error::NodeNotFound(_)));
    }

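    // A hedged sketch of a template round trip, based on the doc examples for
    // `create_template` and `get_template_by_node_id`; the
    // `PromptTemplate::new(name, content, variables)` signature comes from those
    // examples.
    #[test]
    fn test_template_roundtrip() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let template = PromptTemplate::new(
            "Greeting".to_string(),
            "Hello {{name}}".to_string(),
            vec![],
        );
        let node_id = template.node_id;
        let template_id = graph.create_template(template).unwrap();

        let retrieved = graph.get_template_by_node_id(node_id).unwrap();
        assert_eq!(retrieved.id, template_id);

        // Lookup by TemplateId is not implemented yet and should return an error.
        assert!(graph.get_template(template_id).is_err());
    }
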
    #[test]
    fn test_session_with_metadata() {
        let dir = tempdir().unwrap();
        let config = Config::new(dir.path());
        let graph = MemoryGraph::open(config).unwrap();

        let mut metadata = HashMap::new();
        metadata.insert("user".to_string(), "alice".to_string());

        let session = graph.create_session_with_metadata(metadata).unwrap();
        let retrieved = graph.get_session(session.id).unwrap();

        assert_eq!(retrieved.metadata.get("user"), Some(&"alice".to_string()));
    }
}