lmm_agent/agent.rs
1// Copyright 2026 Mahmoud Harmouch.
2//
3// Licensed under the MIT license
4// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
5// option. This file may not be copied, modified, or distributed
6// except according to those terms.
7
8//! # `LmmAgent` - the core agent struct.
9//!
10//! `LmmAgent` is the batteries-included foundation for every custom agent.
11//! It holds all agent state (hot memory, long-term memory, tools, planner,
12//! reflection, scheduler, ...) and provides symbolic text generation powered
13//! by `lmm`'s [`TextPredictor`] plus optional DuckDuckGo knowledge enrichment.
14//!
15//! ## Builder pattern
16//!
17//! ```rust
18//! use lmm_agent::agent::LmmAgent;
19//!
20//! let agent = LmmAgent::builder()
21//! .persona("Research Assistant")
22//! .behavior("Summarise the Rust ecosystem.")
23//! .build();
24//!
25//! assert_eq!(agent.persona.as_str(), "Research Assistant");
26//! assert_eq!(agent.behavior.as_str(), "Summarise the Rust ecosystem.");
27//! ```
28//!
29//! ## Attribution
30//!
31//! Adapted from the `autogpt` project's `agents/agent.rs`:
32//! <https://github.com/wiseaidotdev/autogpt/blob/main/autogpt/src/agents/agent.rs>
33
34use crate::cognition::drive::InternalDrive;
35use crate::cognition::knowledge::{KnowledgeIndex, KnowledgeSource, ingest as knowledge_ingest};
36use crate::cognition::learning::engine::LearningEngine;
37use crate::cognition::learning::q_table::{ActionKey, QTable};
38use crate::cognition::learning::store::LearningStore;
39use crate::traits::agent::Agent;
40use crate::types::{
41 Capability, ContextManager, Knowledge, Message, Planner, Profile, Reflection, Status, Task,
42 TaskScheduler, ThinkResult, Tool,
43};
44use anyhow::Result;
45use lmm::predict::TextPredictor;
46use std::borrow::Cow;
47use std::collections::HashSet;
48
49#[cfg(feature = "net")]
50use duckduckgo::browser::Browser;
51#[cfg(feature = "net")]
52use duckduckgo::user_agents::get as get_ua;
53
54use crate::cognition::r#loop::ThinkLoop;
55use crate::cognition::search::SearchOracle;
56
57// LmmAgent struct
58
/// The core agent type.
///
/// Use [`LmmAgent::builder()`] for fluent construction, or
/// [`LmmAgent::new()`] for the quick two-argument form.
///
/// # Examples
///
/// ```rust
/// use lmm_agent::agent::LmmAgent;
///
/// let agent = LmmAgent::builder()
///     .persona("Research Agent")
///     .behavior("Research quantum computing.")
///     .build();
///
/// assert_eq!(agent.persona.as_str(), "Research Agent");
///
/// let agent2 = LmmAgent::new("Scientist".into(), "Do science.".into());
/// assert_eq!(agent2.persona.as_str(), "Scientist");
/// ```
#[derive(Debug, Clone, Default)]
pub struct LmmAgent {
    /// Unique identifier for this agent instance (auto-generated UUIDv4).
    pub id: String,

    /// The primary mission statement for this agent.
    pub persona: String,

    /// The role or behavior label (e.g. `"Research Assistant"`).
    pub behavior: String,

    /// Current lifecycle state.
    pub status: Status,

    /// Hot memory - recent messages kept in RAM.
    pub memory: Vec<Message>,

    /// Long-term memory - persisted between task executions (in-memory store).
    pub long_term_memory: Vec<Message>,

    /// Structured knowledge facts for reasoning.
    pub knowledge: Knowledge,

    /// Callable tools available to this agent.
    pub tools: Vec<Tool>,

    /// Optional goal planner.
    pub planner: Option<Planner>,

    /// Self-reflection / evaluation module.
    pub reflection: Option<Reflection>,

    /// Time-based task scheduler.
    pub scheduler: Option<TaskScheduler>,

    /// Personality traits and behavioural profile.
    pub profile: Profile,

    /// Recent-message context window.
    pub context: ContextManager,

    /// Capabilities the agent possesses.
    pub capabilities: HashSet<Capability>,

    /// Active task queue.
    pub tasks: Vec<Task>,

    /// Queryable knowledge base built from ingested documents or URLs.
    pub knowledge_index: KnowledgeIndex,

    /// Optional HELM learning engine for in-environment lifelong learning.
    pub learning_engine: Option<LearningEngine>,

    /// Internalized drive system for intrinsic motivation signals.
    pub internal_drive: InternalDrive,
}
135
136// LmmAgentBuilder
137
/// Builder for [`LmmAgent`].
///
/// Obtain via [`LmmAgent::builder()`].
///
/// # Examples
///
/// ```rust
/// use lmm_agent::agent::LmmAgent;
/// use lmm_agent::types::{Message, Planner, Goal};
///
/// let agent = LmmAgent::builder()
///     .persona("Test agent.")
///     .behavior("Tester")
///     .memory(vec![Message::new("user", "Hi")])
///     .planner(Planner {
///         current_plan: vec![Goal {
///             description: "Say hello.".into(),
///             priority: 0,
///             completed: false,
///         }],
///     })
///     .build();
///
/// assert_eq!(agent.persona.as_str(), "Test agent.");
/// assert_eq!(agent.memory.len(), 1);
/// ```
#[derive(Default)]
pub struct LmmAgentBuilder {
    // Every field starts as `None` ("never set"); `build()` substitutes the
    // documented default for each field left unset.
    id: Option<String>,
    persona: Option<String>,
    behavior: Option<String>,
    status: Option<Status>,
    memory: Option<Vec<Message>>,
    long_term_memory: Option<Vec<Message>>,
    knowledge: Option<Knowledge>,
    tools: Option<Vec<Tool>>,
    // The double-`Option` fields distinguish "never set" (outer `None`, so
    // `build()` applies a default) from "explicitly set to `None`" (outer
    // `Some(None)`, which suppresses the default).
    planner: Option<Option<Planner>>,
    reflection: Option<Option<Reflection>>,
    scheduler: Option<Option<TaskScheduler>>,
    profile: Option<Profile>,
    context: Option<ContextManager>,
    capabilities: Option<HashSet<Capability>>,
    tasks: Option<Vec<Task>>,
    knowledge_index: Option<KnowledgeIndex>,
    learning_engine: Option<Option<LearningEngine>>,
    internal_drive: Option<InternalDrive>,
}
185
186impl LmmAgentBuilder {
187 /// Sets the agent's unique identifier (default: auto-generated UUIDv4).
188 pub fn id(mut self, id: impl Into<String>) -> Self {
189 self.id = Some(id.into());
190 self
191 }
192
193 /// Sets the agent's persona **(required)**.
194 pub fn persona(mut self, persona: impl Into<String>) -> Self {
195 self.persona = Some(persona.into());
196 self
197 }
198
199 /// Sets the agent's behavior / role label **(required)**.
200 pub fn behavior(mut self, behavior: impl Into<String>) -> Self {
201 self.behavior = Some(behavior.into());
202 self
203 }
204
205 /// Sets the initial [`Status`] (default: [`Status::Idle`]).
206 pub fn status(mut self, status: Status) -> Self {
207 self.status = Some(status);
208 self
209 }
210
211 /// Sets the hot memory (default: empty).
212 pub fn memory(mut self, memory: Vec<Message>) -> Self {
213 self.memory = Some(memory);
214 self
215 }
216
217 /// Sets the long-term memory (default: empty).
218 pub fn long_term_memory(mut self, ltm: Vec<Message>) -> Self {
219 self.long_term_memory = Some(ltm);
220 self
221 }
222
223 /// Sets the knowledge base (default: empty).
224 pub fn knowledge(mut self, knowledge: Knowledge) -> Self {
225 self.knowledge = Some(knowledge);
226 self
227 }
228
229 /// Sets the tool list (default: empty).
230 pub fn tools(mut self, tools: Vec<Tool>) -> Self {
231 self.tools = Some(tools);
232 self
233 }
234
235 /// Sets an optional planner (default: empty planner).
236 pub fn planner(mut self, planner: impl Into<Option<Planner>>) -> Self {
237 self.planner = Some(planner.into());
238 self
239 }
240
241 /// Sets an optional reflection module (default: default reflection).
242 pub fn reflection(mut self, reflection: impl Into<Option<Reflection>>) -> Self {
243 self.reflection = Some(reflection.into());
244 self
245 }
246
247 /// Sets an optional task scheduler (default: empty scheduler).
248 pub fn scheduler(mut self, scheduler: impl Into<Option<TaskScheduler>>) -> Self {
249 self.scheduler = Some(scheduler.into());
250 self
251 }
252
253 /// Sets the profile (default: name = behavior, no traits).
254 pub fn profile(mut self, profile: Profile) -> Self {
255 self.profile = Some(profile);
256 self
257 }
258
259 /// Sets the context manager (default: empty).
260 pub fn context(mut self, context: ContextManager) -> Self {
261 self.context = Some(context);
262 self
263 }
264
265 /// Sets the capability set (default: empty).
266 pub fn capabilities(mut self, capabilities: HashSet<Capability>) -> Self {
267 self.capabilities = Some(capabilities);
268 self
269 }
270
271 /// Sets the task queue (default: empty).
272 pub fn tasks(mut self, tasks: Vec<Task>) -> Self {
273 self.tasks = Some(tasks);
274 self
275 }
276
277 /// Pre-populates the knowledge index with an existing [`KnowledgeIndex`].
278 pub fn knowledge_index(mut self, index: KnowledgeIndex) -> Self {
279 self.knowledge_index = Some(index);
280 self
281 }
282
283 /// Attaches a [`LearningEngine`] for in-environment lifelong learning.
284 pub fn learning_engine(mut self, engine: impl Into<Option<LearningEngine>>) -> Self {
285 self.learning_engine = Some(engine.into());
286 self
287 }
288
289 /// Overrides the default [`InternalDrive`] configuration.
290 pub fn internal_drive(mut self, drive: InternalDrive) -> Self {
291 self.internal_drive = Some(drive);
292 self
293 }
294
295 /// Constructs the [`LmmAgent`].
296 ///
297 /// # Panics
298 ///
299 /// Panics if `persona` or `behavior` were not set.
300 pub fn build(self) -> LmmAgent {
301 let persona = self
302 .persona
303 .expect("LmmAgentBuilder: `persona` is required");
304 let behavior = self
305 .behavior
306 .expect("LmmAgentBuilder: `behavior` is required");
307 let profile = self.profile.unwrap_or_else(|| Profile {
308 name: behavior.clone().into(),
309 traits: vec![],
310 behavior_script: None,
311 });
312
313 LmmAgent {
314 id: self.id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()),
315 persona,
316 behavior,
317 status: self.status.unwrap_or_default(),
318 memory: self.memory.unwrap_or_default(),
319 long_term_memory: self.long_term_memory.unwrap_or_default(),
320 knowledge: self.knowledge.unwrap_or_default(),
321 tools: self.tools.unwrap_or_default(),
322 planner: self.planner.unwrap_or_else(|| Some(Planner::default())),
323 reflection: self
324 .reflection
325 .unwrap_or_else(|| Some(Reflection::default())),
326 scheduler: self
327 .scheduler
328 .unwrap_or_else(|| Some(TaskScheduler::default())),
329 profile,
330 context: self.context.unwrap_or_default(),
331 capabilities: self.capabilities.unwrap_or_default(),
332 tasks: self.tasks.unwrap_or_default(),
333 knowledge_index: self.knowledge_index.unwrap_or_default(),
334 learning_engine: self.learning_engine.unwrap_or(None),
335 internal_drive: self.internal_drive.unwrap_or_default(),
336 }
337 }
338}
339
340// Inherent methods
341
342impl LmmAgent {
    /// Returns a new [`LmmAgentBuilder`].
    ///
    /// The builder exposes a fluent setter named after each field (e.g.
    /// `.persona(..)`, `.memory(..)`) and `.build()` produces the final
    /// [`LmmAgent`].
    pub fn builder() -> LmmAgentBuilder {
        LmmAgentBuilder::default()
    }
350
351 /// Constructs an [`LmmAgent`] with the given persona and behavior;
352 /// every other field is set to its sensible default.
353 ///
354 /// # Examples
355 ///
356 /// ```rust
357 /// use lmm_agent::agent::LmmAgent;
358 ///
359 /// let agent = LmmAgent::new("Researcher".into(), "Research Rust.".into());
360 /// assert_eq!(agent.behavior.as_str(), "Research Rust.");
361 /// ```
362 pub fn new(
363 persona: std::borrow::Cow<'static, str>,
364 behavior: std::borrow::Cow<'static, str>,
365 ) -> Self {
366 LmmAgent::builder()
367 .persona(persona.into_owned())
368 .behavior(behavior.into_owned())
369 .build()
370 }
371
    /// Appends a [`Message`] to the agent's hot memory.
    ///
    /// Messages only accumulate here; this method performs no pruning.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use lmm_agent::agent::LmmAgent;
    /// use lmm_agent::types::Message;
    ///
    /// let mut agent = LmmAgent::new("Tester".into(), "Test.".into());
    /// agent.add_message(Message::new("user", "Hello"));
    /// assert_eq!(agent.memory.len(), 1);
    /// ```
    pub fn add_message(&mut self, message: Message) {
        self.memory.push(message);
    }
387
    /// Appends a [`Message`] to the agent's long-term memory.
    ///
    /// Long-term memory is a plain in-memory `Vec`; entries persist only for
    /// the lifetime of this agent value.
    pub fn add_ltm_message(&mut self, message: Message) {
        self.long_term_memory.push(message);
    }
392
393 /// Marks a goal as completed by its description substring.
394 ///
395 /// Returns `true` if a matching goal was found and updated.
396 pub fn complete_goal(&mut self, description_substr: &str) -> bool {
397 if let Some(plan) = self.planner.as_mut() {
398 for goal in &mut plan.current_plan {
399 if goal.description.contains(description_substr) {
400 goal.completed = true;
401 return true;
402 }
403 }
404 }
405 false
406 }
407
    /// Generates a textual response to `request` using [`lmm::predict::TextPredictor`].
    ///
    /// `TextPredictor` fits a tone trajectory and a rhythm trajectory over the
    /// input tokens using symbolic regression, then selects continuation words
    /// from compile-time lexical pools: entirely deterministic, no LLM API
    /// required.
    ///
    /// When the `net` feature is enabled, the seed is enriched with DuckDuckGo
    /// search snippets before feeding it to the predictor.
    ///
    /// Regardless of the path taken, the request and the produced answer are
    /// appended to hot memory as a `"user"` / `"assistant"` message pair.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #[tokio::main]
    /// async fn main() {
    ///     use lmm_agent::agent::LmmAgent;
    ///     let mut agent = LmmAgent::new("Tester".into(), "Rust is fast.".into());
    ///     let result = agent.generate("the universe reveals its truth").await;
    ///     assert!(result.is_ok());
    ///     assert!(!result.unwrap().is_empty());
    /// }
    /// ```
    pub async fn generate(&mut self, request: &str) -> Result<String> {
        // Fast path: if the knowledge index can answer directly, return the
        // extractive answer and skip search/prediction entirely.
        if !self.knowledge_index.is_empty()
            && let Some(answer) = self.knowledge_index.answer(request, 5)
        {
            self.add_message(Message::new("user", request.to_string()));
            self.add_message(Message::new("assistant", answer.clone()));
            return Ok(answer);
        }

        #[cfg(feature = "net")]
        let result = {
            // Search failures degrade to an empty corpus rather than erroring.
            let corpus = self.search(request, 5).await.unwrap_or_default();
            if let Some(sentence) = Self::best_sentence(&corpus, request) {
                // A search sentence with enough token overlap is used verbatim.
                sentence
            } else {
                // Otherwise seed the predictor: with the corpus if we have one,
                // or with behavior-derived domain words as a fallback.
                let seed = if corpus.is_empty() {
                    Self::domain_seed(request, &self.behavior)
                } else {
                    format!("{request} {corpus}")
                };
                Self::symbolic_continuation(seed)
            }
        };

        #[cfg(not(feature = "net"))]
        let result = {
            // Offline build: predictor seeded from the behavior string only.
            let seed = Self::domain_seed(request, &self.behavior);
            Self::symbolic_continuation(seed)
        };

        self.add_message(Message::new("user", request.to_string()));
        self.add_message(Message::new("assistant", result.clone()));
        Ok(result)
    }
464
465 fn domain_seed(request: &str, behavior: &str) -> String {
466 const STOP: &[&str] = &[
467 "a", "an", "the", "and", "or", "of", "to", "in", "is", "are", "be", "for", "on", "at",
468 "by", "as", "it", "its",
469 ];
470 let domain_words: Vec<&str> = behavior
471 .split_whitespace()
472 .filter(|w| {
473 let lw = w.to_ascii_lowercase();
474 !STOP.contains(&lw.as_str()) && w.len() > 3
475 })
476 .take(6)
477 .collect();
478
479 let mut seed = request.to_string();
480 if !domain_words.is_empty() {
481 seed.push(' ');
482 seed.push_str(&domain_words.join(" "));
483 }
484 if seed.split_whitespace().count() < 2 {
485 seed.push_str(" and");
486 }
487 seed
488 }
489
490 /// Runs the symbolic predictor on a seed and returns the continuation.
491 fn symbolic_continuation(seed: String) -> String {
492 let mut predictor = TextPredictor::new(20, 40, 3);
493 if let Ok(lex) = lmm::lexicon::Lexicon::load_system() {
494 predictor = predictor.with_lexicon(lex);
495 }
496 predictor
497 .predict_continuation(&seed, 120)
498 .map(|c| format!("{} {}", seed.trim(), c.continuation.trim()))
499 .unwrap_or(seed)
500 }
501
502 /// Returns the sentence from `corpus` with the highest token overlap with `query`.
503 /// Returns `None` if no sentence has meaningful overlap.
504 #[cfg(feature = "net")]
505 fn best_sentence(corpus: &str, query: &str) -> Option<String> {
506 use std::collections::HashSet;
507 let query_tokens: HashSet<String> = query
508 .split_whitespace()
509 .map(|w| w.to_ascii_lowercase())
510 .collect();
511
512 corpus
513 .split(['.', '!', '?'])
514 .map(str::trim)
515 .filter(|s| s.split_whitespace().count() >= 5)
516 .map(|sentence| {
517 let sentence_tokens: HashSet<String> = sentence
518 .split_whitespace()
519 .map(|w| w.to_ascii_lowercase())
520 .collect();
521 let overlap = query_tokens.intersection(&sentence_tokens).count();
522 (overlap, sentence.to_string())
523 })
524 .filter(|(overlap, _)| *overlap >= 2)
525 .max_by_key(|(overlap, _)| *overlap)
526 .map(|(_, sentence)| sentence)
527 }
528
529 /// Searches DuckDuckGo for `query` (requires `net` feature).
530 #[cfg(feature = "net")]
531 pub async fn search(&self, query: &str, limit: usize) -> Result<String> {
532 let browser = Browser::new();
533 let ua = get_ua("firefox").unwrap_or("Mozilla/5.0");
534 let results = browser.lite_search(query, "wt-wt", Some(limit), ua).await?;
535
536 let corpus = results
537 .iter()
538 .filter_map(|r| {
539 let snippet = r.snippet.trim();
540 if !snippet.is_empty() {
541 Some(snippet.to_string())
542 } else if !r.title.trim().is_empty() {
543 Some(r.title.trim().to_string())
544 } else {
545 None
546 }
547 })
548 .collect::<Vec<_>>()
549 .join(" ");
550
551 Ok(corpus)
552 }
553
    /// No-op search when the `net` feature is disabled.
    ///
    /// Always succeeds with an empty corpus so callers (e.g. `generate`) can
    /// treat both builds uniformly.
    #[cfg(not(feature = "net"))]
    pub async fn search(&self, _query: &str, _limit: usize) -> Result<String> {
        Ok(String::new())
    }
559
560 /// Runs the closed-loop **ThinkLoop** reasoning cycle toward `goal`.
561 ///
562 /// The agent transitions through `Status::Thinking` and back to
563 /// `Status::Completed`. At the end of the run the cold-store archive is
564 /// serialised into the agent's `long_term_memory` so knowledge persists
565 /// across multiple `think()` calls.
566 ///
567 /// ## Parameters
568 ///
569 /// * `goal` - natural-language task description (the setpoint).
570 ///
571 /// Defaults used internally:
572 /// - `max_iterations = 10`
573 /// - `convergence_threshold = 0.25`
574 /// - `k_proportional = 1.0`
575 /// - `k_integral = 0.05`
576 ///
577 /// Use [`LmmAgent::think_with`] for fine-grained control.
578 ///
579 /// # Examples
580 ///
581 /// ```rust
582 /// #[tokio::main]
583 /// async fn main() {
584 /// use lmm_agent::agent::LmmAgent;
585 ///
586 /// let mut agent = LmmAgent::new("Researcher".into(), "Explore Rust.".into());
587 /// let result = agent.think("What is Rust ownership?").await.unwrap();
588 /// assert!(result.steps > 0);
589 /// assert!(result.final_error >= 0.0 && result.final_error <= 1.0);
590 /// }
591 /// ```
592 pub async fn think(&mut self, goal: &str) -> Result<ThinkResult> {
593 self.think_with(goal, 10, 0.25, 1.0, 0.05).await
594 }
595
    /// Like [`think`](Self::think) but exposes all ThinkLoop parameters.
    ///
    /// # Arguments
    ///
    /// * `goal` - natural-language goal / setpoint.
    /// * `max_iterations` - maximum feedback loop iterations (≥ 1).
    /// * `convergence_threshold` - Jaccard error threshold ∈ [0, 1].
    /// * `k_proportional` - proportional gain Kp.
    /// * `k_integral` - integral gain Ki.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #[tokio::main]
    /// async fn main() {
    ///     use lmm_agent::agent::LmmAgent;
    ///
    ///     let mut agent = LmmAgent::new("Researcher".into(), "Explore Rust.".into());
    ///     let result = agent
    ///         .think_with("Rust memory safety", 5, 0.3, 1.0, 0.05)
    ///         .await
    ///         .unwrap();
    ///     assert!(result.steps <= 5);
    /// }
    /// ```
    pub async fn think_with(
        &mut self,
        goal: &str,
        max_iterations: usize,
        convergence_threshold: f64,
        k_proportional: f64,
        k_integral: f64,
    ) -> Result<ThinkResult> {
        self.status = Status::Thinking;

        let mut oracle = SearchOracle::new(5);
        let mut lp = ThinkLoop::new(
            goal,
            max_iterations,
            convergence_threshold,
            k_proportional,
            k_integral,
        );
        let result = lp.run(&mut oracle).await;

        // Replay the loop's feedback signals into the learning engine as
        // (state, action, next_state) transitions, chaining states so each
        // observation becomes the previous state for the next step.
        if let Some(engine) = &mut self.learning_engine {
            let mut prev_state = QTable::state_key(goal);
            for signal in &result.signals {
                let next_state = QTable::state_key(&signal.observation);
                let action = engine.recommend_action(prev_state, goal, signal.step);
                engine.record_step(signal, prev_state, action, next_state);
                prev_state = next_state;
            }
            // NOTE(review): averages signal rewards over `result.steps`;
            // assumes `signals.len() == steps` — confirm in ThinkLoop.
            let avg_reward = if result.steps > 0 {
                result.signals.iter().map(|s| s.reward).sum::<f64>() / result.steps as f64
            } else {
                0.0
            };
            engine.end_of_episode(&lp.cold, &mut self.knowledge_index, goal, avg_reward);
        }

        // Persist the cold-store archive so later think() calls can reuse it.
        for entry in lp.cold.all() {
            self.long_term_memory
                .push(Message::new("think", entry.content.clone()));
        }

        // Record the goal and a one-line summary in hot memory.
        self.add_message(Message::new("think:goal", goal.to_string()));
        self.add_message(Message::new(
            "think:result",
            format!(
                "converged={} steps={} error={:.3}",
                result.converged, result.steps, result.final_error
            ),
        ));

        self.status = Status::Completed;
        Ok(result)
    }
674
    /// Ingests a [`KnowledgeSource`] into this agent's [`KnowledgeIndex`].
    ///
    /// Returns the number of new sentence-level chunks added to the index.
    /// This is a thin delegation to the free function
    /// [`knowledge_ingest`](crate::cognition::knowledge::ingest).
    ///
    /// # Examples
    ///
    /// ```rust
    /// use lmm_agent::agent::LmmAgent;
    /// use lmm_agent::cognition::knowledge::KnowledgeSource;
    ///
    /// #[tokio::main]
    /// async fn main() {
    ///     let mut agent = LmmAgent::new("KA Agent".into(), "Rust ownership.".into());
    ///     let n = agent
    ///         .ingest(KnowledgeSource::RawText(
    ///             "Rust prevents data races at compile time through its ownership system. \
    ///              The borrow checker enforces these rules statically.".into(),
    ///         ))
    ///         .await
    ///         .unwrap();
    ///     assert!(n > 0);
    /// }
    /// ```
    pub async fn ingest(&mut self, source: KnowledgeSource) -> Result<usize> {
        knowledge_ingest(&mut self.knowledge_index, source).await
    }
701
702 /// Returns the top-`top_k` relevant passages from the knowledge index for `question`.
703 ///
704 /// Returns an empty `Vec` when the index contains no matching material.
705 pub fn query_knowledge(&self, question: &str, top_k: usize) -> Vec<String> {
706 self.knowledge_index
707 .query(question, top_k)
708 .into_iter()
709 .map(|c| c.text.clone())
710 .collect()
711 }
712
    /// Produces an extractive answer to `question` from the knowledge index.
    ///
    /// Retrieves the top-5 relevant chunks, concatenates them, and runs
    /// [`lmm::text::TextSummarizer`] to select the most informative sentences.
    ///
    /// Returns `None` when the index is empty or no relevant material is found.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use lmm_agent::agent::LmmAgent;
    /// use lmm_agent::cognition::knowledge::KnowledgeSource;
    ///
    /// #[tokio::main]
    /// async fn main() {
    ///     let mut agent = LmmAgent::new("QA Agent".into(), "Rust.".into());
    ///     agent
    ///         .ingest(KnowledgeSource::RawText(
    ///             "Rust prevents data races through ownership. \
    ///              The borrow checker ensures memory safety at compile time.".into(),
    ///         ))
    ///         .await
    ///         .unwrap();
    ///     let answer = agent.answer_from_knowledge("How does Rust handle memory?");
    ///     assert!(answer.is_some());
    /// }
    /// ```
    pub fn answer_from_knowledge(&self, question: &str) -> Option<String> {
        // Same chunk budget (5) that `generate`'s fast path uses.
        self.knowledge_index.answer(question, 5)
    }
743
744 /// Saves the current [`LearningEngine`] state to `path` as JSON.
745 ///
746 /// Returns `Ok(())` when no learning engine is attached.
747 ///
748 /// # Examples
749 ///
750 /// ```rust
751 /// use lmm_agent::agent::LmmAgent;
752 /// use lmm_agent::cognition::learning::engine::LearningEngine;
753 /// use lmm_agent::cognition::learning::config::LearningConfig;
754 ///
755 /// let mut agent = LmmAgent::builder()
756 /// .persona("Learner")
757 /// .behavior("Learn.")
758 /// .learning_engine(LearningEngine::new(LearningConfig::default()))
759 /// .build();
760 ///
761 /// let path = std::env::temp_dir().join(format!("agent_helm_{}.json", uuid::Uuid::new_v4()));
762 /// agent.save_learning(&path).unwrap();
763 /// ```
764 pub fn save_learning(&self, path: &std::path::Path) -> Result<()> {
765 if let Some(engine) = &self.learning_engine {
766 LearningStore::save(engine, path)
767 } else {
768 Ok(())
769 }
770 }
771
772 /// Loads a previously saved [`LearningEngine`] state from `path` and
773 /// attaches it to this agent, replacing any existing engine.
774 ///
775 /// # Examples
776 ///
777 /// ```rust
778 /// use lmm_agent::agent::LmmAgent;
779 /// use lmm_agent::cognition::learning::engine::LearningEngine;
780 /// use lmm_agent::cognition::learning::config::LearningConfig;
781 ///
782 /// let mut agent = LmmAgent::builder()
783 /// .persona("Learner")
784 /// .behavior("Learn.")
785 /// .learning_engine(LearningEngine::new(LearningConfig::default()))
786 /// .build();
787 ///
788 /// let path = std::env::temp_dir().join(format!("agent_helm_load_{}.json", uuid::Uuid::new_v4()));
789 /// agent.save_learning(&path).unwrap();
790 /// agent.load_learning(&path).unwrap();
791 /// ```
792 pub fn load_learning(&mut self, path: &std::path::Path) -> Result<()> {
793 let engine = LearningStore::load(path)?;
794 self.learning_engine = Some(engine);
795 Ok(())
796 }
797
798 /// Returns the Q-table–recommended action for the current query string,
799 /// or `None` when no learning engine is attached or the state is unknown.
800 ///
801 /// # Examples
802 ///
803 /// ```rust
804 /// use lmm_agent::agent::LmmAgent;
805 /// use lmm_agent::cognition::learning::engine::LearningEngine;
806 /// use lmm_agent::cognition::learning::config::LearningConfig;
807 ///
808 /// let mut agent = LmmAgent::builder()
809 /// .persona("Learner")
810 /// .behavior("Learn.")
811 /// .learning_engine(LearningEngine::new(LearningConfig::default()))
812 /// .build();
813 ///
814 /// let action = agent.recall_learned("rust memory safety", 0);
815 /// // No experience recorded yet, so the engine explores freely.
816 /// assert!(action.is_some());
817 /// ```
818 pub fn recall_learned(&mut self, query: &str, step: usize) -> Option<ActionKey> {
819 let engine = self.learning_engine.as_mut()?;
820 let state = QTable::state_key(query);
821 Some(engine.recommend_action(state, query, step))
822 }
823
    /// Attributes the outcome of `outcome_var` in `graph` to its causal parents
    /// by running Pearl *do*-calculus counterfactuals on each parent.
    ///
    /// Returns an [`AttributionReport`] with normalised weights sorted
    /// highest-first, or `None` when `outcome_var` has no parents.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use lmm::causal::CausalGraph;
    /// use lmm_agent::agent::LmmAgent;
    ///
    /// let mut g = CausalGraph::new();
    /// g.add_node("cause", Some(2.0));
    /// g.add_node("effect", None);
    /// g.add_edge("cause", "effect", Some(1.0)).unwrap();
    /// g.forward_pass().unwrap();
    ///
    /// let agent = LmmAgent::new("Analyst".into(), "Causal analysis.".into());
    /// let report = agent.attribute_causes(&g, "effect").unwrap();
    /// assert_eq!(report.weights[0].0, "cause");
    /// ```
    pub fn attribute_causes(
        &self,
        graph: &lmm::causal::CausalGraph,
        outcome_var: &str,
    ) -> anyhow::Result<crate::cognition::attribution::AttributionReport> {
        // The attributor has its own error type; re-wrap it as anyhow so the
        // public signature matches the rest of the agent API.
        crate::cognition::attribution::CausalAttributor::attribute(graph, outcome_var)
            .map_err(|e| anyhow::anyhow!("{e}"))
    }
854
855 /// Generates causal hypotheses for variables whose observed values are not
856 /// explained by the current `graph` structure.
857 ///
858 /// Returns up to `max_hypotheses` candidate new edges ranked by
859 /// explanatory power, highest first.
860 ///
861 /// # Examples
862 ///
863 /// ```rust
864 /// use lmm::causal::CausalGraph;
865 /// use lmm_agent::agent::LmmAgent;
866 /// use std::collections::HashMap;
867 ///
868 /// let mut g = CausalGraph::new();
869 /// g.add_node("x", Some(1.0));
870 /// g.add_node("y", Some(0.0));
871 ///
872 /// let mut observed = HashMap::new();
873 /// observed.insert("y".to_string(), 0.9);
874 ///
875 /// let agent = LmmAgent::new("Scientist".into(), "Discover causal laws.".into());
876 /// let hypotheses = agent.form_hypotheses(&g, &observed, 5).unwrap();
877 /// assert!(!hypotheses.is_empty());
878 /// ```
879 pub fn form_hypotheses(
880 &self,
881 graph: &lmm::causal::CausalGraph,
882 observed: &std::collections::HashMap<String, f64>,
883 max_hypotheses: usize,
884 ) -> anyhow::Result<Vec<crate::cognition::hypothesis::Hypothesis>> {
885 let r#gen = crate::cognition::hypothesis::HypothesisGenerator::new(0.05, max_hypotheses);
886 r#gen
887 .generate(graph, observed)
888 .map_err(|e| anyhow::anyhow!("{e}"))
889 }
890
    /// Emits the current [`DriveState`] by ticking the agent's [`InternalDrive`].
    ///
    /// If no drive has been accumulated via [`LmmAgent::record_residual`] the
    /// returned state will be idle. The drive counters are reset after each
    /// call, matching the semantics of [`InternalDrive::tick`].
    ///
    /// # Examples
    ///
    /// ```rust
    /// use lmm_agent::agent::LmmAgent;
    ///
    /// let mut agent = LmmAgent::new("Curious".into(), "Learn everything.".into());
    /// agent.record_residual(0.9);
    /// let state = agent.drive_state();
    /// assert!(!state.signals.is_empty());
    /// ```
    pub fn drive_state(&mut self) -> crate::cognition::drive::DriveState {
        // Delegation only; all accumulation/reset logic lives in InternalDrive.
        self.internal_drive.tick()
    }
910
    /// Feeds an unexplained prediction residual into the agent's internal drive.
    ///
    /// Calling this after each world-model error accumulates curiosity that
    /// surfaces on the next [`drive_state`](Self::drive_state) call.
    ///
    /// `magnitude` is forwarded unchanged to [`InternalDrive::record_residual`].
    pub fn record_residual(&mut self, magnitude: f64) {
        self.internal_drive.record_residual(magnitude);
    }
918
    /// Feeds an incoherence signal into the agent's internal drive.
    ///
    /// `magnitude` is forwarded unchanged to [`InternalDrive::record_incoherence`].
    pub fn record_incoherence(&mut self, magnitude: f64) {
        self.internal_drive.record_incoherence(magnitude);
    }
923
    /// Notifies the drive system that a contradiction was detected in memory.
    ///
    /// Counter-only signal; no magnitude is associated with a contradiction.
    pub fn record_contradiction(&mut self) {
        self.internal_drive.record_contradiction();
    }
928}
929
930// Agent trait implementation
931
impl Agent for LmmAgent {
    // Trait-level construction delegates to the inherent `LmmAgent::new`.
    fn new(persona: Cow<'static, str>, behavior: Cow<'static, str>) -> Self {
        LmmAgent::new(persona, behavior)
    }

    // Replaces the lifecycle status wholesale; no transition validation.
    fn update(&mut self, status: Status) {
        self.status = status;
    }

    // The remaining methods are thin borrowing accessors over the public
    // fields, exposed so generic code can work with any `Agent` impl.
    fn persona(&self) -> &str {
        &self.persona
    }

    fn behavior(&self) -> &str {
        &self.behavior
    }

    fn status(&self) -> &Status {
        &self.status
    }

    fn memory(&self) -> &Vec<Message> {
        &self.memory
    }

    fn tools(&self) -> &Vec<Tool> {
        &self.tools
    }

    fn knowledge(&self) -> &Knowledge {
        &self.knowledge
    }

    fn planner(&self) -> Option<&Planner> {
        self.planner.as_ref()
    }

    fn profile(&self) -> &Profile {
        &self.profile
    }

    fn reflection(&self) -> Option<&Reflection> {
        self.reflection.as_ref()
    }

    fn scheduler(&self) -> Option<&TaskScheduler> {
        self.scheduler.as_ref()
    }

    fn capabilities(&self) -> &HashSet<Capability> {
        &self.capabilities
    }

    fn context(&self) -> &ContextManager {
        &self.context
    }

    fn tasks(&self) -> &Vec<Task> {
        &self.tasks
    }

    // Mutable accessors, grouped at the bottom.
    fn memory_mut(&mut self) -> &mut Vec<Message> {
        &mut self.memory
    }

    fn planner_mut(&mut self) -> Option<&mut Planner> {
        self.planner.as_mut()
    }

    fn context_mut(&mut self) -> &mut ContextManager {
        &mut self.context
    }
}
1005
1006// Copyright 2026 Mahmoud Harmouch.
1007//
1008// Licensed under the MIT license
1009// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
1010// option. This file may not be copied, modified, or distributed
1011// except according to those terms.