// tycode_core/modules/memory/background.rs
1//! Background memory management task.
2
3use std::collections::BTreeMap;
4use std::sync::Arc;
5
6use tracing::{info, warn};
7
8use crate::agents::agent::ActiveAgent;
9use crate::agents::memory_manager::MemoryManagerAgent;
10use crate::agents::runner::AgentRunner;
11use crate::ai::provider::AiProvider;
12use crate::ai::types::{ContentBlock, Message, MessageRole};
13use crate::module::ContextBuilder;
14use crate::module::Module;
15use crate::module::PromptBuilder;
16use crate::settings::manager::SettingsManager;
17use crate::spawn::complete_task::CompleteTask;
18use crate::steering::SteeringDocuments;
19use crate::tools::r#trait::ToolExecutor;
20
21use super::compaction;
22use super::config::MemoryConfig;
23use super::log::MemoryLog;
24use super::tool::AppendMemoryTool;
25
26/// Spawn the memory manager agent as a background task.
27/// This is fire-and-forget - errors are logged but not propagated.
28///
29/// # Arguments
30/// * `ai_provider` - The AI provider to use
31/// * `memory_log` - The memory log to store memories in
32/// * `settings` - Settings manager
33/// * `conversation` - The conversation messages to analyze (last N messages, pre-sliced by caller)
34/// * `steering` - Steering documents
35/// * `mcp_manager` - MCP manager for tool access
36pub fn spawn_memory_manager(
37    ai_provider: Arc<dyn AiProvider>,
38    memory_log: Arc<MemoryLog>,
39    settings: SettingsManager,
40    conversation: Vec<Message>,
41    steering: SteeringDocuments,
42    prompt_builder: PromptBuilder,
43    context_builder: ContextBuilder,
44    modules: Vec<Arc<dyn Module>>,
45) {
46    let mut tools: BTreeMap<String, Arc<dyn ToolExecutor + Send + Sync>> = BTreeMap::new();
47    tools.insert(
48        AppendMemoryTool::tool_name().to_string(),
49        Arc::new(AppendMemoryTool::new(memory_log.clone())),
50    );
51    tools.insert(
52        CompleteTask::tool_name().to_string(),
53        Arc::new(CompleteTask::standalone()),
54    );
55
56    let compaction_log = memory_log.clone();
57    let compaction_provider = ai_provider.clone();
58    let compaction_settings = settings.clone();
59    let compaction_modules = modules.clone();
60    let compaction_steering = steering.clone();
61    let compaction_prompt = prompt_builder.clone();
62    let compaction_context = context_builder.clone();
63
64    tokio::task::spawn_local(async move {
65        let msg_count = conversation.len();
66        info!(messages = msg_count, "Memory manager starting");
67
68        let mut active_agent = ActiveAgent::new(Arc::new(MemoryManagerAgent));
69        active_agent.conversation = conversation;
70        active_agent.conversation.push(Message::user(
71            "=== MEMORY MANAGER AGENT ===\n\n\
72            You are now the Memory Manager agent. Your conversation history contains the interaction \
73            between the user and a coding agent that just concluded. Your task is to analyze that conversation \
74            history (all messages before this one) and extract any learnings worth remembering.\n\n\
75            Look for:\n\
76            - User preferences or corrections\n\
77            - Project-specific decisions\n\
78            - Coding style preferences\n\
79            - Technical constraints mentioned\n\n\
80            Use append_memory for each distinct learning, then call complete_task. \
81            If the conversation contains no extractable learnings, call complete_task immediately."
82        ));
83
84        let runner = AgentRunner::new(
85            ai_provider,
86            settings,
87            tools,
88            modules,
89            steering,
90            prompt_builder,
91            context_builder,
92        );
93
94        match runner.run(active_agent, 2).await {
95            Ok(_) => info!("Memory manager completed"),
96            Err(e) => warn!(error = ?e, "Memory manager failed"),
97        }
98
99        maybe_auto_compact(
100            &compaction_log,
101            &compaction_settings,
102            compaction_provider,
103            compaction_modules,
104            compaction_steering,
105            compaction_prompt,
106            compaction_context,
107        )
108        .await;
109    });
110}
111
112/// Spawn a background compaction task. Fire-and-forget.
113pub fn spawn_background_compaction(
114    memory_log: Arc<MemoryLog>,
115    ai_provider: Arc<dyn AiProvider>,
116    settings: SettingsManager,
117    modules: Vec<Arc<dyn Module>>,
118    steering: SteeringDocuments,
119    prompt_builder: PromptBuilder,
120    context_builder: ContextBuilder,
121) {
122    tokio::task::spawn_local(async move {
123        info!("Background compaction starting");
124        match compaction::run_compaction(
125            &memory_log,
126            ai_provider,
127            settings,
128            modules,
129            steering,
130            prompt_builder,
131            context_builder,
132        )
133        .await
134        {
135            Ok(Some(c)) => info!(
136                through_seq = c.through_seq,
137                memories = c.memories_count,
138                "Background compaction completed"
139            ),
140            Ok(None) => info!("Background compaction: no new memories"),
141            Err(e) => warn!(error = ?e, "Background compaction failed"),
142        }
143    });
144}
145
146async fn maybe_auto_compact(
147    memory_log: &MemoryLog,
148    settings: &SettingsManager,
149    provider: Arc<dyn AiProvider>,
150    modules: Vec<Arc<dyn Module>>,
151    steering: SteeringDocuments,
152    prompt_builder: PromptBuilder,
153    context_builder: ContextBuilder,
154) {
155    let config: MemoryConfig = settings.get_module_config::<MemoryConfig>("memory");
156    let threshold = match config.auto_compaction_threshold {
157        Some(t) if t > 0 => t,
158        _ => return,
159    };
160
161    let pending = match compaction::memories_since_last_compaction(memory_log) {
162        Ok(c) => c,
163        Err(e) => {
164            warn!(error = ?e, "Failed to check memories for auto-compaction");
165            return;
166        }
167    };
168
169    if pending < threshold {
170        return;
171    }
172
173    info!(pending, threshold, "Auto-compaction threshold reached");
174    match compaction::run_compaction(
175        memory_log,
176        provider,
177        settings.clone(),
178        modules,
179        steering,
180        prompt_builder,
181        context_builder,
182    )
183    .await
184    {
185        Ok(Some(c)) => info!(
186            through_seq = c.through_seq,
187            memories = c.memories_count,
188            "Auto-compaction completed"
189        ),
190        Ok(None) => info!("Auto-compaction: no new memories"),
191        Err(e) => warn!(error = ?e, "Auto-compaction failed"),
192    }
193}
194
195/// Safely slice a conversation to get the last N messages without tearing tool call pairs.
196/// Returns messages starting from a clean boundary (User message without orphaned ToolResults).
197pub fn safe_conversation_slice(conversation: &[Message], max_messages: usize) -> Vec<Message> {
198    if conversation.len() <= max_messages {
199        return conversation.to_vec();
200    }
201
202    let start_idx = conversation.len().saturating_sub(max_messages);
203    let mut slice = &conversation[start_idx..];
204
205    // Tool results require matching tool uses from prior assistant messages.
206    // Starting mid-pair would create invalid conversation structure for the AI model.
207    while !slice.is_empty() {
208        let first = &slice[0];
209        if first.role == MessageRole::User {
210            let has_tool_results = first
211                .content
212                .blocks()
213                .iter()
214                .any(|b| matches!(b, ContentBlock::ToolResult(_)));
215            if !has_tool_results {
216                break;
217            }
218        }
219        slice = &slice[1..];
220    }
221
222    slice.to_vec()
223}