// crabtalk_core/agent/compact.rs
use crate::model::HistoryEntry;
use crabllm_core::{ChatCompletionRequest, Message, Provider, Role};

pub(crate) const COMPACT_PROMPT: &str = include_str!("../../prompts/compact.md");

8impl<P: Provider + 'static> super::Agent<P> {
9 pub async fn compact(&self, history: &[HistoryEntry]) -> Option<String> {
15 let model_name = self.config.model.clone();
16 let prompt = COMPACT_PROMPT.to_owned();
17
18 let mut messages = Vec::with_capacity(2 + history.len());
19 messages.push(Message::system(&prompt));
20 if !self.config.system_prompt.is_empty() {
23 messages.push(Message::user(format!(
24 "Agent system prompt (preserve identity/profile info):\n{}",
25 self.config.system_prompt
26 )));
27 }
28 let max_len = self.config.compact_tool_max_len;
29 for entry in history {
30 let mut msg = entry.to_wire_message();
31 if *entry.role() == Role::Tool
32 && let Some(serde_json::Value::String(text)) = msg.content.as_mut()
33 && text.len() > max_len
34 {
35 text.truncate(text.floor_char_boundary(max_len));
36 text.push_str("... [truncated]");
37 }
38 messages.push(msg);
39 }
40
41 let request = ChatCompletionRequest {
42 model: model_name,
43 messages,
44 temperature: None,
45 top_p: None,
46 max_tokens: None,
47 stream: None,
48 stop: None,
49 tools: None,
50 tool_choice: None,
51 frequency_penalty: None,
52 presence_penalty: None,
53 seed: None,
54 user: None,
55 reasoning_effort: None,
56 extra: Default::default(),
57 };
58 match self.model.send_ct(request).await {
59 Ok(response) => response.content().map(|s| s.to_owned()),
60 Err(e) => {
61 tracing::warn!("compaction LLM call failed: {e}");
62 None
63 }
64 }
65 }
66
67 pub(crate) fn estimate_tokens(history: &[HistoryEntry]) -> usize {
72 history.iter().map(|e| e.estimate_tokens()).sum()
73 }
74}