1use std::path::Path;
5use crate::types::*;
6
/// Approximate token count (see `estimate_tokens`: chars / 4) above which
/// the conversation history is automatically compacted.
const COMPACT_THRESHOLD: usize = 150_000;
8
/// Conversation state for the agent: the message history, the assembled
/// system prompt, and a rough token estimate used to decide when to compact.
pub struct ContextEngine {
    // Conversation history, oldest first.
    messages: Vec<Message>,
    // Built by `build_system_prompt` (or replaced via `override_system_prompt`).
    system_prompt: String,
    // Rough size of prompt + history in tokens (total chars / 4).
    token_estimate: usize,
}
14
15impl Default for ContextEngine {
16 fn default() -> Self {
17 Self::new()
18 }
19}
20
21impl ContextEngine {
22 pub fn new() -> Self {
23 Self {
24 messages: Vec::new(),
25 system_prompt: String::new(),
26 token_estimate: 0,
27 }
28 }
29
30 pub async fn build_system_prompt(&mut self, cwd: &str, memory_path: &str) {
34 let home = std::env::var("HOME").unwrap_or_default();
35 let custom_system_path = format!("{home}/.claude-agent/system.md");
36
37 let base_prompt = if Path::new(&custom_system_path).exists() {
38 tokio::fs::read_to_string(&custom_system_path).await.unwrap_or_default()
39 } else {
40 "You are an AI coding agent. You help users with software engineering tasks \
41 by reading files, writing code, running commands, and searching codebases.".to_string()
42 };
43
44 let mut parts = vec![base_prompt];
45
46 for md_path in &[
48 format!("{cwd}/CLAUDE.md"),
49 format!("{}/CLAUDE.md", std::env::var("HOME").unwrap_or_default()),
50 ] {
51 if Path::new(md_path).exists() {
52 if let Ok(content) = tokio::fs::read_to_string(md_path).await {
53 parts.push(format!("\n# Project Instructions ({md_path})\n{content}"));
54 }
55 }
56 }
57
58 if Path::new(memory_path).exists() {
60 if let Ok(memory) = tokio::fs::read_to_string(memory_path).await {
61 if !memory.trim().is_empty() {
62 let truncated: String = memory.chars().take(4000).collect();
63 parts.push(format!("\n# Persistent Memory\n{truncated}"));
64 }
65 }
66 }
67
68 parts.push("\n# Environment".to_string());
70 parts.push(format!("- Working directory: {cwd}"));
71 parts.push(format!("- Platform: {}", std::env::consts::OS));
72 parts.push(format!("- Arch: {}", std::env::consts::ARCH));
73 parts.push(format!("- Date: {}", chrono::Utc::now().format("%Y-%m-%d")));
74
75 self.system_prompt = parts.join("\n");
76 }
77
78 pub fn compact(&mut self) {
80 if self.messages.len() <= 4 {
81 return;
82 }
83
84 let first = self.messages[0].clone();
85 let recent: Vec<_> = self.messages.iter().rev().take(6).cloned().collect();
86 let dropped = self.messages.len() - 7;
87
88 let marker = Message {
89 role: Role::User,
90 content: MessageContent::Text(format!(
91 "[Context compacted: {dropped} earlier messages summarized.]"
92 )),
93 };
94
95 self.messages = Vec::with_capacity(8);
96 self.messages.push(first);
97 self.messages.push(marker);
98 self.messages.extend(recent.into_iter().rev());
99 self.estimate_tokens();
100 }
101
102 pub fn add_message(&mut self, msg: Message) {
103 self.messages.push(msg);
104 self.estimate_tokens();
105
106 if self.token_estimate > COMPACT_THRESHOLD {
107 self.compact();
108 }
109 }
110
111 fn estimate_tokens(&mut self) {
112 let mut chars = self.system_prompt.len();
113 for msg in &self.messages {
114 match &msg.content {
115 MessageContent::Text(t) => chars += t.len(),
116 MessageContent::Blocks(blocks) => {
117 for block in blocks {
118 match block {
119 ContentBlock::Text { text } => chars += text.len(),
120 _ => chars += 200,
121 }
122 }
123 }
124 }
125 }
126 self.token_estimate = chars / 4;
127 }
128
129 pub fn override_system_prompt(&mut self, prompt: &str) {
130 self.system_prompt = prompt.to_string();
131 }
132
133 pub fn messages(&self) -> &[Message] {
134 &self.messages
135 }
136
137 pub fn system_prompt(&self) -> &str {
138 &self.system_prompt
139 }
140
141 pub fn token_estimate(&self) -> usize {
142 self.token_estimate
143 }
144
145 pub fn is_near_limit(&self) -> bool {
146 self.token_estimate > COMPACT_THRESHOLD
147 }
148}