aether_core/core/
agent_builder.rs1use super::agent::{AgentConfig, AutoContinue};
2use crate::agent_spec::AgentSpec;
3use crate::context::CompactionConfig;
4use crate::core::{Agent, Prompt, Result};
5use crate::events::{AgentMessage, UserMessage};
6use crate::mcp::run_mcp_task::McpCommand;
7use llm::parser::ModelProviderParser;
8use llm::types::IsoString;
9use llm::{ChatMessage, Context, StreamingModelProvider, ToolDefinition};
10use std::sync::Arc;
11use std::time::Duration;
12use tokio::sync::mpsc::{self, Receiver, Sender};
13use tokio::task::JoinHandle;
14
/// Owned handle to a spawned agent task; lets callers abort it,
/// poll whether it has finished, or await its completion.
pub struct AgentHandle {
    // Join handle for the tokio task that runs the agent's event loop.
    handle: JoinHandle<()>,
}
19
20impl AgentHandle {
21 pub fn abort(&self) {
23 self.handle.abort();
24 }
25
26 pub fn is_finished(&self) -> bool {
28 self.handle.is_finished()
29 }
30
31 pub async fn await_completion(self) {
33 let _ = self.handle.await;
34 }
35}
36
/// Fluent builder that configures and spawns an agent task.
pub struct AgentBuilder {
    /// Streaming LLM provider the agent will send requests to.
    llm: Arc<dyn StreamingModelProvider>,
    /// System prompts, rendered in insertion order at spawn time.
    prompts: Vec<Prompt>,
    /// Tool definitions exposed to the model.
    tool_definitions: Vec<ToolDefinition>,
    /// Chat history seeded into the context after the system prompt.
    initial_messages: Vec<ChatMessage>,
    /// Channel for dispatching tool calls to the MCP task, if tools are set.
    mcp_tx: Option<Sender<McpCommand>>,
    /// Capacity of the user-message and agent-message channels.
    channel_capacity: usize,
    /// Per-tool-call timeout.
    tool_timeout: Duration,
    /// Context-compaction settings; `None` disables compaction.
    compaction_config: Option<CompactionConfig>,
    /// Maximum number of automatic continuations per turn.
    max_auto_continues: u32,
    /// Optional provider-side prompt cache key.
    prompt_cache_key: Option<String>,
}
49
50impl AgentBuilder {
51 pub fn new(llm: Arc<dyn StreamingModelProvider>) -> Self {
52 Self {
53 llm,
54 prompts: Vec::new(),
55 tool_definitions: Vec::new(),
56 initial_messages: Vec::new(),
57 mcp_tx: None,
58 channel_capacity: 1000,
59 tool_timeout: Duration::from_secs(60 * 20),
60 compaction_config: Some(CompactionConfig::default()),
61 max_auto_continues: 3,
62 prompt_cache_key: None,
63 }
64 }
65
66 pub async fn from_spec(spec: &AgentSpec, base_prompts: Vec<Prompt>) -> Result<Self> {
71 let (provider, _) = ModelProviderParser::default().parse(&spec.model).await?;
72 let mut builder = Self::new(Arc::from(provider));
73 for prompt in base_prompts {
74 builder = builder.system_prompt(prompt);
75 }
76 for prompt in &spec.prompts {
77 builder = builder.system_prompt(prompt.clone());
78 }
79 Ok(builder)
80 }
81
82 pub fn system_prompt(mut self, prompt: Prompt) -> Self {
86 self.prompts.push(prompt);
87 self
88 }
89
90 pub fn tools(mut self, tx: Sender<McpCommand>, tools: Vec<ToolDefinition>) -> Self {
91 self.tool_definitions = tools;
92 self.mcp_tx = Some(tx);
93 self
94 }
95
96 pub fn tool_timeout(mut self, timeout: Duration) -> Self {
103 self.tool_timeout = timeout;
104 self
105 }
106
107 pub fn compaction(mut self, config: CompactionConfig) -> Self {
128 self.compaction_config = Some(config);
129 self
130 }
131
132 pub fn disable_compaction(mut self) -> Self {
136 self.compaction_config = None;
137 self
138 }
139
140 pub fn max_auto_continues(mut self, max: u32) -> Self {
160 self.max_auto_continues = max;
161 self
162 }
163
164 pub fn prompt_cache_key(mut self, key: String) -> Self {
169 self.prompt_cache_key = Some(key);
170 self
171 }
172
173 pub fn messages(mut self, messages: Vec<ChatMessage>) -> Self {
177 self.initial_messages = messages;
178 self
179 }
180
181 pub async fn spawn(self) -> Result<(Sender<UserMessage>, Receiver<AgentMessage>, AgentHandle)> {
182 let mut messages = Vec::new();
183
184 if !self.prompts.is_empty() {
185 let system_content = Prompt::build_all(&self.prompts).await?;
186 if !system_content.is_empty() {
187 messages.push(ChatMessage::System { content: system_content, timestamp: IsoString::now() });
188 }
189 }
190
191 messages.extend(self.initial_messages);
192
193 let (user_message_tx, user_message_rx) = mpsc::channel::<UserMessage>(self.channel_capacity);
194
195 let (message_tx, agent_message_rx) = mpsc::channel::<AgentMessage>(self.channel_capacity);
196
197 let mut context = Context::new(messages, self.tool_definitions);
198 context.set_prompt_cache_key(self.prompt_cache_key);
199
200 let config = AgentConfig {
201 llm: self.llm,
202 context,
203 mcp_command_tx: self.mcp_tx,
204 tool_timeout: self.tool_timeout,
205 compaction_config: self.compaction_config,
206 auto_continue: AutoContinue::new(self.max_auto_continues),
207 };
208
209 let agent = Agent::new(config, user_message_rx, message_tx);
210
211 let agent_handle = tokio::spawn(agent.run());
212
213 Ok((user_message_tx, agent_message_rx, AgentHandle { handle: agent_handle }))
214 }
215}
216
#[cfg(test)]
mod tests {
    use super::*;
    use crate::agent_spec::{AgentSpecExposure, ToolFilter};

    #[tokio::test]
    async fn test_agent_handle_is_finished() {
        let handle = AgentHandle { handle: tokio::spawn(async {}) };
        // Give the trivial task a moment to run, then actually verify that
        // is_finished reports completion (previously this test never
        // asserted anything about is_finished).
        tokio::time::sleep(Duration::from_millis(10)).await;
        assert!(handle.is_finished());
        handle.await_completion().await;
    }

    #[tokio::test]
    async fn test_agent_handle_abort() {
        let handle = AgentHandle {
            handle: tokio::spawn(async {
                tokio::time::sleep(Duration::from_secs(60)).await;
            }),
        };
        assert!(!handle.is_finished());
        handle.abort();
        // Abort is asynchronous; give the runtime a moment to settle.
        tokio::time::sleep(Duration::from_millis(10)).await;
        assert!(handle.is_finished());
    }

    #[tokio::test]
    async fn system_prompt_preserves_add_order() {
        let builder = AgentBuilder::new(Arc::new(llm::testing::FakeLlmProvider::new(vec![])))
            .system_prompt(Prompt::text("first"))
            .system_prompt(Prompt::text("second"))
            .system_prompt(Prompt::text("third"));

        let rendered = Prompt::build_all(&builder.prompts).await.unwrap();

        assert_eq!(rendered, "first\n\nsecond\n\nthird");
    }

    #[tokio::test]
    async fn from_spec_accepts_alloy_model_specs() {
        // A comma-separated model string ("alloy" spec) must parse cleanly.
        let spec = AgentSpec {
            name: "alloy".to_string(),
            description: "alloy".to_string(),
            model: "ollama:llama3.2,llamacpp:local".to_string(),
            reasoning_effort: None,
            prompts: vec![],
            mcp_config_paths: Vec::new(),
            exposure: AgentSpecExposure::both(),
            tools: ToolFilter::default(),
        };

        let builder = AgentBuilder::from_spec(&spec, vec![]).await;
        assert!(builder.is_ok());
    }
}