batuta/agent/driver/
apr_serve.rs1use async_trait::async_trait;
16use std::path::PathBuf;
17use std::process::{Child, Command, Stdio};
18
19use super::{CompletionRequest, CompletionResponse, LlmDriver, Message, ToolCall};
20use crate::agent::result::{AgentError, DriverError, StopReason, TokenUsage};
21use crate::serve::backends::PrivacyTier;
22
/// Driver that launches a local `apr serve` subprocess and talks to it
/// over its OpenAI-compatible HTTP API.
pub struct AprServeDriver {
    /// Base URL of the spawned server, e.g. `http://127.0.0.1:<port>`.
    base_url: String,
    /// Model name sent in request bodies; derived from the model file stem.
    model_name: String,
    /// Handle to the `apr serve` subprocess; terminated in `Drop`.
    _child: Child,
    /// Context window size reported to callers via `context_window()`.
    context_window_size: usize,
}
34
impl Drop for AprServeDriver {
    /// Shut down the `apr serve` subprocess. On Unix, ask politely with
    /// SIGTERM and poll for up to 2 seconds; in all cases fall back to a
    /// hard kill and reap the process.
    fn drop(&mut self) {
        let pid = self._child.id();

        #[cfg(unix)]
        {
            // Graceful shutdown first: `kill -TERM <pid>`, output discarded.
            let _ = Command::new("kill")
                .args(["-TERM", &pid.to_string()])
                .stdout(Stdio::null())
                .stderr(Stdio::null())
                .status();

            // Poll for exit for up to 2 seconds, 100ms at a time.
            let deadline = std::time::Instant::now() + std::time::Duration::from_secs(2);
            loop {
                match self._child.try_wait() {
                    // Child exited cleanly — done; skip the hard kill below.
                    Ok(Some(_)) => return,
                    Ok(None) if std::time::Instant::now() < deadline => {
                        std::thread::sleep(std::time::Duration::from_millis(100));
                    }
                    // Deadline passed or try_wait errored: escalate.
                    _ => break,
                }
            }
        }

        // Hard kill (SIGKILL on Unix); `wait` reaps the zombie.
        let _ = self._child.kill();
        let _ = self._child.wait();
    }
}
67
68impl AprServeDriver {
69 pub fn launch(model_path: PathBuf, context_window: Option<usize>) -> Result<Self, AgentError> {
75 let apr_path = find_apr_binary()?;
76
77 let port = 19384 + (std::process::id() % 1000) as u16;
79 let base_url = format!("http://127.0.0.1:{port}");
80
81 let model_name = model_path
82 .file_stem()
83 .map(|s| s.to_string_lossy().to_string())
84 .unwrap_or_else(|| "local".to_string());
85
86 let child = Command::new(&apr_path)
91 .args([
92 "serve",
93 "run",
94 &model_path.to_string_lossy(),
95 "--port",
96 &port.to_string(),
97 "--host",
98 "127.0.0.1",
99 "--gpu",
100 ])
101 .env("BATCHED_PREFILL", "0")
102 .stdout(Stdio::piped())
103 .stderr(Stdio::piped())
104 .spawn()
105 .map_err(|e| {
106 AgentError::Driver(DriverError::InferenceFailed(format!(
107 "failed to spawn apr serve: {e}"
108 )))
109 })?;
110
111 eprintln!("Launched apr serve on port {port} (pid {})", child.id());
112
113 let mut driver = Self {
114 base_url,
115 model_name,
116 _child: child,
117 context_window_size: context_window.unwrap_or(4096),
118 };
119
120 driver.wait_for_ready()?;
122
123 Ok(driver)
124 }
125
126 fn wait_for_ready(&mut self) -> Result<(), AgentError> {
131 let addr = self.base_url.trim_start_matches("http://").to_string();
132 let sock_addr: std::net::SocketAddr =
133 addr.parse().unwrap_or_else(|_| std::net::SocketAddr::from(([127, 0, 0, 1], 19384)));
134
135 let start = std::time::Instant::now();
136 let timeout = std::time::Duration::from_secs(30);
137
138 loop {
139 if start.elapsed() > timeout {
140 let stderr = self.drain_stderr();
141 let mut msg = "apr serve did not become ready within 30s".to_string();
142 if !stderr.is_empty() {
143 msg.push_str(&format!("\nsubprocess stderr:\n{stderr}"));
144 }
145 msg.push_str(&format!(
146 "\nDebug manually: apr serve run <model> --port {} --host 127.0.0.1",
147 addr.rsplit(':').next().unwrap_or("19384")
148 ));
149 return Err(AgentError::Driver(DriverError::InferenceFailed(msg)));
150 }
151
152 if let Ok(Some(status)) = self._child.try_wait() {
154 let stderr = self.drain_stderr();
155 let mut msg = format!("apr serve exited with {status} during startup");
156 if !stderr.is_empty() {
157 msg.push_str(&format!("\nsubprocess stderr:\n{stderr}"));
158 }
159 return Err(AgentError::Driver(DriverError::InferenceFailed(msg)));
160 }
161
162 if std::net::TcpStream::connect_timeout(
163 &sock_addr,
164 std::time::Duration::from_millis(200),
165 )
166 .is_ok()
167 {
168 eprintln!("apr serve ready ({:.1}s)", start.elapsed().as_secs_f64());
169 return Ok(());
170 }
171
172 std::thread::sleep(std::time::Duration::from_millis(500));
173 }
174 }
175
176 fn drain_stderr(&mut self) -> String {
178 use std::io::Read;
179 let Some(stderr) = self._child.stderr.as_mut() else {
180 return String::new();
181 };
182 let mut buf = vec![0u8; 2048];
183 let n = stderr.read(&mut buf).unwrap_or(0);
184 let text = String::from_utf8_lossy(&buf[..n]).to_string();
185 let lines: Vec<&str> = text.lines().collect();
187 if lines.len() > 10 {
188 lines[lines.len() - 10..].join("\n")
189 } else {
190 text
191 }
192 }
193
194 fn build_openai_body(&self, request: &CompletionRequest) -> serde_json::Value {
201 let mut messages = Vec::new();
202
203 if let Some(ref system) = request.system {
204 let compact_system = system
208 .find("\n\n## Available Tools")
209 .map(|i| &system[..i])
210 .unwrap_or(system)
211 .to_string();
212
213 messages.push(serde_json::json!({
214 "role": "system",
215 "content": compact_system
216 }));
217 }
218
219 for msg in &request.messages {
220 match msg {
221 Message::User(text) => messages.push(serde_json::json!({
222 "role": "user",
223 "content": text
224 })),
225 Message::Assistant(text) => messages.push(serde_json::json!({
226 "role": "assistant",
227 "content": text
228 })),
229 Message::AssistantToolUse(call) => messages.push(serde_json::json!({
230 "role": "assistant",
231 "content": format!("<tool_call>\n{}\n</tool_call>",
232 serde_json::json!({"name": call.name, "input": call.input}))
233 })),
234 Message::ToolResult(result) => messages.push(serde_json::json!({
235 "role": "user",
236 "content": format!("<tool_result>\n{}\n</tool_result>", result.content)
237 })),
238 _ => {}
239 }
240 }
241
242 let max_tokens = request.max_tokens.min(1024);
249
250 serde_json::json!({
251 "model": self.model_name,
252 "messages": messages,
253 "max_tokens": max_tokens,
254 "temperature": request.temperature,
255 "stream": false
256 })
257 }
258}
259
260#[async_trait]
261impl LlmDriver for AprServeDriver {
262 async fn complete(&self, request: CompletionRequest) -> Result<CompletionResponse, AgentError> {
263 let url = format!("{}/v1/chat/completions", self.base_url);
264 let body = self.build_openai_body(&request);
265
266 let client = reqwest::Client::builder()
267 .timeout(std::time::Duration::from_secs(120))
268 .build()
269 .map_err(|e| AgentError::Driver(DriverError::Network(format!("http client: {e}"))))?;
270 let response = client
271 .post(&url)
272 .header("content-type", "application/json")
273 .json(&body)
274 .send()
275 .await
276 .map_err(|e| AgentError::Driver(DriverError::Network(format!("apr serve: {e}"))))?;
277
278 if !response.status().is_success() {
279 let status = response.status().as_u16();
280 let text = response.text().await.unwrap_or_default();
281 return Err(AgentError::Driver(DriverError::Network(format!(
282 "apr serve HTTP {status}: {text}"
283 ))));
284 }
285
286 let json: serde_json::Value = response
287 .json()
288 .await
289 .map_err(|e| AgentError::Driver(DriverError::InferenceFailed(format!("parse: {e}"))))?;
290
291 let raw_text = json["choices"][0]["message"]["content"].as_str().unwrap_or("").to_string();
293
294 let text = strip_thinking_blocks(&raw_text);
298
299 let usage = json.get("usage").cloned().unwrap_or(serde_json::json!({}));
300 let input_tokens = usage["prompt_tokens"].as_u64().unwrap_or(0);
301 let output_tokens = usage["completion_tokens"].as_u64().unwrap_or(0);
302
303 let (clean_text, tool_calls) = super::realizar::parse_tool_calls_pub(&text);
305
306 let stop_reason =
307 if tool_calls.is_empty() { StopReason::EndTurn } else { StopReason::ToolUse };
308
309 Ok(CompletionResponse {
310 text: clean_text,
311 stop_reason,
312 tool_calls,
313 usage: TokenUsage { input_tokens, output_tokens },
314 })
315 }
316
317 fn context_window(&self) -> usize {
318 self.context_window_size
319 }
320
321 fn privacy_tier(&self) -> PrivacyTier {
322 PrivacyTier::Sovereign
324 }
325}
326
/// Remove `<think>…</think>` reasoning blocks from model output.
///
/// An unterminated `<think>` drops everything from the tag onward; any
/// orphaned `</think>` tags are erased; the result is trimmed.
fn strip_thinking_blocks(text: &str) -> String {
    let mut kept = String::with_capacity(text.len());
    let mut rest = text;
    loop {
        let Some(open) = rest.find("<think>") else {
            // No more think blocks: keep the remainder verbatim.
            kept.push_str(rest);
            break;
        };
        kept.push_str(&rest[..open]);
        let tail = &rest[open..];
        match tail.find("</think>") {
            // Skip past the closing tag and continue scanning.
            Some(close) => rest = &tail[close + "</think>".len()..],
            // Unterminated block: discard the rest of the text.
            None => break,
        }
    }
    // Erase stray closing tags, then trim surrounding whitespace.
    kept.replace("</think>", "").trim().to_string()
}
344
345fn find_apr_binary() -> Result<PathBuf, AgentError> {
347 which::which("apr").map_err(|_| {
348 AgentError::Driver(DriverError::InferenceFailed(
349 "apr binary not found on PATH. Install: cargo install apr-cli".into(),
350 ))
351 })
352}
353
354#[cfg(test)]
355#[path = "apr_serve_tests.rs"]
356mod tests;