// a3s_code_core — agent_api.rs
//! Agent Facade API
//!
//! High-level, ergonomic API for using A3S Code as an embedded library.
//!
//! ## Example
//!
//! ```rust,no_run
//! use a3s_code_core::Agent;
//!
//! # async fn run() -> anyhow::Result<()> {
//! let agent = Agent::new("agent.hcl").await?;
//! let session = agent.session("/my-project", None)?;
//! let result = session.send("Explain the auth module").await?;
//! println!("{}", result.text);
//! # Ok(())
//! # }
//! ```

19use crate::agent::{AgentConfig, AgentEvent, AgentLoop, AgentResult};
20use crate::config::CodeConfig;
21use crate::error::Result;
22use crate::llm::{LlmClient, Message};
23use crate::tools::{ToolContext, ToolExecutor};
24use anyhow::Context;
25use std::path::{Path, PathBuf};
26use std::sync::Arc;
27use tokio::sync::mpsc;
28use tokio::task::JoinHandle;
29
// ============================================================================
// ToolCallResult
// ============================================================================

/// Result of a direct tool execution (no LLM).
#[derive(Debug, Clone)]
pub struct ToolCallResult {
    /// Name of the tool that was invoked.
    pub name: String,
    /// Raw textual output produced by the tool.
    pub output: String,
    /// Exit code reported by the tool's execution.
    pub exit_code: i32,
}
41
// ============================================================================
// SessionOptions
// ============================================================================

/// Optional per-session overrides.
#[derive(Debug, Clone, Default)]
pub struct SessionOptions {
    /// Override the default model. Format: `"provider/model"` (e.g., `"openai/gpt-4o"`).
    pub model: Option<String>,
}

impl SessionOptions {
    /// Create an options value with every override unset.
    pub fn new() -> Self {
        Self::default()
    }

    /// Builder-style setter: pin this session to a specific `"provider/model"`.
    pub fn with_model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }
}
63
64// ============================================================================
65// Agent
66// ============================================================================
67
68/// High-level agent facade.
69///
70/// Holds the LLM client and agent config. Workspace-independent.
71/// Use [`Agent::session()`] to bind to a workspace.
72pub struct Agent {
73    llm_client: Arc<dyn LlmClient>,
74    code_config: CodeConfig,
75    config: AgentConfig,
76}
77
78impl std::fmt::Debug for Agent {
79    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
80        f.debug_struct("Agent").finish()
81    }
82}
83
84impl Agent {
85    /// Create from a config file path or inline config string.
86    ///
87    /// Auto-detects: file path (.hcl/.json) vs inline JSON vs inline HCL.
88    pub async fn new(config_source: impl Into<String>) -> Result<Self> {
89        let source = config_source.into();
90        let path = Path::new(&source);
91
92        let config = if path.extension().is_some() && path.exists() {
93            CodeConfig::from_file(path)
94                .with_context(|| format!("Failed to load config: {}", path.display()))?
95        } else {
96            CodeConfig::from_json(&source)
97                .or_else(|_| CodeConfig::from_hcl(&source))
98                .context("Failed to parse config as JSON or HCL")?
99        };
100
101        Self::from_config(config).await
102    }
103
104    /// Create from a config file path or inline config string.
105    ///
106    /// Alias for [`Agent::new()`] — provides a consistent API with
107    /// the Python and Node.js SDKs.
108    pub async fn create(config_source: impl Into<String>) -> Result<Self> {
109        Self::new(config_source).await
110    }
111
112    /// Create from a [`CodeConfig`] struct.
113    pub async fn from_config(config: CodeConfig) -> Result<Self> {
114        let llm_config = config
115            .default_llm_config()
116            .context("default_model must be set in 'provider/model' format with a valid API key")?;
117        let llm_client = crate::llm::create_client_with_config(llm_config);
118
119        let agent_config = AgentConfig {
120            max_tool_rounds: config
121                .max_tool_rounds
122                .unwrap_or(AgentConfig::default().max_tool_rounds),
123            ..AgentConfig::default()
124        };
125
126        Ok(Agent {
127            llm_client,
128            code_config: config,
129            config: agent_config,
130        })
131    }
132
133    /// Bind to a workspace directory, returning an [`AgentSession`].
134    ///
135    /// Pass `None` for defaults, or `Some(SessionOptions)` to override
136    /// the model for this session.
137    pub fn session(
138        &self,
139        workspace: impl Into<String>,
140        options: Option<SessionOptions>,
141    ) -> Result<AgentSession> {
142        let opts = options.unwrap_or_default();
143
144        let llm_client = if let Some(ref model) = opts.model {
145            let (provider_name, model_id) = model
146                .split_once('/')
147                .context("model format must be 'provider/model' (e.g., 'openai/gpt-4o')")?;
148
149            let llm_config = self
150                .code_config
151                .llm_config(provider_name, model_id)
152                .with_context(|| {
153                    format!("provider '{provider_name}' or model '{model_id}' not found in config")
154                })?;
155
156            crate::llm::create_client_with_config(llm_config)
157        } else {
158            self.llm_client.clone()
159        };
160
161        Ok(self.build_session(workspace.into(), llm_client))
162    }
163
164    fn build_session(&self, workspace: String, llm_client: Arc<dyn LlmClient>) -> AgentSession {
165        let canonical =
166            std::fs::canonicalize(&workspace).unwrap_or_else(|_| PathBuf::from(&workspace));
167
168        let tool_executor = Arc::new(ToolExecutor::new(canonical.display().to_string()));
169        let tool_defs = tool_executor.definitions();
170
171        let config = AgentConfig {
172            tools: tool_defs,
173            ..self.config.clone()
174        };
175
176        AgentSession {
177            llm_client,
178            tool_executor,
179            tool_context: ToolContext::new(canonical.clone()),
180            config,
181            workspace: canonical,
182            history: Vec::new(),
183        }
184    }
185}
186
187// ============================================================================
188// AgentSession
189// ============================================================================
190
191/// Workspace-bound session. All LLM and tool operations happen here.
192pub struct AgentSession {
193    llm_client: Arc<dyn LlmClient>,
194    tool_executor: Arc<ToolExecutor>,
195    tool_context: ToolContext,
196    config: AgentConfig,
197    workspace: PathBuf,
198    history: Vec<Message>,
199}
200
201impl std::fmt::Debug for AgentSession {
202    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
203        f.debug_struct("AgentSession")
204            .field("workspace", &self.workspace.display().to_string())
205            .finish()
206    }
207}
208
209impl AgentSession {
210    /// Send a prompt and wait for the complete response.
211    pub async fn send(&self, prompt: &str) -> Result<AgentResult> {
212        let agent_loop = AgentLoop::new(
213            self.llm_client.clone(),
214            self.tool_executor.clone(),
215            self.tool_context.clone(),
216            self.config.clone(),
217        );
218        Ok(agent_loop.execute(&self.history, prompt, None).await?)
219    }
220
221    /// Send a prompt with conversation history.
222    pub async fn send_with_history(
223        &self,
224        history: &[Message],
225        prompt: &str,
226    ) -> Result<AgentResult> {
227        let agent_loop = AgentLoop::new(
228            self.llm_client.clone(),
229            self.tool_executor.clone(),
230            self.tool_context.clone(),
231            self.config.clone(),
232        );
233        Ok(agent_loop.execute(history, prompt, None).await?)
234    }
235
236    /// Send a prompt and stream events back.
237    pub async fn stream(
238        &self,
239        prompt: &str,
240    ) -> Result<(mpsc::Receiver<AgentEvent>, JoinHandle<()>)> {
241        let (tx, rx) = mpsc::channel(256);
242        let agent_loop = AgentLoop::new(
243            self.llm_client.clone(),
244            self.tool_executor.clone(),
245            self.tool_context.clone(),
246            self.config.clone(),
247        );
248        let history = self.history.clone();
249        let prompt = prompt.to_string();
250
251        let handle = tokio::spawn(async move {
252            let _ = agent_loop.execute(&history, &prompt, Some(tx)).await;
253        });
254
255        Ok((rx, handle))
256    }
257
258    /// Read a file from the workspace.
259    pub async fn read_file(&self, path: &str) -> Result<String> {
260        let args = serde_json::json!({ "file_path": path });
261        let result = self.tool_executor.execute("read", &args).await?;
262        Ok(result.output)
263    }
264
265    /// Execute a bash command in the workspace.
266    pub async fn bash(&self, command: &str) -> Result<String> {
267        let args = serde_json::json!({ "command": command });
268        let result = self.tool_executor.execute("bash", &args).await?;
269        Ok(result.output)
270    }
271
272    /// Search for files matching a glob pattern.
273    pub async fn glob(&self, pattern: &str) -> Result<Vec<String>> {
274        let args = serde_json::json!({ "pattern": pattern });
275        let result = self.tool_executor.execute("glob", &args).await?;
276        let files: Vec<String> = result
277            .output
278            .lines()
279            .filter(|l| !l.is_empty())
280            .map(|l| l.to_string())
281            .collect();
282        Ok(files)
283    }
284
285    /// Search file contents with a regex pattern.
286    pub async fn grep(&self, pattern: &str) -> Result<String> {
287        let args = serde_json::json!({ "pattern": pattern });
288        let result = self.tool_executor.execute("grep", &args).await?;
289        Ok(result.output)
290    }
291
292    /// Execute a tool by name, bypassing the LLM.
293    pub async fn tool(&self, name: &str, args: serde_json::Value) -> Result<ToolCallResult> {
294        let result = self.tool_executor.execute(name, &args).await?;
295        Ok(ToolCallResult {
296            name: name.to_string(),
297            output: result.output,
298            exit_code: result.exit_code,
299        })
300    }
301}
302
// ============================================================================
// Tests
// ============================================================================

#[cfg(test)]
mod tests {
    use super::*;
    use crate::config::{ModelConfig, ModelModalities, ProviderConfig};

    /// Inline JSON config shared by the `new`/`create` detection tests
    /// (previously duplicated verbatim in three tests).
    const SAMPLE_JSON: &str = r#"{
            "defaultModel": "anthropic/claude-sonnet-4-20250514",
            "providers": [{
                "name": "anthropic",
                "apiKey": "test-key",
                "models": [{"id": "claude-sonnet-4-20250514", "name": "Claude Sonnet 4"}]
            }]
        }"#;

    /// Inline HCL config shared by the `new`/`create` detection tests
    /// (previously duplicated verbatim in two tests).
    const SAMPLE_HCL: &str = r#"
            default_model = "anthropic/claude-sonnet-4-20250514"
            providers {
                name    = "anthropic"
                api_key = "test-key"
                models {
                    id   = "claude-sonnet-4-20250514"
                    name = "Claude Sonnet 4"
                }
            }
        "#;

    /// Build a minimal tool-calling model entry; only id/name/family vary
    /// between the two providers used in tests.
    fn test_model(id: &str, name: &str, family: &str) -> ModelConfig {
        ModelConfig {
            id: id.to_string(),
            name: name.to_string(),
            family: family.to_string(),
            api_key: None,
            base_url: None,
            attachment: false,
            reasoning: false,
            tool_call: true,
            temperature: true,
            release_date: None,
            modalities: ModelModalities::default(),
            cost: Default::default(),
            limit: Default::default(),
        }
    }

    /// Two-provider config (anthropic as default_model, plus openai) used by
    /// the session tests.
    fn test_config() -> CodeConfig {
        CodeConfig {
            default_model: Some("anthropic/claude-sonnet-4-20250514".to_string()),
            providers: vec![
                ProviderConfig {
                    name: "anthropic".to_string(),
                    api_key: Some("test-key".to_string()),
                    base_url: None,
                    models: vec![test_model(
                        "claude-sonnet-4-20250514",
                        "Claude Sonnet 4",
                        "claude-sonnet",
                    )],
                },
                ProviderConfig {
                    name: "openai".to_string(),
                    api_key: Some("test-openai-key".to_string()),
                    base_url: None,
                    models: vec![test_model("gpt-4o", "GPT-4o", "gpt-4")],
                },
            ],
            ..Default::default()
        }
    }

    #[tokio::test]
    async fn test_from_config() {
        assert!(Agent::from_config(test_config()).await.is_ok());
    }

    #[tokio::test]
    async fn test_session_default() {
        let agent = Agent::from_config(test_config()).await.unwrap();
        let session = agent.session("/tmp/test-workspace", None);
        assert!(session.is_ok());
        let debug = format!("{:?}", session.unwrap());
        assert!(debug.contains("AgentSession"));
    }

    #[tokio::test]
    async fn test_session_with_model_override() {
        let agent = Agent::from_config(test_config()).await.unwrap();
        let opts = SessionOptions::new().with_model("openai/gpt-4o");
        assert!(agent.session("/tmp/test-workspace", Some(opts)).is_ok());
    }

    #[tokio::test]
    async fn test_session_with_invalid_model_format() {
        // Missing the "provider/" prefix — must be rejected.
        let agent = Agent::from_config(test_config()).await.unwrap();
        let opts = SessionOptions::new().with_model("gpt-4o");
        assert!(agent.session("/tmp/test-workspace", Some(opts)).is_err());
    }

    #[tokio::test]
    async fn test_session_with_model_not_found() {
        let agent = Agent::from_config(test_config()).await.unwrap();
        let opts = SessionOptions::new().with_model("openai/nonexistent");
        assert!(agent.session("/tmp/test-workspace", Some(opts)).is_err());
    }

    #[tokio::test]
    async fn test_new_with_json_string() {
        assert!(Agent::new(SAMPLE_JSON).await.is_ok());
    }

    #[tokio::test]
    async fn test_new_with_hcl_string() {
        assert!(Agent::new(SAMPLE_HCL).await.is_ok());
    }

    #[tokio::test]
    async fn test_create_alias_json() {
        assert!(Agent::create(SAMPLE_JSON).await.is_ok());
    }

    #[tokio::test]
    async fn test_create_alias_hcl() {
        assert!(Agent::create(SAMPLE_HCL).await.is_ok());
    }

    #[tokio::test]
    async fn test_create_and_new_produce_same_result() {
        let agent_new = Agent::new(SAMPLE_JSON).await;
        let agent_create = Agent::create(SAMPLE_JSON).await;
        assert!(agent_new.is_ok());
        assert!(agent_create.is_ok());

        // Both should produce working sessions.
        assert!(agent_new.unwrap().session("/tmp/test-ws-new", None).is_ok());
        assert!(agent_create
            .unwrap()
            .session("/tmp/test-ws-create", None)
            .is_ok());
    }

    // Previously built a tokio Runtime by hand and called block_on; converted
    // to #[tokio::test] for consistency with every other async test here.
    #[tokio::test]
    async fn test_from_config_requires_default_model() {
        let config = CodeConfig {
            providers: vec![ProviderConfig {
                name: "anthropic".to_string(),
                api_key: Some("test-key".to_string()),
                base_url: None,
                models: vec![],
            }],
            ..Default::default()
        };
        assert!(Agent::from_config(config).await.is_err());
    }
}
499}