use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::{Context, Result};
use async_trait::async_trait;
use tracing::info;
use super::{config::AgentConfig, Agent};
use crate::llm::ToolSpec;
use crate::tools::{
loader::load_tools, registry::ToolRegistry, AsyncTool, ToolConcurrency, ToolContext,
};
/// An agent whose definition (name, model, system prompt, tools) is
/// loaded from a TOML configuration file on disk.
pub struct TomlAgent {
    // Parsed contents of the TOML config file.
    config: AgentConfig,
    // Directory containing the config file; relative resource paths resolve here.
    base_dir: PathBuf,
    // System prompt text, read at load time from the file named in the config.
    system_prompt: String,
    // Tools loaded from the config, plus any registered afterwards.
    registry: Arc<ToolRegistry>,
    // Optional model id that takes precedence over the config's model.
    model_override: Option<String>,
}
impl TomlAgent {
    /// Loads an agent definition from a TOML file.
    ///
    /// Relative paths inside the config (system prompt, tool schemas and
    /// scripts) are resolved against the config file's parent directory.
    ///
    /// # Errors
    /// Returns an error if the config cannot be read or parsed, fails
    /// validation, the system prompt file cannot be read, or any tool
    /// fails to load.
    pub fn from_file(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref();
        info!(path = %path.display(), "Loading agent from TOML");

        let config = AgentConfig::from_file(path)
            .with_context(|| format!("Failed to load agent config: {}", path.display()))?;

        // All relative resources are resolved against the config's directory;
        // fall back to "." when the path has no parent (e.g. a bare root).
        let base_dir = path
            .parent()
            .map(|p| p.to_path_buf())
            .unwrap_or_else(|| PathBuf::from("."));

        config
            .validate(&base_dir)
            .context("Agent configuration validation failed")?;

        // Absolute prompt paths are honored as-is; relative ones resolve
        // against the config directory.
        let prompt_path = if config.agent.system_prompt.is_absolute() {
            config.agent.system_prompt.clone()
        } else {
            base_dir.join(&config.agent.system_prompt)
        };
        let system_prompt = std::fs::read_to_string(&prompt_path)
            .with_context(|| format!("Failed to read system prompt: {}", prompt_path.display()))?;

        let registry = Arc::new(ToolRegistry::new());
        // Fixed: the original line contained the mojibake `®istry`
        // (a decoded `&reg;` entity) in place of `&registry`.
        load_tools(&config.tools, &base_dir, &registry).context("Failed to load tools")?;

        info!(
            agent = %config.agent.name,
            tools = registry.len(),
            "Agent loaded successfully"
        );

        Ok(Self {
            config,
            base_dir,
            system_prompt,
            registry,
            model_override: None,
        })
    }

    /// Builder-style override of the model from the config.
    pub fn with_model(mut self, model: impl Into<String>) -> Self {
        self.model_override = Some(model.into());
        self
    }

    /// Effective model id: override > config value > default `"openai/gpt-5"`.
    pub fn model(&self) -> String {
        self.model_override
            .clone()
            .or_else(|| self.config.agent.model.clone())
            .unwrap_or_else(|| "openai/gpt-5".to_string())
    }

    /// Directory the config was loaded from; relative paths resolve here.
    pub fn base_dir(&self) -> &Path {
        &self.base_dir
    }

    /// Shared handle to the agent's tool registry.
    pub fn registry(&self) -> Arc<ToolRegistry> {
        Arc::clone(&self.registry)
    }

    /// The parsed agent configuration.
    pub fn config(&self) -> &AgentConfig {
        &self.config
    }

    /// Registers one extra synchronous tool (builder-style).
    pub fn with_additional_tool(self, tool: Arc<dyn crate::tools::Tool>) -> Self {
        self.registry.register(tool);
        self
    }

    /// Registers one extra asynchronous tool (builder-style).
    pub fn with_additional_async_tool(self, tool: Arc<dyn AsyncTool>) -> Self {
        self.registry.register_async(tool);
        self
    }

    /// Registers several extra synchronous tools (builder-style).
    pub fn with_additional_tools(self, tools: Vec<Arc<dyn crate::tools::Tool>>) -> Self {
        for tool in tools {
            self.registry.register(tool);
        }
        self
    }

    /// Registers several extra asynchronous tools (builder-style).
    pub fn with_additional_async_tools(self, tools: Vec<Arc<dyn AsyncTool>>) -> Self {
        for tool in tools {
            self.registry.register_async(tool);
        }
        self
    }

    /// Replaces the system prompt loaded from disk (builder-style).
    pub fn with_system_prompt_override(mut self, prompt: impl Into<String>) -> Self {
        self.system_prompt = prompt.into();
        self
    }
}
#[async_trait]
impl Agent for TomlAgent {
    /// The agent's configured name.
    fn name(&self) -> &str {
        self.config.agent.name.as_str()
    }

    /// A copy of the system prompt captured at load time.
    fn system_prompt(&self) -> Result<String> {
        Ok(self.system_prompt.clone())
    }

    /// Specifications for every tool currently registered.
    fn available_tools(&self) -> Result<Vec<ToolSpec>> {
        self.registry.specs()
    }

    /// Executes a tool synchronously by delegating to the registry.
    fn execute_tool(&self, name: &str, args: serde_json::Value) -> Result<serde_json::Value> {
        self.registry.execute(name, args)
    }

    /// Executes a tool with an execution context, delegating to the registry.
    async fn execute_tool_with_context(
        &self,
        name: &str,
        ctx: ToolContext,
        args: serde_json::Value,
    ) -> Result<serde_json::Value> {
        self.registry.execute_with_context(ctx, name, args).await
    }

    /// Concurrency policy for the named tool; tools the registry does not
    /// know about are conservatively treated as serial-only.
    fn tool_concurrency(&self, name: &str) -> ToolConcurrency {
        match self.registry.concurrency(name) {
            Some(policy) => policy,
            None => ToolConcurrency::SerialOnly,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[cfg(feature = "python")]
    use std::io::Write;
    #[cfg(feature = "python")]
    use tempfile::TempDir;

    /// Lays out a complete on-disk agent bundle (TOML config, system prompt,
    /// tool schema, and Python implementation) inside a fresh temp directory.
    /// Returns the directory guard (keeping the files alive) plus the config path.
    #[cfg(feature = "python")]
    fn create_test_agent_files() -> (TempDir, PathBuf) {
        let tmp = TempDir::new().unwrap();
        let root = tmp.path();

        let config_path = root.join("agent.toml");
        let mut config_file = std::fs::File::create(&config_path).unwrap();
        config_file
            .write_all(
                br#"
[agent]
name = "test_agent"
model = "openai/gpt-5"
system_prompt = "prompt.txt"
description = "Test agent"
[[tools]]
name = "echo"
schema = "echo.json"
implementation = { type = "python", script = "echo.py" }
"#,
            )
            .unwrap();

        std::fs::write(root.join("prompt.txt"), "You are a test assistant.").unwrap();

        std::fs::write(
            root.join("echo.json"),
            r#"{"type": "function", "name": "echo", "description": "Echo tool", "parameters": {"type": "object", "properties": {"message": {"type": "string"}}}}"#,
        )
        .unwrap();

        std::fs::write(
            root.join("echo.py"),
            r#"
def execute(args):
return {"output": args.get("message", "")}
"#,
        )
        .unwrap();

        (tmp, config_path)
    }

    /// End-to-end load: name, model, prompt, and tool specs all come through.
    #[cfg(feature = "python")]
    #[test]
    fn test_load_toml_agent() {
        let (_guard, config_path) = create_test_agent_files();
        let agent = TomlAgent::from_file(&config_path).unwrap();

        assert_eq!(agent.name(), "test_agent");
        assert_eq!(agent.model(), "openai/gpt-5");
        assert!(agent.system_prompt().unwrap().contains("test assistant"));

        let specs = agent.available_tools().unwrap();
        assert_eq!(specs.len(), 1);
        assert_eq!(specs[0].name, "echo");
    }

    /// A builder-supplied model takes precedence over the config's model.
    #[cfg(feature = "python")]
    #[test]
    fn test_agent_with_model_override() {
        let (_guard, config_path) = create_test_agent_files();
        let agent = TomlAgent::from_file(&config_path)
            .unwrap()
            .with_model("anthropic/claude-3.5-sonnet");
        assert_eq!(agent.model(), "anthropic/claude-3.5-sonnet");
    }

    /// Loading a path that does not exist surfaces an error, not a panic.
    #[test]
    fn test_load_nonexistent_config() {
        assert!(TomlAgent::from_file("nonexistent.toml").is_err());
    }
}