use std::path::Path;
use std::sync::Arc;
use anyhow::{Context, Result};
use super::runtime_agent::RuntimeAgent;
use crate::tools::{AsyncTool, Tool, ToolRegistry};
/// Provider-tagged reasoning configuration: exactly one backend's
/// reasoning settings can be active on a builder at a time.
#[derive(Debug, Clone)]
pub enum ReasoningProvider {
    /// Reasoning settings for the OpenAI backend.
    OpenAI(crate::llm::openai::ReasoningConfig),
    /// Reasoning settings for the OpenRouter backend.
    OpenRouter(crate::llm::openrouter::config::ReasoningConfig),
}
/// Fluent builder for a [`RuntimeAgent`].
///
/// Every setting except `name` is optional; unset options are handed to
/// `RuntimeAgent::with_config` as `None` and resolved there.
pub struct AgentBuilder {
    // Identity and provider selection
    name: String,
    provider: Option<crate::llm::LlmProvider>,
    model: Option<String>,
    // System prompt: inline text or a file read at build time (mutually exclusive)
    system_prompt: Option<String>,
    system_prompt_file: Option<std::path::PathBuf>,
    // Tooling: optional shared registry plus tools queued for registration
    registry: Option<Arc<ToolRegistry>>,
    tools: Vec<Arc<dyn Tool>>,
    async_tools: Vec<Arc<dyn AsyncTool>>,
    // Credentials
    anthropic_api_key: Option<String>,
    openrouter_api_key: Option<String>,
    vertex_api_key: Option<String>,
    anthropic_pricing_model: Option<String>,
    // Anthropic-specific request options
    thinking: Option<crate::llm::anthropic::ThinkingConfig>,
    caching: Option<crate::llm::anthropic::CachingConfig>,
    tool_choice: Option<crate::llm::anthropic::ToolChoiceConfig>,
    effort: Option<crate::llm::anthropic::EffortLevel>,
    beta_features: Option<crate::llm::anthropic::BetaFeatures>,
    retry: Option<crate::llm::anthropic::RetryConfig>,
    rate_limiter: Option<crate::llm::anthropic::RateLimiterConfig>,
    // Provider-tagged reasoning config and OpenRouter routing options
    reasoning: Option<ReasoningProvider>,
    provider_preferences: Option<crate::llm::openrouter::config::ProviderPreferences>,
    openrouter_transforms: Option<Vec<String>>,
    openrouter_models: Option<Vec<String>>,
    // OpenAI-specific options
    openai_api_key: Option<String>,
    openai_codex_access_token: Option<String>,
    openai_service_tier: Option<crate::llm::openai::ServiceTier>,
    openai_text_verbosity: Option<crate::llm::openai::TextVerbosity>,
    openai_pricing_model: Option<String>,
    // Sampling parameters
    max_tokens: Option<u32>,
    temperature: Option<f32>,
    top_p: Option<f32>,
    top_k: Option<u32>,
    stop_sequences: Option<Vec<String>>,
    // Logging and tracing
    logs_dir: Option<std::path::PathBuf>,
    log_level: Option<String>,
    log_format: Option<crate::config::LogFormat>,
    enable_traces: Option<bool>,
    trace_format: Option<crate::config::TraceFormat>,
    // Session-history persistence
    history_enabled: Option<bool>,
    history_db_path: Option<std::path::PathBuf>,
    history_auto_save: Option<bool>,
    // Tool-execution concurrency and continuation behavior
    provider_parallel_tool_calls: bool,
    max_concurrent_tool_executions: usize,
    required_completion_tools: Vec<Arc<dyn Tool>>,
    max_continuations: usize,
    continuation_message: Option<String>,
}
impl AgentBuilder {
/// Start building an agent with the given name.
///
/// Every other setting begins unset (or at its documented default) and
/// is filled in through the fluent setters before [`Self::build`].
pub fn new(name: impl Into<String>) -> Self {
    Self {
        // Identity and provider selection
        name: name.into(),
        provider: None,
        model: None,
        // Prompt sources
        system_prompt: None,
        system_prompt_file: None,
        // Tooling
        registry: None,
        tools: Vec::new(),
        async_tools: Vec::new(),
        // Credentials
        anthropic_api_key: None,
        openrouter_api_key: None,
        vertex_api_key: None,
        anthropic_pricing_model: None,
        // Anthropic options
        thinking: None,
        caching: None,
        tool_choice: None,
        effort: None,
        beta_features: None,
        retry: None,
        rate_limiter: None,
        // Reasoning and OpenRouter routing
        reasoning: None,
        provider_preferences: None,
        openrouter_transforms: None,
        openrouter_models: None,
        // OpenAI options
        openai_api_key: None,
        openai_codex_access_token: None,
        openai_service_tier: None,
        openai_text_verbosity: None,
        openai_pricing_model: None,
        // Sampling
        max_tokens: None,
        temperature: None,
        top_p: None,
        top_k: None,
        stop_sequences: None,
        // Logging and tracing
        logs_dir: None,
        log_level: None,
        log_format: None,
        enable_traces: None,
        trace_format: None,
        // History persistence
        history_enabled: None,
        history_db_path: None,
        history_auto_save: None,
        // Tool execution defaults: serial execution, two continuation rounds
        provider_parallel_tool_calls: false,
        max_concurrent_tool_executions: 1,
        required_completion_tools: Vec::new(),
        max_continuations: 2,
        continuation_message: None,
    }
}
pub fn provider(mut self, provider: crate::llm::LlmProvider) -> Self {
self.provider = Some(provider);
self
}
pub fn model(mut self, model: impl Into<String>) -> Self {
self.model = Some(model.into());
self
}
pub fn anthropic_api_key(mut self, key: impl Into<String>) -> Self {
self.anthropic_api_key = Some(key.into());
self
}
pub fn openrouter_api_key(mut self, key: impl Into<String>) -> Self {
self.openrouter_api_key = Some(key.into());
self
}
pub fn thinking(mut self, config: crate::llm::anthropic::ThinkingConfig) -> Self {
self.thinking = Some(config);
self
}
pub fn caching(mut self, config: crate::llm::anthropic::CachingConfig) -> Self {
self.caching = Some(config);
self
}
pub fn tool_choice(mut self, config: crate::llm::anthropic::ToolChoiceConfig) -> Self {
self.tool_choice = Some(config);
self
}
pub fn effort(mut self, level: crate::llm::anthropic::EffortLevel) -> Self {
self.effort = Some(level);
self
}
pub fn beta_features(mut self, features: crate::llm::anthropic::BetaFeatures) -> Self {
self.beta_features = Some(features);
self
}
pub fn retry(mut self, config: crate::llm::anthropic::RetryConfig) -> Self {
self.retry = Some(config);
self
}
pub fn disable_retry(mut self) -> Self {
self.retry = Some(crate::llm::anthropic::RetryConfig {
max_retries: 0,
initial_backoff_ms: 0,
max_backoff_ms: 0,
backoff_multiplier: 1.0,
jitter: false,
});
self
}
pub fn rate_limiter(mut self, config: crate::llm::anthropic::RateLimiterConfig) -> Self {
self.rate_limiter = Some(config);
self
}
pub fn enable_rate_limiter(mut self) -> Self {
self.rate_limiter = Some(crate::llm::anthropic::RateLimiterConfig {
enabled: true,
..Default::default()
});
self
}
pub fn reasoning(mut self, config: ReasoningProvider) -> Self {
self.reasoning = Some(config);
self
}
pub fn openai_reasoning(mut self, config: crate::llm::openai::ReasoningConfig) -> Self {
self.reasoning = Some(ReasoningProvider::OpenAI(config));
self
}
pub fn openai_text_verbosity(mut self, verbosity: crate::llm::openai::TextVerbosity) -> Self {
self.openai_text_verbosity = Some(verbosity);
self
}
pub fn openai_pricing_model(mut self, model: impl Into<String>) -> Self {
self.openai_pricing_model = Some(model.into());
self
}
pub fn anthropic_pricing_model(mut self, model: impl Into<String>) -> Self {
self.anthropic_pricing_model = Some(model.into());
self
}
pub fn openrouter_reasoning(
mut self,
config: crate::llm::openrouter::config::ReasoningConfig,
) -> Self {
self.reasoning = Some(ReasoningProvider::OpenRouter(config));
self
}
pub fn openai_api_key(mut self, key: impl Into<String>) -> Self {
self.openai_api_key = Some(key.into());
self
}
pub fn openai_codex_access_token(mut self, token: impl Into<String>) -> Self {
self.openai_codex_access_token = Some(token.into());
self
}
pub fn vertex_api_key(mut self, key: impl Into<String>) -> Self {
self.vertex_api_key = Some(key.into());
self
}
pub fn openai_service_tier(mut self, tier: crate::llm::openai::ServiceTier) -> Self {
self.openai_service_tier = Some(tier);
self
}
pub fn openrouter_provider_routing(
mut self,
prefs: crate::llm::openrouter::config::ProviderPreferences,
) -> Self {
self.provider_preferences = Some(prefs);
self
}
pub fn openrouter_transforms(mut self, transforms: Vec<String>) -> Self {
self.openrouter_transforms = Some(transforms);
self
}
pub fn openrouter_models(mut self, models: Vec<String>) -> Self {
self.openrouter_models = Some(models);
self
}
pub fn max_tokens(mut self, max_tokens: u32) -> Self {
self.max_tokens = Some(max_tokens);
self
}
pub fn temperature(mut self, temperature: f32) -> Self {
self.temperature = Some(temperature);
self
}
pub fn top_p(mut self, top_p: f32) -> Self {
self.top_p = Some(top_p);
self
}
pub fn top_k(mut self, top_k: u32) -> Self {
self.top_k = Some(top_k);
self
}
pub fn stop_sequences(mut self, sequences: Vec<String>) -> Self {
self.stop_sequences = Some(sequences);
self
}
pub fn logs_dir(mut self, path: impl Into<std::path::PathBuf>) -> Self {
self.logs_dir = Some(path.into());
self
}
pub fn log_level(mut self, level: impl Into<String>) -> Self {
self.log_level = Some(level.into());
self
}
pub fn log_format(mut self, format: crate::config::LogFormat) -> Self {
self.log_format = Some(format);
self
}
pub fn enable_traces(mut self) -> Self {
self.enable_traces = Some(true);
self
}
pub fn disable_traces(mut self) -> Self {
self.enable_traces = Some(false);
self
}
pub fn trace_format(mut self, format: crate::config::TraceFormat) -> Self {
self.trace_format = Some(format);
self
}
pub fn enable_history(mut self) -> Self {
self.history_enabled = Some(true);
if self.history_db_path.is_none() {
self.history_db_path = Some("data/sessions.db".into());
}
if self.history_auto_save.is_none() {
self.history_auto_save = Some(true);
}
self
}
pub fn disable_history(mut self) -> Self {
self.history_enabled = Some(false);
self
}
pub fn history_db_path(mut self, path: impl Into<std::path::PathBuf>) -> Self {
self.history_db_path = Some(path.into());
self
}
pub fn auto_save_sessions(mut self, auto_save: bool) -> Self {
self.history_auto_save = Some(auto_save);
self
}
pub fn require_completion_tools(mut self, tools: Vec<Arc<dyn Tool>>) -> Self {
self.required_completion_tools = tools;
self
}
pub fn max_continuations(mut self, count: usize) -> Self {
self.max_continuations = count;
self
}
pub fn continuation_message(mut self, message: impl Into<String>) -> Self {
self.continuation_message = Some(message.into());
self
}
pub fn system_prompt(mut self, prompt: impl Into<String>) -> Self {
self.system_prompt = Some(prompt.into());
self.system_prompt_file = None; self
}
pub fn system_prompt_file(mut self, path: impl AsRef<Path>) -> Self {
self.system_prompt_file = Some(path.as_ref().to_path_buf());
self.system_prompt = None; self
}
pub fn with_tool(mut self, tool: Arc<dyn Tool>) -> Self {
self.tools.push(tool);
self
}
pub fn with_async_tool(mut self, tool: Arc<dyn AsyncTool>) -> Self {
self.async_tools.push(tool);
self
}
pub fn with_tools(mut self, tools: Vec<Arc<dyn Tool>>) -> Self {
self.tools.extend(tools);
self
}
pub fn with_async_tools(mut self, tools: Vec<Arc<dyn AsyncTool>>) -> Self {
self.async_tools.extend(tools);
self
}
pub fn with_registry(mut self, registry: Arc<ToolRegistry>) -> Self {
self.registry = Some(registry);
self
}
/// Hand back a handle to the tool registry, creating one on first use.
fn ensure_registry(&mut self) -> Arc<ToolRegistry> {
    match self.registry {
        Some(ref registry) => Arc::clone(registry),
        None => {
            let fresh = Arc::new(ToolRegistry::new());
            self.registry = Some(Arc::clone(&fresh));
            fresh
        }
    }
}
/// Hand a piece of shared state to the tool registry.
pub fn manage<T>(mut self, state: T) -> Self
where
    T: Send + Sync + 'static,
{
    let registry = self.ensure_registry();
    registry.manage(state);
    self
}

/// Declare per-session state initialized with `T::default`.
pub fn session_state<T>(self) -> Self
where
    T: Default + Send + Sync + 'static,
{
    // `T::default` is a plain fn pointer, so it satisfies the closure
    // bound of `session_state_with`.
    self.session_state_with::<T, _>(T::default)
}

/// Declare per-session state initialized with a custom factory.
pub fn session_state_with<T, F>(mut self, init: F) -> Self
where
    T: Send + Sync + 'static,
    F: Fn() -> T + Send + Sync + 'static,
{
    let registry = self.ensure_registry();
    registry.session_state_with::<T, F>(init);
    self
}
/// Let the provider issue parallel tool calls and bound how many run at
/// once; the bound is clamped to at least 1.
pub fn enable_parallel_tool_calls(mut self, max_concurrency: usize) -> Self {
    self.provider_parallel_tool_calls = true;
    self.max_concurrent_tool_executions = usize::max(max_concurrency, 1);
    self
}
/// Finalize the builder and construct the [`RuntimeAgent`].
///
/// # Errors
/// Fails when no system prompt was supplied, or when the configured
/// prompt file cannot be read.
pub fn build(self) -> Result<RuntimeAgent> {
    // Resolve the prompt: inline text wins, then the file, otherwise error.
    let system_prompt = match (self.system_prompt, self.system_prompt_file) {
        (Some(prompt), _) => prompt,
        (None, Some(path)) => std::fs::read_to_string(&path)
            .with_context(|| format!("Failed to read system prompt: {}", path.display()))?,
        (None, None) => anyhow::bail!(
            "System prompt must be provided via system_prompt() or system_prompt_file()"
        ),
    };
    // Use the caller-supplied registry, or start a fresh one.
    let registry = self
        .registry
        .unwrap_or_else(|| Arc::new(ToolRegistry::new()));
    // Register everything queued on the builder.
    for tool in self.tools {
        registry.register(tool);
    }
    for tool in self.async_tools {
        registry.register_async(tool);
    }
    // Required-completion tools are registered too, and their names are
    // additionally reported to the runtime below.
    for tool in &self.required_completion_tools {
        registry.register(Arc::clone(tool));
    }
    let required_tool_names: Option<Vec<String>> =
        (!self.required_completion_tools.is_empty()).then(|| {
            self.required_completion_tools
                .iter()
                .map(|t| t.name().to_string())
                .collect()
        });
    // NOTE: positional arguments — the order must match
    // RuntimeAgent::with_config exactly.
    let agent = RuntimeAgent::with_config(
        &self.name,
        system_prompt,
        registry,
        self.provider,
        self.model,
        self.anthropic_api_key,
        self.openrouter_api_key,
        self.openai_api_key,
        self.openai_codex_access_token,
        self.vertex_api_key,
        self.openai_service_tier,
        self.openai_text_verbosity,
        self.openai_pricing_model,
        self.anthropic_pricing_model,
        self.thinking,
        self.caching,
        self.tool_choice,
        self.effort,
        self.beta_features,
        self.retry,
        self.rate_limiter,
        self.reasoning,
        self.provider_preferences,
        self.openrouter_transforms,
        self.openrouter_models,
        self.max_tokens,
        self.temperature,
        self.top_p,
        self.top_k,
        self.stop_sequences,
        self.logs_dir,
        self.log_level,
        self.log_format,
        self.enable_traces,
        self.trace_format,
        self.history_enabled,
        self.history_db_path,
        self.history_auto_save,
        required_tool_names,
        self.max_continuations,
        self.continuation_message,
        self.provider_parallel_tool_calls,
        self.max_concurrent_tool_executions,
    );
    Ok(agent)
}
/// Build the agent together with a receiver for streaming events.
///
/// NOTE(review): the sender half (`_tx`) is dropped immediately, so the
/// returned receiver is closed from the start and will never yield any
/// `StreamEvent` — presumably a placeholder until event emission is
/// wired to the agent. TODO confirm against RuntimeAgent's streaming
/// implementation.
pub fn build_with_stream(
    self,
) -> Result<(
    RuntimeAgent,
    tokio::sync::mpsc::UnboundedReceiver<super::streaming::StreamEvent>,
)> {
    let agent = self.build()?;
    let (_tx, rx) = tokio::sync::mpsc::unbounded_channel();
    Ok((agent, rx))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::agent::Agent;
    use crate::llm::ToolSpec;
    use serde_json::json;
    use std::io::Write;
    use tempfile::NamedTempFile;
    // Minimal Tool implementation: fixed spec, always succeeds.
    struct MockTool {
        name: String,
    }
    impl Tool for MockTool {
        fn name(&self) -> &str {
            &self.name
        }
        fn spec(&self) -> Result<ToolSpec> {
            Ok(serde_json::from_value(json!({
                "type": "function",
                "name": self.name,
                "description": "Mock tool",
                "parameters": {
                    "type": "object",
                    "properties": {}
                }
            }))?)
        }
        fn execute(&self, _args: serde_json::Value) -> Result<serde_json::Value> {
            Ok(json!({"success": true}))
        }
    }
    // A name plus a prompt is the minimum viable builder input.
    #[test]
    fn test_builder_basic() {
        let agent = AgentBuilder::new("test-agent")
            .system_prompt("Test prompt")
            .build()
            .unwrap();
        assert_eq!(agent.name(), "test-agent");
    }
    // The model string set on the builder is surfaced by the agent.
    #[test]
    fn test_builder_with_model() {
        let agent = AgentBuilder::new("test-agent")
            .model("anthropic/claude-3.5-sonnet")
            .system_prompt("Test prompt")
            .build()
            .unwrap();
        assert_eq!(agent.model(), "anthropic/claude-3.5-sonnet");
    }
    // Tools queued via with_tool() end up in the agent's registry.
    #[test]
    fn test_builder_with_tool() {
        let agent = AgentBuilder::new("test-agent")
            .system_prompt("Test prompt")
            .with_tool(Arc::new(MockTool {
                name: "test_tool".to_string(),
            }))
            .build()
            .unwrap();
        let tools = agent.available_tools().unwrap();
        assert_eq!(tools.len(), 1);
        assert_eq!(tools[0].name, "test_tool");
    }
    // with_tools() registers the whole batch.
    #[test]
    fn test_builder_with_multiple_tools() {
        let agent = AgentBuilder::new("test-agent")
            .system_prompt("Test prompt")
            .with_tools(vec![
                Arc::new(MockTool {
                    name: "tool1".to_string(),
                }),
                Arc::new(MockTool {
                    name: "tool2".to_string(),
                }),
            ])
            .build()
            .unwrap();
        let tools = agent.available_tools().unwrap();
        assert_eq!(tools.len(), 2);
    }
    // A prompt file is read verbatim at build time.
    #[test]
    fn test_builder_with_prompt_file() {
        let mut file = NamedTempFile::new().unwrap();
        file.write_all(b"File-based prompt").unwrap();
        file.flush().unwrap();
        let agent = AgentBuilder::new("test-agent")
            .system_prompt_file(file.path())
            .build()
            .unwrap();
        // NOTE(review): likely redundant with the module-level
        // `use crate::agent::Agent;` above — confirm both paths name
        // the same trait.
        use super::super::Agent;
        assert_eq!(agent.system_prompt().unwrap(), "File-based prompt");
    }
    // Building without any prompt source is rejected with a clear error.
    #[test]
    fn test_builder_missing_prompt() {
        let result = AgentBuilder::new("test-agent").build();
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("System prompt"));
    }
    // Pre-registered tools in a caller-supplied registry are preserved,
    // and tools added through the builder land in the same registry.
    #[test]
    fn test_builder_with_custom_registry() {
        let registry = Arc::new(ToolRegistry::new());
        registry.register(Arc::new(MockTool {
            name: "pre_registered".to_string(),
        }));
        let agent = AgentBuilder::new("test-agent")
            .system_prompt("Test prompt")
            .with_registry(registry.clone())
            .with_tool(Arc::new(MockTool {
                name: "added_later".to_string(),
            }))
            .build()
            .unwrap();
        let tools = agent.available_tools().unwrap();
        assert_eq!(tools.len(), 2);
    }
}