1use serde::{Deserialize, Serialize};
4use std::path::Path;
5
/// Root configuration for the agent, grouping all tunable settings.
///
/// Loaded from a TOML file via [`AgentConfig::from_file`], synthesized from
/// environment variables via [`AgentConfig::from_env`], or built with
/// [`Default`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentConfig {
    /// Model backend selection and generation parameters.
    pub model: ModelConfig,
    /// Step, retry, and timeout limits for agent runs.
    pub execution: ExecutionConfig,
    /// Guard-rails: directory allow-list and command block-list.
    pub safety: SafetyConfig,
    /// Tool discovery and enable/disable lists.
    pub tools: ToolConfig,
    /// Log level, destination, and format.
    pub logging: LoggingConfig,
}
15
/// Settings for the language-model backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Which provider/API the agent talks to.
    pub provider: ModelProvider,
    /// Provider-specific model identifier (e.g. "GLM-4.6").
    pub model_name: String,
    /// API key; in config files a `${ENV_VAR}` placeholder is resolved
    /// from the environment by [`AgentConfig::from_file`].
    pub api_key: Option<String>,
    /// Optional override for the API base URL.
    pub endpoint: Option<String>,
    /// Maximum number of tokens to generate per request.
    pub max_tokens: u32,
    /// Sampling temperature passed to the model.
    pub temperature: f32,
}
26
/// Supported model providers.
///
/// Variant names serialize in lowercase (`openai`, `anthropic`, `zhipu`,
/// `local`) because of `rename_all = "lowercase"`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ModelProvider {
    OpenAI,
    Anthropic,
    Zhipu,
    /// A locally hosted model. NOTE(review): the payload presumably names
    /// the local model or its endpoint — confirm against the consumer.
    Local(String),
}
36
/// Limits applied to an agent run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionConfig {
    /// Maximum number of agent steps before aborting.
    pub max_steps: u32,
    /// Overall timeout for a run, in seconds.
    pub timeout_seconds: u64,
    /// How many times a failed operation is retried.
    pub max_retries: u32,
    /// Delay between retries, in seconds.
    pub retry_delay_seconds: u64,
}
45
/// Guard-rail settings restricting what the agent may touch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SafetyConfig {
    /// Master switch for safety checks.
    pub enable_safety_checks: bool,
    /// Directories the agent is allowed to operate in.
    pub allowed_directories: Vec<String>,
    /// Command strings that must never be executed.
    /// NOTE(review): matching semantics (exact vs. prefix/substring) are
    /// defined by the executor, not here — confirm before relying on them.
    pub blocked_commands: Vec<String>,
}
53
/// Tool registration and filtering settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolConfig {
    /// Whether tools are discovered automatically.
    pub auto_discovery: bool,
    /// Optional path to additional user-provided tools.
    pub custom_tools_path: Option<String>,
    /// Names of tools to enable.
    pub enabled_tools: Vec<String>,
    /// Names of tools to disable.
    pub disabled_tools: Vec<String>,
}
62
/// Logging destination and verbosity.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoggingConfig {
    /// Log level name, e.g. "info".
    pub level: String,
    /// Optional log file path; `None` disables file logging.
    pub file: Option<String>,
    /// Whether to also log to the console.
    pub console: bool,
    /// Output format for log records.
    pub format: LogFormat,
}
71
/// Log record rendering styles; serialized lowercase
/// (`pretty`, `json`, `compact`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum LogFormat {
    Pretty,
    Json,
    Compact,
}
80
81impl AgentConfig {
82 pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, Box<dyn std::error::Error>> {
84 let content = std::fs::read_to_string(path)?;
85 let mut config: AgentConfig = toml::from_str(&content)?;
86
87 if let Some(ref api_key) = config.model.api_key {
89 if api_key.starts_with("${") && api_key.ends_with("}") {
90 let env_var = &api_key[2..api_key.len()-1];
91 config.model.api_key = std::env::var(env_var).ok();
92 }
93 }
94
95 Ok(config)
96 }
97
98 pub fn from_env() -> Result<Self, Box<dyn std::error::Error>> {
100 let provider = if std::env::var("ZHIPU_API_KEY").is_ok() {
101 ModelProvider::Zhipu
102 } else if std::env::var("ANTHROPIC_API_KEY").is_ok() {
103 ModelProvider::Anthropic
104 } else {
105 ModelProvider::OpenAI
106 };
107
108 let model_name = match provider {
109 ModelProvider::Zhipu => "GLM-4.6",
110 ModelProvider::Anthropic => "claude-3-sonnet-20240229",
111 _ => "gpt-4-turbo-preview",
112 };
113
114 let api_key = match provider {
115 ModelProvider::Zhipu => std::env::var("ZHIPU_API_KEY").ok(),
116 ModelProvider::Anthropic => std::env::var("ANTHROPIC_API_KEY").ok(),
117 ModelProvider::OpenAI => std::env::var("OPENAI_API_KEY").ok(),
118 ModelProvider::Local(_) => std::env::var("API_KEY").ok(),
119 };
120
121 let endpoint = match provider {
122 ModelProvider::Zhipu => Some("https://open.bigmodel.cn/api/paas/v4/".to_string()),
123 _ => std::env::var("MODEL_ENDPOINT").ok(),
124 };
125
126 Ok(AgentConfig {
127 model: ModelConfig {
128 provider,
129 model_name: model_name.to_string(),
130 api_key,
131 endpoint,
132 max_tokens: 4000,
133 temperature: 0.7,
134 },
135 execution: ExecutionConfig {
136 max_steps: 50,
137 timeout_seconds: 300,
138 max_retries: 3,
139 retry_delay_seconds: 2,
140 },
141 safety: SafetyConfig {
142 enable_safety_checks: true,
143 allowed_directories: vec![".".to_string(), "/tmp".to_string()],
144 blocked_commands: vec![
145 "rm -rf /".to_string(),
146 "format".to_string(),
147 "fdisk".to_string(),
148 "dd if=".to_string(),
149 ],
150 },
151 tools: ToolConfig {
152 auto_discovery: true,
153 custom_tools_path: None,
154 enabled_tools: vec![
155 "read_file".to_string(),
156 "write_file".to_string(),
157 "run_command".to_string(),
158 "list_files".to_string(),
159 ],
160 disabled_tools: vec![],
161 },
162 logging: LoggingConfig {
163 level: "info".to_string(),
164 file: Some("agent.log".to_string()),
165 console: true,
166 format: LogFormat::Pretty,
167 },
168 })
169 }
170
171 pub fn load_with_fallback<P: AsRef<Path>>(config_path: P) -> Result<Self, Box<dyn std::error::Error>> {
173 match Self::from_file(&config_path) {
174 Ok(config) => Ok(config),
175 Err(_) => {
176 tracing::warn!("Failed to load config file {:?}, using environment", config_path.as_ref());
177 Self::from_env()
178 }
179 }
180 }
181}
182
183impl Default for AgentConfig {
184 fn default() -> Self {
185 Self {
186 model: ModelConfig {
187 provider: ModelProvider::OpenAI,
188 model_name: "gpt-3.5-turbo".to_string(),
189 api_key: None,
190 endpoint: None,
191 max_tokens: 4000,
192 temperature: 0.7,
193 },
194 execution: ExecutionConfig {
195 max_steps: 50,
196 timeout_seconds: 300,
197 max_retries: 3,
198 retry_delay_seconds: 2,
199 },
200 safety: SafetyConfig {
201 enable_safety_checks: true,
202 allowed_directories: vec![".".to_string()],
203 blocked_commands: vec![],
204 },
205 tools: ToolConfig {
206 auto_discovery: true,
207 custom_tools_path: None,
208 enabled_tools: vec![],
209 disabled_tools: vec![],
210 },
211 logging: LoggingConfig {
212 level: "info".to_string(),
213 file: None,
214 console: true,
215 format: LogFormat::Pretty,
216 },
217 }
218 }
219}