1use crate::config::agent::AgentProfile;
7use anyhow::{Context, Result};
8use directories::BaseDirs;
9use serde::{Deserialize, Serialize};
10use std::collections::HashMap;
11use std::path::PathBuf;
12
13const DEFAULT_CONFIG: &str =
15 include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/spec-ai.config.toml"));
16
17const CONFIG_FILE_NAME: &str = "spec-ai.config.toml";
19
/// Top-level application configuration, deserialized from `spec-ai.config.toml`.
///
/// Every field carries `#[serde(default)]`, so a partial (or empty) TOML
/// document still deserializes; missing sections fall back to their
/// `Default` impls.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AppConfig {
    /// Database settings (storage file path).
    #[serde(default)]
    pub database: DatabaseConfig,
    /// Model provider selection and sampling parameters.
    #[serde(default)]
    pub model: ModelConfig,
    /// UI settings (prompt string, theme).
    #[serde(default)]
    pub ui: UiConfig,
    /// Logging verbosity.
    #[serde(default)]
    pub logging: LoggingConfig,
    /// Audio capture / transcription settings.
    #[serde(default)]
    pub audio: AudioConfig,
    /// Mesh networking settings.
    #[serde(default)]
    pub mesh: MeshConfig,
    /// Plugin (custom tool) loading settings.
    #[serde(default)]
    pub plugins: PluginConfig,
    /// Named agent profiles, keyed by agent name.
    #[serde(default)]
    pub agents: HashMap<String, AgentProfile>,
    /// Agent used when none is specified; if set, it must be a key in
    /// `agents` (enforced by `validate`).
    #[serde(default)]
    pub default_agent: Option<String>,
}
51
52impl AppConfig {
53 pub fn load() -> Result<Self> {
55 if let Ok(content) = std::fs::read_to_string(CONFIG_FILE_NAME) {
57 return toml::from_str(&content)
58 .map_err(|e| anyhow::anyhow!("Failed to parse {}: {}", CONFIG_FILE_NAME, e));
59 }
60
61 if let Ok(base_dirs) =
63 BaseDirs::new().ok_or(anyhow::anyhow!("Could not determine home directory"))
64 {
65 let home_config = base_dirs.home_dir().join(".spec-ai").join(CONFIG_FILE_NAME);
66 if let Ok(content) = std::fs::read_to_string(&home_config) {
67 return toml::from_str(&content).map_err(|e| {
68 anyhow::anyhow!("Failed to parse {}: {}", home_config.display(), e)
69 });
70 }
71 }
72
73 if let Ok(config_path) = std::env::var("CONFIG_PATH") {
75 if let Ok(content) = std::fs::read_to_string(&config_path) {
76 return toml::from_str(&content)
77 .map_err(|e| anyhow::anyhow!("Failed to parse config: {}", e));
78 }
79 }
80
81 eprintln!(
83 "No configuration file found. Creating {} with default settings...",
84 CONFIG_FILE_NAME
85 );
86 if let Err(e) = std::fs::write(CONFIG_FILE_NAME, DEFAULT_CONFIG) {
87 eprintln!("Warning: Could not create {}: {}", CONFIG_FILE_NAME, e);
88 eprintln!("Continuing with default configuration in memory.");
89 } else {
90 eprintln!(
91 "Created {}. You can edit this file to customize your settings.",
92 CONFIG_FILE_NAME
93 );
94 }
95
96 toml::from_str(DEFAULT_CONFIG)
98 .map_err(|e| anyhow::anyhow!("Failed to parse embedded default config: {}", e))
99 }
100
101 pub fn load_from_file(path: &std::path::Path) -> Result<Self> {
104 match std::fs::read_to_string(path) {
106 Ok(content) => toml::from_str(&content).map_err(|e| {
107 anyhow::anyhow!("Failed to parse config file {}: {}", path.display(), e)
108 }),
109 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
110 eprintln!(
112 "Configuration file not found at {}. Creating with default settings...",
113 path.display()
114 );
115
116 if let Some(parent) = path.parent() {
118 std::fs::create_dir_all(parent)
119 .context(format!("Failed to create directory {}", parent.display()))?;
120 }
121
122 std::fs::write(path, DEFAULT_CONFIG).context(format!(
124 "Failed to create config file at {}",
125 path.display()
126 ))?;
127
128 eprintln!(
129 "Created {}. You can edit this file to customize your settings.",
130 path.display()
131 );
132
133 toml::from_str(DEFAULT_CONFIG)
135 .map_err(|e| anyhow::anyhow!("Failed to parse embedded default config: {}", e))
136 }
137 Err(e) => Err(anyhow::anyhow!(
138 "Failed to read config file {}: {}",
139 path.display(),
140 e
141 )),
142 }
143 }
144
145 pub fn validate(&self) -> Result<()> {
147 if self.model.provider.is_empty() {
149 return Err(anyhow::anyhow!("Model provider cannot be empty"));
150 }
151 {
153 let p = self.model.provider.to_lowercase();
154 let known = ["mock", "openai", "anthropic", "ollama", "mlx", "lmstudio"];
155 if !known.contains(&p.as_str()) {
156 return Err(anyhow::anyhow!(
157 "Invalid model provider: {}",
158 self.model.provider
159 ));
160 }
161 }
162
163 if self.model.temperature < 0.0 || self.model.temperature > 2.0 {
165 return Err(anyhow::anyhow!(
166 "Temperature must be between 0.0 and 2.0, got {}",
167 self.model.temperature
168 ));
169 }
170
171 match self.logging.level.as_str() {
173 "trace" | "debug" | "info" | "warn" | "error" => {}
174 _ => return Err(anyhow::anyhow!("Invalid log level: {}", self.logging.level)),
175 }
176
177 if let Some(default_agent) = &self.default_agent {
179 if !self.agents.contains_key(default_agent) {
180 return Err(anyhow::anyhow!(
181 "Default agent '{}' not found in agents map",
182 default_agent
183 ));
184 }
185 }
186
187 Ok(())
188 }
189
190 pub fn apply_env_overrides(&mut self) {
192 fn first(a: &str, b: &str) -> Option<String> {
194 std::env::var(a).ok().or_else(|| std::env::var(b).ok())
195 }
196
197 if let Some(provider) = first("AGENT_MODEL_PROVIDER", "SPEC_AI_PROVIDER") {
198 self.model.provider = provider;
199 }
200 if let Some(model_name) = first("AGENT_MODEL_NAME", "SPEC_AI_MODEL") {
201 self.model.model_name = Some(model_name);
202 }
203 if let Some(api_key_source) = first("AGENT_API_KEY_SOURCE", "SPEC_AI_API_KEY_SOURCE") {
204 self.model.api_key_source = Some(api_key_source);
205 }
206 if let Some(temp_str) = first("AGENT_MODEL_TEMPERATURE", "SPEC_AI_TEMPERATURE") {
207 if let Ok(temp) = temp_str.parse::<f32>() {
208 self.model.temperature = temp;
209 }
210 }
211 if let Some(level) = first("AGENT_LOG_LEVEL", "SPEC_AI_LOG_LEVEL") {
212 self.logging.level = level;
213 }
214 if let Some(db_path) = first("AGENT_DB_PATH", "SPEC_AI_DB_PATH") {
215 self.database.path = PathBuf::from(db_path);
216 }
217 if let Some(theme) = first("AGENT_UI_THEME", "SPEC_AI_UI_THEME") {
218 self.ui.theme = theme;
219 }
220 if let Some(default_agent) = first("AGENT_DEFAULT_AGENT", "SPEC_AI_DEFAULT_AGENT") {
221 self.default_agent = Some(default_agent);
222 }
223 }
224
225 pub fn summary(&self) -> String {
227 let mut summary = String::new();
228 summary.push_str("Configuration loaded:\n");
229 summary.push_str(&format!("Database: {}\n", self.database.path.display()));
230 summary.push_str(&format!("Model Provider: {}\n", self.model.provider));
231 if let Some(model) = &self.model.model_name {
232 summary.push_str(&format!("Model Name: {}\n", model));
233 }
234 summary.push_str(&format!("Temperature: {}\n", self.model.temperature));
235 summary.push_str(&format!("Logging Level: {}\n", self.logging.level));
236 summary.push_str(&format!("UI Theme: {}\n", self.ui.theme));
237 summary.push_str(&format!("Available Agents: {}\n", self.agents.len()));
238 if let Some(default) = &self.default_agent {
239 summary.push_str(&format!("Default Agent: {}\n", default));
240 }
241 summary
242 }
243}
244
/// Database section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    /// Path to the database file (default: `spec-ai.duckdb`).
    pub path: PathBuf,
}
251
252impl Default for DatabaseConfig {
253 fn default() -> Self {
254 Self {
255 path: PathBuf::from("spec-ai.duckdb"),
256 }
257 }
258}
259
/// Model/provider section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Provider name; validated case-insensitively against a known list
    /// ("mock", "openai", "anthropic", "ollama", "mlx", "lmstudio").
    pub provider: String,
    /// Model identifier, provider-specific.
    #[serde(default)]
    pub model_name: Option<String>,
    /// Embeddings model identifier, if distinct from the chat model.
    #[serde(default)]
    pub embeddings_model: Option<String>,
    /// Where to obtain the API key from — presumably an env-var name or
    /// similar indirection; confirm against the consuming code.
    #[serde(default)]
    pub api_key_source: Option<String>,
    /// Sampling temperature; must be within [0.0, 2.0] (default 0.7).
    #[serde(default = "default_temperature")]
    pub temperature: f32,
}
278
/// Serde default for `ModelConfig::temperature`.
fn default_temperature() -> f32 {
    0.7_f32
}
282
283impl Default for ModelConfig {
284 fn default() -> Self {
285 Self {
286 provider: "mock".to_string(),
287 model_name: None,
288 embeddings_model: None,
289 api_key_source: None,
290 temperature: default_temperature(),
291 }
292 }
293}
294
/// UI section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UiConfig {
    /// Prompt string shown to the user (default: `"> "`).
    pub prompt: String,
    /// Theme name (default: `"default"`).
    pub theme: String,
}
303
304impl Default for UiConfig {
305 fn default() -> Self {
306 Self {
307 prompt: "> ".to_string(),
308 theme: "default".to_string(),
309 }
310 }
311}
312
/// Logging section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoggingConfig {
    /// Log level; validated to be one of "trace", "debug", "info", "warn",
    /// "error" (default: "info").
    pub level: String,
}
319
320impl Default for LoggingConfig {
321 fn default() -> Self {
322 Self {
323 level: "info".to_string(),
324 }
325 }
326}
327
/// Mesh-networking section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeshConfig {
    /// Whether mesh networking is enabled (serde default: false).
    #[serde(default)]
    pub enabled: bool,
    /// Port the mesh registry listens on (default: 3000).
    #[serde(default = "default_registry_port")]
    pub registry_port: u16,
    /// Seconds between heartbeats (default: 5).
    #[serde(default = "default_heartbeat_interval")]
    pub heartbeat_interval_secs: u64,
    /// Seconds without contact before the leader is considered lost
    /// (default: 15).
    #[serde(default = "default_leader_timeout")]
    pub leader_timeout_secs: u64,
    /// Replication factor (default: 2).
    #[serde(default = "default_replication_factor")]
    pub replication_factor: usize,
    /// NOTE(review): the bare serde default here is `false`, but the manual
    /// `Default` impl sets `true` — confirm which default is intended.
    #[serde(default)]
    pub auto_join: bool,
}
350
/// Serde default for `MeshConfig::registry_port`.
fn default_registry_port() -> u16 {
    3_000
}

/// Serde default for `MeshConfig::heartbeat_interval_secs`.
fn default_heartbeat_interval() -> u64 {
    5
}

/// Serde default for `MeshConfig::leader_timeout_secs`.
fn default_leader_timeout() -> u64 {
    15
}

/// Serde default for `MeshConfig::replication_factor`.
fn default_replication_factor() -> usize {
    2
}
366
367impl Default for MeshConfig {
368 fn default() -> Self {
369 Self {
370 enabled: false,
371 registry_port: default_registry_port(),
372 heartbeat_interval_secs: default_heartbeat_interval(),
373 leader_timeout_secs: default_leader_timeout(),
374 replication_factor: default_replication_factor(),
375 auto_join: true,
376 }
377 }
378}
379
/// Audio capture / transcription section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AudioConfig {
    /// Whether audio features are enabled (serde default: false).
    #[serde(default)]
    pub enabled: bool,
    /// Transcription provider (default: "vttrs").
    #[serde(default = "default_transcription_provider")]
    pub provider: String,
    /// Transcription model identifier, if any. Note the serde default is
    /// `None`, while the manual `Default` impl uses `Some("whisper-1")`.
    #[serde(default)]
    pub model: Option<String>,
    /// Where to obtain the transcription API key from — presumably an
    /// env-var name; verify against the consuming code.
    #[serde(default)]
    pub api_key_source: Option<String>,
    /// Run transcription on-device rather than via a remote service.
    #[serde(default)]
    pub on_device: bool,
    /// Custom transcription endpoint URL, if any.
    #[serde(default)]
    pub endpoint: Option<String>,
    /// Length of each audio chunk in seconds (default: 5.0).
    #[serde(default = "default_chunk_duration")]
    pub chunk_duration_secs: f64,
    /// Default recording length in seconds (default: 30).
    #[serde(default = "default_duration")]
    pub default_duration_secs: u64,
    /// NOTE(review): appears to duplicate `default_duration_secs` (same
    /// default fn) — confirm whether one of the two is a leftover alias.
    #[serde(default = "default_duration")]
    pub default_duration: u64,
    /// Optional path to write captured audio to.
    #[serde(default)]
    pub out_file: Option<String>,
    /// Language hint for transcription, if any.
    #[serde(default)]
    pub language: Option<String>,
    /// Automatically respond after transcription (serde default: false).
    #[serde(default)]
    pub auto_respond: bool,
    /// Mock scenario name used by the mock audio path
    /// (default: "simple_conversation").
    #[serde(default = "default_mock_scenario")]
    pub mock_scenario: String,
    /// Delay between mock events in milliseconds (default: 500).
    #[serde(default = "default_event_delay_ms")]
    pub event_delay_ms: u64,
}
426
/// Serde default for `AudioConfig::provider`.
fn default_transcription_provider() -> String {
    String::from("vttrs")
}

/// Serde default for `AudioConfig::chunk_duration_secs`.
fn default_chunk_duration() -> f64 {
    5.0
}

/// Serde default for both `AudioConfig::default_duration_secs` and
/// `AudioConfig::default_duration`.
fn default_duration() -> u64 {
    30
}

/// Serde default for `AudioConfig::mock_scenario`.
fn default_mock_scenario() -> String {
    String::from("simple_conversation")
}

/// Serde default for `AudioConfig::event_delay_ms`.
fn default_event_delay_ms() -> u64 {
    500
}
446
447impl Default for AudioConfig {
448 fn default() -> Self {
449 Self {
450 enabled: false,
451 provider: default_transcription_provider(),
452 model: Some("whisper-1".to_string()),
453 api_key_source: None,
454 on_device: false,
455 endpoint: None,
456 chunk_duration_secs: default_chunk_duration(),
457 default_duration_secs: default_duration(),
458 default_duration: default_duration(),
459 out_file: None,
460 language: None,
461 auto_respond: false,
462 mock_scenario: default_mock_scenario(),
463 event_delay_ms: default_event_delay_ms(),
464 }
465 }
466}
467
/// Plugin (custom tool) section of the configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginConfig {
    /// Whether plugin loading is enabled (serde default: false).
    #[serde(default)]
    pub enabled: bool,

    /// Directory to load custom tools from
    /// (default: `~/.spec-ai/tools`, tilde not expanded here).
    #[serde(default = "default_plugins_dir")]
    pub custom_tools_dir: PathBuf,

    /// Keep loading remaining plugins when one fails (default: true).
    #[serde(default = "default_continue_on_error")]
    pub continue_on_error: bool,

    /// Allow plugins to replace built-in tools (serde default: false).
    #[serde(default)]
    pub allow_override_builtin: bool,
}
487
/// Serde default for `PluginConfig::custom_tools_dir`. The `~` is stored
/// literally; any expansion happens elsewhere.
fn default_plugins_dir() -> PathBuf {
    "~/.spec-ai/tools".into()
}

/// Serde default for `PluginConfig::continue_on_error`.
fn default_continue_on_error() -> bool {
    true
}
495
496impl Default for PluginConfig {
497 fn default() -> Self {
498 Self {
499 enabled: false,
500 custom_tools_dir: default_plugins_dir(),
501 continue_on_error: true,
502 allow_override_builtin: false,
503 }
504 }
505}