spec_ai_config/config/
agent_config.rs

1//! Application-level configuration
2//!
3//! Defines the top-level application configuration, including model settings,
4//! database configuration, UI preferences, and logging.
5
6use crate::config::agent::AgentProfile;
7use anyhow::{Context, Result};
8use directories::BaseDirs;
9use serde::{Deserialize, Serialize};
10use std::collections::HashMap;
11use std::path::PathBuf;
12
/// Embedded default configuration file, baked in at compile time from the
/// crate root so a default config can always be materialized on first run.
const DEFAULT_CONFIG: &str =
    include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/spec-ai.config.toml"));

/// Configuration file name, looked up in the CWD and under `~/.spec-ai/`.
const CONFIG_FILE_NAME: &str = "spec-ai.config.toml";
19
/// Top-level application configuration.
///
/// Deserialized from `spec-ai.config.toml`. Every section carries
/// `#[serde(default)]`, so any table may be omitted from the TOML file and
/// will fall back to that section's `Default` impl.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AppConfig {
    /// Database configuration
    #[serde(default)]
    pub database: DatabaseConfig,
    /// Model provider configuration
    #[serde(default)]
    pub model: ModelConfig,
    /// UI configuration
    #[serde(default)]
    pub ui: UiConfig,
    /// Logging configuration
    #[serde(default)]
    pub logging: LoggingConfig,
    /// Audio transcription configuration
    #[serde(default)]
    pub audio: AudioConfig,
    /// Mesh networking configuration
    #[serde(default)]
    pub mesh: MeshConfig,
    /// Plugin configuration for custom tools
    #[serde(default)]
    pub plugins: PluginConfig,
    /// Available agent profiles, keyed by agent name
    #[serde(default)]
    pub agents: HashMap<String, AgentProfile>,
    /// Default agent to use (if not specified); must be a key of `agents`
    /// (enforced by `validate`)
    #[serde(default)]
    pub default_agent: Option<String>,
}
51
52impl AppConfig {
53    /// Load configuration from file or create a default configuration
54    pub fn load() -> Result<Self> {
55        // Try to load from spec-ai.config.toml in current directory
56        if let Ok(content) = std::fs::read_to_string(CONFIG_FILE_NAME) {
57            return toml::from_str(&content)
58                .map_err(|e| anyhow::anyhow!("Failed to parse {}: {}", CONFIG_FILE_NAME, e));
59        }
60
61        // Try to load from ~/.spec-ai/spec-ai.config.toml
62        if let Ok(base_dirs) =
63            BaseDirs::new().ok_or(anyhow::anyhow!("Could not determine home directory"))
64        {
65            let home_config = base_dirs.home_dir().join(".spec-ai").join(CONFIG_FILE_NAME);
66            if let Ok(content) = std::fs::read_to_string(&home_config) {
67                return toml::from_str(&content).map_err(|e| {
68                    anyhow::anyhow!("Failed to parse {}: {}", home_config.display(), e)
69                });
70            }
71        }
72
73        // Try to load from environment variable CONFIG_PATH
74        if let Ok(config_path) = std::env::var("CONFIG_PATH") {
75            if let Ok(content) = std::fs::read_to_string(&config_path) {
76                return toml::from_str(&content)
77                    .map_err(|e| anyhow::anyhow!("Failed to parse config: {}", e));
78            }
79        }
80
81        // No config file found - create one from embedded default
82        eprintln!(
83            "No configuration file found. Creating {} with default settings...",
84            CONFIG_FILE_NAME
85        );
86        if let Err(e) = std::fs::write(CONFIG_FILE_NAME, DEFAULT_CONFIG) {
87            eprintln!("Warning: Could not create {}: {}", CONFIG_FILE_NAME, e);
88            eprintln!("Continuing with default configuration in memory.");
89        } else {
90            eprintln!(
91                "Created {}. You can edit this file to customize your settings.",
92                CONFIG_FILE_NAME
93            );
94        }
95
96        // Parse and return the embedded default config
97        toml::from_str(DEFAULT_CONFIG)
98            .map_err(|e| anyhow::anyhow!("Failed to parse embedded default config: {}", e))
99    }
100
101    /// Load configuration from a specific file path
102    /// If the file doesn't exist, creates it with default settings
103    pub fn load_from_file(path: &std::path::Path) -> Result<Self> {
104        // Try to read existing file
105        match std::fs::read_to_string(path) {
106            Ok(content) => toml::from_str(&content).map_err(|e| {
107                anyhow::anyhow!("Failed to parse config file {}: {}", path.display(), e)
108            }),
109            Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
110                // File doesn't exist - create it with default config
111                eprintln!(
112                    "Configuration file not found at {}. Creating with default settings...",
113                    path.display()
114                );
115
116                // Create parent directories if needed
117                if let Some(parent) = path.parent() {
118                    std::fs::create_dir_all(parent)
119                        .context(format!("Failed to create directory {}", parent.display()))?;
120                }
121
122                // Write default config
123                std::fs::write(path, DEFAULT_CONFIG).context(format!(
124                    "Failed to create config file at {}",
125                    path.display()
126                ))?;
127
128                eprintln!(
129                    "Created {}. You can edit this file to customize your settings.",
130                    path.display()
131                );
132
133                // Parse and return the embedded default config
134                toml::from_str(DEFAULT_CONFIG)
135                    .map_err(|e| anyhow::anyhow!("Failed to parse embedded default config: {}", e))
136            }
137            Err(e) => Err(anyhow::anyhow!(
138                "Failed to read config file {}: {}",
139                path.display(),
140                e
141            )),
142        }
143    }
144
145    /// Validate the configuration
146    pub fn validate(&self) -> Result<()> {
147        // Validate model provider: must be non-empty and supported
148        if self.model.provider.is_empty() {
149            return Err(anyhow::anyhow!("Model provider cannot be empty"));
150        }
151        // Validate against known provider names independent of compile-time feature flags
152        {
153            let p = self.model.provider.to_lowercase();
154            let known = ["mock", "openai", "anthropic", "ollama", "mlx", "lmstudio"];
155            if !known.contains(&p.as_str()) {
156                return Err(anyhow::anyhow!(
157                    "Invalid model provider: {}",
158                    self.model.provider
159                ));
160            }
161        }
162
163        // Validate temperature
164        if self.model.temperature < 0.0 || self.model.temperature > 2.0 {
165            return Err(anyhow::anyhow!(
166                "Temperature must be between 0.0 and 2.0, got {}",
167                self.model.temperature
168            ));
169        }
170
171        // Validate log level
172        match self.logging.level.as_str() {
173            "trace" | "debug" | "info" | "warn" | "error" => {}
174            _ => return Err(anyhow::anyhow!("Invalid log level: {}", self.logging.level)),
175        }
176
177        // If a default agent is specified, it must exist in the agents map
178        if let Some(default_agent) = &self.default_agent {
179            if !self.agents.contains_key(default_agent) {
180                return Err(anyhow::anyhow!(
181                    "Default agent '{}' not found in agents map",
182                    default_agent
183                ));
184            }
185        }
186
187        Ok(())
188    }
189
190    /// Apply environment variable overrides to the configuration
191    pub fn apply_env_overrides(&mut self) {
192        // Helper: prefer AGENT_* over SPEC_AI_* if both present
193        fn first(a: &str, b: &str) -> Option<String> {
194            std::env::var(a).ok().or_else(|| std::env::var(b).ok())
195        }
196
197        if let Some(provider) = first("AGENT_MODEL_PROVIDER", "SPEC_AI_PROVIDER") {
198            self.model.provider = provider;
199        }
200        if let Some(model_name) = first("AGENT_MODEL_NAME", "SPEC_AI_MODEL") {
201            self.model.model_name = Some(model_name);
202        }
203        if let Some(api_key_source) = first("AGENT_API_KEY_SOURCE", "SPEC_AI_API_KEY_SOURCE") {
204            self.model.api_key_source = Some(api_key_source);
205        }
206        if let Some(temp_str) = first("AGENT_MODEL_TEMPERATURE", "SPEC_AI_TEMPERATURE") {
207            if let Ok(temp) = temp_str.parse::<f32>() {
208                self.model.temperature = temp;
209            }
210        }
211        if let Some(level) = first("AGENT_LOG_LEVEL", "SPEC_AI_LOG_LEVEL") {
212            self.logging.level = level;
213        }
214        if let Some(db_path) = first("AGENT_DB_PATH", "SPEC_AI_DB_PATH") {
215            self.database.path = PathBuf::from(db_path);
216        }
217        if let Some(theme) = first("AGENT_UI_THEME", "SPEC_AI_UI_THEME") {
218            self.ui.theme = theme;
219        }
220        if let Some(default_agent) = first("AGENT_DEFAULT_AGENT", "SPEC_AI_DEFAULT_AGENT") {
221            self.default_agent = Some(default_agent);
222        }
223    }
224
225    /// Get a summary of the configuration
226    pub fn summary(&self) -> String {
227        let mut summary = String::new();
228        summary.push_str("Configuration loaded:\n");
229        summary.push_str(&format!("Database: {}\n", self.database.path.display()));
230        summary.push_str(&format!("Model Provider: {}\n", self.model.provider));
231        if let Some(model) = &self.model.model_name {
232            summary.push_str(&format!("Model Name: {}\n", model));
233        }
234        summary.push_str(&format!("Temperature: {}\n", self.model.temperature));
235        summary.push_str(&format!("Logging Level: {}\n", self.logging.level));
236        summary.push_str(&format!("UI Theme: {}\n", self.ui.theme));
237        summary.push_str(&format!("Available Agents: {}\n", self.agents.len()));
238        if let Some(default) = &self.default_agent {
239            summary.push_str(&format!("Default Agent: {}\n", default));
240        }
241        summary
242    }
243}
244
/// Database configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DatabaseConfig {
    /// Path to the database file (defaults to `spec-ai.duckdb` in the CWD)
    pub path: PathBuf,
}
251
252impl Default for DatabaseConfig {
253    fn default() -> Self {
254        Self {
255            path: PathBuf::from("spec-ai.duckdb"),
256        }
257    }
258}
259
/// Model provider configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelConfig {
    /// Provider name (e.g., "openai", "anthropic", "mlx", "lmstudio", "mock");
    /// validated against the known-provider list in `AppConfig::validate`
    pub provider: String,
    /// Model name to use (e.g., "gpt-4", "claude-3-opus")
    #[serde(default)]
    pub model_name: Option<String>,
    /// Embeddings model name (optional, for semantic search)
    #[serde(default)]
    pub embeddings_model: Option<String>,
    /// API key source (e.g., environment variable name or path)
    #[serde(default)]
    pub api_key_source: Option<String>,
    /// Default temperature for model completions (0.0 to 2.0); defaults to 0.7
    #[serde(default = "default_temperature")]
    pub temperature: f32,
}
278
/// Default sampling temperature used when none is configured.
fn default_temperature() -> f32 { 0.7 }
282
283impl Default for ModelConfig {
284    fn default() -> Self {
285        Self {
286            provider: "mock".to_string(),
287            model_name: None,
288            embeddings_model: None,
289            api_key_source: None,
290            temperature: default_temperature(),
291        }
292    }
293}
294
/// UI configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UiConfig {
    /// Command prompt string shown before user input
    pub prompt: String,
    /// UI theme name
    pub theme: String,
}
303
304impl Default for UiConfig {
305    fn default() -> Self {
306        Self {
307            prompt: "> ".to_string(),
308            theme: "default".to_string(),
309        }
310    }
311}
312
/// Logging configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LoggingConfig {
    /// Log level (trace, debug, info, warn, error); validated in
    /// `AppConfig::validate`
    pub level: String,
}
319
320impl Default for LoggingConfig {
321    fn default() -> Self {
322        Self {
323            level: "info".to_string(),
324        }
325    }
326}
327
/// Mesh networking configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MeshConfig {
    /// Enable mesh networking (off when the field is absent)
    #[serde(default)]
    pub enabled: bool,
    /// Registry port for mesh coordination (default 3000)
    #[serde(default = "default_registry_port")]
    pub registry_port: u16,
    /// Heartbeat interval in seconds (default 5)
    #[serde(default = "default_heartbeat_interval")]
    pub heartbeat_interval_secs: u64,
    /// Leader timeout in seconds (how long before new election; default 15)
    #[serde(default = "default_leader_timeout")]
    pub leader_timeout_secs: u64,
    /// Replication factor for knowledge graph (default 2)
    #[serde(default = "default_replication_factor")]
    pub replication_factor: usize,
    /// Auto-join mesh on startup
    /// (NOTE(review): serde default is `false`, but `MeshConfig::default()`
    /// sets `true` — confirm which is intended)
    #[serde(default)]
    pub auto_join: bool,
}
350
/// Default TCP port for the mesh registry.
fn default_registry_port() -> u16 { 3000 }

/// Default heartbeat period, in seconds.
fn default_heartbeat_interval() -> u64 { 5 }

/// Default leader-election timeout, in seconds.
fn default_leader_timeout() -> u64 { 15 }

/// Default knowledge-graph replication factor.
fn default_replication_factor() -> usize { 2 }
366
impl Default for MeshConfig {
    /// Mesh disabled by default; timing/port fields reuse the same default
    /// functions referenced by the serde attributes so the two stay in sync.
    fn default() -> Self {
        Self {
            enabled: false,
            registry_port: default_registry_port(),
            heartbeat_interval_secs: default_heartbeat_interval(),
            leader_timeout_secs: default_leader_timeout(),
            replication_factor: default_replication_factor(),
            // NOTE(review): `true` here disagrees with the struct's bare
            // `#[serde(default)]` on `auto_join`, which yields `false` when
            // the field is absent from the TOML — confirm which is intended.
            auto_join: true,
        }
    }
}
379
/// Audio transcription configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AudioConfig {
    /// Enable audio transcription (off when the field is absent)
    #[serde(default)]
    pub enabled: bool,
    /// Transcription provider (mock, vttrs); defaults to "vttrs"
    #[serde(default = "default_transcription_provider")]
    pub provider: String,
    /// Transcription model (e.g., "whisper-1", "whisper-large-v3")
    #[serde(default)]
    pub model: Option<String>,
    /// API key source for cloud transcription
    #[serde(default)]
    pub api_key_source: Option<String>,
    /// Use on-device transcription (offline mode)
    #[serde(default)]
    pub on_device: bool,
    /// Custom API endpoint (optional)
    #[serde(default)]
    pub endpoint: Option<String>,
    /// Audio chunk duration in seconds (default 5.0)
    #[serde(default = "default_chunk_duration")]
    pub chunk_duration_secs: f64,
    /// Default transcription duration in seconds (default 30)
    #[serde(default = "default_duration")]
    pub default_duration_secs: u64,
    /// Default transcription duration in seconds (legacy field name)
    /// (NOTE(review): duplicates `default_duration_secs` and can diverge
    /// from it after deserialization — consider a serde alias instead)
    #[serde(default = "default_duration")]
    pub default_duration: u64,
    /// Output file path for transcripts (optional)
    #[serde(default)]
    pub out_file: Option<String>,
    /// Language code (e.g., "en", "es", "fr")
    #[serde(default)]
    pub language: Option<String>,
    /// Whether to automatically respond to transcriptions
    #[serde(default)]
    pub auto_respond: bool,
    /// Mock scenario for testing (e.g., "simple_conversation", "emotional_context")
    #[serde(default = "default_mock_scenario")]
    pub mock_scenario: String,
    /// Delay between mock transcription events in milliseconds (default 500)
    #[serde(default = "default_event_delay_ms")]
    pub event_delay_ms: u64,
}
426
/// Default audio transcription backend.
fn default_transcription_provider() -> String {
    String::from("vttrs")
}

/// Default audio chunk length, in seconds.
fn default_chunk_duration() -> f64 { 5.0 }

/// Default transcription session length, in seconds.
fn default_duration() -> u64 { 30 }

/// Default scenario name used by the mock provider.
fn default_mock_scenario() -> String {
    String::from("simple_conversation")
}

/// Default delay between mock transcription events, in milliseconds.
fn default_event_delay_ms() -> u64 { 500 }
446
impl Default for AudioConfig {
    /// Transcription disabled by default; tunables reuse the same default
    /// functions referenced by the serde attributes.
    fn default() -> Self {
        Self {
            enabled: false,
            provider: default_transcription_provider(),
            // NOTE(review): serde's bare `#[serde(default)]` on `model`
            // yields `None` when absent from TOML, but this impl picks
            // `Some("whisper-1")` — confirm which is intended.
            model: Some("whisper-1".to_string()),
            api_key_source: None,
            on_device: false,
            endpoint: None,
            chunk_duration_secs: default_chunk_duration(),
            default_duration_secs: default_duration(),
            // Legacy duplicate of `default_duration_secs` (same value).
            default_duration: default_duration(),
            out_file: None,
            language: None,
            auto_respond: false,
            mock_scenario: default_mock_scenario(),
            event_delay_ms: default_event_delay_ms(),
        }
    }
}
467
/// Plugin configuration for custom tools.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PluginConfig {
    /// Enable plugin loading (off when the field is absent)
    #[serde(default)]
    pub enabled: bool,

    /// Directory containing plugin libraries (.dylib/.so/.dll);
    /// defaults to `~/.spec-ai/tools`
    #[serde(default = "default_plugins_dir")]
    pub custom_tools_dir: PathBuf,

    /// Continue startup even if some plugins fail to load (default true)
    #[serde(default = "default_continue_on_error")]
    pub continue_on_error: bool,

    /// Allow plugins to override built-in tools
    #[serde(default)]
    pub allow_override_builtin: bool,
}
487
/// Default directory scanned for plugin libraries.
///
/// NOTE(review): the leading `~` is NOT expanded by `PathBuf`; presumably
/// the plugin loader performs tilde expansion — confirm at the consumer.
fn default_plugins_dir() -> PathBuf {
    ["~", ".spec-ai", "tools"].iter().collect()
}

/// By default, a plugin that fails to load does not abort startup.
fn default_continue_on_error() -> bool { true }
495
496impl Default for PluginConfig {
497    fn default() -> Self {
498        Self {
499            enabled: false,
500            custom_tools_dir: default_plugins_dir(),
501            continue_on_error: true,
502            allow_override_builtin: false,
503        }
504    }
505}