// vex_llm/config.rs
1//! Configuration management for VEX
2//!
3//! Handles API keys, provider settings, and runtime configuration.
4
5use serde::{Deserialize, Serialize};
6use std::env;
7
/// Errors that can arise while loading or validating configuration.
///
/// NOTE(review): neither variant is constructed anywhere in this file;
/// presumably other modules in the crate return these — confirm callers.
#[derive(Debug, thiserror::Error)]
pub enum ConfigError {
    /// A required environment variable was not set; payload is the variable name.
    #[error("Missing environment variable: {0}")]
    MissingEnvVar(String),
    /// A setting was present but semantically invalid; payload describes the problem.
    #[error("Invalid configuration: {0}")]
    Invalid(String),
}
16
/// LLM provider configuration.
///
/// Per-provider API keys are optional; a `None` key means that provider is
/// unavailable (see `is_configured`). The "ollama" and "mock" providers need
/// no key. Values come from `Default` or from environment variables via
/// `from_env`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// DeepSeek API key (env: DEEPSEEK_API_KEY)
    pub deepseek_api_key: Option<String>,
    /// Mistral API key (env: MISTRAL_API_KEY)
    pub mistral_api_key: Option<String>,
    /// OpenAI API key (env: OPENAI_API_KEY)
    pub openai_api_key: Option<String>,
    /// Anthropic API key (env: ANTHROPIC_API_KEY)
    pub anthropic_api_key: Option<String>,
    /// Ollama base URL (env: OLLAMA_URL; default: http://localhost:11434)
    pub ollama_url: String,
    /// Default provider name (env: VEX_DEFAULT_PROVIDER; default: "deepseek")
    pub default_provider: String,
    /// Default model identifier (env: VEX_DEFAULT_MODEL; default: "deepseek-chat")
    pub default_model: String,
    /// Request timeout in seconds (env: VEX_LLM_TIMEOUT; default: 30)
    pub request_timeout_secs: u64,
}
37
38impl Default for LlmConfig {
39    fn default() -> Self {
40        Self {
41            deepseek_api_key: None,
42            mistral_api_key: None,
43            openai_api_key: None,
44            anthropic_api_key: None,
45            ollama_url: "http://localhost:11434".to_string(),
46            default_provider: "deepseek".to_string(),
47            default_model: "deepseek-chat".to_string(),
48            request_timeout_secs: 30,
49        }
50    }
51}
52
53impl LlmConfig {
54    /// Load configuration from environment variables
55    pub fn from_env() -> Self {
56        Self {
57            deepseek_api_key: env::var("DEEPSEEK_API_KEY").ok(),
58            mistral_api_key: env::var("MISTRAL_API_KEY").ok(),
59            openai_api_key: env::var("OPENAI_API_KEY").ok(),
60            anthropic_api_key: env::var("ANTHROPIC_API_KEY").ok(),
61            ollama_url: env::var("OLLAMA_URL")
62                .unwrap_or_else(|_| "http://localhost:11434".to_string()),
63            default_provider: env::var("VEX_DEFAULT_PROVIDER")
64                .unwrap_or_else(|_| "deepseek".to_string()),
65            default_model: env::var("VEX_DEFAULT_MODEL")
66                .unwrap_or_else(|_| "deepseek-chat".to_string()),
67            request_timeout_secs: env::var("VEX_LLM_TIMEOUT")
68                .ok()
69                .and_then(|v| v.parse().ok())
70                .unwrap_or(30),
71        }
72    }
73
74    /// Get API key for a provider
75    pub fn api_key(&self, provider: &str) -> Option<&str> {
76        match provider.to_lowercase().as_str() {
77            "deepseek" => self.deepseek_api_key.as_deref(),
78            "mistral" => self.mistral_api_key.as_deref(),
79            "openai" => self.openai_api_key.as_deref(),
80            "anthropic" => self.anthropic_api_key.as_deref(),
81            _ => None,
82        }
83    }
84
85    /// Check if a provider is configured
86    pub fn is_configured(&self, provider: &str) -> bool {
87        match provider.to_lowercase().as_str() {
88            "deepseek" => self.deepseek_api_key.is_some(),
89            "mistral" => self.mistral_api_key.is_some(),
90            "openai" => self.openai_api_key.is_some(),
91            "anthropic" => self.anthropic_api_key.is_some(),
92            "ollama" | "mock" => true, // Always available
93            _ => false,
94        }
95    }
96
97    /// List available providers
98    pub fn available_providers(&self) -> Vec<&str> {
99        let mut providers = vec!["mock", "ollama"];
100        if self.deepseek_api_key.is_some() {
101            providers.push("deepseek");
102        }
103        if self.mistral_api_key.is_some() {
104            providers.push("mistral");
105        }
106        if self.openai_api_key.is_some() {
107            providers.push("openai");
108        }
109        if self.anthropic_api_key.is_some() {
110            providers.push("anthropic");
111        }
112        providers
113    }
114}
115
/// Full VEX runtime configuration.
///
/// Built from environment variables by `from_env`; `Default` derives from
/// the field defaults (note: derived `Default` yields `debug = false`,
/// `max_agent_depth = 0`, `adversarial_enabled = false`, which differs from
/// the `from_env` fallbacks of 3 and `true`).
#[derive(Debug, Clone, Default)]
pub struct VexConfig {
    /// LLM provider settings
    pub llm: LlmConfig,
    /// Enable debug logging (env: VEX_DEBUG, enabled by "1"/"true"; default off)
    pub debug: bool,
    /// Maximum agent depth (env: VEX_MAX_DEPTH; from_env default 3)
    pub max_agent_depth: u8,
    /// Enable adversarial verification (env: VEX_ADVERSARIAL, disabled only
    /// by "0"/"false"; from_env default on)
    pub adversarial_enabled: bool,
}
128
129impl VexConfig {
130    /// Load from environment
131    pub fn from_env() -> Self {
132        Self {
133            llm: LlmConfig::from_env(),
134            debug: env::var("VEX_DEBUG")
135                .map(|v| v == "1" || v == "true")
136                .unwrap_or(false),
137            max_agent_depth: env::var("VEX_MAX_DEPTH")
138                .ok()
139                .and_then(|v| v.parse().ok())
140                .unwrap_or(3),
141            adversarial_enabled: env::var("VEX_ADVERSARIAL")
142                .map(|v| v != "0" && v != "false")
143                .unwrap_or(true),
144        }
145    }
146}
147
#[cfg(test)]
mod tests {
    use super::*;

    /// A fresh default config targets DeepSeek, and the key-less providers
    /// ("mock", "ollama") always report as configured.
    #[test]
    fn test_default_config() {
        let cfg = LlmConfig::default();
        assert_eq!(cfg.default_provider, "deepseek");
        for keyless in ["mock", "ollama"] {
            assert!(cfg.is_configured(keyless));
        }
    }

    /// With no API keys set, the always-available providers are still listed.
    #[test]
    fn test_available_providers() {
        let listed = LlmConfig::default().available_providers();
        for always_on in ["mock", "ollama"] {
            assert!(listed.contains(&always_on));
        }
    }
}