1use serde::{Deserialize, Serialize};
6use std::env;
7
/// Errors that can arise while loading or validating configuration.
///
/// NOTE(review): nothing in this file currently constructs or returns these
/// variants — presumably callers elsewhere in the crate use them; confirm
/// before removing or extending.
#[derive(Debug, thiserror::Error)]
pub enum ConfigError {
    /// A required environment variable was not set; payload is the variable name.
    #[error("Missing environment variable: {0}")]
    MissingEnvVar(String),
    /// Configuration was present but semantically invalid; payload describes why.
    #[error("Invalid configuration: {0}")]
    Invalid(String),
}
16
/// Configuration for LLM provider access, typically loaded from the
/// environment via [`LlmConfig::from_env`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
    /// API key for the DeepSeek provider (`DEEPSEEK_API_KEY`), if set.
    pub deepseek_api_key: Option<String>,
    /// API key for the Mistral provider (`MISTRAL_API_KEY`), if set.
    pub mistral_api_key: Option<String>,
    /// API key for the OpenAI provider (`OPENAI_API_KEY`), if set.
    pub openai_api_key: Option<String>,
    /// API key for the Anthropic provider (`ANTHROPIC_API_KEY`), if set.
    pub anthropic_api_key: Option<String>,
    /// Base URL of the Ollama server; defaults to `http://localhost:11434`.
    pub ollama_url: String,
    /// Provider used when none is specified; defaults to `"deepseek"`.
    pub default_provider: String,
    /// Model identifier used when none is specified; defaults to `"deepseek-chat"`.
    pub default_model: String,
    /// Per-request timeout in seconds; defaults to 30.
    pub request_timeout_secs: u64,
}
37
38impl Default for LlmConfig {
39 fn default() -> Self {
40 Self {
41 deepseek_api_key: None,
42 mistral_api_key: None,
43 openai_api_key: None,
44 anthropic_api_key: None,
45 ollama_url: "http://localhost:11434".to_string(),
46 default_provider: "deepseek".to_string(),
47 default_model: "deepseek-chat".to_string(),
48 request_timeout_secs: 30,
49 }
50 }
51}
52
53impl LlmConfig {
54 pub fn from_env() -> Self {
56 Self {
57 deepseek_api_key: env::var("DEEPSEEK_API_KEY").ok(),
58 mistral_api_key: env::var("MISTRAL_API_KEY").ok(),
59 openai_api_key: env::var("OPENAI_API_KEY").ok(),
60 anthropic_api_key: env::var("ANTHROPIC_API_KEY").ok(),
61 ollama_url: env::var("OLLAMA_URL")
62 .unwrap_or_else(|_| "http://localhost:11434".to_string()),
63 default_provider: env::var("VEX_DEFAULT_PROVIDER")
64 .unwrap_or_else(|_| "deepseek".to_string()),
65 default_model: env::var("VEX_DEFAULT_MODEL")
66 .unwrap_or_else(|_| "deepseek-chat".to_string()),
67 request_timeout_secs: env::var("VEX_LLM_TIMEOUT")
68 .ok()
69 .and_then(|v| v.parse().ok())
70 .unwrap_or(30),
71 }
72 }
73
74 pub fn api_key(&self, provider: &str) -> Option<&str> {
76 match provider.to_lowercase().as_str() {
77 "deepseek" => self.deepseek_api_key.as_deref(),
78 "mistral" => self.mistral_api_key.as_deref(),
79 "openai" => self.openai_api_key.as_deref(),
80 "anthropic" => self.anthropic_api_key.as_deref(),
81 _ => None,
82 }
83 }
84
85 pub fn is_configured(&self, provider: &str) -> bool {
87 match provider.to_lowercase().as_str() {
88 "deepseek" => self.deepseek_api_key.is_some(),
89 "mistral" => self.mistral_api_key.is_some(),
90 "openai" => self.openai_api_key.is_some(),
91 "anthropic" => self.anthropic_api_key.is_some(),
92 "ollama" | "mock" => true, _ => false,
94 }
95 }
96
97 pub fn available_providers(&self) -> Vec<&str> {
99 let mut providers = vec!["mock", "ollama"];
100 if self.deepseek_api_key.is_some() {
101 providers.push("deepseek");
102 }
103 if self.mistral_api_key.is_some() {
104 providers.push("mistral");
105 }
106 if self.openai_api_key.is_some() {
107 providers.push("openai");
108 }
109 if self.anthropic_api_key.is_some() {
110 providers.push("anthropic");
111 }
112 providers
113 }
114}
115
/// Top-level application configuration.
///
/// NOTE(review): the derived `Default` yields `max_agent_depth = 0` and
/// `adversarial_enabled = false`, whereas `from_env` falls back to `3` and
/// `true` when the variables are unset — confirm whether any caller relies
/// on the derived `Default`, since the two disagree.
#[derive(Debug, Clone, Default)]
pub struct VexConfig {
    /// LLM provider settings (see [`LlmConfig`]).
    pub llm: LlmConfig,
    /// Debug mode flag, read from `VEX_DEBUG`.
    pub debug: bool,
    /// Maximum agent recursion depth, read from `VEX_MAX_DEPTH`.
    pub max_agent_depth: u8,
    /// Whether adversarial mode is on, read from `VEX_ADVERSARIAL`.
    pub adversarial_enabled: bool,
}
128
129impl VexConfig {
130 pub fn from_env() -> Self {
132 Self {
133 llm: LlmConfig::from_env(),
134 debug: env::var("VEX_DEBUG")
135 .map(|v| v == "1" || v == "true")
136 .unwrap_or(false),
137 max_agent_depth: env::var("VEX_MAX_DEPTH")
138 .ok()
139 .and_then(|v| v.parse().ok())
140 .unwrap_or(3),
141 adversarial_enabled: env::var("VEX_ADVERSARIAL")
142 .map(|v| v != "0" && v != "false")
143 .unwrap_or(true),
144 }
145 }
146}
147
#[cfg(test)]
mod tests {
    use super::*;

    /// A default config should prefer deepseek and report the key-less
    /// providers (mock, ollama) as configured.
    #[test]
    fn test_default_config() {
        let config = LlmConfig::default();
        assert_eq!(config.default_provider, "deepseek");
        for provider in ["mock", "ollama"] {
            assert!(config.is_configured(provider));
        }
    }

    /// With no API keys set, mock and ollama must still be listed.
    #[test]
    fn test_available_providers() {
        let config = LlmConfig::default();
        let providers = config.available_providers();
        for expected in ["mock", "ollama"] {
            assert!(providers.contains(&expected));
        }
    }
}