// manx_cli/config.rs

1use anyhow::{Context, Result};
2use directories::ProjectDirs;
3use serde::{Deserialize, Serialize};
4use std::fs;
5use std::path::PathBuf;
6
7use crate::rag::{
8    llm::{LlmConfig, LlmProvider},
9    RagConfig,
10};
11
/// Persistent CLI configuration, serialized as JSON in the user config dir.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Config {
    // Context7 MCP settings (existing)
    /// Context7 API key; `None` means no key configured.
    pub api_key: Option<String>,
    /// Override for the cache directory; `None` uses the default location.
    pub cache_dir: Option<PathBuf>,
    /// Default number of search results to return.
    pub default_limit: usize,
    // NOTE(review): presumably skips network access when set — confirm against callers.
    pub offline_mode: bool,
    /// Whether terminal output is colorized (forced off by `NO_COLOR`).
    pub color_output: bool,
    /// Whether results are cached automatically.
    pub auto_cache_enabled: bool,
    /// Cache entry time-to-live, in hours.
    pub cache_ttl_hours: u64,
    /// Upper bound on cache size, in megabytes.
    pub max_cache_size_mb: u64,

    // Local RAG settings
    pub rag: RagConfig,

    // LLM integration settings
    pub llm: LlmConfig,
}
30
31impl Default for Config {
32    fn default() -> Self {
33        Self {
34            // Context7 MCP defaults (existing)
35            api_key: None,
36            cache_dir: None,
37            default_limit: 10,
38            offline_mode: false,
39            color_output: true,
40            auto_cache_enabled: true,
41            cache_ttl_hours: 24,
42            max_cache_size_mb: 100,
43
44            // RAG defaults
45            rag: RagConfig::default(),
46
47            // LLM defaults
48            llm: LlmConfig::default(),
49        }
50    }
51}
52
53impl Config {
54    pub fn load() -> Result<Self> {
55        let config_path = Self::config_path()?;
56
57        if !config_path.exists() {
58            let config = Config::default();
59            config.save()?;
60            return Ok(config);
61        }
62
63        let content = fs::read_to_string(&config_path).context("Failed to read config file")?;
64
65        let config: Config =
66            serde_json::from_str(&content).context("Failed to parse config file")?;
67
68        Ok(config)
69    }
70
71    pub fn save(&self) -> Result<()> {
72        let config_path = Self::config_path()?;
73
74        if let Some(parent) = config_path.parent() {
75            fs::create_dir_all(parent)?;
76        }
77
78        let content = serde_json::to_string_pretty(self)?;
79        fs::write(&config_path, content).context("Failed to write config file")?;
80
81        Ok(())
82    }
83
84    fn config_path() -> Result<PathBuf> {
85        Ok(ProjectDirs::from("", "", "manx")
86            .context("Failed to determine config directory")?
87            .config_dir()
88            .join("config.json"))
89    }
90
91    pub fn merge_with_cli(
92        &mut self,
93        api_key: Option<String>,
94        cache_dir: Option<PathBuf>,
95        offline: bool,
96    ) {
97        if api_key.is_some() {
98            self.api_key = api_key;
99        }
100        if cache_dir.is_some() {
101            self.cache_dir = cache_dir;
102        }
103        if offline {
104            self.offline_mode = true;
105        }
106
107        // Check NO_COLOR environment variable
108        if std::env::var("NO_COLOR").is_ok() {
109            self.color_output = false;
110        }
111    }
112
113    pub fn display(&self) -> String {
114        let mut output = String::new();
115        output.push_str("Current Configuration:\n");
116        output.push_str("=====================\n\n");
117
118        // Context7 MCP Settings
119        output.push_str("Context7 MCP:\n");
120        output.push_str(&format!(
121            "  API Key: {}\n",
122            self.api_key
123                .as_ref()
124                .map(|k| {
125                    if k.len() > 8 {
126                        format!("{}...{}", &k[..4], &k[k.len() - 4..])
127                    } else {
128                        "***".to_string()
129                    }
130                })
131                .unwrap_or_else(|| "Not set".to_string())
132        ));
133
134        output.push_str(&format!(
135            "  Cache Directory: {}\n",
136            self.cache_dir
137                .as_ref()
138                .map(|p| p.display().to_string())
139                .unwrap_or_else(|| "Default (~/.cache/manx)".to_string())
140        ));
141
142        output.push_str(&format!("  Default Search Limit: {}\n", self.default_limit));
143        output.push_str(&format!("  Offline Mode: {}\n", self.offline_mode));
144        output.push_str(&format!("  Color Output: {}\n", self.color_output));
145        output.push_str(&format!(
146            "  Auto Cache Enabled: {}\n",
147            self.auto_cache_enabled
148        ));
149        output.push_str(&format!("  Cache TTL (hours): {}\n", self.cache_ttl_hours));
150        output.push_str(&format!(
151            "  Max Cache Size (MB): {}\n",
152            self.max_cache_size_mb
153        ));
154
155        // Local RAG Settings
156        output.push_str("\nLocal RAG:\n");
157        output.push_str(&format!("  Enabled: {}\n", self.rag.enabled));
158        output.push_str(&format!(
159            "  Index Path: {}\n",
160            self.rag.index_path.display()
161        ));
162        output.push_str(&format!("  Max Results: {}\n", self.rag.max_results));
163        output.push_str(&format!(
164            "  PDF Processing: {} (Security Setting)\n",
165            if self.rag.allow_pdf_processing {
166                "Enabled"
167            } else {
168                "Disabled"
169            }
170        ));
171
172        // Embedding Settings
173        output.push_str(&format!(
174            "  Embedding Provider: {:?}\n",
175            self.rag.embedding.provider
176        ));
177        output.push_str(&format!(
178            "  Embedding Dimension: {}\n",
179            self.rag.embedding.dimension
180        ));
181        if let Some(model_path) = &self.rag.embedding.model_path {
182            output.push_str(&format!("  Model Path: {}\n", model_path.display()));
183        }
184        if self.rag.embedding.api_key.is_some() {
185            output.push_str("  API Key: ****\n");
186        }
187        if let Some(endpoint) = &self.rag.embedding.endpoint {
188            output.push_str(&format!("  Custom Endpoint: {}\n", endpoint));
189        }
190
191        // LLM Settings
192        output.push_str("\nLLM Integration:\n");
193        let llm_status = if self.has_llm_configured() {
194            "Available"
195        } else {
196            "Not configured"
197        };
198        output.push_str(&format!("  Status: {}\n", llm_status));
199
200        if let Some(key) = &self.llm.openai_api_key {
201            output.push_str(&format!(
202                "  OpenAI API Key: {}...{}\n",
203                &key[..4],
204                &key[key.len() - 4..]
205            ));
206        }
207
208        if let Some(key) = &self.llm.anthropic_api_key {
209            output.push_str(&format!(
210                "  Anthropic API Key: {}...{}\n",
211                &key[..4],
212                &key[key.len() - 4..]
213            ));
214        }
215
216        if let Some(key) = &self.llm.groq_api_key {
217            output.push_str(&format!(
218                "  Groq API Key: {}...{}\n",
219                &key[..4],
220                &key[key.len() - 4..]
221            ));
222        }
223
224        if let Some(key) = &self.llm.openrouter_api_key {
225            output.push_str(&format!(
226                "  OpenRouter API Key: {}...{}\n",
227                &key[..4],
228                &key[key.len() - 4..]
229            ));
230        }
231
232        if let Some(key) = &self.llm.huggingface_api_key {
233            output.push_str(&format!(
234                "  HuggingFace API Key: {}...{}\n",
235                &key[..4],
236                &key[key.len() - 4..]
237            ));
238        }
239
240        if let Some(endpoint) = &self.llm.custom_endpoint {
241            output.push_str(&format!("  Custom Endpoint: {}\n", endpoint));
242        }
243
244        output.push_str(&format!("  Provider: {:?}\n", self.llm.preferred_provider));
245
246        if let Some(model) = &self.llm.model_name {
247            output.push_str(&format!("  Model: {}\n", model));
248        }
249
250        output
251    }
252
253    /// Check if LLM functionality should be used
254    pub fn should_use_llm(&self, no_llm_flag: bool) -> bool {
255        if no_llm_flag {
256            return false;
257        }
258        self.has_llm_configured()
259    }
260
261    /// Check if any LLM provider is configured
262    pub fn has_llm_configured(&self) -> bool {
263        self.llm.openai_api_key.is_some()
264            || self.llm.anthropic_api_key.is_some()
265            || self.llm.groq_api_key.is_some()
266            || self.llm.openrouter_api_key.is_some()
267            || self.llm.huggingface_api_key.is_some()
268            || self.llm.custom_endpoint.is_some()
269    }
270
271    /// Set LLM API key (auto-detect provider)
272    pub fn set_llm_api_key(&mut self, key: String) -> Result<()> {
273        if key.is_empty() {
274            // Clear all API keys
275            self.llm.openai_api_key = None;
276            self.llm.anthropic_api_key = None;
277            return Ok(());
278        }
279
280        // Auto-detect provider based on key format
281        if key.starts_with("sk-") {
282            self.llm.openai_api_key = Some(key);
283            self.llm.preferred_provider = LlmProvider::OpenAI;
284        } else if key.starts_with("sk-ant-") {
285            self.llm.anthropic_api_key = Some(key);
286            self.llm.preferred_provider = LlmProvider::Anthropic;
287        } else {
288            // Default to OpenAI format
289            self.llm.openai_api_key = Some(key);
290            self.llm.preferred_provider = LlmProvider::OpenAI;
291        }
292
293        self.save()
294    }
295
296    /// Set OpenAI API key
297    pub fn set_openai_api_key(&mut self, key: String) -> Result<()> {
298        if key.is_empty() {
299            self.llm.openai_api_key = None;
300        } else {
301            self.llm.openai_api_key = Some(key);
302            self.llm.preferred_provider = LlmProvider::OpenAI;
303        }
304        self.save()
305    }
306
307    /// Set Anthropic API key
308    pub fn set_anthropic_api_key(&mut self, key: String) -> Result<()> {
309        if key.is_empty() {
310            self.llm.anthropic_api_key = None;
311        } else {
312            self.llm.anthropic_api_key = Some(key);
313            self.llm.preferred_provider = LlmProvider::Anthropic;
314        }
315        self.save()
316    }
317
318    /// Set Groq API key
319    pub fn set_groq_api_key(&mut self, key: String) -> Result<()> {
320        if key.is_empty() {
321            self.llm.groq_api_key = None;
322        } else {
323            self.llm.groq_api_key = Some(key);
324            self.llm.preferred_provider = LlmProvider::Groq;
325        }
326        self.save()
327    }
328
329    /// Set OpenRouter API key
330    pub fn set_openrouter_api_key(&mut self, key: String) -> Result<()> {
331        if key.is_empty() {
332            self.llm.openrouter_api_key = None;
333        } else {
334            self.llm.openrouter_api_key = Some(key);
335            self.llm.preferred_provider = LlmProvider::OpenRouter;
336        }
337        self.save()
338    }
339
340    /// Set HuggingFace API key
341    pub fn set_huggingface_api_key(&mut self, key: String) -> Result<()> {
342        if key.is_empty() {
343            self.llm.huggingface_api_key = None;
344        } else {
345            self.llm.huggingface_api_key = Some(key);
346            self.llm.preferred_provider = LlmProvider::HuggingFace;
347        }
348        self.save()
349    }
350
351    /// Set custom endpoint
352    pub fn set_custom_endpoint(&mut self, endpoint: String) -> Result<()> {
353        if endpoint.is_empty() {
354            self.llm.custom_endpoint = None;
355        } else {
356            self.llm.custom_endpoint = Some(endpoint);
357            self.llm.preferred_provider = LlmProvider::Custom;
358        }
359        self.save()
360    }
361
362    /// Set LLM provider preference
363    pub fn set_llm_provider(&mut self, provider: String) -> Result<()> {
364        match provider.to_lowercase().as_str() {
365            "openai" => self.llm.preferred_provider = LlmProvider::OpenAI,
366            "anthropic" => self.llm.preferred_provider = LlmProvider::Anthropic,
367            "groq" => self.llm.preferred_provider = LlmProvider::Groq,
368            "openrouter" => self.llm.preferred_provider = LlmProvider::OpenRouter,
369            "huggingface" => self.llm.preferred_provider = LlmProvider::HuggingFace,
370            "custom" => self.llm.preferred_provider = LlmProvider::Custom,
371            "auto" => self.llm.preferred_provider = LlmProvider::Auto,
372            _ => anyhow::bail!("Invalid provider '{}'. Use: openai, anthropic, groq, openrouter, huggingface, custom, auto", provider),
373        }
374        self.save()
375    }
376
377    /// Set specific LLM model
378    pub fn set_llm_model(&mut self, model: String) -> Result<()> {
379        if model.is_empty() {
380            self.llm.model_name = None;
381        } else {
382            self.llm.model_name = Some(model);
383        }
384        self.save()
385    }
386
387    /// Enable/disable local RAG
388    pub fn set_rag_enabled(&mut self, enabled: bool) -> Result<()> {
389        self.rag.enabled = enabled;
390        self.save()
391    }
392
393    /// Set embedding provider (dimension will be detected dynamically)
394    pub fn set_embedding_provider(&mut self, provider_str: &str) -> Result<()> {
395        use crate::rag::EmbeddingProvider;
396
397        let provider = match provider_str.to_lowercase().as_str() {
398            "hash" => EmbeddingProvider::Hash,
399            _ if provider_str.starts_with("onnx:") => {
400                let model_name = provider_str.strip_prefix("onnx:").unwrap_or("").to_string();
401                if model_name.is_empty() {
402                    anyhow::bail!("ONNX provider requires model name: onnx:model_name");
403                }
404                EmbeddingProvider::Onnx(model_name)
405            },
406            _ if provider_str.starts_with("ollama:") => {
407                let model_name = provider_str.strip_prefix("ollama:").unwrap_or("").to_string();
408                if model_name.is_empty() {
409                    anyhow::bail!("Ollama provider requires model name: ollama:model_name");
410                }
411                EmbeddingProvider::Ollama(model_name)
412            },
413            _ if provider_str.starts_with("openai:") => {
414                let model_name = provider_str.strip_prefix("openai:").unwrap_or("text-embedding-3-small").to_string();
415                EmbeddingProvider::OpenAI(model_name)
416            },
417            _ if provider_str.starts_with("huggingface:") => {
418                let model_name = provider_str.strip_prefix("huggingface:").unwrap_or("").to_string();
419                if model_name.is_empty() {
420                    anyhow::bail!("HuggingFace provider requires model name: huggingface:model_name");
421                }
422                EmbeddingProvider::HuggingFace(model_name)
423            },
424            _ if provider_str.starts_with("custom:") => {
425                let endpoint = provider_str.strip_prefix("custom:").unwrap_or("").to_string();
426                if endpoint.is_empty() {
427                    anyhow::bail!("Custom provider requires endpoint URL: custom:http://...");
428                }
429                EmbeddingProvider::Custom(endpoint)
430            },
431            _ => anyhow::bail!(
432                "Invalid embedding provider '{}'. Use: hash, onnx:model, ollama:model, openai:model, huggingface:model, custom:url", 
433                provider_str
434            ),
435        };
436
437        // Set provider (dimension will be detected on first use)
438        self.rag.embedding.provider = provider;
439
440        self.save()
441    }
442
443    /// Set embedding API key (for API providers)
444    pub fn set_embedding_api_key(&mut self, key: String) -> Result<()> {
445        if key.is_empty() {
446            self.rag.embedding.api_key = None;
447        } else {
448            self.rag.embedding.api_key = Some(key);
449        }
450        self.save()
451    }
452
453    /// Set embedding model path (for local models)
454    pub fn set_embedding_model_path(&mut self, path: std::path::PathBuf) -> Result<()> {
455        self.rag.embedding.model_path = Some(path);
456        self.save()
457    }
458
459    /// Set embedding dimension
460    pub fn set_embedding_dimension(&mut self, dimension: usize) -> Result<()> {
461        if dimension == 0 {
462            anyhow::bail!("Embedding dimension must be greater than 0");
463        }
464        self.rag.embedding.dimension = dimension;
465        self.save()
466    }
467}