// manx_cli — src/config.rs
1use anyhow::{Context, Result};
2use directories::ProjectDirs;
3use serde::{Deserialize, Serialize};
4use std::fs;
5use std::path::PathBuf;
6
7use crate::rag::{
8    llm::{LlmConfig, LlmProvider},
9    RagConfig,
10};
11
/// Persistent configuration for the manx CLI, serialized to `config.json`
/// in the platform config directory (see `Config::config_path`).
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Config {
    // Context7 MCP settings (existing)
    /// Context7 API key; rendered masked by `display()`.
    pub api_key: Option<String>,
    /// Cache directory override; `None` means the default location.
    pub cache_dir: Option<PathBuf>,
    /// Default number of search results.
    pub default_limit: usize,
    /// Offline mode flag; can be forced on via the CLI (`merge_with_cli`).
    /// NOTE(review): enforcement happens at call sites — confirm there.
    pub offline_mode: bool,
    /// Colored terminal output; forced off when `NO_COLOR` is set.
    pub color_output: bool,
    /// Automatic caching toggle — presumably consumed by the cache layer;
    /// not enforced in this file.
    pub auto_cache_enabled: bool,
    /// Cache entry time-to-live, in hours.
    pub cache_ttl_hours: u64,
    /// Maximum cache size, in megabytes.
    pub max_cache_size_mb: u64,

    // Local RAG settings
    pub rag: RagConfig,

    // LLM integration settings
    pub llm: LlmConfig,
}
30
31impl Default for Config {
32    fn default() -> Self {
33        Self {
34            // Context7 MCP defaults (existing)
35            api_key: None,
36            cache_dir: None,
37            default_limit: 10,
38            offline_mode: false,
39            color_output: true,
40            auto_cache_enabled: true,
41            cache_ttl_hours: 24,
42            max_cache_size_mb: 100,
43
44            // RAG defaults
45            rag: RagConfig::default(),
46
47            // LLM defaults
48            llm: LlmConfig::default(),
49        }
50    }
51}
52
53impl Config {
54    pub fn load() -> Result<Self> {
55        let config_path = Self::config_path()?;
56
57        if !config_path.exists() {
58            let config = Config::default();
59            config.save()?;
60            return Ok(config);
61        }
62
63        let content = fs::read_to_string(&config_path).context("Failed to read config file")?;
64
65        let config: Config =
66            serde_json::from_str(&content).context("Failed to parse config file")?;
67
68        Ok(config)
69    }
70
71    pub fn save(&self) -> Result<()> {
72        let config_path = Self::config_path()?;
73
74        if let Some(parent) = config_path.parent() {
75            fs::create_dir_all(parent)?;
76        }
77
78        let content = serde_json::to_string_pretty(self)?;
79        fs::write(&config_path, content).context("Failed to write config file")?;
80
81        Ok(())
82    }
83
84    fn config_path() -> Result<PathBuf> {
85        Ok(ProjectDirs::from("", "", "manx")
86            .context("Failed to determine config directory")?
87            .config_dir()
88            .join("config.json"))
89    }
90
91    pub fn merge_with_cli(
92        &mut self,
93        api_key: Option<String>,
94        cache_dir: Option<PathBuf>,
95        offline: bool,
96    ) {
97        if api_key.is_some() {
98            self.api_key = api_key;
99        }
100        if cache_dir.is_some() {
101            self.cache_dir = cache_dir;
102        }
103        if offline {
104            self.offline_mode = true;
105        }
106
107        // Check NO_COLOR environment variable
108        if std::env::var("NO_COLOR").is_ok() {
109            self.color_output = false;
110        }
111    }
112
113    pub fn display(&self) -> String {
114        let mut output = String::new();
115        output.push_str("Current Configuration:\n");
116        output.push_str("=====================\n\n");
117
118        // Context7 MCP Settings
119        output.push_str("Context7 MCP:\n");
120        output.push_str(&format!(
121            "  API Key: {}\n",
122            self.api_key
123                .as_ref()
124                .map(|k| {
125                    if k.len() > 8 {
126                        format!("{}...{}", &k[..4], &k[k.len() - 4..])
127                    } else {
128                        "***".to_string()
129                    }
130                })
131                .unwrap_or_else(|| "Not set".to_string())
132        ));
133
134        output.push_str(&format!(
135            "  Cache Directory: {}\n",
136            self.cache_dir
137                .as_ref()
138                .map(|p| p.display().to_string())
139                .unwrap_or_else(|| "Default (~/.cache/manx)".to_string())
140        ));
141
142        output.push_str(&format!("  Default Search Limit: {}\n", self.default_limit));
143        output.push_str(&format!("  Offline Mode: {}\n", self.offline_mode));
144        output.push_str(&format!("  Color Output: {}\n", self.color_output));
145        output.push_str(&format!(
146            "  Auto Cache Enabled: {}\n",
147            self.auto_cache_enabled
148        ));
149        output.push_str(&format!("  Cache TTL (hours): {}\n", self.cache_ttl_hours));
150        output.push_str(&format!(
151            "  Max Cache Size (MB): {}\n",
152            self.max_cache_size_mb
153        ));
154
155        // Local RAG Settings
156        output.push_str("\nLocal RAG:\n");
157        output.push_str(&format!("  Enabled: {}\n", self.rag.enabled));
158        output.push_str(&format!(
159            "  Index Path: {}\n",
160            self.rag.index_path.display()
161        ));
162        output.push_str(&format!("  Max Results: {}\n", self.rag.max_results));
163        output.push_str(&format!(
164            "  PDF Processing: {} (Security Setting)\n",
165            if self.rag.allow_pdf_processing {
166                "Enabled"
167            } else {
168                "Disabled"
169            }
170        ));
171
172        // Embedding Settings
173        output.push_str(&format!(
174            "  Embedding Provider: {:?}\n",
175            self.rag.embedding.provider
176        ));
177        output.push_str(&format!(
178            "  Embedding Dimension: {}\n",
179            self.rag.embedding.dimension
180        ));
181        if let Some(model_path) = &self.rag.embedding.model_path {
182            output.push_str(&format!("  Model Path: {}\n", model_path.display()));
183        }
184        if self.rag.embedding.api_key.is_some() {
185            output.push_str("  API Key: ****\n");
186        }
187        if let Some(endpoint) = &self.rag.embedding.endpoint {
188            output.push_str(&format!("  Custom Endpoint: {}\n", endpoint));
189        }
190
191        // LLM Settings
192        output.push_str("\nLLM Integration:\n");
193        let llm_status = if self.has_llm_configured() {
194            "Available"
195        } else {
196            "Not configured"
197        };
198        output.push_str(&format!("  Status: {}\n", llm_status));
199
200        if let Some(key) = &self.llm.openai_api_key {
201            output.push_str(&format!(
202                "  OpenAI API Key: {}...{}\n",
203                &key[..4],
204                &key[key.len() - 4..]
205            ));
206        }
207
208        if let Some(key) = &self.llm.anthropic_api_key {
209            output.push_str(&format!(
210                "  Anthropic API Key: {}...{}\n",
211                &key[..4],
212                &key[key.len() - 4..]
213            ));
214        }
215
216        if let Some(key) = &self.llm.groq_api_key {
217            output.push_str(&format!(
218                "  Groq API Key: {}...{}\n",
219                &key[..4],
220                &key[key.len() - 4..]
221            ));
222        }
223
224        if let Some(key) = &self.llm.openrouter_api_key {
225            output.push_str(&format!(
226                "  OpenRouter API Key: {}...{}\n",
227                &key[..4],
228                &key[key.len() - 4..]
229            ));
230        }
231
232        if let Some(key) = &self.llm.huggingface_api_key {
233            output.push_str(&format!(
234                "  HuggingFace API Key: {}...{}\n",
235                &key[..4],
236                &key[key.len() - 4..]
237            ));
238        }
239
240        if let Some(key) = &self.llm.zai_api_key {
241            output.push_str(&format!(
242                "  Z.AI API Key: {}...{}\n",
243                &key[..4],
244                &key[key.len() - 4..]
245            ));
246        }
247
248        if let Some(endpoint) = &self.llm.custom_endpoint {
249            output.push_str(&format!("  Custom Endpoint: {}\n", endpoint));
250        }
251
252        output.push_str(&format!("  Provider: {:?}\n", self.llm.preferred_provider));
253
254        if let Some(model) = &self.llm.model_name {
255            output.push_str(&format!("  Model: {}\n", model));
256        }
257
258        output
259    }
260
261    /// Check if LLM functionality should be used
262    pub fn should_use_llm(&self, no_llm_flag: bool) -> bool {
263        if no_llm_flag {
264            return false;
265        }
266        self.has_llm_configured()
267    }
268
269    /// Check if any LLM provider is configured
270    pub fn has_llm_configured(&self) -> bool {
271        self.llm.openai_api_key.is_some()
272            || self.llm.anthropic_api_key.is_some()
273            || self.llm.groq_api_key.is_some()
274            || self.llm.openrouter_api_key.is_some()
275            || self.llm.huggingface_api_key.is_some()
276            || self.llm.zai_api_key.is_some()
277            || self.llm.custom_endpoint.is_some()
278    }
279
280    /// Set LLM API key (auto-detect provider)
281    pub fn set_llm_api_key(&mut self, key: String) -> Result<()> {
282        if key.is_empty() {
283            // Clear all API keys
284            self.llm.openai_api_key = None;
285            self.llm.anthropic_api_key = None;
286            return Ok(());
287        }
288
289        // Auto-detect provider based on key format
290        if key.starts_with("sk-") {
291            self.llm.openai_api_key = Some(key);
292            self.llm.preferred_provider = LlmProvider::OpenAI;
293        } else if key.starts_with("sk-ant-") {
294            self.llm.anthropic_api_key = Some(key);
295            self.llm.preferred_provider = LlmProvider::Anthropic;
296        } else {
297            // Default to OpenAI format
298            self.llm.openai_api_key = Some(key);
299            self.llm.preferred_provider = LlmProvider::OpenAI;
300        }
301
302        self.save()
303    }
304
305    /// Set OpenAI API key
306    pub fn set_openai_api_key(&mut self, key: String) -> Result<()> {
307        if key.is_empty() {
308            self.llm.openai_api_key = None;
309        } else {
310            self.llm.openai_api_key = Some(key);
311            self.llm.preferred_provider = LlmProvider::OpenAI;
312        }
313        self.save()
314    }
315
316    /// Set Anthropic API key
317    pub fn set_anthropic_api_key(&mut self, key: String) -> Result<()> {
318        if key.is_empty() {
319            self.llm.anthropic_api_key = None;
320        } else {
321            self.llm.anthropic_api_key = Some(key);
322            self.llm.preferred_provider = LlmProvider::Anthropic;
323        }
324        self.save()
325    }
326
327    /// Set Groq API key
328    pub fn set_groq_api_key(&mut self, key: String) -> Result<()> {
329        if key.is_empty() {
330            self.llm.groq_api_key = None;
331        } else {
332            self.llm.groq_api_key = Some(key);
333            self.llm.preferred_provider = LlmProvider::Groq;
334        }
335        self.save()
336    }
337
338    /// Set OpenRouter API key
339    pub fn set_openrouter_api_key(&mut self, key: String) -> Result<()> {
340        if key.is_empty() {
341            self.llm.openrouter_api_key = None;
342        } else {
343            self.llm.openrouter_api_key = Some(key);
344            self.llm.preferred_provider = LlmProvider::OpenRouter;
345        }
346        self.save()
347    }
348
349    /// Set HuggingFace API key
350    pub fn set_huggingface_api_key(&mut self, key: String) -> Result<()> {
351        if key.is_empty() {
352            self.llm.huggingface_api_key = None;
353        } else {
354            self.llm.huggingface_api_key = Some(key);
355            self.llm.preferred_provider = LlmProvider::HuggingFace;
356        }
357        self.save()
358    }
359
360    /// Set custom endpoint
361    pub fn set_custom_endpoint(&mut self, endpoint: String) -> Result<()> {
362        if endpoint.is_empty() {
363            self.llm.custom_endpoint = None;
364        } else {
365            self.llm.custom_endpoint = Some(endpoint);
366            self.llm.preferred_provider = LlmProvider::Custom;
367        }
368        self.save()
369    }
370
371    /// Set LLM provider preference
372    pub fn set_llm_provider(&mut self, provider: String) -> Result<()> {
373        match provider.to_lowercase().as_str() {
374            "openai" => self.llm.preferred_provider = LlmProvider::OpenAI,
375            "anthropic" => self.llm.preferred_provider = LlmProvider::Anthropic,
376            "groq" => self.llm.preferred_provider = LlmProvider::Groq,
377            "openrouter" => self.llm.preferred_provider = LlmProvider::OpenRouter,
378            "huggingface" => self.llm.preferred_provider = LlmProvider::HuggingFace,
379            "zai" => self.llm.preferred_provider = LlmProvider::Zai,
380            "custom" => self.llm.preferred_provider = LlmProvider::Custom,
381            "auto" => self.llm.preferred_provider = LlmProvider::Auto,
382            _ => anyhow::bail!("Invalid provider '{}'. Use: openai, anthropic, groq, openrouter, huggingface, zai, custom, auto", provider),
383        }
384        self.save()
385    }
386
387    /// Set specific LLM model
388    pub fn set_llm_model(&mut self, model: String) -> Result<()> {
389        if model.is_empty() {
390            self.llm.model_name = None;
391        } else {
392            self.llm.model_name = Some(model);
393        }
394        self.save()
395    }
396
397    /// Enable/disable local RAG
398    pub fn set_rag_enabled(&mut self, enabled: bool) -> Result<()> {
399        self.rag.enabled = enabled;
400        self.save()
401    }
402
403    /// Set embedding provider (dimension will be detected dynamically)
404    pub fn set_embedding_provider(&mut self, provider_str: &str) -> Result<()> {
405        use crate::rag::EmbeddingProvider;
406
407        let provider = match provider_str.to_lowercase().as_str() {
408            "hash" => EmbeddingProvider::Hash,
409            _ if provider_str.starts_with("onnx:") => {
410                let model_name = provider_str.strip_prefix("onnx:").unwrap_or("").to_string();
411                if model_name.is_empty() {
412                    anyhow::bail!("ONNX provider requires model name: onnx:model_name");
413                }
414                EmbeddingProvider::Onnx(model_name)
415            },
416            _ if provider_str.starts_with("ollama:") => {
417                let model_name = provider_str.strip_prefix("ollama:").unwrap_or("").to_string();
418                if model_name.is_empty() {
419                    anyhow::bail!("Ollama provider requires model name: ollama:model_name");
420                }
421                EmbeddingProvider::Ollama(model_name)
422            },
423            _ if provider_str.starts_with("openai:") => {
424                let model_name = provider_str.strip_prefix("openai:").unwrap_or("text-embedding-3-small").to_string();
425                EmbeddingProvider::OpenAI(model_name)
426            },
427            _ if provider_str.starts_with("huggingface:") => {
428                let model_name = provider_str.strip_prefix("huggingface:").unwrap_or("").to_string();
429                if model_name.is_empty() {
430                    anyhow::bail!("HuggingFace provider requires model name: huggingface:model_name");
431                }
432                EmbeddingProvider::HuggingFace(model_name)
433            },
434            _ if provider_str.starts_with("custom:") => {
435                let endpoint = provider_str.strip_prefix("custom:").unwrap_or("").to_string();
436                if endpoint.is_empty() {
437                    anyhow::bail!("Custom provider requires endpoint URL: custom:http://...");
438                }
439                EmbeddingProvider::Custom(endpoint)
440            },
441            _ => anyhow::bail!(
442                "Invalid embedding provider '{}'. Use: hash, onnx:model, ollama:model, openai:model, huggingface:model, custom:url",
443                provider_str
444            ),
445        };
446
447        // Set provider (dimension will be detected on first use)
448        self.rag.embedding.provider = provider;
449
450        self.save()
451    }
452
453    /// Set embedding API key (for API providers)
454    pub fn set_embedding_api_key(&mut self, key: String) -> Result<()> {
455        if key.is_empty() {
456            self.rag.embedding.api_key = None;
457        } else {
458            self.rag.embedding.api_key = Some(key);
459        }
460        self.save()
461    }
462
463    /// Set embedding model path (for local models)
464    pub fn set_embedding_model_path(&mut self, path: std::path::PathBuf) -> Result<()> {
465        self.rag.embedding.model_path = Some(path);
466        self.save()
467    }
468
469    /// Set embedding dimension
470    pub fn set_embedding_dimension(&mut self, dimension: usize) -> Result<()> {
471        if dimension == 0 {
472            anyhow::bail!("Embedding dimension must be greater than 0");
473        }
474        self.rag.embedding.dimension = dimension;
475        self.save()
476    }
477}