// mermaid_cli/app/config.rs

1use crate::constants::{DEFAULT_LITELLM_PROXY_URL, DEFAULT_OLLAMA_PORT};
2use crate::prompts;
3use anyhow::{Context, Result};
4use directories::ProjectDirs;
5use figment::{
6    providers::{Env, Format, Serialized, Toml},
7    Figment,
8};
9use serde::{Deserialize, Serialize};
10use std::path::PathBuf;
11
/// Main configuration structure.
///
/// Assembled by `load_config` from layered sources (built-in defaults,
/// global and project-local TOML files, `MERMAID_`-prefixed environment
/// variables). Every section is optional in the files and falls back to
/// its own `Default` via `#[serde(default)]`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Default model configuration
    #[serde(default)]
    pub default_model: ModelSettings,

    /// LiteLLM proxy configuration
    #[serde(default)]
    pub litellm: LiteLLMConfig,

    /// Ollama configuration
    #[serde(default)]
    pub ollama: OllamaConfig,

    /// OpenAI configuration
    #[serde(default)]
    pub openai: OpenAIConfig,

    /// Anthropic configuration
    #[serde(default)]
    pub anthropic: AnthropicConfig,

    /// UI configuration
    #[serde(default)]
    pub ui: UIConfig,

    /// Context loader configuration
    #[serde(default)]
    pub context: ContextConfig,

    /// Operation mode configuration
    #[serde(default)]
    pub mode: ModeConfig,
}
47
48impl Default for Config {
49    fn default() -> Self {
50        Self {
51            default_model: ModelSettings::default(),
52            litellm: LiteLLMConfig::default(),
53            ollama: OllamaConfig::default(),
54            openai: OpenAIConfig::default(),
55            anthropic: AnthropicConfig::default(),
56            ui: UIConfig::default(),
57            context: ContextConfig::default(),
58            mode: ModeConfig::default(),
59        }
60    }
61}
62
/// Default model settings.
///
/// Describes which provider/model to use plus generation parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelSettings {
    /// Model provider (ollama, openai, anthropic)
    pub provider: String,
    /// Model name
    pub name: String,
    /// Temperature for generation (sampling randomness; typical range 0.0-1.0)
    pub temperature: f32,
    /// Maximum tokens to generate
    pub max_tokens: usize,
    /// System prompt; `None` means no system prompt is sent
    pub system_prompt: Option<String>,
}
77
impl ModelSettings {
    /// Default system prompt that teaches models how to use Mermaid's action blocks.
    ///
    /// Delegates to the `prompts` module so the prompt text lives in one place.
    pub fn default_system_prompt() -> String {
        prompts::get_system_prompt()
    }
}
84
85impl Default for ModelSettings {
86    fn default() -> Self {
87        Self {
88            provider: String::from("ollama"),
89            name: String::from("tinyllama"),
90            temperature: 0.7,
91            max_tokens: 4096,
92            system_prompt: Some(Self::default_system_prompt()),
93        }
94    }
95}
96
/// LiteLLM proxy configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LiteLLMConfig {
    /// Proxy server URL
    pub proxy_url: String,
    /// Master key for authentication; `None` disables auth
    pub master_key: Option<String>,
}
105
106impl Default for LiteLLMConfig {
107    fn default() -> Self {
108        Self {
109            proxy_url: DEFAULT_LITELLM_PROXY_URL.to_string(),
110            master_key: None,
111        }
112    }
113}
114
/// Ollama configuration.
///
/// Connection settings plus optional runtime tuning knobs; any `None`
/// field defers to Ollama's own auto-detection or model defaults.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaConfig {
    /// Ollama server host
    pub host: String,
    /// Ollama server port
    pub port: u16,
    /// Ollama cloud API key (for :cloud models)
    /// Set this to use Ollama's cloud inference service
    /// Get your key at: https://ollama.com/cloud
    pub cloud_api_key: Option<String>,
    /// Number of GPU layers to offload (None = auto, 0 = CPU only, positive = specific count)
    /// Lower values free up VRAM for larger models at the cost of speed
    pub num_gpu: Option<i32>,
    /// Number of CPU threads for processing offloaded layers
    /// Higher values improve CPU inference speed for large models
    pub num_thread: Option<i32>,
    /// Context window size (number of tokens)
    /// Larger values allow longer conversations but use more memory
    pub num_ctx: Option<i32>,
    /// Enable NUMA optimization for multi-CPU systems
    pub numa: Option<bool>,
}
138
139impl Default for OllamaConfig {
140    fn default() -> Self {
141        Self {
142            host: String::from("localhost"),
143            port: DEFAULT_OLLAMA_PORT,
144            cloud_api_key: None,
145            num_gpu: None,    // Let Ollama auto-detect
146            num_thread: None, // Let Ollama auto-detect
147            num_ctx: None,    // Use model default
148            numa: None,       // Auto-detect
149        }
150    }
151}
152
/// OpenAI configuration.
///
/// The API key is read indirectly via an environment variable name so the
/// secret itself is never stored in a config file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIConfig {
    /// Environment variable containing API key
    pub api_key_env: String,
    /// Organization ID (optional)
    pub organization: Option<String>,
}
161
162impl Default for OpenAIConfig {
163    fn default() -> Self {
164        Self {
165            api_key_env: String::from("OPENAI_API_KEY"),
166            organization: None,
167        }
168    }
169}
170
/// Anthropic configuration.
///
/// Like `OpenAIConfig`, stores only the name of the env var holding the
/// API key, never the key itself.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// Environment variable containing API key
    pub api_key_env: String,
}
177
178impl Default for AnthropicConfig {
179    fn default() -> Self {
180        Self {
181            api_key_env: String::from("ANTHROPIC_API_KEY"),
182        }
183    }
184}
185
/// UI configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UIConfig {
    /// Color theme
    pub theme: String,
    /// Syntax highlighting theme
    pub syntax_theme: String,
    /// Show line numbers in code blocks
    pub show_line_numbers: bool,
    /// Show file sidebar by default
    pub show_sidebar: bool,
}
198
199impl Default for UIConfig {
200    fn default() -> Self {
201        Self {
202            theme: String::from("dark"),
203            syntax_theme: String::from("monokai"),
204            show_line_numbers: true,
205            show_sidebar: true,
206        }
207    }
208}
209
/// Context loader configuration.
///
/// Caps how much of the project is pulled into the model's context.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextConfig {
    /// Maximum file size to load (in bytes)
    pub max_file_size: usize,
    /// Maximum number of files to include
    pub max_files: usize,
    /// Maximum total context size in tokens
    pub max_context_tokens: usize,
    /// Auto-include these file patterns (glob syntax)
    pub include_patterns: Vec<String>,
    /// Always exclude these patterns (glob syntax)
    pub exclude_patterns: Vec<String>,
}
224
225impl Default for ContextConfig {
226    fn default() -> Self {
227        Self {
228            max_file_size: 1024 * 1024, // 1MB
229            max_files: 100,
230            max_context_tokens: 50000,
231            include_patterns: vec![],
232            exclude_patterns: vec![String::from("*.log"), String::from("*.tmp")],
233        }
234    }
235}
236
/// Operation mode configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModeConfig {
    /// Default operation mode (normal, accept_edits, plan_mode, bypass_all)
    pub default_mode: String,
    /// Remember mode between sessions
    pub remember_mode: bool,
    /// Auto-commit in AcceptEdits mode
    pub auto_commit_on_accept: bool,
    /// Require double confirmation for destructive operations in BypassAll mode
    pub require_destructive_confirmation: bool,
}
249
250impl Default for ModeConfig {
251    fn default() -> Self {
252        Self {
253            default_mode: String::from("normal"),
254            remember_mode: false,
255            auto_commit_on_accept: false,
256            require_destructive_confirmation: true,
257        }
258    }
259}
260
261/// Load configuration from multiple sources
262pub fn load_config() -> Result<Config> {
263    // Get config directories
264    let config_dir = get_config_dir()?;
265    let global_config = config_dir.join("config.toml");
266    let local_config = PathBuf::from(".mermaid/config.toml");
267
268    // Build figment configuration
269    let mut figment = Figment::from(Serialized::defaults(Config::default()));
270
271    // Add global config if it exists
272    if global_config.exists() {
273        figment = figment.merge(Toml::file(&global_config));
274    }
275
276    // Add local config if it exists
277    if local_config.exists() {
278        figment = figment.merge(Toml::file(&local_config));
279    }
280
281    // Add environment variables (MERMAID_ prefix)
282    figment = figment.merge(Env::prefixed("MERMAID_"));
283
284    // Extract and return config
285    figment
286        .extract()
287        .context("Failed to load configuration. Check that config files are valid TOML format.")
288}
289
290/// Get the configuration directory
291pub fn get_config_dir() -> Result<PathBuf> {
292    if let Some(proj_dirs) = ProjectDirs::from("", "", "mermaid") {
293        let config_dir = proj_dirs.config_dir();
294        std::fs::create_dir_all(config_dir)?;
295        Ok(config_dir.to_path_buf())
296    } else {
297        // Fallback to home directory
298        let home = std::env::var("HOME")
299            .or_else(|_| std::env::var("USERPROFILE"))
300            .context("Could not determine home directory")?;
301        let config_dir = PathBuf::from(home).join(".config").join("mermaid");
302        std::fs::create_dir_all(&config_dir)?;
303        Ok(config_dir)
304    }
305}
306
307/// Save configuration to file
308pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
309    let path = if let Some(p) = path {
310        p
311    } else {
312        get_config_dir()?.join("config.toml")
313    };
314
315    let toml_string = toml::to_string_pretty(config)?;
316    std::fs::write(&path, toml_string)
317        .with_context(|| format!("Failed to write config to {}", path.display()))?;
318
319    Ok(())
320}
321
322/// Create a default configuration file if it doesn't exist
323pub fn init_config() -> Result<()> {
324    let config_dir = get_config_dir()?;
325    let config_file = config_dir.join("config.toml");
326
327    if !config_file.exists() {
328        let default_config = Config::default();
329        save_config(&default_config, Some(config_file.clone()))?;
330        println!(
331            "Created default configuration at: {}",
332            config_file.display()
333        );
334    }
335
336    // Create example local config
337    let local_example = PathBuf::from(".mermaid/config.toml.example");
338    if !local_example.exists() {
339        if let Some(parent) = local_example.parent() {
340            std::fs::create_dir_all(parent)?;
341        }
342        let example_config = r#"# Mermaid Project Configuration
343# This file overrides global settings for this project
344
345[default_model]
346provider = "ollama"
347name = "tinyllama"
348temperature = 0.7
349max_tokens = 4096
350
351[context]
352max_files = 150
353max_context_tokens = 75000
354include_patterns = ["src/**/*.rs", "Cargo.toml"]
355"#;
356        std::fs::write(&local_example, example_config)?;
357        println!(
358            "Created example configuration at: {}",
359            local_example.display()
360        );
361    }
362
363    Ok(())
364}