1use crate::constants::{DEFAULT_LITELLM_PROXY_URL, DEFAULT_OLLAMA_PORT};
2use crate::prompts;
3use anyhow::{Context, Result};
4use directories::ProjectDirs;
5use figment::{
6 providers::{Env, Format, Serialized, Toml},
7 Figment,
8};
9use serde::{Deserialize, Serialize};
10use std::path::PathBuf;
11
/// Top-level application configuration.
///
/// Built by `load_config` from (lowest to highest precedence): built-in
/// defaults, the global `config.toml`, a project-local
/// `.mermaid/config.toml`, and `MERMAID_`-prefixed environment variables.
/// Every section is `#[serde(default)]`, so partial config files are valid.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    /// Model settings used when none are specified explicitly.
    #[serde(default)]
    pub default_model: ModelSettings,

    /// LiteLLM proxy connection settings.
    #[serde(default)]
    pub litellm: LiteLLMConfig,

    /// Ollama server connection and runtime settings.
    #[serde(default)]
    pub ollama: OllamaConfig,

    /// OpenAI provider settings.
    #[serde(default)]
    pub openai: OpenAIConfig,

    /// Anthropic provider settings.
    #[serde(default)]
    pub anthropic: AnthropicConfig,

    /// Terminal UI preferences.
    #[serde(default)]
    pub ui: UIConfig,

    /// Limits and filters for collecting project context.
    #[serde(default)]
    pub context: ContextConfig,

    /// Mode-related behavior flags.
    #[serde(default)]
    pub mode: ModeConfig,
}
47
48impl Default for Config {
49 fn default() -> Self {
50 Self {
51 default_model: ModelSettings::default(),
52 litellm: LiteLLMConfig::default(),
53 ollama: OllamaConfig::default(),
54 openai: OpenAIConfig::default(),
55 anthropic: AnthropicConfig::default(),
56 ui: UIConfig::default(),
57 context: ContextConfig::default(),
58 mode: ModeConfig::default(),
59 }
60 }
61}
62
/// Settings describing which model to talk to and how.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModelSettings {
    /// Backend provider identifier, e.g. "ollama".
    pub provider: String,
    /// Model name as known to the provider, e.g. "tinyllama".
    pub name: String,
    /// Sampling temperature.
    pub temperature: f32,
    /// Token budget — presumably the per-response generation limit;
    /// confirm against the provider call sites.
    pub max_tokens: usize,
    /// Optional system prompt; `None` means no system prompt is configured.
    pub system_prompt: Option<String>,
}
77
impl ModelSettings {
    /// Returns the built-in default system prompt, delegating to
    /// `crate::prompts::get_system_prompt`.
    pub fn default_system_prompt() -> String {
        prompts::get_system_prompt()
    }
}
84
85impl Default for ModelSettings {
86 fn default() -> Self {
87 Self {
88 provider: String::from("ollama"),
89 name: String::from("tinyllama"),
90 temperature: 0.7,
91 max_tokens: 4096,
92 system_prompt: Some(Self::default_system_prompt()),
93 }
94 }
95}
96
/// Connection settings for a LiteLLM proxy.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LiteLLMConfig {
    /// Base URL of the LiteLLM proxy.
    pub proxy_url: String,
    /// Optional master key — presumably used to authenticate against the
    /// proxy; confirm where it is sent.
    pub master_key: Option<String>,
}
105
106impl Default for LiteLLMConfig {
107 fn default() -> Self {
108 Self {
109 proxy_url: DEFAULT_LITELLM_PROXY_URL.to_string(),
110 master_key: None,
111 }
112 }
113}
114
/// Connection and runtime settings for an Ollama server.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaConfig {
    /// Hostname of the Ollama server.
    pub host: String,
    /// TCP port of the Ollama server.
    pub port: u16,
    /// Optional API key — presumably for Ollama's cloud offering, with
    /// `None` meaning a local server; confirm at the request builder.
    pub cloud_api_key: Option<String>,
    // NOTE(review): the four fields below look like Ollama model runtime
    // options (GPU layers, CPU threads, context length, NUMA). `None`
    // appears to mean "use the server default" — confirm against the code
    // that assembles the Ollama request options.
    pub num_gpu: Option<i32>,
    pub num_thread: Option<i32>,
    pub num_ctx: Option<i32>,
    pub numa: Option<bool>,
}
138
139impl Default for OllamaConfig {
140 fn default() -> Self {
141 Self {
142 host: String::from("localhost"),
143 port: DEFAULT_OLLAMA_PORT,
144 cloud_api_key: None,
145 num_gpu: None, num_thread: None, num_ctx: None, numa: None, }
150 }
151}
152
/// OpenAI provider settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAIConfig {
    /// Name of the environment variable holding the API key; the key itself
    /// is never stored in the config file.
    pub api_key_env: String,
    /// Optional OpenAI organization identifier.
    pub organization: Option<String>,
}
161
162impl Default for OpenAIConfig {
163 fn default() -> Self {
164 Self {
165 api_key_env: String::from("OPENAI_API_KEY"),
166 organization: None,
167 }
168 }
169}
170
/// Anthropic provider settings.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// Name of the environment variable holding the API key; the key itself
    /// is never stored in the config file.
    pub api_key_env: String,
}
177
178impl Default for AnthropicConfig {
179 fn default() -> Self {
180 Self {
181 api_key_env: String::from("ANTHROPIC_API_KEY"),
182 }
183 }
184}
185
/// Terminal UI preferences.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UIConfig {
    /// Overall color theme name, e.g. "dark".
    pub theme: String,
    /// Syntax-highlighting theme name, e.g. "monokai".
    pub syntax_theme: String,
    /// Whether code views render line numbers.
    pub show_line_numbers: bool,
    /// Whether the sidebar is shown.
    pub show_sidebar: bool,
}
198
199impl Default for UIConfig {
200 fn default() -> Self {
201 Self {
202 theme: String::from("dark"),
203 syntax_theme: String::from("monokai"),
204 show_line_numbers: true,
205 show_sidebar: true,
206 }
207 }
208}
209
/// Limits and filters applied when collecting project files as context.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContextConfig {
    /// Largest single file to include (default 1 MiB — presumably bytes;
    /// confirm at the call site that enforces it).
    pub max_file_size: usize,
    /// Maximum number of files to include.
    pub max_files: usize,
    /// Token budget for the assembled context.
    pub max_context_tokens: usize,
    /// Patterns of files to include; empty by default — presumably meaning
    /// "no include filter", confirm where these are applied.
    pub include_patterns: Vec<String>,
    /// Patterns of files to exclude, e.g. "*.log".
    pub exclude_patterns: Vec<String>,
}
224
225impl Default for ContextConfig {
226 fn default() -> Self {
227 Self {
228 max_file_size: 1024 * 1024, max_files: 100,
230 max_context_tokens: 50000,
231 include_patterns: vec![],
232 exclude_patterns: vec![String::from("*.log"), String::from("*.tmp")],
233 }
234 }
235}
236
/// Mode-related behavior flags.
// NOTE(review): field semantics below are inferred from the names — confirm
// against the code that reads them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ModeConfig {
    /// Mode selected at startup, e.g. "normal".
    pub default_mode: String,
    /// Presumably: remember the last-used mode across sessions.
    pub remember_mode: bool,
    /// Presumably: commit automatically when a change is accepted.
    pub auto_commit_on_accept: bool,
    /// Presumably: require explicit confirmation for destructive operations.
    pub require_destructive_confirmation: bool,
}
249
250impl Default for ModeConfig {
251 fn default() -> Self {
252 Self {
253 default_mode: String::from("normal"),
254 remember_mode: false,
255 auto_commit_on_accept: false,
256 require_destructive_confirmation: true,
257 }
258 }
259}
260
261pub fn load_config() -> Result<Config> {
263 let config_dir = get_config_dir()?;
265 let global_config = config_dir.join("config.toml");
266 let local_config = PathBuf::from(".mermaid/config.toml");
267
268 let mut figment = Figment::from(Serialized::defaults(Config::default()));
270
271 if global_config.exists() {
273 figment = figment.merge(Toml::file(&global_config));
274 }
275
276 if local_config.exists() {
278 figment = figment.merge(Toml::file(&local_config));
279 }
280
281 figment = figment.merge(Env::prefixed("MERMAID_"));
283
284 figment
286 .extract()
287 .context("Failed to load configuration. Check that config files are valid TOML format.")
288}
289
290pub fn get_config_dir() -> Result<PathBuf> {
292 if let Some(proj_dirs) = ProjectDirs::from("", "", "mermaid") {
293 let config_dir = proj_dirs.config_dir();
294 std::fs::create_dir_all(config_dir)?;
295 Ok(config_dir.to_path_buf())
296 } else {
297 let home = std::env::var("HOME")
299 .or_else(|_| std::env::var("USERPROFILE"))
300 .context("Could not determine home directory")?;
301 let config_dir = PathBuf::from(home).join(".config").join("mermaid");
302 std::fs::create_dir_all(&config_dir)?;
303 Ok(config_dir)
304 }
305}
306
307pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
309 let path = if let Some(p) = path {
310 p
311 } else {
312 get_config_dir()?.join("config.toml")
313 };
314
315 let toml_string = toml::to_string_pretty(config)?;
316 std::fs::write(&path, toml_string)
317 .with_context(|| format!("Failed to write config to {}", path.display()))?;
318
319 Ok(())
320}
321
322pub fn init_config() -> Result<()> {
324 let config_dir = get_config_dir()?;
325 let config_file = config_dir.join("config.toml");
326
327 if !config_file.exists() {
328 let default_config = Config::default();
329 save_config(&default_config, Some(config_file.clone()))?;
330 println!(
331 "Created default configuration at: {}",
332 config_file.display()
333 );
334 }
335
336 let local_example = PathBuf::from(".mermaid/config.toml.example");
338 if !local_example.exists() {
339 if let Some(parent) = local_example.parent() {
340 std::fs::create_dir_all(parent)?;
341 }
342 let example_config = r#"# Mermaid Project Configuration
343# This file overrides global settings for this project
344
345[default_model]
346provider = "ollama"
347name = "tinyllama"
348temperature = 0.7
349max_tokens = 4096
350
351[context]
352max_files = 150
353max_context_tokens = 75000
354include_patterns = ["src/**/*.rs", "Cargo.toml"]
355"#;
356 std::fs::write(&local_example, example_config)?;
357 println!(
358 "Created example configuration at: {}",
359 local_example.display()
360 );
361 }
362
363 Ok(())
364}