1use crate::constants::{DEFAULT_MAX_TOKENS, DEFAULT_OLLAMA_PORT, DEFAULT_TEMPERATURE};
2use crate::prompts;
3use anyhow::{Context, Result};
4use directories::ProjectDirs;
5use serde::{Deserialize, Serialize};
6use std::path::PathBuf;
7
/// Top-level application configuration, persisted as `config.toml` in the
/// platform config directory (see [`get_config_dir`]).
///
/// Every field carries `#[serde(default)]`, so a partial or older config file
/// still deserializes, with missing sections falling back to their defaults.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Config {
    /// Model identifier from the most recent session, if any
    /// (written by [`persist_last_model`]).
    #[serde(default)]
    pub last_used_model: Option<String>,

    /// Default model/generation settings.
    #[serde(default)]
    pub default_model: ModelSettings,

    /// Ollama backend connection and runtime options.
    #[serde(default)]
    pub ollama: OllamaConfig,

    /// OpenAI backend settings.
    #[serde(default)]
    pub openai: OpenAIConfig,

    /// Anthropic backend settings.
    #[serde(default)]
    pub anthropic: AnthropicConfig,

    /// User-interface preferences.
    #[serde(default)]
    pub ui: UIConfig,

    /// Mode selection and confirmation behavior.
    #[serde(default)]
    pub mode: ModeConfig,

    /// General application behavior toggles.
    #[serde(default)]
    pub behavior: BehaviorConfig,

    /// Settings applied when running non-interactively.
    #[serde(default)]
    pub non_interactive: NonInteractiveConfig,
}
47
/// Generation settings for a model: which provider/model to use plus
/// sampling and prompt parameters. `#[serde(default)]` on the struct fills
/// any missing field from [`Default`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct ModelSettings {
    /// Backend provider name (defaults to an empty string, i.e. unset).
    pub provider: String,
    /// Model name as understood by the provider (defaults to empty, i.e. unset).
    pub name: String,
    /// Sampling temperature.
    pub temperature: f32,
    /// Maximum number of tokens to generate.
    pub max_tokens: usize,
    /// System prompt; `None` means no system prompt. Defaults to the
    /// built-in prompt from [`ModelSettings::default_system_prompt`].
    pub system_prompt: Option<String>,
}
63
impl ModelSettings {
    /// Returns the built-in system prompt used when the config file does not
    /// override it (delegates to `prompts::get_system_prompt`).
    pub fn default_system_prompt() -> String {
        prompts::get_system_prompt()
    }
}
70
71impl Default for ModelSettings {
72 fn default() -> Self {
73 Self {
74 provider: String::new(),
75 name: String::new(),
76 temperature: DEFAULT_TEMPERATURE,
77 max_tokens: DEFAULT_MAX_TOKENS,
78 system_prompt: Some(Self::default_system_prompt()),
79 }
80 }
81}
82
/// Connection and runtime tuning options for the Ollama backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct OllamaConfig {
    /// Host name of the Ollama server (defaults to "localhost").
    pub host: String,
    /// TCP port of the Ollama server.
    pub port: u16,
    /// API key for Ollama's cloud service, if used.
    pub cloud_api_key: Option<String>,
    /// Ollama `num_gpu` runtime option; `None` leaves it to the server.
    /// (Presumably the GPU layer count — confirm against Ollama docs.)
    pub num_gpu: Option<i32>,
    /// Ollama `num_thread` runtime option; `None` leaves it to the server.
    pub num_thread: Option<i32>,
    /// Ollama `num_ctx` (context window) option; `None` leaves it to the server.
    pub num_ctx: Option<i32>,
    /// Ollama `numa` option; `None` leaves it to the server.
    pub numa: Option<bool>,
}
107
108impl Default for OllamaConfig {
109 fn default() -> Self {
110 Self {
111 host: String::from("localhost"),
112 port: DEFAULT_OLLAMA_PORT,
113 cloud_api_key: None,
114 num_gpu: None, num_thread: None, num_ctx: None, numa: None, }
119 }
120}
121
/// Settings for the OpenAI backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct OpenAIConfig {
    /// Name of the environment variable holding the API key
    /// (defaults to "OPENAI_API_KEY").
    pub api_key_env: String,
    /// Optional OpenAI organization identifier.
    pub organization: Option<String>,
}
131
132impl Default for OpenAIConfig {
133 fn default() -> Self {
134 Self {
135 api_key_env: String::from("OPENAI_API_KEY"),
136 organization: None,
137 }
138 }
139}
140
/// Settings for the Anthropic backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct AnthropicConfig {
    /// Name of the environment variable holding the API key
    /// (defaults to "ANTHROPIC_API_KEY").
    pub api_key_env: String,
}
148
149impl Default for AnthropicConfig {
150 fn default() -> Self {
151 Self {
152 api_key_env: String::from("ANTHROPIC_API_KEY"),
153 }
154 }
155}
156
/// User-interface preferences.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct UIConfig {
    /// UI color theme name (defaults to "dark").
    pub theme: String,
}
164
165impl Default for UIConfig {
166 fn default() -> Self {
167 Self {
168 theme: String::from("dark"),
169 }
170 }
171}
172
/// Mode selection and confirmation behavior.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct ModeConfig {
    /// Mode the app starts in (defaults to "normal").
    pub default_mode: String,
    /// Whether to carry the last mode over to the next session.
    /// NOTE(review): exact persistence semantics live outside this file — confirm.
    pub remember_mode: bool,
    /// Whether accepting a change also commits it automatically.
    pub auto_commit_on_accept: bool,
    /// Whether destructive actions require explicit confirmation
    /// (defaults to `true`).
    pub require_destructive_confirmation: bool,
}
186
187impl Default for ModeConfig {
188 fn default() -> Self {
189 Self {
190 default_mode: String::from("normal"),
191 remember_mode: false,
192 auto_commit_on_accept: false,
193 require_destructive_confirmation: true,
194 }
195 }
196}
197
/// General application behavior toggles.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct BehaviorConfig {
    /// Whether missing models may be installed automatically
    /// (defaults to `true`).
    pub auto_install_models: bool,
    /// Backend selection strategy (defaults to "auto").
    /// NOTE(review): the set of accepted values is defined elsewhere — confirm.
    pub backend: String,
}
207
208impl Default for BehaviorConfig {
209 fn default() -> Self {
210 Self {
211 auto_install_models: true,
212 backend: String::from("auto"),
213 }
214 }
215}
216
/// Settings applied when running without an interactive terminal.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct NonInteractiveConfig {
    /// Output format (defaults to "text").
    pub output_format: String,
    /// Maximum number of tokens to generate in non-interactive runs.
    pub max_tokens: usize,
    /// When `true`, suppress execution of generated commands/changes
    /// (defaults to `false`).
    pub no_execute: bool,
}
228
229impl Default for NonInteractiveConfig {
230 fn default() -> Self {
231 Self {
232 output_format: String::from("text"),
233 max_tokens: DEFAULT_MAX_TOKENS,
234 no_execute: false,
235 }
236 }
237}
238
239pub fn load_config() -> Result<Config> {
242 let config_path = get_config_path()?;
243
244 if config_path.exists() {
245 let toml_str = std::fs::read_to_string(&config_path)
246 .with_context(|| format!("Failed to read {}", config_path.display()))?;
247 let config: Config = toml::from_str(&toml_str)
248 .with_context(|| format!("Failed to parse {}. Run 'mermaid init' to regenerate.", config_path.display()))?;
249 Ok(config)
250 } else {
251 Ok(Config::default())
252 }
253}
254
255pub fn get_config_path() -> Result<PathBuf> {
257 Ok(get_config_dir()?.join("config.toml"))
258}
259
260pub fn get_config_dir() -> Result<PathBuf> {
262 if let Some(proj_dirs) = ProjectDirs::from("", "", "mermaid") {
263 let config_dir = proj_dirs.config_dir();
264 std::fs::create_dir_all(config_dir)?;
265 Ok(config_dir.to_path_buf())
266 } else {
267 let home = std::env::var("HOME")
269 .or_else(|_| std::env::var("USERPROFILE"))
270 .context("Could not determine home directory")?;
271 let config_dir = PathBuf::from(home).join(".config").join("mermaid");
272 std::fs::create_dir_all(&config_dir)?;
273 Ok(config_dir)
274 }
275}
276
277pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
279 let path = if let Some(p) = path {
280 p
281 } else {
282 get_config_dir()?.join("config.toml")
283 };
284
285 let toml_string = toml::to_string_pretty(config)?;
286 std::fs::write(&path, toml_string)
287 .with_context(|| format!("Failed to write config to {}", path.display()))?;
288
289 Ok(())
290}
291
292pub fn init_config() -> Result<()> {
294 let config_file = get_config_path()?;
295
296 if config_file.exists() {
297 println!("Configuration already exists at: {}", config_file.display());
298 } else {
299 let default_config = Config::default();
300 save_config(&default_config, Some(config_file.clone()))?;
301 println!("Created configuration at: {}", config_file.display());
302 }
303
304 Ok(())
305}
306
307pub fn persist_last_model(model: &str) -> Result<()> {
309 let mut config = load_config().unwrap_or_default();
310 config.last_used_model = Some(model.to_string());
311 save_config(&config, None)
312}