use crate::constants::{DEFAULT_MAX_TOKENS, DEFAULT_OLLAMA_PORT, DEFAULT_TEMPERATURE};
use anyhow::{Context, Result};
use directories::ProjectDirs;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

7#[derive(Debug, Clone, Default, Serialize, Deserialize)]
9pub struct Config {
10 #[serde(default)]
12 pub last_used_model: Option<String>,
13
14 #[serde(default)]
16 pub default_model: ModelSettings,
17
18 #[serde(default)]
20 pub ollama: OllamaConfig,
21
22 #[serde(default)]
24 pub non_interactive: NonInteractiveConfig,
25}
26
/// Identity and sampling parameters for the default model.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct ModelSettings {
    /// Backend provider id; an empty string means "unset" (see the
    /// `is_empty` checks in `resolve_model_id`).
    pub provider: String,
    /// Model name within the provider; empty string means "unset".
    pub name: String,
    /// Sampling temperature; falls back to `DEFAULT_TEMPERATURE`.
    pub temperature: f32,
    /// Upper bound on generated tokens; falls back to `DEFAULT_MAX_TOKENS`.
    pub max_tokens: usize,
}
40
41impl Default for ModelSettings {
42 fn default() -> Self {
43 Self {
44 provider: String::new(),
45 name: String::new(),
46 temperature: DEFAULT_TEMPERATURE,
47 max_tokens: DEFAULT_MAX_TOKENS,
48 }
49 }
50}
51
/// Connection details and runtime tuning for the Ollama backend.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct OllamaConfig {
    /// Hostname of the Ollama server (defaults to "localhost").
    pub host: String,
    /// TCP port of the Ollama server (defaults to `DEFAULT_OLLAMA_PORT`).
    pub port: u16,
    /// API key for a hosted Ollama service; `None` for a local instance.
    pub cloud_api_key: Option<String>,
    // NOTE(review): the four options below are presumably forwarded as
    // Ollama runtime options (GPU layers, thread count, context window,
    // NUMA); `None` means "let the server decide" — confirm against the
    // request-building code.
    pub num_gpu: Option<i32>,
    pub num_thread: Option<i32>,
    pub num_ctx: Option<i32>,
    pub numa: Option<bool>,
}
76
77impl Default for OllamaConfig {
78 fn default() -> Self {
79 Self {
80 host: String::from("localhost"),
81 port: DEFAULT_OLLAMA_PORT,
82 cloud_api_key: None,
83 num_gpu: None, num_thread: None, num_ctx: None, numa: None, }
88 }
89}
90
/// Settings that apply only when running without the interactive UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct NonInteractiveConfig {
    /// Output rendering format (defaults to "text").
    pub output_format: String,
    /// Token generation cap for one-shot runs (defaults to `DEFAULT_MAX_TOKENS`).
    pub max_tokens: usize,
    // NOTE(review): presumably suppresses execution of model-proposed
    // commands when true — verify against the non-interactive runner.
    pub no_execute: bool,
}
102
103impl Default for NonInteractiveConfig {
104 fn default() -> Self {
105 Self {
106 output_format: String::from("text"),
107 max_tokens: DEFAULT_MAX_TOKENS,
108 no_execute: false,
109 }
110 }
111}
112
113pub fn load_config() -> Result<Config> {
116 let config_path = get_config_path()?;
117
118 if config_path.exists() {
119 let toml_str = std::fs::read_to_string(&config_path)
120 .with_context(|| format!("Failed to read {}", config_path.display()))?;
121 let config: Config = toml::from_str(&toml_str).with_context(|| {
122 format!(
123 "Failed to parse {}. Run 'mermaid init' to regenerate.",
124 config_path.display()
125 )
126 })?;
127 Ok(config)
128 } else {
129 Ok(Config::default())
130 }
131}
132
133pub fn get_config_path() -> Result<PathBuf> {
135 Ok(get_config_dir()?.join("config.toml"))
136}
137
138pub fn get_config_dir() -> Result<PathBuf> {
140 if let Some(proj_dirs) = ProjectDirs::from("", "", "mermaid") {
141 let config_dir = proj_dirs.config_dir();
142 std::fs::create_dir_all(config_dir)?;
143 Ok(config_dir.to_path_buf())
144 } else {
145 let home = std::env::var("HOME")
147 .or_else(|_| std::env::var("USERPROFILE"))
148 .context("Could not determine home directory")?;
149 let config_dir = PathBuf::from(home).join(".config").join("mermaid");
150 std::fs::create_dir_all(&config_dir)?;
151 Ok(config_dir)
152 }
153}
154
155pub fn save_config(config: &Config, path: Option<PathBuf>) -> Result<()> {
157 let path = if let Some(p) = path {
158 p
159 } else {
160 get_config_dir()?.join("config.toml")
161 };
162
163 let toml_string = toml::to_string_pretty(config)?;
164 std::fs::write(&path, toml_string)
165 .with_context(|| format!("Failed to write config to {}", path.display()))?;
166
167 Ok(())
168}
169
170pub fn init_config() -> Result<()> {
172 let config_file = get_config_path()?;
173
174 if config_file.exists() {
175 println!("Configuration already exists at: {}", config_file.display());
176 } else {
177 let default_config = Config::default();
178 save_config(&default_config, Some(config_file.clone()))?;
179 println!("Created configuration at: {}", config_file.display());
180 }
181
182 Ok(())
183}
184
185pub fn persist_last_model(model: &str) -> Result<()> {
187 let mut config = load_config().unwrap_or_default();
188 config.last_used_model = Some(model.to_string());
189 save_config(&config, None)
190}
191
192pub async fn resolve_model_id(cli_model: Option<&str>, config: &Config) -> anyhow::Result<String> {
194 if let Some(model) = cli_model {
195 return Ok(model.to_string());
196 }
197 if let Some(last_model) = &config.last_used_model {
198 return Ok(last_model.clone());
199 }
200 if !config.default_model.provider.is_empty() && !config.default_model.name.is_empty() {
201 return Ok(format!(
202 "{}/{}",
203 config.default_model.provider, config.default_model.name
204 ));
205 }
206 let available = crate::ollama::require_any_model().await?;
207 Ok(format!("ollama/{}", available[0]))
208}