
saorsa_agent/config/settings.rs

//! General agent settings.

use std::fmt;
use std::path::Path;
use std::str::FromStr;

use serde::{Deserialize, Serialize};

use crate::error::{Result, SaorsaAgentError};

/// Extended-thinking / chain-of-thought level.
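///
/// # Examples
///
/// A minimal parse/format sketch (illustrative only; the import path assumes
/// this module is exposed as `saorsa_agent::config::settings`):
///
/// ```ignore
/// use saorsa_agent::config::settings::ThinkingLevel;
///
/// // Parsing accepts the canonical names plus the aliases handled by `FromStr`.
/// let level: ThinkingLevel = "medium".parse().expect("valid level");
/// assert_eq!(level, ThinkingLevel::Medium);
///
/// // `Display` renders the lowercase canonical name.
/// assert_eq!(level.to_string(), "medium");
/// ```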
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ThinkingLevel {
    /// Thinking disabled.
    #[default]
    Off,
    /// Minimal thinking budget.
    Low,
    /// Moderate thinking budget.
    Medium,
    /// Maximum thinking budget.
    High,
}

impl fmt::Display for ThinkingLevel {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let s = match self {
            Self::Off => "off",
            Self::Low => "low",
            Self::Medium => "medium",
            Self::High => "high",
        };
        f.write_str(s)
    }
}

/// Error returned when parsing an invalid thinking level string.
#[derive(Clone, Debug, PartialEq, Eq, thiserror::Error)]
#[error("invalid thinking level: '{0}' (expected off, low, medium, high)")]
pub struct ParseThinkingLevelError(String);

impl FromStr for ThinkingLevel {
    type Err = ParseThinkingLevelError;

    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        match s.trim().to_lowercase().as_str() {
            "off" | "none" | "0" => Ok(Self::Off),
            "low" | "1" => Ok(Self::Low),
            "medium" | "med" | "2" => Ok(Self::Medium),
            "high" | "3" => Ok(Self::High),
            other => Err(ParseThinkingLevelError(other.to_string())),
        }
    }
}

/// General agent settings that apply across all sessions.
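///
/// Every field is marked `#[serde(default)]`, so a partial JSON config file
/// deserializes cleanly with the remaining fields left at their defaults.
///
/// # Examples
///
/// A minimal sketch of deserializing a partial JSON object (illustrative; the
/// import path assumes this module is exposed as `saorsa_agent::config::settings`):
///
/// ```ignore
/// use saorsa_agent::config::settings::{Settings, ThinkingLevel};
///
/// // Unspecified fields fall back to their defaults.
/// let settings: Settings =
///     serde_json::from_str(r#"{"default_provider": "anthropic"}"#).expect("valid JSON");
/// assert_eq!(settings.default_provider.as_deref(), Some("anthropic"));
/// assert_eq!(settings.thinking_level, ThinkingLevel::Off);
/// assert!(settings.enabled_models.is_empty());
/// ```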
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct Settings {
    /// Default provider name (e.g. `"anthropic"`, `"openai"`).
    #[serde(default)]
    pub default_provider: Option<String>,
    /// Default model identifier.
    #[serde(default)]
    pub default_model: Option<String>,
    /// Extended-thinking level.
    #[serde(default)]
    pub thinking_level: ThinkingLevel,
    /// List of model identifiers the user has enabled for selection.
    #[serde(default)]
    pub enabled_models: Vec<String>,
    /// Maximum number of agent turns per run.
    #[serde(default)]
    pub max_turns: Option<u32>,
    /// Maximum tokens per LLM response.
    #[serde(default)]
    pub max_tokens: Option<u32>,
}

/// Load settings from a JSON file.
///
/// Returns [`Settings::default()`] if the file does not exist.
///
/// # Errors
///
/// Returns [`SaorsaAgentError::ConfigIo`] on I/O failures or
/// [`SaorsaAgentError::ConfigParse`] on JSON parse failures.
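///
/// # Examples
///
/// A minimal sketch (illustrative; assumes this module is exposed as
/// `saorsa_agent::config::settings` and that `tempfile` is available as a
/// dev-dependency):
///
/// ```ignore
/// use saorsa_agent::config::settings::load;
///
/// let dir = tempfile::tempdir().expect("temp dir");
/// let path = dir.path().join("missing.json");
///
/// // A missing file is not an error: defaults are returned instead.
/// let settings = load(&path).expect("load settings");
/// assert!(settings.default_provider.is_none());
/// ```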
pub fn load(path: &Path) -> Result<Settings> {
    if !path.exists() {
        return Ok(Settings::default());
    }
    let data = std::fs::read_to_string(path).map_err(SaorsaAgentError::ConfigIo)?;
    let settings: Settings = serde_json::from_str(&data).map_err(SaorsaAgentError::ConfigParse)?;
    Ok(settings)
}

/// Save settings to a JSON file.
///
/// Creates parent directories if they do not exist.
///
/// # Errors
///
/// Returns [`SaorsaAgentError::ConfigIo`] on I/O failures or
/// [`SaorsaAgentError::ConfigParse`] on serialization failures.
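///
/// # Examples
///
/// A minimal save/load round-trip sketch (illustrative; assumes this module is
/// exposed as `saorsa_agent::config::settings` and that `tempfile` is available
/// as a dev-dependency):
///
/// ```ignore
/// use saorsa_agent::config::settings::{load, save, Settings};
///
/// let dir = tempfile::tempdir().expect("temp dir");
/// // Nested parent directories are created on demand.
/// let path = dir.path().join("config").join("settings.json");
///
/// let settings = Settings { max_turns: Some(20), ..Settings::default() };
/// save(&settings, &path).expect("save settings");
///
/// let loaded = load(&path).expect("load settings");
/// assert_eq!(loaded.max_turns, Some(20));
/// ```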
pub fn save(settings: &Settings, path: &Path) -> Result<()> {
    if let Some(parent) = path.parent() {
        std::fs::create_dir_all(parent).map_err(SaorsaAgentError::ConfigIo)?;
    }
    let data = serde_json::to_string_pretty(settings).map_err(SaorsaAgentError::ConfigParse)?;
    std::fs::write(path, data).map_err(SaorsaAgentError::ConfigIo)?;
    Ok(())
}

/// Merge overlay settings into base settings, returning a new [`Settings`].
///
/// Fields in `overlay` that are `Some` or non-default override the
/// corresponding fields in `base`.
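///
/// # Examples
///
/// A minimal sketch (illustrative; assumes this module is exposed as
/// `saorsa_agent::config::settings`):
///
/// ```ignore
/// use saorsa_agent::config::settings::{merge, Settings};
///
/// let base = Settings { max_turns: Some(10), ..Settings::default() };
/// let overlay = Settings { max_turns: None, max_tokens: Some(8192), ..Settings::default() };
///
/// let merged = merge(&base, &overlay);
/// // `overlay.max_turns` is `None`, so the base value survives.
/// assert_eq!(merged.max_turns, Some(10));
/// assert_eq!(merged.max_tokens, Some(8192));
/// ```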
pub fn merge(base: &Settings, overlay: &Settings) -> Settings {
    Settings {
        default_provider: overlay
            .default_provider
            .clone()
            .or_else(|| base.default_provider.clone()),
        default_model: overlay
            .default_model
            .clone()
            .or_else(|| base.default_model.clone()),
        thinking_level: if overlay.thinking_level != ThinkingLevel::Off {
            overlay.thinking_level.clone()
        } else {
            base.thinking_level.clone()
        },
        enabled_models: if overlay.enabled_models.is_empty() {
            base.enabled_models.clone()
        } else {
            overlay.enabled_models.clone()
        },
        max_turns: overlay.max_turns.or(base.max_turns),
        max_tokens: overlay.max_tokens.or(base.max_tokens),
    }
}

#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use super::*;

    #[test]
    fn roundtrip_settings() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("settings.json");

        let settings = Settings {
            default_provider: Some("anthropic".into()),
            default_model: Some("claude-sonnet-4-5-20250929".into()),
            thinking_level: ThinkingLevel::High,
            enabled_models: vec!["claude-sonnet-4-5-20250929".into(), "gpt-4".into()],
            max_turns: Some(20),
            max_tokens: Some(8192),
        };

        save(&settings, &path).unwrap();
        let loaded = load(&path).unwrap();

        assert_eq!(loaded.default_provider.as_deref(), Some("anthropic"));
        assert_eq!(
            loaded.default_model.as_deref(),
            Some("claude-sonnet-4-5-20250929")
        );
        assert_eq!(loaded.thinking_level, ThinkingLevel::High);
        assert_eq!(loaded.enabled_models.len(), 2);
        assert_eq!(loaded.max_turns, Some(20));
        assert_eq!(loaded.max_tokens, Some(8192));
    }

    #[test]
    fn load_missing_file_returns_default() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("nonexistent.json");
        let settings = load(&path).unwrap();
        assert!(settings.default_provider.is_none());
        assert!(settings.default_model.is_none());
        assert_eq!(settings.thinking_level, ThinkingLevel::Off);
        assert!(settings.enabled_models.is_empty());
        assert!(settings.max_turns.is_none());
        assert!(settings.max_tokens.is_none());
    }

    #[test]
    fn merge_overlay_wins() {
        let base = Settings {
            default_provider: Some("anthropic".into()),
            default_model: Some("old-model".into()),
            thinking_level: ThinkingLevel::Low,
            enabled_models: vec!["a".into()],
            max_turns: Some(10),
            max_tokens: Some(4096),
        };
        let overlay = Settings {
            default_provider: Some("openai".into()),
            default_model: None,
            thinking_level: ThinkingLevel::High,
            enabled_models: vec!["b".into(), "c".into()],
            max_turns: None,
            max_tokens: Some(8192),
        };

        let merged = merge(&base, &overlay);
        assert_eq!(merged.default_provider.as_deref(), Some("openai"));
        // overlay.default_model is None, so base wins.
        assert_eq!(merged.default_model.as_deref(), Some("old-model"));
        assert_eq!(merged.thinking_level, ThinkingLevel::High);
        assert_eq!(merged.enabled_models, vec!["b", "c"]);
        // overlay.max_turns is None, so base wins.
        assert_eq!(merged.max_turns, Some(10));
        assert_eq!(merged.max_tokens, Some(8192));
    }

    #[test]
    fn merge_base_preserved_when_overlay_empty() {
        let base = Settings {
            default_provider: Some("anthropic".into()),
            default_model: Some("model".into()),
            thinking_level: ThinkingLevel::Medium,
            enabled_models: vec!["x".into()],
            max_turns: Some(5),
            max_tokens: Some(2048),
        };
        let overlay = Settings::default();

        let merged = merge(&base, &overlay);
        assert_eq!(merged.default_provider.as_deref(), Some("anthropic"));
        assert_eq!(merged.default_model.as_deref(), Some("model"));
        // ThinkingLevel::Off in overlay means use base.
        assert_eq!(merged.thinking_level, ThinkingLevel::Medium);
        assert_eq!(merged.enabled_models, vec!["x"]);
        assert_eq!(merged.max_turns, Some(5));
        assert_eq!(merged.max_tokens, Some(2048));
    }

    #[test]
    fn save_creates_parent_dirs() {
        let tmp = tempfile::tempdir().unwrap();
        let path = tmp.path().join("a").join("b").join("settings.json");
        let settings = Settings::default();
        save(&settings, &path).unwrap();
        assert!(path.exists());
    }

    #[test]
    fn thinking_level_serde_roundtrip() {
        let json = serde_json::to_string(&ThinkingLevel::High).unwrap();
        assert_eq!(json, "\"high\"");
        let deserialized: ThinkingLevel = serde_json::from_str(&json).unwrap();
        assert_eq!(deserialized, ThinkingLevel::High);
    }

    #[test]
    fn thinking_level_default_is_off() {
        assert_eq!(ThinkingLevel::default(), ThinkingLevel::Off);
    }

    #[test]
    fn thinking_level_display() {
        assert_eq!(ThinkingLevel::Off.to_string(), "off");
        assert_eq!(ThinkingLevel::Low.to_string(), "low");
        assert_eq!(ThinkingLevel::Medium.to_string(), "medium");
        assert_eq!(ThinkingLevel::High.to_string(), "high");
    }

    #[test]
    fn thinking_level_from_str() {
        assert_eq!("off".parse::<ThinkingLevel>().unwrap(), ThinkingLevel::Off);
        assert_eq!("low".parse::<ThinkingLevel>().unwrap(), ThinkingLevel::Low);
        assert_eq!(
            "medium".parse::<ThinkingLevel>().unwrap(),
            ThinkingLevel::Medium
        );
        assert_eq!(
            "high".parse::<ThinkingLevel>().unwrap(),
            ThinkingLevel::High
        );
        // Case insensitive.
        assert_eq!(
            "HIGH".parse::<ThinkingLevel>().unwrap(),
            ThinkingLevel::High
        );
        // Numeric aliases.
        assert_eq!("0".parse::<ThinkingLevel>().unwrap(), ThinkingLevel::Off);
        assert_eq!("3".parse::<ThinkingLevel>().unwrap(), ThinkingLevel::High);
    }

    #[test]
    fn thinking_level_from_str_invalid() {
        let err = "extreme".parse::<ThinkingLevel>().unwrap_err();
        assert!(err.to_string().contains("extreme"));
    }
}