// muninn/config.rs
1use std::collections::{BTreeMap, HashSet};
2use std::env;
3use std::ffi::OsString;
4use std::fs;
5use std::path::{Path, PathBuf};
6
7use serde::{Deserialize, Serialize};
8use thiserror::Error;
9
10use crate::target_context::TargetContextSnapshot;
11use crate::transcription::{
12    ResolvedTranscriptionRoute, TranscriptionProvider, TranscriptionRouteSource,
13};
14
// On-disk config file name; presumably joined under the config directory by
// `resolve_config_path` / `write_default_config` (defined elsewhere) — confirm there.
const DEFAULT_CONFIG_FILE_NAME: &str = "config.toml";
// Application-specific directory name under the platform config root.
const DEFAULT_CONFIG_DIR_NAME: &str = "muninn";
17
/// Root configuration tree, deserialized from `config.toml`.
///
/// `#[serde(default)]` lets any missing section fall back to its `Default`;
/// `deny_unknown_fields` makes typos in the TOML fail loudly at parse time.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct AppConfig {
    pub app: AppSettings,
    pub hotkeys: HotkeysConfig,
    pub indicator: IndicatorConfig,
    pub recording: RecordingConfig,
    pub pipeline: PipelineConfig,
    pub scoring: ScoringConfig,
    pub transcription: TranscriptionConfig,
    pub transcript: TranscriptConfig,
    pub refine: RefineConfig,
    pub logging: LoggingConfig,
    pub providers: ProvidersConfig,
    // Named voice presets (prompt + refine overrides), keyed by voice id.
    #[serde(default)]
    pub voices: BTreeMap<String, VoiceConfig>,
    // Named profiles that may override whole config sections, keyed by profile id.
    #[serde(default)]
    pub profiles: BTreeMap<String, ProfileConfig>,
    // Ordered context-matching rules; the first rule whose `matches` returns
    // true selects the profile (see `resolve_profile_selection`).
    #[serde(default)]
    pub profile_rules: Vec<ProfileRuleConfig>,
}
39
impl AppConfig {
    /// Loads the config from the default location, creating a default file
    /// there first when none exists. The location comes from
    /// `resolve_config_path` (defined elsewhere in this module).
    pub fn load() -> Result<Self, ConfigError> {
        let path = resolve_config_path()?;
        Self::load_or_create_default(path)
    }

    /// Reads, parses, and validates the TOML config at `path`.
    ///
    /// Errors: `NotFound` when the file is missing, `Read` on I/O failure,
    /// `ParseTomlAtPath` on malformed TOML, or any `ConfigValidationError`
    /// from `validate` (converted via `?`).
    pub fn load_from_path(path: impl AsRef<Path>) -> Result<Self, ConfigError> {
        let path = path.as_ref();
        if !path.exists() {
            return Err(ConfigError::NotFound {
                path: path.to_path_buf(),
            });
        }

        let raw = fs::read_to_string(path).map_err(|source| ConfigError::Read {
            path: path.to_path_buf(),
            source,
        })?;

        let config: Self = toml::from_str(&raw).map_err(|source| ConfigError::ParseTomlAtPath {
            path: path.to_path_buf(),
            source,
        })?;
        config.validate()?;

        Ok(config)
    }

    /// Parses and validates a config from an in-memory TOML string.
    /// Uses the path-less `ParseToml` error variant.
    pub fn from_toml_str(raw: &str) -> Result<Self, ConfigError> {
        let config: Self =
            toml::from_str(raw).map_err(|source| ConfigError::ParseToml { source })?;
        config.validate()?;
        Ok(config)
    }

    /// A default config that actually passes `validate()`: the plain
    /// `Default` has an empty pipeline (rejected by validation), so this adds
    /// a generous 40 s deadline, the default transcription provider route,
    /// and a single best-effort "refine" step that continues on error.
    pub fn launchable_default() -> Self {
        let mut config = Self::default();
        config.pipeline.deadline_ms = 40_000;
        config.transcription.providers =
            Some(TranscriptionProvider::default_ordered_route().to_vec());
        config.pipeline.steps = vec![PipelineStepConfig {
            id: "refine".to_string(),
            cmd: "refine".to_string(),
            args: Vec::new(),
            io_mode: StepIoMode::Auto,
            timeout_ms: 2_500,
            on_error: OnErrorPolicy::Continue,
        }];
        config
    }

    /// Writes a default config file (via `write_default_config`, defined
    /// elsewhere) when `path` does not exist, then loads it normally.
    fn load_or_create_default(path: impl AsRef<Path>) -> Result<Self, ConfigError> {
        let path = path.as_ref();
        if !path.exists() {
            write_default_config(path)?;
        }

        Self::load_from_path(path)
    }

    /// Cross-field validation of the whole config tree. Returns the first
    /// failing check; later sections are not inspected once one fails.
    pub fn validate(&self) -> Result<(), ConfigValidationError> {
        validate_identifier(self.app.profile.trim(), "app.profile")?;

        if self.pipeline.deadline_ms == 0 {
            return Err(ConfigValidationError::PipelineDeadlineMsMustBePositive);
        }

        if self.pipeline.steps.is_empty() {
            return Err(ConfigValidationError::PipelineMustContainAtLeastOneStep);
        }

        self.refine.validate()?;
        self.transcript.validate("transcript")?;
        self.transcription.validate("transcription.providers")?;
        self.providers.validate()?;

        // Per-hotkey checks: every binding needs a non-empty chord, and a
        // double-tap binding may not set an explicit zero timeout (None is
        // fine — it falls back to the default timeout).
        for (name, binding) in [
            ("push_to_talk", &self.hotkeys.push_to_talk),
            ("done_mode_toggle", &self.hotkeys.done_mode_toggle),
            (
                "cancel_current_capture",
                &self.hotkeys.cancel_current_capture,
            ),
        ] {
            if binding.chord.is_empty() {
                return Err(ConfigValidationError::HotkeyChordMustNotBeEmpty {
                    hotkey_name: name.to_string(),
                });
            }

            if matches!(binding.trigger, TriggerType::DoubleTap)
                && binding.double_tap_timeout_ms == Some(0)
            {
                return Err(ConfigValidationError::DoubleTapTimeoutMsMustBePositive {
                    hotkey_name: name.to_string(),
                });
            }
        }

        self.indicator.colors.validate()?;
        self.recording.validate()?;
        validate_pipeline_steps(&self.pipeline.steps)?;
        validate_voices(&self.voices)?;
        validate_profiles(self)?;
        validate_profile_rules(self)?;

        Ok(())
    }

    /// Picks the profile for the current target context: the first matching
    /// rule wins, otherwise falls back to `app.profile`. The fallback is only
    /// flagged (glyph suppressed, reason recorded) when rules exist but none
    /// matched — a config with no rules at all is not treated as a fallback.
    #[must_use]
    pub fn resolve_profile_selection(
        &self,
        target_context: &TargetContextSnapshot,
    ) -> ResolvedProfileSelection {
        let matched_rule = self
            .profile_rules
            .iter()
            .find(|rule| rule.matches(target_context));
        let used_default_profile_fallback =
            matched_rule.is_none() && !self.profile_rules.is_empty();
        let profile_id = matched_rule
            .map(|rule| rule.profile.clone())
            .unwrap_or_else(|| self.app.profile.clone());
        let explicit_profile = self.profiles.get(&profile_id);
        let voice_id = explicit_profile.and_then(|profile| profile.voice.clone());
        // Preserve the default profile behavior, but let the indicator render the
        // generic M glyph until a contextual rule matches.
        let voice_glyph = if used_default_profile_fallback {
            None
        } else {
            voice_id
                .as_deref()
                .and_then(|voice_id| self.voices.get(voice_id))
                .and_then(VoiceConfig::normalized_indicator_glyph)
        };
        let fallback_reason = if used_default_profile_fallback {
            Some(fallback_reason(target_context, &self.app.profile))
        } else {
            None
        };

        ResolvedProfileSelection {
            matched_rule_id: matched_rule.map(|rule| rule.id.clone()),
            profile_id,
            voice_id,
            voice_glyph,
            fallback_reason,
        }
    }

    /// Builds the per-utterance effective config by layering overrides in
    /// order: voice overrides (prompt + refine knobs) first, then the
    /// selected profile's section overrides, then materializing the final
    /// system prompt. Voice/profile/rule tables are carried into the
    /// snapshot only when replay logging is enabled — presumably to keep
    /// replay snapshots self-contained without bloating normal ones; confirm
    /// against the replay consumer.
    #[must_use]
    pub fn resolve_effective_config(
        &self,
        target_context: TargetContextSnapshot,
    ) -> ResolvedUtteranceConfig {
        let selection = self.resolve_profile_selection(&target_context);
        let mut recording = self.recording.clone();
        let mut pipeline = self.pipeline.clone();
        let mut transcription = self.transcription.clone();
        let mut transcript = self.transcript.clone();
        let mut refine = self.refine.clone();

        if let Some(voice_id) = selection.voice_id.as_deref() {
            if let Some(voice) = self.voices.get(voice_id) {
                voice.apply_to(&mut transcript, &mut refine);
            }
        }

        if let Some(profile) = self.profiles.get(&selection.profile_id) {
            if let Some(recording_overrides) = profile.recording.as_ref() {
                recording_overrides.apply_to(&mut recording);
            }
            if let Some(pipeline_overrides) = profile.pipeline.as_ref() {
                pipeline_overrides.apply_to(&mut pipeline);
            }
            if let Some(transcription_overrides) = profile.transcription.as_ref() {
                transcription_overrides.apply_to(&mut transcription);
            }
            if let Some(transcript_overrides) = profile.transcript.as_ref() {
                transcript_overrides.apply_to(&mut transcript);
            }
            if let Some(refine_overrides) = profile.refine.as_ref() {
                refine_overrides.apply_to(&mut refine);
            }
        }

        // Fold any pending `system_prompt_append` into the final prompt.
        transcript.materialize_system_prompt();

        let transcription_route = resolve_transcription_route_from_parts(&transcription, &pipeline);
        let pipeline = expand_pipeline_with_transcription_route(&pipeline, &transcription_route);
        let providers = self.providers.clone();
        let logging = self.logging.clone();
        let replay_snapshot_enabled = logging.replay_enabled;
        let effective_config = AppConfig {
            app: self.app.clone(),
            hotkeys: self.hotkeys.clone(),
            indicator: self.indicator.clone(),
            recording,
            pipeline,
            scoring: self.scoring.clone(),
            transcription,
            transcript,
            refine,
            logging,
            providers,
            voices: if replay_snapshot_enabled {
                self.voices.clone()
            } else {
                BTreeMap::new()
            },
            profiles: if replay_snapshot_enabled {
                self.profiles.clone()
            } else {
                BTreeMap::new()
            },
            profile_rules: if replay_snapshot_enabled {
                self.profile_rules.clone()
            } else {
                Vec::new()
            },
        };

        ResolvedUtteranceConfig {
            target_context,
            matched_rule_id: selection.matched_rule_id,
            profile_id: selection.profile_id,
            voice_id: selection.voice_id,
            voice_glyph: selection.voice_glyph,
            fallback_reason: selection.fallback_reason,
            transcription_route,
            builtin_steps: ResolvedBuiltinStepConfig::from_app_config(&effective_config),
            effective_config,
        }
    }
}
275
/// Top-level application settings (`[app]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct AppSettings {
    // Id of the profile used when no profile rule matches; validated as an
    // identifier in `AppConfig::validate`.
    pub profile: String,
    // NOTE(review): enforcement of the strict step contract happens outside
    // this module — confirm semantics at the consumer.
    pub strict_step_contract: bool,
    pub autostart: bool,
}
283
284impl Default for AppSettings {
285    fn default() -> Self {
286        Self {
287            profile: "default".to_string(),
288            strict_step_contract: true,
289            autostart: false,
290        }
291    }
292}
293
/// The three global hotkey bindings (`[hotkeys]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct HotkeysConfig {
    pub push_to_talk: HotkeyBinding,
    pub done_mode_toggle: HotkeyBinding,
    pub cancel_current_capture: HotkeyBinding,
}
301
302impl Default for HotkeysConfig {
303    fn default() -> Self {
304        Self {
305            push_to_talk: HotkeyBinding {
306                trigger: TriggerType::DoubleTap,
307                chord: vec!["ctrl".to_string()],
308                double_tap_timeout_ms: Some(default_double_tap_timeout_ms()),
309            },
310            done_mode_toggle: HotkeyBinding {
311                trigger: TriggerType::Press,
312                chord: vec!["ctrl".to_string(), "shift".to_string(), "d".to_string()],
313                double_tap_timeout_ms: None,
314            },
315            cancel_current_capture: HotkeyBinding {
316                trigger: TriggerType::Press,
317                chord: vec!["ctrl".to_string(), "shift".to_string(), "x".to_string()],
318                double_tap_timeout_ms: None,
319            },
320        }
321    }
322}
323
/// A single hotkey: a trigger kind plus the key chord it listens for.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct HotkeyBinding {
    pub trigger: TriggerType,
    // Key names, e.g. ["ctrl", "shift", "d"]; must be non-empty (validated).
    pub chord: Vec<String>,
    // Only meaningful for `TriggerType::DoubleTap`; `None` means "use the
    // default timeout" (see `effective_double_tap_timeout_ms`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub double_tap_timeout_ms: Option<u64>,
}
332
333impl Default for HotkeyBinding {
334    fn default() -> Self {
335        Self {
336            trigger: TriggerType::Press,
337            chord: Vec::new(),
338            double_tap_timeout_ms: None,
339        }
340    }
341}
342
343impl HotkeyBinding {
344    #[must_use]
345    pub fn effective_double_tap_timeout_ms(&self) -> u64 {
346        self.double_tap_timeout_ms
347            .unwrap_or_else(default_double_tap_timeout_ms)
348    }
349}
350
/// How a hotkey chord fires; serialized as "hold" / "press" / "double_tap".
/// (Exact runtime semantics live in the hotkey listener — not visible here.)
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum TriggerType {
    Hold,
    // Default trigger kind when a binding omits `trigger`.
    #[default]
    Press,
    DoubleTap,
}
359
/// On-screen indicator settings (`[indicator]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct IndicatorConfig {
    pub show_recording: bool,
    pub show_processing: bool,
    // Per-state color palette; every entry must be a valid hex color.
    #[serde(default)]
    pub colors: IndicatorColorsConfig,
}
368
369impl Default for IndicatorConfig {
370    fn default() -> Self {
371        Self {
372            show_recording: true,
373            show_processing: true,
374            colors: IndicatorColorsConfig::default(),
375        }
376    }
377}
378
/// Hex color strings for each indicator state (validated in `validate`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct IndicatorColorsConfig {
    pub idle: String,
    pub recording: String,
    pub transcribing: String,
    pub pipeline: String,
    pub output: String,
    pub cancelled: String,
    pub outline: String,
    pub glyph: String,
}
391
392impl IndicatorColorsConfig {
393    pub fn validate(&self) -> Result<(), ConfigValidationError> {
394        for (color_name, color_value) in [
395            ("indicator.colors.idle", self.idle.as_str()),
396            ("indicator.colors.recording", self.recording.as_str()),
397            ("indicator.colors.transcribing", self.transcribing.as_str()),
398            ("indicator.colors.pipeline", self.pipeline.as_str()),
399            ("indicator.colors.output", self.output.as_str()),
400            ("indicator.colors.cancelled", self.cancelled.as_str()),
401            ("indicator.colors.outline", self.outline.as_str()),
402            ("indicator.colors.glyph", self.glyph.as_str()),
403        ] {
404            if !is_valid_hex_color(color_value) {
405                return Err(ConfigValidationError::IndicatorColorMustBeHex {
406                    color_name: color_name.to_string(),
407                    color_value: color_value.to_string(),
408                });
409            }
410        }
411
412        Ok(())
413    }
414}
415
416impl Default for IndicatorColorsConfig {
417    fn default() -> Self {
418        Self {
419            idle: "#636366".to_string(),
420            recording: "#FF9F0A".to_string(),
421            transcribing: "#0A84FF".to_string(),
422            pipeline: "#BF5AF2".to_string(),
423            output: "#30D158".to_string(),
424            cancelled: "#FF453A".to_string(),
425            outline: "#2C2C2E".to_string(),
426            glyph: "#FFFFFF".to_string(),
427        }
428    }
429}
430
/// Audio capture settings (`[recording]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct RecordingConfig {
    pub mono: bool,
    // Sample rate in kHz; must be non-zero (see `validate`).
    pub sample_rate_khz: u32,
}
437
438impl RecordingConfig {
439    pub fn validate(&self) -> Result<(), ConfigValidationError> {
440        if self.sample_rate_khz == 0 {
441            return Err(ConfigValidationError::RecordingSampleRateKhzMustBePositive);
442        }
443
444        Ok(())
445    }
446
447    #[must_use]
448    pub const fn sample_rate_hz(&self) -> u32 {
449        self.sample_rate_khz * 1_000
450    }
451}
452
453impl Default for RecordingConfig {
454    fn default() -> Self {
455        Self {
456            mono: true,
457            sample_rate_khz: 16,
458        }
459    }
460}
461
/// Post-transcription processing pipeline (`[pipeline]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct PipelineConfig {
    // Overall wall-clock budget in ms; must be positive (validated).
    pub deadline_ms: u64,
    pub payload_format: PayloadFormat,
    // Ordered steps; `AppConfig::validate` requires at least one.
    pub steps: Vec<PipelineStepConfig>,
}
469
470impl Default for PipelineConfig {
471    fn default() -> Self {
472        Self {
473            deadline_ms: 500,
474            payload_format: PayloadFormat::JsonObject,
475            steps: Vec::new(),
476        }
477    }
478}
479
/// One pipeline step: an external command plus its I/O and error policy.
/// No `#[serde(default)]` on the struct: `id` and `cmd` are required fields.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct PipelineStepConfig {
    pub id: String,
    pub cmd: String,
    #[serde(default)]
    pub args: Vec<String>,
    #[serde(default)]
    pub io_mode: StepIoMode,
    // Per-step timeout; defaults via `default_step_timeout_ms` (defined elsewhere).
    #[serde(default = "default_step_timeout_ms")]
    pub timeout_ms: u64,
    #[serde(default)]
    pub on_error: OnErrorPolicy,
}
494
/// Step I/O contract; serialized as "auto" / "envelope_json" / "text_filter".
/// (Exact step-runner semantics are defined outside this module.)
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum StepIoMode {
    #[default]
    Auto,
    EnvelopeJson,
    TextFilter,
}
503
/// What to do when a pipeline step fails; serialized as "continue" /
/// "fallback_raw" / "abort". Abort is the conservative default.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum OnErrorPolicy {
    Continue,
    FallbackRaw,
    #[default]
    Abort,
}
512
/// Wire format for step payloads. Only one variant today; kept as an enum so
/// new formats can be added without changing the config schema.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum PayloadFormat {
    #[default]
    JsonObject,
}
519
/// Score thresholds (`[scoring]` section). NOTE(review): the scoring logic
/// that consumes these lives outside this module — presumably match-confidence
/// gates, with stricter thresholds for acronyms; confirm at the consumer.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct ScoringConfig {
    pub min_top_score: f32,
    pub min_margin: f32,
    pub acronym_min_top_score: f32,
    pub acronym_min_margin: f32,
}
528
529impl Default for ScoringConfig {
530    fn default() -> Self {
531        Self {
532            min_top_score: 0.84,
533            min_margin: 0.10,
534            acronym_min_top_score: 0.90,
535            acronym_min_margin: 0.15,
536        }
537    }
538}
539
/// System-prompt configuration for the transcript stage (`[transcript]`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct TranscriptConfig {
    pub system_prompt: String,
    // Optional fragment appended to `system_prompt`; folded in by
    // `materialize_system_prompt` before use.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system_prompt_append: Option<String>,
}
547
548impl Default for TranscriptConfig {
549    fn default() -> Self {
550        Self {
551            system_prompt: "Prefer minimal corrections. Focus on technical terms, developer tools, package names, commands, flags, file names, paths, env vars, acronyms, and obvious dictation errors. If uncertain, keep the original wording.".to_string(),
552            system_prompt_append: None,
553        }
554    }
555}
556
557impl TranscriptConfig {
558    fn validate(&self, field_prefix: &str) -> Result<(), ConfigValidationError> {
559        if let Some(system_prompt_append) = self.system_prompt_append.as_deref() {
560            validate_prompt_fragment(
561                system_prompt_append,
562                format!("{field_prefix}.system_prompt_append"),
563            )?;
564        }
565
566        Ok(())
567    }
568
569    fn replace_system_prompt(&mut self, prompt: &str) {
570        self.system_prompt = prompt.to_string();
571        self.system_prompt_append = None;
572    }
573
574    fn append_system_prompt(&mut self, fragment: &str) {
575        append_prompt_fragment(&mut self.system_prompt_append, fragment);
576    }
577
578    fn materialize_system_prompt(&mut self) {
579        self.system_prompt =
580            compose_prompt_text(&self.system_prompt, self.system_prompt_append.as_deref());
581        self.system_prompt_append = None;
582    }
583}
584
/// Transcription provider route (`[transcription]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct TranscriptionConfig {
    // Ordered provider route; `None` means "unset" (resolved elsewhere),
    // while `Some(vec![])` is a validation error.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub providers: Option<Vec<TranscriptionProvider>>,
}
591
592impl TranscriptionConfig {
593    fn validate(&self, field_name: &str) -> Result<(), ConfigValidationError> {
594        if self.providers.as_ref().is_some_and(Vec::is_empty) {
595            return Err(
596                ConfigValidationError::TranscriptionProvidersMustNotBeEmpty {
597                    field_name: field_name.to_string(),
598                },
599            );
600        }
601
602        if let Some(providers) = self.providers.as_ref() {
603            let mut seen = HashSet::new();
604            let mut duplicates = HashSet::new();
605            for provider in providers {
606                if !seen.insert(*provider) {
607                    duplicates.insert(*provider);
608                }
609            }
610            if !duplicates.is_empty() {
611                let mut provider_ids = duplicates
612                    .into_iter()
613                    .map(|provider| provider.config_id().to_string())
614                    .collect::<Vec<_>>();
615                provider_ids.sort();
616                return Err(ConfigValidationError::DuplicateTranscriptionProviders {
617                    field_name: field_name.to_string(),
618                    provider_ids,
619                });
620            }
621        }
622
623        Ok(())
624    }
625
626    fn apply_to(&self, target: &mut TranscriptionConfig) {
627        if let Some(providers) = self.providers.as_ref() {
628            target.providers = Some(providers.clone());
629        }
630    }
631}
632
/// LLM refinement settings (`[refine]` section).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct RefineConfig {
    pub provider: RefineProvider,
    // Chat-completions endpoint URL; must be non-blank (validated).
    pub endpoint: String,
    pub model: String,
    // Must be finite and >= 0 (validated).
    pub temperature: f32,
    // Must be > 0 (validated).
    pub max_output_tokens: u32,
    // Guard-rail ratios, each validated to lie in [0, 1].
    pub max_length_delta_ratio: f32,
    pub max_token_change_ratio: f32,
    // NOTE(review): not range-checked in `validate` — confirm whether that is
    // intentional (any u32 accepted).
    pub max_new_word_count: u32,
}
645
646impl RefineConfig {
647    pub fn validate(&self) -> Result<(), ConfigValidationError> {
648        if self.endpoint.trim().is_empty() {
649            return Err(ConfigValidationError::RefineEndpointMustNotBeEmpty);
650        }
651        if self.model.trim().is_empty() {
652            return Err(ConfigValidationError::RefineModelMustNotBeEmpty);
653        }
654        if !self.temperature.is_finite() || self.temperature < 0.0 {
655            return Err(ConfigValidationError::RefineTemperatureMustBeNonNegative);
656        }
657        if self.max_output_tokens == 0 {
658            return Err(ConfigValidationError::RefineMaxOutputTokensMustBePositive);
659        }
660        for (field_name, value) in [
661            ("refine.max_length_delta_ratio", self.max_length_delta_ratio),
662            ("refine.max_token_change_ratio", self.max_token_change_ratio),
663        ] {
664            if !value.is_finite() || !(0.0..=1.0).contains(&value) {
665                return Err(ConfigValidationError::RefineRatioMustBeBetweenZeroAndOne {
666                    field_name: field_name.to_string(),
667                    value: value.to_string(),
668                });
669            }
670        }
671
672        Ok(())
673    }
674}
675
676impl Default for RefineConfig {
677    fn default() -> Self {
678        Self {
679            provider: RefineProvider::OpenAi,
680            endpoint: "https://api.openai.com/v1/chat/completions".to_string(),
681            model: "gpt-4.1-mini".to_string(),
682            temperature: 0.0,
683            max_output_tokens: 512,
684            max_length_delta_ratio: 0.25,
685            max_token_change_ratio: 0.60,
686            max_new_word_count: 2,
687        }
688    }
689}
690
/// Backend used for the refine stage; serialized as "openai".
/// Single variant today; enum leaves room for alternatives.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
pub enum RefineProvider {
    #[serde(rename = "openai")]
    #[default]
    OpenAi,
}
697
/// A named voice preset: optional indicator glyph, prompt overrides, and
/// refine-knob overrides. Every field is optional; unset fields leave the
/// base config untouched (see `apply_to`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct VoiceConfig {
    // Must be a single ASCII letter after trimming (see `validate`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub indicator_glyph: Option<String>,
    // Replaces the base system prompt entirely.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system_prompt: Option<String>,
    // Appended to the (possibly replaced) system prompt.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system_prompt_append: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_output_tokens: Option<u32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_length_delta_ratio: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_token_change_ratio: Option<f32>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_new_word_count: Option<u32>,
}
718
719impl VoiceConfig {
720    #[must_use]
721    pub fn normalized_indicator_glyph(&self) -> Option<char> {
722        self.indicator_glyph
723            .as_deref()
724            .map(str::trim)
725            .filter(|value| !value.is_empty())
726            .and_then(|value| value.chars().next())
727            .map(|glyph| glyph.to_ascii_uppercase())
728    }
729
730    fn validate(&self, voice_id: &str) -> Result<(), ConfigValidationError> {
731        if let Some(glyph) = self.indicator_glyph.as_deref() {
732            let glyph = glyph.trim();
733            let mut chars = glyph.chars();
734            match (chars.next(), chars.next()) {
735                (Some(letter), None) if letter.is_ascii_alphabetic() => {}
736                _ => {
737                    return Err(
738                        ConfigValidationError::VoiceIndicatorGlyphMustBeSingleAsciiLetter {
739                            voice_id: voice_id.to_string(),
740                            value: glyph.to_string(),
741                        },
742                    );
743                }
744            }
745        }
746
747        if let Some(system_prompt) = self.system_prompt.as_deref() {
748            validate_prompt_fragment(system_prompt, format!("voices.{voice_id}.system_prompt"))?;
749        }
750        if let Some(system_prompt_append) = self.system_prompt_append.as_deref() {
751            validate_prompt_fragment(
752                system_prompt_append,
753                format!("voices.{voice_id}.system_prompt_append"),
754            )?;
755        }
756
757        validate_optional_refine_fields(
758            self.temperature,
759            self.max_output_tokens,
760            self.max_length_delta_ratio,
761            self.max_token_change_ratio,
762            "voices",
763            voice_id,
764        )
765    }
766
767    fn apply_to(&self, transcript: &mut TranscriptConfig, refine: &mut RefineConfig) {
768        if let Some(system_prompt) = self.system_prompt.as_ref() {
769            transcript.replace_system_prompt(system_prompt);
770        }
771        if let Some(system_prompt_append) = self.system_prompt_append.as_deref() {
772            transcript.append_system_prompt(system_prompt_append);
773        }
774        if let Some(temperature) = self.temperature {
775            refine.temperature = temperature;
776        }
777        if let Some(max_output_tokens) = self.max_output_tokens {
778            refine.max_output_tokens = max_output_tokens;
779        }
780        if let Some(max_length_delta_ratio) = self.max_length_delta_ratio {
781            refine.max_length_delta_ratio = max_length_delta_ratio;
782        }
783        if let Some(max_token_change_ratio) = self.max_token_change_ratio {
784            refine.max_token_change_ratio = max_token_change_ratio;
785        }
786        if let Some(max_new_word_count) = self.max_new_word_count {
787            refine.max_new_word_count = max_new_word_count;
788        }
789    }
790}
791
/// A named profile: an optional voice id plus optional per-section overrides
/// that are layered over the base config in `resolve_effective_config`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct ProfileConfig {
    // Id into `AppConfig::voices`; validated as an identifier.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub voice: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub recording: Option<RecordingOverrides>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub pipeline: Option<PipelineOverrides>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub transcription: Option<TranscriptionConfig>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub transcript: Option<TranscriptOverrides>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub refine: Option<RefineOverrides>,
}
808
809impl ProfileConfig {
810    fn validate(&self, profile_id: &str) -> Result<(), ConfigValidationError> {
811        if let Some(voice_id) = self.voice.as_deref() {
812            validate_identifier(voice_id.trim(), &format!("profiles.{profile_id}.voice"))?;
813        }
814        if let Some(recording) = self.recording.as_ref() {
815            recording.validate(profile_id)?;
816        }
817        if let Some(pipeline) = self.pipeline.as_ref() {
818            pipeline.validate(profile_id)?;
819        }
820        if let Some(transcription) = self.transcription.as_ref() {
821            transcription.validate(&format!("profiles.{profile_id}.transcription.providers"))?;
822        }
823        if let Some(transcript) = self.transcript.as_ref() {
824            transcript.validate(profile_id)?;
825        }
826        if let Some(refine) = self.refine.as_ref() {
827            refine.validate(profile_id)?;
828        }
829        Ok(())
830    }
831}
832
/// Per-profile recording overrides; unset fields keep the base values.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub struct RecordingOverrides {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mono: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sample_rate_khz: Option<u32>,
}
841
842impl RecordingOverrides {
843    fn validate(&self, profile_id: &str) -> Result<(), ConfigValidationError> {
844        if matches!(self.sample_rate_khz, Some(0)) {
845            return Err(ConfigValidationError::RecordingSampleRateKhzMustBePositive);
846        }
847        validate_identifier(profile_id, &format!("profiles.{profile_id}"))
848    }
849
850    fn apply_to(&self, recording: &mut RecordingConfig) {
851        if let Some(mono) = self.mono {
852            recording.mono = mono;
853        }
854        if let Some(sample_rate_khz) = self.sample_rate_khz {
855            recording.sample_rate_khz = sample_rate_khz;
856        }
857    }
858}
859
/// Per-profile pipeline overrides; unset fields keep the base values.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct PipelineOverrides {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub deadline_ms: Option<u64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub payload_format: Option<PayloadFormat>,
    // When set, wholly replaces the base step list (no merging).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub steps: Option<Vec<PipelineStepConfig>>,
}
870
871impl PipelineOverrides {
872    fn validate(&self, _profile_id: &str) -> Result<(), ConfigValidationError> {
873        if matches!(self.deadline_ms, Some(0)) {
874            return Err(ConfigValidationError::PipelineDeadlineMsMustBePositive);
875        }
876        if let Some(steps) = self.steps.as_ref() {
877            if steps.is_empty() {
878                return Err(ConfigValidationError::PipelineMustContainAtLeastOneStep);
879            }
880            validate_pipeline_steps(steps)?;
881        }
882        Ok(())
883    }
884
885    fn apply_to(&self, pipeline: &mut PipelineConfig) {
886        if let Some(deadline_ms) = self.deadline_ms {
887            pipeline.deadline_ms = deadline_ms;
888        }
889        if let Some(payload_format) = self.payload_format {
890            pipeline.payload_format = payload_format;
891        }
892        if let Some(steps) = self.steps.as_ref() {
893            pipeline.steps = steps.clone();
894        }
895    }
896}
897
/// Optional per-profile overrides for the `[transcript]` section.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct TranscriptOverrides {
    /// Replaces the base system prompt when set (see `apply_to`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system_prompt: Option<String>,
    /// Appended to the system prompt when set (after any replacement).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub system_prompt_append: Option<String>,
}
906
907impl TranscriptOverrides {
908    fn validate(&self, profile_id: &str) -> Result<(), ConfigValidationError> {
909        if let Some(system_prompt) = self.system_prompt.as_deref() {
910            validate_prompt_fragment(
911                system_prompt,
912                format!("profiles.{profile_id}.transcript.system_prompt"),
913            )?;
914        }
915        if let Some(system_prompt_append) = self.system_prompt_append.as_deref() {
916            validate_prompt_fragment(
917                system_prompt_append,
918                format!("profiles.{profile_id}.transcript.system_prompt_append"),
919            )?;
920        }
921        Ok(())
922    }
923
924    fn apply_to(&self, transcript: &mut TranscriptConfig) {
925        if let Some(system_prompt) = self.system_prompt.as_ref() {
926            transcript.replace_system_prompt(system_prompt);
927        }
928        if let Some(system_prompt_append) = self.system_prompt_append.as_deref() {
929            transcript.append_system_prompt(system_prompt_append);
930        }
931    }
932}
933
/// Optional per-profile overrides for the `[refine]` section.
///
/// Every field is optional; populated fields are copied onto the base
/// `RefineConfig` by `apply_to`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct RefineOverrides {
    /// Overrides `refine.provider` when set.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<RefineProvider>,
    /// Overrides `refine.endpoint` when set; must not be blank.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub endpoint: Option<String>,
    /// Overrides `refine.model` when set; must not be blank.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,
    /// Overrides `refine.temperature` when set; must be finite and >= 0.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Overrides `refine.max_output_tokens` when set; must be > 0.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_output_tokens: Option<u32>,
    /// Overrides `refine.max_length_delta_ratio` when set; must be in [0.0, 1.0].
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_length_delta_ratio: Option<f32>,
    /// Overrides `refine.max_token_change_ratio` when set; must be in [0.0, 1.0].
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_token_change_ratio: Option<f32>,
    /// Overrides `refine.max_new_word_count` when set.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub max_new_word_count: Option<u32>,
}
954
955impl RefineOverrides {
956    fn validate(&self, profile_id: &str) -> Result<(), ConfigValidationError> {
957        if self
958            .endpoint
959            .as_deref()
960            .is_some_and(|value| value.trim().is_empty())
961        {
962            return Err(ConfigValidationError::RefineEndpointMustNotBeEmpty);
963        }
964        if self
965            .model
966            .as_deref()
967            .is_some_and(|value| value.trim().is_empty())
968        {
969            return Err(ConfigValidationError::RefineModelMustNotBeEmpty);
970        }
971        validate_optional_refine_fields(
972            self.temperature,
973            self.max_output_tokens,
974            self.max_length_delta_ratio,
975            self.max_token_change_ratio,
976            "profiles",
977            profile_id,
978        )
979    }
980
981    fn apply_to(&self, refine: &mut RefineConfig) {
982        if let Some(provider) = self.provider {
983            refine.provider = provider;
984        }
985        if let Some(endpoint) = self.endpoint.as_ref() {
986            refine.endpoint = endpoint.clone();
987        }
988        if let Some(model) = self.model.as_ref() {
989            refine.model = model.clone();
990        }
991        if let Some(temperature) = self.temperature {
992            refine.temperature = temperature;
993        }
994        if let Some(max_output_tokens) = self.max_output_tokens {
995            refine.max_output_tokens = max_output_tokens;
996        }
997        if let Some(max_length_delta_ratio) = self.max_length_delta_ratio {
998            refine.max_length_delta_ratio = max_length_delta_ratio;
999        }
1000        if let Some(max_token_change_ratio) = self.max_token_change_ratio {
1001            refine.max_token_change_ratio = max_token_change_ratio;
1002        }
1003        if let Some(max_new_word_count) = self.max_new_word_count {
1004            refine.max_new_word_count = max_new_word_count;
1005        }
1006    }
1007}
1008
/// A rule that selects a profile based on the frontmost app's context.
///
/// All populated matcher fields must pass for the rule to match (AND
/// semantics; see `matches`). Matching is trimmed and ASCII-case-insensitive
/// via `normalize_match_string`. At least one matcher must be set.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct ProfileRuleConfig {
    /// Unique identifier for this rule (uniqueness enforced in `validate_profile_rules`).
    pub id: String,
    /// Profile to activate when the rule matches.
    pub profile: String,
    /// Exact match against the target's bundle id.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub bundle_id: Option<String>,
    /// Prefix match against the target's bundle id.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub bundle_id_prefix: Option<String>,
    /// Exact match against the target's app name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub app_name: Option<String>,
    /// Substring match against the target's app name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub app_name_contains: Option<String>,
    /// Substring match against the target's window title.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub window_title_contains: Option<String>,
}
1025
1026impl ProfileRuleConfig {
1027    fn validate(&self, app: &AppSettings) -> Result<(), ConfigValidationError> {
1028        validate_identifier(self.id.trim(), "profile_rules.id")?;
1029        validate_identifier(
1030            self.profile.trim(),
1031            &format!("profile_rules.{}.profile", self.id),
1032        )?;
1033
1034        if !self.has_matcher() {
1035            return Err(
1036                ConfigValidationError::ProfileRuleMustIncludeAtLeastOneMatcher {
1037                    rule_id: self.id.clone(),
1038                },
1039            );
1040        }
1041
1042        for (field_name, value) in [
1043            ("bundle_id", self.bundle_id.as_deref()),
1044            ("bundle_id_prefix", self.bundle_id_prefix.as_deref()),
1045            ("app_name", self.app_name.as_deref()),
1046            ("app_name_contains", self.app_name_contains.as_deref()),
1047            (
1048                "window_title_contains",
1049                self.window_title_contains.as_deref(),
1050            ),
1051        ] {
1052            if value.is_some_and(|value| value.trim().is_empty()) {
1053                return Err(ConfigValidationError::ProfileRuleFieldMustNotBeEmpty {
1054                    rule_id: self.id.clone(),
1055                    field_name: field_name.to_string(),
1056                });
1057            }
1058        }
1059
1060        if self.profile != app.profile && !self.profile.is_empty() {
1061            // Profile existence is validated at the AppConfig level.
1062        }
1063
1064        Ok(())
1065    }
1066
1067    fn has_matcher(&self) -> bool {
1068        self.bundle_id.is_some()
1069            || self.bundle_id_prefix.is_some()
1070            || self.app_name.is_some()
1071            || self.app_name_contains.is_some()
1072            || self.window_title_contains.is_some()
1073    }
1074
1075    #[must_use]
1076    pub fn matches(&self, target_context: &TargetContextSnapshot) -> bool {
1077        if !match_optional_exact(
1078            self.bundle_id.as_deref(),
1079            target_context.bundle_id.as_deref(),
1080        ) {
1081            return false;
1082        }
1083        if !match_optional_prefix(
1084            self.bundle_id_prefix.as_deref(),
1085            target_context.bundle_id.as_deref(),
1086        ) {
1087            return false;
1088        }
1089        if !match_optional_exact(self.app_name.as_deref(), target_context.app_name.as_deref()) {
1090            return false;
1091        }
1092        if !match_optional_contains(
1093            self.app_name_contains.as_deref(),
1094            target_context.app_name.as_deref(),
1095        ) {
1096            return false;
1097        }
1098        match_optional_contains(
1099            self.window_title_contains.as_deref(),
1100            target_context.window_title.as_deref(),
1101        )
1102    }
1103}
1104
/// Outcome of matching the target context against the profile rules.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResolvedProfileSelection {
    /// Id of the rule that matched, or `None` when the default profile was used.
    pub matched_rule_id: Option<String>,
    /// The selected profile id.
    pub profile_id: String,
    /// Voice attached to the selected profile, if any.
    pub voice_id: Option<String>,
    /// Indicator glyph for that voice, if any.
    pub voice_glyph: Option<char>,
    /// Human-readable explanation when the default profile was chosen
    /// (see `fallback_reason`); `None` when a rule matched.
    pub fallback_reason: Option<String>,
}
1113
/// Fully resolved configuration for a single utterance: the profile
/// selection, the effective (override-applied) config, and the resolved
/// transcription route.
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedUtteranceConfig {
    /// Snapshot of the frontmost-app context the selection was made against.
    pub target_context: TargetContextSnapshot,
    /// Id of the profile rule that matched, if any.
    pub matched_rule_id: Option<String>,
    /// The selected profile id.
    pub profile_id: String,
    /// Voice attached to the selected profile, if any.
    pub voice_id: Option<String>,
    /// Indicator glyph for that voice, if any.
    pub voice_glyph: Option<char>,
    /// Explanation when the default profile was used; `None` when a rule matched.
    pub fallback_reason: Option<String>,
    /// Providers to use for transcription and how they were determined.
    pub transcription_route: ResolvedTranscriptionRoute,
    /// The config with all profile overrides applied.
    pub effective_config: AppConfig,
    /// Settings snapshot consumed by the builtin pipeline steps.
    pub builtin_steps: ResolvedBuiltinStepConfig,
}
1126
/// Settings snapshot handed to the builtin pipeline steps, extracted
/// from an `AppConfig` by `from_app_config`.
#[derive(Debug, Clone, PartialEq)]
pub struct ResolvedBuiltinStepConfig {
    /// Transcript settings with the system prompt already materialized.
    pub transcript: TranscriptConfig,
    pub refine: RefineConfig,
    pub providers: ProvidersConfig,
}
1133
1134impl ResolvedBuiltinStepConfig {
1135    #[must_use]
1136    pub fn from_app_config(config: &AppConfig) -> Self {
1137        let mut transcript = config.transcript.clone();
1138        transcript.materialize_system_prompt();
1139
1140        Self {
1141            transcript,
1142            refine: config.refine.clone(),
1143            providers: config.providers.clone(),
1144        }
1145    }
1146}
1147
/// Settings for the replay log (a diagnostic recording of utterances).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct LoggingConfig {
    /// Master switch for replay logging (off by default).
    pub replay_enabled: bool,
    /// Whether captured audio is kept alongside replay entries.
    pub replay_retain_audio: bool,
    /// Directory replay entries are written to.
    pub replay_dir: PathBuf,
    /// Entries older than this many days are eligible for cleanup.
    pub replay_retention_days: u32,
    /// Size budget for the replay directory, in bytes.
    pub replay_max_bytes: u64,
}
1157
1158impl Default for LoggingConfig {
1159    fn default() -> Self {
1160        Self {
1161            replay_enabled: false,
1162            replay_retain_audio: true,
1163            replay_dir: PathBuf::from("~/.local/state/muninn/replay"),
1164            replay_retention_days: 7,
1165            replay_max_bytes: 52_428_800,
1166        }
1167    }
1168}
1169
/// Per-provider settings for transcription backends.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)]
#[serde(default, deny_unknown_fields)]
pub struct ProvidersConfig {
    pub apple_speech: AppleSpeechProviderConfig,
    pub whisper_cpp: WhisperCppProviderConfig,
    pub deepgram: DeepgramProviderConfig,
    pub openai: OpenAiProviderConfig,
    pub google: GoogleProviderConfig,
}
1179
1180impl ProvidersConfig {
1181    fn validate(&self) -> Result<(), ConfigValidationError> {
1182        self.apple_speech.validate()?;
1183        self.whisper_cpp.validate()?;
1184        self.deepgram.validate()
1185    }
1186}
1187
/// Settings for the Apple Speech transcription provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct AppleSpeechProviderConfig {
    /// Recognition locale; `None` leaves the choice to the provider.
    /// Must not be blank when set (see `validate`).
    pub locale: Option<String>,
    /// Whether speech assets may be installed on demand (default `true`).
    pub install_assets: bool,
}
1194
1195impl AppleSpeechProviderConfig {
1196    fn validate(&self) -> Result<(), ConfigValidationError> {
1197        if self
1198            .locale
1199            .as_deref()
1200            .is_some_and(|value| value.trim().is_empty())
1201        {
1202            return Err(ConfigValidationError::AppleSpeechLocaleMustNotBeEmpty);
1203        }
1204        Ok(())
1205    }
1206}
1207
1208impl Default for AppleSpeechProviderConfig {
1209    fn default() -> Self {
1210        Self {
1211            locale: None,
1212            install_assets: true,
1213        }
1214    }
1215}
1216
/// Compute-device preference for the whisper.cpp provider.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
#[serde(rename_all = "snake_case")]
pub enum WhisperCppDevicePreference {
    /// Let the provider pick a device (the default).
    #[default]
    Auto,
    /// Force CPU execution.
    Cpu,
    /// Force GPU execution.
    Gpu,
}
1225
/// Settings for the whisper.cpp transcription provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct WhisperCppProviderConfig {
    /// Model name; `None` defers to the provider. Must not be blank when set.
    pub model: Option<String>,
    /// Directory model files are loaded from; must not be empty.
    pub model_dir: PathBuf,
    /// Preferred compute device (auto/cpu/gpu).
    pub device: WhisperCppDevicePreference,
}
1233
1234impl WhisperCppProviderConfig {
1235    fn validate(&self) -> Result<(), ConfigValidationError> {
1236        if self
1237            .model
1238            .as_deref()
1239            .is_some_and(|value| value.trim().is_empty())
1240        {
1241            return Err(ConfigValidationError::WhisperCppModelMustNotBeEmpty);
1242        }
1243        if self.model_dir.as_os_str().is_empty() {
1244            return Err(ConfigValidationError::WhisperCppModelDirMustNotBeEmpty);
1245        }
1246        Ok(())
1247    }
1248}
1249
1250impl Default for WhisperCppProviderConfig {
1251    fn default() -> Self {
1252        Self {
1253            model: None,
1254            model_dir: PathBuf::from("~/.local/share/muninn/models"),
1255            device: WhisperCppDevicePreference::Auto,
1256        }
1257    }
1258}
1259
/// Settings for the Deepgram transcription provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct DeepgramProviderConfig {
    /// API key; `None` here means it must come from elsewhere
    /// (e.g. environment — TODO confirm against the provider code).
    pub api_key: Option<String>,
    /// Listen endpoint URL; must not be blank.
    pub endpoint: String,
    /// Model name (default "nova-3"); must not be blank.
    pub model: String,
    /// BCP-47-style language tag (default "en"); must not be blank.
    pub language: String,
}
1268
1269impl Default for DeepgramProviderConfig {
1270    fn default() -> Self {
1271        Self {
1272            api_key: None,
1273            endpoint: "https://api.deepgram.com/v1/listen".to_string(),
1274            model: "nova-3".to_string(),
1275            language: "en".to_string(),
1276        }
1277    }
1278}
1279
1280impl DeepgramProviderConfig {
1281    fn validate(&self) -> Result<(), ConfigValidationError> {
1282        if self.endpoint.trim().is_empty() {
1283            return Err(ConfigValidationError::DeepgramEndpointMustNotBeEmpty);
1284        }
1285        if self.model.trim().is_empty() {
1286            return Err(ConfigValidationError::DeepgramModelMustNotBeEmpty);
1287        }
1288        if self.language.trim().is_empty() {
1289            return Err(ConfigValidationError::DeepgramLanguageMustNotBeEmpty);
1290        }
1291        Ok(())
1292    }
1293}
1294
/// Settings for the OpenAI transcription provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct OpenAiProviderConfig {
    /// API key; `None` here means it must come from elsewhere
    /// (e.g. environment — TODO confirm against the provider code).
    pub api_key: Option<String>,
    /// Transcription endpoint URL.
    pub endpoint: String,
    /// Model name (default "gpt-4o-mini-transcribe").
    pub model: String,
}
1302
1303impl Default for OpenAiProviderConfig {
1304    fn default() -> Self {
1305        Self {
1306            api_key: None,
1307            endpoint: "https://api.openai.com/v1/audio/transcriptions".to_string(),
1308            model: "gpt-4o-mini-transcribe".to_string(),
1309        }
1310    }
1311}
1312
/// Settings for the Google Cloud Speech transcription provider.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(default, deny_unknown_fields)]
pub struct GoogleProviderConfig {
    /// API key credential, if used.
    pub api_key: Option<String>,
    /// OAuth/bearer token credential, if used.
    pub token: Option<String>,
    /// Recognize endpoint URL.
    pub endpoint: String,
    /// Model name; `None` defers to the service default.
    pub model: Option<String>,
}
1321
1322impl Default for GoogleProviderConfig {
1323    fn default() -> Self {
1324        Self {
1325            api_key: None,
1326            token: None,
1327            endpoint: "https://speech.googleapis.com/v1/speech:recognize".to_string(),
1328            model: None,
1329        }
1330    }
1331}
1332
/// Errors raised while locating, reading, parsing, creating, or
/// validating the config file.
#[derive(Debug, Error)]
pub enum ConfigError {
    /// Neither `MUNINN_CONFIG` nor `XDG_CONFIG_HOME` is set and `HOME`
    /// is unavailable, so no config path can be derived.
    #[error("unable to resolve config path because HOME is not set")]
    HomeDirectoryNotSet,
    #[error("config file not found at expected path: {path}")]
    NotFound { path: PathBuf },
    #[error("failed to read config file at {path}: {source}")]
    Read {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    /// Parse failure where the file path is known (load-from-disk path).
    #[error("failed to parse config TOML at {path}: {source}")]
    ParseTomlAtPath {
        path: PathBuf,
        #[source]
        source: toml::de::Error,
    },
    /// Parse failure for TOML not associated with a file path.
    #[error("failed to parse config TOML: {source}")]
    ParseToml {
        #[source]
        source: toml::de::Error,
    },
    #[error("failed to create config directory at {path}: {source}")]
    CreateConfigDir {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    #[error("failed to serialize launchable default config: {source}")]
    SerializeDefaultConfig {
        #[source]
        source: toml::ser::Error,
    },
    #[error("failed to write default config file at {path}: {source}")]
    WriteDefaultConfig {
        path: PathBuf,
        #[source]
        source: std::io::Error,
    },
    /// Semantic validation failure (see `ConfigValidationError`).
    #[error(transparent)]
    Validation(#[from] ConfigValidationError),
}
1376
/// Semantic validation failures for a parsed `AppConfig`.
///
/// Variants are grouped roughly by config section; the `#[error]`
/// messages name the offending TOML key.
#[derive(Debug, Clone, Error, PartialEq, Eq)]
pub enum ConfigValidationError {
    // Generic identifier / transcription-route checks.
    #[error("{field_name} must not be empty")]
    ConfigIdentifierMustNotBeEmpty { field_name: String },
    #[error("{field_name} must include at least one provider")]
    TranscriptionProvidersMustNotBeEmpty { field_name: String },
    #[error("{field_name} must not contain duplicate providers ({provider_ids:?})")]
    DuplicateTranscriptionProviders {
        field_name: String,
        provider_ids: Vec<String>,
    },
    // Provider sections.
    #[error("providers.apple_speech.locale must not be empty")]
    AppleSpeechLocaleMustNotBeEmpty,
    #[error("providers.whisper_cpp.model must not be empty")]
    WhisperCppModelMustNotBeEmpty,
    #[error("providers.whisper_cpp.model_dir must not be empty")]
    WhisperCppModelDirMustNotBeEmpty,
    #[error("providers.deepgram.endpoint must not be empty")]
    DeepgramEndpointMustNotBeEmpty,
    #[error("providers.deepgram.model must not be empty")]
    DeepgramModelMustNotBeEmpty,
    #[error("providers.deepgram.language must not be empty")]
    DeepgramLanguageMustNotBeEmpty,
    // Pipeline section.
    #[error("pipeline.deadline_ms must be greater than 0")]
    PipelineDeadlineMsMustBePositive,
    #[error("pipeline must include at least one step")]
    PipelineMustContainAtLeastOneStep,
    #[error("pipeline step timeout_ms must be greater than 0 (step id: {step_id})")]
    StepTimeoutMsMustBePositive { step_id: String },
    #[error("pipeline step ids must be unique (duplicate id: {step_id})")]
    DuplicatePipelineStepId { step_id: String },
    // Hotkeys and indicator.
    #[error("hotkey chord must not be empty ({hotkey_name})")]
    HotkeyChordMustNotBeEmpty { hotkey_name: String },
    #[error("double_tap timeout must be greater than 0 ({hotkey_name})")]
    DoubleTapTimeoutMsMustBePositive { hotkey_name: String },
    #[error("indicator color must be a #RRGGBB hex string ({color_name}={color_value})")]
    IndicatorColorMustBeHex {
        color_name: String,
        color_value: String,
    },
    // Recording section.
    #[error("recording.sample_rate_khz must be greater than 0")]
    RecordingSampleRateKhzMustBePositive,
    // Refine section.
    #[error("refine.endpoint must not be empty")]
    RefineEndpointMustNotBeEmpty,
    #[error("refine.model must not be empty")]
    RefineModelMustNotBeEmpty,
    #[error("refine.temperature must be non-negative")]
    RefineTemperatureMustBeNonNegative,
    #[error("refine.max_output_tokens must be greater than 0")]
    RefineMaxOutputTokensMustBePositive,
    #[error("{field_name} must be between 0.0 and 1.0 inclusive (got {value})")]
    RefineRatioMustBeBetweenZeroAndOne { field_name: String, value: String },
    // Voices, profiles, and profile rules.
    #[error("voice indicator_glyph must be exactly one ASCII letter ({voice_id}={value})")]
    VoiceIndicatorGlyphMustBeSingleAsciiLetter { voice_id: String, value: String },
    #[error("profile references unknown voice ({profile_id} -> {voice_id})")]
    UnknownVoiceReference {
        profile_id: String,
        voice_id: String,
    },
    #[error("{field_name} references unknown profile ({profile_id})")]
    UnknownProfileReference {
        field_name: String,
        profile_id: String,
    },
    #[error("profile rule ids must be unique (duplicate id: {rule_id})")]
    DuplicateProfileRuleId { rule_id: String },
    #[error("profile rule must include at least one matcher ({rule_id})")]
    ProfileRuleMustIncludeAtLeastOneMatcher { rule_id: String },
    #[error("profile rule field must not be empty ({rule_id}.{field_name})")]
    ProfileRuleFieldMustNotBeEmpty { rule_id: String, field_name: String },
}
1448
1449pub fn resolve_config_path() -> Result<PathBuf, ConfigError> {
1450    resolve_config_path_with(
1451        |key| env::var_os(key),
1452        env::var_os("HOME").map(PathBuf::from),
1453    )
1454}
1455
1456fn resolve_config_path_with<F>(
1457    lookup_var: F,
1458    home_dir: Option<PathBuf>,
1459) -> Result<PathBuf, ConfigError>
1460where
1461    F: Fn(&str) -> Option<OsString>,
1462{
1463    if let Some(path) = lookup_var("MUNINN_CONFIG").and_then(non_empty_os_string) {
1464        return Ok(PathBuf::from(path));
1465    }
1466
1467    if let Some(xdg_config_home) = lookup_var("XDG_CONFIG_HOME").and_then(non_empty_os_string) {
1468        return Ok(PathBuf::from(xdg_config_home)
1469            .join(DEFAULT_CONFIG_DIR_NAME)
1470            .join(DEFAULT_CONFIG_FILE_NAME));
1471    }
1472
1473    let home = home_dir.ok_or(ConfigError::HomeDirectoryNotSet)?;
1474    Ok(home
1475        .join(".config")
1476        .join(DEFAULT_CONFIG_DIR_NAME)
1477        .join(DEFAULT_CONFIG_FILE_NAME))
1478}
1479
/// Returns `Some(value)` unless the OS string is empty.
///
/// Used to treat empty environment variables as unset. Rewritten with
/// `bool::then_some` in place of the manual `if`/`else`.
fn non_empty_os_string(value: OsString) -> Option<OsString> {
    (!value.is_empty()).then_some(value)
}
1487
/// Serde default for a pipeline step's `timeout_ms` (250 ms).
const fn default_step_timeout_ms() -> u64 {
    250
}
1491
/// Serde default for the hotkey double-tap window (300 ms).
const fn default_double_tap_timeout_ms() -> u64 {
    300
}
1495
/// Returns true for a `#RRGGBB` color string: a `#` followed by exactly
/// six ASCII hex digits.
fn is_valid_hex_color(value: &str) -> bool {
    value
        .strip_prefix('#')
        .is_some_and(|hex| hex.len() == 6 && hex.chars().all(|c| c.is_ascii_hexdigit()))
}
1503
1504fn validate_identifier(value: &str, field_name: &str) -> Result<(), ConfigValidationError> {
1505    if value.trim().is_empty() {
1506        return Err(ConfigValidationError::ConfigIdentifierMustNotBeEmpty {
1507            field_name: field_name.to_string(),
1508        });
1509    }
1510    Ok(())
1511}
1512
1513fn validate_prompt_fragment(
1514    value: &str,
1515    field_name: impl Into<String>,
1516) -> Result<(), ConfigValidationError> {
1517    let field_name = field_name.into();
1518    if value.trim().is_empty() {
1519        return Err(ConfigValidationError::ConfigIdentifierMustNotBeEmpty { field_name });
1520    }
1521    Ok(())
1522}
1523
/// Appends a trimmed fragment to an optional prompt.
///
/// A blank fragment is a no-op. An existing prompt is trimmed first and
/// joined to the fragment with a blank line; a missing or blank prompt
/// becomes just the fragment.
fn append_prompt_fragment(target: &mut Option<String>, fragment: &str) {
    let fragment = fragment.trim();
    if fragment.is_empty() {
        return;
    }

    match target {
        Some(existing) => {
            let base = existing.trim();
            let combined = if base.is_empty() {
                fragment.to_string()
            } else {
                format!("{base}\n\n{fragment}")
            };
            *existing = combined;
        }
        None => *target = Some(fragment.to_string()),
    }
}
1543
/// Joins a base prompt and an optional append fragment with a blank
/// line; blank parts (after trimming) are dropped.
fn compose_prompt_text(base: &str, append: Option<&str>) -> String {
    let base = base.trim();
    match append.map(str::trim) {
        Some(fragment) if !fragment.is_empty() => {
            if base.is_empty() {
                fragment.to_string()
            } else {
                format!("{base}\n\n{fragment}")
            }
        }
        _ => base.to_string(),
    }
}
1554
1555fn validate_pipeline_steps(steps: &[PipelineStepConfig]) -> Result<(), ConfigValidationError> {
1556    let mut seen_ids = HashSet::new();
1557    for step in steps {
1558        if step.timeout_ms == 0 {
1559            return Err(ConfigValidationError::StepTimeoutMsMustBePositive {
1560                step_id: step.id.clone(),
1561            });
1562        }
1563
1564        if !seen_ids.insert(step.id.as_str()) {
1565            return Err(ConfigValidationError::DuplicatePipelineStepId {
1566                step_id: step.id.clone(),
1567            });
1568        }
1569    }
1570
1571    Ok(())
1572}
1573
1574fn validate_optional_refine_fields(
1575    temperature: Option<f32>,
1576    max_output_tokens: Option<u32>,
1577    max_length_delta_ratio: Option<f32>,
1578    max_token_change_ratio: Option<f32>,
1579    scope_prefix: &str,
1580    scope_id: &str,
1581) -> Result<(), ConfigValidationError> {
1582    if temperature.is_some_and(|value| !value.is_finite() || value < 0.0) {
1583        return Err(ConfigValidationError::RefineTemperatureMustBeNonNegative);
1584    }
1585    if max_output_tokens == Some(0) {
1586        return Err(ConfigValidationError::RefineMaxOutputTokensMustBePositive);
1587    }
1588    for (field_name, value) in [
1589        (
1590            format!("{scope_prefix}.{scope_id}.max_length_delta_ratio"),
1591            max_length_delta_ratio,
1592        ),
1593        (
1594            format!("{scope_prefix}.{scope_id}.max_token_change_ratio"),
1595            max_token_change_ratio,
1596        ),
1597    ] {
1598        if let Some(value) = value {
1599            if !value.is_finite() || !(0.0..=1.0).contains(&value) {
1600                return Err(ConfigValidationError::RefineRatioMustBeBetweenZeroAndOne {
1601                    field_name,
1602                    value: value.to_string(),
1603                });
1604            }
1605        }
1606    }
1607
1608    Ok(())
1609}
1610
1611fn validate_voices(voices: &BTreeMap<String, VoiceConfig>) -> Result<(), ConfigValidationError> {
1612    for (voice_id, voice) in voices {
1613        validate_identifier(voice_id, &format!("voices.{voice_id}"))?;
1614        voice.validate(voice_id)?;
1615    }
1616    Ok(())
1617}
1618
1619fn validate_profiles(config: &AppConfig) -> Result<(), ConfigValidationError> {
1620    for (profile_id, profile) in &config.profiles {
1621        validate_identifier(profile_id, &format!("profiles.{profile_id}"))?;
1622        profile.validate(profile_id)?;
1623        if let Some(voice_id) = profile.voice.as_deref() {
1624            if !config.voices.contains_key(voice_id) {
1625                return Err(ConfigValidationError::UnknownVoiceReference {
1626                    profile_id: profile_id.clone(),
1627                    voice_id: voice_id.to_string(),
1628                });
1629            }
1630        }
1631    }
1632    Ok(())
1633}
1634
1635fn validate_profile_rules(config: &AppConfig) -> Result<(), ConfigValidationError> {
1636    let mut seen_ids = HashSet::new();
1637
1638    for rule in &config.profile_rules {
1639        rule.validate(&config.app)?;
1640        if !seen_ids.insert(rule.id.as_str()) {
1641            return Err(ConfigValidationError::DuplicateProfileRuleId {
1642                rule_id: rule.id.clone(),
1643            });
1644        }
1645        if rule.profile != config.app.profile && !config.profiles.contains_key(&rule.profile) {
1646            return Err(ConfigValidationError::UnknownProfileReference {
1647                field_name: format!("profile_rules.{}.profile", rule.id),
1648                profile_id: rule.profile.clone(),
1649            });
1650        }
1651    }
1652
1653    Ok(())
1654}
1655
1656fn resolve_transcription_route_from_parts(
1657    transcription: &TranscriptionConfig,
1658    pipeline: &PipelineConfig,
1659) -> ResolvedTranscriptionRoute {
1660    if let Some(providers) = transcription.providers.clone() {
1661        return ResolvedTranscriptionRoute {
1662            providers,
1663            source: TranscriptionRouteSource::ExplicitConfig,
1664        };
1665    }
1666
1667    ResolvedTranscriptionRoute {
1668        providers: infer_transcription_route_from_pipeline(pipeline),
1669        source: TranscriptionRouteSource::PipelineInferred,
1670    }
1671}
1672
1673fn infer_transcription_route_from_pipeline(
1674    pipeline: &PipelineConfig,
1675) -> Vec<TranscriptionProvider> {
1676    pipeline
1677        .steps
1678        .iter()
1679        .filter_map(|step| TranscriptionProvider::lookup_step_name(&step.cmd))
1680        .collect()
1681}
1682
1683fn expand_pipeline_with_transcription_route(
1684    pipeline: &PipelineConfig,
1685    route: &ResolvedTranscriptionRoute,
1686) -> PipelineConfig {
1687    if route.source == TranscriptionRouteSource::PipelineInferred {
1688        return pipeline.clone();
1689    }
1690
1691    let mut steps = route
1692        .providers
1693        .iter()
1694        .copied()
1695        .map(route_step_for_provider)
1696        .collect::<Vec<_>>();
1697    steps.extend(
1698        pipeline
1699            .steps
1700            .iter()
1701            .filter(|step| TranscriptionProvider::lookup_step_name(&step.cmd).is_none())
1702            .cloned(),
1703    );
1704
1705    PipelineConfig {
1706        deadline_ms: pipeline.deadline_ms,
1707        payload_format: pipeline.payload_format,
1708        steps,
1709    }
1710}
1711
1712fn route_step_for_provider(provider: TranscriptionProvider) -> PipelineStepConfig {
1713    PipelineStepConfig {
1714        id: provider.canonical_step_name().to_string(),
1715        cmd: provider.canonical_step_name().to_string(),
1716        args: Vec::new(),
1717        io_mode: StepIoMode::EnvelopeJson,
1718        timeout_ms: provider.default_timeout_ms(),
1719        on_error: OnErrorPolicy::Continue,
1720    }
1721}
1722
1723fn fallback_reason(target_context: &TargetContextSnapshot, default_profile: &str) -> String {
1724    if target_context.bundle_id.is_none() && target_context.app_name.is_none() {
1725        return format!("frontmost app unavailable; using default profile `{default_profile}`");
1726    }
1727    if target_context.window_title.is_none() {
1728        return format!("no profile rule matched with app-only context; using default profile `{default_profile}`");
1729    }
1730    format!("no profile rule matched; using default profile `{default_profile}`")
1731}
1732
/// Shared scaffold for the optional matchers: a missing (or
/// whitespace-only) expectation always matches; a present expectation
/// requires a present, normalized actual value that satisfies `predicate`.
fn match_optional_with(
    expected: Option<&str>,
    actual: Option<&str>,
    predicate: impl FnOnce(&str, &str) -> bool,
) -> bool {
    let Some(expected) = expected.and_then(normalize_match_string) else {
        // No effective expectation — vacuously true.
        return true;
    };
    match actual.and_then(normalize_match_string) {
        Some(actual) => predicate(&actual, &expected),
        // An expectation exists but there is nothing to compare against.
        None => false,
    }
}

/// Case-insensitive, whitespace-trimmed equality on optional values.
fn match_optional_exact(expected: Option<&str>, actual: Option<&str>) -> bool {
    match_optional_with(expected, actual, |actual, expected| actual == expected)
}

/// Case-insensitive, whitespace-trimmed prefix test on optional values.
fn match_optional_prefix(expected: Option<&str>, actual: Option<&str>) -> bool {
    match_optional_with(expected, actual, |actual, expected| {
        actual.starts_with(expected)
    })
}

/// Case-insensitive, whitespace-trimmed substring test on optional values.
fn match_optional_contains(expected: Option<&str>, actual: Option<&str>) -> bool {
    match_optional_with(expected, actual, |actual, expected| {
        actual.contains(expected)
    })
}

/// Trims and ASCII-lowercases a match string; returns `None` when the
/// trimmed value is empty so blank strings behave like absent ones.
fn normalize_match_string(value: &str) -> Option<String> {
    let trimmed = value.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_ascii_lowercase())
}
1774
1775fn write_default_config(path: &Path) -> Result<(), ConfigError> {
1776    if let Some(parent) = path.parent() {
1777        fs::create_dir_all(parent).map_err(|source| ConfigError::CreateConfigDir {
1778            path: parent.to_path_buf(),
1779            source,
1780        })?;
1781    }
1782
1783    let rendered = toml::to_string_pretty(&AppConfig::launchable_default())
1784        .map_err(|source| ConfigError::SerializeDefaultConfig { source })?;
1785    fs::write(path, rendered).map_err(|source| ConfigError::WriteDefaultConfig {
1786        path: path.to_path_buf(),
1787        source,
1788    })?;
1789
1790    Ok(())
1791}
1792
1793#[cfg(test)]
1794mod tests {
1795    use std::ffi::OsString;
1796    use std::path::PathBuf;
1797    use std::time::{SystemTime, UNIX_EPOCH};
1798
1799    use crate::transcription::{TranscriptionProvider, TranscriptionRouteSource};
1800
1801    use super::{
1802        resolve_config_path_with, AppConfig, ConfigError, ConfigValidationError, PayloadFormat,
1803        RefineProvider, TargetContextSnapshot, TriggerType, WhisperCppDevicePreference,
1804    };
1805
    // End-to-end parse of a known-good TOML document: explicit values are
    // read and every omitted field falls back to its default.
    #[test]
    fn parses_valid_config_and_applies_defaults() {
        let config = AppConfig::from_toml_str(valid_pipeline_toml()).expect("valid config");

        assert_eq!(config.pipeline.deadline_ms, 500);
        assert_eq!(config.pipeline.payload_format, PayloadFormat::JsonObject);
        assert_eq!(config.pipeline.steps.len(), 2);
        assert!(!config.logging.replay_enabled);
        assert!(config.logging.replay_retain_audio);
        assert_eq!(config.providers.openai.model, "gpt-4o-mini-transcribe");
        assert_eq!(config.providers.apple_speech.locale, None);
        assert!(config.providers.apple_speech.install_assets);
        assert_eq!(config.providers.whisper_cpp.model, None);
        assert_eq!(
            config.providers.whisper_cpp.model_dir,
            PathBuf::from("~/.local/share/muninn/models")
        );
        assert_eq!(
            config.providers.whisper_cpp.device,
            WhisperCppDevicePreference::Auto
        );
        assert_eq!(
            config.providers.deepgram.endpoint,
            "https://api.deepgram.com/v1/listen"
        );
        assert_eq!(config.providers.deepgram.model, "nova-3");
        assert_eq!(config.providers.deepgram.language, "en");
        assert_eq!(config.refine.model, "gpt-4.1-mini");
        assert_eq!(config.indicator.colors.idle, "#636366");
        assert!(config.recording.mono);
        assert_eq!(config.recording.sample_rate_khz, 16);
        assert_eq!(config.transcript.system_prompt_append, None);
    }

    // Pins the exact values produced by `AppConfig::default()` so accidental
    // drift in any default is caught by this test.
    #[test]
    fn defaults_match_plan() {
        let config = AppConfig::default();

        assert_eq!(config.pipeline.deadline_ms, 500);
        assert!(!config.app.autostart);
        assert_eq!(config.hotkeys.push_to_talk.chord, vec!["ctrl"]);
        assert_eq!(config.hotkeys.push_to_talk.trigger, TriggerType::DoubleTap);
        assert_eq!(config.hotkeys.push_to_talk.double_tap_timeout_ms, Some(300));
        assert_eq!(
            config.hotkeys.done_mode_toggle.chord,
            vec!["ctrl", "shift", "d"]
        );
        assert_eq!(config.hotkeys.done_mode_toggle.double_tap_timeout_ms, None);
        assert_eq!(
            config.hotkeys.cancel_current_capture.chord,
            vec!["ctrl", "shift", "x"]
        );
        assert_eq!(
            config.hotkeys.cancel_current_capture.double_tap_timeout_ms,
            None
        );
        assert!(!config.logging.replay_enabled);
        assert!(config.logging.replay_retain_audio);
        assert_eq!(config.scoring.min_top_score, 0.84);
        assert_eq!(config.scoring.min_margin, 0.10);
        assert_eq!(config.scoring.acronym_min_top_score, 0.90);
        assert_eq!(config.scoring.acronym_min_margin, 0.15);
        assert_eq!(config.indicator.colors.recording, "#FF9F0A");
        assert_eq!(config.indicator.colors.transcribing, "#0A84FF");
        assert_eq!(config.indicator.colors.pipeline, "#BF5AF2");
        assert_eq!(config.indicator.colors.output, "#30D158");
        assert_eq!(config.indicator.colors.cancelled, "#FF453A");
        assert_eq!(config.indicator.colors.outline, "#2C2C2E");
        assert_eq!(config.indicator.colors.glyph, "#FFFFFF");
        assert!(config.recording.mono);
        assert_eq!(config.recording.sample_rate_khz, 16);
        assert_eq!(config.providers.apple_speech.locale, None);
        assert!(config.providers.apple_speech.install_assets);
        assert_eq!(config.providers.whisper_cpp.model, None);
        assert_eq!(
            config.providers.whisper_cpp.model_dir,
            PathBuf::from("~/.local/share/muninn/models")
        );
        assert_eq!(
            config.providers.whisper_cpp.device,
            WhisperCppDevicePreference::Auto
        );
        assert_eq!(
            config.providers.deepgram.endpoint,
            "https://api.deepgram.com/v1/listen"
        );
        assert_eq!(config.providers.deepgram.model, "nova-3");
        assert_eq!(config.providers.deepgram.language, "en");
        assert_eq!(config.refine.provider, RefineProvider::OpenAi);
        assert_eq!(
            config.transcript.system_prompt,
            "Prefer minimal corrections. Focus on technical terms, developer tools, package names, commands, flags, file names, paths, env vars, acronyms, and obvious dictation errors. If uncertain, keep the original wording."
        );
        assert_eq!(config.transcript.system_prompt_append, None);
    }

    // The shipped launchable default must pass validation and route
    // transcription through the ordered provider fallback list.
    #[test]
    fn launchable_default_is_valid_and_uses_ordered_stt_fallback() {
        let config = AppConfig::launchable_default();

        config.validate().expect("launchable default must validate");
        assert_eq!(config.pipeline.deadline_ms, 40_000);
        assert_eq!(
            config.transcription.providers,
            Some(TranscriptionProvider::default_ordered_route().to_vec())
        );
        assert_eq!(config.pipeline.steps.len(), 1);
        assert_eq!(config.pipeline.steps[0].id, "refine");
        assert_eq!(config.pipeline.steps[0].cmd, "refine");
        assert_eq!(config.pipeline.steps[0].timeout_ms, 2_500);
    }

    // An explicit transcription route is expanded into stt_* pipeline steps
    // that run ahead of the post-processing (refine) steps.
    #[test]
    fn resolve_effective_config_expands_explicit_transcription_route_before_postprocessing() {
        let config = AppConfig::launchable_default();

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec 29"),
        ));

        assert_eq!(
            resolved.transcription_route,
            crate::ResolvedTranscriptionRoute {
                providers: TranscriptionProvider::default_ordered_route().to_vec(),
                source: TranscriptionRouteSource::ExplicitConfig,
            }
        );
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec![
                "stt_apple_speech",
                "stt_whisper_cpp",
                "stt_deepgram",
                "stt_openai",
                "stt_google",
                "refine",
            ]
        );
    }
1953
    // Explicit [providers.whisper_cpp] values override the defaults.
    #[test]
    fn parses_whisper_cpp_provider_overrides() {
        let config = AppConfig::from_toml_str(
            r#"
[providers.whisper_cpp]
model = "base.en"
model_dir = "/tmp/muninn-models"
device = "cpu"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("whisper.cpp provider overrides should parse");

        assert_eq!(
            config.providers.whisper_cpp,
            super::WhisperCppProviderConfig {
                model: Some("base.en".to_string()),
                model_dir: PathBuf::from("/tmp/muninn-models"),
                device: WhisperCppDevicePreference::Cpu,
            }
        );
    }

    // Explicit [providers.deepgram] values override the defaults.
    #[test]
    fn parses_deepgram_provider_overrides() {
        let config = AppConfig::from_toml_str(
            r#"
[providers.deepgram]
api_key = "config-deepgram-key"
endpoint = "https://api.deepgram.test/v1/listen"
model = "nova-3-medical"
language = "en-IE"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("Deepgram provider overrides should parse");

        assert_eq!(
            config.providers.deepgram,
            super::DeepgramProviderConfig {
                api_key: Some("config-deepgram-key".to_string()),
                endpoint: "https://api.deepgram.test/v1/listen".to_string(),
                model: "nova-3-medical".to_string(),
                language: "en-IE".to_string(),
            }
        );
    }

    // Explicit [providers.apple_speech] values override the defaults.
    #[test]
    fn parses_apple_speech_provider_overrides() {
        let config = AppConfig::from_toml_str(
            r#"
[providers.apple_speech]
locale = "en-IE"
install_assets = false

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("apple speech provider overrides should parse");

        assert_eq!(
            config.providers.apple_speech,
            super::AppleSpeechProviderConfig {
                locale: Some("en-IE".to_string()),
                install_assets: false,
            }
        );
    }

    // A whitespace-only Apple Speech locale fails validation with a
    // dedicated error variant.
    #[test]
    fn rejects_empty_apple_speech_locale() {
        let error = AppConfig::from_toml_str(
            r#"
[providers.apple_speech]
locale = "   "

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("apple speech locale must not be empty");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::AppleSpeechLocaleMustNotBeEmpty)
        );
    }

    // A whitespace-only whisper.cpp model name fails validation.
    #[test]
    fn rejects_empty_whisper_cpp_model() {
        let error = AppConfig::from_toml_str(
            r#"
[providers.whisper_cpp]
model = "   "

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("whisper.cpp model must not be empty");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::WhisperCppModelMustNotBeEmpty)
        );
    }

    // An empty whisper.cpp model_dir fails validation.
    #[test]
    fn rejects_empty_whisper_cpp_model_dir() {
        let error = AppConfig::from_toml_str(
            r#"
[providers.whisper_cpp]
model_dir = ""

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("whisper.cpp model dir must not be empty");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::WhisperCppModelDirMustNotBeEmpty)
        );
    }

    // An empty Deepgram endpoint fails validation.
    #[test]
    fn rejects_invalid_deepgram_provider_values() {
        let error = AppConfig::from_toml_str(
            r#"
[providers.deepgram]
endpoint = ""
model = "nova-3"
language = "en"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("empty Deepgram endpoint must fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::DeepgramEndpointMustNotBeEmpty)
        );
    }
2155
    // Without a [transcription] table, the route is inferred from the stt_*
    // pipeline steps in the order they appear.
    #[test]
    fn resolve_effective_config_preserves_pipeline_only_transcription_order() {
        let config = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt-openai"
cmd = "stt_openai"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "stt-google"
cmd = "stt_google"
timeout_ms = 100
on_error = "abort"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect("pipeline-only config should parse");

        let resolved = config.resolve_effective_config(target_context(None, None, None));

        assert_eq!(
            resolved.transcription_route.providers,
            vec![TranscriptionProvider::OpenAi, TranscriptionProvider::Google]
        );
        assert_eq!(
            resolved.transcription_route.source,
            TranscriptionRouteSource::PipelineInferred
        );
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec!["stt_openai", "stt_google", "refine"]
        );
    }

    // stt_* steps interleaved with arbitrary commands are still collected for
    // route inference, while the step order itself is left untouched.
    #[test]
    fn resolve_effective_config_infers_interleaved_pipeline_transcription_steps() {
        let config = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt-openai"
cmd = "stt_openai"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "uppercase"
cmd = "/usr/bin/tr"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "stt-google"
cmd = "stt_google"
timeout_ms = 100
on_error = "abort"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect("pipeline-only config should parse");

        let resolved = config.resolve_effective_config(target_context(None, None, None));

        assert_eq!(
            resolved.transcription_route.providers,
            vec![TranscriptionProvider::OpenAi, TranscriptionProvider::Google]
        );
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec!["stt_openai", "/usr/bin/tr", "stt_google", "refine"]
        );
    }

    // An explicit [transcription] route wins: pre-existing stt_* steps are
    // removed and the explicit route's steps are placed at the front.
    #[test]
    fn resolve_effective_config_explicit_route_strips_all_existing_transcription_steps() {
        let config = AppConfig::from_toml_str(
            r#"
[transcription]
providers = ["deepgram", "google"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt-openai"
cmd = "stt_openai"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "uppercase"
cmd = "/usr/bin/tr"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "stt-google"
cmd = "stt_google"
timeout_ms = 100
on_error = "abort"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect("explicit-route config should parse");

        let resolved = config.resolve_effective_config(target_context(None, None, None));

        assert_eq!(
            resolved.transcription_route.providers,
            vec![
                TranscriptionProvider::Deepgram,
                TranscriptionProvider::Google
            ]
        );
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec!["stt_deepgram", "stt_google", "/usr/bin/tr", "refine"]
        );
    }
2318
    // refine.max_token_change_ratio above 1.0 fails validation, and the error
    // carries both the offending field name and the rejected value.
    #[test]
    fn rejects_invalid_refine_ratio() {
        let error = AppConfig::from_toml_str(
            r#"
[refine]
max_token_change_ratio = 1.5

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("refine ratio above one must fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::RefineRatioMustBeBetweenZeroAndOne {
                field_name: "refine.max_token_change_ratio".to_string(),
                value: "1.5".to_string(),
            })
        );
    }

    // Repeated providers in [transcription].providers fail validation and the
    // error lists the duplicated ids.
    #[test]
    fn rejects_duplicate_transcription_provider_list() {
        let error = AppConfig::from_toml_str(
            r#"
[transcription]
providers = ["openai", "openai"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect_err("duplicate transcription provider list should fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::DuplicateTranscriptionProviders {
                field_name: "transcription.providers".to_string(),
                provider_ids: vec!["openai".to_string()],
            })
        );
    }

    // An empty [profiles.<id>.transcription] table is a no-op override: the
    // top-level transcription route stays in effect.
    #[test]
    fn resolve_effective_config_keeps_default_route_when_profile_transcription_table_is_empty() {
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "mail"

[transcription]
providers = ["openai", "google"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"

[profiles.mail.transcription]
"#,
        )
        .expect("profile transcription override should parse");

        let resolved = config.resolve_effective_config(target_context(None, None, None));

        assert_eq!(
            resolved.transcription_route.providers,
            vec![TranscriptionProvider::OpenAi, TranscriptionProvider::Google]
        );
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec!["stt_openai", "stt_google", "refine"]
        );
    }

    // An explicitly empty provider list is rejected (the table must either be
    // omitted or contain at least one provider).
    #[test]
    fn rejects_empty_transcription_provider_list() {
        let error = AppConfig::from_toml_str(
            r#"
[transcription]
providers = []

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect_err("empty transcription provider list should fail");

        assert_eq!(
            error.to_validation_error(),
            Some(
                ConfigValidationError::TranscriptionProvidersMustNotBeEmpty {
                    field_name: "transcription.providers".to_string(),
                }
            )
        );
    }
2449
    // The canonical "openai" refine provider id parses to RefineProvider::OpenAi.
    #[test]
    fn accepts_refine_provider_openai() {
        let config = AppConfig::from_toml_str(
            r#"
[refine]
provider = "openai"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("openai provider should parse");

        assert_eq!(config.refine.provider, RefineProvider::OpenAi);
    }

    // The retired "open_ai" alias must no longer deserialize; it surfaces as a
    // TOML parse error rather than a validation error.
    #[test]
    fn rejects_legacy_refine_provider_open_ai_alias() {
        let error = AppConfig::from_toml_str(
            r#"
[refine]
provider = "open_ai"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("legacy open_ai provider should fail");

        assert!(matches!(error, ConfigError::ParseToml { .. }));
    }

    // Retired indicator color key names (processing/injecting/outer_ring) are
    // rejected at parse time thanks to deny_unknown_fields.
    #[test]
    fn rejects_legacy_indicator_color_aliases() {
        let error = AppConfig::from_toml_str(
            r##"
[indicator.colors]
idle = "#111111"
recording = "#222222"
processing = "#333333"
pipeline = "#444444"
injecting = "#555555"
cancelled = "#666666"
outer_ring = "#777777"
glyph = "#888888"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"##,
        )
        .expect_err("legacy indicator aliases should fail");

        assert!(matches!(error, ConfigError::ParseToml { .. }));
    }

    // logging.replay_retain_audio can be switched off from config.
    #[test]
    fn parses_replay_audio_retention_override() {
        let config = AppConfig::from_toml_str(
            r#"
[logging]
replay_retain_audio = false

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("replay audio retention override should parse");

        assert!(!config.logging.replay_retain_audio);
    }

    // A whitespace-only refine model name fails validation.
    #[test]
    fn rejects_empty_refine_model() {
        let error = AppConfig::from_toml_str(
            r#"
[refine]
model = "   "

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("empty refine model must fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::RefineModelMustNotBeEmpty)
        );
    }
2574
    // Two pipeline steps sharing an id fail validation with the offending id.
    #[test]
    fn rejects_duplicate_pipeline_step_ids() {
        let error = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "continue"

[[pipeline.steps]]
id = "stt"
cmd = "step-b"
timeout_ms = 200
on_error = "abort"
"#,
        )
        .expect_err("duplicate ids must fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::DuplicatePipelineStepId {
                step_id: "stt".to_owned(),
            })
        );
    }

    // pipeline.deadline_ms = 0 fails validation.
    #[test]
    fn rejects_non_positive_pipeline_deadline() {
        let error = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 0
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "continue"
"#,
        )
        .expect_err("deadline_ms must be > 0");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::PipelineDeadlineMsMustBePositive)
        );
    }

    // A step with timeout_ms = 0 fails validation, naming the step.
    #[test]
    fn rejects_non_positive_step_timeout() {
        let error = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 0
on_error = "continue"
"#,
        )
        .expect_err("timeout_ms must be > 0");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::StepTimeoutMsMustBePositive {
                step_id: "stt".to_owned(),
            })
        );
    }

    // A [pipeline] table with no steps fails validation.
    #[test]
    fn rejects_empty_pipeline() {
        let error = AppConfig::from_toml_str(
            r#"
[pipeline]
deadline_ms = 500
payload_format = "json_object"
"#,
        )
        .expect_err("pipeline without steps must fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::PipelineMustContainAtLeastOneStep)
        );
    }

    // A hotkey with an empty chord fails validation, naming the hotkey.
    #[test]
    fn rejects_empty_hotkey_chord() {
        let error = AppConfig::from_toml_str(
            r#"
[hotkeys.push_to_talk]
trigger = "double_tap"
chord = []

[hotkeys.done_mode_toggle]
trigger = "press"
chord = ["ctrl", "shift", "d"]

[hotkeys.cancel_current_capture]
trigger = "press"
chord = ["ctrl", "shift", "x"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("empty chord should fail");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::HotkeyChordMustNotBeEmpty {
                hotkey_name: "push_to_talk".to_string(),
            })
        );
    }

    // double_tap_timeout_ms = 0 fails validation, naming the hotkey.
    #[test]
    fn rejects_non_positive_double_tap_timeout() {
        let error = AppConfig::from_toml_str(
            r#"
[hotkeys.push_to_talk]
trigger = "double_tap"
chord = ["ctrl"]
double_tap_timeout_ms = 0

[hotkeys.done_mode_toggle]
trigger = "press"
chord = ["ctrl", "shift", "d"]

[hotkeys.cancel_current_capture]
trigger = "press"
chord = ["ctrl", "shift", "x"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("double_tap_timeout_ms must be > 0");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::DoubleTapTimeoutMsMustBePositive {
                hotkey_name: "push_to_talk".to_string(),
            })
        );
    }

    // Indicator colors must be hex "#RRGGBB"; named colors are rejected and
    // the error carries the field path and the bad value.
    #[test]
    fn rejects_invalid_indicator_color() {
        let error = AppConfig::from_toml_str(
            r#"
[indicator.colors]
recording = "red"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "step-a"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect_err("indicator color should require #RRGGBB format");

        assert_eq!(
            error.to_validation_error(),
            Some(ConfigValidationError::IndicatorColorMustBeHex {
                color_name: "indicator.colors.recording".to_string(),
                color_value: "red".to_string(),
            })
        );
    }
2774
2775    #[test]
2776    fn rejects_voice_indicator_glyphs_that_are_not_single_ascii_letters() {
2777        let error = AppConfig::from_toml_str(
2778            r#"
2779[voices.dev_mode]
2780indicator_glyph = "DM"
2781
2782[pipeline]
2783deadline_ms = 500
2784payload_format = "json_object"
2785
2786[[pipeline.steps]]
2787id = "stt"
2788cmd = "stt_openai"
2789timeout_ms = 100
2790on_error = "abort"
2791"#,
2792        )
2793        .expect_err("multi-character voice glyphs must fail validation");
2794
2795        assert_eq!(
2796            error.to_validation_error(),
2797            Some(
2798                ConfigValidationError::VoiceIndicatorGlyphMustBeSingleAsciiLetter {
2799                    voice_id: "dev_mode".to_string(),
2800                    value: "DM".to_string(),
2801                }
2802            )
2803        );
2804    }
2805
    #[test]
    fn resolve_profile_selection_matches_rules_and_falls_back_to_default_profile() {
        // One rule routes OpenAI-bundled windows with "codex" in the title to
        // the `codex` profile; any other context should fall back to the
        // default profile declared under [app].
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[voices.dev_mode]
indicator_glyph = "d"

[profiles.default]

[profiles.codex]
voice = "dev_mode"

[[profile_rules]]
id = "codex_window"
profile = "codex"
bundle_id_prefix = "com.openai."
window_title_contains = "codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "stt_openai"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("contextual config should parse");

        // Context satisfying both the bundle-id prefix and the window-title
        // substring selects the rule's profile, along with that profile's voice.
        let matched = config.resolve_profile_selection(&target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Muninn - Codex"),
        ));
        assert_eq!(matched.matched_rule_id.as_deref(), Some("codex_window"));
        assert_eq!(matched.profile_id, "codex");
        assert_eq!(matched.voice_id.as_deref(), Some("dev_mode"));
        // Configured glyph is lowercase "d"; the resolver surfaces it uppercased.
        assert_eq!(matched.voice_glyph, Some('D'));
        assert_eq!(matched.fallback_reason, None);

        // A non-matching context falls back to `default`: no rule id, no voice,
        // and a human-readable fallback reason is reported.
        let fallback = config.resolve_profile_selection(&target_context(
            Some("com.apple.Terminal"),
            Some("Terminal"),
            Some("notes.txt"),
        ));
        assert_eq!(fallback.matched_rule_id, None);
        assert_eq!(fallback.profile_id, "default");
        assert_eq!(fallback.voice_id, None);
        assert_eq!(fallback.voice_glyph, None);
        assert_eq!(
            fallback.fallback_reason.as_deref(),
            Some("no profile rule matched; using default profile `default`")
        );
    }
2865
    #[test]
    fn resolve_profile_selection_uses_generic_m_mode_for_unknown_contextual_apps() {
        // The default profile here *does* declare a voice. When no rule
        // matches, the voice id is still reported but its indicator glyph is
        // suppressed (generic indicator instead of a per-voice glyph).
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[voices.default_mode]
indicator_glyph = "d"

[profiles.default]
voice = "default_mode"

[profiles.codex]
voice = "default_mode"

[[profile_rules]]
id = "codex_window"
profile = "codex"
bundle_id_prefix = "com.openai."

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "stt_openai"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("contextual config should parse");

        // Unknown app: does not match the `com.openai.` bundle prefix rule.
        let fallback = config.resolve_profile_selection(&target_context(
            Some("com.apple.Terminal"),
            Some("Terminal"),
            Some("notes.txt"),
        ));

        assert_eq!(fallback.matched_rule_id, None);
        assert_eq!(fallback.profile_id, "default");
        assert_eq!(fallback.voice_id.as_deref(), Some("default_mode"));
        // Glyph is hidden on fallback even though the voice defines one.
        assert_eq!(fallback.voice_glyph, None);
        assert_eq!(
            fallback.fallback_reason.as_deref(),
            Some("no profile rule matched; using default profile `default`")
        );
    }
2915
    #[test]
    fn resolve_effective_config_prefers_profile_overrides_over_voice_defaults() {
        // Layering under test: base config < voice defaults < profile
        // overrides. The matched `codex` profile overrides recording,
        // transcription, transcript, and part of refine; the voice supplies
        // the remaining refine defaults; untouched fields keep base values.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[recording]
sample_rate_khz = 16

[transcription]
providers = ["openai", "google"]

[transcript]
system_prompt = "base prompt"

[refine]
temperature = 0.0
max_output_tokens = 512
max_length_delta_ratio = 0.25
max_token_change_ratio = 0.60

[voices.dev_mode]
indicator_glyph = "d"
system_prompt = "voice prompt"
temperature = 0.8
max_output_tokens = 128
max_length_delta_ratio = 0.4

[profiles.default]

[profiles.codex]
voice = "dev_mode"
[profiles.codex.recording]
sample_rate_khz = 48
[profiles.codex.transcription]
providers = ["whisper_cpp", "google"]
[profiles.codex.transcript]
system_prompt = "profile prompt"
[profiles.codex.refine]
temperature = 0.2
max_output_tokens = 256

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "stt_openai"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("contextual config should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        assert_eq!(resolved.profile_id, "codex");
        assert_eq!(resolved.voice_id.as_deref(), Some("dev_mode"));
        assert_eq!(resolved.voice_glyph, Some('D'));
        // Profile override beats the base recording sample rate (16 -> 48).
        assert_eq!(resolved.effective_config.recording.sample_rate_khz, 48);
        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            "profile prompt"
        );
        // The profile's transcription providers are an explicit config choice.
        assert_eq!(
            resolved.transcription_route.providers,
            vec![
                TranscriptionProvider::WhisperCpp,
                TranscriptionProvider::Google
            ]
        );
        assert_eq!(
            resolved.transcription_route.source,
            TranscriptionRouteSource::ExplicitConfig
        );
        // The effective provider route is reflected in the leading STT steps
        // of the resolved pipeline.
        assert_eq!(
            resolved
                .effective_config
                .pipeline
                .steps
                .iter()
                .take(2)
                .map(|step| step.cmd.as_str())
                .collect::<Vec<_>>(),
            vec!["stt_whisper_cpp", "stt_google"]
        );
        // refine: profile overrides temperature/max_output_tokens, the voice
        // supplies max_length_delta_ratio, and the base keeps the token ratio.
        assert_eq!(resolved.effective_config.refine.temperature, 0.2);
        assert_eq!(resolved.effective_config.refine.max_output_tokens, 256);
        assert_eq!(resolved.effective_config.refine.max_length_delta_ratio, 0.4);
        assert_eq!(
            resolved.effective_config.refine.max_token_change_ratio,
            0.60
        );
    }
3021
    #[test]
    fn resolve_effective_config_appends_prompt_fragments_from_base_voice_and_profile() {
        // `system_prompt_append` fragments from base, voice, and profile are
        // all concatenated (in that order) onto the base prompt, separated by
        // blank lines, and the `append` field is consumed in the result.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[transcript]
system_prompt = "base prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[voices.codex]
system_prompt_append = """
Vocabulary JSON:
{"terms":["Deepgram"]}
"""

[profiles.default]

[profiles.codex]
voice = "codex"
[profiles.codex.transcript]
system_prompt_append = """
Vocabulary JSON:
{"terms":["Cargo.toml"]}
"""

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("contextual config with prompt appends should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        // Order: base prompt, then base/voice/profile appends.
        let expected_prompt = r#"base prompt

Vocabulary JSON:
{"terms":["Muninn"]}

Vocabulary JSON:
{"terms":["Deepgram"]}

Vocabulary JSON:
{"terms":["Cargo.toml"]}"#;

        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            expected_prompt
        );
        // Append fragments are folded into the prompt, not carried forward.
        assert_eq!(
            resolved.effective_config.transcript.system_prompt_append,
            None
        );
        // The built-in step view mirrors the same merged prompt.
        assert_eq!(
            resolved.builtin_steps.transcript.system_prompt,
            expected_prompt
        );
        assert_eq!(resolved.builtin_steps.transcript.system_prompt_append, None);
    }
3101
3102    #[test]
3103    fn rejects_empty_transcript_system_prompt_append() {
3104        let error = AppConfig::from_toml_str(
3105            r#"
3106[transcript]
3107system_prompt_append = "   "
3108
3109[pipeline]
3110deadline_ms = 500
3111payload_format = "json_object"
3112
3113[[pipeline.steps]]
3114id = "stt"
3115cmd = "step-a"
3116timeout_ms = 100
3117on_error = "abort"
3118"#,
3119        )
3120        .expect_err("blank transcript prompt append must fail");
3121
3122        assert_eq!(
3123            error.to_validation_error(),
3124            Some(ConfigValidationError::ConfigIdentifierMustNotBeEmpty {
3125                field_name: "transcript.system_prompt_append".to_string(),
3126            })
3127        );
3128    }
3129
    #[test]
    fn allows_empty_root_transcript_system_prompt_for_append_only_configs() {
        // An append-only configuration (empty root prompt + append fragment)
        // is valid; the resolved prompt is just the trimmed append text.
        let config = AppConfig::from_toml_str(
            r#"
[transcript]
system_prompt = ""
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("empty root transcript prompt should remain valid");

        // No rule/profile machinery involved: resolve with an empty context.
        let resolved = config.resolve_effective_config(target_context(None, None, None));

        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            "Vocabulary JSON:\n{\"terms\":[\"Muninn\"]}"
        );
    }
3161
    #[test]
    fn resolve_effective_config_clears_base_append_when_voice_replaces_prompt() {
        // When a voice fully replaces `system_prompt`, the base-level append
        // fragment must NOT leak into the replacement prompt.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[transcript]
system_prompt = "base prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[voices.codex]
system_prompt = "voice prompt"

[profiles.default]

[profiles.codex]
voice = "codex"

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("voice replacement config should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        // Voice prompt wins outright; no "Vocabulary JSON" suffix appears.
        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            "voice prompt"
        );
    }
3213
    #[test]
    fn resolve_effective_config_clears_inherited_appends_when_profile_replaces_prompt() {
        // A profile that replaces `system_prompt` discards appends inherited
        // from base and voice layers; only the profile's OWN append fragment
        // is attached to the replacement prompt.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[transcript]
system_prompt = "base prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[voices.codex]
system_prompt_append = """
Vocabulary JSON:
{"terms":["Deepgram"]}
"""

[profiles.default]

[profiles.codex]
voice = "codex"
[profiles.codex.transcript]
system_prompt = "profile prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Cargo.toml"]}
"""

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("profile replacement config should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        // Base ("Muninn") and voice ("Deepgram") appends are dropped; only
        // the profile's "Cargo.toml" fragment survives.
        let expected_prompt = r#"profile prompt

Vocabulary JSON:
{"terms":["Cargo.toml"]}"#;

        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            expected_prompt
        );
    }
3279
3280    #[test]
3281    fn accepts_empty_root_transcript_system_prompt() {
3282        let config = AppConfig::from_toml_str(
3283            r#"
3284[transcript]
3285system_prompt = ""
3286
3287[pipeline]
3288deadline_ms = 500
3289payload_format = "json_object"
3290
3291[[pipeline.steps]]
3292id = "refine"
3293cmd = "refine"
3294timeout_ms = 100
3295on_error = "abort"
3296"#,
3297        )
3298        .expect("empty root transcript system prompt should remain valid");
3299
3300        assert_eq!(config.transcript.system_prompt, "");
3301        assert_eq!(config.transcript.system_prompt_append, None);
3302    }
3303
    #[test]
    fn resolve_effective_config_voice_prompt_replacement_clears_base_append() {
        // Same scenario as the voice-replacement test above, additionally
        // asserting that the append field itself is cleared in the result.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[transcript]
system_prompt = "base prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[voices.codex]
system_prompt = "voice prompt"

[profiles.default]

[profiles.codex]
voice = "codex"

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("voice prompt replacement config should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        // Voice replacement wins and the base append is fully discarded.
        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            "voice prompt"
        );
        assert_eq!(
            resolved.effective_config.transcript.system_prompt_append,
            None
        );
    }
3359
    #[test]
    fn resolve_effective_config_profile_prompt_replacement_clears_inherited_appends() {
        // Variant of the profile-replacement case: here the profile replaces
        // the prompt WITHOUT its own append, so all inherited appends (base
        // "Muninn", voice "Deepgram") vanish and the bare prompt remains.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[transcript]
system_prompt = "base prompt"
system_prompt_append = """
Vocabulary JSON:
{"terms":["Muninn"]}
"""

[voices.codex]
system_prompt_append = """
Vocabulary JSON:
{"terms":["Deepgram"]}
"""

[profiles.default]

[profiles.codex]
voice = "codex"
[profiles.codex.transcript]
system_prompt = "profile prompt"

[[profile_rules]]
id = "codex_window"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "refine"
cmd = "refine"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("profile prompt replacement config should parse");

        let resolved = config.resolve_effective_config(target_context(
            Some("com.openai.codex"),
            Some("Codex"),
            Some("Spec"),
        ));

        assert_eq!(
            resolved.effective_config.transcript.system_prompt,
            "profile prompt"
        );
        assert_eq!(
            resolved.effective_config.transcript.system_prompt_append,
            None
        );
        // Built-in step view must agree with the effective config.
        assert_eq!(
            resolved.builtin_steps.transcript.system_prompt,
            "profile prompt"
        );
        assert_eq!(resolved.builtin_steps.transcript.system_prompt_append, None);
    }
3425
    #[test]
    fn resolve_profile_selection_hides_default_profile_glyph_when_no_rule_matches() {
        // The default profile's voice ("mail", glyph "e") is reported by id on
        // fallback, but its indicator glyph is suppressed so the UI shows the
        // generic indicator rather than a voice-specific one.
        let config = AppConfig::from_toml_str(
            r#"
[app]
profile = "default"

[recording]
sample_rate_khz = 16

[transcript]
system_prompt = "base prompt"

[refine]
temperature = 0.0
max_output_tokens = 512
max_length_delta_ratio = 0.25
max_token_change_ratio = 0.60

[voices.mail]
indicator_glyph = "e"
system_prompt = "mail prompt"

[profiles.default]
voice = "mail"

[profiles.codex]

[[profile_rules]]
id = "codex_app"
profile = "codex"
app_name = "Codex"

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "stt_openai"
timeout_ms = 100
on_error = "abort"
"#,
        )
        .expect("config with default fallback voice should parse");

        // Terminal does not match the Codex app-name rule -> fallback path.
        let fallback = config.resolve_profile_selection(&target_context(
            Some("com.apple.Terminal"),
            Some("Terminal"),
            Some("notes.txt"),
        ));

        assert_eq!(fallback.matched_rule_id, None);
        assert_eq!(fallback.profile_id, "default");
        assert_eq!(fallback.voice_id.as_deref(), Some("mail"));
        // Glyph hidden despite the voice defining one.
        assert_eq!(fallback.voice_glyph, None);
        assert_eq!(
            fallback.fallback_reason.as_deref(),
            Some("no profile rule matched; using default profile `default`")
        );
    }
3487
3488    #[test]
3489    fn accepts_recording_overrides() {
3490        let config = AppConfig::from_toml_str(
3491            r#"
3492[recording]
3493mono = false
3494sample_rate_khz = 48
3495
3496[pipeline]
3497deadline_ms = 500
3498payload_format = "json_object"
3499
3500[[pipeline.steps]]
3501id = "stt"
3502cmd = "step-a"
3503timeout_ms = 100
3504on_error = "abort"
3505"#,
3506        )
3507        .expect("recording overrides should parse");
3508
3509        assert!(!config.recording.mono);
3510        assert_eq!(config.recording.sample_rate_khz, 48);
3511        assert_eq!(config.recording.sample_rate_hz(), 48_000);
3512    }
3513
3514    #[test]
3515    fn rejects_non_positive_recording_sample_rate() {
3516        let error = AppConfig::from_toml_str(
3517            r#"
3518[recording]
3519sample_rate_khz = 0
3520
3521[pipeline]
3522deadline_ms = 500
3523payload_format = "json_object"
3524
3525[[pipeline.steps]]
3526id = "stt"
3527cmd = "step-a"
3528timeout_ms = 100
3529on_error = "abort"
3530"#,
3531        )
3532        .expect_err("recording sample rate must be > 0");
3533
3534        assert_eq!(
3535            error.to_validation_error(),
3536            Some(ConfigValidationError::RecordingSampleRateKhzMustBePositive)
3537        );
3538    }
3539
3540    #[test]
3541    fn rejects_unknown_enum_values() {
3542        let error = AppConfig::from_toml_str(
3543            r#"
3544[pipeline]
3545deadline_ms = 500
3546payload_format = "json_object"
3547
3548[[pipeline.steps]]
3549id = "stt"
3550cmd = "step-a"
3551timeout_ms = 100
3552on_error = "skip"
3553"#,
3554        )
3555        .expect_err("unknown enum must fail");
3556
3557        assert!(matches!(error, ConfigError::ParseToml { .. }));
3558    }
3559
3560    #[test]
3561    fn resolve_config_path_uses_expected_precedence() {
3562        let from_env = resolve_config_path_with(
3563            |name| match name {
3564                "MUNINN_CONFIG" => Some(OsString::from("/tmp/override.toml")),
3565                "XDG_CONFIG_HOME" => Some(OsString::from("/xdg")),
3566                _ => None,
3567            },
3568            Some(PathBuf::from("/Users/alice")),
3569        )
3570        .expect("env override should resolve");
3571        assert_eq!(from_env, PathBuf::from("/tmp/override.toml"));
3572
3573        let from_xdg = resolve_config_path_with(
3574            |name| match name {
3575                "XDG_CONFIG_HOME" => Some(OsString::from("/xdg")),
3576                _ => None,
3577            },
3578            Some(PathBuf::from("/Users/alice")),
3579        )
3580        .expect("xdg should resolve");
3581        assert_eq!(from_xdg, PathBuf::from("/xdg/muninn/config.toml"));
3582
3583        let from_home = resolve_config_path_with(|_| None, Some(PathBuf::from("/Users/alice")))
3584            .expect("home should resolve");
3585        assert_eq!(
3586            from_home,
3587            PathBuf::from("/Users/alice/.config/muninn/config.toml")
3588        );
3589    }
3590
3591    #[test]
3592    fn load_from_path_returns_not_found_with_expected_path() {
3593        let unique_suffix = SystemTime::now()
3594            .duration_since(UNIX_EPOCH)
3595            .expect("system clock before UNIX_EPOCH")
3596            .as_nanos();
3597        let path = std::env::temp_dir().join(format!(
3598            "muninn-missing-config-{}-{}.toml",
3599            std::process::id(),
3600            unique_suffix
3601        ));
3602
3603        let error = AppConfig::load_from_path(&path).expect_err("missing path must fail");
3604        match error {
3605            ConfigError::NotFound { path: actual } => assert_eq!(actual, path),
3606            other => panic!("expected NotFound, got {other:?}"),
3607        }
3608    }
3609
3610    #[test]
3611    fn load_creates_launchable_default_when_config_is_missing() {
3612        let unique_suffix = SystemTime::now()
3613            .duration_since(UNIX_EPOCH)
3614            .expect("system clock before UNIX_EPOCH")
3615            .as_nanos();
3616        let config_root = std::env::temp_dir().join(format!(
3617            "muninn-auto-config-{}-{}",
3618            std::process::id(),
3619            unique_suffix
3620        ));
3621        let config_path = config_root.join("muninn").join("config.toml");
3622
3623        let config = AppConfig::load_or_create_default(&config_path)
3624            .expect("missing config should auto-create");
3625
3626        assert_eq!(config, AppConfig::launchable_default());
3627        assert!(config_path.exists(), "config file should be written");
3628
3629        let rendered = std::fs::read_to_string(&config_path).expect("read written config");
3630        let reparsed = AppConfig::from_toml_str(&rendered).expect("reparse written config");
3631        assert_eq!(reparsed, AppConfig::launchable_default());
3632    }
3633
    /// Minimal well-formed configuration snippet — three hotkey bindings plus
    /// a two-step pipeline — shared by tests that need a valid baseline.
    fn valid_pipeline_toml() -> &'static str {
        r#"
[hotkeys.push_to_talk]
trigger = "double_tap"
chord = ["ctrl"]

[hotkeys.done_mode_toggle]
trigger = "press"
chord = ["ctrl", "shift", "d"]

[hotkeys.cancel_current_capture]
trigger = "press"
chord = ["ctrl", "shift", "x"]

[pipeline]
deadline_ms = 500
payload_format = "json_object"

[[pipeline.steps]]
id = "stt"
cmd = "stt_openai"
timeout_ms = 300
on_error = "abort"

[[pipeline.steps]]
id = "normalize"
cmd = "muninn-normalize"
timeout_ms = 60
on_error = "continue"
"#
    }
3665
3666    fn target_context(
3667        bundle_id: Option<&str>,
3668        app_name: Option<&str>,
3669        window_title: Option<&str>,
3670    ) -> TargetContextSnapshot {
3671        TargetContextSnapshot {
3672            bundle_id: bundle_id.map(ToOwned::to_owned),
3673            app_name: app_name.map(ToOwned::to_owned),
3674            window_title: window_title.map(ToOwned::to_owned),
3675            captured_at: "2026-03-06T00:00:00Z".to_string(),
3676        }
3677    }
3678
    /// Test-only helper for extracting the validation variant from a
    /// `ConfigError` without matching on the enum at every call site.
    trait ValidationErrorExt {
        fn to_validation_error(&self) -> Option<ConfigValidationError>;
    }
3682
3683    impl ValidationErrorExt for ConfigError {
3684        fn to_validation_error(&self) -> Option<ConfigValidationError> {
3685            match self {
3686                ConfigError::Validation(validation) => Some(validation.clone()),
3687                _ => None,
3688            }
3689        }
3690    }
3691}