// hoosh/config.rs
//! Configuration file loading for hoosh.
//!
//! Loads `hoosh.toml` from the current directory or a specified path.
//! Environment variables in API keys are resolved at load time.

6use std::path::Path;
7
8use serde::Deserialize;
9
10use crate::budget::TokenPool;
11use crate::cache::CacheConfig;
12use crate::provider::ProviderType;
13use crate::router::{ProviderRoute, RoutingStrategy};
14use crate::server::ServerConfig;
15
/// Top-level configuration file structure.
///
/// Every section is optional in the TOML file: absent sections fall back to
/// their `Default` implementations via `#[serde(default)]`.
#[derive(Debug, Deserialize)]
pub struct HooshConfig {
    /// `[server]` — bind address, port, routing strategy, health checks.
    #[serde(default)]
    pub server: ServerSection,
    /// `[cache]` — response cache sizing and TTL.
    #[serde(default)]
    pub cache: CacheSection,
    /// `[[providers]]` — upstream providers, in declaration order.
    #[serde(default)]
    pub providers: Vec<ProviderSection>,
    /// `[[budgets]]` — named token budget pools.
    #[serde(default)]
    pub budgets: Vec<BudgetPoolSection>,
    /// `[whisper]` — speech-to-text model path.
    #[serde(default)]
    pub whisper: WhisperSection,
    /// `[tts]` — text-to-speech backend URL.
    #[serde(default)]
    pub tts: TtsSection,
    /// `[audit]` — signed audit logging.
    #[serde(default)]
    pub audit: AuditSection,
    /// `[auth]` — bearer tokens accepted by the API.
    #[serde(default)]
    pub auth: AuthConfig,
    /// `[telemetry]` — OpenTelemetry export settings.
    #[serde(default)]
    pub telemetry: TelemetrySection,
    /// `[context]` — context-window compaction settings.
    #[serde(default)]
    pub context: ContextSection,
    /// `[retry]` — provider retry/backoff policy.
    #[serde(default)]
    pub retry: crate::provider::retry::RetryConfig,
}
42
/// `[whisper]` section: local speech-to-text settings.
#[derive(Debug, Default, Deserialize)]
pub struct WhisperSection {
    /// Path to whisper model file (e.g. "models/ggml-base.en.bin").
    /// `None` leaves transcription unconfigured.
    pub model: Option<String>,
}
48
/// `[tts]` section: text-to-speech backend settings.
#[derive(Debug, Default, Deserialize)]
pub struct TtsSection {
    /// URL of the TTS backend (e.g. "http://localhost:5500" for openedai-speech).
    /// `None` leaves TTS unconfigured.
    pub url: Option<String>,
}
54
55#[derive(Default, Deserialize)]
56pub struct AuditSection {
57    /// Enable audit logging. Defaults to false.
58    #[serde(default)]
59    pub enabled: bool,
60    /// HMAC signing key. Generated randomly if not set.
61    pub signing_key: Option<String>,
62    /// Max entries to keep in memory. Defaults to 10000.
63    #[serde(default = "default_audit_max")]
64    pub max_entries: usize,
65}
66
/// Serde default for `AuditSection::max_entries`.
fn default_audit_max() -> usize {
    10000
}
70
71impl std::fmt::Debug for AuditSection {
72    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
73        f.debug_struct("AuditSection")
74            .field("enabled", &self.enabled)
75            .field(
76                "signing_key",
77                &self.signing_key.as_ref().map(|_| "[REDACTED]"),
78            )
79            .field("max_entries", &self.max_entries)
80            .finish()
81    }
82}
83
/// Context management configuration (`[context]` section).
#[derive(Debug, Deserialize)]
pub struct ContextSection {
    /// Token ratio threshold (0.0–1.0) at which context compaction triggers.
    /// Default 0.8 = compact when messages exceed 80% of the model's context window.
    #[serde(default = "default_compaction_threshold")]
    pub compaction_threshold: f64,
    /// Number of most-recent messages to keep when truncating.
    #[serde(default = "default_keep_last")]
    pub keep_last_messages: usize,
    /// Enable context compaction. Defaults to true.
    #[serde(default = "default_true")]
    pub enabled: bool,
}
98
99impl Default for ContextSection {
100    fn default() -> Self {
101        Self {
102            compaction_threshold: default_compaction_threshold(),
103            keep_last_messages: default_keep_last(),
104            enabled: true,
105        }
106    }
107}
108
/// Serde default for `ContextSection::compaction_threshold` (80% of window).
fn default_compaction_threshold() -> f64 {
    0.80
}

/// Serde default for `ContextSection::keep_last_messages`.
fn default_keep_last() -> usize {
    10
}
115
116#[derive(Debug, Default, Deserialize)]
117pub struct TelemetrySection {
118    /// OTLP endpoint (e.g. "http://localhost:4317"). Enables OpenTelemetry when set.
119    pub otlp_endpoint: Option<String>,
120    /// Service name for traces. Defaults to "hoosh".
121    #[serde(default = "default_service_name")]
122    pub service_name: String,
123}
124
/// Serde default for `TelemetrySection::service_name`.
fn default_service_name() -> String {
    String::from("hoosh")
}
128
/// `[auth]` section: API authentication.
///
/// `Debug` is implemented manually below so token values are never printed.
#[derive(Default, Deserialize)]
pub struct AuthConfig {
    /// Bearer tokens that are allowed to access the API.
    /// Empty means no tokens are configured.
    #[serde(default)]
    pub tokens: Vec<String>,
}
135
136impl std::fmt::Debug for AuthConfig {
137    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
138        f.debug_struct("AuthConfig")
139            .field("tokens", &format!("[{} configured]", self.tokens.len()))
140            .finish()
141    }
142}
143
144impl std::fmt::Debug for ProviderSection {
145    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
146        f.debug_struct("ProviderSection")
147            .field("provider_type", &self.provider_type)
148            .field("base_url", &self.base_url)
149            .field("api_key", &self.api_key.as_ref().map(|_| "[REDACTED]"))
150            .field("priority", &self.priority)
151            .field("models", &self.models)
152            .field("enabled", &self.enabled)
153            .finish()
154    }
155}
156
/// One `[[budgets]]` entry: a named token budget pool.
#[derive(Debug, Deserialize)]
pub struct BudgetPoolSection {
    /// Pool name (e.g. "default", "agent-1").
    pub name: String,
    /// Maximum tokens allowed in this pool.
    pub capacity: u64,
}
164
/// `[server]` section: listen address and routing behavior.
#[derive(Debug, Deserialize)]
pub struct ServerSection {
    /// Address to bind. Defaults to "127.0.0.1" (loopback only).
    #[serde(default = "default_bind")]
    pub bind: String,
    /// TCP port. Defaults to 8088.
    #[serde(default = "default_port")]
    pub port: u16,
    /// Routing strategy. Defaults to priority-based routing.
    #[serde(default)]
    pub strategy: StrategyValue,
    /// Health check interval in seconds. 0 = disabled. Defaults to 30.
    #[serde(default = "default_health_interval")]
    pub health_check_interval_secs: u64,
}
177
178impl Default for ServerSection {
179    fn default() -> Self {
180        Self {
181            bind: default_bind(),
182            port: default_port(),
183            strategy: StrategyValue::default(),
184            health_check_interval_secs: default_health_interval(),
185        }
186    }
187}
188
189impl From<StrategyValue> for RoutingStrategy {
190    fn from(v: StrategyValue) -> Self {
191        match v {
192            StrategyValue::Priority => RoutingStrategy::Priority,
193            StrategyValue::RoundRobin => RoutingStrategy::RoundRobin,
194            StrategyValue::LowestLatency => RoutingStrategy::LowestLatency,
195            StrategyValue::Direct => RoutingStrategy::Direct,
196        }
197    }
198}
199
/// Routing strategy as spelled in the config file (snake_case strings,
/// e.g. `strategy = "round_robin"`). Converted to `RoutingStrategy` via `From`.
#[derive(Debug, Default, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StrategyValue {
    #[default]
    Priority,
    RoundRobin,
    LowestLatency,
    Direct,
}
209
/// `[cache]` section: response cache sizing and lifetime.
#[derive(Debug, Deserialize)]
pub struct CacheSection {
    /// Maximum number of cached entries. Defaults to 1000.
    #[serde(default = "default_cache_max")]
    pub max_entries: usize,
    /// Entry time-to-live in seconds. Defaults to 300.
    #[serde(default = "default_cache_ttl")]
    pub ttl_secs: u64,
    /// Whether caching is enabled. Defaults to true.
    #[serde(default = "default_true")]
    pub enabled: bool,
}
219
/// One `[[providers]]` entry describing an upstream provider.
///
/// `Debug` is implemented manually above so the API key is never printed.
#[derive(Deserialize)]
pub struct ProviderSection {
    /// Provider type name (e.g. "ollama", "openai", "anthropic").
    #[serde(rename = "type")]
    pub provider_type: ProviderType,
    /// Base URL. Uses provider-specific default if omitted.
    pub base_url: Option<String>,
    /// API key — literal string or `"$ENV_VAR"` to read from environment.
    pub api_key: Option<String>,
    /// Priority (lower = preferred). Defaults to 10.
    #[serde(default = "default_priority")]
    pub priority: u32,
    /// Model patterns this provider handles.
    #[serde(default)]
    pub models: Vec<String>,
    /// Whether this provider is enabled. Defaults to true.
    #[serde(default = "default_true")]
    pub enabled: bool,
    /// Maximum tokens per request for this provider.
    #[serde(default)]
    pub max_tokens_limit: Option<u32>,
    /// Maximum requests per minute for this provider.
    #[serde(default)]
    pub rate_limit_rpm: Option<u32>,
    /// Paths to PEM certificates to pin for this provider's TLS.
    #[serde(default)]
    pub tls_pinned_certs: Vec<String>,
    /// Path to client certificate for mTLS.
    pub client_cert: Option<String>,
    /// Path to client key for mTLS.
    pub client_key: Option<String>,
}
252
/// Serde default for `ServerSection::bind`: loopback only.
fn default_bind() -> String {
    String::from("127.0.0.1")
}

/// Serde default for `ServerSection::port`.
fn default_port() -> u16 {
    8088
}

/// Serde default for `CacheSection::max_entries`.
fn default_cache_max() -> usize {
    1000
}

/// Serde default for `CacheSection::ttl_secs` (5 minutes).
fn default_cache_ttl() -> u64 {
    300
}

/// Serde default helper for boolean fields that default to `true`.
fn default_true() -> bool {
    true
}

/// Serde default for `ProviderSection::priority` (lower = preferred).
fn default_priority() -> u32 {
    10
}

/// Serde default for `ServerSection::health_check_interval_secs`.
fn default_health_interval() -> u64 {
    30
}
274
275impl Default for CacheSection {
276    fn default() -> Self {
277        Self {
278            max_entries: default_cache_max(),
279            ttl_secs: default_cache_ttl(),
280            enabled: true,
281        }
282    }
283}
284
285/// Resolve an API key value. If it starts with `$`, read from environment.
286fn resolve_api_key(raw: &Option<String>) -> Option<String> {
287    let raw = raw.as_ref()?;
288    if let Some(var_name) = raw.strip_prefix('$') {
289        match std::env::var(var_name) {
290            Ok(val) => Some(val),
291            Err(_) => {
292                tracing::warn!(
293                    "API key env var ${var_name} is not set — provider will have no API key"
294                );
295                None
296            }
297        }
298    } else {
299        Some(raw.clone())
300    }
301}
302
303/// Default base URL for a provider type.
304fn default_base_url(provider_type: ProviderType) -> &'static str {
305    match provider_type {
306        ProviderType::Ollama => "http://localhost:11434",
307        ProviderType::LlamaCpp => "http://localhost:8080",
308        ProviderType::Synapse => "http://localhost:5000",
309        ProviderType::LmStudio => "http://localhost:1234",
310        ProviderType::LocalAi => "http://localhost:8080",
311        ProviderType::OpenAi => "https://api.openai.com",
312        ProviderType::Anthropic => "https://api.anthropic.com",
313        ProviderType::DeepSeek => "https://api.deepseek.com",
314        ProviderType::Mistral => "https://api.mistral.ai",
315        ProviderType::Groq => "https://api.groq.com/openai",
316        ProviderType::OpenRouter => "https://openrouter.ai/api",
317        ProviderType::Google => "https://generativelanguage.googleapis.com",
318        ProviderType::Grok => "https://api.x.ai",
319        ProviderType::Whisper => "http://localhost:8080",
320    }
321}
322
323impl HooshConfig {
324    /// Load configuration from a TOML file.
325    pub fn load(path: impl AsRef<Path>) -> anyhow::Result<Self> {
326        let contents = std::fs::read_to_string(path.as_ref())?;
327        let config: HooshConfig = toml::from_str(&contents).map_err(|e| {
328            // Redact error details that might contain API keys
329            let msg = e.to_string();
330            if msg.contains("api_key") {
331                anyhow::anyhow!("failed to parse config: TOML syntax error near api_key field")
332            } else {
333                anyhow::anyhow!("failed to parse config: {e}")
334            }
335        })?;
336        Ok(config)
337    }
338
339    /// Try loading from `hoosh.toml` in the current directory, or return defaults.
340    pub fn load_or_default() -> Self {
341        if Path::new("hoosh.toml").exists() {
342            match Self::load("hoosh.toml") {
343                Ok(config) => {
344                    tracing::info!("loaded config from hoosh.toml");
345                    config
346                }
347                Err(e) => {
348                    tracing::error!("failed to load hoosh.toml: {e}");
349                    std::process::exit(1);
350                }
351            }
352        } else {
353            Self {
354                server: ServerSection::default(),
355                cache: CacheSection::default(),
356                providers: Vec::new(),
357                budgets: Vec::new(),
358                whisper: WhisperSection::default(),
359                tts: TtsSection::default(),
360                audit: AuditSection::default(),
361                auth: AuthConfig::default(),
362                telemetry: TelemetrySection::default(),
363                context: ContextSection::default(),
364                retry: crate::provider::retry::RetryConfig::default(),
365            }
366        }
367    }
368
369    /// Convert to provider routes.
370    pub fn routes(&self) -> Vec<ProviderRoute> {
371        self.providers
372            .iter()
373            .map(|p| {
374                let base_url = p
375                    .base_url
376                    .clone()
377                    .unwrap_or_else(|| default_base_url(p.provider_type).into());
378                let tls_config = if !p.tls_pinned_certs.is_empty()
379                    || p.client_cert.is_some()
380                    || p.client_key.is_some()
381                {
382                    Some(crate::provider::TlsConfig {
383                        pinned_certs: p.tls_pinned_certs.clone(),
384                        client_cert: p.client_cert.clone(),
385                        client_key: p.client_key.clone(),
386                    })
387                } else {
388                    None
389                };
390                ProviderRoute {
391                    provider: p.provider_type,
392                    priority: p.priority,
393                    model_patterns: p.models.clone(),
394                    enabled: p.enabled,
395                    base_url,
396                    api_key: resolve_api_key(&p.api_key),
397                    max_tokens_limit: p.max_tokens_limit,
398                    rate_limit_rpm: p.rate_limit_rpm,
399                    tls_config,
400                }
401            })
402            .collect()
403    }
404
405    /// Convert to ServerConfig, merging CLI overrides.
406    pub fn into_server_config(
407        self,
408        bind_override: Option<&str>,
409        port_override: Option<u16>,
410        config_path: Option<String>,
411    ) -> ServerConfig {
412        let routes = self.routes();
413        let strategy: RoutingStrategy = self.server.strategy.into();
414        let budget_pools = self
415            .budgets
416            .iter()
417            .map(|b| TokenPool::new(&b.name, b.capacity))
418            .collect();
419
420        ServerConfig {
421            bind: bind_override.map(String::from).unwrap_or(self.server.bind),
422            port: port_override.unwrap_or(self.server.port),
423            routes,
424            strategy,
425            cache_config: CacheConfig {
426                max_entries: self.cache.max_entries,
427                ttl_secs: self.cache.ttl_secs,
428                enabled: self.cache.enabled,
429            },
430            budget_pools,
431            whisper_model: self.whisper.model,
432            tts_model: self.tts.url,
433            audit_enabled: self.audit.enabled,
434            audit_signing_key: resolve_api_key(&self.audit.signing_key),
435            audit_max_entries: self.audit.max_entries,
436            auth_tokens: self.auth.tokens,
437            otlp_endpoint: self.telemetry.otlp_endpoint,
438            telemetry_service_name: self.telemetry.service_name,
439            health_check_interval_secs: self.server.health_check_interval_secs,
440            config_path,
441            context_config: self.context,
442            retry_config: self.retry,
443        }
444    }
445}
446
#[cfg(test)]
mod tests {
    use super::*;

    // --- TOML parsing ---

    #[test]
    fn parse_minimal_config() {
        let toml = "";
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.server.port, 8088);
        assert_eq!(config.server.bind, "127.0.0.1");
        assert!(config.providers.is_empty());
    }

    #[test]
    fn parse_full_config() {
        let toml = r#"
[server]
bind = "0.0.0.0"
port = 9000
strategy = "round_robin"

[cache]
max_entries = 500
ttl_secs = 600
enabled = false

[[providers]]
type = "Ollama"
base_url = "http://gpu-box:11434"
priority = 1
models = ["llama*", "mistral*"]

[[providers]]
type = "OpenAi"
api_key = "$OPENAI_API_KEY"
priority = 10
models = ["gpt-*"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.server.port, 9000);
        assert_eq!(config.server.bind, "0.0.0.0");
        assert_eq!(config.cache.max_entries, 500);
        assert!(!config.cache.enabled);
        assert_eq!(config.providers.len(), 2);
        assert_eq!(config.providers[0].provider_type, ProviderType::Ollama);
        assert_eq!(config.providers[1].provider_type, ProviderType::OpenAi);
        // Env-var keys stay unresolved at parse time; resolution happens in routes().
        assert_eq!(
            config.providers[1].api_key.as_deref(),
            Some("$OPENAI_API_KEY")
        );
    }

    // --- routes() conversion ---

    #[test]
    fn routes_from_config() {
        let toml = r#"
[[providers]]
type = "Ollama"
priority = 1
models = ["llama*"]

[[providers]]
type = "OpenAi"
api_key = "sk-test-key"
priority = 5
models = ["gpt-*"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let routes = config.routes();
        assert_eq!(routes.len(), 2);
        // Missing base_url falls back to the provider-type default.
        assert_eq!(routes[0].base_url, "http://localhost:11434");
        assert!(routes[0].api_key.is_none());
        assert_eq!(routes[1].base_url, "https://api.openai.com");
        assert_eq!(routes[1].api_key.as_deref(), Some("sk-test-key"));
    }

    // --- resolve_api_key ---

    #[test]
    fn resolve_api_key_literal() {
        let key = Some("sk-literal".into());
        assert_eq!(resolve_api_key(&key).as_deref(), Some("sk-literal"));
    }

    #[test]
    fn resolve_api_key_env_var() {
        // SAFETY: each env-var test uses a unique variable name, so parallel
        // test threads never mutate the same variable concurrently.
        unsafe { std::env::set_var("HOOSH_TEST_KEY_1234", "sk-from-env") };
        let key = Some("$HOOSH_TEST_KEY_1234".into());
        assert_eq!(resolve_api_key(&key).as_deref(), Some("sk-from-env"));
        unsafe { std::env::remove_var("HOOSH_TEST_KEY_1234") };
    }

    #[test]
    fn resolve_api_key_missing_env() {
        let key = Some("$HOOSH_NONEXISTENT_KEY_999".into());
        assert!(resolve_api_key(&key).is_none());
    }

    #[test]
    fn resolve_api_key_none() {
        assert!(resolve_api_key(&None).is_none());
    }

    // --- default base URLs ---

    #[test]
    fn default_base_urls() {
        assert_eq!(
            default_base_url(ProviderType::Ollama),
            "http://localhost:11434"
        );
        assert_eq!(
            default_base_url(ProviderType::OpenAi),
            "https://api.openai.com"
        );
        assert_eq!(
            default_base_url(ProviderType::Anthropic),
            "https://api.anthropic.com"
        );
        assert_eq!(
            default_base_url(ProviderType::Groq),
            "https://api.groq.com/openai"
        );
    }

    // --- into_server_config ---

    #[test]
    fn into_server_config_with_overrides() {
        let toml = r#"
[server]
port = 9000
bind = "0.0.0.0"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        // CLI overrides win over the file values.
        let sc = config.into_server_config(Some("127.0.0.1"), Some(8080), None);
        assert_eq!(sc.bind, "127.0.0.1");
        assert_eq!(sc.port, 8080);
    }

    #[test]
    fn into_server_config_no_overrides() {
        let toml = r#"
[server]
port = 9000
bind = "0.0.0.0"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let sc = config.into_server_config(None, None, None);
        assert_eq!(sc.bind, "0.0.0.0");
        assert_eq!(sc.port, 9000);
    }

    #[test]
    fn parse_with_budgets() {
        let toml = r#"
[[budgets]]
name = "default"
capacity = 100000

[[budgets]]
name = "agents"
capacity = 50000
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.budgets.len(), 2);
        assert_eq!(config.budgets[0].name, "default");
        assert_eq!(config.budgets[0].capacity, 100000);
    }

    #[test]
    fn parse_with_whisper_and_tts() {
        let toml = r#"
[whisper]
model = "models/ggml-base.en.bin"

[tts]
url = "http://localhost:5500"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(
            config.whisper.model.as_deref(),
            Some("models/ggml-base.en.bin")
        );
        assert_eq!(config.tts.url.as_deref(), Some("http://localhost:5500"));
    }

    #[test]
    fn into_server_config_with_budgets() {
        let toml = r#"
[[budgets]]
name = "pool1"
capacity = 5000
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let sc = config.into_server_config(None, None, None);
        assert_eq!(sc.budget_pools.len(), 1);
        assert_eq!(sc.budget_pools[0].name, "pool1");
        assert_eq!(sc.budget_pools[0].capacity, 5000);
    }

    #[test]
    fn into_server_config_with_whisper_tts() {
        let toml = r#"
[whisper]
model = "model.bin"

[tts]
url = "http://tts:5500"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let sc = config.into_server_config(None, None, None);
        assert_eq!(sc.whisper_model.as_deref(), Some("model.bin"));
        assert_eq!(sc.tts_model.as_deref(), Some("http://tts:5500"));
    }

    #[test]
    fn into_server_config_all_strategies() {
        for (strategy_str, _) in [
            ("priority", "Priority"),
            ("round_robin", "RoundRobin"),
            ("lowest_latency", "LowestLatency"),
            ("direct", "Direct"),
        ] {
            let toml = format!("[server]\nstrategy = \"{strategy_str}\"");
            let config: HooshConfig = toml::from_str(&toml).unwrap();
            let sc = config.into_server_config(None, None, None);
            // Just verify it doesn't panic
            let _ = sc.strategy;
        }
    }

    // Exhaustive check: every ProviderType variant has a default base URL.
    #[test]
    fn all_default_base_urls_covered() {
        let types = [
            (ProviderType::Ollama, "http://localhost:11434"),
            (ProviderType::LlamaCpp, "http://localhost:8080"),
            (ProviderType::Synapse, "http://localhost:5000"),
            (ProviderType::LmStudio, "http://localhost:1234"),
            (ProviderType::LocalAi, "http://localhost:8080"),
            (ProviderType::OpenAi, "https://api.openai.com"),
            (ProviderType::Anthropic, "https://api.anthropic.com"),
            (ProviderType::DeepSeek, "https://api.deepseek.com"),
            (ProviderType::Mistral, "https://api.mistral.ai"),
            (ProviderType::Groq, "https://api.groq.com/openai"),
            (ProviderType::OpenRouter, "https://openrouter.ai/api"),
            (
                ProviderType::Google,
                "https://generativelanguage.googleapis.com",
            ),
            (ProviderType::Grok, "https://api.x.ai"),
            (ProviderType::Whisper, "http://localhost:8080"),
        ];
        for (pt, expected) in types {
            assert_eq!(default_base_url(pt), expected, "mismatch for {pt}");
        }
    }

    #[test]
    fn provider_with_max_tokens_limit() {
        let toml = r#"
[[providers]]
type = "OpenAi"
api_key = "sk-test"
max_tokens_limit = 4096
models = ["gpt-*"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.providers[0].max_tokens_limit, Some(4096));
        let routes = config.routes();
        assert_eq!(routes[0].max_tokens_limit, Some(4096));
    }

    #[test]
    fn provider_defaults() {
        let toml = r#"
[[providers]]
type = "Ollama"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let p = &config.providers[0];
        assert_eq!(p.priority, 10);
        assert!(p.enabled);
        assert!(p.models.is_empty());
        assert!(p.base_url.is_none());
    }

    // --- TLS configuration ---

    #[test]
    fn routes_with_tls_config() {
        let toml = r#"
[[providers]]
type = "OpenAi"
api_key = "sk-test"
models = ["gpt-*"]
tls_pinned_certs = ["/path/to/cert.pem"]
client_cert = "/path/to/client.pem"
client_key = "/path/to/client-key.pem"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let routes = config.routes();
        assert_eq!(routes.len(), 1);
        let tls = routes[0].tls_config.as_ref().unwrap();
        assert_eq!(tls.pinned_certs.len(), 1);
        assert_eq!(tls.client_cert.as_deref(), Some("/path/to/client.pem"));
        assert_eq!(tls.client_key.as_deref(), Some("/path/to/client-key.pem"));
    }

    #[test]
    fn routes_without_tls_config() {
        let toml = r#"
[[providers]]
type = "Ollama"
models = ["llama*"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let routes = config.routes();
        // No TLS fields set — tls_config must stay None.
        assert!(routes[0].tls_config.is_none());
    }

    #[test]
    fn routes_with_rate_limit() {
        let toml = r#"
[[providers]]
type = "OpenAi"
api_key = "sk-test"
models = ["gpt-*"]
rate_limit_rpm = 60
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let routes = config.routes();
        assert_eq!(routes[0].rate_limit_rpm, Some(60));
    }

    #[test]
    fn routes_disabled_provider() {
        let toml = r#"
[[providers]]
type = "Ollama"
enabled = false
models = ["llama*"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let routes = config.routes();
        assert!(!routes[0].enabled);
    }

    // --- section defaults ---

    #[test]
    fn parse_audit_section() {
        let toml = r#"
[audit]
enabled = true
signing_key = "my-secret-key"
max_entries = 5000
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert!(config.audit.enabled);
        assert_eq!(config.audit.signing_key.as_deref(), Some("my-secret-key"));
        assert_eq!(config.audit.max_entries, 5000);
    }

    #[test]
    fn parse_audit_defaults() {
        let config: HooshConfig = toml::from_str("").unwrap();
        assert!(!config.audit.enabled);
        assert!(config.audit.signing_key.is_none());
        // With an absent [audit] section, AuditSection::default() is used —
        // note max_entries is deliberately not asserted here.
    }

    #[test]
    fn parse_context_section() {
        let toml = r#"
[context]
compaction_threshold = 0.6
keep_last_messages = 5
enabled = false
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert!((config.context.compaction_threshold - 0.6).abs() < f64::EPSILON);
        assert_eq!(config.context.keep_last_messages, 5);
        assert!(!config.context.enabled);
    }

    #[test]
    fn parse_context_defaults() {
        let config: HooshConfig = toml::from_str("").unwrap();
        assert!((config.context.compaction_threshold - 0.8).abs() < f64::EPSILON);
        assert_eq!(config.context.keep_last_messages, 10);
        assert!(config.context.enabled);
    }

    #[test]
    fn parse_telemetry_section() {
        let toml = r#"
[telemetry]
otlp_endpoint = "http://localhost:4317"
service_name = "my-hoosh"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(
            config.telemetry.otlp_endpoint.as_deref(),
            Some("http://localhost:4317")
        );
        assert_eq!(config.telemetry.service_name, "my-hoosh");
    }

    #[test]
    fn parse_telemetry_defaults() {
        let config: HooshConfig = toml::from_str("").unwrap();
        assert!(config.telemetry.otlp_endpoint.is_none());
    }

    #[test]
    fn parse_telemetry_with_service_name() {
        let toml = r#"
[telemetry]
service_name = "hoosh"
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.telemetry.service_name, "hoosh");
    }

    #[test]
    fn parse_auth_section() {
        let toml = r#"
[auth]
tokens = ["token1", "token2"]
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.auth.tokens.len(), 2);
    }

    #[test]
    fn parse_retry_section() {
        let toml = r#"
[retry]
max_retries = 5
base_delay_ms = 1000
max_delay_ms = 60000
jitter_factor = 0.3
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        assert_eq!(config.retry.max_retries, 5);
        assert_eq!(config.retry.base_delay_ms, 1000);
        assert_eq!(config.retry.max_delay_ms, 60_000);
    }

    // End-to-end: every section flows through into_server_config.
    #[test]
    fn into_server_config_full() {
        let toml = r#"
[server]
port = 9000
bind = "0.0.0.0"
strategy = "lowest_latency"
health_check_interval_secs = 60

[cache]
max_entries = 500
ttl_secs = 600
enabled = false

[audit]
enabled = true
max_entries = 5000

[telemetry]
otlp_endpoint = "http://otel:4317"
service_name = "test-hoosh"

[auth]
tokens = ["tok1"]

[context]
compaction_threshold = 0.9
keep_last_messages = 20

[retry]
max_retries = 5
base_delay_ms = 500
max_delay_ms = 30000
jitter_factor = 0.5

[[budgets]]
name = "default"
capacity = 100000

[[providers]]
type = "Ollama"
priority = 1
models = ["llama*"]
rate_limit_rpm = 120
"#;
        let config: HooshConfig = toml::from_str(toml).unwrap();
        let sc = config.into_server_config(None, None, Some("/path/to/config.toml".into()));
        assert_eq!(sc.port, 9000);
        assert_eq!(sc.bind, "0.0.0.0");
        assert_eq!(sc.cache_config.max_entries, 500);
        assert!(!sc.cache_config.enabled);
        assert!(sc.audit_enabled);
        assert_eq!(sc.audit_max_entries, 5000);
        assert_eq!(sc.otlp_endpoint.as_deref(), Some("http://otel:4317"));
        assert_eq!(sc.telemetry_service_name, "test-hoosh");
        assert_eq!(sc.auth_tokens.len(), 1);
        assert_eq!(sc.health_check_interval_secs, 60);
        assert_eq!(sc.config_path.as_deref(), Some("/path/to/config.toml"));
        assert!((sc.context_config.compaction_threshold - 0.9).abs() < f64::EPSILON);
        assert_eq!(sc.context_config.keep_last_messages, 20);
        assert_eq!(sc.retry_config.max_retries, 5);
        assert_eq!(sc.budget_pools.len(), 1);
        assert_eq!(sc.routes.len(), 1);
    }

    // --- Debug redaction ---

    #[test]
    fn audit_section_debug_redacts_key() {
        let section = AuditSection {
            enabled: true,
            signing_key: Some("super-secret".into()),
            max_entries: 1000,
        };
        let debug = format!("{section:?}");
        assert!(!debug.contains("super-secret"));
        assert!(debug.contains("[REDACTED]"));
    }

    #[test]
    fn auth_config_debug_shows_count() {
        let auth = AuthConfig {
            tokens: vec!["tok1".into(), "tok2".into()],
        };
        let debug = format!("{auth:?}");
        assert!(debug.contains("2 configured"));
        assert!(!debug.contains("tok1"));
    }

    #[test]
    fn provider_section_debug_redacts_key() {
        let section: ProviderSection = toml::from_str(
            r#"
type = "OpenAi"
api_key = "sk-secret-key"
models = ["gpt-*"]
"#,
        )
        .unwrap();
        let debug = format!("{section:?}");
        assert!(!debug.contains("sk-secret-key"));
        assert!(debug.contains("[REDACTED]"));
    }

    // --- load() error paths ---

    #[test]
    fn load_nonexistent_config_file() {
        let result = HooshConfig::load("/nonexistent/path/hoosh.toml");
        assert!(result.is_err());
    }

    #[test]
    fn load_invalid_toml() {
        let dir = std::env::temp_dir();
        let path = dir.join("hoosh_test_invalid.toml");
        std::fs::write(&path, "invalid {{{{ toml content").unwrap();
        let result = HooshConfig::load(&path);
        assert!(result.is_err());
        let _ = std::fs::remove_file(&path);
    }

    // --- Default impls ---

    #[test]
    fn context_section_default() {
        let ctx = ContextSection::default();
        assert!((ctx.compaction_threshold - 0.8).abs() < f64::EPSILON);
        assert_eq!(ctx.keep_last_messages, 10);
        assert!(ctx.enabled);
    }

    #[test]
    fn server_section_default() {
        let s = ServerSection::default();
        assert_eq!(s.bind, "127.0.0.1");
        assert_eq!(s.port, 8088);
        assert_eq!(s.health_check_interval_secs, 30);
    }

    #[test]
    fn cache_section_default() {
        let c = CacheSection::default();
        assert_eq!(c.max_entries, 1000);
        assert_eq!(c.ttl_secs, 300);
        assert!(c.enabled);
    }
}