// Source: a3s_code_core/config.rs
1//! Configuration module for A3S Code
2//!
3//! Provides configuration for:
4//! - LLM providers and models (defaultModel in "provider/model" format, providers)
5//! - Queue configuration (a3s-lane integration)
6//! - Search configuration (a3s-search integration)
7//! - Directories for dynamic skill and agent loading
8//!
//! Configuration is loaded from ACL files or ACL strings (an HCL-like syntax;
//! see [`CodeConfig::from_acl`]). JSON loading has been removed, although
//! [`CodeConfig::save_to_file`] still persists configuration as JSON.
11
12use crate::error::{CodeError, Result};
13use crate::llm::LlmConfig;
14use crate::memory::MemoryConfig;
15use serde::{Deserialize, Serialize};
16use std::collections::HashMap;
17use std::path::{Path, PathBuf};
18
19// ============================================================================
20// Provider Configuration
21// ============================================================================
22
/// Model cost information (per million tokens).
///
/// Serialized with camelCase keys (e.g. `cacheRead`, `cacheWrite`).
/// All fields default to 0.0 when absent.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct ModelCost {
    /// Input token cost
    #[serde(default)]
    pub input: f64,
    /// Output token cost
    #[serde(default)]
    pub output: f64,
    /// Cache read cost
    #[serde(default)]
    pub cache_read: f64,
    /// Cache write cost
    #[serde(default)]
    pub cache_write: f64,
}
40
/// Model token limits.
///
/// A value of 0 means "unspecified"; `apply_model_caps` only applies
/// `output` as a max-tokens cap when it is greater than 0.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelLimit {
    /// Maximum context tokens
    #[serde(default)]
    pub context: u32,
    /// Maximum output tokens
    #[serde(default)]
    pub output: u32,
}
51
/// Model modalities (input/output types).
///
/// Free-form strings (e.g. presumably "text", "image" — values are not
/// validated here; confirm against the provider catalog).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ModelModalities {
    /// Supported input types
    #[serde(default)]
    pub input: Vec<String>,
    /// Supported output types
    #[serde(default)]
    pub output: Vec<String>,
}
62
/// Model configuration.
///
/// Per-model overrides (`api_key`, `base_url`, `headers`, `session_id_header`)
/// take precedence over the enclosing `ProviderConfig` defaults; see the
/// `ProviderConfig::get_*` accessors.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ModelConfig {
    /// Model ID (e.g., "claude-sonnet-4-20250514"); required field.
    pub id: String,
    /// Display name
    #[serde(default)]
    pub name: String,
    /// Model family (e.g., "claude-sonnet")
    #[serde(default)]
    pub family: String,
    /// Per-model API key override
    #[serde(default)]
    pub api_key: Option<String>,
    /// Per-model base URL override
    #[serde(default)]
    pub base_url: Option<String>,
    /// Static HTTP headers for this model
    #[serde(default)]
    pub headers: HashMap<String, String>,
    /// Header name to receive the runtime session ID
    #[serde(default)]
    pub session_id_header: Option<String>,
    /// Supports file attachments
    #[serde(default)]
    pub attachment: bool,
    /// Supports reasoning/thinking (gates `thinking_budget` in `apply_model_caps`)
    #[serde(default)]
    pub reasoning: bool,
    /// Supports tool calling (defaults to true when omitted)
    #[serde(default = "default_true")]
    pub tool_call: bool,
    /// Supports temperature setting (defaults to true; when false, the LLM
    /// client must omit the temperature parameter — see `apply_model_caps`)
    #[serde(default = "default_true")]
    pub temperature: bool,
    /// Release date
    #[serde(default)]
    pub release_date: Option<String>,
    /// Input/output modalities
    #[serde(default)]
    pub modalities: ModelModalities,
    /// Cost information
    #[serde(default)]
    pub cost: ModelCost,
    /// Token limits
    #[serde(default)]
    pub limit: ModelLimit,
}
112
/// serde default helper: fields like `tool_call`/`temperature` default to true.
fn default_true() -> bool {
    true
}
116
/// Provider configuration.
///
/// Holds provider-wide defaults that individual `ModelConfig` entries may
/// override (see `get_api_key` / `get_base_url` / `get_headers`).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProviderConfig {
    /// Provider name (e.g., "anthropic", "openai"); required field.
    pub name: String,
    /// API key for this provider
    #[serde(default)]
    pub api_key: Option<String>,
    /// Base URL for the API
    #[serde(default)]
    pub base_url: Option<String>,
    /// Static HTTP headers for this provider
    #[serde(default)]
    pub headers: HashMap<String, String>,
    /// Header name to receive the runtime session ID
    #[serde(default)]
    pub session_id_header: Option<String>,
    /// Available models
    #[serde(default)]
    pub models: Vec<ModelConfig>,
}
139
140/// Apply model capability flags to an LlmConfig.
141///
142/// - `temperature = false` → omit temperature (model ignores it, e.g. o1)
143/// - `reasoning = true` + `thinking_budget` set → pass budget to client
144/// - `limit.output > 0` → use as max_tokens
145fn apply_model_caps(
146    mut config: LlmConfig,
147    model: &ModelConfig,
148    thinking_budget: Option<usize>,
149) -> LlmConfig {
150    // reasoning=true + thinking_budget set → pass budget to client (Anthropic only)
151    if model.reasoning {
152        if let Some(budget) = thinking_budget {
153            config = config.with_thinking_budget(budget);
154        }
155    }
156
157    // limit.output > 0 → use as max_tokens cap
158    if model.limit.output > 0 {
159        config = config.with_max_tokens(model.limit.output as usize);
160    }
161
162    // temperature=false models (e.g. o1) must not receive a temperature param.
163    // Store the flag so the LLM client can gate it at call time.
164    if !model.temperature {
165        config.disable_temperature = true;
166    }
167
168    config
169}
170
171impl ProviderConfig {
172    /// Find a model by ID
173    pub fn find_model(&self, model_id: &str) -> Option<&ModelConfig> {
174        self.models.iter().find(|m| m.id == model_id)
175    }
176
177    /// Get the effective API key for a model (model override or provider default)
178    pub fn get_api_key<'a>(&'a self, model: &'a ModelConfig) -> Option<&'a str> {
179        model.api_key.as_deref().or(self.api_key.as_deref())
180    }
181
182    /// Get the effective base URL for a model (model override or provider default)
183    pub fn get_base_url<'a>(&'a self, model: &'a ModelConfig) -> Option<&'a str> {
184        model.base_url.as_deref().or(self.base_url.as_deref())
185    }
186
187    /// Get the effective static headers for a model (provider defaults with model overrides)
188    pub fn get_headers(&self, model: &ModelConfig) -> HashMap<String, String> {
189        let mut headers = self.headers.clone();
190        headers.extend(model.headers.clone());
191        headers
192    }
193
194    /// Get the header name that should carry the runtime session ID.
195    pub fn get_session_id_header<'a>(&'a self, model: &'a ModelConfig) -> Option<&'a str> {
196        model
197            .session_id_header
198            .as_deref()
199            .or(self.session_id_header.as_deref())
200    }
201}
202
203// ============================================================================
204// Storage Configuration
205// ============================================================================
206
/// Session storage backend type.
///
/// Serialized as a lowercase string ("memory", "file", "custom").
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum StorageBackend {
    /// In-memory storage (no persistence)
    Memory,
    /// File-based storage (JSON files); the default backend.
    #[default]
    File,
    /// Custom external storage (Redis, PostgreSQL, etc.)
    ///
    /// Requires a `SessionStore` implementation registered via `SessionManager::with_store()`.
    /// Use `storage_url` in config to pass connection details.
    Custom,
}
222
223// ============================================================================
224// Main Configuration
225// ============================================================================
226
/// Configuration for A3S Code.
///
/// Field keys serialize as camelCase; the `alias` attributes additionally
/// accept the snake_case spelling on input, so both `defaultModel` and
/// `default_model` deserialize.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct CodeConfig {
    /// Default model in "provider/model" format (e.g., "anthropic/claude-sonnet-4-20250514")
    #[serde(default, alias = "default_model")]
    pub default_model: Option<String>,

    /// Provider configurations
    #[serde(default)]
    pub providers: Vec<ProviderConfig>,

    /// Session storage backend
    #[serde(default)]
    pub storage_backend: StorageBackend,

    /// Sessions directory (for file backend)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sessions_dir: Option<PathBuf>,

    /// Connection URL for custom storage backend (e.g., "redis://localhost:6379", "postgres://user:pass@localhost/a3s")
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub storage_url: Option<String>,

    /// Directories to scan for skill files (*.md with tool definitions)
    #[serde(default, alias = "skill_dirs")]
    pub skill_dirs: Vec<PathBuf>,

    /// Directories to scan for agent files (*.yaml or *.md)
    #[serde(default, alias = "agent_dirs")]
    pub agent_dirs: Vec<PathBuf>,

    /// Maximum tool execution rounds per turn (default: 25)
    // NOTE(review): the "default: 25" lives elsewhere — this field is None when
    // unset; confirm the consumer applies 25.
    #[serde(default, alias = "max_tool_rounds")]
    pub max_tool_rounds: Option<usize>,

    /// Thinking/reasoning budget in tokens; forwarded to reasoning-capable
    /// models by `apply_model_caps`.
    #[serde(default, alias = "thinking_budget")]
    pub thinking_budget: Option<usize>,

    /// Memory system configuration
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub memory: Option<MemoryConfig>,

    /// Queue configuration (a3s-lane integration)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub queue: Option<crate::queue::SessionQueueConfig>,

    /// Search configuration (a3s-search integration)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub search: Option<SearchConfig>,

    /// Agentic search tool configuration.
    #[serde(
        default,
        alias = "agentic_search",
        skip_serializing_if = "Option::is_none"
    )]
    pub agentic_search: Option<AgenticSearchConfig>,

    /// Agentic parse tool configuration.
    #[serde(
        default,
        alias = "agentic_parse",
        skip_serializing_if = "Option::is_none"
    )]
    pub agentic_parse: Option<AgenticParseConfig>,

    /// Built-in document context extraction configuration.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub document_parser: Option<DocumentParserConfig>,

    /// MCP server configurations
    #[serde(default, alias = "mcp_servers")]
    pub mcp_servers: Vec<crate::mcp::McpServerConfig>,
}
303
/// Search engine configuration (a3s-search integration).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchConfig {
    /// Default timeout in seconds for all engines (default: 10).
    #[serde(default = "default_search_timeout")]
    pub timeout: u64,

    /// Health monitor configuration
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub health: Option<SearchHealthConfig>,

    /// Engine configurations, keyed by engine name.
    /// Serialized under the key "engine" (singular) rather than "engines".
    #[serde(default, rename = "engine")]
    pub engines: std::collections::HashMap<String, SearchEngineConfig>,

    /// Headless browser configuration for JS-rendered engines (google, baidu, bing_cn).
    /// When enabled, the browser binary is auto-detected or downloaded.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub headless: Option<HeadlessConfig>,
}
325
/// Headless browser backend selection.
///
/// Serialized as a lowercase string ("chrome", "lightpanda").
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum BrowserBackend {
    /// Chrome/Chromium headless. Auto-detected or downloaded from Google.
    Chrome,
    /// Lightpanda headless browser. Auto-detected or downloaded from GitHub.
    /// Supported on Linux and macOS only.
    Lightpanda,
}
336
337#[allow(clippy::derivable_impls)]
338impl Default for BrowserBackend {
339    fn default() -> Self {
340        BrowserBackend::Chrome
341    }
342}
343
/// Headless browser configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HeadlessConfig {
    /// Which headless backend to use (default: Chrome).
    #[serde(default)]
    pub backend: BrowserBackend,

    /// Path to the browser executable. If None, auto-detected or downloaded.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub browser_path: Option<String>,

    /// Maximum number of concurrent browser tabs (default: 4).
    #[serde(default = "default_headless_max_tabs")]
    pub max_tabs: usize,

    /// Additional launch arguments for the browser.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub launch_args: Vec<String>,
}
364
365impl Default for HeadlessConfig {
366    fn default() -> Self {
367        Self {
368            backend: BrowserBackend::default(),
369            browser_path: None,
370            max_tabs: 4,
371            launch_args: Vec::new(),
372        }
373    }
374}
375
/// Default configuration for the built-in `agentic_search` tool.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AgenticSearchConfig {
    /// Whether the tool is registered by default (default: true).
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Default search mode when tool input omits `mode`.
    /// Canonical values: "fast" (default), "deep", "filename_only" — see `normalized`.
    #[serde(default = "default_agentic_search_mode")]
    pub default_mode: String,

    /// Default max results when tool input omits `max_results` (default: 10).
    #[serde(default = "default_agentic_search_max_results")]
    pub max_results: usize,

    /// Default context lines when tool input omits `context_lines` (default: 2).
    #[serde(default = "default_agentic_search_context_lines")]
    pub context_lines: usize,
}
396
impl Default for AgenticSearchConfig {
    /// Matches the serde field defaults: enabled, "fast" mode, 10 results,
    /// 2 context lines.
    fn default() -> Self {
        Self {
            enabled: true,
            default_mode: default_agentic_search_mode(),
            max_results: default_agentic_search_max_results(),
            context_lines: default_agentic_search_context_lines(),
        }
    }
}
407
408impl AgenticSearchConfig {
409    pub fn normalized(&self) -> Self {
410        let default_mode = match self.default_mode.to_ascii_lowercase().as_str() {
411            "fast" => "fast".to_string(),
412            "deep" => "deep".to_string(),
413            "filename_only" | "filename" => "filename_only".to_string(),
414            _ => default_agentic_search_mode(),
415        };
416
417        Self {
418            enabled: self.enabled,
419            default_mode,
420            max_results: self.max_results.clamp(1, 100),
421            context_lines: self.context_lines.min(20),
422        }
423    }
424}
425
/// Default configuration for the built-in `agentic_parse` tool.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AgenticParseConfig {
    /// Whether the tool is registered by default (default: true).
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Default parse strategy when tool input omits `strategy`.
    /// Canonical values: "auto" (default), "structured", "narrative",
    /// "tabular", "code" — see `normalized`.
    #[serde(default = "default_agentic_parse_strategy")]
    pub default_strategy: String,

    /// Default maximum characters sent to the LLM when tool input omits
    /// `max_chars` (default: 8000).
    #[serde(default = "default_agentic_parse_max_chars")]
    pub max_chars: usize,
}
442
impl Default for AgenticParseConfig {
    /// Matches the serde field defaults: enabled, "auto" strategy, 8000 chars.
    fn default() -> Self {
        Self {
            enabled: true,
            default_strategy: default_agentic_parse_strategy(),
            max_chars: default_agentic_parse_max_chars(),
        }
    }
}
452
453impl AgenticParseConfig {
454    pub fn normalized(&self) -> Self {
455        let default_strategy = match self.default_strategy.to_ascii_lowercase().as_str() {
456            "auto" => "auto".to_string(),
457            "structured" => "structured".to_string(),
458            "narrative" => "narrative".to_string(),
459            "tabular" => "tabular".to_string(),
460            "code" => "code".to_string(),
461            _ => default_agentic_parse_strategy(),
462        };
463
464        Self {
465            enabled: self.enabled,
466            default_strategy,
467            max_chars: self.max_chars.clamp(500, 200_000),
468        }
469    }
470}
471
/// Default configuration for built-in document context extraction.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DocumentParserConfig {
    /// Whether the default document extraction stack is registered in the
    /// parser registry (default: true).
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Maximum file size accepted by the parser, in MiB (default: 50;
    /// clamped to 1..=1024 by `normalized`).
    #[serde(default = "default_document_parser_max_file_size_mb")]
    pub max_file_size_mb: u64,

    /// Optional OCR / vision-model settings for image-heavy documents.
    ///
    /// These settings control OCR fallback when context extraction reaches
    /// scanned or image-heavy inputs. Current parsers may not execute OCR for
    /// every format.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ocr: Option<DocumentOcrConfig>,

    /// Optional cache settings for parsed / normalized document context.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub cache: Option<DocumentCacheConfig>,
}
496
impl Default for DocumentParserConfig {
    /// Enabled with caching on by default; OCR is opt-in (None).
    ///
    /// NOTE(review): this differs from a missing config block deserialized via
    /// serde, where `cache` would be None rather than Some(default) — confirm
    /// which of the two the caller relies on.
    fn default() -> Self {
        Self {
            enabled: true,
            max_file_size_mb: default_document_parser_max_file_size_mb(),
            ocr: None,
            cache: Some(DocumentCacheConfig::default()),
        }
    }
}
507
508impl DocumentParserConfig {
509    pub fn normalized(&self) -> Self {
510        Self {
511            enabled: self.enabled,
512            max_file_size_mb: self.max_file_size_mb.clamp(1, 1024),
513            ocr: self.ocr.as_ref().map(DocumentOcrConfig::normalized),
514            cache: self.cache.as_ref().map(DocumentCacheConfig::normalized),
515        }
516    }
517}
518
/// Cache settings for parsed / normalized document context.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DocumentCacheConfig {
    /// Whether the document cache is used (default: true).
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Cache directory override; None means an implementation-chosen location
    /// (not determined here — resolved by the cache consumer).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub directory: Option<PathBuf>,
}
528
impl Default for DocumentCacheConfig {
    /// Caching enabled, directory left to the consumer to resolve.
    fn default() -> Self {
        Self {
            enabled: true,
            directory: None,
        }
    }
}
537
538impl DocumentCacheConfig {
539    pub fn normalized(&self) -> Self {
540        Self {
541            enabled: self.enabled,
542            directory: self.directory.clone(),
543        }
544    }
545}
546
/// OCR / vision-model configuration for built-in document context extraction.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DocumentOcrConfig {
    /// Whether OCR fallback is enabled for image-heavy documents.
    /// Defaults to true when the block is present but the key omitted;
    /// `DocumentOcrConfig::default()` uses false — see that impl.
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Vision-capable model identifier, for example `openai/gpt-4.1-mini`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub model: Option<String>,

    /// Optional custom OCR prompt / extraction instruction.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prompt: Option<String>,

    /// Maximum number of rendered images/pages to send for OCR fallback
    /// (default: 8; clamped to 1..=64 by `normalized`).
    #[serde(default = "default_document_ocr_max_images")]
    pub max_images: usize,

    /// Render DPI when rasterizing pages for OCR fallback
    /// (default: 144; clamped to 72..=600 by `normalized`).
    #[serde(default = "default_document_ocr_dpi")]
    pub dpi: u32,

    /// OCR provider backend. Defaults to "vision" when model is set.
    /// "vision" - Vision API (OpenAI-compatible)
    /// "builtin" - Local tesseract (requires tesseract + pdftoppm binaries)
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,

    /// Base URL for vision API. Defaults to OpenAI API if not set.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub base_url: Option<String>,

    /// API key for vision API.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub api_key: Option<String>,
}
585
impl Default for DocumentOcrConfig {
    /// OCR is deliberately opt-in: `enabled` defaults to false here, unlike
    /// the serde field default (`default_enabled` = true) used when an `ocr`
    /// block is present but omits the key.
    fn default() -> Self {
        Self {
            enabled: false,
            model: None,
            prompt: None,
            max_images: default_document_ocr_max_images(),
            dpi: default_document_ocr_dpi(),
            provider: None,
            base_url: None,
            api_key: None,
        }
    }
}
600
601impl DocumentOcrConfig {
602    pub fn normalized(&self) -> Self {
603        Self {
604            enabled: self.enabled,
605            model: self.model.clone(),
606            prompt: self.prompt.clone(),
607            max_images: self.max_images.clamp(1, 64),
608            dpi: self.dpi.clamp(72, 600),
609            provider: self.provider.clone(),
610            base_url: self.base_url.clone(),
611            api_key: self.api_key.clone(),
612        }
613    }
614}
615
/// Search health monitor configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchHealthConfig {
    /// Number of consecutive failures before suspending (default: 3).
    #[serde(default = "default_max_failures")]
    pub max_failures: u32,

    /// Suspension duration in seconds (default: 60).
    #[serde(default = "default_suspend_seconds")]
    pub suspend_seconds: u64,
}
628
/// Per-engine search configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchEngineConfig {
    /// Whether the engine is enabled (default: true).
    #[serde(default = "default_enabled")]
    pub enabled: bool,

    /// Weight for ranking (higher = more influence; default: 1.0).
    #[serde(default = "default_weight")]
    pub weight: f64,

    /// Per-engine timeout override in seconds; None falls back to
    /// `SearchConfig::timeout`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timeout: Option<u64>,
}
645
// ----------------------------------------------------------------------------
// serde default helpers — referenced by `#[serde(default = "...")]` attributes
// on the config structs above. Each returns the documented default value.
// ----------------------------------------------------------------------------

/// `SearchConfig::timeout` default (seconds).
fn default_search_timeout() -> u64 {
    10
}

/// `HeadlessConfig::max_tabs` default.
fn default_headless_max_tabs() -> usize {
    4
}

/// `SearchHealthConfig::max_failures` default.
fn default_max_failures() -> u32 {
    3
}

/// `SearchHealthConfig::suspend_seconds` default.
fn default_suspend_seconds() -> u64 {
    60
}

/// Shared `enabled` default for the tool/parser/cache configs.
fn default_enabled() -> bool {
    true
}

/// `SearchEngineConfig::weight` default.
fn default_weight() -> f64 {
    1.0
}

/// `AgenticSearchConfig::default_mode` default.
fn default_agentic_search_mode() -> String {
    "fast".to_string()
}

/// `AgenticSearchConfig::max_results` default.
fn default_agentic_search_max_results() -> usize {
    10
}

/// `AgenticSearchConfig::context_lines` default.
fn default_agentic_search_context_lines() -> usize {
    2
}

/// `AgenticParseConfig::default_strategy` default.
fn default_agentic_parse_strategy() -> String {
    "auto".to_string()
}

/// `AgenticParseConfig::max_chars` default.
fn default_agentic_parse_max_chars() -> usize {
    8000
}

/// `DocumentParserConfig::max_file_size_mb` default (MiB).
fn default_document_parser_max_file_size_mb() -> u64 {
    50
}

/// `DocumentOcrConfig::max_images` default.
fn default_document_ocr_max_images() -> usize {
    8
}

/// `DocumentOcrConfig::dpi` default.
fn default_document_ocr_dpi() -> u32 {
    144
}
701
702impl CodeConfig {
    /// Create a new empty configuration; equivalent to `CodeConfig::default()`.
    pub fn new() -> Self {
        Self::default()
    }
707
    /// Load configuration from a config file on disk.
    ///
    /// The file contents are parsed with [`CodeConfig::from_acl`] (ACL, an
    /// HCL-like syntax). JSON is not accepted. The extension is not checked
    /// here — any readable file is handed to the ACL parser.
    ///
    /// # Errors
    /// Returns `CodeError::Config` when the file cannot be read or its
    /// contents fail to parse.
    pub fn from_file(path: &Path) -> Result<Self> {
        let content = std::fs::read_to_string(path).map_err(|e| {
            CodeError::Config(format!(
                "Failed to read config file {}: {}",
                path.display(),
                e
            ))
        })?;

        Self::from_acl(&content).map_err(|e| {
            CodeError::Config(format!(
                "Failed to parse ACL config {}: {}",
                path.display(),
                e
            ))
        })
    }
728
    /// Parse configuration from an ACL string.
    ///
    /// ACL (Agent Configuration Language) is similar to HCL but uses labeled blocks
    /// like `providers "openai" { }` instead of `providers { name = "openai" }`.
    ///
    /// Only `default_model` and `providers` (with nested `models`) blocks are
    /// handled; all other top-level blocks are silently ignored (queue, search,
    /// etc. are HCL-only per the original comment). Unknown attributes and
    /// non-string values for recognized keys are likewise silently dropped.
    ///
    /// # Errors
    /// Returns `CodeError::Config` when parsing fails or a `providers` /
    /// `models` block is missing its required label.
    pub fn from_acl(content: &str) -> Result<Self> {
        use a3s_acl::{parse_acl, Value as AclValue};

        let doc = parse_acl(content)
            .map_err(|e| CodeError::Config(format!("Failed to parse ACL: {}", e)))?;

        let mut config = Self::default();

        // NOTE(review): only `doc.blocks` is scanned. This assumes the ACL
        // parser surfaces top-level `default_model = "..."` as a block; confirm
        // a plain document-level attribute is not dropped.
        for block in doc.blocks {
            match block.name.as_str() {
                "default_model" => {
                    // ACL: default_model = "openai/gpt-4" or just "openai/gpt-4" as label
                    if let Some(v) = block.attributes.get("default_model") {
                        if let AclValue::String(s) = v {
                            config.default_model = Some(s.clone());
                        }
                    } else if let Some(s) = block.labels.first() {
                        config.default_model = Some(s.clone());
                    }
                }
                "providers" => {
                    // ACL: providers "name" { ... }
                    // HCL: providers { name = "name" }
                    let provider_name = block.labels.first().cloned().ok_or_else(|| {
                        CodeError::Config(
                            "providers block requires a label (e.g., providers \"openai\")".into(),
                        )
                    })?;

                    // Start from an empty provider; only label + recognized
                    // attributes below are populated from the document.
                    let mut provider = ProviderConfig {
                        name: provider_name.clone(),
                        api_key: None,
                        base_url: None,
                        headers: HashMap::new(),
                        session_id_header: None,
                        models: Vec::new(),
                    };

                    // Both camelCase and snake_case spellings are accepted.
                    for (key, value) in &block.attributes {
                        match key.as_str() {
                            "apiKey" | "api_key" => {
                                if let AclValue::String(s) = value {
                                    provider.api_key = Some(s.clone());
                                }
                            }
                            "baseUrl" | "base_url" => {
                                if let AclValue::String(s) = value {
                                    provider.base_url = Some(s.clone());
                                }
                            }
                            _ => {}
                        }
                    }

                    // Process nested models blocks
                    for model_block in &block.blocks {
                        if model_block.name == "models" {
                            let model_name =
                                model_block.labels.first().cloned().ok_or_else(|| {
                                    CodeError::Config(
                                        "models block requires a label (e.g., models \"gpt-4\")"
                                            .into(),
                                    )
                                })?;

                            // The label doubles as both id and display name
                            // until an explicit `name` attribute overrides it.
                            let mut model = ModelConfig {
                                id: model_name.clone(),
                                name: model_name.clone(),
                                family: String::new(),
                                api_key: None,
                                base_url: None,
                                headers: HashMap::new(),
                                session_id_header: None,
                                attachment: false,
                                reasoning: false,
                                tool_call: true,
                                temperature: true,
                                release_date: None,
                                modalities: ModelModalities::default(),
                                cost: ModelCost::default(),
                                limit: ModelLimit::default(),
                            };

                            for (key, value) in &model_block.attributes {
                                match key.as_str() {
                                    "name" => {
                                        if let AclValue::String(s) = value {
                                            model.name = s.clone();
                                        }
                                    }
                                    "apiKey" | "api_key" => {
                                        if let AclValue::String(s) = value {
                                            model.api_key = Some(s.clone());
                                        }
                                    }
                                    "baseUrl" | "base_url" => {
                                        if let AclValue::String(s) = value {
                                            model.base_url = Some(s.clone());
                                        }
                                    }
                                    _ => {}
                                }
                            }

                            provider.models.push(model);
                        }
                    }

                    config.providers.push(provider);
                }
                _ => {
                    // Other top-level blocks are not supported in ACL format for now
                    // (queue, search, etc. are HCL-only)
                }
            }
        }

        Ok(config)
    }
852
853    /// Save configuration to a JSON file (used for persistence)
854    ///
855    /// Note: This saves as JSON format. To use HCL format, manually create .hcl files.
856    pub fn save_to_file(&self, path: &Path) -> Result<()> {
857        if let Some(parent) = path.parent() {
858            std::fs::create_dir_all(parent).map_err(|e| {
859                CodeError::Config(format!(
860                    "Failed to create config directory {}: {}",
861                    parent.display(),
862                    e
863                ))
864            })?;
865        }
866
867        let content = serde_json::to_string_pretty(self)
868            .map_err(|e| CodeError::Config(format!("Failed to serialize config: {}", e)))?;
869
870        std::fs::write(path, content).map_err(|e| {
871            CodeError::Config(format!(
872                "Failed to write config file {}: {}",
873                path.display(),
874                e
875            ))
876        })?;
877
878        Ok(())
879    }
880
881    /// Find a provider by name
882    pub fn find_provider(&self, name: &str) -> Option<&ProviderConfig> {
883        self.providers.iter().find(|p| p.name == name)
884    }
885
886    /// Get the default provider configuration (parsed from `default_model` "provider/model" format)
887    pub fn default_provider_config(&self) -> Option<&ProviderConfig> {
888        let default = self.default_model.as_ref()?;
889        let (provider_name, _) = default.split_once('/')?;
890        self.find_provider(provider_name)
891    }
892
893    /// Get the default model configuration (parsed from `default_model` "provider/model" format)
894    pub fn default_model_config(&self) -> Option<(&ProviderConfig, &ModelConfig)> {
895        let default = self.default_model.as_ref()?;
896        let (provider_name, model_id) = default.split_once('/')?;
897        let provider = self.find_provider(provider_name)?;
898        let model = provider.find_model(model_id)?;
899        Some((provider, model))
900    }
901
902    /// Get LlmConfig for the default provider and model
903    ///
904    /// Returns None if default provider/model is not configured or API key is missing.
905    pub fn default_llm_config(&self) -> Option<LlmConfig> {
906        let (provider, model) = self.default_model_config()?;
907        let api_key = provider.get_api_key(model)?;
908        let base_url = provider.get_base_url(model);
909        let headers = provider.get_headers(model);
910        let session_id_header = provider.get_session_id_header(model);
911
912        let mut config = LlmConfig::new(&provider.name, &model.id, api_key);
913        if let Some(url) = base_url {
914            config = config.with_base_url(url);
915        }
916        if !headers.is_empty() {
917            config = config.with_headers(headers);
918        }
919        if let Some(header_name) = session_id_header {
920            config = config.with_session_id_header(header_name);
921        }
922        config = apply_model_caps(config, model, self.thinking_budget);
923        Some(config)
924    }
925
926    /// Get LlmConfig for a specific provider and model
927    ///
928    /// Returns None if provider/model is not found or API key is missing.
929    pub fn llm_config(&self, provider_name: &str, model_id: &str) -> Option<LlmConfig> {
930        let provider = self.find_provider(provider_name)?;
931        let model = provider.find_model(model_id)?;
932        let api_key = provider.get_api_key(model)?;
933        let base_url = provider.get_base_url(model);
934        let headers = provider.get_headers(model);
935        let session_id_header = provider.get_session_id_header(model);
936
937        let mut config = LlmConfig::new(&provider.name, &model.id, api_key);
938        if let Some(url) = base_url {
939            config = config.with_base_url(url);
940        }
941        if !headers.is_empty() {
942            config = config.with_headers(headers);
943        }
944        if let Some(header_name) = session_id_header {
945            config = config.with_session_id_header(header_name);
946        }
947        config = apply_model_caps(config, model, self.thinking_budget);
948        Some(config)
949    }
950
951    /// List all available models across all providers
952    pub fn list_models(&self) -> Vec<(&ProviderConfig, &ModelConfig)> {
953        self.providers
954            .iter()
955            .flat_map(|p| p.models.iter().map(move |m| (p, m)))
956            .collect()
957    }
958
959    /// Add a skill directory
960    pub fn add_skill_dir(mut self, dir: impl Into<PathBuf>) -> Self {
961        self.skill_dirs.push(dir.into());
962        self
963    }
964
965    /// Add an agent directory
966    pub fn add_agent_dir(mut self, dir: impl Into<PathBuf>) -> Self {
967        self.agent_dirs.push(dir.into());
968        self
969    }
970
971    /// Check if any directories are configured
972    pub fn has_directories(&self) -> bool {
973        !self.skill_dirs.is_empty() || !self.agent_dirs.is_empty()
974    }
975
976    /// Check if provider configuration is available
977    pub fn has_providers(&self) -> bool {
978        !self.providers.is_empty()
979    }
980}
981
982// ============================================================================
983// ACL Parsing Helpers
984// ============================================================================
985
986#[cfg(test)]
987mod tests {
988    use super::*;
989
990    #[test]
991    fn test_config_default() {
992        let config = CodeConfig::default();
993        assert!(config.skill_dirs.is_empty());
994        assert!(config.agent_dirs.is_empty());
995        assert!(config.providers.is_empty());
996        assert!(config.default_model.is_none());
997        assert_eq!(config.storage_backend, StorageBackend::File);
998        assert!(config.sessions_dir.is_none());
999    }
1000
1001    #[test]
1002    fn test_storage_backend_default() {
1003        let backend = StorageBackend::default();
1004        assert_eq!(backend, StorageBackend::File);
1005    }
1006
1007    #[test]
1008    fn test_storage_backend_serde() {
1009        // Test serialization
1010        let memory = StorageBackend::Memory;
1011        let json = serde_json::to_string(&memory).unwrap();
1012        assert_eq!(json, "\"memory\"");
1013
1014        let file = StorageBackend::File;
1015        let json = serde_json::to_string(&file).unwrap();
1016        assert_eq!(json, "\"file\"");
1017
1018        // Test deserialization
1019        let memory: StorageBackend = serde_json::from_str("\"memory\"").unwrap();
1020        assert_eq!(memory, StorageBackend::Memory);
1021
1022        let file: StorageBackend = serde_json::from_str("\"file\"").unwrap();
1023        assert_eq!(file, StorageBackend::File);
1024    }
1025
1026    #[test]
1027    fn test_config_with_storage_backend() {
1028        let temp_dir = tempfile::tempdir().unwrap();
1029        let config_path = temp_dir.path().join("config.acl");
1030
1031        std::fs::write(
1032            &config_path,
1033            r#"
1034                storage_backend = "memory"
1035                sessions_dir = "/tmp/sessions"
1036            "#,
1037        )
1038        .unwrap();
1039
1040        let config = CodeConfig::from_file(&config_path).unwrap();
1041        assert_eq!(config.storage_backend, StorageBackend::Memory);
1042        assert_eq!(config.sessions_dir, Some(PathBuf::from("/tmp/sessions")));
1043    }
1044
1045    #[test]
1046    fn test_config_builder() {
1047        let config = CodeConfig::new()
1048            .add_skill_dir("/tmp/skills")
1049            .add_agent_dir("/tmp/agents");
1050
1051        assert_eq!(config.skill_dirs.len(), 1);
1052        assert_eq!(config.agent_dirs.len(), 1);
1053    }
1054
1055    #[test]
1056    fn test_find_provider() {
1057        let config = CodeConfig {
1058            providers: vec![
1059                ProviderConfig {
1060                    name: "anthropic".to_string(),
1061                    api_key: Some("key1".to_string()),
1062                    base_url: None,
1063                    headers: HashMap::new(),
1064                    session_id_header: None,
1065                    models: vec![],
1066                },
1067                ProviderConfig {
1068                    name: "openai".to_string(),
1069                    api_key: Some("key2".to_string()),
1070                    base_url: None,
1071                    headers: HashMap::new(),
1072                    session_id_header: None,
1073                    models: vec![],
1074                },
1075            ],
1076            ..Default::default()
1077        };
1078
1079        assert!(config.find_provider("anthropic").is_some());
1080        assert!(config.find_provider("openai").is_some());
1081        assert!(config.find_provider("unknown").is_none());
1082    }
1083
    #[test]
    fn test_default_llm_config() {
        // End-to-end: the "anthropic/claude-sonnet-4" default must resolve
        // against the provider list and yield an LlmConfig carrying the
        // provider-level API key and base URL.
        let config = CodeConfig {
            default_model: Some("anthropic/claude-sonnet-4".to_string()),
            providers: vec![ProviderConfig {
                name: "anthropic".to_string(),
                api_key: Some("test-api-key".to_string()),
                base_url: Some("https://api.anthropic.com".to_string()),
                headers: HashMap::new(),
                session_id_header: None,
                models: vec![ModelConfig {
                    id: "claude-sonnet-4".to_string(),
                    name: "Claude Sonnet 4".to_string(),
                    family: "claude-sonnet".to_string(),
                    api_key: None,
                    base_url: None,
                    headers: HashMap::new(),
                    session_id_header: None,
                    attachment: false,
                    reasoning: false,
                    tool_call: true,
                    temperature: true,
                    release_date: None,
                    modalities: ModelModalities::default(),
                    cost: ModelCost::default(),
                    limit: ModelLimit::default(),
                }],
            }],
            ..Default::default()
        };

        let llm_config = config.default_llm_config().unwrap();
        assert_eq!(llm_config.provider, "anthropic");
        assert_eq!(llm_config.model, "claude-sonnet-4");
        // api_key is held behind a wrapper type; expose() returns the inner value.
        assert_eq!(llm_config.api_key.expose(), "test-api-key");
        assert_eq!(
            llm_config.base_url,
            Some("https://api.anthropic.com".to_string())
        );
    }
1124
    #[test]
    fn test_model_api_key_override() {
        // A model-level api_key/base_url takes precedence over the provider's;
        // models without overrides fall back to the provider-level values.
        let provider = ProviderConfig {
            name: "openai".to_string(),
            api_key: Some("provider-key".to_string()),
            base_url: Some("https://api.openai.com".to_string()),
            headers: HashMap::new(),
            session_id_header: None,
            models: vec![
                ModelConfig {
                    id: "gpt-4".to_string(),
                    name: "GPT-4".to_string(),
                    family: "gpt".to_string(),
                    api_key: None, // Uses provider key
                    base_url: None,
                    headers: HashMap::new(),
                    session_id_header: None,
                    attachment: false,
                    reasoning: false,
                    tool_call: true,
                    temperature: true,
                    release_date: None,
                    modalities: ModelModalities::default(),
                    cost: ModelCost::default(),
                    limit: ModelLimit::default(),
                },
                ModelConfig {
                    id: "custom-model".to_string(),
                    name: "Custom Model".to_string(),
                    family: "custom".to_string(),
                    api_key: Some("model-specific-key".to_string()), // Override
                    base_url: Some("https://custom.api.com".to_string()), // Override
                    headers: HashMap::new(),
                    session_id_header: None,
                    attachment: false,
                    reasoning: false,
                    tool_call: true,
                    temperature: true,
                    release_date: None,
                    modalities: ModelModalities::default(),
                    cost: ModelCost::default(),
                    limit: ModelLimit::default(),
                },
            ],
        };

        // Model without override uses provider key
        let model1 = provider.find_model("gpt-4").unwrap();
        assert_eq!(provider.get_api_key(model1), Some("provider-key"));
        assert_eq!(
            provider.get_base_url(model1),
            Some("https://api.openai.com")
        );

        // Model with override uses its own key
        let model2 = provider.find_model("custom-model").unwrap();
        assert_eq!(provider.get_api_key(model2), Some("model-specific-key"));
        assert_eq!(
            provider.get_base_url(model2),
            Some("https://custom.api.com")
        );
    }
1187
1188    #[test]
1189    fn test_list_models() {
1190        let config = CodeConfig {
1191            providers: vec![
1192                ProviderConfig {
1193                    name: "anthropic".to_string(),
1194                    api_key: None,
1195                    base_url: None,
1196                    headers: HashMap::new(),
1197                    session_id_header: None,
1198                    models: vec![
1199                        ModelConfig {
1200                            id: "claude-1".to_string(),
1201                            name: "Claude 1".to_string(),
1202                            family: "claude".to_string(),
1203                            api_key: None,
1204                            base_url: None,
1205                            headers: HashMap::new(),
1206                            session_id_header: None,
1207                            attachment: false,
1208                            reasoning: false,
1209                            tool_call: true,
1210                            temperature: true,
1211                            release_date: None,
1212                            modalities: ModelModalities::default(),
1213                            cost: ModelCost::default(),
1214                            limit: ModelLimit::default(),
1215                        },
1216                        ModelConfig {
1217                            id: "claude-2".to_string(),
1218                            name: "Claude 2".to_string(),
1219                            family: "claude".to_string(),
1220                            api_key: None,
1221                            base_url: None,
1222                            headers: HashMap::new(),
1223                            session_id_header: None,
1224                            attachment: false,
1225                            reasoning: false,
1226                            tool_call: true,
1227                            temperature: true,
1228                            release_date: None,
1229                            modalities: ModelModalities::default(),
1230                            cost: ModelCost::default(),
1231                            limit: ModelLimit::default(),
1232                        },
1233                    ],
1234                },
1235                ProviderConfig {
1236                    name: "openai".to_string(),
1237                    api_key: None,
1238                    base_url: None,
1239                    headers: HashMap::new(),
1240                    session_id_header: None,
1241                    models: vec![ModelConfig {
1242                        id: "gpt-4".to_string(),
1243                        name: "GPT-4".to_string(),
1244                        family: "gpt".to_string(),
1245                        api_key: None,
1246                        base_url: None,
1247                        headers: HashMap::new(),
1248                        session_id_header: None,
1249                        attachment: false,
1250                        reasoning: false,
1251                        tool_call: true,
1252                        temperature: true,
1253                        release_date: None,
1254                        modalities: ModelModalities::default(),
1255                        cost: ModelCost::default(),
1256                        limit: ModelLimit::default(),
1257                    }],
1258                },
1259            ],
1260            ..Default::default()
1261        };
1262
1263        let models = config.list_models();
1264        assert_eq!(models.len(), 3);
1265    }
1266
1267    #[test]
1268    fn test_config_from_file_not_found() {
1269        let result = CodeConfig::from_file(Path::new("/nonexistent/config.json"));
1270        assert!(result.is_err());
1271    }
1272
1273    #[test]
1274    fn test_config_has_directories() {
1275        let empty = CodeConfig::default();
1276        assert!(!empty.has_directories());
1277
1278        let with_skills = CodeConfig::new().add_skill_dir("/tmp/skills");
1279        assert!(with_skills.has_directories());
1280
1281        let with_agents = CodeConfig::new().add_agent_dir("/tmp/agents");
1282        assert!(with_agents.has_directories());
1283    }
1284
1285    #[test]
1286    fn test_config_has_providers() {
1287        let empty = CodeConfig::default();
1288        assert!(!empty.has_providers());
1289
1290        let with_providers = CodeConfig {
1291            providers: vec![ProviderConfig {
1292                name: "test".to_string(),
1293                api_key: None,
1294                base_url: None,
1295                headers: HashMap::new(),
1296                session_id_header: None,
1297                models: vec![],
1298            }],
1299            ..Default::default()
1300        };
1301        assert!(with_providers.has_providers());
1302    }
1303
1304    #[test]
1305    fn test_storage_backend_equality() {
1306        assert_eq!(StorageBackend::Memory, StorageBackend::Memory);
1307        assert_eq!(StorageBackend::File, StorageBackend::File);
1308        assert_ne!(StorageBackend::Memory, StorageBackend::File);
1309    }
1310
1311    #[test]
1312    fn test_storage_backend_serde_custom() {
1313        let custom = StorageBackend::Custom;
1314        // Custom variant is now serializable
1315        let json = serde_json::to_string(&custom).unwrap();
1316        assert_eq!(json, "\"custom\"");
1317
1318        // And deserializable
1319        let parsed: StorageBackend = serde_json::from_str("\"custom\"").unwrap();
1320        assert_eq!(parsed, StorageBackend::Custom);
1321    }
1322
1323    #[test]
1324    fn test_model_cost_default() {
1325        let cost = ModelCost::default();
1326        assert_eq!(cost.input, 0.0);
1327        assert_eq!(cost.output, 0.0);
1328        assert_eq!(cost.cache_read, 0.0);
1329        assert_eq!(cost.cache_write, 0.0);
1330    }
1331
1332    #[test]
1333    fn test_model_cost_serialization() {
1334        let cost = ModelCost {
1335            input: 3.0,
1336            output: 15.0,
1337            cache_read: 0.3,
1338            cache_write: 3.75,
1339        };
1340        let json = serde_json::to_string(&cost).unwrap();
1341        assert!(json.contains("\"input\":3"));
1342        assert!(json.contains("\"output\":15"));
1343    }
1344
1345    #[test]
1346    fn test_model_cost_deserialization_missing_fields() {
1347        let json = r#"{"input":3.0}"#;
1348        let cost: ModelCost = serde_json::from_str(json).unwrap();
1349        assert_eq!(cost.input, 3.0);
1350        assert_eq!(cost.output, 0.0);
1351        assert_eq!(cost.cache_read, 0.0);
1352        assert_eq!(cost.cache_write, 0.0);
1353    }
1354
1355    #[test]
1356    fn test_model_limit_default() {
1357        let limit = ModelLimit::default();
1358        assert_eq!(limit.context, 0);
1359        assert_eq!(limit.output, 0);
1360    }
1361
1362    #[test]
1363    fn test_model_limit_serialization() {
1364        let limit = ModelLimit {
1365            context: 200000,
1366            output: 8192,
1367        };
1368        let json = serde_json::to_string(&limit).unwrap();
1369        assert!(json.contains("\"context\":200000"));
1370        assert!(json.contains("\"output\":8192"));
1371    }
1372
1373    #[test]
1374    fn test_model_limit_deserialization_missing_fields() {
1375        let json = r#"{"context":100000}"#;
1376        let limit: ModelLimit = serde_json::from_str(json).unwrap();
1377        assert_eq!(limit.context, 100000);
1378        assert_eq!(limit.output, 0);
1379    }
1380
1381    #[test]
1382    fn test_model_modalities_default() {
1383        let modalities = ModelModalities::default();
1384        assert!(modalities.input.is_empty());
1385        assert!(modalities.output.is_empty());
1386    }
1387
1388    #[test]
1389    fn test_model_modalities_serialization() {
1390        let modalities = ModelModalities {
1391            input: vec!["text".to_string(), "image".to_string()],
1392            output: vec!["text".to_string()],
1393        };
1394        let json = serde_json::to_string(&modalities).unwrap();
1395        assert!(json.contains("\"input\""));
1396        assert!(json.contains("\"text\""));
1397    }
1398
1399    #[test]
1400    fn test_model_modalities_deserialization_missing_fields() {
1401        let json = r#"{"input":["text"]}"#;
1402        let modalities: ModelModalities = serde_json::from_str(json).unwrap();
1403        assert_eq!(modalities.input.len(), 1);
1404        assert!(modalities.output.is_empty());
1405    }
1406
    #[test]
    fn test_model_config_serialization() {
        // A fully-populated ModelConfig serializes with its field names in the
        // output and bool flags as JSON booleans.
        let config = ModelConfig {
            id: "gpt-4o".to_string(),
            name: "GPT-4o".to_string(),
            family: "gpt-4".to_string(),
            api_key: Some("sk-test".to_string()),
            base_url: None,
            headers: HashMap::new(),
            session_id_header: None,
            attachment: true,
            reasoning: false,
            tool_call: true,
            temperature: true,
            release_date: Some("2024-05-13".to_string()),
            modalities: ModelModalities::default(),
            cost: ModelCost::default(),
            limit: ModelLimit::default(),
        };
        let json = serde_json::to_string(&config).unwrap();
        assert!(json.contains("\"id\":\"gpt-4o\""));
        assert!(json.contains("\"attachment\":true"));
    }
1430
1431    #[test]
1432    fn test_model_config_deserialization_with_defaults() {
1433        let json = r#"{"id":"test-model"}"#;
1434        let config: ModelConfig = serde_json::from_str(json).unwrap();
1435        assert_eq!(config.id, "test-model");
1436        assert_eq!(config.name, "");
1437        assert_eq!(config.family, "");
1438        assert!(config.api_key.is_none());
1439        assert!(!config.attachment);
1440        assert!(config.tool_call);
1441        assert!(config.temperature);
1442    }
1443
    #[test]
    fn test_model_config_all_optional_fields() {
        // Every optional field supplied: camelCase JSON keys (apiKey, baseUrl,
        // toolCall, releaseDate) must map onto the snake_case struct fields.
        let json = r#"{
            "id": "claude-sonnet-4",
            "name": "Claude Sonnet 4",
            "family": "claude-sonnet",
            "apiKey": "sk-test",
            "baseUrl": "https://api.anthropic.com",
            "attachment": true,
            "reasoning": true,
            "toolCall": false,
            "temperature": false,
            "releaseDate": "2025-05-14"
        }"#;
        let config: ModelConfig = serde_json::from_str(json).unwrap();
        assert_eq!(config.id, "claude-sonnet-4");
        assert_eq!(config.name, "Claude Sonnet 4");
        assert_eq!(config.api_key, Some("sk-test".to_string()));
        assert_eq!(
            config.base_url,
            Some("https://api.anthropic.com".to_string())
        );
        assert!(config.attachment);
        assert!(config.reasoning);
        // Explicit false values must override the serde defaults (which are true).
        assert!(!config.tool_call);
        assert!(!config.temperature);
    }
1471
1472    #[test]
1473    fn test_provider_config_serialization() {
1474        let provider = ProviderConfig {
1475            name: "anthropic".to_string(),
1476            api_key: Some("sk-test".to_string()),
1477            base_url: Some("https://api.anthropic.com".to_string()),
1478            headers: HashMap::new(),
1479            session_id_header: None,
1480            models: vec![],
1481        };
1482        let json = serde_json::to_string(&provider).unwrap();
1483        assert!(json.contains("\"name\":\"anthropic\""));
1484        assert!(json.contains("\"apiKey\":\"sk-test\""));
1485    }
1486
1487    #[test]
1488    fn test_provider_config_deserialization_missing_optional() {
1489        let json = r#"{"name":"openai"}"#;
1490        let provider: ProviderConfig = serde_json::from_str(json).unwrap();
1491        assert_eq!(provider.name, "openai");
1492        assert!(provider.api_key.is_none());
1493        assert!(provider.base_url.is_none());
1494        assert!(provider.models.is_empty());
1495    }
1496
    #[test]
    fn test_provider_config_find_model() {
        // find_model matches on the model `id`; unknown ids return None.
        let provider = ProviderConfig {
            name: "anthropic".to_string(),
            api_key: None,
            base_url: None,
            headers: HashMap::new(),
            session_id_header: None,
            models: vec![ModelConfig {
                id: "claude-sonnet-4".to_string(),
                name: "Claude Sonnet 4".to_string(),
                family: "claude-sonnet".to_string(),
                api_key: None,
                base_url: None,
                headers: HashMap::new(),
                session_id_header: None,
                attachment: false,
                reasoning: false,
                tool_call: true,
                temperature: true,
                release_date: None,
                modalities: ModelModalities::default(),
                cost: ModelCost::default(),
                limit: ModelLimit::default(),
            }],
        };

        let found = provider.find_model("claude-sonnet-4");
        assert!(found.is_some());
        assert_eq!(found.unwrap().id, "claude-sonnet-4");

        let not_found = provider.find_model("gpt-4o");
        assert!(not_found.is_none());
    }
1531
    #[test]
    fn test_provider_config_get_api_key() {
        // get_api_key prefers the model's own key and falls back to the
        // provider-level key when the model has none.
        let provider = ProviderConfig {
            name: "anthropic".to_string(),
            api_key: Some("provider-key".to_string()),
            base_url: None,
            headers: HashMap::new(),
            session_id_header: None,
            models: vec![],
        };

        // Model carrying its own key — should win over the provider's.
        let model_with_key = ModelConfig {
            id: "test".to_string(),
            name: "".to_string(),
            family: "".to_string(),
            api_key: Some("model-key".to_string()),
            base_url: None,
            headers: HashMap::new(),
            session_id_header: None,
            attachment: false,
            reasoning: false,
            tool_call: true,
            temperature: true,
            release_date: None,
            modalities: ModelModalities::default(),
            cost: ModelCost::default(),
            limit: ModelLimit::default(),
        };

        // Model with no key — should fall back to the provider's.
        let model_without_key = ModelConfig {
            id: "test2".to_string(),
            name: "".to_string(),
            family: "".to_string(),
            api_key: None,
            base_url: None,
            headers: HashMap::new(),
            session_id_header: None,
            attachment: false,
            reasoning: false,
            tool_call: true,
            temperature: true,
            release_date: None,
            modalities: ModelModalities::default(),
            cost: ModelCost::default(),
            limit: ModelLimit::default(),
        };

        assert_eq!(provider.get_api_key(&model_with_key), Some("model-key"));
        assert_eq!(
            provider.get_api_key(&model_without_key),
            Some("provider-key")
        );
    }
1585
    #[test]
    fn test_provider_config_get_headers_and_session_id_header() {
        // Headers from provider and model are merged; on a key collision the
        // model's value wins. The model's session_id_header also takes
        // precedence over the provider's.
        let mut provider_headers = HashMap::new();
        provider_headers.insert("X-Provider".to_string(), "provider".to_string());
        provider_headers.insert("X-Shared".to_string(), "provider".to_string());

        let mut model_headers = HashMap::new();
        model_headers.insert("X-Model".to_string(), "model".to_string());
        model_headers.insert("X-Shared".to_string(), "model".to_string());

        let provider = ProviderConfig {
            name: "openai".to_string(),
            api_key: Some("provider-key".to_string()),
            base_url: None,
            headers: provider_headers,
            session_id_header: Some("X-Session-Id".to_string()),
            models: vec![],
        };

        let model = ModelConfig {
            id: "gpt-4o".to_string(),
            name: "".to_string(),
            family: "".to_string(),
            api_key: None,
            base_url: None,
            headers: model_headers,
            session_id_header: Some("X-Model-Session".to_string()),
            attachment: false,
            reasoning: false,
            tool_call: true,
            temperature: true,
            release_date: None,
            modalities: ModelModalities::default(),
            cost: ModelCost::default(),
            limit: ModelLimit::default(),
        };

        let headers = provider.get_headers(&model);
        assert_eq!(headers.get("X-Provider"), Some(&"provider".to_string()));
        assert_eq!(headers.get("X-Model"), Some(&"model".to_string()));
        // Collision: the model-level value overrides the provider's.
        assert_eq!(headers.get("X-Shared"), Some(&"model".to_string()));
        assert_eq!(
            provider.get_session_id_header(&model),
            Some("X-Model-Session")
        );
    }
1632
    #[test]
    fn test_llm_config_includes_headers_and_runtime_session_header() {
        // Provider-level headers and session_id_header must be carried through
        // into the LlmConfig produced by default_llm_config().
        let mut provider_headers = HashMap::new();
        provider_headers.insert("X-Provider".to_string(), "provider".to_string());

        let config = CodeConfig {
            default_model: Some("openai/gpt-4o".to_string()),
            providers: vec![ProviderConfig {
                name: "openai".to_string(),
                api_key: Some("sk-test".to_string()),
                base_url: Some("https://api.example.com".to_string()),
                headers: provider_headers,
                session_id_header: Some("X-Session-Id".to_string()),
                models: vec![ModelConfig {
                    id: "gpt-4o".to_string(),
                    name: "".to_string(),
                    family: "".to_string(),
                    api_key: None,
                    base_url: None,
                    headers: HashMap::new(),
                    session_id_header: None,
                    attachment: false,
                    reasoning: false,
                    tool_call: true,
                    temperature: true,
                    release_date: None,
                    modalities: ModelModalities::default(),
                    cost: ModelCost::default(),
                    limit: ModelLimit::default(),
                }],
            }],
            ..Default::default()
        };

        let llm_config = config.default_llm_config().unwrap();
        assert_eq!(
            llm_config.headers.get("X-Provider"),
            Some(&"provider".to_string())
        );
        assert_eq!(
            llm_config.session_id_header.as_deref(),
            Some("X-Session-Id")
        );
    }
1677
    #[test]
    fn test_code_config_default_provider_config() {
        // The provider half of a "provider/model" default must resolve even
        // when the provider has no models configured.
        let config = CodeConfig {
            default_model: Some("anthropic/claude-sonnet-4".to_string()),
            providers: vec![ProviderConfig {
                name: "anthropic".to_string(),
                api_key: Some("sk-test".to_string()),
                base_url: None,
                headers: HashMap::new(),
                session_id_header: None,
                models: vec![],
            }],
            ..Default::default()
        };

        let provider = config.default_provider_config();
        assert!(provider.is_some());
        assert_eq!(provider.unwrap().name, "anthropic");
    }
1697
    #[test]
    fn test_code_config_default_model_config() {
        // Both halves of a "provider/model" default must resolve to the
        // matching (ProviderConfig, ModelConfig) pair.
        let config = CodeConfig {
            default_model: Some("anthropic/claude-sonnet-4".to_string()),
            providers: vec![ProviderConfig {
                name: "anthropic".to_string(),
                api_key: Some("sk-test".to_string()),
                base_url: None,
                headers: HashMap::new(),
                session_id_header: None,
                models: vec![ModelConfig {
                    id: "claude-sonnet-4".to_string(),
                    name: "Claude Sonnet 4".to_string(),
                    family: "claude-sonnet".to_string(),
                    api_key: None,
                    base_url: None,
                    headers: HashMap::new(),
                    session_id_header: None,
                    attachment: false,
                    reasoning: false,
                    tool_call: true,
                    temperature: true,
                    release_date: None,
                    modalities: ModelModalities::default(),
                    cost: ModelCost::default(),
                    limit: ModelLimit::default(),
                }],
            }],
            ..Default::default()
        };

        let result = config.default_model_config();
        assert!(result.is_some());
        let (provider, model) = result.unwrap();
        assert_eq!(provider.name, "anthropic");
        assert_eq!(model.id, "claude-sonnet-4");
    }
1735
1736    #[test]
1737    fn test_code_config_default_llm_config() {
1738        let config = CodeConfig {
1739            default_model: Some("anthropic/claude-sonnet-4".to_string()),
1740            providers: vec![ProviderConfig {
1741                name: "anthropic".to_string(),
1742                api_key: Some("sk-test".to_string()),
1743                base_url: Some("https://api.anthropic.com".to_string()),
1744                headers: HashMap::new(),
1745                session_id_header: None,
1746                models: vec![ModelConfig {
1747                    id: "claude-sonnet-4".to_string(),
1748                    name: "Claude Sonnet 4".to_string(),
1749                    family: "claude-sonnet".to_string(),
1750                    api_key: None,
1751                    base_url: None,
1752                    headers: HashMap::new(),
1753                    session_id_header: None,
1754                    attachment: false,
1755                    reasoning: false,
1756                    tool_call: true,
1757                    temperature: true,
1758                    release_date: None,
1759                    modalities: ModelModalities::default(),
1760                    cost: ModelCost::default(),
1761                    limit: ModelLimit::default(),
1762                }],
1763            }],
1764            ..Default::default()
1765        };
1766
1767        let llm_config = config.default_llm_config();
1768        assert!(llm_config.is_some());
1769    }
1770
1771    #[test]
1772    fn test_code_config_list_models() {
1773        let config = CodeConfig {
1774            providers: vec![
1775                ProviderConfig {
1776                    name: "anthropic".to_string(),
1777                    api_key: None,
1778                    base_url: None,
1779                    headers: HashMap::new(),
1780                    session_id_header: None,
1781                    models: vec![ModelConfig {
1782                        id: "claude-sonnet-4".to_string(),
1783                        name: "".to_string(),
1784                        family: "".to_string(),
1785                        api_key: None,
1786                        base_url: None,
1787                        headers: HashMap::new(),
1788                        session_id_header: None,
1789                        attachment: false,
1790                        reasoning: false,
1791                        tool_call: true,
1792                        temperature: true,
1793                        release_date: None,
1794                        modalities: ModelModalities::default(),
1795                        cost: ModelCost::default(),
1796                        limit: ModelLimit::default(),
1797                    }],
1798                },
1799                ProviderConfig {
1800                    name: "openai".to_string(),
1801                    api_key: None,
1802                    base_url: None,
1803                    headers: HashMap::new(),
1804                    session_id_header: None,
1805                    models: vec![ModelConfig {
1806                        id: "gpt-4o".to_string(),
1807                        name: "".to_string(),
1808                        family: "".to_string(),
1809                        api_key: None,
1810                        base_url: None,
1811                        headers: HashMap::new(),
1812                        session_id_header: None,
1813                        attachment: false,
1814                        reasoning: false,
1815                        tool_call: true,
1816                        temperature: true,
1817                        release_date: None,
1818                        modalities: ModelModalities::default(),
1819                        cost: ModelCost::default(),
1820                        limit: ModelLimit::default(),
1821                    }],
1822                },
1823            ],
1824            ..Default::default()
1825        };
1826
1827        let models = config.list_models();
1828        assert_eq!(models.len(), 2);
1829    }
1830
1831    #[test]
1832    fn test_llm_config_specific_provider_model() {
1833        let model: ModelConfig = serde_json::from_value(serde_json::json!({
1834            "id": "claude-3",
1835            "name": "Claude 3"
1836        }))
1837        .unwrap();
1838
1839        let config = CodeConfig {
1840            providers: vec![ProviderConfig {
1841                name: "anthropic".to_string(),
1842                api_key: Some("sk-test".to_string()),
1843                base_url: None,
1844                headers: HashMap::new(),
1845                session_id_header: None,
1846                models: vec![model],
1847            }],
1848            ..Default::default()
1849        };
1850
1851        let llm = config.llm_config("anthropic", "claude-3");
1852        assert!(llm.is_some());
1853        let llm = llm.unwrap();
1854        assert_eq!(llm.provider, "anthropic");
1855        assert_eq!(llm.model, "claude-3");
1856    }
1857
1858    #[test]
1859    fn test_llm_config_missing_provider() {
1860        let config = CodeConfig::default();
1861        assert!(config.llm_config("nonexistent", "model").is_none());
1862    }
1863
1864    #[test]
1865    fn test_llm_config_missing_model() {
1866        let config = CodeConfig {
1867            providers: vec![ProviderConfig {
1868                name: "anthropic".to_string(),
1869                api_key: Some("sk-test".to_string()),
1870                base_url: None,
1871                headers: HashMap::new(),
1872                session_id_header: None,
1873                models: vec![],
1874            }],
1875            ..Default::default()
1876        };
1877        assert!(config.llm_config("anthropic", "nonexistent").is_none());
1878    }
1879
1880    #[test]
1881    fn test_agentic_search_config_normalizes_invalid_values() {
1882        let config = AgenticSearchConfig {
1883            enabled: true,
1884            default_mode: "weird".to_string(),
1885            max_results: 0,
1886            context_lines: 999,
1887        }
1888        .normalized();
1889
1890        assert_eq!(config.default_mode, "fast");
1891        assert_eq!(config.max_results, 1);
1892        assert_eq!(config.context_lines, 20);
1893    }
1894
1895    #[test]
1896    fn test_agentic_parse_config_normalizes_invalid_values() {
1897        let config = AgenticParseConfig {
1898            enabled: true,
1899            default_strategy: "unknown".to_string(),
1900            max_chars: 1,
1901        }
1902        .normalized();
1903
1904        assert_eq!(config.default_strategy, "auto");
1905        assert_eq!(config.max_chars, 500);
1906    }
1907
1908    #[test]
1909    fn test_document_parser_config_normalizes_nested_ocr_values() {
1910        let config = DocumentParserConfig {
1911            enabled: true,
1912            max_file_size_mb: 0,
1913            cache: Some(DocumentCacheConfig {
1914                enabled: true,
1915                directory: Some(PathBuf::from("/tmp/cache")),
1916            }),
1917            ocr: Some(DocumentOcrConfig {
1918                enabled: true,
1919                model: Some("openai/gpt-4.1-mini".to_string()),
1920                prompt: None,
1921                max_images: 0,
1922                dpi: 10,
1923                provider: None,
1924                base_url: None,
1925                api_key: None,
1926            }),
1927        }
1928        .normalized();
1929
1930        assert_eq!(config.max_file_size_mb, 1);
1931        let cache = config.cache.unwrap();
1932        assert!(cache.enabled);
1933        assert_eq!(cache.directory, Some(PathBuf::from("/tmp/cache")));
1934        let ocr = config.ocr.unwrap();
1935        assert_eq!(ocr.max_images, 1);
1936        assert_eq!(ocr.dpi, 72);
1937    }
1938}