//! Provider-specific configuration for VT Code (`vtcode_config/core/provider.rs`):
//! OpenAI (Responses API, hosted shell, tool search) and Anthropic
//! (extended thinking, tool search, native memory) provider options.
1use crate::constants::reasoning;
2use serde::{Deserialize, Serialize};
3
/// Native OpenAI service tier selection.
///
/// Serialized in lowercase (`"flex"` / `"priority"`), matching the wire
/// values returned by [`OpenAIServiceTier::as_str`].
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    /// The `"flex"` service tier.
    Flex,
    /// The `"priority"` service tier.
    Priority,
}
12
13impl OpenAIServiceTier {
14    pub const fn as_str(self) -> &'static str {
15        match self {
16            Self::Flex => "flex",
17            Self::Priority => "priority",
18        }
19    }
20
21    pub fn parse(value: &str) -> Option<Self> {
22        let normalized = value.trim();
23        if normalized.eq_ignore_ascii_case("flex") {
24            Some(Self::Flex)
25        } else if normalized.eq_ignore_ascii_case("priority") {
26            Some(Self::Priority)
27        } else {
28            None
29        }
30    }
31}
32
/// How VT Code should provision OpenAI hosted shell environments.
///
/// Serialized in `snake_case`: `"container_auto"` / `"container_reference"`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    /// Let OpenAI provision a container automatically (default).
    #[default]
    ContainerAuto,
    /// Reuse an existing container identified by `container_id`.
    ContainerReference,
}
42
43impl OpenAIHostedShellEnvironment {
44    pub const fn as_str(self) -> &'static str {
45        match self {
46            Self::ContainerAuto => "container_auto",
47            Self::ContainerReference => "container_reference",
48        }
49    }
50}
51
52impl OpenAIHostedShellEnvironment {
53    pub const fn uses_container_reference(self) -> bool {
54        matches!(self, Self::ContainerReference)
55    }
56}
57
/// Hosted shell network access policy.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellNetworkPolicyType {
    /// No outbound network access (default).
    #[default]
    Disabled,
    /// Outbound access restricted to an explicit list of domains.
    Allowlist,
}
67
68impl OpenAIHostedShellNetworkPolicyType {
69    pub const fn as_str(self) -> &'static str {
70        match self {
71            Self::Disabled => "disabled",
72            Self::Allowlist => "allowlist",
73        }
74    }
75}
76
/// Per-domain secret injected by the OpenAI hosted shell runtime.
///
/// All three fields must be non-blank; see [`Self::validation_error`].
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIHostedShellDomainSecret {
    /// Domain the secret applies to; must also appear in the policy allowlist.
    pub domain: String,
    /// Name under which the secret is exposed to the hosted shell.
    pub name: String,
    /// The secret value itself.
    pub value: String,
}
85
86impl OpenAIHostedShellDomainSecret {
87    pub fn validation_error(&self, index: usize) -> Option<String> {
88        let base = format!("provider.openai.hosted_shell.network_policy.domain_secrets[{index}]");
89
90        if self.domain.trim().is_empty() {
91            return Some(format!("`{base}.domain` must not be empty when set."));
92        }
93        if self.name.trim().is_empty() {
94            return Some(format!("`{base}.name` must not be empty when set."));
95        }
96        if self.value.trim().is_empty() {
97            return Some(format!("`{base}.value` must not be empty when set."));
98        }
99
100        None
101    }
102}
103
/// Request-scoped network policy for OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellNetworkPolicy {
    /// Policy kind; serialized under the key `type`. Defaults to `disabled`.
    #[serde(rename = "type", default)]
    pub policy_type: OpenAIHostedShellNetworkPolicyType,

    /// Domains reachable when `policy_type` is `allowlist`; must be non-empty
    /// in that mode (see `first_invalid_message`).
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub allowed_domains: Vec<String>,

    /// Per-domain secrets; each secret's domain must also appear in
    /// `allowed_domains` (enforced by `first_invalid_message`).
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub domain_secrets: Vec<OpenAIHostedShellDomainSecret>,
}
117
118impl OpenAIHostedShellNetworkPolicy {
119    pub const fn is_allowlist(&self) -> bool {
120        matches!(
121            self.policy_type,
122            OpenAIHostedShellNetworkPolicyType::Allowlist
123        )
124    }
125
126    pub fn first_invalid_message(&self) -> Option<String> {
127        match self.policy_type {
128            OpenAIHostedShellNetworkPolicyType::Disabled => {
129                if !self.allowed_domains.is_empty() || !self.domain_secrets.is_empty() {
130                    return Some(
131                        "`provider.openai.hosted_shell.network_policy.allowed_domains` and `provider.openai.hosted_shell.network_policy.domain_secrets` require `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
132                            .to_string(),
133                    );
134                }
135            }
136            OpenAIHostedShellNetworkPolicyType::Allowlist => {
137                if let Some(index) = self
138                    .allowed_domains
139                    .iter()
140                    .position(|value| value.trim().is_empty())
141                {
142                    return Some(format!(
143                        "`provider.openai.hosted_shell.network_policy.allowed_domains[{index}]` must not be empty when set."
144                    ));
145                }
146
147                if self.allowed_domains.is_empty() {
148                    return Some(
149                        "`provider.openai.hosted_shell.network_policy.allowed_domains` must include at least one domain when `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
150                            .to_string(),
151                    );
152                }
153
154                for (index, secret) in self.domain_secrets.iter().enumerate() {
155                    if let Some(message) = secret.validation_error(index) {
156                        return Some(message);
157                    }
158
159                    let secret_domain = secret.domain.trim();
160                    if !self
161                        .allowed_domains
162                        .iter()
163                        .any(|domain| domain.trim().eq_ignore_ascii_case(secret_domain))
164                    {
165                        return Some(format!(
166                            "`provider.openai.hosted_shell.network_policy.domain_secrets[{index}].domain` must also appear in `provider.openai.hosted_shell.network_policy.allowed_domains`."
167                        ));
168                    }
169                }
170            }
171        }
172
173        None
174    }
175}
176
/// Reserved keyword values for hosted skill version selection.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    /// The `"latest"` keyword (the only reserved value today, and the default).
    #[default]
    Latest,
}
185
/// Hosted skill version selector for OpenAI Responses hosted shell mounts.
///
/// NOTE: `#[serde(untagged)]` tries variants in declaration order, so the
/// `latest` keyword is matched before numbers and free-form strings — do not
/// reorder the variants.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    /// The reserved `"latest"` keyword.
    Latest(OpenAIHostedSkillVersionKeyword),
    /// A pinned numeric version (e.g. `version = 2`).
    Number(u64),
    /// A free-form version string; must be non-blank (see `validation_error`).
    String(String),
}
195
196impl Default for OpenAIHostedSkillVersion {
197    fn default() -> Self {
198        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
199    }
200}
201
202impl OpenAIHostedSkillVersion {
203    pub fn validation_error(&self, field_path: &str) -> Option<String> {
204        match self {
205            Self::String(value) if value.trim().is_empty() => {
206                Some(format!("`{field_path}` must not be empty when set."))
207            }
208            _ => None,
209        }
210    }
211}
212
/// Hosted skill reference mounted into an OpenAI hosted shell environment.
///
/// Internally tagged by the `type` key (`"skill_reference"` / `"inline"`).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    /// Reference to a pre-registered hosted skill.
    SkillReference {
        /// Identifier of the registered skill; must be non-blank.
        skill_id: String,
        /// Version selector; defaults to `latest`.
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    /// Inline base64 zip bundle.
    Inline {
        /// Base64-encoded zip payload; must be non-blank.
        bundle_b64: String,
        /// Optional checksum string (field name suggests SHA-256 of the bundle).
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}
231
232impl OpenAIHostedSkill {
233    pub fn validation_error(&self, index: usize) -> Option<String> {
234        match self {
235            Self::SkillReference { skill_id, version } => {
236                let skill_id_path =
237                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
238                if skill_id.trim().is_empty() {
239                    return Some(format!(
240                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
241                    ));
242                }
243
244                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
245                version.validation_error(&version_path)
246            }
247            Self::Inline { bundle_b64, .. } => {
248                let bundle_path =
249                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
250                if bundle_b64.trim().is_empty() {
251                    return Some(format!(
252                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
253                    ));
254                }
255                None
256            }
257        }
258    }
259}
260
/// OpenAI hosted shell configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
    /// Enable OpenAI hosted shell instead of VT Code's local shell tool.
    /// Defaults to `false`.
    #[serde(default)]
    pub enabled: bool,

    /// Environment provisioning mode for hosted shell.
    /// Defaults to `container_auto`.
    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

    /// Existing OpenAI container ID to reuse when `environment = "container_reference"`.
    /// Must be non-blank in that mode (see `has_valid_reference_target`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

    /// File IDs to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

    /// Hosted skills to mount when using `container_auto`.
    /// Skipped by validation when using `container_reference`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,

    /// Request-scoped network policy for `container_auto` hosted shells.
    /// Skipped by validation when using `container_reference`.
    #[serde(default)]
    pub network_policy: OpenAIHostedShellNetworkPolicy,
}
289
290impl OpenAIHostedShellConfig {
291    pub fn container_id_ref(&self) -> Option<&str> {
292        self.container_id
293            .as_deref()
294            .map(str::trim)
295            .filter(|value| !value.is_empty())
296    }
297
298    pub const fn uses_container_reference(&self) -> bool {
299        self.environment.uses_container_reference()
300    }
301
302    pub fn first_invalid_skill_message(&self) -> Option<String> {
303        if self.uses_container_reference() {
304            return None;
305        }
306
307        self.skills
308            .iter()
309            .enumerate()
310            .find_map(|(index, skill)| skill.validation_error(index))
311    }
312
313    pub fn has_valid_skill_mounts(&self) -> bool {
314        self.first_invalid_skill_message().is_none()
315    }
316
317    pub fn first_invalid_network_policy_message(&self) -> Option<String> {
318        if self.uses_container_reference() {
319            return None;
320        }
321
322        self.network_policy.first_invalid_message()
323    }
324
325    pub fn has_valid_network_policy(&self) -> bool {
326        self.first_invalid_network_policy_message().is_none()
327    }
328
329    pub fn has_valid_reference_target(&self) -> bool {
330        !self.uses_container_reference() || self.container_id_ref().is_some()
331    }
332
333    pub fn is_valid_for_runtime(&self) -> bool {
334        self.has_valid_reference_target()
335            && self.has_valid_skill_mounts()
336            && self.has_valid_network_policy()
337    }
338}
339
/// OpenAI hosted tool search configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIToolSearchConfig {
    /// Enable hosted tool search for OpenAI Responses-compatible models.
    /// Defaults to `true`.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Automatically defer loading of all tools except the core always-on set.
    /// Defaults to `true`.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Tool names that should never be deferred (always available).
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
356
357impl Default for OpenAIToolSearchConfig {
358    fn default() -> Self {
359        Self {
360            enabled: default_tool_search_enabled(),
361            defer_by_default: default_defer_by_default(),
362            always_available_tools: Vec::new(),
363        }
364    }
365}
366
/// Manual compaction defaults for the native OpenAI `/responses/compact` flow.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIManualCompactionConfig {
    /// Optional custom instructions appended to manual `/compact` requests.
    /// `None` (the default) sends no extra instructions.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
}
375
/// OpenAI-specific provider configuration.
///
/// All fields are optional in TOML; `Default` yields the all-defaults config.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    /// Enable Responses API WebSocket transport for non-streaming requests.
    /// This is an opt-in path designed for long-running, tool-heavy workflows.
    #[serde(default)]
    pub websocket_mode: bool,

    /// Optional Responses API `store` flag.
    /// Set to `false` to avoid server-side storage when using Responses-compatible models.
    /// `None` omits the flag from requests.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    /// Optional Responses API `include` selectors.
    /// Example: `["reasoning.encrypted_content"]` for encrypted reasoning continuity.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

    /// Optional native OpenAI `service_tier` request parameter.
    /// Leave unset to inherit the Project-level default service tier.
    /// Options: "flex", "priority"
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

    /// Manual `/compact` defaults for the native OpenAI standalone compaction endpoint.
    #[serde(default)]
    pub manual_compaction: OpenAIManualCompactionConfig,

    /// Optional hosted shell configuration for OpenAI native Responses models.
    /// Disabled by default.
    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,

    /// Hosted tool search configuration for OpenAI Responses-compatible models.
    /// Enabled by default (see `OpenAIToolSearchConfig::default`).
    #[serde(default)]
    pub tool_search: OpenAIToolSearchConfig,
}
413
/// Anthropic-specific provider configuration.
///
/// Defaults are supplied per-field via serde `default = "…"` functions and
/// mirrored by the manual `Default` impl below.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    /// DEPRECATED: Model name validation has been removed. The Anthropic API validates
    /// model names directly, avoiding maintenance burden and allowing flexibility.
    /// This field is kept for backward compatibility but has no effect.
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    /// Enable adaptive or extended thinking for Anthropic models
    /// When enabled, Claude uses internal reasoning before responding, providing
    /// enhanced reasoning capabilities for complex tasks.
    /// Only supported by Claude 4, Claude 4.5, and Claude 3.7 Sonnet models.
    /// Claude Opus 4.7 uses adaptive thinking instead of budgeted extended thinking.
    /// Note: Extended thinking is now auto-enabled by default (31,999 tokens).
    /// Set MAX_THINKING_TOKENS=63999 environment variable for 2x budget on 64K models.
    /// See: <https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking>
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    /// Beta header for interleaved thinking feature.
    /// Defaults to `"interleaved-thinking-2025-05-14"`.
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    /// Budget tokens for extended thinking (minimum: 1024, default: 31999)
    /// On 64K output models (Opus 4.5, Sonnet 4.5, Haiku 4.5): default 31,999, max 63,999
    /// On 32K output models (Opus 4): max 31,999
    /// Claude Opus 4.7 ignores this setting and uses adaptive thinking instead.
    /// Use MAX_THINKING_TOKENS environment variable to override.
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    /// Type value for enabling interleaved thinking.
    /// Defaults to `"enabled"`.
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    /// Tool search configuration for dynamic tool discovery (advanced-tool-use beta)
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    /// Native Anthropic memory tool configuration.
    /// Disabled by default.
    #[serde(default)]
    pub memory: AnthropicMemoryConfig,

    /// Effort level for adaptive thinking/token usage (low, medium, high, xhigh, max)
    /// Controls how many tokens Claude uses when responding, trading off between
    /// response thoroughness and token efficiency.
    /// The default config value keeps Claude Opus 4.7 on `xhigh`; models that do not
    /// support `xhigh` fall back to their supported model default, typically `high`.
    #[serde(default = "default_effort")]
    pub effort: String,

    /// Optional Anthropic task budget token total for Claude Opus 4.7.
    /// When set, VT Code sends `output_config.task_budget = { type = "tokens", total = N }`
    /// and the required beta header.
    /// Anthropic currently requires a minimum of 20,000 tokens.
    #[serde(default)]
    pub task_budget_tokens: Option<u32>,

    /// Beta header for Anthropic task budgets.
    /// Defaults to `"task-budgets-2026-03-13"`.
    #[serde(default = "default_task_budget_beta")]
    pub task_budget_beta: String,

    /// Controls how thinking content is returned in API responses.
    ///   - "summarized": Thinking blocks contain summarized text (default on Claude 4 models).
    ///   - "omitted": Thinking blocks have an empty `thinking` field (default on Opus 4.7).
    ///
    /// When set, this overrides the model-specific default.
    /// See: <https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#controlling-thinking-display>
    #[serde(default)]
    pub thinking_display: Option<String>,

    /// Enable token counting via the count_tokens endpoint
    /// When enabled, the agent can estimate input token counts before making API calls
    /// Useful for proactive management of rate limits and costs
    /// Defaults to `false`.
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}
497
498#[allow(deprecated)]
499impl Default for AnthropicConfig {
500    fn default() -> Self {
501        Self {
502            skip_model_validation: false,
503            extended_thinking_enabled: default_extended_thinking_enabled(),
504            interleaved_thinking_beta: default_interleaved_thinking_beta(),
505            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
506            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
507            tool_search: ToolSearchConfig::default(),
508            memory: AnthropicMemoryConfig::default(),
509            effort: default_effort(),
510            task_budget_tokens: None,
511            task_budget_beta: default_task_budget_beta(),
512            thinking_display: None,
513            count_tokens_enabled: default_count_tokens_enabled(),
514        }
515    }
516}
517
/// Native Anthropic memory tool configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq, Eq)]
pub struct AnthropicMemoryConfig {
    /// Enable the native memory tool. Disabled by default.
    #[serde(default)]
    pub enabled: bool,
}
524
/// Serde default for `AnthropicConfig::count_tokens_enabled` (off).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_count_tokens_enabled() -> bool {
    false
}
529
/// Configuration for Anthropic's tool search feature (advanced-tool-use beta)
/// Enables dynamic tool discovery for large tool catalogs (up to 10k tools)
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    /// Enable tool search feature (requires advanced-tool-use-2025-11-20 beta)
    /// Defaults to `true`.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Search algorithm: "regex" (Python regex patterns) or "bm25" (natural language)
    /// Defaults to `"regex"`.
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    /// Automatically defer loading of all tools except core tools.
    /// Defaults to `true`.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Maximum number of tool search results to return.
    /// Defaults to `5`.
    #[serde(default = "default_max_results")]
    pub max_results: u32,

    /// Tool names that should never be deferred (always available)
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
555
556impl Default for ToolSearchConfig {
557    fn default() -> Self {
558        Self {
559            enabled: default_tool_search_enabled(),
560            algorithm: default_tool_search_algorithm(),
561            defer_by_default: default_defer_by_default(),
562            max_results: default_max_results(),
563            always_available_tools: vec![],
564        }
565    }
566}
567
/// Serde default for tool-search `enabled` (on).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_tool_search_enabled() -> bool {
    true
}
572
/// Serde default for `ToolSearchConfig::algorithm` (`"regex"`).
#[inline]
fn default_tool_search_algorithm() -> String {
    String::from("regex")
}
577
/// Serde default for tool-search `defer_by_default` (on).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_defer_by_default() -> bool {
    true
}
582
/// Serde default for `ToolSearchConfig::max_results` (5).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_max_results() -> u32 {
    5
}
587
/// Serde default for `AnthropicConfig::extended_thinking_enabled` (on).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_extended_thinking_enabled() -> bool {
    true
}
592
/// Serde default for the interleaved-thinking beta header value.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    String::from("interleaved-thinking-2025-05-14")
}
597
/// Serde default for the extended-thinking token budget (31,999).
/// `const fn`: trivially constant, usable in const contexts.
#[inline]
const fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}
602
/// Serde default for the interleaved-thinking `type` value (`"enabled"`).
#[inline]
fn default_interleaved_thinking_type() -> String {
    String::from("enabled")
}
607
/// Serde default for `AnthropicConfig::effort`: the crate-level `XHIGH`
/// reasoning constant (see `crate::constants::reasoning`).
#[inline]
fn default_effort() -> String {
    reasoning::XHIGH.to_string()
}
612
/// Serde default for the Anthropic task-budget beta header value.
#[inline]
fn default_task_budget_beta() -> String {
    String::from("task-budgets-2026-03-13")
}
617
618#[cfg(test)]
619mod tests {
620    use super::{
621        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellDomainSecret,
622        OpenAIHostedShellEnvironment, OpenAIHostedShellNetworkPolicy,
623        OpenAIHostedShellNetworkPolicyType, OpenAIHostedSkill, OpenAIHostedSkillVersion,
624        OpenAIManualCompactionConfig, OpenAIServiceTier,
625    };
626
627    #[test]
628    fn openai_config_defaults_to_websocket_mode_disabled() {
629        let config = OpenAIConfig::default();
630        assert!(!config.websocket_mode);
631        assert_eq!(config.responses_store, None);
632        assert!(config.responses_include.is_empty());
633        assert_eq!(config.service_tier, None);
634        assert_eq!(
635            config.manual_compaction,
636            OpenAIManualCompactionConfig::default()
637        );
638        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
639        assert!(config.tool_search.enabled);
640        assert!(config.tool_search.defer_by_default);
641        assert!(config.tool_search.always_available_tools.is_empty());
642    }
643
644    #[test]
645    fn anthropic_config_defaults_native_memory_to_disabled() {
646        let config = AnthropicConfig::default();
647        assert!(!config.memory.enabled);
648    }
649
650    #[test]
651    fn anthropic_config_parses_native_memory_opt_in() {
652        let parsed: AnthropicConfig =
653            toml::from_str("[memory]\nenabled = true").expect("config should parse");
654        assert!(parsed.memory.enabled);
655    }
656
657    #[test]
658    fn openai_config_parses_websocket_mode_opt_in() {
659        let parsed: OpenAIConfig =
660            toml::from_str("websocket_mode = true").expect("config should parse");
661        assert!(parsed.websocket_mode);
662        assert_eq!(parsed.responses_store, None);
663        assert!(parsed.responses_include.is_empty());
664        assert_eq!(parsed.service_tier, None);
665        assert_eq!(
666            parsed.manual_compaction,
667            OpenAIManualCompactionConfig::default()
668        );
669        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
670        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
671    }
672
673    #[test]
674    fn openai_config_parses_responses_options() {
675        let parsed: OpenAIConfig = toml::from_str(
676            r#"
677responses_store = false
678responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
679"#,
680        )
681        .expect("config should parse");
682        assert_eq!(parsed.responses_store, Some(false));
683        assert_eq!(
684            parsed.responses_include,
685            vec![
686                "reasoning.encrypted_content".to_string(),
687                "output_text.annotations".to_string()
688            ]
689        );
690        assert_eq!(parsed.service_tier, None);
691        assert_eq!(
692            parsed.manual_compaction,
693            OpenAIManualCompactionConfig::default()
694        );
695        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
696    }
697
698    #[test]
699    fn openai_config_parses_manual_compaction_defaults() {
700        let parsed: OpenAIConfig = toml::from_str(
701            r#"
702[manual_compaction]
703instructions = "Preserve the bug reproduction steps."
704"#,
705        )
706        .expect("config should parse");
707
708        assert_eq!(
709            parsed.manual_compaction.instructions.as_deref(),
710            Some("Preserve the bug reproduction steps.")
711        );
712    }
713
714    #[test]
715    fn openai_config_parses_service_tier() {
716        let parsed: OpenAIConfig =
717            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
718        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
719    }
720
721    #[test]
722    fn openai_config_parses_flex_service_tier() {
723        let parsed: OpenAIConfig =
724            toml::from_str(r#"service_tier = "flex""#).expect("config should parse");
725        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Flex));
726    }
727
728    #[test]
729    fn openai_config_parses_hosted_shell() {
730        let parsed: OpenAIConfig = toml::from_str(
731            r#"
732[hosted_shell]
733enabled = true
734environment = "container_auto"
735file_ids = ["file_123"]
736
737[[hosted_shell.skills]]
738type = "skill_reference"
739skill_id = "skill_123"
740"#,
741        )
742        .expect("config should parse");
743
744        assert!(parsed.hosted_shell.enabled);
745        assert_eq!(
746            parsed.hosted_shell.environment,
747            OpenAIHostedShellEnvironment::ContainerAuto
748        );
749        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
750        assert_eq!(
751            parsed.hosted_shell.skills,
752            vec![OpenAIHostedSkill::SkillReference {
753                skill_id: "skill_123".to_string(),
754                version: OpenAIHostedSkillVersion::default(),
755            }]
756        );
757    }
758
759    #[test]
760    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
761        let parsed: OpenAIConfig = toml::from_str(
762            r#"
763[hosted_shell]
764enabled = true
765
766[[hosted_shell.skills]]
767type = "skill_reference"
768skill_id = "skill_123"
769version = 2
770
771[[hosted_shell.skills]]
772type = "inline"
773bundle_b64 = "UEsFBgAAAAAAAA=="
774sha256 = "deadbeef"
775"#,
776        )
777        .expect("config should parse");
778
779        assert_eq!(
780            parsed.hosted_shell.skills,
781            vec![
782                OpenAIHostedSkill::SkillReference {
783                    skill_id: "skill_123".to_string(),
784                    version: OpenAIHostedSkillVersion::Number(2),
785                },
786                OpenAIHostedSkill::Inline {
787                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
788                    sha256: Some("deadbeef".to_string()),
789                },
790            ]
791        );
792    }
793
794    #[test]
795    fn openai_config_parses_hosted_shell_network_policy() {
796        let parsed: OpenAIConfig = toml::from_str(
797            r#"
798[hosted_shell]
799enabled = true
800
801[hosted_shell.network_policy]
802type = "allowlist"
803allowed_domains = ["httpbin.org"]
804
805[[hosted_shell.network_policy.domain_secrets]]
806domain = "httpbin.org"
807name = "API_KEY"
808value = "debug-secret-123"
809"#,
810        )
811        .expect("config should parse");
812
813        assert_eq!(
814            parsed.hosted_shell.network_policy,
815            OpenAIHostedShellNetworkPolicy {
816                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
817                allowed_domains: vec!["httpbin.org".to_string()],
818                domain_secrets: vec![OpenAIHostedShellDomainSecret {
819                    domain: "httpbin.org".to_string(),
820                    name: "API_KEY".to_string(),
821                    value: "debug-secret-123".to_string(),
822                }],
823            }
824        );
825    }
826
827    #[test]
828    fn openai_config_parses_tool_search() {
829        let parsed: OpenAIConfig = toml::from_str(
830            r#"
831[tool_search]
832enabled = false
833defer_by_default = false
834always_available_tools = ["unified_search", "custom_tool"]
835"#,
836        )
837        .expect("config should parse");
838
839        assert!(!parsed.tool_search.enabled);
840        assert!(!parsed.tool_search.defer_by_default);
841        assert_eq!(
842            parsed.tool_search.always_available_tools,
843            vec!["unified_search".to_string(), "custom_tool".to_string()]
844        );
845    }
846
847    #[test]
848    fn anthropic_tool_search_defaults_to_enabled() {
849        let config = AnthropicConfig::default();
850
851        assert!(config.tool_search.enabled);
852        assert!(config.tool_search.defer_by_default);
853        assert_eq!(config.tool_search.algorithm, "regex");
854        assert!(config.tool_search.always_available_tools.is_empty());
855    }
856
857    #[test]
858    fn hosted_shell_container_reference_requires_non_empty_container_id() {
859        let config = OpenAIHostedShellConfig {
860            enabled: true,
861            environment: OpenAIHostedShellEnvironment::ContainerReference,
862            container_id: Some("   ".to_string()),
863            file_ids: Vec::new(),
864            skills: Vec::new(),
865            network_policy: OpenAIHostedShellNetworkPolicy::default(),
866        };
867
868        assert!(!config.has_valid_reference_target());
869        assert!(config.container_id_ref().is_none());
870    }
871
872    #[test]
873    fn hosted_shell_reports_invalid_skill_reference_mounts() {
874        let config = OpenAIHostedShellConfig {
875            enabled: true,
876            environment: OpenAIHostedShellEnvironment::ContainerAuto,
877            container_id: None,
878            file_ids: Vec::new(),
879            skills: vec![OpenAIHostedSkill::SkillReference {
880                skill_id: "   ".to_string(),
881                version: OpenAIHostedSkillVersion::default(),
882            }],
883            network_policy: OpenAIHostedShellNetworkPolicy::default(),
884        };
885
886        let message = config
887            .first_invalid_skill_message()
888            .expect("invalid mount should be reported");
889
890        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
891        assert!(!config.has_valid_skill_mounts());
892        assert!(!config.is_valid_for_runtime());
893    }
894
895    #[test]
896    fn hosted_shell_ignores_skill_validation_for_container_reference() {
897        let config = OpenAIHostedShellConfig {
898            enabled: true,
899            environment: OpenAIHostedShellEnvironment::ContainerReference,
900            container_id: Some("cntr_123".to_string()),
901            file_ids: Vec::new(),
902            skills: vec![OpenAIHostedSkill::Inline {
903                bundle_b64: "   ".to_string(),
904                sha256: None,
905            }],
906            network_policy: OpenAIHostedShellNetworkPolicy::default(),
907        };
908
909        assert!(config.first_invalid_skill_message().is_none());
910        assert!(config.has_valid_skill_mounts());
911        assert!(config.is_valid_for_runtime());
912    }
913
914    #[test]
915    fn hosted_shell_reports_invalid_allowlist_without_domains() {
916        let config = OpenAIHostedShellConfig {
917            enabled: true,
918            environment: OpenAIHostedShellEnvironment::ContainerAuto,
919            container_id: None,
920            file_ids: Vec::new(),
921            skills: Vec::new(),
922            network_policy: OpenAIHostedShellNetworkPolicy {
923                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
924                allowed_domains: Vec::new(),
925                domain_secrets: Vec::new(),
926            },
927        };
928
929        let message = config
930            .first_invalid_network_policy_message()
931            .expect("invalid network policy should be reported");
932
933        assert!(message.contains("network_policy.allowed_domains"));
934        assert!(!config.has_valid_network_policy());
935        assert!(!config.is_valid_for_runtime());
936    }
937
938    #[test]
939    fn hosted_shell_reports_domain_secret_outside_allowlist() {
940        let config = OpenAIHostedShellConfig {
941            enabled: true,
942            environment: OpenAIHostedShellEnvironment::ContainerAuto,
943            container_id: None,
944            file_ids: Vec::new(),
945            skills: Vec::new(),
946            network_policy: OpenAIHostedShellNetworkPolicy {
947                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
948                allowed_domains: vec!["pypi.org".to_string()],
949                domain_secrets: vec![OpenAIHostedShellDomainSecret {
950                    domain: "httpbin.org".to_string(),
951                    name: "API_KEY".to_string(),
952                    value: "secret".to_string(),
953                }],
954            },
955        };
956
957        let message = config
958            .first_invalid_network_policy_message()
959            .expect("invalid domain secret should be reported");
960
961        assert!(message.contains("domain_secrets[0].domain"));
962        assert!(!config.has_valid_network_policy());
963    }
964
965    #[test]
966    fn hosted_shell_ignores_network_policy_validation_for_container_reference() {
967        let config = OpenAIHostedShellConfig {
968            enabled: true,
969            environment: OpenAIHostedShellEnvironment::ContainerReference,
970            container_id: Some("cntr_123".to_string()),
971            file_ids: Vec::new(),
972            skills: Vec::new(),
973            network_policy: OpenAIHostedShellNetworkPolicy {
974                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
975                allowed_domains: Vec::new(),
976                domain_secrets: Vec::new(),
977            },
978        };
979
980        assert!(config.first_invalid_network_policy_message().is_none());
981        assert!(config.has_valid_network_policy());
982    }
983}