//! Provider-specific configuration types (OpenAI and Anthropic) for VT Code.
//! Source path: `vtcode_config/core/provider.rs`.
1use crate::constants::reasoning;
2use serde::{Deserialize, Serialize};
3
/// Native OpenAI service tier selection.
///
/// Serialized as lowercase strings (`"flex"`, `"priority"`) to match the
/// OpenAI `service_tier` request parameter. No `Default` is derived: an unset
/// tier means "inherit the project-level default" (see `OpenAIConfig::service_tier`).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    /// The `"flex"` service tier.
    Flex,
    /// The `"priority"` service tier.
    Priority,
}
12
13impl OpenAIServiceTier {
14    pub const fn as_str(self) -> &'static str {
15        match self {
16            Self::Flex => "flex",
17            Self::Priority => "priority",
18        }
19    }
20
21    pub fn parse(value: &str) -> Option<Self> {
22        let normalized = value.trim();
23        if normalized.eq_ignore_ascii_case("flex") {
24            Some(Self::Flex)
25        } else if normalized.eq_ignore_ascii_case("priority") {
26            Some(Self::Priority)
27        } else {
28            None
29        }
30    }
31}
32
/// How VT Code should provision OpenAI hosted shell environments.
///
/// Serialized as snake_case strings (`"container_auto"`, `"container_reference"`).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    /// Auto-provision a container (the default mode).
    #[default]
    ContainerAuto,
    /// Reuse an existing container identified by `OpenAIHostedShellConfig::container_id`.
    ContainerReference,
}
42
43impl OpenAIHostedShellEnvironment {
44    pub const fn as_str(self) -> &'static str {
45        match self {
46            Self::ContainerAuto => "container_auto",
47            Self::ContainerReference => "container_reference",
48        }
49    }
50}
51
52impl OpenAIHostedShellEnvironment {
53    pub const fn uses_container_reference(self) -> bool {
54        matches!(self, Self::ContainerReference)
55    }
56}
57
/// Hosted shell network access policy.
///
/// Serialized as snake_case strings (`"disabled"`, `"allowlist"`).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellNetworkPolicyType {
    /// No network access (the default).
    #[default]
    Disabled,
    /// Access restricted to `OpenAIHostedShellNetworkPolicy::allowed_domains`.
    Allowlist,
}
67
68impl OpenAIHostedShellNetworkPolicyType {
69    pub const fn as_str(self) -> &'static str {
70        match self {
71            Self::Disabled => "disabled",
72            Self::Allowlist => "allowlist",
73        }
74    }
75}
76
/// Per-domain secret injected by the OpenAI hosted shell runtime.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIHostedShellDomainSecret {
    /// Domain the secret applies to. Validation requires it to also appear
    /// in `OpenAIHostedShellNetworkPolicy::allowed_domains`.
    pub domain: String,
    /// Secret name — presumably injected as an environment variable by the
    /// hosted shell runtime; confirm against the OpenAI hosted shell docs.
    pub name: String,
    /// Secret value. Must be non-empty (see `validation_error`).
    pub value: String,
}
85
86impl OpenAIHostedShellDomainSecret {
87    pub fn validation_error(&self, index: usize) -> Option<String> {
88        let base = format!("provider.openai.hosted_shell.network_policy.domain_secrets[{index}]");
89
90        if self.domain.trim().is_empty() {
91            return Some(format!("`{base}.domain` must not be empty when set."));
92        }
93        if self.name.trim().is_empty() {
94            return Some(format!("`{base}.name` must not be empty when set."));
95        }
96        if self.value.trim().is_empty() {
97            return Some(format!("`{base}.value` must not be empty when set."));
98        }
99
100        None
101    }
102}
103
/// Request-scoped network policy for OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellNetworkPolicy {
    /// Policy kind; serialized under the `type` key. Defaults to `disabled`.
    #[serde(rename = "type", default)]
    pub policy_type: OpenAIHostedShellNetworkPolicyType,

    /// Domains reachable from the hosted shell. Only valid (and required to
    /// be non-empty) when `policy_type` is `allowlist` — see `first_invalid_message`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub allowed_domains: Vec<String>,

    /// Per-domain secrets; each secret's domain must appear in `allowed_domains`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub domain_secrets: Vec<OpenAIHostedShellDomainSecret>,
}
117
118impl OpenAIHostedShellNetworkPolicy {
119    pub const fn is_allowlist(&self) -> bool {
120        matches!(
121            self.policy_type,
122            OpenAIHostedShellNetworkPolicyType::Allowlist
123        )
124    }
125
126    pub fn first_invalid_message(&self) -> Option<String> {
127        match self.policy_type {
128            OpenAIHostedShellNetworkPolicyType::Disabled => {
129                if !self.allowed_domains.is_empty() || !self.domain_secrets.is_empty() {
130                    return Some(
131                        "`provider.openai.hosted_shell.network_policy.allowed_domains` and `provider.openai.hosted_shell.network_policy.domain_secrets` require `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
132                            .to_string(),
133                    );
134                }
135            }
136            OpenAIHostedShellNetworkPolicyType::Allowlist => {
137                if let Some(index) = self
138                    .allowed_domains
139                    .iter()
140                    .position(|value| value.trim().is_empty())
141                {
142                    return Some(format!(
143                        "`provider.openai.hosted_shell.network_policy.allowed_domains[{index}]` must not be empty when set."
144                    ));
145                }
146
147                if self.allowed_domains.is_empty() {
148                    return Some(
149                        "`provider.openai.hosted_shell.network_policy.allowed_domains` must include at least one domain when `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
150                            .to_string(),
151                    );
152                }
153
154                for (index, secret) in self.domain_secrets.iter().enumerate() {
155                    if let Some(message) = secret.validation_error(index) {
156                        return Some(message);
157                    }
158
159                    let secret_domain = secret.domain.trim();
160                    if !self
161                        .allowed_domains
162                        .iter()
163                        .any(|domain| domain.trim().eq_ignore_ascii_case(secret_domain))
164                    {
165                        return Some(format!(
166                            "`provider.openai.hosted_shell.network_policy.domain_secrets[{index}].domain` must also appear in `provider.openai.hosted_shell.network_policy.allowed_domains`."
167                        ));
168                    }
169                }
170            }
171        }
172
173        None
174    }
175}
176
/// Reserved keyword values for hosted skill version selection.
///
/// Serialized lowercase, so the only variant maps to the string `"latest"`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    /// Track the latest published skill version (the default).
    #[default]
    Latest,
}
185
/// Hosted skill version selector for OpenAI Responses hosted shell mounts.
///
/// `#[serde(untagged)]` tries variants in declaration order, so the literal
/// string `"latest"` deserializes as the `Latest` keyword variant rather than
/// the free-form `String` variant; any other string falls through to `String`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    /// Reserved keyword selector (currently only `"latest"`).
    Latest(OpenAIHostedSkillVersionKeyword),
    /// Numeric version pin, e.g. `version = 2`.
    Number(u64),
    /// Free-form version string; must be non-blank (see `validation_error`).
    String(String),
}
195
196impl Default for OpenAIHostedSkillVersion {
197    fn default() -> Self {
198        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
199    }
200}
201
202impl OpenAIHostedSkillVersion {
203    pub fn validation_error(&self, field_path: &str) -> Option<String> {
204        match self {
205            Self::String(value) if value.trim().is_empty() => {
206                Some(format!("`{field_path}` must not be empty when set."))
207            }
208            _ => None,
209        }
210    }
211}
212
/// Hosted skill reference mounted into an OpenAI hosted shell environment.
///
/// Internally tagged: the `type` key selects `"skill_reference"` or `"inline"`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    /// Reference to a pre-registered hosted skill.
    SkillReference {
        // Must be non-blank (see `validation_error`).
        skill_id: String,
        // Defaults to the `latest` keyword when omitted.
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    /// Inline base64 zip bundle.
    Inline {
        // Base64-encoded zip payload; must be non-blank (see `validation_error`).
        bundle_b64: String,
        // Optional integrity digest; presumably hex SHA-256 of the decoded
        // bundle — confirm against the hosted shell runtime docs.
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}
231
232impl OpenAIHostedSkill {
233    pub fn validation_error(&self, index: usize) -> Option<String> {
234        match self {
235            Self::SkillReference { skill_id, version } => {
236                let skill_id_path =
237                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
238                if skill_id.trim().is_empty() {
239                    return Some(format!(
240                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
241                    ));
242                }
243
244                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
245                version.validation_error(&version_path)
246            }
247            Self::Inline { bundle_b64, .. } => {
248                let bundle_path =
249                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
250                if bundle_b64.trim().is_empty() {
251                    return Some(format!(
252                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
253                    ));
254                }
255                None
256            }
257        }
258    }
259}
260
/// OpenAI hosted shell configuration.
///
/// Consistency of the fields is checked by the validation helpers in the
/// `impl` block (`first_invalid_skill_message`, `is_valid_for_runtime`, etc.).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
    /// Enable OpenAI hosted shell instead of VT Code's local shell tool.
    #[serde(default)]
    pub enabled: bool,

    /// Environment provisioning mode for hosted shell.
    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

    /// Existing OpenAI container ID to reuse when `environment = "container_reference"`.
    /// Must be non-blank in that mode (see `has_valid_reference_target`).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

    /// File IDs to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

    /// Hosted skills to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,

    /// Request-scoped network policy for `container_auto` hosted shells.
    #[serde(default)]
    pub network_policy: OpenAIHostedShellNetworkPolicy,
}
289
290impl OpenAIHostedShellConfig {
291    pub fn container_id_ref(&self) -> Option<&str> {
292        self.container_id
293            .as_deref()
294            .map(str::trim)
295            .filter(|value| !value.is_empty())
296    }
297
298    pub const fn uses_container_reference(&self) -> bool {
299        self.environment.uses_container_reference()
300    }
301
302    pub fn first_invalid_skill_message(&self) -> Option<String> {
303        if self.uses_container_reference() {
304            return None;
305        }
306
307        self.skills
308            .iter()
309            .enumerate()
310            .find_map(|(index, skill)| skill.validation_error(index))
311    }
312
313    pub fn has_valid_skill_mounts(&self) -> bool {
314        self.first_invalid_skill_message().is_none()
315    }
316
317    pub fn first_invalid_network_policy_message(&self) -> Option<String> {
318        if self.uses_container_reference() {
319            return None;
320        }
321
322        self.network_policy.first_invalid_message()
323    }
324
325    pub fn has_valid_network_policy(&self) -> bool {
326        self.first_invalid_network_policy_message().is_none()
327    }
328
329    pub fn has_valid_reference_target(&self) -> bool {
330        !self.uses_container_reference() || self.container_id_ref().is_some()
331    }
332
333    pub fn is_valid_for_runtime(&self) -> bool {
334        self.has_valid_reference_target()
335            && self.has_valid_skill_mounts()
336            && self.has_valid_network_policy()
337    }
338}
339
/// OpenAI hosted tool search configuration.
///
/// Both boolean fields default to `true` via the shared `default_*` helpers.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIToolSearchConfig {
    /// Enable hosted tool search for OpenAI Responses-compatible models.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Automatically defer loading of all tools except the core always-on set.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Tool names that should never be deferred (always available).
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
356
357impl Default for OpenAIToolSearchConfig {
358    fn default() -> Self {
359        Self {
360            enabled: default_tool_search_enabled(),
361            defer_by_default: default_defer_by_default(),
362            always_available_tools: Vec::new(),
363        }
364    }
365}
366
/// Manual compaction defaults for the native OpenAI `/responses/compact` flow.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIManualCompactionConfig {
    /// Optional custom instructions appended to manual `/compact` requests.
    /// Omitted from serialized output when unset.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
}
375
/// OpenAI-specific provider configuration.
///
/// Every field has a serde default, so an empty `[provider.openai]` table is
/// valid; the derived `Default` yields `websocket_mode = false` and unset
/// optionals.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    /// Enable Responses API WebSocket transport for non-streaming requests on
    /// native OpenAI and OpenAI-compatible Responses endpoints.
    /// This is an opt-in path designed for long-running, tool-heavy workflows.
    #[serde(default)]
    pub websocket_mode: bool,

    /// Optional Responses API `store` flag.
    /// Set to `false` to avoid server-side storage when using Responses-compatible models.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    /// Optional Responses API `include` selectors.
    /// Example: `["reasoning.encrypted_content"]` for encrypted reasoning continuity.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

    /// Optional native OpenAI `service_tier` request parameter.
    /// Leave unset to inherit the Project-level default service tier.
    /// Options: "flex", "priority"
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

    /// Manual `/compact` defaults for the native OpenAI standalone compaction endpoint.
    #[serde(default)]
    pub manual_compaction: OpenAIManualCompactionConfig,

    /// Optional hosted shell configuration for OpenAI native Responses models.
    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,

    /// Hosted tool search configuration for OpenAI Responses-compatible models.
    #[serde(default)]
    pub tool_search: OpenAIToolSearchConfig,
}
414
/// Anthropic-specific provider configuration.
///
/// Field defaults come from the `default_*` helpers at the bottom of this
/// module; the hand-written `Default` impl mirrors them.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    /// DEPRECATED: Model name validation has been removed. The Anthropic API validates
    /// model names directly, avoiding maintenance burden and allowing flexibility.
    /// This field is kept for backward compatibility but has no effect.
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    /// Enable adaptive or extended thinking for Anthropic models
    /// When enabled, Claude uses internal reasoning before responding, providing
    /// enhanced reasoning capabilities for complex tasks.
    /// Only supported by Claude 4, Claude 4.5, and Claude 3.7 Sonnet models.
    /// Claude Opus 4.7 uses adaptive thinking instead of budgeted extended thinking.
    /// Note: Extended thinking is now auto-enabled by default (31,999 tokens).
    /// Set MAX_THINKING_TOKENS=63999 environment variable for 2x budget on 64K models.
    /// See: <https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking>
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    /// Beta header for interleaved thinking feature
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    /// Budget tokens for extended thinking (minimum: 1024, default: 31999)
    /// On 64K output models (Opus 4.5, Sonnet 4.5, Haiku 4.5): default 31,999, max 63,999
    /// On 32K output models (Opus 4): max 31,999
    /// Claude Opus 4.7 ignores this setting and uses adaptive thinking instead.
    /// Use MAX_THINKING_TOKENS environment variable to override.
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    /// Type value for enabling interleaved thinking
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    /// Tool search configuration for dynamic tool discovery (advanced-tool-use beta)
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    /// Native Anthropic memory tool configuration.
    #[serde(default)]
    pub memory: AnthropicMemoryConfig,

    /// Effort level for adaptive thinking/token usage (low, medium, high, xhigh, max)
    /// Controls how many tokens Claude uses when responding, trading off between
    /// response thoroughness and token efficiency.
    /// The default config value keeps Claude Opus 4.7 on `xhigh`; models that do not
    /// support `xhigh` fall back to their supported model default, typically `high`.
    #[serde(default = "default_effort")]
    pub effort: String,

    /// Optional Anthropic task budget token total for Claude Opus 4.7.
    /// When set, VT Code sends `output_config.task_budget = { type = "tokens", total = N }`
    /// and the required beta header.
    /// Anthropic currently requires a minimum of 20,000 tokens.
    #[serde(default)]
    pub task_budget_tokens: Option<u32>,

    /// Beta header for Anthropic task budgets.
    #[serde(default = "default_task_budget_beta")]
    pub task_budget_beta: String,

    /// Controls how thinking content is returned in API responses.
    ///   - "summarized": Thinking blocks contain summarized text (default on Claude 4 models).
    ///   - "omitted": Thinking blocks have an empty `thinking` field (default on Opus 4.7).
    ///
    /// When set, this overrides the model-specific default.
    /// See: <https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#controlling-thinking-display>
    #[serde(default)]
    pub thinking_display: Option<String>,

    /// Enable token counting via the count_tokens endpoint
    /// When enabled, the agent can estimate input token counts before making API calls
    /// Useful for proactive management of rate limits and costs
    /// Defaults to `false` (opt-in).
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}
498
499#[allow(deprecated)]
500impl Default for AnthropicConfig {
501    fn default() -> Self {
502        Self {
503            skip_model_validation: false,
504            extended_thinking_enabled: default_extended_thinking_enabled(),
505            interleaved_thinking_beta: default_interleaved_thinking_beta(),
506            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
507            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
508            tool_search: ToolSearchConfig::default(),
509            memory: AnthropicMemoryConfig::default(),
510            effort: default_effort(),
511            task_budget_tokens: None,
512            task_budget_beta: default_task_budget_beta(),
513            thinking_display: None,
514            count_tokens_enabled: default_count_tokens_enabled(),
515        }
516    }
517}
518
/// Native Anthropic memory tool configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq, Eq)]
pub struct AnthropicMemoryConfig {
    /// Opt-in flag for the native memory tool; defaults to `false`.
    #[serde(default)]
    pub enabled: bool,
}
525
/// Serde default: token counting via `count_tokens` is opt-in (disabled).
#[inline]
const fn default_count_tokens_enabled() -> bool {
    false
}
530
/// Configuration for Anthropic's tool search feature (advanced-tool-use beta)
/// Enables dynamic tool discovery for large tool catalogs (up to 10k tools)
///
/// Defaults: enabled, `regex` algorithm, defer-by-default, 5 results.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    /// Enable tool search feature (requires advanced-tool-use-2025-11-20 beta)
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Search algorithm: "regex" (Python regex patterns) or "bm25" (natural language)
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    /// Automatically defer loading of all tools except core tools
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Maximum number of tool search results to return
    #[serde(default = "default_max_results")]
    pub max_results: u32,

    /// Tool names that should never be deferred (always available)
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
556
557impl Default for ToolSearchConfig {
558    fn default() -> Self {
559        Self {
560            enabled: default_tool_search_enabled(),
561            algorithm: default_tool_search_algorithm(),
562            defer_by_default: default_defer_by_default(),
563            max_results: default_max_results(),
564            always_available_tools: vec![],
565        }
566    }
567}
568
/// Serde default: tool search is enabled out of the box.
#[inline]
const fn default_tool_search_enabled() -> bool {
    true
}
573
/// Serde default: regex matching is the tool-search algorithm.
#[inline]
fn default_tool_search_algorithm() -> String {
    String::from("regex")
}
578
/// Serde default: non-core tools are deferred unless configured otherwise.
#[inline]
const fn default_defer_by_default() -> bool {
    true
}
583
/// Serde default: cap tool search at 5 results.
#[inline]
const fn default_max_results() -> u32 {
    5
}
588
/// Serde default: extended thinking is auto-enabled.
#[inline]
const fn default_extended_thinking_enabled() -> bool {
    true
}
593
/// Serde default: beta header value for interleaved thinking.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    String::from("interleaved-thinking-2025-05-14")
}
598
/// Serde default: 31,999-token thinking budget (the documented default for
/// 64K-output models; see `AnthropicConfig::interleaved_thinking_budget_tokens`).
#[inline]
const fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}
603
/// Serde default: `type` value that switches interleaved thinking on.
#[inline]
fn default_interleaved_thinking_type() -> String {
    String::from("enabled")
}
608
/// Serde default for `AnthropicConfig::effort`.
// `reasoning::XHIGH` is a crate-level constant — presumably the "xhigh"
// effort string; confirm in `crate::constants::reasoning`.
#[inline]
fn default_effort() -> String {
    reasoning::XHIGH.to_string()
}
613
/// Serde default: beta header value for Anthropic task budgets.
#[inline]
fn default_task_budget_beta() -> String {
    String::from("task-budgets-2026-03-13")
}
618
619#[cfg(test)]
620mod tests {
621    use super::{
622        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellDomainSecret,
623        OpenAIHostedShellEnvironment, OpenAIHostedShellNetworkPolicy,
624        OpenAIHostedShellNetworkPolicyType, OpenAIHostedSkill, OpenAIHostedSkillVersion,
625        OpenAIManualCompactionConfig, OpenAIServiceTier,
626    };
627
628    #[test]
629    fn openai_config_defaults_to_websocket_mode_disabled() {
630        let config = OpenAIConfig::default();
631        assert!(!config.websocket_mode);
632        assert_eq!(config.responses_store, None);
633        assert!(config.responses_include.is_empty());
634        assert_eq!(config.service_tier, None);
635        assert_eq!(
636            config.manual_compaction,
637            OpenAIManualCompactionConfig::default()
638        );
639        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
640        assert!(config.tool_search.enabled);
641        assert!(config.tool_search.defer_by_default);
642        assert!(config.tool_search.always_available_tools.is_empty());
643    }
644
645    #[test]
646    fn anthropic_config_defaults_native_memory_to_disabled() {
647        let config = AnthropicConfig::default();
648        assert!(!config.memory.enabled);
649    }
650
651    #[test]
652    fn anthropic_config_parses_native_memory_opt_in() {
653        let parsed: AnthropicConfig =
654            toml::from_str("[memory]\nenabled = true").expect("config should parse");
655        assert!(parsed.memory.enabled);
656    }
657
658    #[test]
659    fn openai_config_parses_websocket_mode_opt_in() {
660        let parsed: OpenAIConfig =
661            toml::from_str("websocket_mode = true").expect("config should parse");
662        assert!(parsed.websocket_mode);
663        assert_eq!(parsed.responses_store, None);
664        assert!(parsed.responses_include.is_empty());
665        assert_eq!(parsed.service_tier, None);
666        assert_eq!(
667            parsed.manual_compaction,
668            OpenAIManualCompactionConfig::default()
669        );
670        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
671        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
672    }
673
674    #[test]
675    fn openai_config_parses_responses_options() {
676        let parsed: OpenAIConfig = toml::from_str(
677            r#"
678responses_store = false
679responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
680"#,
681        )
682        .expect("config should parse");
683        assert_eq!(parsed.responses_store, Some(false));
684        assert_eq!(
685            parsed.responses_include,
686            vec![
687                "reasoning.encrypted_content".to_string(),
688                "output_text.annotations".to_string()
689            ]
690        );
691        assert_eq!(parsed.service_tier, None);
692        assert_eq!(
693            parsed.manual_compaction,
694            OpenAIManualCompactionConfig::default()
695        );
696        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
697    }
698
699    #[test]
700    fn openai_config_parses_manual_compaction_defaults() {
701        let parsed: OpenAIConfig = toml::from_str(
702            r#"
703[manual_compaction]
704instructions = "Preserve the bug reproduction steps."
705"#,
706        )
707        .expect("config should parse");
708
709        assert_eq!(
710            parsed.manual_compaction.instructions.as_deref(),
711            Some("Preserve the bug reproduction steps.")
712        );
713    }
714
715    #[test]
716    fn openai_config_parses_service_tier() {
717        let parsed: OpenAIConfig =
718            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
719        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
720    }
721
722    #[test]
723    fn openai_config_parses_flex_service_tier() {
724        let parsed: OpenAIConfig =
725            toml::from_str(r#"service_tier = "flex""#).expect("config should parse");
726        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Flex));
727    }
728
729    #[test]
730    fn openai_config_parses_hosted_shell() {
731        let parsed: OpenAIConfig = toml::from_str(
732            r#"
733[hosted_shell]
734enabled = true
735environment = "container_auto"
736file_ids = ["file_123"]
737
738[[hosted_shell.skills]]
739type = "skill_reference"
740skill_id = "skill_123"
741"#,
742        )
743        .expect("config should parse");
744
745        assert!(parsed.hosted_shell.enabled);
746        assert_eq!(
747            parsed.hosted_shell.environment,
748            OpenAIHostedShellEnvironment::ContainerAuto
749        );
750        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
751        assert_eq!(
752            parsed.hosted_shell.skills,
753            vec![OpenAIHostedSkill::SkillReference {
754                skill_id: "skill_123".to_string(),
755                version: OpenAIHostedSkillVersion::default(),
756            }]
757        );
758    }
759
760    #[test]
761    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
762        let parsed: OpenAIConfig = toml::from_str(
763            r#"
764[hosted_shell]
765enabled = true
766
767[[hosted_shell.skills]]
768type = "skill_reference"
769skill_id = "skill_123"
770version = 2
771
772[[hosted_shell.skills]]
773type = "inline"
774bundle_b64 = "UEsFBgAAAAAAAA=="
775sha256 = "deadbeef"
776"#,
777        )
778        .expect("config should parse");
779
780        assert_eq!(
781            parsed.hosted_shell.skills,
782            vec![
783                OpenAIHostedSkill::SkillReference {
784                    skill_id: "skill_123".to_string(),
785                    version: OpenAIHostedSkillVersion::Number(2),
786                },
787                OpenAIHostedSkill::Inline {
788                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
789                    sha256: Some("deadbeef".to_string()),
790                },
791            ]
792        );
793    }
794
795    #[test]
796    fn openai_config_parses_hosted_shell_network_policy() {
797        let parsed: OpenAIConfig = toml::from_str(
798            r#"
799[hosted_shell]
800enabled = true
801
802[hosted_shell.network_policy]
803type = "allowlist"
804allowed_domains = ["httpbin.org"]
805
806[[hosted_shell.network_policy.domain_secrets]]
807domain = "httpbin.org"
808name = "API_KEY"
809value = "debug-secret-123"
810"#,
811        )
812        .expect("config should parse");
813
814        assert_eq!(
815            parsed.hosted_shell.network_policy,
816            OpenAIHostedShellNetworkPolicy {
817                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
818                allowed_domains: vec!["httpbin.org".to_string()],
819                domain_secrets: vec![OpenAIHostedShellDomainSecret {
820                    domain: "httpbin.org".to_string(),
821                    name: "API_KEY".to_string(),
822                    value: "debug-secret-123".to_string(),
823                }],
824            }
825        );
826    }
827
828    #[test]
829    fn openai_config_parses_tool_search() {
830        let parsed: OpenAIConfig = toml::from_str(
831            r#"
832[tool_search]
833enabled = false
834defer_by_default = false
835always_available_tools = ["unified_search", "custom_tool"]
836"#,
837        )
838        .expect("config should parse");
839
840        assert!(!parsed.tool_search.enabled);
841        assert!(!parsed.tool_search.defer_by_default);
842        assert_eq!(
843            parsed.tool_search.always_available_tools,
844            vec!["unified_search".to_string(), "custom_tool".to_string()]
845        );
846    }
847
848    #[test]
849    fn anthropic_tool_search_defaults_to_enabled() {
850        let config = AnthropicConfig::default();
851
852        assert!(config.tool_search.enabled);
853        assert!(config.tool_search.defer_by_default);
854        assert_eq!(config.tool_search.algorithm, "regex");
855        assert!(config.tool_search.always_available_tools.is_empty());
856    }
857
858    #[test]
859    fn hosted_shell_container_reference_requires_non_empty_container_id() {
860        let config = OpenAIHostedShellConfig {
861            enabled: true,
862            environment: OpenAIHostedShellEnvironment::ContainerReference,
863            container_id: Some("   ".to_string()),
864            file_ids: Vec::new(),
865            skills: Vec::new(),
866            network_policy: OpenAIHostedShellNetworkPolicy::default(),
867        };
868
869        assert!(!config.has_valid_reference_target());
870        assert!(config.container_id_ref().is_none());
871    }
872
873    #[test]
874    fn hosted_shell_reports_invalid_skill_reference_mounts() {
875        let config = OpenAIHostedShellConfig {
876            enabled: true,
877            environment: OpenAIHostedShellEnvironment::ContainerAuto,
878            container_id: None,
879            file_ids: Vec::new(),
880            skills: vec![OpenAIHostedSkill::SkillReference {
881                skill_id: "   ".to_string(),
882                version: OpenAIHostedSkillVersion::default(),
883            }],
884            network_policy: OpenAIHostedShellNetworkPolicy::default(),
885        };
886
887        let message = config
888            .first_invalid_skill_message()
889            .expect("invalid mount should be reported");
890
891        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
892        assert!(!config.has_valid_skill_mounts());
893        assert!(!config.is_valid_for_runtime());
894    }
895
896    #[test]
897    fn hosted_shell_ignores_skill_validation_for_container_reference() {
898        let config = OpenAIHostedShellConfig {
899            enabled: true,
900            environment: OpenAIHostedShellEnvironment::ContainerReference,
901            container_id: Some("cntr_123".to_string()),
902            file_ids: Vec::new(),
903            skills: vec![OpenAIHostedSkill::Inline {
904                bundle_b64: "   ".to_string(),
905                sha256: None,
906            }],
907            network_policy: OpenAIHostedShellNetworkPolicy::default(),
908        };
909
910        assert!(config.first_invalid_skill_message().is_none());
911        assert!(config.has_valid_skill_mounts());
912        assert!(config.is_valid_for_runtime());
913    }
914
915    #[test]
916    fn hosted_shell_reports_invalid_allowlist_without_domains() {
917        let config = OpenAIHostedShellConfig {
918            enabled: true,
919            environment: OpenAIHostedShellEnvironment::ContainerAuto,
920            container_id: None,
921            file_ids: Vec::new(),
922            skills: Vec::new(),
923            network_policy: OpenAIHostedShellNetworkPolicy {
924                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
925                allowed_domains: Vec::new(),
926                domain_secrets: Vec::new(),
927            },
928        };
929
930        let message = config
931            .first_invalid_network_policy_message()
932            .expect("invalid network policy should be reported");
933
934        assert!(message.contains("network_policy.allowed_domains"));
935        assert!(!config.has_valid_network_policy());
936        assert!(!config.is_valid_for_runtime());
937    }
938
939    #[test]
940    fn hosted_shell_reports_domain_secret_outside_allowlist() {
941        let config = OpenAIHostedShellConfig {
942            enabled: true,
943            environment: OpenAIHostedShellEnvironment::ContainerAuto,
944            container_id: None,
945            file_ids: Vec::new(),
946            skills: Vec::new(),
947            network_policy: OpenAIHostedShellNetworkPolicy {
948                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
949                allowed_domains: vec!["pypi.org".to_string()],
950                domain_secrets: vec![OpenAIHostedShellDomainSecret {
951                    domain: "httpbin.org".to_string(),
952                    name: "API_KEY".to_string(),
953                    value: "secret".to_string(),
954                }],
955            },
956        };
957
958        let message = config
959            .first_invalid_network_policy_message()
960            .expect("invalid domain secret should be reported");
961
962        assert!(message.contains("domain_secrets[0].domain"));
963        assert!(!config.has_valid_network_policy());
964    }
965
966    #[test]
967    fn hosted_shell_ignores_network_policy_validation_for_container_reference() {
968        let config = OpenAIHostedShellConfig {
969            enabled: true,
970            environment: OpenAIHostedShellEnvironment::ContainerReference,
971            container_id: Some("cntr_123".to_string()),
972            file_ids: Vec::new(),
973            skills: Vec::new(),
974            network_policy: OpenAIHostedShellNetworkPolicy {
975                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
976                allowed_domains: Vec::new(),
977                domain_secrets: Vec::new(),
978            },
979        };
980
981        assert!(config.first_invalid_network_policy_message().is_none());
982        assert!(config.has_valid_network_policy());
983    }
984}