//! vtcode_config/core/provider.rs
//!
//! Provider-specific configuration types for OpenAI and Anthropic backends.
1use serde::{Deserialize, Serialize};
2
/// Native OpenAI service tier selection.
///
/// Serialized in lowercase (`"flex"` / `"priority"`). Leave the owning config
/// field unset to inherit the project-level default tier.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    /// Serialized as `"flex"`.
    Flex,
    /// Serialized as `"priority"`.
    Priority,
}
11
12impl OpenAIServiceTier {
13    pub const fn as_str(self) -> &'static str {
14        match self {
15            Self::Flex => "flex",
16            Self::Priority => "priority",
17        }
18    }
19
20    pub fn parse(value: &str) -> Option<Self> {
21        let normalized = value.trim();
22        if normalized.eq_ignore_ascii_case("flex") {
23            Some(Self::Flex)
24        } else if normalized.eq_ignore_ascii_case("priority") {
25            Some(Self::Priority)
26        } else {
27            None
28        }
29    }
30}
31
/// How VT Code should provision OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    /// Auto-provision a container (the default); serialized as
    /// `"container_auto"`.
    #[default]
    ContainerAuto,
    /// Reuse an existing container identified by `container_id`; serialized
    /// as `"container_reference"`.
    ContainerReference,
}
41
42impl OpenAIHostedShellEnvironment {
43    pub const fn as_str(self) -> &'static str {
44        match self {
45            Self::ContainerAuto => "container_auto",
46            Self::ContainerReference => "container_reference",
47        }
48    }
49}
50
51impl OpenAIHostedShellEnvironment {
52    pub const fn uses_container_reference(self) -> bool {
53        matches!(self, Self::ContainerReference)
54    }
55}
56
/// Hosted shell network access policy.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellNetworkPolicyType {
    /// No outbound network access (the default); serialized as `"disabled"`.
    #[default]
    Disabled,
    /// Access restricted to an explicit domain allowlist; serialized as
    /// `"allowlist"`.
    Allowlist,
}
66
67impl OpenAIHostedShellNetworkPolicyType {
68    pub const fn as_str(self) -> &'static str {
69        match self {
70            Self::Disabled => "disabled",
71            Self::Allowlist => "allowlist",
72        }
73    }
74}
75
/// Per-domain secret injected by the OpenAI hosted shell runtime.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIHostedShellDomainSecret {
    // Domain the secret applies to; validation requires it to also appear in
    // the enclosing policy's `allowed_domains` (compared case-insensitively).
    pub domain: String,
    // Name under which the secret is exposed; must be non-blank.
    pub name: String,
    // Secret value; must be non-blank.
    pub value: String,
}
84
85impl OpenAIHostedShellDomainSecret {
86    pub fn validation_error(&self, index: usize) -> Option<String> {
87        let base = format!("provider.openai.hosted_shell.network_policy.domain_secrets[{index}]");
88
89        if self.domain.trim().is_empty() {
90            return Some(format!("`{base}.domain` must not be empty when set."));
91        }
92        if self.name.trim().is_empty() {
93            return Some(format!("`{base}.name` must not be empty when set."));
94        }
95        if self.value.trim().is_empty() {
96            return Some(format!("`{base}.value` must not be empty when set."));
97        }
98
99        None
100    }
101}
102
/// Request-scoped network policy for OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellNetworkPolicy {
    // Policy kind; serialized under the key `type`. Defaults to `disabled`.
    #[serde(rename = "type", default)]
    pub policy_type: OpenAIHostedShellNetworkPolicyType,

    // Domains reachable from the hosted shell. Only meaningful (and required
    // non-empty) when `policy_type` is `allowlist`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub allowed_domains: Vec<String>,

    // Per-domain secrets; each secret's domain must also be allowlisted.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub domain_secrets: Vec<OpenAIHostedShellDomainSecret>,
}
116
impl OpenAIHostedShellNetworkPolicy {
    /// Returns `true` when the policy is the `allowlist` variant.
    pub const fn is_allowlist(&self) -> bool {
        matches!(
            self.policy_type,
            OpenAIHostedShellNetworkPolicyType::Allowlist
        )
    }

    /// Returns the first validation failure for this policy, or `None` when
    /// the policy is internally consistent.
    ///
    /// The check order is deliberate and determines which message is
    /// reported: blank allowlist entries before a missing allowlist, and
    /// per-secret field errors before the secret/allowlist cross-check.
    pub fn first_invalid_message(&self) -> Option<String> {
        match self.policy_type {
            OpenAIHostedShellNetworkPolicyType::Disabled => {
                // Domains/secrets are meaningless while disabled; reject them
                // explicitly instead of silently ignoring the configuration.
                if !self.allowed_domains.is_empty() || !self.domain_secrets.is_empty() {
                    return Some(
                        "`provider.openai.hosted_shell.network_policy.allowed_domains` and `provider.openai.hosted_shell.network_policy.domain_secrets` require `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
                            .to_string(),
                    );
                }
            }
            OpenAIHostedShellNetworkPolicyType::Allowlist => {
                // Any whitespace-only allowlist entry is rejected first.
                if let Some(index) = self
                    .allowed_domains
                    .iter()
                    .position(|value| value.trim().is_empty())
                {
                    return Some(format!(
                        "`provider.openai.hosted_shell.network_policy.allowed_domains[{index}]` must not be empty when set."
                    ));
                }

                // An allowlist policy with no domains is unusable.
                if self.allowed_domains.is_empty() {
                    return Some(
                        "`provider.openai.hosted_shell.network_policy.allowed_domains` must include at least one domain when `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
                            .to_string(),
                    );
                }

                for (index, secret) in self.domain_secrets.iter().enumerate() {
                    // Field-level blanks are reported before the cross-check.
                    if let Some(message) = secret.validation_error(index) {
                        return Some(message);
                    }

                    // Each secret's domain must match an allowlisted domain,
                    // compared after trimming and ignoring ASCII case.
                    let secret_domain = secret.domain.trim();
                    if !self
                        .allowed_domains
                        .iter()
                        .any(|domain| domain.trim().eq_ignore_ascii_case(secret_domain))
                    {
                        return Some(format!(
                            "`provider.openai.hosted_shell.network_policy.domain_secrets[{index}].domain` must also appear in `provider.openai.hosted_shell.network_policy.allowed_domains`."
                        ));
                    }
                }
            }
        }

        None
    }
}
175
/// Reserved keyword values for hosted skill version selection.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    /// Track the most recently published version; serialized as `"latest"`.
    #[default]
    Latest,
}
184
/// Hosted skill version selector for OpenAI Responses hosted shell mounts.
///
/// Untagged, so variant declaration order matters: the reserved `latest`
/// keyword is tried before free-form strings, ensuring `"latest"` parses as
/// the keyword variant rather than `String`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    /// Reserved keyword selector (currently only `"latest"`).
    Latest(OpenAIHostedSkillVersionKeyword),
    /// Numeric version pin.
    Number(u64),
    /// Free-form version label; must be non-blank when set.
    String(String),
}
194
195impl Default for OpenAIHostedSkillVersion {
196    fn default() -> Self {
197        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
198    }
199}
200
201impl OpenAIHostedSkillVersion {
202    pub fn validation_error(&self, field_path: &str) -> Option<String> {
203        match self {
204            Self::String(value) if value.trim().is_empty() => {
205                Some(format!("`{field_path}` must not be empty when set."))
206            }
207            _ => None,
208        }
209    }
210}
211
/// Hosted skill reference mounted into an OpenAI hosted shell environment.
///
/// Internally tagged on `type` (`"skill_reference"` / `"inline"`).
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    /// Reference to a pre-registered hosted skill.
    SkillReference {
        // Identifier of the registered skill; must be non-blank.
        skill_id: String,
        // Version selector; defaults to the `latest` keyword when omitted.
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    /// Inline base64 zip bundle.
    Inline {
        // Base64-encoded zip payload; must be non-blank.
        bundle_b64: String,
        // Optional integrity digest for the bundle.
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}
230
231impl OpenAIHostedSkill {
232    pub fn validation_error(&self, index: usize) -> Option<String> {
233        match self {
234            Self::SkillReference { skill_id, version } => {
235                let skill_id_path =
236                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
237                if skill_id.trim().is_empty() {
238                    return Some(format!(
239                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
240                    ));
241                }
242
243                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
244                version.validation_error(&version_path)
245            }
246            Self::Inline { bundle_b64, .. } => {
247                let bundle_path =
248                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
249                if bundle_b64.trim().is_empty() {
250                    return Some(format!(
251                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
252                    ));
253                }
254                None
255            }
256        }
257    }
258}
259
/// OpenAI hosted shell configuration.
///
/// Validation helpers on this type only apply skill-mount and network-policy
/// checks to `container_auto` environments; `container_reference` instead
/// requires a non-blank `container_id`.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
    /// Enable OpenAI hosted shell instead of VT Code's local shell tool.
    #[serde(default)]
    pub enabled: bool,

    /// Environment provisioning mode for hosted shell.
    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

    /// Existing OpenAI container ID to reuse when `environment = "container_reference"`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

    /// File IDs to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

    /// Hosted skills to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,

    /// Request-scoped network policy for `container_auto` hosted shells.
    #[serde(default)]
    pub network_policy: OpenAIHostedShellNetworkPolicy,
}
288
289impl OpenAIHostedShellConfig {
290    pub fn container_id_ref(&self) -> Option<&str> {
291        self.container_id
292            .as_deref()
293            .map(str::trim)
294            .filter(|value| !value.is_empty())
295    }
296
297    pub const fn uses_container_reference(&self) -> bool {
298        self.environment.uses_container_reference()
299    }
300
301    pub fn first_invalid_skill_message(&self) -> Option<String> {
302        if self.uses_container_reference() {
303            return None;
304        }
305
306        self.skills
307            .iter()
308            .enumerate()
309            .find_map(|(index, skill)| skill.validation_error(index))
310    }
311
312    pub fn has_valid_skill_mounts(&self) -> bool {
313        self.first_invalid_skill_message().is_none()
314    }
315
316    pub fn first_invalid_network_policy_message(&self) -> Option<String> {
317        if self.uses_container_reference() {
318            return None;
319        }
320
321        self.network_policy.first_invalid_message()
322    }
323
324    pub fn has_valid_network_policy(&self) -> bool {
325        self.first_invalid_network_policy_message().is_none()
326    }
327
328    pub fn has_valid_reference_target(&self) -> bool {
329        !self.uses_container_reference() || self.container_id_ref().is_some()
330    }
331
332    pub fn is_valid_for_runtime(&self) -> bool {
333        self.has_valid_reference_target()
334            && self.has_valid_skill_mounts()
335            && self.has_valid_network_policy()
336    }
337}
338
/// OpenAI hosted tool search configuration.
///
/// The `Default` impl mirrors the serde field defaults, so programmatic
/// construction matches a config file with the section omitted.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIToolSearchConfig {
    /// Enable hosted tool search for OpenAI Responses-compatible models.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Automatically defer loading of all tools except the core always-on set.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Tool names that should never be deferred (always available).
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
355
356impl Default for OpenAIToolSearchConfig {
357    fn default() -> Self {
358        Self {
359            enabled: default_tool_search_enabled(),
360            defer_by_default: default_defer_by_default(),
361            always_available_tools: Vec::new(),
362        }
363    }
364}
365
/// Manual compaction defaults for the native OpenAI `/responses/compact` flow.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIManualCompactionConfig {
    /// Optional custom instructions appended to manual `/compact` requests.
    /// `None` (the default) means no extra instructions are sent.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
}
374
/// OpenAI-specific provider configuration
///
/// Every field carries a serde default, so any subset of keys may appear in
/// the config file; omitted keys fall back to the documented defaults.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    /// Enable Responses API WebSocket transport for non-streaming requests.
    /// This is an opt-in path designed for long-running, tool-heavy workflows.
    #[serde(default)]
    pub websocket_mode: bool,

    /// Optional Responses API `store` flag.
    /// Set to `false` to avoid server-side storage when using Responses-compatible models.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    /// Optional Responses API `include` selectors.
    /// Example: `["reasoning.encrypted_content"]` for encrypted reasoning continuity.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

    /// Optional native OpenAI `service_tier` request parameter.
    /// Leave unset to inherit the Project-level default service tier.
    /// Options: "flex", "priority"
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

    /// Manual `/compact` defaults for the native OpenAI standalone compaction endpoint.
    #[serde(default)]
    pub manual_compaction: OpenAIManualCompactionConfig,

    /// Optional hosted shell configuration for OpenAI native Responses models.
    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,

    /// Hosted tool search configuration for OpenAI Responses-compatible models.
    #[serde(default)]
    pub tool_search: OpenAIToolSearchConfig,
}
412
/// Anthropic-specific provider configuration
///
/// Field defaults are supplied by the `default_*` helper functions below so
/// serde deserialization and `Default` construction stay in agreement.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    /// DEPRECATED: Model name validation has been removed. The Anthropic API validates
    /// model names directly, avoiding maintenance burden and allowing flexibility.
    /// This field is kept for backward compatibility but has no effect.
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    /// Enable extended thinking feature for Anthropic models
    /// When enabled, Claude uses internal reasoning before responding, providing
    /// enhanced reasoning capabilities for complex tasks.
    /// Only supported by Claude 4, Claude 4.5, and Claude 3.7 Sonnet models.
    /// Claude 4.6 uses adaptive thinking instead of extended thinking.
    /// Note: Extended thinking is now auto-enabled by default (31,999 tokens).
    /// Set MAX_THINKING_TOKENS=63999 environment variable for 2x budget on 64K models.
    /// See: https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    /// Beta header for interleaved thinking feature
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    /// Budget tokens for extended thinking (minimum: 1024, default: 31999)
    /// On 64K output models (Opus 4.5, Sonnet 4.5, Haiku 4.5): default 31,999, max 63,999
    /// On 32K output models (Opus 4): max 31,999
    /// Use MAX_THINKING_TOKENS environment variable to override.
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    /// Type value for enabling interleaved thinking
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    /// Tool search configuration for dynamic tool discovery (advanced-tool-use beta)
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    /// Effort level for token usage (high, medium, low)
    /// Controls how many tokens Claude uses when responding, trading off between
    /// response thoroughness and token efficiency.
    /// Supported by Claude Opus 4.5/4.6 (4.5 requires effort beta header)
    #[serde(default = "default_effort")]
    pub effort: String,

    /// Enable token counting via the count_tokens endpoint
    /// When enabled, the agent can estimate input token counts before making API calls
    /// Useful for proactive management of rate limits and costs
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}
470
471#[allow(deprecated)]
472impl Default for AnthropicConfig {
473    fn default() -> Self {
474        Self {
475            skip_model_validation: false,
476            extended_thinking_enabled: default_extended_thinking_enabled(),
477            interleaved_thinking_beta: default_interleaved_thinking_beta(),
478            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
479            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
480            tool_search: ToolSearchConfig::default(),
481            effort: default_effort(),
482            count_tokens_enabled: default_count_tokens_enabled(),
483        }
484    }
485}
486
// Token counting is opt-in; disabled unless explicitly configured.
#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}
491
/// Configuration for Anthropic's tool search feature (advanced-tool-use beta)
/// Enables dynamic tool discovery for large tool catalogs (up to 10k tools)
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    /// Enable tool search feature (requires advanced-tool-use-2025-11-20 beta)
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Search algorithm: "regex" (Python regex patterns) or "bm25" (natural language)
    /// Defaults to "regex".
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    /// Automatically defer loading of all tools except core tools
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Maximum number of tool search results to return (default: 5)
    #[serde(default = "default_max_results")]
    pub max_results: u32,

    /// Tool names that should never be deferred (always available)
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
517
518impl Default for ToolSearchConfig {
519    fn default() -> Self {
520        Self {
521            enabled: default_tool_search_enabled(),
522            algorithm: default_tool_search_algorithm(),
523            defer_by_default: default_defer_by_default(),
524            max_results: default_max_results(),
525            always_available_tools: vec![],
526        }
527    }
528}
529
// Tool search defaults to on; shared by the OpenAI and Anthropic configs.
#[inline]
fn default_tool_search_enabled() -> bool {
    true
}
534
// Regex matching is the default tool search algorithm.
#[inline]
fn default_tool_search_algorithm() -> String {
    String::from("regex")
}
539
// Non-core tools are deferred by default when tool search is enabled.
#[inline]
fn default_defer_by_default() -> bool {
    true
}
544
// Default cap on the number of tool search results returned.
#[inline]
fn default_max_results() -> u32 {
    5
}
549
// Extended thinking is auto-enabled by default (see AnthropicConfig docs).
#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}
554
// Beta header value that enables interleaved thinking.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    String::from("interleaved-thinking-2025-05-14")
}
559
// Default extended-thinking budget: 31,999 tokens (see AnthropicConfig docs).
#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}
564
// Sentinel `type` value signalling that interleaved thinking is enabled.
#[inline]
fn default_interleaved_thinking_type() -> String {
    String::from("enabled")
}
569
// Default effort level favors token efficiency over thoroughness.
#[inline]
fn default_effort() -> String {
    String::from("low")
}
574
#[cfg(test)]
mod tests {
    //! Unit tests covering serde parsing defaults and the hosted shell
    //! validation helpers.
    use super::{
        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellDomainSecret,
        OpenAIHostedShellEnvironment, OpenAIHostedShellNetworkPolicy,
        OpenAIHostedShellNetworkPolicyType, OpenAIHostedSkill, OpenAIHostedSkillVersion,
        OpenAIManualCompactionConfig, OpenAIServiceTier,
    };

    // --- OpenAIConfig defaults and TOML parsing ---

    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
        assert_eq!(config.service_tier, None);
        assert_eq!(
            config.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let parsed: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(parsed.websocket_mode);
        // All other fields must retain their serde defaults.
        assert_eq!(parsed.responses_store, None);
        assert!(parsed.responses_include.is_empty());
        assert_eq!(parsed.service_tier, None);
        assert_eq!(
            parsed.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
    }

    #[test]
    fn openai_config_parses_responses_options() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#,
        )
        .expect("config should parse");
        assert_eq!(parsed.responses_store, Some(false));
        assert_eq!(
            parsed.responses_include,
            vec![
                "reasoning.encrypted_content".to_string(),
                "output_text.annotations".to_string()
            ]
        );
        assert_eq!(parsed.service_tier, None);
        assert_eq!(
            parsed.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_manual_compaction_defaults() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[manual_compaction]
instructions = "Preserve the bug reproduction steps."
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.manual_compaction.instructions.as_deref(),
            Some("Preserve the bug reproduction steps.")
        );
    }

    #[test]
    fn openai_config_parses_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
    }

    #[test]
    fn openai_config_parses_flex_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "flex""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Flex));
    }

    // --- Hosted shell TOML parsing ---

    #[test]
    fn openai_config_parses_hosted_shell() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true
environment = "container_auto"
file_ids = ["file_123"]

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
"#,
        )
        .expect("config should parse");

        assert!(parsed.hosted_shell.enabled);
        assert_eq!(
            parsed.hosted_shell.environment,
            OpenAIHostedShellEnvironment::ContainerAuto
        );
        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
        // Omitted `version` falls back to the `latest` keyword default.
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![OpenAIHostedSkill::SkillReference {
                skill_id: "skill_123".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
version = 2

[[hosted_shell.skills]]
type = "inline"
bundle_b64 = "UEsFBgAAAAAAAA=="
sha256 = "deadbeef"
"#,
        )
        .expect("config should parse");

        // The untagged version selector must resolve an integer to `Number`.
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![
                OpenAIHostedSkill::SkillReference {
                    skill_id: "skill_123".to_string(),
                    version: OpenAIHostedSkillVersion::Number(2),
                },
                OpenAIHostedSkill::Inline {
                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
                    sha256: Some("deadbeef".to_string()),
                },
            ]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_network_policy() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[hosted_shell.network_policy]
type = "allowlist"
allowed_domains = ["httpbin.org"]

[[hosted_shell.network_policy.domain_secrets]]
domain = "httpbin.org"
name = "API_KEY"
value = "debug-secret-123"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.network_policy,
            OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["httpbin.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "debug-secret-123".to_string(),
                }],
            }
        );
    }

    #[test]
    fn openai_config_parses_tool_search() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[tool_search]
enabled = false
defer_by_default = false
always_available_tools = ["unified_search", "custom_tool"]
"#,
        )
        .expect("config should parse");

        assert!(!parsed.tool_search.enabled);
        assert!(!parsed.tool_search.defer_by_default);
        assert_eq!(
            parsed.tool_search.always_available_tools,
            vec!["unified_search".to_string(), "custom_tool".to_string()]
        );
    }

    // --- Anthropic defaults ---

    #[test]
    fn anthropic_tool_search_defaults_to_enabled() {
        let config = AnthropicConfig::default();

        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert_eq!(config.tool_search.algorithm, "regex");
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    // --- Hosted shell runtime validation helpers ---

    #[test]
    fn hosted_shell_container_reference_requires_non_empty_container_id() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("   ".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        // A whitespace-only ID must be treated the same as an absent one.
        assert!(!config.has_valid_reference_target());
        assert!(config.container_id_ref().is_none());
    }

    #[test]
    fn hosted_shell_reports_invalid_skill_reference_mounts() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::SkillReference {
                skill_id: "   ".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        let message = config
            .first_invalid_skill_message()
            .expect("invalid mount should be reported");

        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
        assert!(!config.has_valid_skill_mounts());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_ignores_skill_validation_for_container_reference() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::Inline {
                bundle_b64: "   ".to_string(),
                sha256: None,
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        // Skill mounts are not provisioned in reference mode, so even an
        // invalid inline bundle must not block the configuration.
        assert!(config.first_invalid_skill_message().is_none());
        assert!(config.has_valid_skill_mounts());
        assert!(config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_invalid_allowlist_without_domains() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid network policy should be reported");

        assert!(message.contains("network_policy.allowed_domains"));
        assert!(!config.has_valid_network_policy());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_domain_secret_outside_allowlist() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["pypi.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "secret".to_string(),
                }],
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid domain secret should be reported");

        assert!(message.contains("domain_secrets[0].domain"));
        assert!(!config.has_valid_network_policy());
    }

    #[test]
    fn hosted_shell_ignores_network_policy_validation_for_container_reference() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        // Network policy only applies to auto-provisioned containers.
        assert!(config.first_invalid_network_policy_message().is_none());
        assert!(config.has_valid_network_policy());
    }
}