Skip to main content

vtcode_config/core/
provider.rs

1use serde::{Deserialize, Serialize};
2
3/// Native OpenAI service tier selection.
4#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
5#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
6#[serde(rename_all = "lowercase")]
7pub enum OpenAIServiceTier {
8    Flex,
9    Priority,
10}
11
12impl OpenAIServiceTier {
13    pub const fn as_str(self) -> &'static str {
14        match self {
15            Self::Flex => "flex",
16            Self::Priority => "priority",
17        }
18    }
19}
20
21/// How VT Code should provision OpenAI hosted shell environments.
22#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
23#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
24#[serde(rename_all = "snake_case")]
25pub enum OpenAIHostedShellEnvironment {
26    #[default]
27    ContainerAuto,
28    ContainerReference,
29}
30
31impl OpenAIHostedShellEnvironment {
32    pub const fn as_str(self) -> &'static str {
33        match self {
34            Self::ContainerAuto => "container_auto",
35            Self::ContainerReference => "container_reference",
36        }
37    }
38}
39
40impl OpenAIHostedShellEnvironment {
41    pub const fn uses_container_reference(self) -> bool {
42        matches!(self, Self::ContainerReference)
43    }
44}
45
46/// Hosted shell network access policy.
47#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
48#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
49#[serde(rename_all = "snake_case")]
50pub enum OpenAIHostedShellNetworkPolicyType {
51    #[default]
52    Disabled,
53    Allowlist,
54}
55
56impl OpenAIHostedShellNetworkPolicyType {
57    pub const fn as_str(self) -> &'static str {
58        match self {
59            Self::Disabled => "disabled",
60            Self::Allowlist => "allowlist",
61        }
62    }
63}
64
65/// Per-domain secret injected by the OpenAI hosted shell runtime.
66#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
67#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
68pub struct OpenAIHostedShellDomainSecret {
69    pub domain: String,
70    pub name: String,
71    pub value: String,
72}
73
74impl OpenAIHostedShellDomainSecret {
75    pub fn validation_error(&self, index: usize) -> Option<String> {
76        let base = format!("provider.openai.hosted_shell.network_policy.domain_secrets[{index}]");
77
78        if self.domain.trim().is_empty() {
79            return Some(format!("`{base}.domain` must not be empty when set."));
80        }
81        if self.name.trim().is_empty() {
82            return Some(format!("`{base}.name` must not be empty when set."));
83        }
84        if self.value.trim().is_empty() {
85            return Some(format!("`{base}.value` must not be empty when set."));
86        }
87
88        None
89    }
90}
91
92/// Request-scoped network policy for OpenAI hosted shell environments.
93#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
94#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
95pub struct OpenAIHostedShellNetworkPolicy {
96    #[serde(rename = "type", default)]
97    pub policy_type: OpenAIHostedShellNetworkPolicyType,
98
99    #[serde(default, skip_serializing_if = "Vec::is_empty")]
100    pub allowed_domains: Vec<String>,
101
102    #[serde(default, skip_serializing_if = "Vec::is_empty")]
103    pub domain_secrets: Vec<OpenAIHostedShellDomainSecret>,
104}
105
106impl OpenAIHostedShellNetworkPolicy {
107    pub const fn is_allowlist(&self) -> bool {
108        matches!(
109            self.policy_type,
110            OpenAIHostedShellNetworkPolicyType::Allowlist
111        )
112    }
113
114    pub fn first_invalid_message(&self) -> Option<String> {
115        match self.policy_type {
116            OpenAIHostedShellNetworkPolicyType::Disabled => {
117                if !self.allowed_domains.is_empty() || !self.domain_secrets.is_empty() {
118                    return Some(
119                        "`provider.openai.hosted_shell.network_policy.allowed_domains` and `provider.openai.hosted_shell.network_policy.domain_secrets` require `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
120                            .to_string(),
121                    );
122                }
123            }
124            OpenAIHostedShellNetworkPolicyType::Allowlist => {
125                if let Some(index) = self
126                    .allowed_domains
127                    .iter()
128                    .position(|value| value.trim().is_empty())
129                {
130                    return Some(format!(
131                        "`provider.openai.hosted_shell.network_policy.allowed_domains[{index}]` must not be empty when set."
132                    ));
133                }
134
135                if self.allowed_domains.is_empty() {
136                    return Some(
137                        "`provider.openai.hosted_shell.network_policy.allowed_domains` must include at least one domain when `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
138                            .to_string(),
139                    );
140                }
141
142                for (index, secret) in self.domain_secrets.iter().enumerate() {
143                    if let Some(message) = secret.validation_error(index) {
144                        return Some(message);
145                    }
146
147                    let secret_domain = secret.domain.trim();
148                    if !self
149                        .allowed_domains
150                        .iter()
151                        .any(|domain| domain.trim().eq_ignore_ascii_case(secret_domain))
152                    {
153                        return Some(format!(
154                            "`provider.openai.hosted_shell.network_policy.domain_secrets[{index}].domain` must also appear in `provider.openai.hosted_shell.network_policy.allowed_domains`."
155                        ));
156                    }
157                }
158            }
159        }
160
161        None
162    }
163}
164
165/// Reserved keyword values for hosted skill version selection.
166#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
167#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
168#[serde(rename_all = "lowercase")]
169pub enum OpenAIHostedSkillVersionKeyword {
170    #[default]
171    Latest,
172}
173
174/// Hosted skill version selector for OpenAI Responses hosted shell mounts.
175#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
176#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
177#[serde(untagged)]
178pub enum OpenAIHostedSkillVersion {
179    Latest(OpenAIHostedSkillVersionKeyword),
180    Number(u64),
181    String(String),
182}
183
184impl Default for OpenAIHostedSkillVersion {
185    fn default() -> Self {
186        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
187    }
188}
189
190impl OpenAIHostedSkillVersion {
191    pub fn validation_error(&self, field_path: &str) -> Option<String> {
192        match self {
193            Self::String(value) if value.trim().is_empty() => {
194                Some(format!("`{field_path}` must not be empty when set."))
195            }
196            _ => None,
197        }
198    }
199}
200
201/// Hosted skill reference mounted into an OpenAI hosted shell environment.
202#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
203#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
204#[serde(tag = "type", rename_all = "snake_case")]
205pub enum OpenAIHostedSkill {
206    /// Reference to a pre-registered hosted skill.
207    SkillReference {
208        skill_id: String,
209        #[serde(default)]
210        version: OpenAIHostedSkillVersion,
211    },
212    /// Inline base64 zip bundle.
213    Inline {
214        bundle_b64: String,
215        #[serde(skip_serializing_if = "Option::is_none")]
216        sha256: Option<String>,
217    },
218}
219
220impl OpenAIHostedSkill {
221    pub fn validation_error(&self, index: usize) -> Option<String> {
222        match self {
223            Self::SkillReference { skill_id, version } => {
224                let skill_id_path =
225                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
226                if skill_id.trim().is_empty() {
227                    return Some(format!(
228                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
229                    ));
230                }
231
232                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
233                version.validation_error(&version_path)
234            }
235            Self::Inline { bundle_b64, .. } => {
236                let bundle_path =
237                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
238                if bundle_b64.trim().is_empty() {
239                    return Some(format!(
240                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
241                    ));
242                }
243                None
244            }
245        }
246    }
247}
248
249/// OpenAI hosted shell configuration.
250#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
251#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
252pub struct OpenAIHostedShellConfig {
253    /// Enable OpenAI hosted shell instead of VT Code's local shell tool.
254    #[serde(default)]
255    pub enabled: bool,
256
257    /// Environment provisioning mode for hosted shell.
258    #[serde(default)]
259    pub environment: OpenAIHostedShellEnvironment,
260
261    /// Existing OpenAI container ID to reuse when `environment = "container_reference"`.
262    #[serde(default, skip_serializing_if = "Option::is_none")]
263    pub container_id: Option<String>,
264
265    /// File IDs to mount when using `container_auto`.
266    #[serde(default, skip_serializing_if = "Vec::is_empty")]
267    pub file_ids: Vec<String>,
268
269    /// Hosted skills to mount when using `container_auto`.
270    #[serde(default, skip_serializing_if = "Vec::is_empty")]
271    pub skills: Vec<OpenAIHostedSkill>,
272
273    /// Request-scoped network policy for `container_auto` hosted shells.
274    #[serde(default)]
275    pub network_policy: OpenAIHostedShellNetworkPolicy,
276}
277
278impl OpenAIHostedShellConfig {
279    pub fn container_id_ref(&self) -> Option<&str> {
280        self.container_id
281            .as_deref()
282            .map(str::trim)
283            .filter(|value| !value.is_empty())
284    }
285
286    pub const fn uses_container_reference(&self) -> bool {
287        self.environment.uses_container_reference()
288    }
289
290    pub fn first_invalid_skill_message(&self) -> Option<String> {
291        if self.uses_container_reference() {
292            return None;
293        }
294
295        self.skills
296            .iter()
297            .enumerate()
298            .find_map(|(index, skill)| skill.validation_error(index))
299    }
300
301    pub fn has_valid_skill_mounts(&self) -> bool {
302        self.first_invalid_skill_message().is_none()
303    }
304
305    pub fn first_invalid_network_policy_message(&self) -> Option<String> {
306        if self.uses_container_reference() {
307            return None;
308        }
309
310        self.network_policy.first_invalid_message()
311    }
312
313    pub fn has_valid_network_policy(&self) -> bool {
314        self.first_invalid_network_policy_message().is_none()
315    }
316
317    pub fn has_valid_reference_target(&self) -> bool {
318        !self.uses_container_reference() || self.container_id_ref().is_some()
319    }
320
321    pub fn is_valid_for_runtime(&self) -> bool {
322        self.has_valid_reference_target()
323            && self.has_valid_skill_mounts()
324            && self.has_valid_network_policy()
325    }
326}
327
328/// OpenAI hosted tool search configuration.
329#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
330#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
331pub struct OpenAIToolSearchConfig {
332    /// Enable hosted tool search for OpenAI Responses-compatible models.
333    #[serde(default = "default_tool_search_enabled")]
334    pub enabled: bool,
335
336    /// Automatically defer loading of all tools except the core always-on set.
337    #[serde(default = "default_defer_by_default")]
338    pub defer_by_default: bool,
339
340    /// Tool names that should never be deferred (always available).
341    #[serde(default)]
342    pub always_available_tools: Vec<String>,
343}
344
345impl Default for OpenAIToolSearchConfig {
346    fn default() -> Self {
347        Self {
348            enabled: default_tool_search_enabled(),
349            defer_by_default: default_defer_by_default(),
350            always_available_tools: Vec::new(),
351        }
352    }
353}
354
355/// OpenAI-specific provider configuration
356#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
357#[derive(Debug, Clone, Deserialize, Serialize, Default)]
358pub struct OpenAIConfig {
359    /// Enable Responses API WebSocket transport for non-streaming requests.
360    /// This is an opt-in path designed for long-running, tool-heavy workflows.
361    #[serde(default)]
362    pub websocket_mode: bool,
363
364    /// Optional Responses API `store` flag.
365    /// Set to `false` to avoid server-side storage when using Responses-compatible models.
366    #[serde(default, skip_serializing_if = "Option::is_none")]
367    pub responses_store: Option<bool>,
368
369    /// Optional Responses API `include` selectors.
370    /// Example: `["reasoning.encrypted_content"]` for encrypted reasoning continuity.
371    #[serde(default, skip_serializing_if = "Vec::is_empty")]
372    pub responses_include: Vec<String>,
373
374    /// Optional native OpenAI `service_tier` request parameter.
375    /// Leave unset to inherit the Project-level default service tier.
376    /// Options: "flex", "priority"
377    #[serde(default, skip_serializing_if = "Option::is_none")]
378    pub service_tier: Option<OpenAIServiceTier>,
379
380    /// Optional hosted shell configuration for OpenAI native Responses models.
381    #[serde(default)]
382    pub hosted_shell: OpenAIHostedShellConfig,
383
384    /// Hosted tool search configuration for OpenAI Responses-compatible models.
385    #[serde(default)]
386    pub tool_search: OpenAIToolSearchConfig,
387}
388
389/// Anthropic-specific provider configuration
390#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
391#[derive(Debug, Clone, Deserialize, Serialize)]
392pub struct AnthropicConfig {
393    /// DEPRECATED: Model name validation has been removed. The Anthropic API validates
394    /// model names directly, avoiding maintenance burden and allowing flexibility.
395    /// This field is kept for backward compatibility but has no effect.
396    #[deprecated(
397        since = "0.75.0",
398        note = "Model validation removed. API validates model names directly."
399    )]
400    #[serde(default)]
401    pub skip_model_validation: bool,
402
403    /// Enable extended thinking feature for Anthropic models
404    /// When enabled, Claude uses internal reasoning before responding, providing
405    /// enhanced reasoning capabilities for complex tasks.
406    /// Only supported by Claude 4, Claude 4.5, and Claude 3.7 Sonnet models.
407    /// Claude 4.6 uses adaptive thinking instead of extended thinking.
408    /// Note: Extended thinking is now auto-enabled by default (31,999 tokens).
409    /// Set MAX_THINKING_TOKENS=63999 environment variable for 2x budget on 64K models.
410    /// See: https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
411    #[serde(default = "default_extended_thinking_enabled")]
412    pub extended_thinking_enabled: bool,
413
414    /// Beta header for interleaved thinking feature
415    #[serde(default = "default_interleaved_thinking_beta")]
416    pub interleaved_thinking_beta: String,
417
418    /// Budget tokens for extended thinking (minimum: 1024, default: 31999)
419    /// On 64K output models (Opus 4.5, Sonnet 4.5, Haiku 4.5): default 31,999, max 63,999
420    /// On 32K output models (Opus 4): max 31,999
421    /// Use MAX_THINKING_TOKENS environment variable to override.
422    #[serde(default = "default_interleaved_thinking_budget_tokens")]
423    pub interleaved_thinking_budget_tokens: u32,
424
425    /// Type value for enabling interleaved thinking
426    #[serde(default = "default_interleaved_thinking_type")]
427    pub interleaved_thinking_type_enabled: String,
428
429    /// Tool search configuration for dynamic tool discovery (advanced-tool-use beta)
430    #[serde(default)]
431    pub tool_search: ToolSearchConfig,
432
433    /// Effort level for token usage (high, medium, low)
434    /// Controls how many tokens Claude uses when responding, trading off between
435    /// response thoroughness and token efficiency.
436    /// Supported by Claude Opus 4.5/4.6 (4.5 requires effort beta header)
437    #[serde(default = "default_effort")]
438    pub effort: String,
439
440    /// Enable token counting via the count_tokens endpoint
441    /// When enabled, the agent can estimate input token counts before making API calls
442    /// Useful for proactive management of rate limits and costs
443    #[serde(default = "default_count_tokens_enabled")]
444    pub count_tokens_enabled: bool,
445}
446
447#[allow(deprecated)]
448impl Default for AnthropicConfig {
449    fn default() -> Self {
450        Self {
451            skip_model_validation: false,
452            extended_thinking_enabled: default_extended_thinking_enabled(),
453            interleaved_thinking_beta: default_interleaved_thinking_beta(),
454            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
455            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
456            tool_search: ToolSearchConfig::default(),
457            effort: default_effort(),
458            count_tokens_enabled: default_count_tokens_enabled(),
459        }
460    }
461}
462
/// Token counting via `count_tokens` is opt-in.
#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}
467
468/// Configuration for Anthropic's tool search feature (advanced-tool-use beta)
469/// Enables dynamic tool discovery for large tool catalogs (up to 10k tools)
470#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
471#[derive(Debug, Clone, Deserialize, Serialize)]
472pub struct ToolSearchConfig {
473    /// Enable tool search feature (requires advanced-tool-use-2025-11-20 beta)
474    #[serde(default = "default_tool_search_enabled")]
475    pub enabled: bool,
476
477    /// Search algorithm: "regex" (Python regex patterns) or "bm25" (natural language)
478    #[serde(default = "default_tool_search_algorithm")]
479    pub algorithm: String,
480
481    /// Automatically defer loading of all tools except core tools
482    #[serde(default = "default_defer_by_default")]
483    pub defer_by_default: bool,
484
485    /// Maximum number of tool search results to return
486    #[serde(default = "default_max_results")]
487    pub max_results: u32,
488
489    /// Tool names that should never be deferred (always available)
490    #[serde(default)]
491    pub always_available_tools: Vec<String>,
492}
493
494impl Default for ToolSearchConfig {
495    fn default() -> Self {
496        Self {
497            enabled: default_tool_search_enabled(),
498            algorithm: default_tool_search_algorithm(),
499            defer_by_default: default_defer_by_default(),
500            max_results: default_max_results(),
501            always_available_tools: vec![],
502        }
503    }
504}
505
/// Tool search is on by default.
#[inline]
fn default_tool_search_enabled() -> bool {
    true
}
510
/// Default tool search algorithm ("regex"; the alternative is "bm25").
#[inline]
fn default_tool_search_algorithm() -> String {
    String::from("regex")
}
515
/// Tools are deferred by default when tool search is active.
#[inline]
fn default_defer_by_default() -> bool {
    true
}
520
/// Default cap on tool search results.
#[inline]
fn default_max_results() -> u32 {
    5
}
525
/// Extended thinking is auto-enabled by default.
#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}
530
/// Default beta header value for interleaved thinking.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    String::from("interleaved-thinking-2025-05-14")
}
535
/// Default thinking token budget (31,999 — see `AnthropicConfig` field docs).
#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}
540
/// Default `type` value sent to enable interleaved thinking.
#[inline]
fn default_interleaved_thinking_type() -> String {
    String::from("enabled")
}
545
/// Default effort level for token usage.
#[inline]
fn default_effort() -> String {
    String::from("low")
}
550
// Unit tests: TOML parsing round-trips for each config section, plus the
// hosted shell validation rules (container references, skill mounts, and
// network policy allowlists).
#[cfg(test)]
mod tests {
    use super::{
        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellDomainSecret,
        OpenAIHostedShellEnvironment, OpenAIHostedShellNetworkPolicy,
        OpenAIHostedShellNetworkPolicyType, OpenAIHostedSkill, OpenAIHostedSkillVersion,
        OpenAIServiceTier,
    };

    // --- Defaults and TOML deserialization of OpenAIConfig ---

    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
        assert_eq!(config.service_tier, None);
        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let parsed: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(parsed.websocket_mode);
        // Everything not mentioned in the TOML keeps its serde default.
        assert_eq!(parsed.responses_store, None);
        assert!(parsed.responses_include.is_empty());
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
    }

    #[test]
    fn openai_config_parses_responses_options() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#,
        )
        .expect("config should parse");
        assert_eq!(parsed.responses_store, Some(false));
        assert_eq!(
            parsed.responses_include,
            vec![
                "reasoning.encrypted_content".to_string(),
                "output_text.annotations".to_string()
            ]
        );
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
    }

    #[test]
    fn openai_config_parses_flex_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "flex""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Flex));
    }

    // --- Hosted shell TOML parsing, including untagged/tagged skill enums ---

    #[test]
    fn openai_config_parses_hosted_shell() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true
environment = "container_auto"
file_ids = ["file_123"]

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
"#,
        )
        .expect("config should parse");

        assert!(parsed.hosted_shell.enabled);
        assert_eq!(
            parsed.hosted_shell.environment,
            OpenAIHostedShellEnvironment::ContainerAuto
        );
        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
        // Omitted `version` falls back to the "latest" keyword default.
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![OpenAIHostedSkill::SkillReference {
                skill_id: "skill_123".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
version = 2

[[hosted_shell.skills]]
type = "inline"
bundle_b64 = "UEsFBgAAAAAAAA=="
sha256 = "deadbeef"
"#,
        )
        .expect("config should parse");

        // `version = 2` exercises the untagged Number variant.
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![
                OpenAIHostedSkill::SkillReference {
                    skill_id: "skill_123".to_string(),
                    version: OpenAIHostedSkillVersion::Number(2),
                },
                OpenAIHostedSkill::Inline {
                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
                    sha256: Some("deadbeef".to_string()),
                },
            ]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_network_policy() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[hosted_shell.network_policy]
type = "allowlist"
allowed_domains = ["httpbin.org"]

[[hosted_shell.network_policy.domain_secrets]]
domain = "httpbin.org"
name = "API_KEY"
value = "debug-secret-123"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.network_policy,
            OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["httpbin.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "debug-secret-123".to_string(),
                }],
            }
        );
    }

    #[test]
    fn openai_config_parses_tool_search() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[tool_search]
enabled = false
defer_by_default = false
always_available_tools = ["unified_search", "custom_tool"]
"#,
        )
        .expect("config should parse");

        assert!(!parsed.tool_search.enabled);
        assert!(!parsed.tool_search.defer_by_default);
        assert_eq!(
            parsed.tool_search.always_available_tools,
            vec!["unified_search".to_string(), "custom_tool".to_string()]
        );
    }

    #[test]
    fn anthropic_tool_search_defaults_to_enabled() {
        let config = AnthropicConfig::default();

        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert_eq!(config.tool_search.algorithm, "regex");
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    // --- Hosted shell validation rules ---

    #[test]
    fn hosted_shell_container_reference_requires_non_empty_container_id() {
        // A whitespace-only container_id counts as unset.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("   ".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        assert!(!config.has_valid_reference_target());
        assert!(config.container_id_ref().is_none());
    }

    #[test]
    fn hosted_shell_reports_invalid_skill_reference_mounts() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::SkillReference {
                skill_id: "   ".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        let message = config
            .first_invalid_skill_message()
            .expect("invalid mount should be reported");

        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
        assert!(!config.has_valid_skill_mounts());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_ignores_skill_validation_for_container_reference() {
        // Skill validation only applies to container_auto environments.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::Inline {
                bundle_b64: "   ".to_string(),
                sha256: None,
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        assert!(config.first_invalid_skill_message().is_none());
        assert!(config.has_valid_skill_mounts());
        assert!(config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_invalid_allowlist_without_domains() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid network policy should be reported");

        assert!(message.contains("network_policy.allowed_domains"));
        assert!(!config.has_valid_network_policy());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_domain_secret_outside_allowlist() {
        // The secret's domain is valid in isolation but not allowlisted.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["pypi.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "secret".to_string(),
                }],
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid domain secret should be reported");

        assert!(message.contains("domain_secrets[0].domain"));
        assert!(!config.has_valid_network_policy());
    }

    #[test]
    fn hosted_shell_ignores_network_policy_validation_for_container_reference() {
        // Network policy validation only applies to container_auto environments.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        assert!(config.first_invalid_network_policy_message().is_none());
        assert!(config.has_valid_network_policy());
    }
}