// vtcode_config/core/provider.rs — provider-specific configuration types.
1use serde::{Deserialize, Serialize};
2
/// Native OpenAI service tier selection.
///
/// Optional request parameter; when unset the Project-level default tier applies.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    /// Serialized as `"flex"` on the wire.
    Flex,
    /// Serialized as `"priority"` on the wire.
    Priority,
}
11
12impl OpenAIServiceTier {
13    pub const fn as_str(self) -> &'static str {
14        match self {
15            Self::Flex => "flex",
16            Self::Priority => "priority",
17        }
18    }
19
20    pub fn parse(value: &str) -> Option<Self> {
21        let normalized = value.trim();
22        if normalized.eq_ignore_ascii_case("flex") {
23            Some(Self::Flex)
24        } else if normalized.eq_ignore_ascii_case("priority") {
25            Some(Self::Priority)
26        } else {
27            None
28        }
29    }
30}
31
/// How VT Code should provision OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    /// Provision a fresh container automatically (default; `"container_auto"`).
    #[default]
    ContainerAuto,
    /// Reuse an existing container identified by `container_id` (`"container_reference"`).
    ContainerReference,
}
41
42impl OpenAIHostedShellEnvironment {
43    pub const fn as_str(self) -> &'static str {
44        match self {
45            Self::ContainerAuto => "container_auto",
46            Self::ContainerReference => "container_reference",
47        }
48    }
49}
50
51impl OpenAIHostedShellEnvironment {
52    pub const fn uses_container_reference(self) -> bool {
53        matches!(self, Self::ContainerReference)
54    }
55}
56
/// Hosted shell network access policy.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellNetworkPolicyType {
    /// No network access from the hosted shell (default).
    #[default]
    Disabled,
    /// Network access restricted to an explicit domain allowlist.
    Allowlist,
}
66
67impl OpenAIHostedShellNetworkPolicyType {
68    pub const fn as_str(self) -> &'static str {
69        match self {
70            Self::Disabled => "disabled",
71            Self::Allowlist => "allowlist",
72        }
73    }
74}
75
/// Per-domain secret injected by the OpenAI hosted shell runtime.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIHostedShellDomainSecret {
    /// Domain the secret applies to; must also appear in the policy's `allowed_domains`.
    pub domain: String,
    /// Name the secret is exposed under (e.g. `API_KEY`).
    pub name: String,
    /// Secret value injected at runtime.
    pub value: String,
}
84
85impl OpenAIHostedShellDomainSecret {
86    pub fn validation_error(&self, index: usize) -> Option<String> {
87        let base = format!("provider.openai.hosted_shell.network_policy.domain_secrets[{index}]");
88
89        if self.domain.trim().is_empty() {
90            return Some(format!("`{base}.domain` must not be empty when set."));
91        }
92        if self.name.trim().is_empty() {
93            return Some(format!("`{base}.name` must not be empty when set."));
94        }
95        if self.value.trim().is_empty() {
96            return Some(format!("`{base}.value` must not be empty when set."));
97        }
98
99        None
100    }
101}
102
/// Request-scoped network policy for OpenAI hosted shell environments.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellNetworkPolicy {
    /// Policy mode; serialized under the `type` key.
    #[serde(rename = "type", default)]
    pub policy_type: OpenAIHostedShellNetworkPolicyType,

    /// Domains reachable from the hosted shell; only valid with `type = "allowlist"`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub allowed_domains: Vec<String>,

    /// Secrets injected per allowed domain; only valid with `type = "allowlist"`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub domain_secrets: Vec<OpenAIHostedShellDomainSecret>,
}
116
117impl OpenAIHostedShellNetworkPolicy {
118    pub const fn is_allowlist(&self) -> bool {
119        matches!(
120            self.policy_type,
121            OpenAIHostedShellNetworkPolicyType::Allowlist
122        )
123    }
124
125    pub fn first_invalid_message(&self) -> Option<String> {
126        match self.policy_type {
127            OpenAIHostedShellNetworkPolicyType::Disabled => {
128                if !self.allowed_domains.is_empty() || !self.domain_secrets.is_empty() {
129                    return Some(
130                        "`provider.openai.hosted_shell.network_policy.allowed_domains` and `provider.openai.hosted_shell.network_policy.domain_secrets` require `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
131                            .to_string(),
132                    );
133                }
134            }
135            OpenAIHostedShellNetworkPolicyType::Allowlist => {
136                if let Some(index) = self
137                    .allowed_domains
138                    .iter()
139                    .position(|value| value.trim().is_empty())
140                {
141                    return Some(format!(
142                        "`provider.openai.hosted_shell.network_policy.allowed_domains[{index}]` must not be empty when set."
143                    ));
144                }
145
146                if self.allowed_domains.is_empty() {
147                    return Some(
148                        "`provider.openai.hosted_shell.network_policy.allowed_domains` must include at least one domain when `provider.openai.hosted_shell.network_policy.type = \"allowlist\"`."
149                            .to_string(),
150                    );
151                }
152
153                for (index, secret) in self.domain_secrets.iter().enumerate() {
154                    if let Some(message) = secret.validation_error(index) {
155                        return Some(message);
156                    }
157
158                    let secret_domain = secret.domain.trim();
159                    if !self
160                        .allowed_domains
161                        .iter()
162                        .any(|domain| domain.trim().eq_ignore_ascii_case(secret_domain))
163                    {
164                        return Some(format!(
165                            "`provider.openai.hosted_shell.network_policy.domain_secrets[{index}].domain` must also appear in `provider.openai.hosted_shell.network_policy.allowed_domains`."
166                        ));
167                    }
168                }
169            }
170        }
171
172        None
173    }
174}
175
/// Reserved keyword values for hosted skill version selection.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    /// Track the most recent skill version (serialized as `"latest"`).
    #[default]
    Latest,
}
184
/// Hosted skill version selector for OpenAI Responses hosted shell mounts.
///
/// NOTE: with `#[serde(untagged)]`, variants are tried in declaration order,
/// so the reserved `latest` keyword is matched before free-form strings.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    /// The reserved `latest` keyword (default).
    Latest(OpenAIHostedSkillVersionKeyword),
    /// A numeric version pin (e.g. `version = 2`).
    Number(u64),
    /// A free-form version string; must be non-blank.
    String(String),
}
194
195impl Default for OpenAIHostedSkillVersion {
196    fn default() -> Self {
197        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
198    }
199}
200
201impl OpenAIHostedSkillVersion {
202    pub fn validation_error(&self, field_path: &str) -> Option<String> {
203        match self {
204            Self::String(value) if value.trim().is_empty() => {
205                Some(format!("`{field_path}` must not be empty when set."))
206            }
207            _ => None,
208        }
209    }
210}
211
/// Hosted skill reference mounted into an OpenAI hosted shell environment.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    /// Reference to a pre-registered hosted skill (`type = "skill_reference"`).
    SkillReference {
        /// Identifier of the registered skill; must be non-blank.
        skill_id: String,
        /// Version selector; defaults to `latest`.
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    /// Inline base64 zip bundle (`type = "inline"`).
    Inline {
        /// Base64-encoded zip payload; must be non-blank.
        bundle_b64: String,
        /// Optional integrity checksum of the bundle.
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}
230
231impl OpenAIHostedSkill {
232    pub fn validation_error(&self, index: usize) -> Option<String> {
233        match self {
234            Self::SkillReference { skill_id, version } => {
235                let skill_id_path =
236                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
237                if skill_id.trim().is_empty() {
238                    return Some(format!(
239                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
240                    ));
241                }
242
243                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
244                version.validation_error(&version_path)
245            }
246            Self::Inline { bundle_b64, .. } => {
247                let bundle_path =
248                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
249                if bundle_b64.trim().is_empty() {
250                    return Some(format!(
251                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
252                    ));
253                }
254                None
255            }
256        }
257    }
258}
259
/// OpenAI hosted shell configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
    /// Enable OpenAI hosted shell instead of VT Code's local shell tool.
    #[serde(default)]
    pub enabled: bool,

    /// Environment provisioning mode for hosted shell.
    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

    /// Existing OpenAI container ID to reuse when `environment = "container_reference"`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

    /// File IDs to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

    /// Hosted skills to mount when using `container_auto`.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,

    /// Request-scoped network policy for `container_auto` hosted shells.
    /// Ignored (not validated) in `container_reference` mode.
    #[serde(default)]
    pub network_policy: OpenAIHostedShellNetworkPolicy,
}
288
289impl OpenAIHostedShellConfig {
290    pub fn container_id_ref(&self) -> Option<&str> {
291        self.container_id
292            .as_deref()
293            .map(str::trim)
294            .filter(|value| !value.is_empty())
295    }
296
297    pub const fn uses_container_reference(&self) -> bool {
298        self.environment.uses_container_reference()
299    }
300
301    pub fn first_invalid_skill_message(&self) -> Option<String> {
302        if self.uses_container_reference() {
303            return None;
304        }
305
306        self.skills
307            .iter()
308            .enumerate()
309            .find_map(|(index, skill)| skill.validation_error(index))
310    }
311
312    pub fn has_valid_skill_mounts(&self) -> bool {
313        self.first_invalid_skill_message().is_none()
314    }
315
316    pub fn first_invalid_network_policy_message(&self) -> Option<String> {
317        if self.uses_container_reference() {
318            return None;
319        }
320
321        self.network_policy.first_invalid_message()
322    }
323
324    pub fn has_valid_network_policy(&self) -> bool {
325        self.first_invalid_network_policy_message().is_none()
326    }
327
328    pub fn has_valid_reference_target(&self) -> bool {
329        !self.uses_container_reference() || self.container_id_ref().is_some()
330    }
331
332    pub fn is_valid_for_runtime(&self) -> bool {
333        self.has_valid_reference_target()
334            && self.has_valid_skill_mounts()
335            && self.has_valid_network_policy()
336    }
337}
338
/// OpenAI hosted tool search configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIToolSearchConfig {
    /// Enable hosted tool search for OpenAI Responses-compatible models.
    /// Defaults to `true`.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Automatically defer loading of all tools except the core always-on set.
    /// Defaults to `true`.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Tool names that should never be deferred (always available).
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
355
356impl Default for OpenAIToolSearchConfig {
357    fn default() -> Self {
358        Self {
359            enabled: default_tool_search_enabled(),
360            defer_by_default: default_defer_by_default(),
361            always_available_tools: Vec::new(),
362        }
363    }
364}
365
/// Manual compaction defaults for the native OpenAI `/responses/compact` flow.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIManualCompactionConfig {
    /// Optional custom instructions appended to manual `/compact` requests.
    /// Omitted from serialized output when unset.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub instructions: Option<String>,
}
374
/// OpenAI-specific provider configuration
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    /// Enable Responses API WebSocket transport for non-streaming requests.
    /// This is an opt-in path designed for long-running, tool-heavy workflows.
    #[serde(default)]
    pub websocket_mode: bool,

    /// Optional Responses API `store` flag.
    /// Set to `false` to avoid server-side storage when using Responses-compatible models.
    /// `None` leaves the provider default in effect.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    /// Optional Responses API `include` selectors.
    /// Example: `["reasoning.encrypted_content"]` for encrypted reasoning continuity.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

    /// Optional native OpenAI `service_tier` request parameter.
    /// Leave unset to inherit the Project-level default service tier.
    /// Options: "flex", "priority"
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

    /// Manual `/compact` defaults for the native OpenAI standalone compaction endpoint.
    #[serde(default)]
    pub manual_compaction: OpenAIManualCompactionConfig,

    /// Optional hosted shell configuration for OpenAI native Responses models.
    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,

    /// Hosted tool search configuration for OpenAI Responses-compatible models.
    #[serde(default)]
    pub tool_search: OpenAIToolSearchConfig,
}
412
/// Anthropic-specific provider configuration
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    /// DEPRECATED: Model name validation has been removed. The Anthropic API validates
    /// model names directly, avoiding maintenance burden and allowing flexibility.
    /// This field is kept for backward compatibility but has no effect.
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    /// Enable extended thinking feature for Anthropic models
    /// When enabled, Claude uses internal reasoning before responding, providing
    /// enhanced reasoning capabilities for complex tasks.
    /// Only supported by Claude 4, Claude 4.5, and Claude 3.7 Sonnet models.
    /// Claude 4.6 uses adaptive thinking instead of extended thinking.
    /// Note: Extended thinking is now auto-enabled by default (31,999 tokens).
    /// Set MAX_THINKING_TOKENS=63999 environment variable for 2x budget on 64K models.
    /// See: https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    /// Beta header for interleaved thinking feature
    /// Defaults to `"interleaved-thinking-2025-05-14"`.
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    /// Budget tokens for extended thinking (minimum: 1024, default: 31999)
    /// On 64K output models (Opus 4.5, Sonnet 4.5, Haiku 4.5): default 31,999, max 63,999
    /// On 32K output models (Opus 4): max 31,999
    /// Use MAX_THINKING_TOKENS environment variable to override.
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    /// Type value for enabling interleaved thinking
    /// Defaults to `"enabled"`.
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    /// Tool search configuration for dynamic tool discovery (advanced-tool-use beta)
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    /// Native Anthropic memory tool configuration.
    #[serde(default)]
    pub memory: AnthropicMemoryConfig,

    /// Effort level for token usage (high, medium, low)
    /// Controls how many tokens Claude uses when responding, trading off between
    /// response thoroughness and token efficiency.
    /// Supported by Claude Opus 4.5/4.6 (4.5 requires effort beta header)
    /// Defaults to `"low"`.
    #[serde(default = "default_effort")]
    pub effort: String,

    /// Enable token counting via the count_tokens endpoint
    /// When enabled, the agent can estimate input token counts before making API calls
    /// Useful for proactive management of rate limits and costs
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}
474
475#[allow(deprecated)]
476impl Default for AnthropicConfig {
477    fn default() -> Self {
478        Self {
479            skip_model_validation: false,
480            extended_thinking_enabled: default_extended_thinking_enabled(),
481            interleaved_thinking_beta: default_interleaved_thinking_beta(),
482            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
483            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
484            tool_search: ToolSearchConfig::default(),
485            memory: AnthropicMemoryConfig::default(),
486            effort: default_effort(),
487            count_tokens_enabled: default_count_tokens_enabled(),
488        }
489    }
490}
491
/// Native Anthropic memory tool configuration.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Default, Deserialize, Serialize, PartialEq, Eq)]
pub struct AnthropicMemoryConfig {
    /// Opt-in flag for the native memory tool (defaults to `false`).
    #[serde(default)]
    pub enabled: bool,
}
498
/// Serde default for `AnthropicConfig::count_tokens_enabled` (disabled).
#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}
503
/// Configuration for Anthropic's tool search feature (advanced-tool-use beta)
/// Enables dynamic tool discovery for large tool catalogs (up to 10k tools)
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    /// Enable tool search feature (requires advanced-tool-use-2025-11-20 beta)
    /// Defaults to `true`.
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

    /// Search algorithm: "regex" (Python regex patterns) or "bm25" (natural language)
    /// Defaults to `"regex"`.
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    /// Automatically defer loading of all tools except core tools
    /// Defaults to `true`.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Maximum number of tool search results to return
    /// Defaults to `5`.
    #[serde(default = "default_max_results")]
    pub max_results: u32,

    /// Tool names that should never be deferred (always available)
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
529
530impl Default for ToolSearchConfig {
531    fn default() -> Self {
532        Self {
533            enabled: default_tool_search_enabled(),
534            algorithm: default_tool_search_algorithm(),
535            defer_by_default: default_defer_by_default(),
536            max_results: default_max_results(),
537            always_available_tools: vec![],
538        }
539    }
540}
541
/// Serde default: tool search is enabled.
#[inline]
fn default_tool_search_enabled() -> bool {
    true
}

/// Serde default: `ToolSearchConfig::algorithm` is `"regex"`.
#[inline]
fn default_tool_search_algorithm() -> String {
    "regex".to_string()
}

/// Serde default: tools are deferred unless listed as always available.
#[inline]
fn default_defer_by_default() -> bool {
    true
}

/// Serde default: `ToolSearchConfig::max_results` is 5.
#[inline]
fn default_max_results() -> u32 {
    5
}

/// Serde default: extended thinking is enabled.
#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}

/// Serde default: beta header value for interleaved thinking.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    "interleaved-thinking-2025-05-14".to_string()
}

/// Serde default: thinking budget of 31,999 tokens.
#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}

/// Serde default: interleaved thinking type value.
#[inline]
fn default_interleaved_thinking_type() -> String {
    "enabled".to_string()
}

/// Serde default: `AnthropicConfig::effort` is `"low"`.
#[inline]
fn default_effort() -> String {
    "low".to_string()
}
586
#[cfg(test)]
mod tests {
    use super::{
        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellDomainSecret,
        OpenAIHostedShellEnvironment, OpenAIHostedShellNetworkPolicy,
        OpenAIHostedShellNetworkPolicyType, OpenAIHostedSkill, OpenAIHostedSkillVersion,
        OpenAIManualCompactionConfig, OpenAIServiceTier,
    };

    // --- Default-value tests -------------------------------------------------

    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
        assert_eq!(config.service_tier, None);
        assert_eq!(
            config.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    #[test]
    fn anthropic_config_defaults_native_memory_to_disabled() {
        let config = AnthropicConfig::default();
        assert!(!config.memory.enabled);
    }

    // --- TOML deserialization tests -----------------------------------------

    #[test]
    fn anthropic_config_parses_native_memory_opt_in() {
        let parsed: AnthropicConfig =
            toml::from_str("[memory]\nenabled = true").expect("config should parse");
        assert!(parsed.memory.enabled);
    }

    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let parsed: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(parsed.websocket_mode);
        // All other fields keep their defaults.
        assert_eq!(parsed.responses_store, None);
        assert!(parsed.responses_include.is_empty());
        assert_eq!(parsed.service_tier, None);
        assert_eq!(
            parsed.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
    }

    #[test]
    fn openai_config_parses_responses_options() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#,
        )
        .expect("config should parse");
        assert_eq!(parsed.responses_store, Some(false));
        assert_eq!(
            parsed.responses_include,
            vec![
                "reasoning.encrypted_content".to_string(),
                "output_text.annotations".to_string()
            ]
        );
        assert_eq!(parsed.service_tier, None);
        assert_eq!(
            parsed.manual_compaction,
            OpenAIManualCompactionConfig::default()
        );
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_manual_compaction_defaults() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[manual_compaction]
instructions = "Preserve the bug reproduction steps."
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.manual_compaction.instructions.as_deref(),
            Some("Preserve the bug reproduction steps.")
        );
    }

    #[test]
    fn openai_config_parses_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
    }

    #[test]
    fn openai_config_parses_flex_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "flex""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Flex));
    }

    #[test]
    fn openai_config_parses_hosted_shell() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true
environment = "container_auto"
file_ids = ["file_123"]

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
"#,
        )
        .expect("config should parse");

        assert!(parsed.hosted_shell.enabled);
        assert_eq!(
            parsed.hosted_shell.environment,
            OpenAIHostedShellEnvironment::ContainerAuto
        );
        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![OpenAIHostedSkill::SkillReference {
                skill_id: "skill_123".to_string(),
                // Omitted version falls back to the `latest` keyword.
                version: OpenAIHostedSkillVersion::default(),
            }]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
version = 2

[[hosted_shell.skills]]
type = "inline"
bundle_b64 = "UEsFBgAAAAAAAA=="
sha256 = "deadbeef"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.skills,
            vec![
                OpenAIHostedSkill::SkillReference {
                    skill_id: "skill_123".to_string(),
                    version: OpenAIHostedSkillVersion::Number(2),
                },
                OpenAIHostedSkill::Inline {
                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
                    sha256: Some("deadbeef".to_string()),
                },
            ]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_network_policy() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[hosted_shell.network_policy]
type = "allowlist"
allowed_domains = ["httpbin.org"]

[[hosted_shell.network_policy.domain_secrets]]
domain = "httpbin.org"
name = "API_KEY"
value = "debug-secret-123"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.network_policy,
            OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["httpbin.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "debug-secret-123".to_string(),
                }],
            }
        );
    }

    #[test]
    fn openai_config_parses_tool_search() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[tool_search]
enabled = false
defer_by_default = false
always_available_tools = ["unified_search", "custom_tool"]
"#,
        )
        .expect("config should parse");

        assert!(!parsed.tool_search.enabled);
        assert!(!parsed.tool_search.defer_by_default);
        assert_eq!(
            parsed.tool_search.always_available_tools,
            vec!["unified_search".to_string(), "custom_tool".to_string()]
        );
    }

    #[test]
    fn anthropic_tool_search_defaults_to_enabled() {
        let config = AnthropicConfig::default();

        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert_eq!(config.tool_search.algorithm, "regex");
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    // --- Hosted shell validation tests --------------------------------------

    #[test]
    fn hosted_shell_container_reference_requires_non_empty_container_id() {
        // A whitespace-only container ID must not count as a valid reference.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("   ".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        assert!(!config.has_valid_reference_target());
        assert!(config.container_id_ref().is_none());
    }

    #[test]
    fn hosted_shell_reports_invalid_skill_reference_mounts() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::SkillReference {
                skill_id: "   ".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        let message = config
            .first_invalid_skill_message()
            .expect("invalid mount should be reported");

        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
        assert!(!config.has_valid_skill_mounts());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_ignores_skill_validation_for_container_reference() {
        // In container_reference mode, skill mounts are not validated.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::Inline {
                bundle_b64: "   ".to_string(),
                sha256: None,
            }],
            network_policy: OpenAIHostedShellNetworkPolicy::default(),
        };

        assert!(config.first_invalid_skill_message().is_none());
        assert!(config.has_valid_skill_mounts());
        assert!(config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_invalid_allowlist_without_domains() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid network policy should be reported");

        assert!(message.contains("network_policy.allowed_domains"));
        assert!(!config.has_valid_network_policy());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_reports_domain_secret_outside_allowlist() {
        // A secret for a domain not present in allowed_domains is rejected.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: vec!["pypi.org".to_string()],
                domain_secrets: vec![OpenAIHostedShellDomainSecret {
                    domain: "httpbin.org".to_string(),
                    name: "API_KEY".to_string(),
                    value: "secret".to_string(),
                }],
            },
        };

        let message = config
            .first_invalid_network_policy_message()
            .expect("invalid domain secret should be reported");

        assert!(message.contains("domain_secrets[0].domain"));
        assert!(!config.has_valid_network_policy());
    }

    #[test]
    fn hosted_shell_ignores_network_policy_validation_for_container_reference() {
        // In container_reference mode, the network policy is not validated.
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
            network_policy: OpenAIHostedShellNetworkPolicy {
                policy_type: OpenAIHostedShellNetworkPolicyType::Allowlist,
                allowed_domains: Vec::new(),
                domain_secrets: Vec::new(),
            },
        };

        assert!(config.first_invalid_network_policy_message().is_none());
        assert!(config.has_valid_network_policy());
    }
}