use serde::{Deserialize, Serialize};

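/// Service tier requested from OpenAI; only the `priority` tier is currently modeled.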
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    Priority,
}

impl OpenAIServiceTier {
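    /// Returns the wire-format string for this tier.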
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Priority => "priority",
        }
    }
}

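/// Execution environment for the hosted shell: an auto-provisioned container or a reference to an existing container.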
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    #[default]
    ContainerAuto,
    ContainerReference,
}

impl OpenAIHostedShellEnvironment {
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::ContainerAuto => "container_auto",
            Self::ContainerReference => "container_reference",
        }
    }

    pub const fn uses_container_reference(self) -> bool {
        matches!(self, Self::ContainerReference)
    }
}

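/// Keyword form of a hosted-skill version; `latest` is the only keyword.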
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    #[default]
    Latest,
}

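/// Version selector for a hosted skill: the `latest` keyword, a numeric version, or a version string.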
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    Latest(OpenAIHostedSkillVersionKeyword),
    Number(u64),
    String(String),
}

impl Default for OpenAIHostedSkillVersion {
    fn default() -> Self {
        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
    }
}

impl OpenAIHostedSkillVersion {
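    /// Returns an error message when a string version is empty or whitespace-only;
    /// `field_path` names the offending config key.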
    pub fn validation_error(&self, field_path: &str) -> Option<String> {
        match self {
            Self::String(value) if value.trim().is_empty() => {
                Some(format!("`{field_path}` must not be empty when set."))
            }
            _ => None,
        }
    }
}

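/// A skill mounted into the hosted shell, either referenced by ID or supplied inline as a base64 bundle.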
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    SkillReference {
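        /// Identifier of the skill to mount.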
        skill_id: String,
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    Inline {
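        /// Base64-encoded skill bundle.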
        bundle_b64: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}

impl OpenAIHostedSkill {
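    /// Validates the skill mount at position `index`, returning a message that names the invalid config key.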
    pub fn validation_error(&self, index: usize) -> Option<String> {
        match self {
            Self::SkillReference { skill_id, version } => {
                let skill_id_path =
                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
                if skill_id.trim().is_empty() {
                    return Some(format!(
                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
                    ));
                }

                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
                version.validation_error(&version_path)
            }
            Self::Inline { bundle_b64, .. } => {
                let bundle_path =
                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
                if bundle_b64.trim().is_empty() {
                    return Some(format!(
                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
                    ));
                }
                None
            }
        }
    }
}

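/// Configuration for the OpenAI hosted shell tool.
///
/// Example TOML (mirroring the parsing tests below):
///
/// ```toml
/// [hosted_shell]
/// enabled = true
/// environment = "container_auto"
/// file_ids = ["file_123"]
/// ```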
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
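    /// Whether the hosted shell tool is enabled.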
    #[serde(default)]
    pub enabled: bool,

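    /// The execution environment for the shell; see [`OpenAIHostedShellEnvironment`].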
    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

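    /// ID of the existing container to use when `environment = "container_reference"`.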
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

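    /// File IDs to make available in the container.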
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

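    /// Skills to mount into the container.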
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,
}

impl OpenAIHostedShellConfig {
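    /// Returns the trimmed container ID, treating empty or whitespace-only values as unset.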
    pub fn container_id_ref(&self) -> Option<&str> {
        self.container_id
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
    }

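    /// Whether this config targets an existing container instead of an auto-provisioned one.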
    pub const fn uses_container_reference(&self) -> bool {
        self.environment.uses_container_reference()
    }

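    /// Returns the first invalid skill-mount message, if any; skill validation is
    /// skipped when an existing container is referenced.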
    pub fn first_invalid_skill_message(&self) -> Option<String> {
        if self.uses_container_reference() {
            return None;
        }

        self.skills
            .iter()
            .enumerate()
            .find_map(|(index, skill)| skill.validation_error(index))
    }

    pub fn has_valid_skill_mounts(&self) -> bool {
        self.first_invalid_skill_message().is_none()
    }

    pub fn has_valid_reference_target(&self) -> bool {
        !self.uses_container_reference() || self.container_id_ref().is_some()
    }

    pub fn is_valid_for_runtime(&self) -> bool {
        self.has_valid_reference_target() && self.has_valid_skill_mounts()
    }
}

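/// Tool search configuration for the OpenAI provider.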
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub struct OpenAIToolSearchConfig {
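    /// Whether tool search is enabled (defaults to `true`).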
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

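    /// Whether tools are deferred by default (defaults to `true`).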
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

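    /// Tools that are always available regardless of deferral.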
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}

impl Default for OpenAIToolSearchConfig {
    fn default() -> Self {
        Self {
            enabled: default_tool_search_enabled(),
            defer_by_default: default_defer_by_default(),
            always_available_tools: Vec::new(),
        }
    }
}

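/// OpenAI provider configuration.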
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
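    /// Opt-in websocket mode (disabled by default).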
    #[serde(default)]
    pub websocket_mode: bool,

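    /// Optional override for the Responses API `store` behavior; `None` leaves the API default.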
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

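    /// Extra `include` entries for Responses API requests, e.g. `"reasoning.encrypted_content"`.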
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

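    /// Optional service tier, e.g. `"priority"`.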
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

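    /// Hosted shell tool configuration.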
    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,

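    /// Tool search configuration.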
    #[serde(default)]
    pub tool_search: OpenAIToolSearchConfig,
}

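/// Anthropic provider configuration.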
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

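    /// Whether extended thinking is enabled (defaults to `true`).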
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

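    /// Beta identifier used to enable interleaved thinking.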
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

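    /// Token budget for interleaved thinking (defaults to 31999).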
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

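    /// Thinking `type` value sent when enabled (defaults to `"enabled"`).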
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

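    /// Tool search configuration.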
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

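    /// Effort level (defaults to `"low"`).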
    #[serde(default = "default_effort")]
    pub effort: String,

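    /// Whether token counting is enabled (defaults to `false`).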
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}

#[allow(deprecated)]
impl Default for AnthropicConfig {
    fn default() -> Self {
        Self {
            skip_model_validation: false,
            extended_thinking_enabled: default_extended_thinking_enabled(),
            interleaved_thinking_beta: default_interleaved_thinking_beta(),
            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
            tool_search: ToolSearchConfig::default(),
            effort: default_effort(),
            count_tokens_enabled: default_count_tokens_enabled(),
        }
    }
}

#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}

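/// Tool search configuration used by [`AnthropicConfig`].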
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
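    /// Whether tool search is enabled (defaults to `true`).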
    #[serde(default = "default_tool_search_enabled")]
    pub enabled: bool,

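    /// Search algorithm (defaults to `"regex"`).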
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

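    /// Whether tools are deferred by default (defaults to `true`).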
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

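    /// Maximum number of search results (defaults to `5`).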
    #[serde(default = "default_max_results")]
    pub max_results: u32,

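    /// Tools that are always available regardless of deferral.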
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}

impl Default for ToolSearchConfig {
    fn default() -> Self {
        Self {
            enabled: default_tool_search_enabled(),
            algorithm: default_tool_search_algorithm(),
            defer_by_default: default_defer_by_default(),
            max_results: default_max_results(),
            always_available_tools: vec![],
        }
    }
}

#[inline]
fn default_tool_search_enabled() -> bool {
    true
}

#[inline]
fn default_tool_search_algorithm() -> String {
    "regex".to_string()
}

#[inline]
fn default_defer_by_default() -> bool {
    true
}

#[inline]
fn default_max_results() -> u32 {
    5
}

#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}

#[inline]
fn default_interleaved_thinking_beta() -> String {
    "interleaved-thinking-2025-05-14".to_string()
}

#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}

#[inline]
fn default_interleaved_thinking_type() -> String {
    "enabled".to_string()
}

#[inline]
fn default_effort() -> String {
    "low".to_string()
}

#[cfg(test)]
mod tests {
    use super::{
        AnthropicConfig, OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellEnvironment,
        OpenAIHostedSkill, OpenAIHostedSkillVersion, OpenAIServiceTier,
    };

    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
        assert_eq!(config.service_tier, None);
        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let parsed: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(parsed.websocket_mode);
        assert_eq!(parsed.responses_store, None);
        assert!(parsed.responses_include.is_empty());
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
        assert_eq!(parsed.tool_search, super::OpenAIToolSearchConfig::default());
    }

    #[test]
    fn openai_config_parses_responses_options() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#,
        )
        .expect("config should parse");
        assert_eq!(parsed.responses_store, Some(false));
        assert_eq!(
            parsed.responses_include,
            vec![
                "reasoning.encrypted_content".to_string(),
                "output_text.annotations".to_string()
            ]
        );
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
    }

    #[test]
    fn openai_config_parses_hosted_shell() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true
environment = "container_auto"
file_ids = ["file_123"]

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
"#,
        )
        .expect("config should parse");

        assert!(parsed.hosted_shell.enabled);
        assert_eq!(
            parsed.hosted_shell.environment,
            OpenAIHostedShellEnvironment::ContainerAuto
        );
        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![OpenAIHostedSkill::SkillReference {
                skill_id: "skill_123".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
version = 2

[[hosted_shell.skills]]
type = "inline"
bundle_b64 = "UEsFBgAAAAAAAA=="
sha256 = "deadbeef"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.skills,
            vec![
                OpenAIHostedSkill::SkillReference {
                    skill_id: "skill_123".to_string(),
                    version: OpenAIHostedSkillVersion::Number(2),
                },
                OpenAIHostedSkill::Inline {
                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
                    sha256: Some("deadbeef".to_string()),
                },
            ]
        );
    }

    #[test]
    fn openai_config_parses_tool_search() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[tool_search]
enabled = false
defer_by_default = false
always_available_tools = ["unified_search", "custom_tool"]
"#,
        )
        .expect("config should parse");

        assert!(!parsed.tool_search.enabled);
        assert!(!parsed.tool_search.defer_by_default);
        assert_eq!(
            parsed.tool_search.always_available_tools,
            vec!["unified_search".to_string(), "custom_tool".to_string()]
        );
    }

    #[test]
    fn anthropic_tool_search_defaults_to_enabled() {
        let config = AnthropicConfig::default();

        assert!(config.tool_search.enabled);
        assert!(config.tool_search.defer_by_default);
        assert_eq!(config.tool_search.algorithm, "regex");
        assert!(config.tool_search.always_available_tools.is_empty());
    }

    #[test]
    fn hosted_shell_container_reference_requires_non_empty_container_id() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some(" ".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
        };

        assert!(!config.has_valid_reference_target());
        assert!(config.container_id_ref().is_none());
    }

    #[test]
    fn hosted_shell_reports_invalid_skill_reference_mounts() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::SkillReference {
                skill_id: " ".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }],
        };

        let message = config
            .first_invalid_skill_message()
            .expect("invalid mount should be reported");

        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
        assert!(!config.has_valid_skill_mounts());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_ignores_skill_validation_for_container_reference() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::Inline {
                bundle_b64: " ".to_string(),
                sha256: None,
            }],
        };

        assert!(config.first_invalid_skill_message().is_none());
        assert!(config.has_valid_skill_mounts());
        assert!(config.is_valid_for_runtime());
    }
}