use serde::{Deserialize, Serialize};

#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIServiceTier {
    Priority,
}

impl OpenAIServiceTier {
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Priority => "priority",
        }
    }
}

#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum OpenAIHostedShellEnvironment {
    #[default]
    ContainerAuto,
    ContainerReference,
}

impl OpenAIHostedShellEnvironment {
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::ContainerAuto => "container_auto",
            Self::ContainerReference => "container_reference",
        }
    }

    pub const fn uses_container_reference(self) -> bool {
        matches!(self, Self::ContainerReference)
    }
}

#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize, Serialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum OpenAIHostedSkillVersionKeyword {
    #[default]
    Latest,
}

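/// Version selector for a hosted skill reference.
///
/// The enum is `#[serde(untagged)]`, so variants are tried in declaration
/// order when deserializing: the literal string `"latest"` matches the
/// keyword variant, an integer matches `Number`, and any other string falls
/// through to `String`. A minimal sketch of the three accepted TOML forms
/// (alternatives shown together for illustration only; values mirror the
/// tests at the bottom of this file):
///
/// ```toml
/// version = "latest"   # keyword form; also the default when omitted
/// version = 2          # pinned numeric form
/// version = "v1.2.0"   # free-form string; must not be empty or whitespace
/// ```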
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(untagged)]
pub enum OpenAIHostedSkillVersion {
    Latest(OpenAIHostedSkillVersionKeyword),
    Number(u64),
    String(String),
}

impl Default for OpenAIHostedSkillVersion {
    fn default() -> Self {
        Self::Latest(OpenAIHostedSkillVersionKeyword::Latest)
    }
}

impl OpenAIHostedSkillVersion {
    pub fn validation_error(&self, field_path: &str) -> Option<String> {
        match self {
            Self::String(value) if value.trim().is_empty() => {
                Some(format!("`{field_path}` must not be empty when set."))
            }
            _ => None,
        }
    }
}

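/// A skill made available to the hosted shell, tagged by `type`.
///
/// Illustrative TOML sketch of the two variants, following the
/// array-of-tables shape exercised by the tests at the bottom of this file
/// (identifiers such as `skill_123` are placeholders):
///
/// ```toml
/// [[hosted_shell.skills]]
/// type = "skill_reference"
/// skill_id = "skill_123"
/// version = 2                        # optional; defaults to "latest"
///
/// [[hosted_shell.skills]]
/// type = "inline"
/// bundle_b64 = "UEsFBgAAAAAAAA=="    # base64 bundle; must not be empty
/// sha256 = "deadbeef"                # optional
/// ```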
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum OpenAIHostedSkill {
    SkillReference {
        skill_id: String,
        #[serde(default)]
        version: OpenAIHostedSkillVersion,
    },
    Inline {
        bundle_b64: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        sha256: Option<String>,
    },
}

impl OpenAIHostedSkill {
    pub fn validation_error(&self, index: usize) -> Option<String> {
        match self {
            Self::SkillReference { skill_id, version } => {
                let skill_id_path =
                    format!("provider.openai.hosted_shell.skills[{index}].skill_id");
                if skill_id.trim().is_empty() {
                    return Some(format!(
                        "`{skill_id_path}` must not be empty when `type = \"skill_reference\"`."
                    ));
                }

                let version_path = format!("provider.openai.hosted_shell.skills[{index}].version");
                version.validation_error(&version_path)
            }
            Self::Inline { bundle_b64, .. } => {
                let bundle_path =
                    format!("provider.openai.hosted_shell.skills[{index}].bundle_b64");
                if bundle_b64.trim().is_empty() {
                    return Some(format!(
                        "`{bundle_path}` must not be empty when `type = \"inline\"`."
                    ));
                }
                None
            }
        }
    }
}

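/// Configuration for the OpenAI hosted shell tool.
///
/// Illustrative TOML sketch, using the standalone `[hosted_shell]` table
/// shape exercised by the tests at the bottom of this file. When
/// `environment = "container_reference"`, `container_id` must be a non-empty
/// string for `has_valid_reference_target` to hold, and per-skill validation
/// is skipped in that mode.
///
/// ```toml
/// [hosted_shell]
/// enabled = true
/// environment = "container_auto"   # or "container_reference" + container_id
/// file_ids = ["file_123"]
/// ```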
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, Default)]
pub struct OpenAIHostedShellConfig {
    #[serde(default)]
    pub enabled: bool,

    #[serde(default)]
    pub environment: OpenAIHostedShellEnvironment,

    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub container_id: Option<String>,

    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub file_ids: Vec<String>,

    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub skills: Vec<OpenAIHostedSkill>,
}

impl OpenAIHostedShellConfig {
    pub fn container_id_ref(&self) -> Option<&str> {
        self.container_id
            .as_deref()
            .map(str::trim)
            .filter(|value| !value.is_empty())
    }

    pub const fn uses_container_reference(&self) -> bool {
        self.environment.uses_container_reference()
    }

    pub fn first_invalid_skill_message(&self) -> Option<String> {
        if self.uses_container_reference() {
            return None;
        }

        self.skills
            .iter()
            .enumerate()
            .find_map(|(index, skill)| skill.validation_error(index))
    }

    pub fn has_valid_skill_mounts(&self) -> bool {
        self.first_invalid_skill_message().is_none()
    }

    pub fn has_valid_reference_target(&self) -> bool {
        !self.uses_container_reference() || self.container_id_ref().is_some()
    }

    pub fn is_valid_for_runtime(&self) -> bool {
        self.has_valid_reference_target() && self.has_valid_skill_mounts()
    }
}

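/// OpenAI provider options.
///
/// Minimal TOML sketch; every key is optional and the values shown mirror
/// the tests at the bottom of this file:
///
/// ```toml
/// websocket_mode = true
/// responses_store = false
/// responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
/// service_tier = "priority"
///
/// [hosted_shell]
/// enabled = true
/// ```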
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    #[serde(default)]
    pub websocket_mode: bool,

    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,

    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<OpenAIServiceTier>,

    #[serde(default)]
    pub hosted_shell: OpenAIHostedShellConfig,
}

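/// Anthropic provider options.
///
/// Illustrative TOML sketch; omitted keys fall back to the `default_*`
/// helpers below (extended thinking on, `effort = "low"`, token counting
/// off), and the values shown simply restate those defaults:
///
/// ```toml
/// extended_thinking_enabled = true
/// interleaved_thinking_budget_tokens = 31999
/// effort = "low"
/// count_tokens_enabled = false
/// ```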
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    #[serde(default = "default_effort")]
    pub effort: String,

    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}

#[allow(deprecated)]
impl Default for AnthropicConfig {
    fn default() -> Self {
        Self {
            skip_model_validation: false,
            extended_thinking_enabled: default_extended_thinking_enabled(),
            interleaved_thinking_beta: default_interleaved_thinking_beta(),
            interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
            interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
            tool_search: ToolSearchConfig::default(),
            effort: default_effort(),
            count_tokens_enabled: default_count_tokens_enabled(),
        }
    }
}

#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}

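/// Tool search settings nested under `AnthropicConfig::tool_search`.
///
/// Illustrative TOML sketch; the values shown are the compiled-in defaults
/// from the helper functions below:
///
/// ```toml
/// [tool_search]
/// enabled = false
/// algorithm = "regex"
/// defer_by_default = true
/// max_results = 5
/// always_available_tools = []
/// ```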
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    #[serde(default)]
    pub enabled: bool,

    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    #[serde(default = "default_max_results")]
    pub max_results: u32,

    #[serde(default)]
    pub always_available_tools: Vec<String>,
}

impl Default for ToolSearchConfig {
    fn default() -> Self {
        Self {
            enabled: false,
            algorithm: default_tool_search_algorithm(),
            defer_by_default: default_defer_by_default(),
            max_results: default_max_results(),
            always_available_tools: vec![],
        }
    }
}

#[inline]
fn default_tool_search_algorithm() -> String {
    "regex".to_string()
}

#[inline]
fn default_defer_by_default() -> bool {
    true
}

#[inline]
fn default_max_results() -> u32 {
    5
}

#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}

#[inline]
fn default_interleaved_thinking_beta() -> String {
    "interleaved-thinking-2025-05-14".to_string()
}

#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31999
}

#[inline]
fn default_interleaved_thinking_type() -> String {
    "enabled".to_string()
}

#[inline]
fn default_effort() -> String {
    "low".to_string()
}

#[cfg(test)]
mod tests {
    use super::{
        OpenAIConfig, OpenAIHostedShellConfig, OpenAIHostedShellEnvironment, OpenAIHostedSkill,
        OpenAIHostedSkillVersion, OpenAIServiceTier,
    };

    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
        assert_eq!(config.service_tier, None);
        assert_eq!(config.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let parsed: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(parsed.websocket_mode);
        assert_eq!(parsed.responses_store, None);
        assert!(parsed.responses_include.is_empty());
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_responses_options() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#,
        )
        .expect("config should parse");
        assert_eq!(parsed.responses_store, Some(false));
        assert_eq!(
            parsed.responses_include,
            vec![
                "reasoning.encrypted_content".to_string(),
                "output_text.annotations".to_string()
            ]
        );
        assert_eq!(parsed.service_tier, None);
        assert_eq!(parsed.hosted_shell, OpenAIHostedShellConfig::default());
    }

    #[test]
    fn openai_config_parses_service_tier() {
        let parsed: OpenAIConfig =
            toml::from_str(r#"service_tier = "priority""#).expect("config should parse");
        assert_eq!(parsed.service_tier, Some(OpenAIServiceTier::Priority));
    }

    #[test]
    fn openai_config_parses_hosted_shell() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true
environment = "container_auto"
file_ids = ["file_123"]

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
"#,
        )
        .expect("config should parse");

        assert!(parsed.hosted_shell.enabled);
        assert_eq!(
            parsed.hosted_shell.environment,
            OpenAIHostedShellEnvironment::ContainerAuto
        );
        assert_eq!(parsed.hosted_shell.file_ids, vec!["file_123".to_string()]);
        assert_eq!(
            parsed.hosted_shell.skills,
            vec![OpenAIHostedSkill::SkillReference {
                skill_id: "skill_123".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }]
        );
    }

    #[test]
    fn openai_config_parses_hosted_shell_pinned_version_and_inline_bundle() {
        let parsed: OpenAIConfig = toml::from_str(
            r#"
[hosted_shell]
enabled = true

[[hosted_shell.skills]]
type = "skill_reference"
skill_id = "skill_123"
version = 2

[[hosted_shell.skills]]
type = "inline"
bundle_b64 = "UEsFBgAAAAAAAA=="
sha256 = "deadbeef"
"#,
        )
        .expect("config should parse");

        assert_eq!(
            parsed.hosted_shell.skills,
            vec![
                OpenAIHostedSkill::SkillReference {
                    skill_id: "skill_123".to_string(),
                    version: OpenAIHostedSkillVersion::Number(2),
                },
                OpenAIHostedSkill::Inline {
                    bundle_b64: "UEsFBgAAAAAAAA==".to_string(),
                    sha256: Some("deadbeef".to_string()),
                },
            ]
        );
    }

    #[test]
    fn hosted_shell_container_reference_requires_non_empty_container_id() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some(" ".to_string()),
            file_ids: Vec::new(),
            skills: Vec::new(),
        };

        assert!(!config.has_valid_reference_target());
        assert!(config.container_id_ref().is_none());
    }

    #[test]
    fn hosted_shell_reports_invalid_skill_reference_mounts() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerAuto,
            container_id: None,
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::SkillReference {
                skill_id: " ".to_string(),
                version: OpenAIHostedSkillVersion::default(),
            }],
        };

        let message = config
            .first_invalid_skill_message()
            .expect("invalid mount should be reported");

        assert!(message.contains("provider.openai.hosted_shell.skills[0].skill_id"));
        assert!(!config.has_valid_skill_mounts());
        assert!(!config.is_valid_for_runtime());
    }

    #[test]
    fn hosted_shell_ignores_skill_validation_for_container_reference() {
        let config = OpenAIHostedShellConfig {
            enabled: true,
            environment: OpenAIHostedShellEnvironment::ContainerReference,
            container_id: Some("cntr_123".to_string()),
            file_ids: Vec::new(),
            skills: vec![OpenAIHostedSkill::Inline {
                bundle_b64: " ".to_string(),
                sha256: None,
            }],
        };

        assert!(config.first_invalid_skill_message().is_none());
        assert!(config.has_valid_skill_mounts());
        assert!(config.is_valid_for_runtime());
    }
}