// vtcode_config/core/provider.rs
use serde::{Deserialize, Serialize};
2
/// Provider-specific configuration for OpenAI.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct OpenAIConfig {
    /// Opt in to the websocket transport; `#[serde(default)]` makes this
    /// `false` when the key is absent from the config file.
    #[serde(default)]
    pub websocket_mode: bool,

    /// Optional `store` flag, presumably forwarded to the Responses API —
    /// confirm at the call site. Omitted from serialized output when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub responses_store: Option<bool>,

    /// Extra `include` entries for Responses API requests (e.g.
    /// `"reasoning.encrypted_content"`); omitted from output when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub responses_include: Vec<String>,
}
22
/// Provider-specific configuration for Anthropic.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct AnthropicConfig {
    /// Retained only so older config files still deserialize; reading or
    /// writing it triggers the deprecation warning below.
    #[deprecated(
        since = "0.75.0",
        note = "Model validation removed. API validates model names directly."
    )]
    #[serde(default)]
    pub skip_model_validation: bool,

    /// Whether extended thinking is enabled (defaults to `true`).
    #[serde(default = "default_extended_thinking_enabled")]
    pub extended_thinking_enabled: bool,

    /// Beta identifier used to enable interleaved thinking
    /// (defaults to `"interleaved-thinking-2025-05-14"`).
    #[serde(default = "default_interleaved_thinking_beta")]
    pub interleaved_thinking_beta: String,

    /// Token budget for interleaved thinking (defaults to `31999`).
    #[serde(default = "default_interleaved_thinking_budget_tokens")]
    pub interleaved_thinking_budget_tokens: u32,

    /// Value for the thinking `type` field (defaults to `"enabled"`).
    // NOTE(review): the name ends in `_enabled` but the type is a String
    // holding a mode value — confirm consumers expect a string here.
    #[serde(default = "default_interleaved_thinking_type")]
    pub interleaved_thinking_type_enabled: String,

    /// Nested tool-search configuration; see [`ToolSearchConfig`].
    #[serde(default)]
    pub tool_search: ToolSearchConfig,

    /// Reasoning effort level (defaults to `"low"`).
    #[serde(default = "default_effort")]
    pub effort: String,

    /// Opt-in token counting (defaults to `false`).
    // NOTE(review): presumably gates use of Anthropic's count-tokens
    // endpoint — confirm against the caller.
    #[serde(default = "default_count_tokens_enabled")]
    pub count_tokens_enabled: bool,
}
80
81#[allow(deprecated)]
82impl Default for AnthropicConfig {
83 fn default() -> Self {
84 Self {
85 skip_model_validation: false,
86 extended_thinking_enabled: default_extended_thinking_enabled(),
87 interleaved_thinking_beta: default_interleaved_thinking_beta(),
88 interleaved_thinking_budget_tokens: default_interleaved_thinking_budget_tokens(),
89 interleaved_thinking_type_enabled: default_interleaved_thinking_type(),
90 tool_search: ToolSearchConfig::default(),
91 effort: default_effort(),
92 count_tokens_enabled: default_count_tokens_enabled(),
93 }
94 }
95}
96
/// Token counting is opt-in: disabled unless explicitly configured.
#[inline]
fn default_count_tokens_enabled() -> bool {
    false
}
101
/// Configuration for tool search and deferred tool loading.
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ToolSearchConfig {
    /// Master switch; tool search is off by default.
    #[serde(default)]
    pub enabled: bool,

    /// Matching algorithm name (defaults to `"regex"`).
    #[serde(default = "default_tool_search_algorithm")]
    pub algorithm: String,

    /// When `true` (the default), tools are deferred rather than always loaded.
    #[serde(default = "default_defer_by_default")]
    pub defer_by_default: bool,

    /// Maximum number of search results (defaults to `5`).
    #[serde(default = "default_max_results")]
    pub max_results: u32,

    /// Tool names that stay available regardless of search results.
    // NOTE(review): semantics inferred from the field name — confirm against
    // the code that consumes this list.
    #[serde(default)]
    pub always_available_tools: Vec<String>,
}
127
128impl Default for ToolSearchConfig {
129 fn default() -> Self {
130 Self {
131 enabled: false,
132 algorithm: default_tool_search_algorithm(),
133 defer_by_default: default_defer_by_default(),
134 max_results: default_max_results(),
135 always_available_tools: vec![],
136 }
137 }
138}
139
/// Default tool-search matching algorithm.
#[inline]
fn default_tool_search_algorithm() -> String {
    String::from("regex")
}

/// Tools are deferred (not eagerly loaded) unless configured otherwise.
#[inline]
fn default_defer_by_default() -> bool {
    true
}

/// Default cap on tool-search results.
#[inline]
fn default_max_results() -> u32 {
    5
}

/// Extended thinking is on by default.
#[inline]
fn default_extended_thinking_enabled() -> bool {
    true
}

/// Beta header value that enables interleaved thinking.
#[inline]
fn default_interleaved_thinking_beta() -> String {
    String::from("interleaved-thinking-2025-05-14")
}

/// Default interleaved-thinking token budget.
// NOTE(review): presumably chosen to sit just under a 32k cap — confirm.
#[inline]
fn default_interleaved_thinking_budget_tokens() -> u32 {
    31_999
}

/// Default value for the thinking `type` field.
#[inline]
fn default_interleaved_thinking_type() -> String {
    String::from("enabled")
}

/// Default reasoning effort level.
#[inline]
fn default_effort() -> String {
    String::from("low")
}
179
#[cfg(test)]
mod tests {
    use super::OpenAIConfig;

    /// `Default` must leave every OpenAI knob off/empty.
    #[test]
    fn openai_config_defaults_to_websocket_mode_disabled() {
        let config = OpenAIConfig::default();
        assert!(!config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
    }

    /// Setting only `websocket_mode` must not disturb the other defaults.
    #[test]
    fn openai_config_parses_websocket_mode_opt_in() {
        let config: OpenAIConfig =
            toml::from_str("websocket_mode = true").expect("config should parse");
        assert!(config.websocket_mode);
        assert_eq!(config.responses_store, None);
        assert!(config.responses_include.is_empty());
    }

    /// Responses-API options round-trip from TOML.
    #[test]
    fn openai_config_parses_responses_options() {
        let source = r#"
responses_store = false
responses_include = ["reasoning.encrypted_content", "output_text.annotations"]
"#;
        let config: OpenAIConfig = toml::from_str(source).expect("config should parse");
        assert_eq!(config.responses_store, Some(false));
        let expected = [
            "reasoning.encrypted_content".to_string(),
            "output_text.annotations".to_string(),
        ];
        assert_eq!(config.responses_include, expected);
    }
}