1use serde::Deserialize;
2use serde::Serialize;
3use std::borrow::Cow;
4use std::collections::BTreeMap;
5
6use crate::ContentBlock;
7use crate::Message;
8use crate::ProviderError;
9use crate::model::ToolChoice;
10use crate::tool::ToolSpec;
11
/// Reasoning configuration forwarded to providers that support it.
///
/// Both fields are optional and omitted from the serialized form when `None`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ReasoningOptions {
    /// How much reasoning effort to request from the model.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub effort: Option<ReasoningEffort>,
    /// Requested verbosity of the reasoning summary.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub summary: Option<ReasoningSummary>,
}
20
/// Relative reasoning effort level; serialized as `"low"` / `"medium"` / `"high"`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ReasoningEffort {
    Low,
    Medium,
    High,
}
29
/// Desired reasoning-summary style; serialized as `"auto"` / `"concise"` / `"detailed"`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ReasoningSummary {
    Auto,
    Concise,
    Detailed,
}
38
/// Whether tool search is enabled for a request; defaults to `Disabled`.
/// Serialized as `"disabled"` / `"hosted"`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum ToolSearchMode {
    #[default]
    Disabled,
    Hosted,
}
47
/// Output verbosity control for the Responses API; defaults to `Medium`.
/// Serialized as `"low"` / `"medium"` / `"high"`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResponsesVerbosity {
    Low,
    #[default]
    Medium,
    High,
}
57
/// Request-body compression for the Responses API; defaults to `None`.
/// Serialized as `"none"` / `"zstd"`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum ResponsesRequestCompression {
    #[default]
    None,
    Zstd,
}
66
/// Structured-output format discriminator; currently only JSON Schema
/// (serialized as `"json_schema"`) is supported.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
#[serde(rename_all = "snake_case")]
pub enum ResponsesTextFormatType {
    #[default]
    JsonSchema,
}
74
/// Structured-output format specification (schema-constrained text).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct ResponsesTextFormat {
    // Format discriminator; defaults to `JsonSchema`.
    #[serde(default)]
    pub r#type: ResponsesTextFormatType,
    // Whether the schema is enforced strictly; defaults to `false`.
    #[serde(default)]
    pub strict: bool,
    // The JSON Schema the output must conform to.
    pub schema: serde_json::Value,
    // Name identifying this format.
    pub name: String,
}
85
/// Text-output controls for the Responses API (verbosity and/or format).
/// Both fields are omitted from the serialized form when `None`.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct ResponsesTextControls {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub verbosity: Option<ResponsesVerbosity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub format: Option<ResponsesTextFormat>,
}
94
/// Per-request knobs specific to the Responses API.
///
/// Note that `Default` is implemented manually below (streaming defaults to
/// `Some(true)` rather than `None`).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ResponsesRequestOptions {
    // NOTE(review): these three Option fields have no `skip_serializing_if`,
    // so they serialize as `null` when unset — presumably intentional; confirm
    // against the wire format before adding skips.
    #[serde(default)]
    pub parallel_tool_calls: Option<bool>,
    #[serde(default)]
    pub store: Option<bool>,
    #[serde(default)]
    pub stream: Option<bool>,
    // Extra response fields to include; omitted when empty.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub include: Vec<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub service_tier: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prompt_cache_key: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub text: Option<ResponsesTextControls>,
    // Request-body compression; always serialized (defaults to `None` variant).
    #[serde(default)]
    pub compression: ResponsesRequestCompression,
}
115
116impl Default for ResponsesRequestOptions {
117 fn default() -> Self {
118 Self {
119 parallel_tool_calls: None,
120 store: None,
121 stream: Some(true),
122 include: Vec::new(),
123 service_tier: None,
124 prompt_cache_key: None,
125 text: None,
126 compression: ResponsesRequestCompression::None,
127 }
128 }
129}
130
/// Per-request knobs specific to Anthropic providers.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct AnthropicRequestOptions {
    // When `Some(true)`, asks the provider not to issue parallel tool calls.
    #[serde(default)]
    pub disable_parallel_tool_use: Option<bool>,
}
137
/// Per-request knobs specific to Gemini providers.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct GeminiRequestOptions {
    // Whether to request model "thoughts" output; unset when `None`.
    #[serde(default)]
    pub thoughts: Option<bool>,
}
144
/// Session/transport-level options attached to a request (routing hints,
/// turn metadata, and extra HTTP headers). All fields are omitted from the
/// serialized form when unset/empty.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct SessionRequestOptions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sticky_turn_state: Option<String>,
    // Opaque metadata string for the current turn (callers pass JSON in tests).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub turn_metadata: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subagent: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub prefer_connection_reuse: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub session_affinity: Option<String>,
    // Additional headers to send; BTreeMap keeps serialization order stable.
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub extra_headers: BTreeMap<String, String>,
}
161
/// Aggregate of all provider-specific request options. Each sub-struct
/// defaults independently; only the relevant one is consulted per provider.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct ProviderRequestOptions {
    #[serde(default)]
    pub tool_search_mode: ToolSearchMode,
    // Shared reasoning options (provider-agnostic); omitted when `None`.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning: Option<ReasoningOptions>,
    #[serde(default)]
    pub responses: ResponsesRequestOptions,
    #[serde(default)]
    pub anthropic: AnthropicRequestOptions,
    #[serde(default)]
    pub gemini: GeminiRequestOptions,
    #[serde(default)]
    pub session: SessionRequestOptions,
}
178
/// A provider-agnostic model request. Uses `Cow` fields so callers can pass
/// borrowed data without cloning; see [`Request::into_owned`] to detach.
#[derive(Debug, Clone)]
pub struct Request<'a> {
    pub model: Cow<'a, str>,
    // System prompt, if any.
    pub system: Option<Cow<'a, str>>,
    pub messages: Cow<'a, [Message]>,
    pub tools: Cow<'a, [ToolSpec]>,
    pub tool_choice: Option<ToolChoice>,
    pub temperature: Option<f32>,
    pub max_output_tokens: Option<u32>,
    pub metadata: Cow<'a, BTreeMap<String, String>>,
    pub provider_request_options: ProviderRequestOptions,
}
192
193impl Request<'_> {
194 pub fn into_owned(self) -> Request<'static> {
195 Request {
196 model: Cow::Owned(self.model.into_owned()),
197 system: self.system.map(|system| Cow::Owned(system.into_owned())),
198 messages: Cow::Owned(self.messages.into_owned()),
199 tools: Cow::Owned(self.tools.into_owned()),
200 tool_choice: self.tool_choice,
201 temperature: self.temperature,
202 max_output_tokens: self.max_output_tokens,
203 metadata: Cow::Owned(self.metadata.into_owned()),
204 provider_request_options: self.provider_request_options,
205 }
206 }
207}
208
/// One item of compaction input. Serialized as an internally tagged enum:
/// `{"type": "user_turn", "content": ...}` etc. (snake_case variant tags).
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum CompactionInputItem {
    UserTurn {
        content: String,
    },
    AssistantTurn {
        content: String,
    },
    // A tool call and its result; `request` may be absent.
    ToolExchange {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        request: Option<String>,
        result: String,
        is_error: bool,
    },
    CanonicalContext {
        content: String,
    },
    MemoryRecall {
        content: String,
    },
    // Output produced by a delegated sub-agent.
    DelegationResult {
        agent_id: String,
        agent_name: String,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        role: Option<String>,
        status: String,
        content: String,
    },
    // Summary carried over from a previous compaction pass.
    CompactionSummary {
        content: String,
    },
}
243
/// A request to compact (summarize) a transcript. Converted into a plain
/// [`Request`] via [`CompactionRequest::into_model_request`].
#[derive(Debug, Clone)]
pub struct CompactionRequest<'a> {
    pub model: Cow<'a, str>,
    // Becomes the system prompt of the resulting model request.
    pub instructions: Cow<'a, str>,
    // Items serialized to JSON and embedded in a single user message.
    pub input: Cow<'a, [CompactionInputItem]>,
    pub metadata: Cow<'a, BTreeMap<String, String>>,
    pub provider_request_options: ProviderRequestOptions,
}
253
254impl CompactionRequest<'_> {
255 pub fn into_model_request(self) -> Result<Request<'static>, ProviderError> {
257 let input_json =
258 serde_json::to_string(self.input.as_ref()).map_err(ProviderError::Serialize)?;
259
260 Ok(Request {
261 model: Cow::Owned(self.model.into_owned()),
262 system: Some(Cow::Owned(self.instructions.into_owned())),
263 messages: Cow::Owned(vec![Message::user(ContentBlock::text(format!(
264 "Compaction input JSON:\n{input_json}"
265 )))]),
266 tools: Cow::Owned(Vec::new()),
267 tool_choice: None,
268 temperature: None,
269 max_output_tokens: None,
270 metadata: Cow::Owned(self.metadata.into_owned()),
271 provider_request_options: self.provider_request_options,
272 })
273 }
274}
275
/// A raw trace memory to be summarized.
// NOTE: `Eq` is not derived because `items` holds `serde_json::Value`s whose
// equality semantics we only rely on via `PartialEq`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RawMemory {
    pub id: String,
    pub metadata: RawMemoryMetadata,
    // Arbitrary JSON items making up the trace contents.
    pub items: Vec<serde_json::Value>,
}
283
/// Metadata attached to a [`RawMemory`].
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct RawMemoryMetadata {
    // Filesystem path the trace was loaded from.
    pub source_path: String,
}
288
/// A request to summarize raw trace memories. Converted into a plain
/// [`Request`] via [`MemorySummarizeRequest::into_model_request`].
#[derive(Debug, Clone)]
pub struct MemorySummarizeRequest<'a> {
    pub model: Cow<'a, str>,
    // Memories serialized to JSON and embedded in a single user message.
    pub raw_memories: Cow<'a, [RawMemory]>,
    // Overrides `provider_request_options.reasoning` during conversion.
    pub reasoning: Option<ReasoningOptions>,
    pub metadata: Cow<'a, BTreeMap<String, String>>,
    pub provider_request_options: ProviderRequestOptions,
}
298
299impl MemorySummarizeRequest<'_> {
300 pub fn into_model_request(self) -> Result<Request<'static>, ProviderError> {
302 let raw_memories_json =
303 serde_json::to_string(self.raw_memories.as_ref()).map_err(ProviderError::Serialize)?;
304
305 Ok(Request {
306 model: Cow::Owned(self.model.into_owned()),
307 system: Some(Cow::Borrowed(MEMORY_SUMMARIZE_SYSTEM_PROMPT)),
308 messages: Cow::Owned(vec![Message::user(ContentBlock::text(format!(
309 "Memory summarize input JSON:\n{raw_memories_json}"
310 )))]),
311 tools: Cow::Owned(Vec::new()),
312 tool_choice: None,
313 temperature: None,
314 max_output_tokens: None,
315 metadata: Cow::Owned(self.metadata.into_owned()),
316 provider_request_options: ProviderRequestOptions {
317 reasoning: self.reasoning,
318 ..self.provider_request_options
319 },
320 })
321 }
322}
323
// System prompt for memory summarization. Demands a JSON array (one object
// per input trace, same order) with exactly the string fields `raw_memory`
// and `memory_summary`. Runtime text — do not reword.
const MEMORY_SUMMARIZE_SYSTEM_PROMPT: &str = concat!(
    "You summarize trace memories for Codex.\n",
    "Return valid JSON only.\n",
    "The output must be a JSON array with one object per input trace, in the same order.\n",
    "Each object must have exactly these string fields: `raw_memory` and `memory_summary`.\n",
    "`raw_memory` should be a concrete, detailed summary of the trace contents.\n",
    "`memory_summary` should be a shorter durable takeaway focused on reusable context.\n",
    "Use empty strings when information is unavailable.\n",
    "Do not include markdown fences or extra commentary.\n",
);
334
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::Value;

    // End-to-end check of CompactionRequest::into_model_request: the model,
    // system prompt, metadata, and session options must survive conversion,
    // and the single user message must contain the input items as JSON.
    #[test]
    fn compaction_request_into_model_request_serializes_input_as_prompt_text() {
        let request = CompactionRequest {
            model: Cow::Borrowed("gpt-5"),
            instructions: Cow::Borrowed("Summarize the transcript."),
            input: Cow::Owned(vec![
                CompactionInputItem::UserTurn {
                    content: "hello".to_string(),
                },
                CompactionInputItem::AssistantTurn {
                    content: "world".to_string(),
                },
            ]),
            metadata: Cow::Owned(BTreeMap::from([("scope".to_string(), "test".to_string())])),
            provider_request_options: ProviderRequestOptions {
                session: SessionRequestOptions {
                    sticky_turn_state: Some("sticky".to_string()),
                    turn_metadata: None,
                    subagent: Some("compact".to_string()),
                    prefer_connection_reuse: Some(true),
                    session_affinity: None,
                    extra_headers: BTreeMap::new(),
                },
                ..ProviderRequestOptions::default()
            },
        };

        let model_request = request
            .into_model_request()
            .expect("compaction request should convert");

        // Scalar fields pass through unchanged.
        assert_eq!(model_request.model.as_ref(), "gpt-5");
        assert_eq!(
            model_request.system.as_deref(),
            Some("Summarize the transcript.")
        );
        assert_eq!(model_request.metadata["scope"], "test");
        // Session options are preserved verbatim.
        assert_eq!(
            model_request
                .provider_request_options
                .session
                .sticky_turn_state
                .as_deref(),
            Some("sticky")
        );
        assert_eq!(
            model_request
                .provider_request_options
                .session
                .subagent
                .as_deref(),
            Some("compact")
        );
        assert_eq!(model_request.messages.len(), 1);

        // The prompt is the fixed prefix followed by the items as a JSON
        // array; round-trip it to verify the tagged-enum encoding.
        let prompt = model_request.messages[0].text();
        assert!(prompt.starts_with("Compaction input JSON:\n"));
        let payload = prompt
            .strip_prefix("Compaction input JSON:\n")
            .expect("prompt should contain the compaction prefix");
        let input: Vec<Value> = serde_json::from_str(payload).expect("prompt should be json");
        assert_eq!(input[0]["type"], "user_turn");
        assert_eq!(input[0]["content"], "hello");
        assert_eq!(input[1]["type"], "assistant_turn");
        assert_eq!(input[1]["content"], "world");
    }

    // End-to-end check of MemorySummarizeRequest::into_model_request: the
    // fixed system prompt is attached, the reasoning override is applied,
    // and the raw memories survive a JSON round trip through the prompt.
    #[test]
    fn memory_summarize_request_into_model_request_serializes_input_as_prompt_text() {
        let request = MemorySummarizeRequest {
            model: Cow::Borrowed("gpt-5"),
            raw_memories: Cow::Owned(vec![RawMemory {
                id: "memory-1".to_string(),
                metadata: RawMemoryMetadata {
                    source_path: "/tmp/trace.jsonl".to_string(),
                },
                items: vec![serde_json::json!({"type":"message","role":"user"})],
            }]),
            reasoning: Some(ReasoningOptions {
                effort: Some(ReasoningEffort::Medium),
                summary: None,
            }),
            metadata: Cow::Owned(BTreeMap::from([("scope".to_string(), "test".to_string())])),
            provider_request_options: ProviderRequestOptions {
                session: SessionRequestOptions {
                    sticky_turn_state: None,
                    turn_metadata: Some("{\"turn_id\":\"t1\"}".to_string()),
                    subagent: None,
                    prefer_connection_reuse: Some(true),
                    session_affinity: Some("thread-1".to_string()),
                    extra_headers: BTreeMap::new(),
                },
                ..ProviderRequestOptions::default()
            },
        };

        let model_request = request
            .into_model_request()
            .expect("memory summarize request should convert");

        assert_eq!(model_request.model.as_ref(), "gpt-5");
        // The system prompt is the module-level constant, not caller text.
        assert_eq!(
            model_request.system.as_deref(),
            Some(MEMORY_SUMMARIZE_SYSTEM_PROMPT)
        );
        assert_eq!(model_request.metadata["scope"], "test");
        assert_eq!(
            model_request
                .provider_request_options
                .session
                .turn_metadata
                .as_deref(),
            Some("{\"turn_id\":\"t1\"}")
        );
        // The request-level `reasoning` field overrode the provider options.
        assert_eq!(
            model_request
                .provider_request_options
                .reasoning
                .as_ref()
                .expect("reasoning options")
                .effort,
            Some(ReasoningEffort::Medium)
        );
        assert_eq!(model_request.messages.len(), 1);

        // Round-trip the embedded JSON back into RawMemory values.
        let prompt = model_request.messages[0].text();
        assert!(prompt.starts_with("Memory summarize input JSON:\n"));
        let payload = prompt
            .strip_prefix("Memory summarize input JSON:\n")
            .expect("prompt should contain the memory summarize prefix");
        let input: Vec<RawMemory> = serde_json::from_str(payload).expect("prompt should be json");
        assert_eq!(input[0].id, "memory-1");
        assert_eq!(input[0].metadata.source_path, "/tmp/trace.jsonl");
        assert_eq!(input[0].items[0]["role"], "user");
    }
}
475}