// distri_types/execution.rs

1use schemars::JsonSchema;
2use serde::{Deserialize, Serialize};
3
4use crate::{Part, PlanStep, TaskStatus, ToolResponse, core::FileType};
5
6/// Execution strategy types
7#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
8pub enum ExecutionType {
9    Sequential,
10    Interleaved,
11    Retriable,
12    React,
13    Code,
14}
15
/// Execution result with detailed information
#[derive(Debug, Clone, JsonSchema, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct ExecutionResult {
    /// Identifier of the step this result belongs to.
    pub step_id: String,
    /// Output parts produced by the step (text, tool calls/results, data, images, artifacts).
    pub parts: Vec<Part>,
    /// Final status of the execution.
    pub status: ExecutionStatus,
    pub reason: Option<String>, // for rejection or failure
    /// When the result was produced (unit not shown here — presumably Unix seconds; confirm at call sites).
    pub timestamp: i64,
}
26
27impl ExecutionResult {
28    pub fn is_success(&self) -> bool {
29        self.status == ExecutionStatus::Success || self.status == ExecutionStatus::InputRequired
30    }
31    pub fn is_failed(&self) -> bool {
32        self.status == ExecutionStatus::Failed
33    }
34    pub fn is_rejected(&self) -> bool {
35        self.status == ExecutionStatus::Rejected
36    }
37    pub fn is_input_required(&self) -> bool {
38        self.status == ExecutionStatus::InputRequired
39    }
40
41    pub fn as_observation(&self) -> String {
42        let mut txt = String::new();
43        if let Some(reason) = &self.reason {
44            txt.push_str(&reason);
45        }
46        let parts_txt = self
47            .parts
48            .iter()
49            .filter_map(|p| match p {
50                Part::Text(text) => Some(text.clone()),
51                Part::ToolCall(tool_call) => Some(format!(
52                    "Action: {} with {}",
53                    tool_call.tool_name,
54                    serde_json::to_string(&tool_call.input).unwrap_or_default()
55                )),
56                Part::Data(data) => serde_json::to_string(&data).ok(),
57                Part::ToolResult(tool_result) => serde_json::to_string(&tool_result.result()).ok(),
58                Part::Image(image) => match image {
59                    FileType::Url { url, .. } => Some(format!("[Image: {}]", url)),
60                    FileType::Bytes {
61                        name, mime_type, ..
62                    } => Some(format!(
63                        "[Image: {} ({})]",
64                        name.as_deref().unwrap_or("unnamed"),
65                        mime_type
66                    )),
67                },
68                Part::Artifact(artifact) => Some(format!(
69                    "[Artifact ID:{}\n You can use artifact tools to read the full content\n{}]",
70                    artifact.file_id,
71                    if let Some(stats) = &artifact.stats {
72                        format!(" ({})", stats.context_info())
73                    } else {
74                        String::new()
75                    }
76                )),
77            })
78            .collect::<Vec<_>>()
79            .join("\n");
80        if !parts_txt.is_empty() {
81            txt.push_str("\n");
82            txt.push_str(&parts_txt);
83        }
84        txt
85    }
86}
87
/// Terminal status of a single step's execution.
/// Serialized in snake_case (e.g. `"input_required"`).
#[derive(Debug, Clone, JsonSchema, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ExecutionStatus {
    Success,
    Failed,
    Rejected,
    InputRequired,
}
96
97impl Into<TaskStatus> for ExecutionStatus {
98    fn into(self) -> TaskStatus {
99        match self {
100            ExecutionStatus::Success => TaskStatus::Completed,
101            ExecutionStatus::Failed => TaskStatus::Failed,
102            ExecutionStatus::Rejected => TaskStatus::Canceled,
103            ExecutionStatus::InputRequired => TaskStatus::InputRequired,
104        }
105    }
106}
107
/// A tool invocation outcome that may be skipped instead of executed.
pub enum ToolResultWithSkip {
    /// A completed tool invocation carrying its response.
    ToolResult(ToolResponse),
    // Skip tool call if it is external
    Skip {
        /// Id of the tool call being skipped.
        tool_call_id: String,
        /// Human-readable explanation for the skip.
        reason: String,
    },
}
116
117pub fn from_tool_results(tool_results: Vec<ToolResultWithSkip>) -> Vec<Part> {
118    tool_results
119        .iter()
120        .filter_map(|result| match result {
121            ToolResultWithSkip::ToolResult(tool_result) => {
122                // Simply extract parts from the tool response
123                Some(tool_result.parts.clone())
124            }
125            _ => None,
126        })
127        .flatten()
128        .collect()
129}
130
/// Aggregate token/context accounting for a run.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ContextUsage {
    /// Total tokens consumed (presumably input + output — confirm against producer).
    pub tokens: u32,
    /// Tokens sent to the model.
    pub input_tokens: u32,
    /// Tokens produced by the model.
    pub output_tokens: u32,
    /// Current loop iteration number.
    pub current_iteration: usize,
    /// Detailed breakdown of what currently occupies the context window.
    pub context_size: ContextSize,
}
139
/// Size breakdown of the current context, in counts, chars, and estimated
/// tokens (estimation heuristic not shown in this file).
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct ContextSize {
    pub message_count: usize,
    pub message_chars: usize,
    pub message_estimated_tokens: usize,
    pub execution_history_count: usize,
    pub execution_history_chars: usize,
    pub execution_history_estimated_tokens: usize,
    pub scratchpad_chars: usize,
    pub scratchpad_estimated_tokens: usize,
    /// Sum across messages, execution history, and scratchpad.
    pub total_chars: usize,
    pub total_estimated_tokens: usize,
    /// Per-agent context size breakdown
    pub agent_breakdown: std::collections::HashMap<String, AgentContextSize>,
}
155
/// Per-agent slice of [`ContextSize`]: how much context a single agent
/// contributes via its execution history and scratchpad.
#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct AgentContextSize {
    /// Agent this breakdown belongs to (also the key in `agent_breakdown`).
    pub agent_id: String,
    pub task_count: usize,
    pub execution_history_count: usize,
    pub execution_history_chars: usize,
    pub execution_history_estimated_tokens: usize,
    pub scratchpad_chars: usize,
    pub scratchpad_estimated_tokens: usize,
}
166
/// Enriched execution history entry that includes context metadata
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionHistoryEntry {
    pub thread_id: String, // Conversation context
    pub task_id: String,   // Individual user task/request
    pub run_id: String,    // Specific execution strand
    /// The underlying result being recorded.
    pub execution_result: ExecutionResult,
    pub stored_at: i64, // When this was stored
}
176
/// Entry for scratchpad formatting
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScratchpadEntry {
    pub timestamp: i64,
    // Flattened: the variant's `type`/`data` fields are inlined into this struct's JSON.
    #[serde(flatten)]
    pub entry_type: ScratchpadEntryType,
    pub task_id: String,
    // Defaults to None when absent from serialized input.
    #[serde(default)]
    pub parent_task_id: Option<String>,
    /// Free-form classifier for the entry (semantics defined by producers — confirm at call sites).
    pub entry_kind: Option<String>,
}
188
/// Type of scratchpad entry - only for Thought/Action/Observation tracking
// Adjacently tagged: serializes as {"type": "...", "data": ...}.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type", content = "data")]
pub enum ScratchpadEntryType {
    #[serde(rename = "task")]
    Task(Vec<Part>),
    // Renamed: rename_all would otherwise produce "plan_step".
    #[serde(rename = "plan")]
    PlanStep(PlanStep),
    #[serde(rename = "execution")]
    Execution(ExecutionHistoryEntry),
}
200
#[cfg(test)]
mod tests {
    //! Tests documenting (and pinning) the scratchpad-bloat fix: large
    //! `Part::Data` / `Part::Text` content must be truncated by
    //! `ExecutionResult::as_observation()`, while artifact metadata stays
    //! naturally concise.
    use super::*;
    use serde_json::json;

    /// Demonstrates the size difference between embedding raw data in an
    /// observation versus referencing it as an artifact, and asserts both
    /// stay within bounds after truncation.
    #[test]
    fn test_scratchpad_large_observation_issue() {
        println!("=== TESTING LARGE DATA OBSERVATION IN SCRATCHPAD ===");

        // Create a very large tool response observation (similar to search results)
        let large_data = json!({
            "results": (0..100).map(|i| json!({
                "id": i,
                "name": format!("Minister {}", i),
                "email": format!("minister{}@gov.sg", i),
                "portfolio": format!("Ministry of Complex Affairs {}", i),
                "biography": format!("Very long biography text that goes on and on for minister {} with lots of details about their career, education, achievements, and political history. This is intentionally verbose to demonstrate the issue with large content in scratchpad observations.", i),
            })).collect::<Vec<_>>()
        });

        println!(
            "Large data size: {} bytes",
            serde_json::to_string(&large_data).unwrap().len()
        );

        // Test 1: Direct Part::Data (BROKEN - causes scratchpad bloat)
        let execution_result_data = ExecutionResult {
            step_id: "test-step-1".to_string(),
            parts: vec![Part::Data(large_data.clone())],
            status: ExecutionStatus::Success,
            reason: None,
            timestamp: 1234567890,
        };

        let observation_data = execution_result_data.as_observation();
        println!(
            "🚨 BROKEN: Direct Part::Data observation size: {} chars",
            observation_data.len()
        );
        println!(
            "Preview (first 200 chars): {}",
            &observation_data.chars().take(200).collect::<String>()
        );

        // Test 2: File metadata (GOOD - concise)
        let file_metadata = crate::filesystem::FileMetadata {
            file_id: "large-search-results.json".to_string(),
            relative_path: "thread123/task456/large-search-results.json".to_string(),
            size: serde_json::to_string(&large_data).unwrap().len() as u64,
            content_type: Some("application/json".to_string()),
            original_filename: Some("search_results.json".to_string()),
            created_at: chrono::Utc::now(),
            updated_at: chrono::Utc::now(),
            checksum: Some("abc123".to_string()),
            stats: None,
            preview: Some("JSON search results with 100 minister entries".to_string()),
        };

        let execution_result_file = ExecutionResult {
            step_id: "test-step-2".to_string(),
            parts: vec![Part::Artifact(file_metadata)],
            status: ExecutionStatus::Success,
            reason: None,
            timestamp: 1234567890,
        };

        let observation_file = execution_result_file.as_observation();
        println!(
            "āœ… GOOD: File metadata observation size: {} chars",
            observation_file.len()
        );
        println!("Content: {}", observation_file);

        // Demonstrate the problem
        println!("\n=== SCRATCHPAD IMPACT ===");
        println!(
            "āŒ Direct approach adds {} chars to scratchpad (CAUSES LOOPS!)",
            observation_data.len()
        );
        println!(
            "āœ… File metadata adds only {} chars to scratchpad",
            observation_file.len()
        );
        println!(
            "šŸ’” Size reduction: {:.1}%",
            (1.0 - (observation_file.len() as f64 / observation_data.len() as f64)) * 100.0
        );

        // This test shows the fix is working - observations are now truncated
        assert!(observation_data.len() < 1000, "Large data is now truncated"); // Fixed expectation
        assert!(
            observation_file.len() < 300,
            "File metadata stays reasonably concise"
        ); // Updated for detailed format

        println!("\n🚨 CONCLUSION: as_observation() needs to truncate large Part::Data!");
    }

    /// Pins the truncation contract: data observations stay under 600 chars,
    /// text under ~1000, and both carry a "truncated ... total chars" marker.
    #[test]
    fn test_observation_truncation_fix() {
        println!("=== TESTING OBSERVATION TRUNCATION FIX ===");

        // Test large data truncation
        let large_data = json!({
            "big_array": (0..200).map(|i| format!("item_{}", i)).collect::<Vec<_>>()
        });

        let execution_result = ExecutionResult {
            step_id: "test-truncation".to_string(),
            parts: vec![Part::Data(large_data)],
            status: ExecutionStatus::Success,
            reason: None,
            timestamp: 1234567890,
        };

        let observation = execution_result.as_observation();
        println!("Truncated observation size: {} chars", observation.len());
        println!("Content: {}", observation);

        // Should be truncated and include total char count
        assert!(
            observation.len() < 600,
            "Observation should be truncated to <600 chars"
        );
        assert!(
            observation.contains("truncated"),
            "Should indicate truncation"
        );
        assert!(
            observation.contains("total chars"),
            "Should show total char count"
        );

        // Test long text truncation
        let long_text = "This is a very long text. ".repeat(100);
        let text_result = ExecutionResult {
            step_id: "test-text-truncation".to_string(),
            parts: vec![Part::Text(long_text.clone())],
            status: ExecutionStatus::Success,
            reason: None,
            timestamp: 1234567890,
        };

        let text_observation = text_result.as_observation();
        println!("Text observation size: {} chars", text_observation.len());
        assert!(
            text_observation.len() < 1100,
            "Text should be truncated to ~1000 chars"
        );
        if long_text.len() > 1000 {
            assert!(
                text_observation.contains("truncated"),
                "Long text should be truncated"
            );
        }

        println!("āœ… Observation truncation is working!");
    }
}
359}