use crate::segment::{ContextPriority, ContextSegment};
use crate::token_counter::TokenCounter;
use chrono::{DateTime, Utc};
use enact_core::kernel::{ExecutionId, StepId};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
// Process-wide sequence counter for condensed-context segments. Starts at
// 3000 so condenser-generated segments sort after other segment sources
// (assumed convention — TODO confirm against the segment module's ranges).
static CONDENSE_SEQUENCE: AtomicU64 = AtomicU64::new(3000);

/// Returns the next unique segment sequence number.
///
/// `Relaxed` suffices here: the counter only needs atomicity to hand out
/// unique, monotonically increasing values; it does not synchronize any
/// other memory with readers.
fn next_sequence() -> u64 {
    CONDENSE_SEQUENCE.fetch_add(1, Ordering::Relaxed)
}
/// Tunables controlling how much detail a condensed summary retains.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CondenserConfig {
    /// Token budget the summary is truncated toward when it exceeds `max_tokens`.
    pub target_tokens: usize,
    /// Hard ceiling; a summary over this limit is cut down to `target_tokens`.
    pub max_tokens: usize,
    /// Whether to emit the per-step "Steps (shown/total)" section.
    pub include_steps: bool,
    /// Maximum number of steps listed in the steps section.
    pub max_steps: usize,
    /// NOTE(review): declared but not read by any code in this file — presumably
    /// consumed elsewhere or reserved for future use; verify before removing.
    pub include_tools: bool,
    /// Whether to append the "Error: …" section (and a failure-mode learning).
    pub include_errors: bool,
    /// Whether to append "(Nms)" durations to the header and step lines.
    pub include_timing: bool,
    /// Whether to emit the "Key Decisions:" section.
    pub preserve_decisions: bool,
    /// Maximum number of decisions listed in the decisions section.
    pub max_decisions: usize,
}
impl Default for CondenserConfig {
fn default() -> Self {
Self {
target_tokens: 1500,
max_tokens: 2000,
include_steps: true,
max_steps: 10,
include_tools: true,
include_errors: true,
include_timing: true,
preserve_decisions: true,
max_decisions: 5,
}
}
}
impl CondenserConfig {
pub fn minimal() -> Self {
Self {
target_tokens: 500,
max_tokens: 750,
include_steps: false,
max_steps: 3,
include_tools: false,
include_errors: true,
include_timing: false,
preserve_decisions: true,
max_decisions: 2,
}
}
pub fn detailed() -> Self {
Self {
target_tokens: 3000,
max_tokens: 4000,
include_steps: true,
max_steps: 20,
include_tools: true,
include_errors: true,
include_timing: true,
preserve_decisions: true,
max_decisions: 10,
}
}
}
/// Compact record of a single step inside an execution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StepSummary {
    /// Identifier of the step within its execution.
    pub step_id: StepId,
    /// Free-form step category (e.g. "llm_call", "tool_call" in the tests).
    pub step_type: String,
    /// One-line description; truncated to ~50 chars when rendered.
    pub summary: String,
    /// Rendered as "✓" or "✗" in the steps section.
    pub success: bool,
    /// Wall-clock duration, shown only when `include_timing` is set.
    pub duration_ms: Option<u64>,
    /// Notable output; counted toward the original-token estimate but not
    /// rendered in the summary text.
    pub key_output: Option<String>,
}
/// A decision made during an execution, preserved as a "learning".
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DecisionSummary {
    /// What was decided; truncated to ~80 chars when rendered.
    pub decision: String,
    /// Why it was decided; surfaced only in the learnings list.
    pub rationale: String,
    /// Confidence in [0, 1]; rendered as a percentage.
    pub confidence: f64,
    /// Step at which the decision was taken.
    pub step_id: StepId,
}
/// Full record of one (possibly child) execution, built up via the
/// builder-style methods on `impl ExecutionTrace` and later condensed.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionTrace {
    /// Identifier of this execution.
    pub execution_id: ExecutionId,
    /// Parent execution, when this trace was spawned by another execution.
    pub parent_execution_id: Option<ExecutionId>,
    /// Step in the parent that spawned this execution; also used to anchor
    /// the generated context segment.
    pub parent_step_id: Option<StepId>,
    /// Set to `Utc::now()` at construction.
    pub started_at: DateTime<Utc>,
    /// Set by `complete`/`fail`; `None` while still running.
    pub ended_at: Option<DateTime<Utc>>,
    /// Starts as `Running`; updated by `complete`/`fail`.
    pub status: ExecutionStatus,
    /// Steps in insertion order.
    pub steps: Vec<StepSummary>,
    /// Decisions in insertion order.
    pub decisions: Vec<DecisionSummary>,
    /// Final result text, set by `complete`.
    pub final_output: Option<String>,
    /// Error text, set by `fail`.
    pub error: Option<String>,
    /// Arbitrary key/value annotations; not read by the condenser itself.
    pub metadata: HashMap<String, String>,
}
/// State of a traced execution; serialized in snake_case.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExecutionStatus {
    /// Finished successfully (`ExecutionTrace::complete`).
    Completed,
    /// Finished with an error (`ExecutionTrace::fail`).
    Failed,
    /// Cancelled before completion.
    Cancelled,
    /// Ended due to a timeout.
    TimedOut,
    /// Still in progress; the initial state of a new trace.
    Running,
}
impl ExecutionTrace {
pub fn new(execution_id: ExecutionId) -> Self {
Self {
execution_id,
parent_execution_id: None,
parent_step_id: None,
started_at: Utc::now(),
ended_at: None,
status: ExecutionStatus::Running,
steps: Vec::new(),
decisions: Vec::new(),
final_output: None,
error: None,
metadata: HashMap::new(),
}
}
pub fn with_parent(
execution_id: ExecutionId,
parent_execution_id: ExecutionId,
parent_step_id: StepId,
) -> Self {
Self {
execution_id,
parent_execution_id: Some(parent_execution_id),
parent_step_id: Some(parent_step_id),
started_at: Utc::now(),
ended_at: None,
status: ExecutionStatus::Running,
steps: Vec::new(),
decisions: Vec::new(),
final_output: None,
error: None,
metadata: HashMap::new(),
}
}
pub fn complete(mut self, output: impl Into<String>) -> Self {
self.ended_at = Some(Utc::now());
self.status = ExecutionStatus::Completed;
self.final_output = Some(output.into());
self
}
pub fn fail(mut self, error: impl Into<String>) -> Self {
self.ended_at = Some(Utc::now());
self.status = ExecutionStatus::Failed;
self.error = Some(error.into());
self
}
pub fn add_step(mut self, step: StepSummary) -> Self {
self.steps.push(step);
self
}
pub fn add_decision(mut self, decision: DecisionSummary) -> Self {
self.decisions.push(decision);
self
}
pub fn duration_ms(&self) -> Option<i64> {
self.ended_at
.map(|end| (end - self.started_at).num_milliseconds())
}
}
/// Output of condensing one or more execution traces.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CondensedResult {
    /// Execution the summary describes (the first trace, when condensing many).
    pub execution_id: ExecutionId,
    /// The condensed, possibly truncated summary text.
    pub summary: String,
    /// Short outcome strings extracted from final outputs.
    pub outcomes: Vec<String>,
    /// Decision and failure-mode strings worth carrying forward.
    pub learnings: Vec<String>,
    /// Segment ready for insertion into the parent's context window.
    pub context_segment: ContextSegment,
    /// Token count of `summary`.
    pub token_count: usize,
    /// Estimated token count of the uncondensed trace content.
    pub original_tokens: usize,
    /// `token_count / original_tokens`; 1.0 when the original is empty.
    pub compression_ratio: f64,
    /// When the condensation was performed.
    pub condensed_at: DateTime<Utc>,
}
/// Condenses `ExecutionTrace`s into compact, token-budgeted summaries.
pub struct ResultCondenser {
    // Measures and truncates text against the configured token budgets.
    token_counter: TokenCounter,
    // Controls which sections are emitted and the token limits.
    config: CondenserConfig,
}
impl ResultCondenser {
pub fn new() -> Self {
Self {
token_counter: TokenCounter::default(),
config: CondenserConfig::default(),
}
}
pub fn with_config(config: CondenserConfig) -> Self {
Self {
token_counter: TokenCounter::default(),
config,
}
}
pub fn condense(&self, trace: &ExecutionTrace) -> CondensedResult {
let mut parts: Vec<String> = Vec::new();
let mut outcomes: Vec<String> = Vec::new();
let mut learnings: Vec<String> = Vec::new();
let header = self.build_header(trace);
parts.push(header);
let status_section = self.build_status_section(trace, &mut outcomes);
parts.push(status_section);
if self.config.include_steps && !trace.steps.is_empty() {
let steps_section = self.build_steps_section(&trace.steps);
parts.push(steps_section);
}
if self.config.preserve_decisions && !trace.decisions.is_empty() {
let decisions_section = self.build_decisions_section(&trace.decisions, &mut learnings);
parts.push(decisions_section);
}
if self.config.include_errors {
if let Some(error) = &trace.error {
parts.push(format!("Error: {}", self.truncate(error, 200)));
learnings.push(format!("Failure mode: {}", self.truncate(error, 100)));
}
}
let mut summary = parts.join("\n\n");
let mut token_count = self.token_counter.count(&summary);
if token_count > self.config.max_tokens {
let (truncated, new_count) = self
.token_counter
.truncate(&summary, self.config.target_tokens);
summary = truncated;
token_count = new_count;
}
let original_tokens = self.estimate_original_tokens(trace);
let compression_ratio = if original_tokens > 0 {
token_count as f64 / original_tokens as f64
} else {
1.0
};
let segment_content = format!(
"[Child Execution: {}]\n{}",
trace.execution_id.as_str(),
summary
);
let segment_tokens = self.token_counter.count(&segment_content);
let context_segment = ContextSegment::child_summary(
segment_content,
segment_tokens,
next_sequence(),
trace.parent_step_id.clone().unwrap_or_default(),
)
.with_priority(if trace.status == ExecutionStatus::Completed {
ContextPriority::Medium
} else {
ContextPriority::High
});
CondensedResult {
execution_id: trace.execution_id.clone(),
summary,
outcomes,
learnings,
context_segment,
token_count,
original_tokens,
compression_ratio,
condensed_at: Utc::now(),
}
}
fn build_header(&self, trace: &ExecutionTrace) -> String {
let mut header = format!("Execution: {}", trace.execution_id.as_str());
if self.config.include_timing {
if let Some(duration) = trace.duration_ms() {
header.push_str(&format!(" ({}ms)", duration));
}
}
if let Some(parent) = &trace.parent_step_id {
header.push_str(&format!("\nSpawned from: {}", parent.as_str()));
}
header
}
fn build_status_section(&self, trace: &ExecutionTrace, outcomes: &mut Vec<String>) -> String {
let status_str = match trace.status {
ExecutionStatus::Completed => "COMPLETED",
ExecutionStatus::Failed => "FAILED",
ExecutionStatus::Cancelled => "CANCELLED",
ExecutionStatus::TimedOut => "TIMED_OUT",
ExecutionStatus::Running => "RUNNING",
};
let mut section = format!("Status: {}", status_str);
if let Some(output) = &trace.final_output {
let truncated = self.truncate(output, 300);
section.push_str(&format!("\nResult: {}", truncated));
outcomes.push(format!("Output: {}", self.truncate(output, 100)));
}
section
}
fn build_steps_section(&self, steps: &[StepSummary]) -> String {
let steps_to_show: Vec<_> = steps.iter().take(self.config.max_steps).collect();
let total = steps.len();
let shown = steps_to_show.len();
let mut lines: Vec<String> = vec![format!("Steps ({}/{}):", shown, total)];
for (i, step) in steps_to_show.iter().enumerate() {
let status = if step.success { "✓" } else { "✗" };
let mut line = format!(
" {}. {} {} - {}",
i + 1,
status,
step.step_type,
self.truncate(&step.summary, 50)
);
if self.config.include_timing {
if let Some(ms) = step.duration_ms {
line.push_str(&format!(" ({}ms)", ms));
}
}
lines.push(line);
}
if total > shown {
lines.push(format!(" ... and {} more steps", total - shown));
}
lines.join("\n")
}
fn build_decisions_section(
&self,
decisions: &[DecisionSummary],
learnings: &mut Vec<String>,
) -> String {
let decisions_to_show: Vec<_> = decisions.iter().take(self.config.max_decisions).collect();
let mut lines: Vec<String> = vec!["Key Decisions:".to_string()];
for decision in decisions_to_show {
lines.push(format!(
" • {} (confidence: {:.0}%)",
self.truncate(&decision.decision, 80),
decision.confidence * 100.0
));
learnings.push(format!(
"Decision: {} - Rationale: {}",
self.truncate(&decision.decision, 50),
self.truncate(&decision.rationale, 50)
));
}
lines.join("\n")
}
fn estimate_original_tokens(&self, trace: &ExecutionTrace) -> usize {
let mut estimate = 0;
for step in &trace.steps {
estimate += self.token_counter.count(&step.summary);
if let Some(output) = &step.key_output {
estimate += self.token_counter.count(output);
}
}
for decision in &trace.decisions {
estimate += self.token_counter.count(&decision.decision);
estimate += self.token_counter.count(&decision.rationale);
}
if let Some(output) = &trace.final_output {
estimate += self.token_counter.count(output);
}
if let Some(error) = &trace.error {
estimate += self.token_counter.count(error);
}
estimate
}
fn truncate(&self, text: &str, max_len: usize) -> String {
if text.len() <= max_len {
text.to_string()
} else {
format!("{}...", &text[..max_len.saturating_sub(3)])
}
}
pub fn condense_multiple(&self, traces: &[ExecutionTrace]) -> CondensedResult {
if traces.is_empty() {
let empty_segment = ContextSegment::child_summary(
"No child executions".to_string(),
3,
next_sequence(),
StepId::new(),
)
.with_priority(ContextPriority::Low);
return CondensedResult {
execution_id: ExecutionId::new(),
summary: "No executions to condense".to_string(),
outcomes: Vec::new(),
learnings: Vec::new(),
context_segment: empty_segment,
token_count: 0,
original_tokens: 0,
compression_ratio: 1.0,
condensed_at: Utc::now(),
};
}
if traces.len() == 1 {
return self.condense(&traces[0]);
}
let mut parts: Vec<String> = Vec::new();
let mut all_outcomes: Vec<String> = Vec::new();
let mut all_learnings: Vec<String> = Vec::new();
let mut total_original = 0;
parts.push(format!("Parallel Executions: {} total", traces.len()));
let tokens_per_trace = self.config.target_tokens / traces.len();
for (i, trace) in traces.iter().enumerate() {
let brief_config = CondenserConfig {
target_tokens: tokens_per_trace,
max_tokens: tokens_per_trace + 100,
include_steps: false,
max_steps: 3,
..self.config.clone()
};
let condenser = ResultCondenser::with_config(brief_config);
let condensed = condenser.condense(trace);
parts.push(format!(
"\n[{}/{}] {}",
i + 1,
traces.len(),
condensed.summary
));
all_outcomes.extend(condensed.outcomes);
all_learnings.extend(condensed.learnings);
total_original += condensed.original_tokens;
}
let summary = parts.join("\n");
let token_count = self.token_counter.count(&summary);
let segment_content = format!("[Parallel Executions]\n{}", summary);
let segment_tokens = self.token_counter.count(&segment_content);
let context_segment = ContextSegment::child_summary(
segment_content,
segment_tokens,
next_sequence(),
traces[0].parent_step_id.clone().unwrap_or_default(),
)
.with_priority(ContextPriority::Medium);
CondensedResult {
execution_id: traces[0].execution_id.clone(),
summary,
outcomes: all_outcomes,
learnings: all_learnings,
context_segment,
token_count,
original_tokens: total_original,
compression_ratio: if total_original > 0 {
token_count as f64 / total_original as f64
} else {
1.0
},
condensed_at: Utc::now(),
}
}
}
/// `Default` delegates to `new`: default token counter and
/// `CondenserConfig::default()` settings.
impl Default for ResultCondenser {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Fresh IDs per call; only uniqueness matters for these tests.
    fn test_execution_id() -> ExecutionId {
        ExecutionId::new()
    }
    fn test_step_id() -> StepId {
        StepId::new()
    }
    // Spot-checks the default configuration values.
    #[test]
    fn test_condenser_config_defaults() {
        let config = CondenserConfig::default();
        assert_eq!(config.target_tokens, 1500);
        assert!(config.include_steps);
    }
    // A bare completed trace should render a COMPLETED status with tokens.
    #[test]
    fn test_condense_simple_trace() {
        let condenser = ResultCondenser::new();
        let trace =
            ExecutionTrace::new(test_execution_id()).complete("Task completed successfully");
        let result = condenser.condense(&trace);
        assert!(result.summary.contains("COMPLETED"));
        assert!(result.token_count > 0);
    }
    // Steps should appear in the summary with their step_type labels.
    #[test]
    fn test_condense_with_steps() {
        let condenser = ResultCondenser::new();
        let trace = ExecutionTrace::new(test_execution_id())
            .add_step(StepSummary {
                step_id: test_step_id(),
                step_type: "llm_call".to_string(),
                summary: "Generated response".to_string(),
                success: true,
                duration_ms: Some(500),
                key_output: Some("Response text".to_string()),
            })
            .add_step(StepSummary {
                step_id: test_step_id(),
                step_type: "tool_call".to_string(),
                summary: "Called search API".to_string(),
                success: true,
                duration_ms: Some(200),
                key_output: None,
            })
            .complete("Done");
        let result = condenser.condense(&trace);
        assert!(result.summary.contains("Steps"));
        assert!(result.summary.contains("llm_call"));
        assert!(result.summary.contains("tool_call"));
    }
    // Failures should surface the error text and produce a learning.
    #[test]
    fn test_condense_failed_trace() {
        let condenser = ResultCondenser::new();
        let trace =
            ExecutionTrace::new(test_execution_id()).fail("Connection timeout after 30 seconds");
        let result = condenser.condense(&trace);
        assert!(result.summary.contains("FAILED"));
        assert!(result.summary.contains("timeout"));
        assert!(!result.learnings.is_empty());
    }
    // Decisions should render in the summary and feed the learnings list.
    #[test]
    fn test_condense_with_decisions() {
        let condenser = ResultCondenser::new();
        let trace = ExecutionTrace::new(test_execution_id())
            .add_decision(DecisionSummary {
                decision: "Use caching strategy".to_string(),
                rationale: "Reduce API calls".to_string(),
                confidence: 0.85,
                step_id: test_step_id(),
            })
            .complete("Done");
        let result = condenser.condense(&trace);
        assert!(result.summary.contains("Key Decisions"));
        assert!(result.summary.contains("caching"));
        assert!(!result.learnings.is_empty());
    }
    // A verbose trace must be truncated near the configured budget.
    // The bound is 150 (not max_tokens=100) to allow truncation slack.
    #[test]
    fn test_condense_respects_token_limit() {
        let config = CondenserConfig {
            max_tokens: 100,
            target_tokens: 50,
            ..Default::default()
        };
        let condenser = ResultCondenser::with_config(config);
        let mut trace = ExecutionTrace::new(test_execution_id());
        for i in 0..20 {
            trace = trace.add_step(StepSummary {
                step_id: test_step_id(),
                step_type: format!("step_{}", i),
                summary: format!(
                    "This is a detailed summary of step {} with lots of information",
                    i
                ),
                success: true,
                duration_ms: Some(100),
                key_output: Some(format!("Output from step {}", i)),
            });
        }
        trace = trace.complete("Final result with lots of detail");
        let result = condenser.condense(&trace);
        assert!(result.token_count <= 150);
    }
    // Multiple traces get the "[i/n]" combined rendering with all statuses.
    #[test]
    fn test_condense_multiple_traces() {
        let condenser = ResultCondenser::new();
        let traces = vec![
            ExecutionTrace::new(test_execution_id()).complete("Result 1"),
            ExecutionTrace::new(test_execution_id()).complete("Result 2"),
            ExecutionTrace::new(test_execution_id()).fail("Error in trace 3"),
        ];
        let result = condenser.condense_multiple(&traces);
        assert!(result.summary.contains("Parallel Executions: 3"));
        assert!(result.summary.contains("COMPLETED"));
        assert!(result.summary.contains("FAILED"));
    }
    // Sanity bounds on the original/condensed token relationship.
    #[test]
    fn test_compression_ratio() {
        let condenser = ResultCondenser::new();
        let mut trace = ExecutionTrace::new(test_execution_id());
        for i in 0..10 {
            trace = trace.add_step(StepSummary {
                step_id: test_step_id(),
                step_type: "step".to_string(),
                summary: format!("Detailed summary for step {} with additional context and more information to ensure we have enough content", i),
                success: true,
                duration_ms: Some(100),
                key_output: Some(format!("Long output content for step {} that adds more tokens and even more details to increase the token count significantly beyond what will be included in the final summary. This should definitely be truncated.", i)),
            });
        }
        trace = trace.complete("Comprehensive final output with all the details and extra information that extends the content significantly.");
        let result = condenser.condense(&trace);
        assert!(result.original_tokens > 0);
        assert!(result.token_count > 0);
        assert!(
            result.original_tokens >= result.token_count / 2,
            "Original tokens ({}) should be at least half of final tokens ({})",
            result.original_tokens,
            result.token_count
        );
    }
    // Completed traces yield Medium-priority segments; failed ones High.
    #[test]
    fn test_context_segment_priority() {
        let condenser = ResultCondenser::new();
        let success_trace = ExecutionTrace::new(test_execution_id()).complete("Done");
        let success_result = condenser.condense(&success_trace);
        assert_eq!(
            success_result.context_segment.priority,
            ContextPriority::Medium
        );
        let fail_trace = ExecutionTrace::new(test_execution_id()).fail("Error");
        let fail_result = condenser.condense(&fail_trace);
        assert_eq!(fail_result.context_segment.priority, ContextPriority::High);
    }
}