scud/commands/swarm/events.rs

//! Swarm event logging and aggregation
//!
//! Captures structured events from agent execution for retrospective analysis.
//! Events are stored in SQLite for queryable access.
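//!
//! A minimal end-to-end sketch (illustrative only; the project root and
//! session id are placeholders, and errors are bubbled up with `?`):
//!
//! ```ignore
//! let writer = EventWriter::new(Path::new("."), "session-1")?;
//! writer.log_spawned("task:1")?;
//! writer.log_completed("task:1", true, 1_200)?;
//!
//! let reader = EventReader::new(Path::new("."));
//! let events = reader.load_session("session-1")?;
//! let timeline = RetrospectiveTimeline::from_events("session-1", events);
//! println!("{}", timeline.to_summary());
//! ```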

use std::path::{Path, PathBuf};

use anyhow::Result;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

#[cfg(feature = "zmq")]
use super::publisher::EventPublisher;
use crate::db::Database;

#[cfg(feature = "zmq")]
use zmq;

/// Event kinds that can be logged
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind", rename_all = "snake_case")]
pub enum EventKind {
    // Lifecycle events (from orchestrator)
    Spawned,
    Started,
    Completed {
        success: bool,
        duration_ms: u64,
    },
    Failed {
        reason: String,
    },

    // Tool events (from hooks)
    ToolCall {
        tool: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        input_summary: Option<String>,
    },
    ToolResult {
        tool: String,
        success: bool,
        #[serde(skip_serializing_if = "Option::is_none")]
        duration_ms: Option<u64>,
    },

    // File events (from hooks)
    FileRead {
        path: String,
    },
    FileWrite {
        path: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        lines_changed: Option<u32>,
    },

    // Dependency events (from orchestrator)
    DependencyMet {
        dependency_id: String,
    },
    Unblocked {
        by_task_id: String,
    },

    // Output capture
    Output {
        line: String,
    },

    // Wave lifecycle events (from orchestrator)
    WaveStarted {
        wave_number: usize,
        task_count: usize,
    },
    WaveCompleted {
        wave_number: usize,
        duration_ms: u64,
    },

    // Validation events (from orchestrator)
    ValidationPassed,
    ValidationFailed {
        failures: Vec<String>,
    },

    // Repair events (from orchestrator)
    RepairStarted {
        attempt: usize,
        task_ids: Vec<String>,
    },
    RepairCompleted {
        attempt: usize,
        success: bool,
    },

    // Heartbeat events (for connection liveness detection)
    Heartbeat,

    // Custom events
    Custom {
        name: String,
        #[serde(skip_serializing_if = "Option::is_none")]
        data: Option<serde_json::Value>,
    },
}

/// A single event in the timeline
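///
/// The `kind` tag from [`EventKind`] is flattened into the same JSON object as
/// the envelope fields. A rough sketch of the expected shape (field order and
/// timestamp precision may vary):
///
/// ```ignore
/// let event = AgentEvent::spawned("session-1", "task:1");
/// let json = serde_json::to_string(&event)?;
/// // json resembles:
/// // {"timestamp":"2025-01-15T12:00:00Z","session_id":"session-1",
/// //  "task_id":"task:1","kind":"spawned"}
/// ```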
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AgentEvent {
    /// When this event occurred
    pub timestamp: DateTime<Utc>,
    /// The swarm session this belongs to
    pub session_id: String,
    /// Which task/agent generated this event
    pub task_id: String,
    /// The event details
    #[serde(flatten)]
    pub event: EventKind,
}

impl AgentEvent {
    pub fn new(session_id: &str, task_id: &str, event: EventKind) -> Self {
        Self {
            timestamp: Utc::now(),
            session_id: session_id.to_string(),
            task_id: task_id.to_string(),
            event,
        }
    }

    /// Create a spawned event
    pub fn spawned(session_id: &str, task_id: &str) -> Self {
        Self::new(session_id, task_id, EventKind::Spawned)
    }

    /// Create a completed event
    pub fn completed(session_id: &str, task_id: &str, success: bool, duration_ms: u64) -> Self {
        Self::new(
            session_id,
            task_id,
            EventKind::Completed {
                success,
                duration_ms,
            },
        )
    }

    /// Create a tool call event
    pub fn tool_call(
        session_id: &str,
        task_id: &str,
        tool: &str,
        input_summary: Option<&str>,
    ) -> Self {
        Self::new(
            session_id,
            task_id,
            EventKind::ToolCall {
                tool: tool.to_string(),
                input_summary: input_summary.map(String::from),
            },
        )
    }

    /// Create an unblocked event (task was unblocked by another task completing)
    pub fn unblocked(session_id: &str, task_id: &str, by_task_id: &str) -> Self {
        Self::new(
            session_id,
            task_id,
            EventKind::Unblocked {
                by_task_id: by_task_id.to_string(),
            },
        )
    }
}

/// Writer for appending events to SQLite database
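///
/// A minimal usage sketch (the project root and session id are placeholders):
///
/// ```ignore
/// let writer = EventWriter::new(project_root, "swarm-session")?;
/// writer.log_wave_started(1, 4)?;
/// writer.log_spawned("task:1")?;
/// writer.log_completed("task:1", true, 5_000)?;
/// writer.log_wave_completed(1, 6_500)?;
/// println!("events stored at {:?}", writer.session_file());
/// ```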
pub struct EventWriter {
    session_id: String,
    db: Option<Database>,
    /// ZMQ publisher for real-time event streaming
    #[cfg(feature = "zmq")]
    zmq_publisher: Option<super::publisher::EventPublisher>,
    /// Flag to indicate if ZMQ is enabled (for when zmq feature is disabled)
    #[cfg(not(feature = "zmq"))]
    #[allow(dead_code)]
    zmq_enabled: bool,
}

impl EventWriter {
    pub fn new(project_root: &Path, session_id: &str) -> Result<Self> {
        // Ensure .scud directory exists
        let scud_dir = project_root.join(".scud");
        std::fs::create_dir_all(&scud_dir)?;

        let db = Database::new(project_root);
        db.initialize()?;

        Ok(Self {
            session_id: session_id.to_string(),
            db: Some(db),
            #[cfg(feature = "zmq")]
            zmq_publisher: None,
            #[cfg(not(feature = "zmq"))]
            zmq_enabled: false,
        })
    }

    /// Create EventWriter with optional ZMQ publishing
    pub fn new_with_zmq(project_root: &Path, session_id: &str, enable_zmq: bool) -> Result<Self> {
        // Ensure .scud directory exists
        let scud_dir = project_root.join(".scud");
        std::fs::create_dir_all(&scud_dir)?;

        let db = Database::new(project_root);
        // Initialize the database before first use, mirroring `new()` above.
        db.initialize()?;

        #[cfg(feature = "zmq")]
        let zmq_publisher = if enable_zmq {
            let session_dir = project_root.join(".scud/swarm").join(session_id);
            match super::publisher::EventPublisher::new(&session_dir) {
                Ok(pub_) => {
                    tracing::info!("ZMQ event publishing enabled for session {}", session_id);
                    Some(pub_)
                }
                Err(e) => {
                    tracing::warn!("Failed to create ZMQ publisher: {}", e);
                    None
                }
            }
        } else {
            None
        };

        Ok(Self {
            session_id: session_id.to_string(),
            db: Some(db),
            #[cfg(feature = "zmq")]
            zmq_publisher,
            #[cfg(not(feature = "zmq"))]
            zmq_enabled: enable_zmq,
        })
    }

    /// Get the session ID
    pub fn session_id(&self) -> &str {
        &self.session_id
    }

    /// Get the path to the database file (for display purposes)
    pub fn session_file(&self) -> Option<PathBuf> {
        self.db.as_ref().map(|db| db.path().to_path_buf())
    }

    /// Get the ZMQ publisher for control command handling
    #[cfg(feature = "zmq")]
    pub fn zmq_publisher(&self) -> Option<&super::publisher::EventPublisher> {
        self.zmq_publisher.as_ref()
    }

    /// Publish event via ZMQ (non-blocking, best-effort)
    #[cfg(feature = "zmq")]
    fn zmq_publish(&self, event: super::publisher::ZmqEvent) {
        if let Some(ref publisher) = self.zmq_publisher {
            if let Err(e) = publisher.publish(&event) {
                tracing::debug!("ZMQ publish error (non-fatal): {}", e);
            }
        }
    }

    /// No-op ZMQ publish when zmq feature is disabled
    #[cfg(not(feature = "zmq"))]
    #[allow(dead_code)]
    fn zmq_publish(&self, _event: super::publisher::ZmqEvent) {
        // ZMQ not available
    }

    /// Write an event to the database
    pub fn write(&self, event: &AgentEvent) -> Result<()> {
        if let Some(ref db) = self.db {
            let guard = db.connection()?;
            let conn = guard.as_ref().unwrap();
            crate::db::events::insert_event(conn, event)?;
        }

        // Also publish via ZMQ if configured
        self.zmq_publish_event(event);

        Ok(())
    }

    /// Convert AgentEvent to ZmqEvent and publish
    #[cfg(feature = "zmq")]
    fn zmq_publish_event(&self, event: &AgentEvent) {
        use super::publisher::ZmqEvent;

        let zmq_event = match &event.event {
            EventKind::Spawned => Some(ZmqEvent::TaskSpawned {
                task_id: event.task_id.clone(),
            }),
            EventKind::WaveStarted {
                wave_number,
                task_count,
            } => Some(ZmqEvent::WaveStarted {
                wave: *wave_number,
                tasks: vec![], // TODO: could populate if needed
                task_count: *task_count,
            }),
            EventKind::WaveCompleted {
                wave_number,
                duration_ms,
            } => Some(ZmqEvent::WaveCompleted {
                wave: *wave_number,
                duration_ms: Some(*duration_ms),
            }),
            EventKind::ValidationPassed => Some(ZmqEvent::ValidationPassed),
            EventKind::ValidationFailed { failures } => Some(ZmqEvent::ValidationFailed {
                failures: failures.clone(),
            }),
            EventKind::ToolCall {
                tool,
                input_summary,
                ..
            } => Some(ZmqEvent::ToolCall {
                task_id: event.task_id.clone(),
                tool: tool.clone(),
                input_summary: input_summary.clone(),
            }),
            EventKind::ToolResult {
                tool,
                success,
                duration_ms,
                ..
            } => Some(ZmqEvent::ToolResult {
                task_id: event.task_id.clone(),
                tool: tool.clone(),
                success: *success,
                duration_ms: *duration_ms,
            }),
            EventKind::FileRead { path, .. } => Some(ZmqEvent::FileRead {
                task_id: event.task_id.clone(),
                path: path.clone(),
            }),
            EventKind::FileWrite {
                path,
                lines_changed,
                ..
            } => Some(ZmqEvent::FileWrite {
                task_id: event.task_id.clone(),
                path: path.clone(),
                lines_changed: *lines_changed,
            }),
            EventKind::Completed {
                success,
                duration_ms,
            } => Some(ZmqEvent::TaskCompleted {
                task_id: event.task_id.clone(),
                success: *success,
                duration_ms: Some(*duration_ms),
            }),
            EventKind::Heartbeat => Some(ZmqEvent::Heartbeat {
                timestamp: event.timestamp.to_rfc3339(),
            }),
            _ => None, // Other events not published via ZMQ for now
        };

        if let Some(zmq_event) = zmq_event {
            self.zmq_publish(zmq_event);
        }
    }

    /// No-op ZMQ publish when zmq feature is disabled
    #[cfg(not(feature = "zmq"))]
    fn zmq_publish_event(&self, _event: &AgentEvent) {
        // ZMQ not available
    }

    /// Write an event (SQLite stores all events in one table, no separate task log needed)
    pub fn write_with_task_log(&self, event: &AgentEvent) -> Result<()> {
        self.write(event)
    }

    /// Log a spawn event
    pub fn log_spawned(&self, task_id: &str) -> Result<()> {
        self.write(&AgentEvent::spawned(&self.session_id, task_id))
    }

    /// Log a completion event
    pub fn log_completed(&self, task_id: &str, success: bool, duration_ms: u64) -> Result<()> {
        self.write(&AgentEvent::completed(
            &self.session_id,
            task_id,
            success,
            duration_ms,
        ))
    }

    /// Log an unblocked event
    pub fn log_unblocked(&self, task_id: &str, by_task_id: &str) -> Result<()> {
        self.write(&AgentEvent::unblocked(
            &self.session_id,
            task_id,
            by_task_id,
        ))
    }

    /// Log a wave started event
    pub fn log_wave_started(&self, wave_number: usize, task_count: usize) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            &format!("wave:{}", wave_number),
            EventKind::WaveStarted {
                wave_number,
                task_count,
            },
        ))
    }

    /// Log a wave completed event
    pub fn log_wave_completed(&self, wave_number: usize, duration_ms: u64) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            &format!("wave:{}", wave_number),
            EventKind::WaveCompleted {
                wave_number,
                duration_ms,
            },
        ))
    }

    /// Log a validation passed event
    pub fn log_validation_passed(&self) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "validation",
            EventKind::ValidationPassed,
        ))
    }

    /// Log a validation failed event
    pub fn log_validation_failed(&self, failures: &[String]) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "validation",
            EventKind::ValidationFailed {
                failures: failures.to_vec(),
            },
        ))
    }

    /// Log a repair started event
    pub fn log_repair_started(&self, attempt: usize, task_ids: &[String]) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "repair",
            EventKind::RepairStarted {
                attempt,
                task_ids: task_ids.to_vec(),
            },
        ))
    }

    /// Log a repair completed event
    pub fn log_repair_completed(&self, attempt: usize, success: bool) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "repair",
            EventKind::RepairCompleted { attempt, success },
        ))
    }

    /// Log a heartbeat event
    pub fn log_heartbeat(&self) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "heartbeat",
            EventKind::Heartbeat,
        ))
    }

    /// Log a swarm started event
    pub fn log_swarm_started(&self, tag: &str, total_waves: usize) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "swarm",
            EventKind::Custom {
                name: "swarm_started".to_string(),
                data: Some(serde_json::json!({
                    "tag": tag,
                    "total_waves": total_waves
                })),
            },
        ))
    }

    /// Log a swarm completed event
    pub fn log_swarm_completed(&self, success: bool) -> Result<()> {
        self.write(&AgentEvent::new(
            &self.session_id,
            "swarm",
            EventKind::Custom {
                name: "swarm_completed".to_string(),
                data: Some(serde_json::json!({
                    "success": success
                })),
            },
        ))
    }

    /// Publish a ZMQ event directly (for swarm lifecycle events)
    #[cfg(feature = "zmq")]
    pub fn publish_event(&self, event: &super::publisher::ZmqEvent) -> Result<()> {
        if let Some(ref publisher) = self.zmq_publisher {
            publisher.publish(event)?;
        }
        Ok(())
    }

    /// No-op publish_event when zmq feature is disabled
    #[cfg(not(feature = "zmq"))]
    pub fn publish_event(&self, _event: &super::publisher::ZmqEvent) -> Result<()> {
        Ok(())
    }
}

/// Reader for loading events from SQLite database
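///
/// A minimal usage sketch (placeholder project root; errors bubbled with `?`):
///
/// ```ignore
/// let reader = EventReader::new(project_root);
/// for session in reader.list_sessions()? {
///     let events = reader.load_session(&session)?;
///     println!("{}: {} events", session, events.len());
/// }
/// ```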
pub struct EventReader {
    db: Database,
}

impl EventReader {
    pub fn new(project_root: &Path) -> Self {
        Self {
            db: Database::new(project_root),
        }
    }

    /// Load all events for a session
    pub fn load_session(&self, session_id: &str) -> Result<Vec<AgentEvent>> {
        self.load_all_for_session(session_id)
    }

    /// Load all events for a session (already sorted by timestamp in SQLite)
    pub fn load_all_for_session(&self, session_id: &str) -> Result<Vec<AgentEvent>> {
        let guard = self.db.connection()?;
        let conn = guard.as_ref().unwrap();
        crate::db::events::get_events_for_session(conn, session_id)
    }

    /// List available sessions
    pub fn list_sessions(&self) -> Result<Vec<String>> {
        let guard = self.db.connection()?;
        let conn = guard.as_ref().unwrap();
        crate::db::events::list_sessions(conn)
    }
}

/// Aggregated timeline for retrospective analysis
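///
/// Built purely from in-memory events, so it can be exercised without a
/// database (a sketch mirroring `test_retrospective_timeline` below):
///
/// ```ignore
/// let events = vec![
///     AgentEvent::spawned("s1", "task:1"),
///     AgentEvent::completed("s1", "task:1", true, 5_000),
/// ];
/// let timeline = RetrospectiveTimeline::from_events("s1", events);
/// assert_eq!(timeline.tasks.len(), 1);
/// println!("{}", timeline.to_summary());
/// ```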
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RetrospectiveTimeline {
    pub session_id: String,
    pub started_at: Option<DateTime<Utc>>,
    pub completed_at: Option<DateTime<Utc>>,
    pub tasks: Vec<TaskTimeline>,
    pub total_events: usize,
}

/// Timeline for a single task
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskTimeline {
    pub task_id: String,
    pub spawned_at: Option<DateTime<Utc>>,
    pub completed_at: Option<DateTime<Utc>>,
    pub success: Option<bool>,
    pub duration_ms: Option<u64>,
    pub tools_used: Vec<String>,
    pub files_read: Vec<String>,
    pub files_written: Vec<String>,
    pub unblocked_by: Vec<String>,
    pub events: Vec<AgentEvent>,
}

impl RetrospectiveTimeline {
    /// Build a timeline from events
    pub fn from_events(session_id: &str, events: Vec<AgentEvent>) -> Self {
        use std::collections::HashMap;

        let mut task_map: HashMap<String, TaskTimeline> = HashMap::new();

        for event in &events {
            let task = task_map
                .entry(event.task_id.clone())
                .or_insert_with(|| TaskTimeline {
                    task_id: event.task_id.clone(),
                    spawned_at: None,
                    completed_at: None,
                    success: None,
                    duration_ms: None,
                    tools_used: Vec::new(),
                    files_read: Vec::new(),
                    files_written: Vec::new(),
                    unblocked_by: Vec::new(),
                    events: Vec::new(),
                });

            task.events.push(event.clone());

            match &event.event {
                EventKind::Spawned => {
                    task.spawned_at = Some(event.timestamp);
                }
                EventKind::Completed {
                    success,
                    duration_ms,
                } => {
                    task.completed_at = Some(event.timestamp);
                    task.success = Some(*success);
                    task.duration_ms = Some(*duration_ms);
                }
                EventKind::ToolCall { tool, .. } => {
                    if !task.tools_used.contains(tool) {
                        task.tools_used.push(tool.clone());
                    }
                }
                EventKind::FileRead { path } => {
                    if !task.files_read.contains(path) {
                        task.files_read.push(path.clone());
                    }
                }
                EventKind::FileWrite { path, .. } => {
                    if !task.files_written.contains(path) {
                        task.files_written.push(path.clone());
                    }
                }
                EventKind::Unblocked { by_task_id } => {
                    if !task.unblocked_by.contains(by_task_id) {
                        task.unblocked_by.push(by_task_id.clone());
                    }
                }
                _ => {}
            }
        }

        let tasks: Vec<TaskTimeline> = task_map.into_values().collect();

        let started_at = events.first().map(|e| e.timestamp);
        let completed_at = events.last().map(|e| e.timestamp);

        Self {
            session_id: session_id.to_string(),
            started_at,
            completed_at,
            tasks,
            total_events: events.len(),
        }
    }

    /// Generate a text summary
    pub fn to_summary(&self) -> String {
        use std::fmt::Write;
        let mut s = String::new();

        writeln!(s, "Session: {}", self.session_id).unwrap();
        if let (Some(start), Some(end)) = (self.started_at, self.completed_at) {
            let duration = end.signed_duration_since(start);
            writeln!(s, "Duration: {}s", duration.num_seconds()).unwrap();
        }
        writeln!(s, "Total events: {}", self.total_events).unwrap();
        writeln!(s, "Tasks: {}", self.tasks.len()).unwrap();
        writeln!(s).unwrap();

        for task in &self.tasks {
            writeln!(s, "  [{}]", task.task_id).unwrap();
            if let Some(success) = task.success {
                writeln!(s, "    Status: {}", if success { "✓" } else { "✗" }).unwrap();
            }
            if let Some(duration) = task.duration_ms {
                writeln!(s, "    Duration: {}ms", duration).unwrap();
            }
            if !task.tools_used.is_empty() {
                writeln!(s, "    Tools: {}", task.tools_used.join(", ")).unwrap();
            }
            if !task.files_written.is_empty() {
                writeln!(s, "    Files written: {}", task.files_written.len()).unwrap();
            }
            if !task.unblocked_by.is_empty() {
                writeln!(s, "    Unblocked by: {}", task.unblocked_by.join(", ")).unwrap();
            }
        }

        s
    }
}

/// Print a retrospective for a session
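///
/// Passing `None` lists the available sessions and shows the most recent one;
/// passing `Some(id)` prints that session directly. A sketch (placeholder
/// session id):
///
/// ```ignore
/// print_retro(project_root, None)?;
/// print_retro(project_root, Some("swarm-session"))?;
/// ```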
pub fn print_retro(project_root: &Path, session_id: Option<&str>) -> Result<()> {
    use colored::Colorize;

    let reader = EventReader::new(project_root);

    // If no session specified, list available sessions
    let session_id = match session_id {
        Some(id) => id.to_string(),
        None => {
            let sessions = reader.list_sessions()?;
            if sessions.is_empty() {
                println!("{}", "No swarm sessions found.".yellow());
                println!("Run a swarm first: scud swarm --tag <tag>");
                return Ok(());
            }

            println!("{}", "Available sessions:".blue().bold());
            for session in &sessions {
                println!("  • {}", session);
            }

            // Use the most recent session
            if let Some(latest) = sessions.last() {
                println!();
                println!("Showing latest session: {}", latest.cyan());
                latest.clone()
            } else {
                return Ok(());
            }
        }
    };

    // Load events
    let events = reader.load_all_for_session(&session_id)?;

    if events.is_empty() {
        println!("{}", "No events found for this session.".yellow());
        return Ok(());
    }

    // Build timeline
    let timeline = RetrospectiveTimeline::from_events(&session_id, events);

    // Print header
    println!();
    println!("{}", "Swarm Retrospective".blue().bold());
    println!("{}", "═".repeat(60).blue());
    println!();

    println!("  {} {}", "Session:".dimmed(), timeline.session_id.cyan());

    if let (Some(start), Some(end)) = (timeline.started_at, timeline.completed_at) {
        let duration = end.signed_duration_since(start);
        println!(
            "  {} {}s",
            "Duration:".dimmed(),
            duration.num_seconds().to_string().cyan()
        );
        println!(
            "  {} {}",
            "Started:".dimmed(),
            start.format("%Y-%m-%d %H:%M:%S").to_string().dimmed()
        );
    }

    println!(
        "  {} {}",
        "Events:".dimmed(),
        timeline.total_events.to_string().cyan()
    );
    println!(
        "  {} {}",
        "Tasks:".dimmed(),
        timeline.tasks.len().to_string().cyan()
    );
    println!();

    // Print task details
    println!("{}", "Task Timeline".yellow().bold());
    println!("{}", "─".repeat(60).yellow());

    for task in &timeline.tasks {
        let status_icon = match task.success {
            Some(true) => "✓".green(),
            Some(false) => "✗".red(),
            None => "?".yellow(),
        };

        println!();
        println!("  {} [{}]", status_icon, task.task_id.cyan());

        if let Some(duration) = task.duration_ms {
            println!("    Duration: {}ms", duration.to_string().dimmed());
        }

        if !task.tools_used.is_empty() {
            println!("    Tools: {}", task.tools_used.join(", ").dimmed());
        }

        if !task.files_written.is_empty() {
            println!(
                "    Files written: {}",
                task.files_written.len().to_string().dimmed()
            );
            for file in task.files_written.iter().take(5) {
                println!("      • {}", file.dimmed());
            }
            if task.files_written.len() > 5 {
                println!(
                    "      ... and {} more",
                    (task.files_written.len() - 5).to_string().dimmed()
                );
            }
        }

        if !task.unblocked_by.is_empty() {
            println!(
                "    Unblocked by: {}",
                task.unblocked_by.join(", ").dimmed()
            );
        }
    }

    println!();
    Ok(())
}

/// Export retrospective as JSON
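///
/// A sketch of exporting one session's timeline to a file (the output path and
/// session id are illustrative):
///
/// ```ignore
/// let json = export_retro_json(project_root, "swarm-session")?;
/// std::fs::write(".scud/retro.json", json)?;
/// ```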
pub fn export_retro_json(project_root: &Path, session_id: &str) -> Result<String> {
    let reader = EventReader::new(project_root);
    let events = reader.load_all_for_session(session_id)?;
    let timeline = RetrospectiveTimeline::from_events(session_id, events);
    Ok(serde_json::to_string_pretty(&timeline)?)
}

#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::TempDir;

    #[test]
    fn test_event_serialization() {
        let event = AgentEvent::spawned("session-1", "task:1");
        let json = serde_json::to_string(&event).unwrap();
        assert!(json.contains("spawned"));
        assert!(json.contains("task:1"));

        let parsed: AgentEvent = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.task_id, "task:1");
    }

    #[test]
    fn test_event_writer_reader() {
        let temp_dir = TempDir::new().unwrap();
        let project_root = temp_dir.path();

        let writer = EventWriter::new(project_root, "test-session").unwrap();

        // Write events
        writer.log_spawned("task:1").unwrap();
        writer.log_spawned("task:2").unwrap();
        writer.log_completed("task:1", true, 1000).unwrap();

        // Read events
        let reader = EventReader::new(project_root);
        let events = reader.load_session("test-session").unwrap();

        assert_eq!(events.len(), 3);
    }

    #[test]
    fn test_retrospective_timeline() {
        let events = vec![
            AgentEvent::spawned("s1", "task:1"),
            AgentEvent::spawned("s1", "task:2"),
            AgentEvent::tool_call("s1", "task:1", "Read", Some("src/main.rs")),
            AgentEvent::completed("s1", "task:1", true, 5000),
            AgentEvent::unblocked("s1", "task:3", "task:1"),
            AgentEvent::completed("s1", "task:2", true, 3000),
        ];

        let timeline = RetrospectiveTimeline::from_events("s1", events);

        assert_eq!(timeline.tasks.len(), 3); // task:1, task:2, task:3
        assert_eq!(timeline.total_events, 6);

        let task1 = timeline
            .tasks
            .iter()
            .find(|t| t.task_id == "task:1")
            .unwrap();
        assert_eq!(task1.success, Some(true));
        assert_eq!(task1.duration_ms, Some(5000));
        assert!(task1.tools_used.contains(&"Read".to_string()));
    }

    #[test]
    fn test_deduplication_preserves_different_tool_calls() {
        use chrono::TimeZone;

        // Create two tool call events with the same timestamp and task_id
        // but different tool names - these should NOT be deduplicated
        let fixed_time = Utc.with_ymd_and_hms(2025, 1, 15, 12, 0, 0).unwrap();

        let event1 = AgentEvent {
            timestamp: fixed_time,
            session_id: "s1".to_string(),
            task_id: "task:1".to_string(),
            event: EventKind::ToolCall {
                tool: "Read".to_string(),
                input_summary: Some("file1.rs".to_string()),
            },
        };

        let event2 = AgentEvent {
            timestamp: fixed_time,
            session_id: "s1".to_string(),
            task_id: "task:1".to_string(),
            event: EventKind::ToolCall {
                tool: "Write".to_string(),
                input_summary: Some("file2.rs".to_string()),
            },
        };

        let mut events = vec![event1, event2];

        // Sort by timestamp, then drop consecutive entries whose timestamp,
        // task_id, and serialized event payload are all identical
        events.sort_by_key(|e| e.timestamp);
        events.dedup_by(|a, b| {
            a.timestamp == b.timestamp
                && a.task_id == b.task_id
                && serde_json::to_string(&a.event).ok() == serde_json::to_string(&b.event).ok()
        });

        // Both events should remain (different tool names)
        assert_eq!(events.len(), 2);
    }

    #[test]
    fn test_deduplication_removes_true_duplicates() {
        use chrono::TimeZone;

        // Create two identical events - these SHOULD be deduplicated
        let fixed_time = Utc.with_ymd_and_hms(2025, 1, 15, 12, 0, 0).unwrap();

        let event1 = AgentEvent {
            timestamp: fixed_time,
            session_id: "s1".to_string(),
            task_id: "task:1".to_string(),
            event: EventKind::Spawned,
        };

        let event2 = AgentEvent {
            timestamp: fixed_time,
            session_id: "s1".to_string(),
            task_id: "task:1".to_string(),
            event: EventKind::Spawned,
        };

        let mut events = vec![event1, event2];

        events.sort_by_key(|e| e.timestamp);
        events.dedup_by(|a, b| {
            a.timestamp == b.timestamp
                && a.task_id == b.task_id
                && serde_json::to_string(&a.event).ok() == serde_json::to_string(&b.event).ok()
        });

        // Only one event should remain (true duplicate)
        assert_eq!(events.len(), 1);
    }
}