1pub use super::status::{
2 WorkflowMetrics, compute_metrics, compute_metrics_with_events, compute_metrics_with_telemetry,
3};
4
5use std::collections::BTreeMap;
6use std::path::{Path, PathBuf};
7
8use anyhow::Result;
9use chrono::{DateTime, Duration, FixedOffset, TimeZone, Utc};
10
11use crate::task::{Task, load_tasks_from_dir};
12
13use super::board::read_task_lifecycle_timestamps;
14
/// Cycle/lead-time snapshot for a single task, derived from board lifecycle
/// timestamps. Timestamps are Unix seconds; derived durations are whole minutes.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TaskCycleTimeRecord {
    /// Board task id.
    pub task_id: u32,
    /// Task title.
    pub title: String,
    /// Engineer who claimed the task, if any.
    pub engineer: Option<String>,
    /// Priority label (the collector stores it normalized: trimmed, lowercase).
    pub priority: String,
    /// Raw workflow status string (e.g. "in-progress", "done").
    pub status: String,
    /// Creation time as Unix seconds, if recorded.
    pub created_at: Option<i64>,
    /// Work-start time as Unix seconds, if recorded.
    pub started_at: Option<i64>,
    /// Completion time as Unix seconds, if recorded.
    pub completed_at: Option<i64>,
    /// Minutes from started to completed; `None` if either endpoint is missing
    /// or the interval is negative.
    pub cycle_time_minutes: Option<i64>,
    /// Minutes from created to completed; `None` if either endpoint is missing
    /// or the interval is negative.
    pub lead_time_minutes: Option<i64>,
}
28
/// Average cycle time for one priority bucket.
#[derive(Debug, Clone, PartialEq)]
pub struct PriorityCycleTimeSummary {
    /// Normalized (trimmed, lowercase) priority label.
    pub priority: String,
    /// Mean cycle time in minutes across tasks that had a measured cycle time.
    pub average_cycle_time_minutes: f64,
    /// Number of tasks contributing to the average.
    pub completed_tasks: usize,
}
35
/// Per-engineer completion totals and average durations.
#[derive(Debug, Clone, PartialEq)]
pub struct EngineerThroughputSummary {
    /// Engineer name (taken from the task's `claimed_by`).
    pub engineer: String,
    /// Count of completed tasks attributed to this engineer.
    pub completed_tasks: usize,
    /// Mean cycle time in minutes; `None` when no completed task had one.
    pub average_cycle_time_minutes: Option<f64>,
    /// Mean lead time in minutes; `None` when no completed task had one.
    pub average_lead_time_minutes: Option<f64>,
}
43
/// Number of tasks completed within one hour-aligned bucket.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HourlyCompletionCount {
    /// Bucket start as a Unix timestamp aligned to the hour boundary.
    pub hour_start: i64,
    /// Completions whose timestamp fell inside this hour.
    pub completed_tasks: i64,
}
49
/// An in-progress task and how long it has been running.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InProgressTaskSummary {
    /// Board task id.
    pub task_id: u32,
    /// Task title.
    pub title: String,
    /// Engineer who claimed the task, if any.
    pub engineer: Option<String>,
    /// Normalized (trimmed, lowercase) priority label.
    pub priority: String,
    /// Minutes elapsed since the task was started (clamped to >= 0).
    pub minutes_in_progress: i64,
}
58
59#[allow(clippy::too_many_arguments)]
60pub fn build_task_cycle_time_record(
61 task_id: u32,
62 title: impl Into<String>,
63 engineer: Option<&str>,
64 priority: impl Into<String>,
65 status: impl Into<String>,
66 created_at: Option<DateTime<FixedOffset>>,
67 started_at: Option<DateTime<FixedOffset>>,
68 completed_at: Option<DateTime<FixedOffset>>,
69) -> TaskCycleTimeRecord {
70 let created_ts = created_at.map(|value| value.timestamp());
71 let started_ts = started_at.map(|value| value.timestamp());
72 let completed_ts = completed_at.map(|value| value.timestamp());
73 let cycle_time_minutes = duration_minutes(started_at, completed_at);
74 let lead_time_minutes = duration_minutes(created_at, completed_at);
75
76 TaskCycleTimeRecord {
77 task_id,
78 title: title.into(),
79 engineer: engineer.map(str::to_string),
80 priority: priority.into(),
81 status: status.into(),
82 created_at: created_ts,
83 started_at: started_ts,
84 completed_at: completed_ts,
85 cycle_time_minutes,
86 lead_time_minutes,
87 }
88}
89
90pub fn collect_task_cycle_time_records(board_dir: &Path) -> Result<Vec<TaskCycleTimeRecord>> {
91 let mut records = Vec::new();
92 for task in load_tasks_from_paths(&task_data_dirs(board_dir))? {
93 let lifecycle = read_task_lifecycle_timestamps(&task.source_path)?;
94 records.push(build_task_cycle_time_record(
95 task.id,
96 &task.title,
97 task.claimed_by.as_deref(),
98 normalized_priority(&task.priority),
99 &task.status,
100 lifecycle.created,
101 lifecycle.started,
102 lifecycle.completed,
103 ));
104 }
105 records.sort_by_key(|record| record.task_id);
106 Ok(records)
107}
108
109pub fn average_cycle_time_by_priority(
110 records: &[TaskCycleTimeRecord],
111) -> Vec<PriorityCycleTimeSummary> {
112 let mut buckets = BTreeMap::<String, (i64, usize)>::new();
113 for record in records {
114 let Some(cycle_time_minutes) = record.cycle_time_minutes else {
115 continue;
116 };
117 let entry = buckets
118 .entry(normalized_priority(&record.priority))
119 .or_default();
120 entry.0 += cycle_time_minutes;
121 entry.1 += 1;
122 }
123
124 buckets
125 .into_iter()
126 .filter(|(_, (_, count))| *count > 0)
127 .map(|(priority, (sum, count))| PriorityCycleTimeSummary {
128 priority,
129 average_cycle_time_minutes: sum as f64 / count as f64,
130 completed_tasks: count,
131 })
132 .collect()
133}
134
135pub fn engineer_throughput_ranking(
136 records: &[TaskCycleTimeRecord],
137) -> Vec<EngineerThroughputSummary> {
138 #[derive(Default)]
139 struct Totals {
140 completed_tasks: usize,
141 cycle_minutes_sum: i64,
142 cycle_samples: usize,
143 lead_minutes_sum: i64,
144 lead_samples: usize,
145 }
146
147 let mut by_engineer = BTreeMap::<String, Totals>::new();
148 for record in records {
149 let Some(engineer) = record.engineer.as_deref() else {
150 continue;
151 };
152 if record.completed_at.is_none() {
153 continue;
154 }
155
156 let entry = by_engineer.entry(engineer.to_string()).or_default();
157 entry.completed_tasks += 1;
158 if let Some(cycle_time_minutes) = record.cycle_time_minutes {
159 entry.cycle_minutes_sum += cycle_time_minutes;
160 entry.cycle_samples += 1;
161 }
162 if let Some(lead_time_minutes) = record.lead_time_minutes {
163 entry.lead_minutes_sum += lead_time_minutes;
164 entry.lead_samples += 1;
165 }
166 }
167
168 let mut summaries = by_engineer
169 .into_iter()
170 .map(|(engineer, totals)| EngineerThroughputSummary {
171 engineer,
172 completed_tasks: totals.completed_tasks,
173 average_cycle_time_minutes: (totals.cycle_samples > 0)
174 .then(|| totals.cycle_minutes_sum as f64 / totals.cycle_samples as f64),
175 average_lead_time_minutes: (totals.lead_samples > 0)
176 .then(|| totals.lead_minutes_sum as f64 / totals.lead_samples as f64),
177 })
178 .collect::<Vec<_>>();
179
180 summaries.sort_by(|left, right| {
181 right
182 .completed_tasks
183 .cmp(&left.completed_tasks)
184 .then_with(|| left.engineer.cmp(&right.engineer))
185 });
186 summaries
187}
188
189pub fn tasks_completed_per_hour(
190 records: &[TaskCycleTimeRecord],
191 now: DateTime<Utc>,
192 window_hours: i64,
193) -> Vec<HourlyCompletionCount> {
194 let window_hours = window_hours.max(1);
195 let current_hour = now.timestamp().div_euclid(3600) * 3600;
196 let start_hour = current_hour - (window_hours - 1) * 3600;
197 let mut counts = BTreeMap::<i64, i64>::new();
198
199 for offset in 0..window_hours {
200 counts.insert(start_hour + offset * 3600, 0);
201 }
202
203 for record in records {
204 let Some(completed_at) = record.completed_at else {
205 continue;
206 };
207 let bucket = completed_at.div_euclid(3600) * 3600;
208 if let Some(count) = counts.get_mut(&bucket) {
209 *count += 1;
210 }
211 }
212
213 counts
214 .into_iter()
215 .map(|(hour_start, completed_tasks)| HourlyCompletionCount {
216 hour_start,
217 completed_tasks,
218 })
219 .collect()
220}
221
222pub fn longest_running_in_progress_tasks(
223 records: &[TaskCycleTimeRecord],
224 now: DateTime<Utc>,
225 limit: usize,
226) -> Vec<InProgressTaskSummary> {
227 let mut tasks = records
228 .iter()
229 .filter(|record| record.status == "in-progress" && record.completed_at.is_none())
230 .filter_map(|record| {
231 let started_at = record.started_at?;
232 let started_at = Utc.timestamp_opt(started_at, 0).single()?;
233 let elapsed = now.signed_duration_since(started_at).num_minutes().max(0);
234 Some(InProgressTaskSummary {
235 task_id: record.task_id,
236 title: record.title.clone(),
237 engineer: record.engineer.clone(),
238 priority: normalized_priority(&record.priority),
239 minutes_in_progress: elapsed,
240 })
241 })
242 .collect::<Vec<_>>();
243
244 tasks.sort_by(|left, right| {
245 right
246 .minutes_in_progress
247 .cmp(&left.minutes_in_progress)
248 .then_with(|| left.task_id.cmp(&right.task_id))
249 });
250 tasks.truncate(limit);
251 tasks
252}
253
254fn duration_minutes(
255 start: Option<DateTime<FixedOffset>>,
256 end: Option<DateTime<FixedOffset>>,
257) -> Option<i64> {
258 let (start, end) = (start?, end?);
259 let duration = end.signed_duration_since(start);
260 (duration >= Duration::zero()).then(|| duration.num_minutes())
261}
262
/// Canonical priority label: trimmed and lowercased, with empty/blank input
/// mapped to "unspecified".
fn normalized_priority(priority: &str) -> String {
    match priority.trim() {
        "" => String::from("unspecified"),
        value => value.to_lowercase(),
    }
}
271
/// Existing task-data directories under the board: `tasks/` and `archive/`.
/// Directories that do not exist are silently omitted.
fn task_data_dirs(board_dir: &Path) -> Vec<PathBuf> {
    let mut dirs = Vec::with_capacity(2);
    for name in ["tasks", "archive"] {
        let candidate = board_dir.join(name);
        if candidate.is_dir() {
            dirs.push(candidate);
        }
    }
    dirs
}
278
279fn load_tasks_from_paths(paths: &[PathBuf]) -> Result<Vec<Task>> {
280 let mut tasks = Vec::new();
281 for path in paths {
282 tasks.extend(load_tasks_from_dir(path)?);
283 }
284 Ok(tasks)
285}
286
287#[cfg(test)]
288use super::status::format_metrics;
289
#[cfg(test)]
mod tests {
    use std::path::Path;

    use super::*;
    use crate::team::config::RoleType;
    use crate::team::hierarchy::MemberInstance;

    // Build a minimal team member: `name` doubles as the role name, and every
    // field not listed falls back to `MemberInstance::default()`.
    fn make_member(name: &str, role_type: RoleType) -> MemberInstance {
        MemberInstance {
            name: name.to_string(),
            role_name: name.to_string(),
            role_type,
            agent: Some("codex".to_string()),
            prompt: None,
            reports_to: None,
            use_worktrees: false,
            ..Default::default()
        }
    }

    // Write a markdown task file with YAML front matter under
    // `<board_dir>/tasks/`, creating the directory as needed.
    fn write_task(
        board_dir: &Path,
        id: u32,
        title: &str,
        status: &str,
        claimed_by: Option<&str>,
        blocked: Option<&str>,
        depends_on: &[u32],
    ) {
        let tasks_dir = board_dir.join("tasks");
        std::fs::create_dir_all(&tasks_dir).unwrap();
        let mut content =
            format!("---\nid: {id}\ntitle: {title}\nstatus: {status}\npriority: medium\n");
        if let Some(claimed_by) = claimed_by {
            content.push_str(&format!("claimed_by: {claimed_by}\n"));
        }
        if let Some(blocked) = blocked {
            content.push_str(&format!("blocked: {blocked}\n"));
        }
        if !depends_on.is_empty() {
            content.push_str("depends_on:\n");
            for dep in depends_on {
                content.push_str(&format!(" - {dep}\n"));
            }
        }
        content.push_str("class: standard\n---\n\nTask body.\n");
        std::fs::write(tasks_dir.join(format!("{id:03}-{title}.md")), content).unwrap();
    }

    // A board with an empty tasks directory yields all-default metrics.
    #[test]
    fn compute_metrics_handles_empty_board() {
        let tmp = tempfile::tempdir().unwrap();
        let board_dir = tmp.path().join(".batty").join("team_config").join("board");
        std::fs::create_dir_all(board_dir.join("tasks")).unwrap();

        let metrics = compute_metrics(&board_dir, &[]).unwrap();
        assert_eq!(metrics, WorkflowMetrics::default());
    }

    // One task per workflow state; eng-3 has no assignment, so it is the only
    // idle engineer while runnable work exists.
    #[test]
    fn compute_metrics_counts_mixed_workflow_states() {
        let tmp = tempfile::tempdir().unwrap();
        let board_dir = tmp.path().join(".batty").join("team_config").join("board");
        write_task(&board_dir, 1, "done-dep", "done", None, None, &[]);
        // Task 2 depends on the already-done task 1, making it runnable.
        write_task(&board_dir, 2, "runnable", "todo", None, None, &[1]);
        write_task(
            &board_dir,
            3,
            "blocked",
            "blocked",
            Some("eng-1"),
            Some("waiting"),
            &[],
        );
        write_task(&board_dir, 4, "review", "review", Some("eng-2"), None, &[]);
        write_task(
            &board_dir,
            5,
            "active",
            "in-progress",
            Some("eng-1"),
            None,
            &[],
        );

        let members = vec![
            make_member("eng-1", RoleType::Engineer),
            make_member("eng-2", RoleType::Engineer),
            make_member("eng-3", RoleType::Engineer),
        ];
        let metrics = compute_metrics(&board_dir, &members).unwrap();

        assert_eq!(metrics.runnable_count, 1);
        assert_eq!(metrics.blocked_count, 1);
        assert_eq!(metrics.in_review_count, 1);
        assert_eq!(metrics.in_progress_count, 1);
        assert_eq!(metrics.idle_with_runnable, vec!["eng-3"]);
        assert!(metrics.oldest_review_age_secs.is_some());
        assert!(metrics.oldest_assignment_age_secs.is_some());
    }

    // The rendered summary must surface every populated metric field.
    #[test]
    fn format_metrics_produces_readable_summary() {
        let text = format_metrics(&WorkflowMetrics {
            runnable_count: 2,
            blocked_count: 1,
            in_review_count: 3,
            in_progress_count: 4,
            stale_in_progress_count: 1,
            aged_todo_count: 2,
            stale_review_count: 3,
            idle_with_runnable: vec!["eng-1".to_string(), "eng-2".to_string()],
            top_runnable_tasks: vec![
                "#42 (high) Inbox routing".to_string(),
                "#43 (medium) Status cleanup".to_string(),
            ],
            oldest_review_age_secs: Some(120),
            oldest_assignment_age_secs: Some(360),
            ..Default::default()
        });

        assert!(text.contains("Workflow Metrics"));
        assert!(text.contains("Runnable: 2"));
        assert!(text.contains("Blocked: 1"));
        assert!(text.contains("In Review: 3"));
        assert!(text.contains("In Progress: 4"));
        assert!(text.contains("Aging Alerts: stale in-progress 1 | aged todo 2 | stale review 3"));
        assert!(text.contains("Idle With Runnable: eng-1, eng-2"));
        assert!(
            text.contains("Top Runnable: #42 (high) Inbox routing; #43 (medium) Status cleanup")
        );
        assert!(text.contains("Oldest Review Age: 120s"));
        assert!(text.contains("Oldest Assignment Age: 360s"));
        assert!(text.contains("Review Pipeline"));
    }

    // Serialize events as JSON Lines at `path`, creating parent dirs first.
    fn write_events(path: &Path, events: &[crate::team::events::TeamEvent]) {
        let mut lines = Vec::new();
        for event in events {
            lines.push(serde_json::to_string(event).unwrap());
        }
        if let Some(parent) = path.parent() {
            std::fs::create_dir_all(parent).unwrap();
        }
        std::fs::write(path, lines.join("\n")).unwrap();
    }

    // Event counters: 3 auto-merges + 2 manual merges gives an auto-merge
    // rate of 3/5 = 0.6; 1 rework over 6 total merges/reworks gives 1/6.
    #[test]
    fn review_metrics_count_events() {
        use crate::team::events::TeamEvent;

        let tmp = tempfile::tempdir().unwrap();
        let board_dir = tmp.path().join(".batty").join("team_config").join("board");
        let events_path = tmp.path().join("events.jsonl");
        write_task(&board_dir, 1, "t1", "done", None, None, &[]);

        write_events(
            &events_path,
            &[
                TeamEvent::task_auto_merged("eng-1", "1", 0.9, 2, 30),
                TeamEvent::task_auto_merged("eng-1", "2", 0.9, 2, 30),
                TeamEvent::task_auto_merged("eng-1", "3", 0.9, 2, 30),
                TeamEvent::task_manual_merged("4"),
                TeamEvent::task_manual_merged("5"),
                TeamEvent::task_reworked("eng-1", "6"),
                TeamEvent::review_nudge_sent("manager", "7"),
                TeamEvent::review_escalated_by_role("manager", "8"),
                TeamEvent::review_escalated_by_role("manager", "9"),
            ],
        );

        let metrics = compute_metrics_with_events(&board_dir, &[], Some(&events_path)).unwrap();

        assert_eq!(metrics.auto_merge_count, 3);
        assert_eq!(metrics.manual_merge_count, 2);
        assert_eq!(metrics.rework_count, 1);
        assert_eq!(metrics.review_nudge_count, 1);
        assert_eq!(metrics.review_escalation_count, 2);

        let rate = metrics.auto_merge_rate.unwrap();
        assert!((rate - 0.6).abs() < 0.01);

        let rework = metrics.rework_rate.unwrap();
        assert!((rework - 1.0 / 6.0).abs() < 0.01);
    }

    // Latency is completed -> merged per task: (1100-1000) and (2300-2000)
    // average to 200 seconds.
    #[test]
    fn review_metrics_compute_latency() {
        use crate::team::events::TeamEvent;

        let tmp = tempfile::tempdir().unwrap();
        let board_dir = tmp.path().join(".batty").join("team_config").join("board");
        let events_path = tmp.path().join("events.jsonl");
        write_task(&board_dir, 1, "t1", "done", None, None, &[]);

        let mut e1 = TeamEvent::task_completed("eng-1", Some("10"));
        e1.ts = 1000;
        let mut e2 = TeamEvent::task_auto_merged("eng-1", "10", 0.9, 2, 30);
        e2.ts = 1100;
        let mut e3 = TeamEvent::task_completed("eng-2", Some("20"));
        e3.ts = 2000;
        let mut e4 = TeamEvent::task_manual_merged("20");
        e4.ts = 2300;
        write_events(&events_path, &[e1, e2, e3, e4]);

        let metrics = compute_metrics_with_events(&board_dir, &[], Some(&events_path)).unwrap();

        let avg = metrics.avg_review_latency_secs.unwrap();
        assert!((avg - 200.0).abs() < 0.01);
    }

    // With an empty event log every counter is zero and every rate is None.
    #[test]
    fn review_metrics_handle_no_merges() {
        let tmp = tempfile::tempdir().unwrap();
        let board_dir = tmp.path().join(".batty").join("team_config").join("board");
        let events_path = tmp.path().join("events.jsonl");
        write_task(&board_dir, 1, "t1", "done", None, None, &[]);

        std::fs::write(&events_path, "").unwrap();

        let metrics = compute_metrics_with_events(&board_dir, &[], Some(&events_path)).unwrap();

        assert_eq!(metrics.auto_merge_count, 0);
        assert_eq!(metrics.manual_merge_count, 0);
        assert!(metrics.auto_merge_rate.is_none());
        assert!(metrics.rework_rate.is_none());
        assert!(metrics.avg_review_latency_secs.is_none());
    }

    // The review-pipeline section of the formatted status shows counts and
    // the auto-merge rate as a percentage.
    #[test]
    fn status_includes_review_pipeline() {
        let text = format_metrics(&WorkflowMetrics {
            in_review_count: 2,
            auto_merge_count: 3,
            manual_merge_count: 2,
            auto_merge_rate: Some(0.6),
            rework_count: 1,
            rework_rate: Some(1.0 / 6.0),
            review_nudge_count: 1,
            review_escalation_count: 0,
            avg_review_latency_secs: Some(272.0),
            ..Default::default()
        });

        assert!(text.contains("Review Pipeline"));
        assert!(text.contains("Queue: 2"));
        assert!(text.contains("Auto-merge Rate: 60%"));
        assert!(text.contains("Auto: 3"));
        assert!(text.contains("Manual: 2"));
        assert!(text.contains("Rework: 1"));
        assert!(text.contains("Nudges: 1"));
        assert!(text.contains("Escalations: 0"));
    }

    // The generated retrospective renders the review section; 5 auto of 7
    // total merges rounds to a 71% auto-merge rate, and stall durations are
    // formatted as "Xm YYs".
    #[test]
    fn retro_includes_review_section() {
        use crate::team::retrospective::{RunStats, generate_retrospective};

        let tmp = tempfile::tempdir().unwrap();
        let stats = RunStats {
            run_start: 100,
            run_end: 500,
            total_duration_secs: 400,
            task_stats: Vec::new(),
            average_cycle_time_secs: None,
            fastest_task_id: None,
            fastest_cycle_time_secs: None,
            longest_task_id: None,
            longest_cycle_time_secs: None,
            idle_time_pct: 0.0,
            escalation_count: 0,
            message_count: 0,
            auto_merge_count: 5,
            manual_merge_count: 2,
            rework_count: 1,
            review_nudge_count: 3,
            review_escalation_count: 0,
            avg_review_stall_secs: Some(120),
            max_review_stall_secs: Some(200),
            max_review_stall_task: Some("T-1".to_string()),
            task_rework_counts: vec![("T-2".to_string(), 1)],
        };

        let path = generate_retrospective(tmp.path(), &stats).unwrap();
        let content = std::fs::read_to_string(path).unwrap();

        assert!(content.contains("## Review Pipeline"));
        assert!(content.contains("Auto-merged: 5"));
        assert!(content.contains("Manually merged: 2"));
        assert!(content.contains("Auto-merge rate: 71%"));
        assert!(content.contains("Rework cycles: 1"));
        assert!(content.contains("Review nudges: 3"));
        assert!(content.contains("Review escalations: 0"));
        assert!(content.contains("Avg review stall: 2m 00s"));
        assert!(content.contains("Max review stall: 3m 20s (T-1)"));
    }

    // 11:00 -> 13:30 in a fixed UTC-4 offset is 150 minutes of cycle time.
    #[test]
    fn compute_cycle_time_from_mock_timestamps() {
        let offset = FixedOffset::west_opt(4 * 3600).unwrap();
        let created_at = offset.with_ymd_and_hms(2026, 4, 5, 10, 0, 0).unwrap();
        let started_at = offset.with_ymd_and_hms(2026, 4, 5, 11, 0, 0).unwrap();
        let completed_at = offset.with_ymd_and_hms(2026, 4, 5, 13, 30, 0).unwrap();

        let record = build_task_cycle_time_record(
            473,
            "Track cycle time",
            Some("eng-1-3"),
            "high",
            "done",
            Some(created_at),
            Some(started_at),
            Some(completed_at),
        );

        assert_eq!(record.cycle_time_minutes, Some(150));
    }

    // 9:00 -> 13:30 is 270 minutes of lead time (created -> completed).
    #[test]
    fn compute_lead_time_from_mock_timestamps() {
        let offset = FixedOffset::west_opt(4 * 3600).unwrap();
        let created_at = offset.with_ymd_and_hms(2026, 4, 5, 9, 0, 0).unwrap();
        let started_at = offset.with_ymd_and_hms(2026, 4, 5, 11, 0, 0).unwrap();
        let completed_at = offset.with_ymd_and_hms(2026, 4, 5, 13, 30, 0).unwrap();

        let record = build_task_cycle_time_record(
            474,
            "Track lead time",
            Some("eng-1-4"),
            "medium",
            "done",
            Some(created_at),
            Some(started_at),
            Some(completed_at),
        );

        assert_eq!(record.lead_time_minutes, Some(270));
    }

    // Two "high" tasks (5 and 10 minutes) average to 7.5; eng-1 owns both of
    // them and therefore ranks first with two completions.
    #[test]
    fn cycle_time_metrics_aggregation_groups_priority_and_engineer() {
        let records = vec![
            TaskCycleTimeRecord {
                task_id: 1,
                title: "One".to_string(),
                engineer: Some("eng-1".to_string()),
                priority: "high".to_string(),
                status: "done".to_string(),
                created_at: Some(100),
                started_at: Some(160),
                completed_at: Some(460),
                cycle_time_minutes: Some(5),
                lead_time_minutes: Some(6),
            },
            TaskCycleTimeRecord {
                task_id: 2,
                title: "Two".to_string(),
                engineer: Some("eng-1".to_string()),
                priority: "high".to_string(),
                status: "done".to_string(),
                created_at: Some(200),
                started_at: Some(260),
                completed_at: Some(860),
                cycle_time_minutes: Some(10),
                lead_time_minutes: Some(11),
            },
            TaskCycleTimeRecord {
                task_id: 3,
                title: "Three".to_string(),
                engineer: Some("eng-2".to_string()),
                priority: "low".to_string(),
                status: "done".to_string(),
                created_at: Some(300),
                started_at: Some(360),
                completed_at: Some(540),
                cycle_time_minutes: Some(3),
                lead_time_minutes: Some(4),
            },
        ];

        let by_priority = average_cycle_time_by_priority(&records);
        assert_eq!(by_priority.len(), 2);
        assert_eq!(by_priority[0].priority, "high");
        assert!((by_priority[0].average_cycle_time_minutes - 7.5).abs() < f64::EPSILON);

        let by_engineer = engineer_throughput_ranking(&records);
        assert_eq!(by_engineer[0].engineer, "eng-1");
        assert_eq!(by_engineer[0].completed_tasks, 2);
        assert!((by_engineer[0].average_cycle_time_minutes.unwrap() - 7.5).abs() < f64::EPSILON);
    }

    // Missing endpoints leave the corresponding duration as None, and an
    // in-progress task started at t=3600 with now=7200 shows 60 minutes.
    #[test]
    fn cycle_time_edge_cases_handle_missing_started_and_created() {
        let no_started = TaskCycleTimeRecord {
            task_id: 10,
            title: "No started".to_string(),
            engineer: Some("eng-1".to_string()),
            priority: "medium".to_string(),
            status: "done".to_string(),
            created_at: Some(100),
            started_at: None,
            completed_at: Some(400),
            cycle_time_minutes: None,
            lead_time_minutes: Some(5),
        };
        let no_created = TaskCycleTimeRecord {
            task_id: 11,
            title: "No created".to_string(),
            engineer: Some("eng-2".to_string()),
            priority: "medium".to_string(),
            status: "done".to_string(),
            created_at: None,
            started_at: Some(100),
            completed_at: Some(400),
            cycle_time_minutes: Some(5),
            lead_time_minutes: None,
        };

        let now = Utc.timestamp_opt(7200, 0).single().unwrap();
        let in_progress = longest_running_in_progress_tasks(
            &[TaskCycleTimeRecord {
                task_id: 12,
                title: "Active".to_string(),
                engineer: Some("eng-3".to_string()),
                priority: "high".to_string(),
                status: "in-progress".to_string(),
                created_at: Some(100),
                started_at: Some(3600),
                completed_at: None,
                cycle_time_minutes: None,
                lead_time_minutes: None,
            }],
            now,
            5,
        );

        assert!(no_started.cycle_time_minutes.is_none());
        assert!(no_created.lead_time_minutes.is_none());
        assert_eq!(in_progress[0].minutes_in_progress, 60);
    }
}