1#![cfg_attr(not(test), allow(dead_code))]
2
3use std::fs::{File, OpenOptions};
4use std::io::{BufRead, BufReader, Write};
5use std::path::Path;
6
7use anyhow::{Context, Result};
8use serde::{Deserialize, Serialize};
9
10use super::board::WorkflowMetadata;
11
/// Category of an artifact captured alongside a merge.
///
/// Serialized in snake_case (e.g. `BuildOutput` -> `"build_output"`).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum ArtifactType {
    /// Output produced by a test run.
    TestResult,
    /// Output produced by a build (e.g. a compiled binary).
    BuildOutput,
    /// Generated or written documentation.
    Documentation,
    /// Anything that does not fit the categories above.
    Other,
}
20
/// A single artifact attached to a [`MergeRecord`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct ArtifactRecord {
    /// Path to the artifact, relative or absolute (stored verbatim).
    pub path: String,
    /// What kind of artifact this is.
    pub artifact_type: ArtifactType,
    /// Creation timestamp — presumably Unix epoch seconds (values in the
    /// tests look like epoch seconds); `None` when unknown.
    pub created_at: Option<u64>,
    /// Whether the artifact was verified to exist / be valid at record time.
    pub verified: bool,
}
28
/// One line of the JSONL merge log: a task's branch merged at a commit,
/// together with the artifacts produced by that task.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct MergeRecord {
    /// Identifier of the task that was merged.
    pub task_id: u32,
    /// Branch name that was merged (e.g. `eng-1/task-29`).
    pub branch: String,
    /// Commit hash of the merge.
    pub commit: String,
    /// Merge timestamp — presumably Unix epoch seconds; TODO confirm.
    pub merged_at: u64,
    /// Who performed the merge (e.g. `"manager"` in the tests).
    pub merged_by: String,
    /// Artifacts associated with the merged task.
    pub artifacts: Vec<ArtifactRecord>,
}
38
/// One line of the JSONL test-timing log: how long a task's test run took,
/// plus rolling-average regression analysis against prior runs
/// (see [`build_test_timing_record`]).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct TestTimingRecord {
    /// Identifier of the task the measurement belongs to.
    pub task_id: u32,
    /// Engineer who ran the tests.
    pub engineer: String,
    /// Branch the tests ran on.
    pub branch: String,
    /// Measurement timestamp — presumably Unix epoch seconds; TODO confirm.
    pub measured_at: u64,
    /// Wall-clock duration of the test run, in milliseconds.
    pub duration_ms: u64,
    /// Mean of the previous five durations; `None` until five prior
    /// records exist.
    pub rolling_average_ms: Option<u64>,
    /// Percent by which `duration_ms` exceeds the rolling average
    /// (0 when faster); `None` when no average is available or it is zero.
    pub regression_pct: Option<u32>,
    /// True when the new duration exceeds the rolling average by more
    /// than 20%.
    pub regression_detected: bool,
}
50
51pub fn record_merge(log_path: &Path, record: &MergeRecord) -> Result<()> {
52 if let Some(parent) = log_path.parent() {
53 std::fs::create_dir_all(parent)
54 .with_context(|| format!("failed to create {}", parent.display()))?;
55 }
56
57 let mut file = OpenOptions::new()
58 .create(true)
59 .append(true)
60 .open(log_path)
61 .with_context(|| format!("failed to open {}", log_path.display()))?;
62
63 serde_json::to_writer(&mut file, record).with_context(|| {
64 format!(
65 "failed to serialize merge record for {}",
66 log_path.display()
67 )
68 })?;
69 file.write_all(b"\n")
70 .with_context(|| format!("failed to append newline to {}", log_path.display()))?;
71 Ok(())
72}
73
74pub fn read_merge_log(log_path: &Path) -> Result<Vec<MergeRecord>> {
75 if !log_path.exists() {
76 return Ok(Vec::new());
77 }
78
79 let file =
80 File::open(log_path).with_context(|| format!("failed to open {}", log_path.display()))?;
81 let reader = BufReader::new(file);
82 let mut records = Vec::new();
83
84 for (index, line) in reader.lines().enumerate() {
85 let line = line.with_context(|| {
86 format!(
87 "failed to read line {} from {}",
88 index + 1,
89 log_path.display()
90 )
91 })?;
92 if line.trim().is_empty() {
93 continue;
94 }
95 let record = serde_json::from_str::<MergeRecord>(&line).with_context(|| {
96 format!(
97 "failed to parse merge record line {} from {}",
98 index + 1,
99 log_path.display()
100 )
101 })?;
102 records.push(record);
103 }
104
105 Ok(records)
106}
107
108pub fn record_test_timing(log_path: &Path, record: &TestTimingRecord) -> Result<()> {
109 if let Some(parent) = log_path.parent() {
110 std::fs::create_dir_all(parent)
111 .with_context(|| format!("failed to create {}", parent.display()))?;
112 }
113
114 let mut file = OpenOptions::new()
115 .create(true)
116 .append(true)
117 .open(log_path)
118 .with_context(|| format!("failed to open {}", log_path.display()))?;
119
120 serde_json::to_writer(&mut file, record).with_context(|| {
121 format!(
122 "failed to serialize test timing record for {}",
123 log_path.display()
124 )
125 })?;
126 file.write_all(b"\n")
127 .with_context(|| format!("failed to append newline to {}", log_path.display()))?;
128 Ok(())
129}
130
131pub fn read_test_timing_log(log_path: &Path) -> Result<Vec<TestTimingRecord>> {
132 if !log_path.exists() {
133 return Ok(Vec::new());
134 }
135
136 let file =
137 File::open(log_path).with_context(|| format!("failed to open {}", log_path.display()))?;
138 let reader = BufReader::new(file);
139 let mut records = Vec::new();
140
141 for (index, line) in reader.lines().enumerate() {
142 let line = line.with_context(|| {
143 format!(
144 "failed to read line {} from {}",
145 index + 1,
146 log_path.display()
147 )
148 })?;
149 if line.trim().is_empty() {
150 continue;
151 }
152 let record = serde_json::from_str::<TestTimingRecord>(&line).with_context(|| {
153 format!(
154 "failed to parse test timing record line {} from {}",
155 index + 1,
156 log_path.display()
157 )
158 })?;
159 records.push(record);
160 }
161
162 Ok(records)
163}
164
165pub fn build_test_timing_record(
166 history: &[TestTimingRecord],
167 task_id: u32,
168 engineer: &str,
169 branch: &str,
170 measured_at: u64,
171 duration_ms: u64,
172) -> TestTimingRecord {
173 let window: Vec<&TestTimingRecord> = history.iter().rev().take(5).collect();
174 let (rolling_average_ms, regression_pct, regression_detected) = if window.len() == 5 {
175 let total_ms: u64 = window.iter().map(|record| record.duration_ms).sum();
176 let average_ms = total_ms / 5;
177 let pct = if average_ms == 0 {
178 None
179 } else {
180 Some(((duration_ms.saturating_sub(average_ms)) * 100 / average_ms) as u32)
181 };
182 let detected = average_ms > 0 && duration_ms.saturating_mul(100) > average_ms * 120;
183 (Some(average_ms), pct, detected)
184 } else {
185 (None, None, false)
186 };
187
188 TestTimingRecord {
189 task_id,
190 engineer: engineer.to_string(),
191 branch: branch.to_string(),
192 measured_at,
193 duration_ms,
194 rolling_average_ms,
195 regression_pct,
196 regression_detected,
197 }
198}
199
200pub fn append_test_timing_record(
201 log_path: &Path,
202 task_id: u32,
203 engineer: &str,
204 branch: &str,
205 measured_at: u64,
206 duration_ms: u64,
207) -> Result<TestTimingRecord> {
208 let history = read_test_timing_log(log_path)?;
209 let record = build_test_timing_record(
210 &history,
211 task_id,
212 engineer,
213 branch,
214 measured_at,
215 duration_ms,
216 );
217 record_test_timing(log_path, &record)?;
218 Ok(record)
219}
220
221pub fn track_artifact(meta: &mut WorkflowMetadata, artifact: &str) {
222 let artifact = artifact.trim();
223 if artifact.is_empty() {
224 return;
225 }
226 if !meta.artifacts.iter().any(|existing| existing == artifact) {
227 meta.artifacts.push(artifact.to_string());
228 }
229}
230
#[cfg(test)]
mod tests {
    use super::*;

    /// Fixture: an `ArtifactRecord` with fixed `created_at`/`verified` values.
    fn sample_record(path: &str, artifact_type: ArtifactType) -> ArtifactRecord {
        ArtifactRecord {
            path: path.to_string(),
            artifact_type,
            created_at: Some(1_777_000_000),
            verified: true,
        }
    }

    /// Fixture: a `MergeRecord` for task 29 carrying two artifacts.
    fn sample_merge_record() -> MergeRecord {
        MergeRecord {
            task_id: 29,
            branch: "eng-1-3/task-29".to_string(),
            commit: "abc1234".to_string(),
            merged_at: 1_777_000_123,
            merged_by: "manager".to_string(),
            artifacts: vec![
                sample_record("target/debug/batty", ArtifactType::BuildOutput),
                sample_record("target/nextest/default.xml", ArtifactType::TestResult),
            ],
        }
    }

    /// Fixture: a `TestTimingRecord` with no regression analysis populated.
    fn sample_test_timing_record(task_id: u32, duration_ms: u64) -> TestTimingRecord {
        TestTimingRecord {
            task_id,
            engineer: "eng-1".to_string(),
            branch: format!("eng-1/task-{task_id}"),
            measured_at: 1_777_000_000 + task_id as u64,
            duration_ms,
            rolling_average_ms: None,
            regression_pct: None,
            regression_detected: false,
        }
    }

    /// Two merges must produce two distinct JSONL lines in the same file,
    /// with parent directories created on demand.
    #[test]
    fn record_merge_appends_to_log_file() {
        let tmp = tempfile::tempdir().unwrap();
        let log_path = tmp.path().join(".batty").join("merge-log.jsonl");
        let first = sample_merge_record();
        let mut second = sample_merge_record();
        second.task_id = 30;
        second.commit = "def5678".to_string();

        record_merge(&log_path, &first).unwrap();
        record_merge(&log_path, &second).unwrap();

        let content = std::fs::read_to_string(&log_path).unwrap();
        assert_eq!(content.lines().count(), 2);
        assert!(content.contains("\"task_id\":29"));
        assert!(content.contains("\"task_id\":30"));
    }

    /// Round trip: a written merge record parses back equal.
    #[test]
    fn read_merge_log_parses_back() {
        let tmp = tempfile::tempdir().unwrap();
        let log_path = tmp.path().join("merge-log.jsonl");
        let record = sample_merge_record();

        record_merge(&log_path, &record).unwrap();

        let parsed = read_merge_log(&log_path).unwrap();
        assert_eq!(parsed, vec![record]);
    }

    /// Two timing records must produce two distinct JSONL lines in the
    /// same file, with parent directories created on demand.
    #[test]
    fn record_test_timing_appends_to_log_file() {
        let tmp = tempfile::tempdir().unwrap();
        let log_path = tmp.path().join(".batty").join("test_timing.jsonl");
        let first = sample_test_timing_record(29, 950);
        let mut second = sample_test_timing_record(30, 1_150);
        second.regression_detected = true;
        second.rolling_average_ms = Some(900);
        second.regression_pct = Some(27);

        record_test_timing(&log_path, &first).unwrap();
        record_test_timing(&log_path, &second).unwrap();

        let content = std::fs::read_to_string(&log_path).unwrap();
        assert_eq!(content.lines().count(), 2);
        assert!(content.contains("\"task_id\":29"));
        assert!(content.contains("\"task_id\":30"));
    }

    /// Round trip: a written timing record parses back equal.
    #[test]
    fn read_test_timing_log_parses_back() {
        let tmp = tempfile::tempdir().unwrap();
        let log_path = tmp.path().join("test_timing.jsonl");
        let record = sample_test_timing_record(31, 1_250);

        record_test_timing(&log_path, &record).unwrap();

        let parsed = read_test_timing_log(&log_path).unwrap();
        assert_eq!(parsed, vec![record]);
    }

    /// With only four prior records (window needs five), no rolling
    /// average or regression analysis is computed.
    #[test]
    fn build_test_timing_record_skips_regression_without_five_prior_merges() {
        let history = vec![
            sample_test_timing_record(1, 900),
            sample_test_timing_record(2, 950),
            sample_test_timing_record(3, 1_000),
            sample_test_timing_record(4, 980),
        ];

        let record = build_test_timing_record(&history, 5, "eng-1", "eng-1/task-5", 100, 1_300);

        assert_eq!(record.rolling_average_ms, None);
        assert_eq!(record.regression_pct, None);
        assert!(!record.regression_detected);
    }

    /// Five prior 1000ms runs give an average of 1000; a 1250ms run is a
    /// 25% regression, over the 20% detection threshold.
    #[test]
    fn build_test_timing_record_detects_regression_against_previous_five_merges() {
        let history = vec![
            sample_test_timing_record(1, 1_000),
            sample_test_timing_record(2, 1_000),
            sample_test_timing_record(3, 1_000),
            sample_test_timing_record(4, 1_000),
            sample_test_timing_record(5, 1_000),
        ];

        let record = build_test_timing_record(&history, 6, "eng-1", "eng-1/task-6", 106, 1_250);

        assert_eq!(record.rolling_average_ms, Some(1_000));
        assert_eq!(record.regression_pct, Some(25));
        assert!(record.regression_detected);
    }

    /// Adding the same path twice keeps a single entry; order of first
    /// insertion is preserved.
    #[test]
    fn track_artifact_adds_to_metadata_and_deduplicates() {
        let mut meta = WorkflowMetadata::default();

        track_artifact(&mut meta, "target/debug/batty");
        track_artifact(&mut meta, "target/debug/batty");
        track_artifact(&mut meta, "target/doc/index.html");

        assert_eq!(
            meta.artifacts,
            vec![
                "target/debug/batty".to_string(),
                "target/doc/index.html".to_string()
            ]
        );
    }

    /// Serde round trip, also pinning the snake_case enum representation.
    #[test]
    fn artifact_record_serde_round_trip() {
        let record = sample_record("docs/workflow.md", ArtifactType::Documentation);

        let json = serde_json::to_string(&record).unwrap();
        let parsed: ArtifactRecord = serde_json::from_str(&json).unwrap();

        assert_eq!(parsed, record);
        assert!(json.contains("\"artifact_type\":\"documentation\""));
    }
}