use crate::comparison::ComparisonReport;
use crate::pdf::{ExecutionReport, ReportMetrics};
use crate::Result;
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::Write;

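/// Configuration options controlling CSV output formatting.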
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CsvExportConfig {
    pub delimiter: char,
    pub include_headers: bool,
    pub quote_strings: bool,
}

impl Default for CsvExportConfig {
    fn default() -> Self {
        Self {
            delimiter: ',',
            include_headers: true,
            quote_strings: true,
        }
    }
}

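/// Exports execution and comparison reports to CSV files.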
pub struct CsvExporter {
    config: CsvExportConfig,
}

impl CsvExporter {
    pub fn new(config: CsvExportConfig) -> Self {
        Self { config }
    }

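    /// Writes a single execution report as one CSV row, preceded by an optional header row.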
    pub fn export_execution_report(
        &self,
        report: &ExecutionReport,
        output_path: &str,
    ) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "orchestration_name,start_time,end_time,duration_seconds,status,total_steps,completed_steps,failed_steps,total_requests,successful_requests,failed_requests,error_rate,avg_latency_ms,p95_latency_ms,p99_latency_ms"
            )?;
        }

        writeln!(
            file,
            "{},{},{},{},{},{},{},{},{},{},{},{:.4},{:.2},{:.2},{:.2}",
            self.quote_if_needed(&report.orchestration_name),
            report.start_time.to_rfc3339(),
            report.end_time.to_rfc3339(),
            report.duration_seconds,
            self.quote_if_needed(&report.status),
            report.total_steps,
            report.completed_steps,
            report.failed_steps,
            report.metrics.total_requests,
            report.metrics.successful_requests,
            report.metrics.failed_requests,
            report.metrics.error_rate,
            report.metrics.avg_latency_ms,
            report.metrics.p95_latency_ms,
            report.metrics.p99_latency_ms,
        )?;

        Ok(())
    }

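    /// Writes multiple execution reports to one CSV file, one row per report.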
    pub fn export_execution_reports(
        &self,
        reports: &[ExecutionReport],
        output_path: &str,
    ) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "orchestration_name,start_time,end_time,duration_seconds,status,total_steps,completed_steps,failed_steps,total_requests,successful_requests,failed_requests,error_rate,avg_latency_ms,p95_latency_ms,p99_latency_ms"
            )?;
        }

        for report in reports {
            writeln!(
                file,
                "{},{},{},{},{},{},{},{},{},{},{},{:.4},{:.2},{:.2},{:.2}",
                self.quote_if_needed(&report.orchestration_name),
                report.start_time.to_rfc3339(),
                report.end_time.to_rfc3339(),
                report.duration_seconds,
                self.quote_if_needed(&report.status),
                report.total_steps,
                report.completed_steps,
                report.failed_steps,
                report.metrics.total_requests,
                report.metrics.successful_requests,
                report.metrics.failed_requests,
                report.metrics.error_rate,
                report.metrics.avg_latency_ms,
                report.metrics.p95_latency_ms,
                report.metrics.p99_latency_ms,
            )?;
        }

        Ok(())
    }

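    /// Writes each metric difference from a comparison report as a CSV row.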
    pub fn export_comparison_report(
        &self,
        report: &ComparisonReport,
        output_path: &str,
    ) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "metric_name,baseline_value,comparison_value,absolute_difference,percentage_difference,direction,significance"
            )?;
        }

        for diff in &report.metric_differences {
            writeln!(
                file,
                "{},{:.4},{:.4},{:.4},{:.2},{:?},{:?}",
                self.quote_if_needed(&diff.metric_name),
                diff.baseline_value,
                diff.comparison_value,
                diff.absolute_difference,
                diff.percentage_difference,
                diff.direction,
                diff.significance,
            )?;
        }

        Ok(())
    }

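    /// Writes timestamped metric samples as CSV rows, one per `(timestamp, metrics)` pair.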
    pub fn export_metrics_time_series(
        &self,
        metrics: &[(i64, ReportMetrics)],
        output_path: &str,
    ) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "timestamp,total_requests,successful_requests,failed_requests,error_rate,avg_latency_ms,p95_latency_ms,p99_latency_ms"
            )?;
        }

        for (timestamp, metric) in metrics {
            writeln!(
                file,
                "{},{},{},{},{:.4},{:.2},{:.2},{:.2}",
                timestamp,
                metric.total_requests,
                metric.successful_requests,
                metric.failed_requests,
                metric.error_rate,
                metric.avg_latency_ms,
                metric.p95_latency_ms,
                metric.p99_latency_ms,
            )?;
        }

        Ok(())
    }

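    /// Writes the regressions from a comparison report as CSV rows.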
    pub fn export_regressions(&self, report: &ComparisonReport, output_path: &str) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "metric_name,baseline_value,regressed_value,impact_percentage,severity,description"
            )?;
        }

        for regression in &report.regressions {
            writeln!(
                file,
                "{},{:.4},{:.4},{:.2},{},{}",
                self.quote_if_needed(&regression.metric_name),
                regression.baseline_value,
                regression.regressed_value,
                regression.impact_percentage,
                self.quote_if_needed(&regression.severity),
                self.quote_if_needed(&regression.description),
            )?;
        }

        Ok(())
    }

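    /// Writes the improvements from a comparison report as CSV rows.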
    pub fn export_improvements(&self, report: &ComparisonReport, output_path: &str) -> Result<()> {
        let mut file = File::create(output_path)?;

        if self.config.include_headers {
            writeln!(
                file,
                "metric_name,baseline_value,improved_value,improvement_percentage,description"
            )?;
        }

        for improvement in &report.improvements {
            writeln!(
                file,
                "{},{:.4},{:.4},{:.2},{}",
                self.quote_if_needed(&improvement.metric_name),
                improvement.baseline_value,
                improvement.improved_value,
                improvement.improvement_percentage,
                self.quote_if_needed(&improvement.description),
            )?;
        }

        Ok(())
    }

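    /// Wraps a value in double quotes (doubling any embedded quotes) when `quote_strings` is enabled.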
    fn quote_if_needed(&self, s: &str) -> String {
        if self.config.quote_strings {
            format!("\"{}\"", s.replace('"', "\"\""))
        } else {
            s.to_string()
        }
    }
}

impl Default for CsvExporter {
    fn default() -> Self {
        Self::new(CsvExportConfig::default())
    }
}

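/// Convenience wrapper that writes a full set of report CSVs into a single output directory.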
pub struct CsvBatchExporter {
    exporter: CsvExporter,
}

impl CsvBatchExporter {
    pub fn new(config: CsvExportConfig) -> Self {
        Self {
            exporter: CsvExporter::new(config),
        }
    }

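    /// Exports all execution reports, plus comparison, regression, and improvement CSVs when a comparison report is provided.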
    pub fn export_all(
        &self,
        execution_reports: &[ExecutionReport],
        comparison_report: Option<&ComparisonReport>,
        output_dir: &str,
    ) -> Result<()> {
        std::fs::create_dir_all(output_dir)?;

        let exec_path = format!("{}/execution_reports.csv", output_dir);
        self.exporter.export_execution_reports(execution_reports, &exec_path)?;

        if let Some(comparison) = comparison_report {
            let comp_path = format!("{}/comparison.csv", output_dir);
            self.exporter.export_comparison_report(comparison, &comp_path)?;

            let reg_path = format!("{}/regressions.csv", output_dir);
            self.exporter.export_regressions(comparison, &reg_path)?;

            let imp_path = format!("{}/improvements.csv", output_dir);
            self.exporter.export_improvements(comparison, &imp_path)?;
        }

        Ok(())
    }
}

impl Default for CsvBatchExporter {
    fn default() -> Self {
        Self::new(CsvExportConfig::default())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Utc;
    use tempfile::tempdir;

    #[test]
    fn test_csv_export_execution_report() {
        let config = CsvExportConfig::default();
        let exporter = CsvExporter::new(config);

        let report = ExecutionReport {
            orchestration_name: "test-orch".to_string(),
            start_time: Utc::now(),
            end_time: Utc::now(),
            duration_seconds: 120,
            status: "Completed".to_string(),
            total_steps: 5,
            completed_steps: 5,
            failed_steps: 0,
            metrics: ReportMetrics {
                total_requests: 1000,
                successful_requests: 980,
                failed_requests: 20,
                avg_latency_ms: 125.5,
                p95_latency_ms: 250.0,
                p99_latency_ms: 350.0,
                error_rate: 0.02,
            },
            failures: vec![],
            recommendations: vec![],
        };

        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("report.csv");

        let result = exporter.export_execution_report(&report, output_path.to_str().unwrap());
        assert!(result.is_ok());
        assert!(output_path.exists());

        let content = std::fs::read_to_string(output_path).unwrap();
        assert!(content.contains("orchestration_name"));
        assert!(content.contains("test-orch"));
    }

    #[test]
    fn test_csv_export_multiple_reports() {
        let config = CsvExportConfig::default();
        let exporter = CsvExporter::new(config);

        let reports = vec![
            ExecutionReport {
                orchestration_name: "test-1".to_string(),
                start_time: Utc::now(),
                end_time: Utc::now(),
                duration_seconds: 100,
                status: "Completed".to_string(),
                total_steps: 3,
                completed_steps: 3,
                failed_steps: 0,
                metrics: ReportMetrics {
                    total_requests: 500,
                    successful_requests: 490,
                    failed_requests: 10,
                    avg_latency_ms: 100.0,
                    p95_latency_ms: 200.0,
                    p99_latency_ms: 300.0,
                    error_rate: 0.02,
                },
                failures: vec![],
                recommendations: vec![],
            },
            ExecutionReport {
                orchestration_name: "test-2".to_string(),
                start_time: Utc::now(),
                end_time: Utc::now(),
                duration_seconds: 150,
                status: "Completed".to_string(),
                total_steps: 4,
                completed_steps: 4,
                failed_steps: 0,
                metrics: ReportMetrics {
                    total_requests: 750,
                    successful_requests: 740,
                    failed_requests: 10,
                    avg_latency_ms: 110.0,
                    p95_latency_ms: 220.0,
                    p99_latency_ms: 320.0,
                    error_rate: 0.013,
                },
                failures: vec![],
                recommendations: vec![],
            },
        ];

        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("reports.csv");

        let result = exporter.export_execution_reports(&reports, output_path.to_str().unwrap());
        assert!(result.is_ok());

        let content = std::fs::read_to_string(output_path).unwrap();
        let lines: Vec<&str> = content.lines().collect();
        // Header row plus one row per report.
        assert_eq!(lines.len(), 3);
    }
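
    // Added test covering the quoting helper above: embedded double quotes are doubled
    // when `quote_strings` is enabled, and values pass through unchanged when it is off.
    #[test]
    fn test_quote_if_needed_escapes_embedded_quotes() {
        let quoting = CsvExporter::new(CsvExportConfig::default());
        assert_eq!(quoting.quote_if_needed(r#"say "hi""#), r#""say ""hi""""#);

        let plain = CsvExporter::new(CsvExportConfig {
            quote_strings: false,
            ..CsvExportConfig::default()
        });
        assert_eq!(plain.quote_if_needed("plain value"), "plain value");
    }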
}