use crate::{ReportingError, Result};
use chrono::{DateTime, Utc};
use printpdf::*;
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::BufWriter;
/// Options controlling which sections a generated PDF report contains
/// and the document metadata used for the title page.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PdfConfig {
    /// Document title; also rendered as the page heading.
    pub title: String,
    /// Report author.
    // NOTE(review): `author` is stored in config but does not appear to be
    // embedded in the generated PDF by `generate` — confirm intent.
    pub author: String,
    /// Render the ASCII-bar "Performance Overview" section.
    pub include_charts: bool,
    /// Render the request-count / latency "Metrics" section.
    pub include_metrics: bool,
    /// Render the "Recommendations" list (only when the report has any).
    pub include_recommendations: bool,
}
impl Default for PdfConfig {
fn default() -> Self {
Self {
title: "Chaos Orchestration Report".to_string(),
author: "MockForge".to_string(),
include_charts: true,
include_metrics: true,
include_recommendations: true,
}
}
}
/// Summary of one chaos-orchestration run, as rendered into the PDF.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExecutionReport {
    /// Name of the orchestration that was executed.
    pub orchestration_name: String,
    /// When the run started (UTC).
    pub start_time: DateTime<Utc>,
    /// When the run finished (UTC).
    pub end_time: DateTime<Utc>,
    /// Total wall-clock duration of the run, in seconds.
    pub duration_seconds: u64,
    /// Final run status (free-form, e.g. "Completed").
    pub status: String,
    /// Number of steps the orchestration defined.
    pub total_steps: usize,
    /// Steps that finished successfully.
    pub completed_steps: usize,
    /// Steps that failed.
    pub failed_steps: usize,
    /// Aggregated request/latency metrics for the run.
    pub metrics: ReportMetrics,
    /// Per-step failure details (empty when nothing failed).
    pub failures: Vec<FailureDetail>,
    /// Human-readable follow-up suggestions.
    pub recommendations: Vec<String>,
}
/// Aggregated request-level metrics for a run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ReportMetrics {
    /// Total requests issued during the run.
    pub total_requests: u64,
    /// Requests that succeeded.
    pub successful_requests: u64,
    /// Requests that failed.
    pub failed_requests: u64,
    /// Mean request latency, in milliseconds.
    pub avg_latency_ms: f64,
    /// 95th-percentile request latency, in milliseconds.
    pub p95_latency_ms: f64,
    /// 99th-percentile request latency, in milliseconds.
    pub p99_latency_ms: f64,
    /// Fraction of requests that failed (0.0..=1.0; rendered as a percentage).
    pub error_rate: f64,
}
/// A single failed step, listed in the "Failures" section of the report.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FailureDetail {
    /// Name of the step that failed.
    pub step_name: String,
    /// Error text associated with the failure.
    pub error_message: String,
    /// When the failure occurred (UTC).
    pub timestamp: DateTime<Utc>,
}
/// Renders [`ExecutionReport`]s as PDF files according to a [`PdfConfig`].
pub struct PdfReportGenerator {
    // Section toggles and document metadata used by `generate`.
    config: PdfConfig,
}
impl PdfReportGenerator {
    /// Creates a generator that renders reports according to `config`.
    pub fn new(config: PdfConfig) -> Self {
        Self { config }
    }

    /// Renders `report` as a single-page A4 PDF written to `output_path`.
    ///
    /// Optional sections (metrics, chart bars, recommendations) are emitted
    /// according to the generator's [`PdfConfig`]. Failure and
    /// recommendation lists are truncated once the vertical cursor reaches
    /// the bottom margin; content is never paginated onto a second page.
    ///
    /// # Errors
    ///
    /// Returns `ReportingError::Pdf` when the PDF library fails (font
    /// registration or document save), or propagates the I/O error when the
    /// output file cannot be created.
    pub fn generate(&self, report: &ExecutionReport, output_path: &str) -> Result<()> {
        // A4 portrait: 210 mm x 297 mm.
        // NOTE(review): `self.config.author` is never embedded in the
        // document metadata here — confirm whether it should be.
        let (doc, page1, layer1) =
            PdfDocument::new(&self.config.title, Mm(210.0), Mm(297.0), "Layer 1");
        let font = doc
            .add_builtin_font(BuiltinFont::Helvetica)
            .map_err(|e| ReportingError::Pdf(e.to_string()))?;
        let font_bold = doc
            .add_builtin_font(BuiltinFont::HelveticaBold)
            .map_err(|e| ReportingError::Pdf(e.to_string()))?;
        let current_layer = doc.get_page(page1).get_layer(layer1);

        // Title banner.
        current_layer.use_text(&self.config.title, 24.0, Mm(20.0), Mm(270.0), &font_bold);

        // `y` is the vertical text cursor in millimetres, moving down the page.
        let mut y = 255.0;

        // ---- Run overview ------------------------------------------------
        current_layer.use_text(
            format!("Orchestration: {}", report.orchestration_name),
            12.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 7.0;
        current_layer.use_text(
            format!("Start: {}", report.start_time.format("%Y-%m-%d %H:%M:%S UTC")),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 5.0;
        current_layer.use_text(
            format!("End: {}", report.end_time.format("%Y-%m-%d %H:%M:%S UTC")),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 5.0;
        current_layer.use_text(
            format!("Duration: {}s", report.duration_seconds),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 5.0;
        current_layer.use_text(
            format!("Status: {}", report.status),
            10.0,
            Mm(20.0),
            Mm(y),
            &font_bold,
        );

        // ---- Step summary ------------------------------------------------
        y -= 15.0;
        current_layer.use_text("Summary", 14.0, Mm(20.0), Mm(y), &font_bold);
        y -= 7.0;
        current_layer.use_text(
            format!("Total Steps: {}", report.total_steps),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 5.0;
        current_layer.use_text(
            format!("Completed: {}", report.completed_steps),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );
        y -= 5.0;
        current_layer.use_text(
            format!("Failed: {}", report.failed_steps),
            10.0,
            Mm(20.0),
            Mm(y),
            &font,
        );

        // ---- Metrics (optional) ------------------------------------------
        if self.config.include_metrics {
            y -= 15.0;
            current_layer.use_text("Metrics", 14.0, Mm(20.0), Mm(y), &font_bold);
            y -= 7.0;
            current_layer.use_text(
                format!("Total Requests: {}", report.metrics.total_requests),
                10.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
            y -= 5.0;
            current_layer.use_text(
                format!("Error Rate: {:.2}%", report.metrics.error_rate * 100.0),
                10.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
            y -= 5.0;
            current_layer.use_text(
                format!("Avg Latency: {:.2}ms", report.metrics.avg_latency_ms),
                10.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
            y -= 5.0;
            current_layer.use_text(
                format!("P95 Latency: {:.2}ms", report.metrics.p95_latency_ms),
                10.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
            // Include p99 alongside avg/p95 so the section matches the data
            // carried by `ReportMetrics` and the charts section below.
            y -= 5.0;
            current_layer.use_text(
                format!("P99 Latency: {:.2}ms", report.metrics.p99_latency_ms),
                10.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
        }

        // ---- ASCII chart bars (optional) ---------------------------------
        if self.config.include_charts {
            y -= 15.0;
            current_layer.use_text("Performance Overview", 14.0, Mm(20.0), Mm(y), &font_bold);
            y -= 8.0;
            // Guard against division by zero when no requests were recorded.
            let success_rate = if report.metrics.total_requests > 0 {
                report.metrics.successful_requests as f64 / report.metrics.total_requests as f64
            } else {
                0.0
            };
            // `{:<30}` left-aligns the `#` fill so the bar grows from the
            // left edge of the brackets (right-aligning would render an
            // inverted bar).
            let bar_len = (success_rate * 30.0) as usize;
            let bar = format!(
                "Success Rate: [{:<30}] {:.1}%",
                "#".repeat(bar_len),
                success_rate * 100.0
            );
            current_layer.use_text(bar, 9.0, Mm(20.0), Mm(y), &font);
            y -= 6.0;
            let step_rate = if report.total_steps > 0 {
                report.completed_steps as f64 / report.total_steps as f64
            } else {
                0.0
            };
            let bar_len = (step_rate * 30.0) as usize;
            let bar = format!(
                "Steps Done: [{:<30}] {:.1}%",
                "#".repeat(bar_len),
                step_rate * 100.0
            );
            current_layer.use_text(bar, 9.0, Mm(20.0), Mm(y), &font);
            y -= 6.0;
            current_layer.use_text(
                format!(
                    "Latency (ms): avg={:.1} p95={:.1} p99={:.1}",
                    report.metrics.avg_latency_ms,
                    report.metrics.p95_latency_ms,
                    report.metrics.p99_latency_ms,
                ),
                9.0,
                Mm(20.0),
                Mm(y),
                &font,
            );
        }

        // ---- Failures ----------------------------------------------------
        if !report.failures.is_empty() {
            y -= 15.0;
            current_layer.use_text("Failures", 14.0, Mm(20.0), Mm(y), &font_bold);
            for failure in &report.failures {
                y -= 7.0;
                // Stop before running into the footer; remaining entries
                // are silently dropped (single-page report).
                if y < 20.0 {
                    break;
                }
                current_layer.use_text(
                    format!("• {}: {}", failure.step_name, failure.error_message),
                    9.0,
                    Mm(25.0),
                    Mm(y),
                    &font,
                );
            }
        }

        // ---- Recommendations (optional) ----------------------------------
        if self.config.include_recommendations && !report.recommendations.is_empty() {
            y -= 15.0;
            current_layer.use_text("Recommendations", 14.0, Mm(20.0), Mm(y), &font_bold);
            for recommendation in &report.recommendations {
                y -= 7.0;
                if y < 20.0 {
                    break;
                }
                current_layer.use_text(
                    format!("• {}", recommendation),
                    9.0,
                    Mm(25.0),
                    Mm(y),
                    &font,
                );
            }
        }

        // Footer, pinned near the bottom margin.
        current_layer.use_text(
            format!("Generated by MockForge on {}", Utc::now().format("%Y-%m-%d %H:%M UTC")),
            8.0,
            Mm(20.0),
            Mm(10.0),
            &font,
        );

        doc.save(&mut BufWriter::new(File::create(output_path)?))
            .map_err(|e| ReportingError::Pdf(e.to_string()))?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    /// Builds a fully-populated, failure-free report shared by the tests.
    fn create_test_report() -> ExecutionReport {
        ExecutionReport {
            orchestration_name: "test-orch".to_string(),
            start_time: Utc::now(),
            end_time: Utc::now(),
            duration_seconds: 120,
            status: "Completed".to_string(),
            total_steps: 5,
            completed_steps: 5,
            failed_steps: 0,
            metrics: ReportMetrics {
                total_requests: 1000,
                successful_requests: 980,
                failed_requests: 20,
                avg_latency_ms: 125.5,
                p95_latency_ms: 250.0,
                p99_latency_ms: 350.0,
                error_rate: 0.02,
            },
            failures: vec![],
            recommendations: vec!["Increase timeout thresholds".to_string()],
        }
    }

    // End-to-end: generating a PDF into a temp dir succeeds and writes a file.
    #[test]
    fn test_pdf_generation() {
        let config = PdfConfig::default();
        let generator = PdfReportGenerator::new(config);
        let report = create_test_report();
        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("report.pdf");
        let result = generator.generate(&report, output_path.to_str().unwrap());
        assert!(result.is_ok());
        assert!(output_path.exists());
    }

    // Default config values match the documented defaults.
    #[test]
    fn test_pdf_config_default() {
        let config = PdfConfig::default();
        assert_eq!(config.title, "Chaos Orchestration Report");
        assert_eq!(config.author, "MockForge");
        assert!(config.include_charts);
        assert!(config.include_metrics);
        assert!(config.include_recommendations);
    }

    // Custom config values are stored verbatim.
    #[test]
    fn test_pdf_config_custom() {
        let config = PdfConfig {
            title: "Custom Report".to_string(),
            author: "Test Author".to_string(),
            include_charts: false,
            include_metrics: true,
            include_recommendations: false,
        };
        assert_eq!(config.title, "Custom Report");
        assert_eq!(config.author, "Test Author");
        assert!(!config.include_charts);
        assert!(config.include_metrics);
        assert!(!config.include_recommendations);
    }

    #[test]
    fn test_pdf_config_clone() {
        let config = PdfConfig::default();
        let cloned = config.clone();
        assert_eq!(config.title, cloned.title);
        assert_eq!(config.author, cloned.author);
    }

    // Serde round-trip sanity: serialization emits the expected field names.
    #[test]
    fn test_pdf_config_serialize() {
        let config = PdfConfig::default();
        let json = serde_json::to_string(&config).unwrap();
        assert!(json.contains("title"));
        assert!(json.contains("author"));
        assert!(json.contains("include_charts"));
    }

    #[test]
    fn test_pdf_config_deserialize() {
        let json = r#"{"title":"Test","author":"Author","include_charts":true,"include_metrics":false,"include_recommendations":true}"#;
        let config: PdfConfig = serde_json::from_str(json).unwrap();
        assert_eq!(config.title, "Test");
        assert_eq!(config.author, "Author");
        assert!(config.include_charts);
        assert!(!config.include_metrics);
    }

    #[test]
    fn test_execution_report_clone() {
        let report = create_test_report();
        let cloned = report.clone();
        assert_eq!(report.orchestration_name, cloned.orchestration_name);
        assert_eq!(report.duration_seconds, cloned.duration_seconds);
    }

    #[test]
    fn test_execution_report_serialize() {
        let report = create_test_report();
        let json = serde_json::to_string(&report).unwrap();
        assert!(json.contains("orchestration_name"));
        assert!(json.contains("metrics"));
        assert!(json.contains("status"));
    }

    #[test]
    fn test_report_metrics_clone() {
        let metrics = ReportMetrics {
            total_requests: 1000,
            successful_requests: 980,
            failed_requests: 20,
            avg_latency_ms: 100.0,
            p95_latency_ms: 200.0,
            p99_latency_ms: 300.0,
            error_rate: 0.02,
        };
        let cloned = metrics.clone();
        assert_eq!(metrics.total_requests, cloned.total_requests);
        assert_eq!(metrics.error_rate, cloned.error_rate);
    }

    #[test]
    fn test_report_metrics_serialize() {
        let metrics = ReportMetrics {
            total_requests: 1000,
            successful_requests: 980,
            failed_requests: 20,
            avg_latency_ms: 100.0,
            p95_latency_ms: 200.0,
            p99_latency_ms: 300.0,
            error_rate: 0.02,
        };
        let json = serde_json::to_string(&metrics).unwrap();
        assert!(json.contains("total_requests"));
        assert!(json.contains("error_rate"));
    }

    #[test]
    fn test_failure_detail_clone() {
        let failure = FailureDetail {
            step_name: "auth-step".to_string(),
            error_message: "Connection timeout".to_string(),
            timestamp: Utc::now(),
        };
        let cloned = failure.clone();
        assert_eq!(failure.step_name, cloned.step_name);
        assert_eq!(failure.error_message, cloned.error_message);
    }

    #[test]
    fn test_failure_detail_serialize() {
        let failure = FailureDetail {
            step_name: "auth-step".to_string(),
            error_message: "Connection timeout".to_string(),
            timestamp: Utc::now(),
        };
        let json = serde_json::to_string(&failure).unwrap();
        assert!(json.contains("step_name"));
        assert!(json.contains("error_message"));
        assert!(json.contains("timestamp"));
    }

    // A report carrying failure details still renders successfully.
    #[test]
    fn test_pdf_with_failures() {
        let config = PdfConfig::default();
        let generator = PdfReportGenerator::new(config);
        let mut report = create_test_report();
        report.failures = vec![
            FailureDetail {
                step_name: "auth-step".to_string(),
                error_message: "Connection timeout".to_string(),
                timestamp: Utc::now(),
            },
            FailureDetail {
                step_name: "data-step".to_string(),
                error_message: "Invalid response".to_string(),
                timestamp: Utc::now(),
            },
        ];
        report.failed_steps = 2;
        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("report_with_failures.pdf");
        let result = generator.generate(&report, output_path.to_str().unwrap());
        assert!(result.is_ok());
        assert!(output_path.exists());
    }

    // Disabling the metrics section must not break generation.
    #[test]
    fn test_pdf_without_metrics() {
        let config = PdfConfig {
            include_metrics: false,
            ..PdfConfig::default()
        };
        let generator = PdfReportGenerator::new(config);
        let report = create_test_report();
        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("report_no_metrics.pdf");
        let result = generator.generate(&report, output_path.to_str().unwrap());
        assert!(result.is_ok());
    }

    // Disabling recommendations (and providing none) must not break generation.
    #[test]
    fn test_pdf_without_recommendations() {
        let config = PdfConfig {
            include_recommendations: false,
            ..PdfConfig::default()
        };
        let generator = PdfReportGenerator::new(config);
        let mut report = create_test_report();
        report.recommendations = vec![];
        let temp_dir = tempdir().unwrap();
        let output_path = temp_dir.path().join("report_no_recs.pdf");
        let result = generator.generate(&report, output_path.to_str().unwrap());
        assert!(result.is_ok());
    }

    // An uncreatable output path surfaces as an error, not a panic.
    #[test]
    fn test_pdf_generator_invalid_path() {
        let config = PdfConfig::default();
        let generator = PdfReportGenerator::new(config);
        let report = create_test_report();
        let result = generator.generate(&report, "/nonexistent/path/report.pdf");
        assert!(result.is_err());
    }

    #[test]
    fn test_pdf_config_debug() {
        let config = PdfConfig::default();
        let debug = format!("{:?}", config);
        assert!(debug.contains("PdfConfig"));
    }

    #[test]
    fn test_execution_report_debug() {
        let report = create_test_report();
        let debug = format!("{:?}", report);
        assert!(debug.contains("ExecutionReport"));
    }

    #[test]
    fn test_report_metrics_debug() {
        let metrics = ReportMetrics {
            total_requests: 1000,
            successful_requests: 980,
            failed_requests: 20,
            avg_latency_ms: 100.0,
            p95_latency_ms: 200.0,
            p99_latency_ms: 300.0,
            error_rate: 0.02,
        };
        let debug = format!("{:?}", metrics);
        assert!(debug.contains("ReportMetrics"));
    }
}