//! Formatters for rendering validation results as JSON, human-readable text,
//! or Markdown.

use crate::core::{Level, ValidationReport, ValidationResult};
use crate::prelude::*;
use serde_json;
use std::fmt::Write;

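/// Options controlling how much detail a [`ResultFormatter`] includes in its
/// output.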
#[derive(Debug, Clone)]
pub struct FormatterConfig {
    /// Include the summary metrics section.
    pub include_metrics: bool,
    /// Include the list of validation issues.
    pub include_issues: bool,
    /// Include user-defined custom metrics.
    pub include_custom_metrics: bool,
    /// Maximum number of issues to show; a negative value means no limit.
    pub max_issues: i32,
    /// Use ANSI color codes in text output.
    pub use_colors: bool,
    /// Include the report timestamp.
    pub include_timestamps: bool,
}

impl Default for FormatterConfig {
    fn default() -> Self {
        Self {
            include_metrics: true,
            include_issues: true,
            include_custom_metrics: true,
            max_issues: -1,
            use_colors: true,
            include_timestamps: true,
        }
    }
}

impl FormatterConfig {
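    /// A minimal configuration: summary metrics only, with issues, custom
    /// metrics, colors, and timestamps all disabled.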
    pub fn minimal() -> Self {
        Self {
            include_metrics: true,
            include_issues: false,
            include_custom_metrics: false,
            max_issues: 0,
            use_colors: false,
            include_timestamps: false,
        }
    }

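    /// A detailed configuration: everything enabled, with no limit on the
    /// number of issues shown.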
    pub fn detailed() -> Self {
        Self {
            include_metrics: true,
            include_issues: true,
            include_custom_metrics: true,
            max_issues: -1,
            use_colors: true,
            include_timestamps: true,
        }
    }

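    /// A configuration suited to CI logs: no colors or custom metrics,
    /// timestamps enabled, and at most 50 issues shown.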
    pub fn ci() -> Self {
        Self {
            include_metrics: true,
            include_issues: true,
            include_custom_metrics: false,
            max_issues: 50,
            use_colors: false,
            include_timestamps: true,
        }
    }

    /// Sets whether summary metrics are included.
    pub fn with_metrics(mut self, include: bool) -> Self {
        self.include_metrics = include;
        self
    }

    /// Sets whether validation issues are included.
    pub fn with_issues(mut self, include: bool) -> Self {
        self.include_issues = include;
        self
    }

    /// Sets the maximum number of issues to show (negative for no limit).
    pub fn with_max_issues(mut self, max: i32) -> Self {
        self.max_issues = max;
        self
    }

    /// Sets whether ANSI color codes are used in text output.
    pub fn with_colors(mut self, use_colors: bool) -> Self {
        self.use_colors = use_colors;
        self
    }
}

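/// Renders a [`ValidationResult`] as text in a particular output format.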
pub trait ResultFormatter {
    /// Formats the result using this formatter's own configuration.
    fn format(&self, result: &ValidationResult) -> Result<String>;

    /// Formats the result using the given configuration. The default
    /// implementation ignores the configuration and delegates to `format`.
    fn format_with_config(
        &self,
        result: &ValidationResult,
        _config: &FormatterConfig,
    ) -> Result<String> {
        self.format(result)
    }
}

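/// Serializes validation results to JSON, pretty-printed by default.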
#[derive(Debug, Clone)]
pub struct JsonFormatter {
    config: FormatterConfig,
    pretty: bool,
}

impl JsonFormatter {
    pub fn new() -> Self {
        Self {
            config: FormatterConfig::default(),
            pretty: true,
        }
    }

    pub fn with_config(config: FormatterConfig) -> Self {
        Self {
            config,
            pretty: true,
        }
    }

    pub fn with_pretty(mut self, pretty: bool) -> Self {
        self.pretty = pretty;
        self
    }
}

impl Default for JsonFormatter {
    fn default() -> Self {
        Self::new()
    }
}

impl ResultFormatter for JsonFormatter {
    fn format(&self, result: &ValidationResult) -> Result<String> {
        self.format_with_config(result, &self.config)
    }

    fn format_with_config(
        &self,
        result: &ValidationResult,
        config: &FormatterConfig,
    ) -> Result<String> {
        let filtered_result = filter_result_for_config(result, config);

        if self.pretty {
            serde_json::to_string_pretty(&filtered_result).map_err(|e| {
                TermError::Internal(format!("Failed to serialize result to JSON: {e}"))
            })
        } else {
            serde_json::to_string(&filtered_result).map_err(|e| {
                TermError::Internal(format!("Failed to serialize result to JSON: {e}"))
            })
        }
    }
}

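/// Produces a human-readable plain-text report, optionally with ANSI colors.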
#[derive(Debug, Clone)]
pub struct HumanFormatter {
    config: FormatterConfig,
}

impl HumanFormatter {
    pub fn new() -> Self {
        Self {
            config: FormatterConfig::default(),
        }
    }

    pub fn with_config(config: FormatterConfig) -> Self {
        Self { config }
    }
}

impl Default for HumanFormatter {
    fn default() -> Self {
        Self::new()
    }
}

impl ResultFormatter for HumanFormatter {
    fn format(&self, result: &ValidationResult) -> Result<String> {
        self.format_with_config(result, &self.config)
    }

    fn format_with_config(
        &self,
        result: &ValidationResult,
        config: &FormatterConfig,
    ) -> Result<String> {
        let mut output = String::new();
        let report = result.report();

        writeln!(output).unwrap();
        if result.is_success() {
            if config.use_colors {
                writeln!(output, "✅ \x1b[32mValidation PASSED\x1b[0m").unwrap();
            } else {
                writeln!(output, "✅ Validation PASSED").unwrap();
            }
        } else if config.use_colors {
            writeln!(output, "❌ \x1b[31mValidation FAILED\x1b[0m").unwrap();
        } else {
            writeln!(output, "❌ Validation FAILED").unwrap();
        }

        writeln!(output).unwrap();
        writeln!(output, "Suite: {}", report.suite_name).unwrap();

        if config.include_timestamps {
            writeln!(output, "Timestamp: {}", report.timestamp).unwrap();
        }

        if config.include_metrics {
            writeln!(output).unwrap();
            writeln!(output, "📊 Summary Statistics:").unwrap();
            writeln!(output, " Total Checks: {}", report.metrics.total_checks).unwrap();

            if config.use_colors {
                writeln!(
                    output,
                    " ✅ Passed: \x1b[32m{}\x1b[0m",
                    report.metrics.passed_checks
                )
                .unwrap();
                writeln!(
                    output,
                    " ❌ Failed: \x1b[31m{}\x1b[0m",
                    report.metrics.failed_checks
                )
                .unwrap();
                writeln!(
                    output,
                    " ⏭️ Skipped: \x1b[33m{}\x1b[0m",
                    report.metrics.skipped_checks
                )
                .unwrap();
            } else {
                writeln!(output, " ✅ Passed: {}", report.metrics.passed_checks).unwrap();
                writeln!(output, " ❌ Failed: {}", report.metrics.failed_checks).unwrap();
                writeln!(output, " ⏭️ Skipped: {}", report.metrics.skipped_checks).unwrap();
            }

            writeln!(
                output,
                " Success Rate: {:.1}%",
                report.metrics.success_rate()
            )
            .unwrap();
            writeln!(
                output,
                " Execution Time: {}ms",
                report.metrics.execution_time_ms
            )
            .unwrap();
        }

        if config.include_custom_metrics && !report.metrics.custom_metrics.is_empty() {
            writeln!(output).unwrap();
            writeln!(output, "📈 Custom Metrics:").unwrap();
            for (name, value) in &report.metrics.custom_metrics {
                writeln!(output, " {name}: {value:.3}").unwrap();
            }
        }

        if config.include_issues && !report.issues.is_empty() {
            writeln!(output).unwrap();
            writeln!(output, "🔍 Issues Found:").unwrap();

            let issues_to_show = if config.max_issues < 0 {
                report.issues.as_slice()
            } else {
                let max = config.max_issues as usize;
                &report.issues[..std::cmp::min(max, report.issues.len())]
            };

            for (i, issue) in issues_to_show.iter().enumerate() {
                writeln!(output).unwrap();
                let level_symbol = match issue.level {
                    Level::Error => {
                        if config.use_colors {
                            "\x1b[31m🚨\x1b[0m"
                        } else {
                            "🚨"
                        }
                    }
                    Level::Warning => {
                        if config.use_colors {
                            "\x1b[33m⚠️\x1b[0m"
                        } else {
                            "⚠️"
                        }
                    }
                    Level::Info => {
                        if config.use_colors {
                            "\x1b[34mℹ️\x1b[0m"
                        } else {
                            "ℹ️"
                        }
                    }
                };

                writeln!(
                    output,
                    " {level_symbol} Issue #{}: {}",
                    i + 1,
                    issue.constraint_name
                )
                .unwrap();
                writeln!(output, " Check: {}", issue.check_name).unwrap();
                writeln!(output, " Level: {:?}", issue.level).unwrap();
                writeln!(output, " Message: {}", issue.message).unwrap();

                if let Some(metric) = issue.metric {
                    writeln!(output, " Metric: {metric:.3}").unwrap();
                }
            }

            if report.issues.len() > issues_to_show.len() {
                writeln!(output).unwrap();
                writeln!(
                    output,
                    " ... and {} more issues (use --max-issues to show more)",
                    report.issues.len() - issues_to_show.len()
                )
                .unwrap();
            }
        }

        writeln!(output).unwrap();
        Ok(output)
    }
}

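/// Produces a Markdown report with a configurable base heading level.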
#[derive(Debug, Clone)]
pub struct MarkdownFormatter {
    config: FormatterConfig,
    heading_level: u8,
}

impl MarkdownFormatter {
    pub fn new() -> Self {
        Self {
            config: FormatterConfig::default(),
            heading_level: 2,
        }
    }

    pub fn with_config(config: FormatterConfig) -> Self {
        Self {
            config,
            heading_level: 2,
        }
    }

    pub fn with_heading_level(mut self, level: u8) -> Self {
        self.heading_level = level.clamp(1, 6);
        self
    }
}

impl Default for MarkdownFormatter {
    fn default() -> Self {
        Self::new()
    }
}

impl ResultFormatter for MarkdownFormatter {
    fn format(&self, result: &ValidationResult) -> Result<String> {
        self.format_with_config(result, &self.config)
    }

    fn format_with_config(
        &self,
        result: &ValidationResult,
        config: &FormatterConfig,
    ) -> Result<String> {
        let mut output = String::new();
        let report = result.report();
        let h = "#".repeat(self.heading_level as usize);

        if result.is_success() {
            writeln!(output, "{h} ✅ Validation Report - PASSED").unwrap();
        } else {
            writeln!(output, "{h} ❌ Validation Report - FAILED").unwrap();
        }

        writeln!(output).unwrap();
        writeln!(output, "**Suite:** {}", report.suite_name).unwrap();

        if config.include_timestamps {
            writeln!(output, "**Timestamp:** {}", report.timestamp).unwrap();
        }

        if config.include_metrics {
            writeln!(output).unwrap();
            writeln!(output, "{h}# Summary").unwrap();
            writeln!(output).unwrap();
            writeln!(output, "| Metric | Value |").unwrap();
            writeln!(output, "|--------|-------|").unwrap();
            writeln!(output, "| Total Checks | {} |", report.metrics.total_checks).unwrap();
            writeln!(output, "| Passed | {} |", report.metrics.passed_checks).unwrap();
            writeln!(output, "| Failed | {} |", report.metrics.failed_checks).unwrap();
            writeln!(output, "| Skipped | {} |", report.metrics.skipped_checks).unwrap();
            writeln!(
                output,
                "| Success Rate | {:.1}% |",
                report.metrics.success_rate()
            )
            .unwrap();
            writeln!(
                output,
                "| Execution Time | {}ms |",
                report.metrics.execution_time_ms
            )
            .unwrap();
        }

        if config.include_custom_metrics && !report.metrics.custom_metrics.is_empty() {
            writeln!(output).unwrap();
            writeln!(output, "{h}# Custom Metrics").unwrap();
            writeln!(output).unwrap();
            writeln!(output, "| Metric | Value |").unwrap();
            writeln!(output, "|--------|-------|").unwrap();
            for (name, value) in &report.metrics.custom_metrics {
                writeln!(output, "| {name} | {value:.3} |").unwrap();
            }
        }

        if config.include_issues && !report.issues.is_empty() {
            writeln!(output).unwrap();
            writeln!(output, "{h}# Issues").unwrap();
            writeln!(output).unwrap();

            let issues_to_show = if config.max_issues < 0 {
                report.issues.as_slice()
            } else {
                let max = config.max_issues as usize;
                &report.issues[..std::cmp::min(max, report.issues.len())]
            };

            for (i, issue) in issues_to_show.iter().enumerate() {
                let level_emoji = match issue.level {
                    Level::Error => "🚨",
                    Level::Warning => "⚠️",
                    Level::Info => "ℹ️",
                };

                writeln!(
                    output,
                    "{h}## {level_emoji} Issue #{}: {}",
                    i + 1,
                    issue.constraint_name
                )
                .unwrap();
                writeln!(output).unwrap();
                writeln!(output, "- **Check:** {}", issue.check_name).unwrap();
                writeln!(output, "- **Level:** {:?}", issue.level).unwrap();
                writeln!(output, "- **Message:** {}", issue.message).unwrap();

                if let Some(metric) = issue.metric {
                    writeln!(output, "- **Metric:** {metric:.3}").unwrap();
                }

                writeln!(output).unwrap();
            }

            if report.issues.len() > issues_to_show.len() {
                writeln!(
                    output,
                    "> **Note:** {} additional issues not shown in this report.",
                    report.issues.len() - issues_to_show.len()
                )
                .unwrap();
                writeln!(output).unwrap();
            }
        }

        Ok(output)
    }
}

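/// Applies the formatter configuration to a result before serialization,
/// stripping the sections that the configuration excludes.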
fn filter_result_for_config(
    result: &ValidationResult,
    config: &FormatterConfig,
) -> ValidationResult {
    match result {
        ValidationResult::Success { metrics, report } => {
            let filtered_report = filter_report(report, config);
            let filtered_metrics = if config.include_metrics {
                metrics.clone()
            } else {
                let mut minimal_metrics = metrics.clone();
                if !config.include_custom_metrics {
                    minimal_metrics.custom_metrics.clear();
                }
                minimal_metrics
            };
            ValidationResult::Success {
                metrics: filtered_metrics,
                report: filtered_report,
            }
        }
        ValidationResult::Failure { report } => ValidationResult::Failure {
            report: filter_report(report, config),
        },
    }
}

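/// Returns a copy of the report with issues, custom metrics, and the
/// timestamp filtered according to the configuration.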
fn filter_report(report: &ValidationReport, config: &FormatterConfig) -> ValidationReport {
    let mut filtered_report = report.clone();

    if !config.include_issues {
        filtered_report.issues.clear();
    } else if config.max_issues >= 0 {
        let max = config.max_issues as usize;
        filtered_report.issues.truncate(max);
    }

    if !config.include_custom_metrics {
        filtered_report.metrics.custom_metrics.clear();
    }

    if !config.include_timestamps {
        filtered_report.timestamp = String::new();
    }

    filtered_report
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::core::{Level, ValidationIssue, ValidationMetrics, ValidationReport};

    fn create_test_result() -> ValidationResult {
        let mut metrics = ValidationMetrics::new();
        metrics.total_checks = 10;
        metrics.passed_checks = 8;
        metrics.failed_checks = 2;
        metrics.skipped_checks = 0;
        metrics.execution_time_ms = 150;
        metrics
            .custom_metrics
            .insert("data.completeness".to_string(), 0.85);

        let mut report = ValidationReport::new("test_suite");
        report.add_issue(ValidationIssue {
            check_name: "completeness_check".to_string(),
            constraint_name: "completeness".to_string(),
            level: Level::Error,
            message: "Column has insufficient completeness".to_string(),
            metric: Some(0.75),
        });

        report.add_issue(ValidationIssue {
            check_name: "size_check".to_string(),
            constraint_name: "size".to_string(),
            level: Level::Warning,
            message: "Dataset size is below expected range".to_string(),
            metric: Some(150.0),
        });

        report.metrics = metrics.clone();
        ValidationResult::failure(report)
    }

    #[test]
    fn test_formatter_config() {
        let config = FormatterConfig::default();
        assert!(config.include_metrics);
        assert!(config.include_issues);
        assert!(config.use_colors);

        let minimal = FormatterConfig::minimal();
        assert!(minimal.include_metrics);
        assert!(!minimal.include_issues);
        assert!(!minimal.use_colors);

        let ci = FormatterConfig::ci();
        assert!(!ci.use_colors);
        assert_eq!(ci.max_issues, 50);
    }

    #[test]
    fn test_json_formatter() {
        let result = create_test_result();
        let formatter = JsonFormatter::new();

        let output = formatter.format(&result).unwrap();
        assert!(output.contains("\"status\": \"failure\""));
        assert!(output.contains("\"test_suite\""));
        assert!(output.contains("completeness_check"));

        let config = FormatterConfig::minimal();
        let output = formatter.format_with_config(&result, &config).unwrap();
        assert!(output.contains("\"status\": \"failure\""));
    }

    #[test]
    fn test_human_formatter() {
        let result = create_test_result();
        let formatter = HumanFormatter::new();

        let output = formatter.format(&result).unwrap();
        assert!(output.contains("Validation FAILED"));
        assert!(output.contains("test_suite"));
        assert!(output.contains("Total Checks: 10"));
        assert!(output.contains("completeness_check"));

        let config = FormatterConfig::default().with_colors(false);
        let output = formatter.format_with_config(&result, &config).unwrap();
        assert!(output.contains("Validation FAILED"));
        assert!(!output.contains("\x1b["));
    }

    #[test]
    fn test_markdown_formatter() {
        let result = create_test_result();
        let formatter = MarkdownFormatter::new();

        let output = formatter.format(&result).unwrap();
        assert!(output.contains("## ❌ Validation Report - FAILED"));
        assert!(output.contains("**Suite:** test_suite"));
        assert!(output.contains("| Total Checks | 10 |"));
        assert!(output.contains("### 🚨 Issue #1: completeness"));

        let formatter = MarkdownFormatter::new().with_heading_level(1);
        let output = formatter.format(&result).unwrap();
        assert!(output.contains("# ❌ Validation Report - FAILED"));
    }

    #[test]
    fn test_config_max_issues() {
        let result = create_test_result();
        let config = FormatterConfig::default().with_max_issues(1);

        let formatter = HumanFormatter::new();
        let output = formatter.format_with_config(&result, &config).unwrap();
        assert!(output.contains("Issue #1"));
        assert!(output.contains("... and 1 more issues"));
    }
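
    // Exercises the filter helper directly, using only the config builders
    // and test fixtures defined in this file.
    #[test]
    fn test_filter_report_respects_config() {
        let result = create_test_result();
        let report = result.report();

        // A max_issues of 1 should truncate the issue list to a single entry.
        let config = FormatterConfig::default().with_max_issues(1);
        let filtered = filter_report(&report, &config);
        assert_eq!(filtered.issues.len(), 1);

        // Disabling issues entirely should clear the list.
        let config = FormatterConfig::default().with_issues(false);
        let filtered = filter_report(&report, &config);
        assert!(filtered.issues.is_empty());
    }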
}