1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
//! Validation result types.
use super::Level;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Metrics collected during validation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationMetrics {
    /// Total number of checks executed
    pub total_checks: usize,
    /// Number of checks that passed
    pub passed_checks: usize,
    /// Number of checks that failed
    pub failed_checks: usize,
    /// Number of checks that were skipped
    pub skipped_checks: usize,
    /// Total execution time in milliseconds
    pub execution_time_ms: u64,
    /// Custom metrics collected during validation.
    ///
    /// Omitted from serialized output when empty; `default` is required so
    /// that such payloads can still be deserialized (unlike `Option`, map
    /// fields are not treated as optional by serde derive).
    #[serde(default, skip_serializing_if = "HashMap::is_empty")]
    pub custom_metrics: HashMap<String, f64>,
}
impl ValidationMetrics {
    /// Creates a fresh metrics record: every counter zeroed, no timing,
    /// and no custom metrics recorded yet.
    pub fn new() -> Self {
        Self {
            total_checks: 0,
            passed_checks: 0,
            failed_checks: 0,
            skipped_checks: 0,
            execution_time_ms: 0,
            custom_metrics: HashMap::new(),
        }
    }

    /// Returns the success rate as a percentage (0.0 to 100.0).
    ///
    /// A run with zero checks is considered fully successful (100.0),
    /// avoiding a division by zero.
    pub fn success_rate(&self) -> f64 {
        match self.total_checks {
            0 => 100.0,
            total => (self.passed_checks as f64 / total as f64) * 100.0,
        }
    }
}
impl Default for ValidationMetrics {
fn default() -> Self {
Self::new()
}
}
/// A single problem discovered while running validation checks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationIssue {
    /// Name of the check that surfaced this issue
    pub check_name: String,
    /// Name of the specific constraint that was violated
    pub constraint_name: String,
    /// Severity of the issue
    pub level: Level,
    /// Human-readable description of what went wrong
    pub message: String,
    /// Optional numeric value associated with the issue (e.g. the measured
    /// metric that violated the constraint); omitted from output when absent
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metric: Option<f64>,
}
/// Aggregated outcome of a validation run: metrics plus every issue found.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ValidationReport {
    /// Name of the validation suite that produced this report
    pub suite_name: String,
    /// When the validation ran, as an ISO 8601 / RFC 3339 timestamp
    pub timestamp: String,
    /// Aggregate counters and timing for the run
    pub metrics: ValidationMetrics,
    /// Every issue discovered, in the order it was recorded
    pub issues: Vec<ValidationIssue>,
}
impl ValidationReport {
/// Creates a new validation report.
pub fn new(suite_name: impl Into<String>) -> Self {
Self {
suite_name: suite_name.into(),
timestamp: chrono::Utc::now().to_rfc3339(),
metrics: ValidationMetrics::new(),
issues: Vec::new(),
}
}
/// Adds an issue to the report.
pub fn add_issue(&mut self, issue: ValidationIssue) {
self.issues.push(issue);
}
/// Returns true if there are any error-level issues.
pub fn has_errors(&self) -> bool {
self.issues.iter().any(|issue| issue.level == Level::Error)
}
/// Returns true if there are any warning-level issues.
pub fn has_warnings(&self) -> bool {
self.issues
.iter()
.any(|issue| issue.level == Level::Warning)
}
/// Gets all issues of a specific level.
pub fn issues_by_level(&self, level: Level) -> Vec<&ValidationIssue> {
self.issues
.iter()
.filter(|issue| issue.level == level)
.collect()
}
}
/// Outcome of running a validation suite.
///
/// Serialized with a `"status"` tag of `"success"` or `"failure"`.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "status", rename_all = "lowercase")]
pub enum ValidationResult {
    /// The suite completed with no critical issues.
    Success {
        /// Aggregate metrics for the run
        metrics: ValidationMetrics,
        /// Full report — may still carry warnings or informational issues
        report: ValidationReport,
    },
    /// The suite found one or more critical issues.
    Failure {
        /// Full report describing what failed
        report: ValidationReport,
    },
}
impl ValidationResult {
    /// Builds a successful result from metrics and a report.
    pub fn success(metrics: ValidationMetrics, report: ValidationReport) -> Self {
        Self::Success { metrics, report }
    }

    /// Builds a failed result carrying the report of what went wrong.
    pub fn failure(report: ValidationReport) -> Self {
        Self::Failure { report }
    }

    /// Returns true if the validation succeeded.
    pub fn is_success(&self) -> bool {
        match self {
            Self::Success { .. } => true,
            Self::Failure { .. } => false,
        }
    }

    /// Returns true if the validation failed.
    pub fn is_failure(&self) -> bool {
        !self.is_success()
    }

    /// Borrows the report, which both variants carry.
    pub fn report(&self) -> &ValidationReport {
        match self {
            Self::Success { report, .. } | Self::Failure { report } => report,
        }
    }

    /// Borrows the top-level metrics; only present on the success variant.
    pub fn metrics(&self) -> Option<&ValidationMetrics> {
        if let Self::Success { metrics, .. } = self {
            Some(metrics)
        } else {
            None
        }
    }

    /// Renders the result as compact JSON via the `JsonFormatter`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use term_guard::core::{ValidationResult, ValidationReport, ValidationMetrics};
    /// # let metrics = ValidationMetrics::new();
    /// # let report = ValidationReport::new("test");
    /// # let result = ValidationResult::success(metrics, report);
    /// let json_output = result.to_json().unwrap();
    /// println!("{}", json_output);
    /// ```
    pub fn to_json(&self) -> crate::prelude::Result<String> {
        use crate::formatters::{JsonFormatter, ResultFormatter};
        let formatter = JsonFormatter::new();
        formatter.format(self)
    }

    /// Renders the result as pretty-printed JSON.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use term_guard::core::{ValidationResult, ValidationReport, ValidationMetrics};
    /// # let metrics = ValidationMetrics::new();
    /// # let report = ValidationReport::new("test");
    /// # let result = ValidationResult::success(metrics, report);
    /// let pretty_json = result.to_json_pretty().unwrap();
    /// println!("{}", pretty_json);
    /// ```
    pub fn to_json_pretty(&self) -> crate::prelude::Result<String> {
        use crate::formatters::{JsonFormatter, ResultFormatter};
        let formatter = JsonFormatter::new().with_pretty(true);
        formatter.format(self)
    }

    /// Renders the result in a human-readable form suitable for console
    /// display, via the `HumanFormatter`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use term_guard::core::{ValidationResult, ValidationReport, ValidationMetrics};
    /// # let metrics = ValidationMetrics::new();
    /// # let report = ValidationReport::new("test");
    /// # let result = ValidationResult::success(metrics, report);
    /// let human_output = result.to_human().unwrap();
    /// println!("{}", human_output);
    /// ```
    pub fn to_human(&self) -> crate::prelude::Result<String> {
        use crate::formatters::{HumanFormatter, ResultFormatter};
        let formatter = HumanFormatter::new();
        formatter.format(self)
    }

    /// Renders the result as Markdown suitable for documentation, via the
    /// `MarkdownFormatter`.
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use term_guard::core::{ValidationResult, ValidationReport, ValidationMetrics};
    /// # let metrics = ValidationMetrics::new();
    /// # let report = ValidationReport::new("test");
    /// # let result = ValidationResult::success(metrics, report);
    /// let markdown_output = result.to_markdown().unwrap();
    /// println!("{}", markdown_output);
    /// ```
    pub fn to_markdown(&self) -> crate::prelude::Result<String> {
        use crate::formatters::{MarkdownFormatter, ResultFormatter};
        let formatter = MarkdownFormatter::new();
        formatter.format(self)
    }

    /// Renders the result with a caller-supplied formatter.
    ///
    /// # Arguments
    ///
    /// * `formatter` - Any implementor of `ResultFormatter`
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use term_guard::core::{ValidationResult, ValidationReport, ValidationMetrics};
    /// use term_guard::formatters::{ResultFormatter, HumanFormatter, FormatterConfig};
    /// # let metrics = ValidationMetrics::new();
    /// # let report = ValidationReport::new("test");
    /// # let result = ValidationResult::success(metrics, report);
    ///
    /// let config = FormatterConfig::minimal();
    /// let formatter = HumanFormatter::with_config(config);
    /// let output = result.format_with(&formatter).unwrap();
    /// println!("{}", output);
    /// ```
    pub fn format_with<F: crate::formatters::ResultFormatter>(
        &self,
        formatter: &F,
    ) -> crate::prelude::Result<String> {
        formatter.format(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_validation_metrics_success_rate() {
        let mut m = ValidationMetrics::new();
        // Zero checks counts as a fully successful run.
        assert_eq!(m.success_rate(), 100.0);
        m.total_checks = 10;
        m.passed_checks = 8;
        assert_eq!(m.success_rate(), 80.0);
    }

    #[test]
    fn test_validation_report() {
        let mut report = ValidationReport::new("test_suite");
        // A fresh report carries no issues of any level.
        assert!(!report.has_errors());
        assert!(!report.has_warnings());

        let issue = ValidationIssue {
            check_name: "test_check".to_string(),
            constraint_name: "test_constraint".to_string(),
            level: Level::Error,
            message: "Test error".to_string(),
            metric: Some(0.5),
        };
        report.add_issue(issue);

        assert!(report.has_errors());
        assert_eq!(report.issues_by_level(Level::Error).len(), 1);
    }

    #[test]
    fn test_validation_result() {
        let metrics = ValidationMetrics::new();
        let report = ValidationReport::new("test_suite");

        let ok = ValidationResult::success(metrics, report.clone());
        assert!(ok.is_success());
        assert!(!ok.is_failure());
        assert!(ok.metrics().is_some());

        let failed = ValidationResult::failure(report);
        assert!(!failed.is_success());
        assert!(failed.is_failure());
        // The top-level metrics accessor only yields data on success.
        assert!(failed.metrics().is_none());
    }

    #[test]
    fn test_validation_result_formatting() {
        let metrics = ValidationMetrics::new();
        let mut report = ValidationReport::new("test_suite");
        report.add_issue(ValidationIssue {
            check_name: "test_check".to_string(),
            constraint_name: "test_constraint".to_string(),
            level: Level::Warning,
            message: "Test warning message".to_string(),
            metric: Some(0.8),
        });
        let result = ValidationResult::success(metrics, report);

        // Compact JSON output.
        let json = result.to_json().unwrap();
        assert!(json.contains("\"status\": \"success\""));
        assert!(json.contains("test_suite"));

        // Pretty JSON carries the same content.
        let pretty = result.to_json_pretty().unwrap();
        assert!(pretty.contains("\"status\": \"success\""));
        assert!(pretty.contains("test_suite"));

        // Human-readable output.
        let human = result.to_human().unwrap();
        assert!(human.contains("Validation PASSED"));
        assert!(human.contains("test_suite"));

        // Markdown output.
        let markdown = result.to_markdown().unwrap();
        assert!(markdown.contains("## ✅ Validation Report - PASSED"));
        assert!(markdown.contains("test_suite"));
    }
}