dataprof 0.7.1

High-performance data profiler with ISO 8000/25012 quality metrics for CSV, JSON/JSONL, and Parquet files
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
//! Data Quality Metrics Calculation Module
//!
//! This module implements comprehensive data quality metric calculations following industry standards.
//! It provides structured assessment across five key dimensions: Completeness, Consistency, Uniqueness, Accuracy, and Timeliness.
//!
//! ## ISO Compliance
//!
//! This module follows:
//! - **ISO 8000-8**: Data Quality (Completeness, Timeliness)
//! - **ISO 8000-61**: Master Data Quality (Consistency)
//! - **ISO 8000-110**: Duplicate Detection (Uniqueness)
//! - **ISO 25012**: Data Quality Model (Accuracy)
//!
//! ## TODO: Future ISO 25012 Dimensions
//!
//! ISO/IEC 25012 defines 15 data quality characteristics. We currently implement 5.
//! Below are candidates for future implementation:
//!
//! ### Inherent Data Quality (content-focused)
//! - **Credibility**: Trustworthiness and believability of data origin
//!   - Impl: Track data source metadata, provenance scoring
//! - **Currentness**: Already partially covered by Timeliness
//!
//! ### System-Dependent Data Quality
//! - **Accessibility**: Data retrievability when needed
//!   - Impl: Response time metrics, availability checks
//! - **Portability**: Transferability across data models/platforms
//!   - Impl: Schema compatibility scoring, format conversion success rate
//! - **Recoverability**: Data integrity during system failures
//!   - Impl: Backup validation, checksum verification
//!
//! ### Additional Dimensions (easy wins)
//! - **Validity**: Conformance to domain-specific rules
//!   - Impl: Custom rule engine, regex pattern validation
//!   - Example: Email format, phone format, business rules
//! - **Precision**: Level of detail/decimal places
//!   - Impl: Decimal place analysis, significant figures check
//!
//! ### Research-Proposed Dimensions (MDPI 2024-2025)
//! - **Governance**: Data ownership and responsibility tracking
//! - **Usefulness**: Practical utility scoring for intended purpose
//! - **Semantics**: Meaning clarity and interpretability metrics
//!
//! ### How to Add a New Dimension
//! 1. Create `metrics/{dimension}.rs` with `{Dimension}Metrics` struct + `{Dimension}Calculator`
//! 2. Add `mod {dimension};` and `use {dimension}::...;` in this file
//! 3. Extend `QualityMetrics` in `src/types.rs` with new fields
//! 4. Call the calculator in `calculate_comprehensive_metrics()` and map fields
//! 5. Update `IsoQualityConfig` in `src/core/config.rs` if configurable thresholds needed
//!
//! ### References
//! - ISO 25012: <https://iso25000.com/index.php/en/iso-25000-standards/iso-25012>
//! - ISO 8000 Wikipedia: <https://en.wikipedia.org/wiki/ISO_8000>
//! - Data Quality 2025 Guide: <https://www.ewsolutions.com/data-quality-quide/>
//! - MDPI Framework Review: <https://www.mdpi.com/2504-2289/9/4/93>

mod accuracy;
mod completeness;
mod consistency;
mod timeliness;
mod uniqueness;
mod utils;

// Re-export public types for backward compatibility
pub use utils::{StatisticalValidation, validate_sample_size};

use accuracy::AccuracyCalculator;
use completeness::CompletenessCalculator;
use consistency::ConsistencyCalculator;
use timeliness::TimelinessCalculator;
use uniqueness::UniquenessCalculator;

use crate::core::config::IsoQualityConfig;
use crate::core::errors::DataProfilerError;
use crate::types::{
    AccuracyMetrics, ColumnProfile, CompletenessMetrics, ConsistencyMetrics, QualityDimension,
    QualityMetrics, TimelinessMetrics, UniquenessMetrics,
};
use std::collections::HashMap;

/// Engine for calculating comprehensive data quality metrics
/// Supports ISO 8000/25012 configurable thresholds
/// Calculation engine for comprehensive data quality metrics covering the
/// five implemented ISO 8000/25012 dimensions (completeness, consistency,
/// uniqueness, accuracy, timeliness).
pub struct MetricsCalculator {
    /// Configurable ISO-compliant thresholds that drive each dimension's scoring.
    pub thresholds: IsoQualityConfig,
}

impl Default for MetricsCalculator {
    fn default() -> Self {
        Self::new()
    }
}

impl MetricsCalculator {
    /// Create a new calculator with default ISO thresholds
    pub fn new() -> Self {
        Self {
            thresholds: IsoQualityConfig::default(),
        }
    }

    /// Create a calculator with custom thresholds
    pub fn with_thresholds(thresholds: IsoQualityConfig) -> Self {
        Self { thresholds }
    }

    /// Create a calculator with strict thresholds (finance, healthcare)
    pub fn strict() -> Self {
        Self {
            thresholds: IsoQualityConfig::strict(),
        }
    }

    /// Create a calculator with lenient thresholds (exploratory, marketing)
    pub fn lenient() -> Self {
        Self {
            thresholds: IsoQualityConfig::lenient(),
        }
    }

    /// Validate statistical requirements for metric calculation
    pub fn validate_sample_size(sample_size: usize, metric_type: &str) -> StatisticalValidation {
        utils::validate_sample_size(sample_size, metric_type)
    }

    /// Check whether a dimension is requested.
    ///
    /// `None` → all dimensions are requested (backward-compatible default).
    ///
    /// Takes the `Option` by value: `Option<&[QualityDimension]>` is `Copy`,
    /// so callers don't need to pass an extra reference.
    fn is_requested(requested: Option<&[QualityDimension]>, dim: QualityDimension) -> bool {
        requested.map_or(true, |dims| dims.contains(&dim))
    }

    /// Calculate comprehensive data quality metrics from column data.
    ///
    /// When `requested_dimensions` is `None`, all dimensions are computed
    /// (backward-compatible default). When `Some(&[...])`, only the listed
    /// dimensions are computed; the rest are `None` in the returned struct.
    pub fn calculate_comprehensive_metrics(
        &self,
        data: &HashMap<String, Vec<String>>,
        column_profiles: &[ColumnProfile],
        requested_dimensions: Option<&[QualityDimension]>,
    ) -> Result<QualityMetrics, DataProfilerError> {
        if data.is_empty() {
            return Ok(Self::default_metrics_for_empty_dataset(requested_dimensions));
        }

        let sample_size = Self::calculate_sample_size(data)?;

        // Validate sample size for statistical reliability (non-fatal: we warn
        // and continue so small exploratory datasets still get metrics).
        let validation = Self::validate_sample_size(sample_size, "general");
        if !validation.sufficient_sample {
            eprintln!(
                "Warning: Sample size ({}) is below recommended minimum ({}) for reliable statistics",
                validation.actual_sample_size, validation.min_sample_size
            );
        }

        // Completeness dimension (ISO 8000-8)
        let completeness = if Self::is_requested(requested_dimensions, QualityDimension::Completeness)
        {
            let c = CompletenessCalculator::new(&self.thresholds).calculate(
                data,
                column_profiles,
                sample_size,
            )?;
            Some(CompletenessMetrics {
                missing_values_ratio: c.missing_values_ratio,
                complete_records_ratio: c.complete_records_ratio,
                null_columns: c.null_columns,
            })
        } else {
            None
        };

        // Consistency dimension (ISO 8000-61)
        let consistency = if Self::is_requested(requested_dimensions, QualityDimension::Consistency)
        {
            let c = ConsistencyCalculator::calculate(data, column_profiles)?;
            Some(ConsistencyMetrics {
                data_type_consistency: c.data_type_consistency,
                format_violations: c.format_violations,
                encoding_issues: c.encoding_issues,
            })
        } else {
            None
        };

        // Uniqueness dimension (ISO 8000-110)
        let uniqueness = if Self::is_requested(requested_dimensions, QualityDimension::Uniqueness) {
            let u = UniquenessCalculator::new(&self.thresholds).calculate(
                data,
                column_profiles,
                sample_size,
            )?;
            Some(UniquenessMetrics {
                duplicate_rows: u.duplicate_rows,
                key_uniqueness: u.key_uniqueness,
                high_cardinality_warning: u.high_cardinality_warning,
            })
        } else {
            None
        };

        // Accuracy dimension (ISO 25012)
        let accuracy = if Self::is_requested(requested_dimensions, QualityDimension::Accuracy) {
            let a = AccuracyCalculator::new(&self.thresholds).calculate(data, column_profiles)?;
            Some(AccuracyMetrics {
                outlier_ratio: a.outlier_ratio,
                range_violations: a.range_violations,
                negative_values_in_positive: a.negative_values_in_positive,
            })
        } else {
            None
        };

        // Timeliness dimension (ISO 8000-8)
        let timeliness = if Self::is_requested(requested_dimensions, QualityDimension::Timeliness) {
            let t = TimelinessCalculator::new(&self.thresholds).calculate(data, column_profiles)?;
            Some(TimelinessMetrics {
                future_dates_count: t.future_dates_count,
                stale_data_ratio: t.stale_data_ratio,
                temporal_violations: t.temporal_violations,
            })
        } else {
            None
        };

        Ok(QualityMetrics {
            completeness,
            consistency,
            uniqueness,
            accuracy,
            timeliness,
        })
    }

    /// Create default metrics for empty dataset (only requested dimensions are populated).
    ///
    /// An empty dataset is vacuously "clean", so each populated dimension gets
    /// its best-case value (0 violations, 100% scores). Request filtering is
    /// delegated to [`Self::is_requested`] so the semantics stay in one place.
    fn default_metrics_for_empty_dataset(
        requested: Option<&[QualityDimension]>,
    ) -> QualityMetrics {
        QualityMetrics {
            completeness: if Self::is_requested(requested, QualityDimension::Completeness) {
                Some(CompletenessMetrics {
                    missing_values_ratio: 0.0,
                    complete_records_ratio: 100.0,
                    null_columns: vec![],
                })
            } else {
                None
            },
            consistency: if Self::is_requested(requested, QualityDimension::Consistency) {
                Some(ConsistencyMetrics {
                    data_type_consistency: 100.0,
                    format_violations: 0,
                    encoding_issues: 0,
                })
            } else {
                None
            },
            uniqueness: if Self::is_requested(requested, QualityDimension::Uniqueness) {
                Some(UniquenessMetrics {
                    duplicate_rows: 0,
                    key_uniqueness: 100.0,
                    high_cardinality_warning: false,
                })
            } else {
                None
            },
            accuracy: if Self::is_requested(requested, QualityDimension::Accuracy) {
                Some(AccuracyMetrics {
                    outlier_ratio: 0.0,
                    range_violations: 0,
                    negative_values_in_positive: 0,
                })
            } else {
                None
            },
            timeliness: if Self::is_requested(requested, QualityDimension::Timeliness) {
                Some(TimelinessMetrics {
                    future_dates_count: 0,
                    stale_data_ratio: 0.0,
                    temporal_violations: 0,
                })
            } else {
                None
            },
        }
    }

    /// Calculate quality metrics with bifurcated computation for streaming.
    ///
    /// **Phase A (exact from global counters)**: Completeness metrics are computed
    /// from `ColumnProfile` stats (`null_count`, `total_count`) which are exact
    /// even for infinite streams. Key uniqueness already uses `ColumnProfile`.
    ///
    /// **Phase B (sampled)**: Consistency, Accuracy, Timeliness, and duplicate_rows
    /// are computed from the bounded reservoir sample.
    ///
    /// Returns a [`BifurcatedResult`] containing the metrics plus provenance
    /// for which dimensions are exact vs sampled.
    pub fn calculate_bifurcated_metrics(
        &self,
        data: &HashMap<String, Vec<String>>,
        column_profiles: &[ColumnProfile],
        requested_dimensions: Option<&[QualityDimension]>,
    ) -> Result<BifurcatedResult, DataProfilerError> {
        if data.is_empty() && column_profiles.is_empty() {
            return Ok(BifurcatedResult {
                metrics: Self::default_metrics_for_empty_dataset(requested_dimensions),
                exact_dimensions: vec![],
                sampled_dimensions: vec![],
                sample_size: 0,
            });
        }

        // `total_rows` comes from the exact streaming counters; `sample_rows`
        // is the size of the bounded reservoir (0 when no sample was kept).
        let total_rows = column_profiles.first().map(|p| p.total_count).unwrap_or(0);
        let sample_rows = Self::calculate_sample_size(data).unwrap_or(0);

        let mut exact_dimensions = Vec::new();
        let mut sampled_dimensions = Vec::new();

        // Phase A: Completeness from exact global counters
        let completeness = if Self::is_requested(requested_dimensions, QualityDimension::Completeness)
        {
            let c = CompletenessCalculator::new(&self.thresholds)
                .calculate_from_profiles(column_profiles)?;
            exact_dimensions.push("completeness".to_string());
            Some(CompletenessMetrics {
                missing_values_ratio: c.missing_values_ratio,
                complete_records_ratio: c.complete_records_ratio,
                null_columns: c.null_columns,
            })
        } else {
            None
        };

        // Phase B: Sampled dimensions from reservoir data.
        // When the reservoir is empty each sampled dimension falls back to its
        // best-case default rather than erroring out.
        let consistency = if Self::is_requested(requested_dimensions, QualityDimension::Consistency)
        {
            let c = if !data.is_empty() {
                ConsistencyCalculator::calculate(data, column_profiles)?
            } else {
                consistency::ConsistencyMetrics {
                    data_type_consistency: 100.0,
                    format_violations: 0,
                    encoding_issues: 0,
                }
            };
            sampled_dimensions.push("consistency".to_string());
            Some(ConsistencyMetrics {
                data_type_consistency: c.data_type_consistency,
                format_violations: c.format_violations,
                encoding_issues: c.encoding_issues,
            })
        } else {
            None
        };

        // Uniqueness is mixed provenance: key_uniqueness is exact (from
        // profiles), duplicate_rows is estimated from the sample.
        let uniqueness = if Self::is_requested(requested_dimensions, QualityDimension::Uniqueness) {
            let u = UniquenessCalculator::new(&self.thresholds).calculate(
                data,
                column_profiles,
                total_rows,
            )?;
            exact_dimensions.push("key_uniqueness".to_string());
            sampled_dimensions.push("duplicate_rows".to_string());
            Some(UniquenessMetrics {
                duplicate_rows: u.duplicate_rows,
                key_uniqueness: u.key_uniqueness,
                high_cardinality_warning: u.high_cardinality_warning,
            })
        } else {
            None
        };

        let accuracy = if Self::is_requested(requested_dimensions, QualityDimension::Accuracy) {
            let a = if !data.is_empty() {
                AccuracyCalculator::new(&self.thresholds).calculate(data, column_profiles)?
            } else {
                accuracy::AccuracyMetrics {
                    outlier_ratio: 0.0,
                    range_violations: 0,
                    negative_values_in_positive: 0,
                }
            };
            sampled_dimensions.push("accuracy".to_string());
            Some(AccuracyMetrics {
                outlier_ratio: a.outlier_ratio,
                range_violations: a.range_violations,
                negative_values_in_positive: a.negative_values_in_positive,
            })
        } else {
            None
        };

        let timeliness = if Self::is_requested(requested_dimensions, QualityDimension::Timeliness) {
            let t = if !data.is_empty() {
                TimelinessCalculator::new(&self.thresholds).calculate(data, column_profiles)?
            } else {
                timeliness::TimelinessMetrics {
                    future_dates_count: 0,
                    stale_data_ratio: 0.0,
                    temporal_violations: 0,
                }
            };
            sampled_dimensions.push("timeliness".to_string());
            Some(TimelinessMetrics {
                future_dates_count: t.future_dates_count,
                stale_data_ratio: t.stale_data_ratio,
                temporal_violations: t.temporal_violations,
            })
        } else {
            None
        };

        let metrics = QualityMetrics {
            completeness,
            consistency,
            uniqueness,
            accuracy,
            timeliness,
        };

        Ok(BifurcatedResult {
            metrics,
            exact_dimensions,
            sampled_dimensions,
            sample_size: sample_rows,
        })
    }

    /// Calculate sample size across data column arrays (using max inner length heuristic)
    ///
    /// Errors only when `data` has no columns at all; callers that allow empty
    /// reservoirs use `unwrap_or(0)`.
    fn calculate_sample_size(
        data: &HashMap<String, Vec<String>>,
    ) -> Result<usize, DataProfilerError> {
        data.values().map(|v| v.len()).max().ok_or_else(|| {
            DataProfilerError::MetricsCalculationError {
                message: "No data columns found".to_string(),
            }
        })
    }
}

/// Result of bifurcated quality metric calculation.
///
/// Contains the computed metrics plus provenance information about which
/// dimensions were computed exactly (from global streaming counters) and
/// which were computed from a bounded sample.
/// Outcome of a bifurcated quality-metric calculation.
///
/// Bundles the metrics together with provenance: which dimensions came from
/// exact global streaming counters versus a bounded reservoir sample.
pub struct BifurcatedResult {
    /// The computed quality metrics.
    pub metrics: QualityMetrics,
    /// Dimensions derived from exact global counters (e.g., "completeness", "key_uniqueness").
    pub exact_dimensions: Vec<String>,
    /// Dimensions derived from the bounded reservoir sample (e.g., "consistency", "accuracy").
    pub sampled_dimensions: Vec<String>,
    /// Number of sample rows backing the sampled (Phase B) dimensions.
    pub sample_size: usize,
}