#![allow(dead_code)]
use crate::{DecodeOptions, EncodeOptions, decode_record, encode_record, memory::ScratchBuffers};
use copybook_core::{Field, FieldKind, Schema};
use serde_json::Value as JsonValue;
use sha2::{Digest, Sha256};
use std::collections::HashMap;
use std::convert::TryInto;
/// Validates that a decode→encode round trip reproduces a record's bytes,
/// using the configured schema, codec options, and precision tolerances.
pub struct BinaryFidelityValidator {
/// Copybook schema describing the record layout under validation.
schema: Schema,
/// Options forwarded to the decode side of the round trip.
decode_options: DecodeOptions,
/// Options forwarded to the encode side of the round trip.
encode_options: EncodeOptions,
/// Per-field-type tolerances and business rules for precision checks.
precision_config: PrecisionConfig,
/// Reusable buffers so repeated validations avoid per-record allocations.
scratch_buffers: ScratchBuffers,
}
/// Decode/encode option pair handed to the validator's codec calls.
#[derive(Debug, Clone)]
pub struct CodecOptions {
pub decode_options: DecodeOptions,
pub encode_options: EncodeOptions,
}
/// Precision tolerances keyed by COBOL storage class, plus business rules
/// governing how strictly numeric drift is judged.
#[derive(Debug, Clone)]
pub struct PrecisionConfig {
pub tolerance_settings: HashMap<CobolFieldType, PrecisionTolerance>,
pub business_rules: BusinessRuleConfig,
}
/// Allowed numeric drift for one COBOL storage class.
#[derive(Debug, Clone)]
pub struct PrecisionTolerance {
/// Maximum absolute difference between original and round-trip values.
pub absolute_tolerance: f64,
/// Maximum relative difference, expressed as a percentage.
pub relative_tolerance_percent: f64,
/// Allowed difference in decimal scale (digits after the point).
pub scale_tolerance: u8,
}
/// Domain-level switches for how strictly precision must be preserved.
#[derive(Debug, Clone)]
pub struct BusinessRuleConfig {
pub financial_precision_required: bool,
pub inventory_precision_required: bool,
pub statistical_precision_required: bool,
}
/// COBOL storage classes used to look up precision tolerances.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum CobolFieldType {
/// DISPLAY (character) storage, including unsigned zoned decimals.
Display,
/// COMP-3 packed decimal.
Comp3,
/// Binary storage (COMP integers and floats).
Binary,
/// Signed zoned decimal.
Zoned,
}
/// Full outcome of a comprehensive fidelity validation for one record.
#[derive(Debug)]
pub struct FidelityResult {
pub status: FidelityStatus,
pub integrity_metrics: IntegrityMetrics,
pub field_results: Vec<FieldFidelityResult>,
pub performance_impact: Option<PerformanceImpact>,
}
/// Overall verdict for a record's round trip.
#[derive(Debug)]
pub enum FidelityStatus {
/// Byte-for-byte identical with all sub-checks passing.
Perfect,
/// Not byte-identical, but every deviation is within configured tolerance.
WithinTolerance {
deviation_details: DeviationAnalysis,
},
/// At least one sub-check failed outright.
Failed {
failure_type: FidelityFailureType,
error_details: String,
},
}
/// Whole-record integrity evidence: hashes plus per-byte differences.
#[derive(Debug)]
pub struct IntegrityMetrics {
/// Hex SHA-256 of the original record bytes.
pub original_hash: String,
/// Hex SHA-256 of the re-encoded record bytes.
pub round_trip_hash: String,
pub byte_differences: Vec<ByteDifference>,
pub total_bytes_different: usize,
}
/// One byte position where original and round-trip buffers disagree.
#[derive(Debug)]
pub struct ByteDifference {
pub offset: usize,
pub original_byte: u8,
pub round_trip_byte: u8,
/// Name of the schema field covering this offset, when one matches.
pub field_context: Option<String>,
}
/// Per-field round-trip comparison result.
#[derive(Debug)]
pub struct FieldFidelityResult {
pub field_name: String,
/// Raw bytes of this field sliced from the original record.
pub original_data: Vec<u8>,
/// Raw bytes of this field sliced from the re-encoded record.
pub round_trip_data: Vec<u8>,
pub fidelity_status: FieldFidelityStatus,
pub precision_analysis: Option<PrecisionAnalysis>,
}
/// Verdict for a single field's round trip.
#[derive(Debug)]
pub enum FieldFidelityStatus {
Perfect,
WithinTolerance(PrecisionAnalysis),
Failed(PrecisionAnalysis),
}
/// Numeric drift measurements for one field.
#[derive(Debug, Clone)]
pub struct PrecisionAnalysis {
pub original_value: f64,
pub round_trip_value: f64,
pub absolute_difference: f64,
pub relative_difference_percent: f64,
/// Order of magnitude of the precision loss (0 = none observed).
pub precision_loss_magnitude: u32,
pub business_impact: BusinessImpact,
}
/// Business severity of an observed deviation, from none to critical.
#[derive(Debug, Clone)]
pub enum BusinessImpact {
None,
Negligible,
Minor,
Significant,
Critical,
}
/// Aggregated view of all tolerated deviations in a record.
#[derive(Debug)]
pub struct DeviationAnalysis {
pub field_deviations: Vec<FieldDeviation>,
pub total_deviation_score: f64,
pub impact_assessment: BusinessImpact,
}
/// One field's deviation, classified by kind and severity.
#[derive(Debug)]
pub struct FieldDeviation {
pub field_name: String,
pub deviation_type: DeviationType,
pub severity: DeviationSeverity,
}
/// Kinds of deviation a round trip can introduce.
#[derive(Debug)]
pub enum DeviationType {
PrecisionLoss,
ScaleMismatch,
RoundingError,
TruncationError,
EncodingMismatch,
}
/// Severity ladder for a single deviation.
#[derive(Debug)]
pub enum DeviationSeverity {
Low,
Medium,
High,
Critical,
}
/// Categories of hard validation failure.
#[derive(Debug)]
pub enum FidelityFailureType {
EncodingError,
DecodingError,
PrecisionViolation,
IntegrityViolation,
BusinessRuleViolation,
}
/// Cost of running validation itself, for throughput accounting.
#[derive(Debug)]
pub struct PerformanceImpact {
pub validation_time_ms: u64,
pub memory_overhead_bytes: usize,
pub throughput_impact_percent: f64,
}
/// Result of the whole-record lossless (byte-identity) check.
#[derive(Debug)]
pub struct LosslessValidationResult {
pub is_lossless: bool,
pub hash_match: bool,
pub byte_differences: Vec<ByteDifference>,
}
/// Result of the per-field integrity pass.
#[derive(Debug)]
pub struct FieldIntegrityResult {
pub field_results: Vec<FieldFidelityResult>,
/// False if any field failed outright (tolerated deviations still pass).
pub overall_integrity: bool,
}
/// Result of the numeric-precision pass over all numeric fields.
#[derive(Debug)]
pub struct PrecisionValidationResult {
pub precision_results: Vec<PrecisionAnalysis>,
pub overall_precision: bool,
}
/// Result of the structural/format consistency check.
#[derive(Debug)]
pub struct FormatConsistencyResult {
pub is_consistent: bool,
pub format_issues: Vec<String>,
}
/// Tally of synthetic edge-case payloads that survived a round trip.
#[derive(Debug)]
pub struct EdgeCaseValidationResult {
pub edge_cases_passed: usize,
pub edge_cases_failed: usize,
pub failure_details: Vec<String>,
}
impl BinaryFidelityValidator {
/// Create a validator for `schema` using the supplied codec options and
/// default precision tolerances.
#[inline]
#[allow(clippy::must_use_candidate)]
pub fn new(schema: Schema, options: CodecOptions) -> Self {
Self {
schema,
decode_options: options.decode_options,
encode_options: options.encode_options,
precision_config: PrecisionConfig::default(),
scratch_buffers: ScratchBuffers::new(),
}
}
/// Builder-style override of the precision configuration.
#[inline]
#[must_use = "Preserve the updated precision configuration"]
pub fn with_precision_config(mut self, config: PrecisionConfig) -> Self {
self.precision_config = config;
self
}
/// Run the full validation pipeline on one record: decode, re-encode,
/// then compare at the byte, field, precision, and format levels.
///
/// Edge-case validation also runs, but its result currently does not feed
/// into the overall status (note the `_edge_case_result` binding).
///
/// # Errors
/// Returns the underlying codec error if the initial decode or the
/// re-encode fails.
#[inline]
#[must_use = "Handle the Result or propagate the error"]
pub fn validate_comprehensive_fidelity(
&mut self,
original_data: &[u8],
) -> Result<FidelityResult, Box<dyn std::error::Error>> {
// Time the whole pipeline so PerformanceImpact reflects total cost.
let start_time = std::time::Instant::now();
// Scratch-buffer decode avoids reallocating per record.
let decoded_json = crate::decode_record_with_scratch(
&self.schema,
original_data,
&self.decode_options,
&mut self.scratch_buffers,
)?;
let round_trip_data = encode_record(&self.schema, &decoded_json, &self.encode_options)?;
let lossless_result = self.validate_lossless_preservation(original_data, &round_trip_data);
let field_result =
self.validate_field_level_integrity(original_data, &round_trip_data, &decoded_json);
let precision_result =
self.validate_precision_preservation(original_data, &round_trip_data, &decoded_json);
let format_result = Self::validate_format_consistency(original_data, &round_trip_data);
let _edge_case_result = self.validate_edge_case_handling(original_data, &round_trip_data);
let validation_time = start_time.elapsed();
let status = Self::determine_overall_status(
&lossless_result,
&field_result,
&precision_result,
&format_result,
);
let integrity_metrics = IntegrityMetrics {
original_hash: Self::calculate_sha256(original_data),
round_trip_hash: Self::calculate_sha256(&round_trip_data),
byte_differences: lossless_result.byte_differences,
total_bytes_different: Self::count_different_bytes(original_data, &round_trip_data),
};
let performance_impact = Some(PerformanceImpact {
// as_millis is u128; saturate rather than panic on overflow.
validation_time_ms: validation_time.as_millis().try_into().unwrap_or(u64::MAX),
memory_overhead_bytes: Self::estimate_memory_overhead(original_data),
throughput_impact_percent: Self::calculate_throughput_impact(validation_time),
});
Ok(FidelityResult {
status,
integrity_metrics,
field_results: field_result.field_results,
performance_impact,
})
}
/// Check that the round trip reproduced the input bytes exactly.
///
/// Computes SHA-256 digests of both buffers for the hash comparison and a
/// byte-by-byte diff for detailed reporting; the record counts as lossless
/// when the diff is empty.
fn validate_lossless_preservation(
    &self,
    original_data: &[u8],
    round_trip_data: &[u8],
) -> LosslessValidationResult {
    let byte_differences = self.find_byte_differences(original_data, round_trip_data);
    LosslessValidationResult {
        is_lossless: byte_differences.is_empty(),
        hash_match: Self::calculate_sha256(original_data)
            == Self::calculate_sha256(round_trip_data),
        byte_differences,
    }
}
/// Compare every schema field's byte span between the original and the
/// re-encoded record.
///
/// Overall integrity is false only when a field fails outright; fields
/// that deviate within tolerance do not break integrity.
fn validate_field_level_integrity(
&mut self,
original_data: &[u8],
round_trip_data: &[u8],
decoded_json: &JsonValue,
) -> FieldIntegrityResult {
let mut field_results = Vec::new();
let mut overall_integrity = true;
for field in &self.schema.fields {
let field_result = self.validate_single_field_integrity(
field,
original_data,
round_trip_data,
decoded_json,
);
if matches!(field_result.fidelity_status, FieldFidelityStatus::Failed(_)) {
overall_integrity = false;
}
field_results.push(field_result);
}
FieldIntegrityResult {
field_results,
overall_integrity,
}
}
fn validate_precision_preservation(
&self,
original_data: &[u8],
round_trip_data: &[u8],
decoded_json: &JsonValue,
) -> PrecisionValidationResult {
let mut precision_results = Vec::new();
let mut overall_precision = true;
for field in &self.schema.fields {
if Self::is_numeric_field(field) {
let precision_analysis = Self::analyze_field_precision(
field,
original_data,
round_trip_data,
decoded_json,
);
let within_tolerance =
self.is_within_precision_tolerance(&precision_analysis, field);
if !within_tolerance {
overall_precision = false;
}
precision_results.push(precision_analysis);
}
}
PrecisionValidationResult {
precision_results,
overall_precision,
}
}
/// Verify structural consistency between the original and round-tripped
/// record images: total length, plus the boundary-alignment and COBOL
/// format placeholder checks.
fn validate_format_consistency(
    original_data: &[u8],
    round_trip_data: &[u8],
) -> FormatConsistencyResult {
    let mut format_issues = Vec::new();
    let (orig_len, trip_len) = (original_data.len(), round_trip_data.len());
    if orig_len != trip_len {
        format_issues.push(format!(
            "Length mismatch: original={orig_len}, round_trip={trip_len}"
        ));
    }
    Self::check_field_boundary_alignment(original_data, round_trip_data, &mut format_issues);
    Self::check_cobol_format_preservation(original_data, round_trip_data, &mut format_issues);
    FormatConsistencyResult {
        is_consistent: format_issues.is_empty(),
        format_issues,
    }
}
/// Push synthetic edge-case payloads (all zeros, all 0xFF, alternating
/// bit patterns) through a decode/encode cycle and tally the outcomes.
fn validate_edge_case_handling(
    &self,
    original_data: &[u8],
    _round_trip_data: &[u8],
) -> EdgeCaseValidationResult {
    let mut result = EdgeCaseValidationResult {
        edge_cases_passed: 0,
        edge_cases_failed: 0,
        failure_details: Vec::new(),
    };
    for (case_name, case_data) in Self::generate_edge_cases(original_data) {
        if let Err(e) = self.test_edge_case(&case_data) {
            result.edge_cases_failed += 1;
            result.failure_details.push(format!("{case_name}: {e}"));
        } else {
            result.edge_cases_passed += 1;
        }
    }
    result
}
/// Hex-encoded SHA-256 digest of `data` (64 lowercase hex characters).
fn calculate_sha256(data: &[u8]) -> String {
let mut hasher = Sha256::new();
hasher.update(data);
format!("{:x}", hasher.finalize())
}
/// Report every byte position where the two buffers disagree.
///
/// In the overlapping region only differing bytes are recorded. Every
/// position past the end of the shorter buffer is recorded unconditionally,
/// with `0` standing in for the missing byte, so a length mismatch always
/// produces differences.
fn find_byte_differences(&self, original: &[u8], round_trip: &[u8]) -> Vec<ByteDifference> {
    let min_len = original.len().min(round_trip.len());
    let max_len = original.len().max(round_trip.len());
    let mut differences = Vec::new();
    for offset in 0..max_len {
        let original_byte = original.get(offset).copied().unwrap_or(0);
        let round_trip_byte = round_trip.get(offset).copied().unwrap_or(0);
        if offset >= min_len || original_byte != round_trip_byte {
            differences.push(ByteDifference {
                offset,
                original_byte,
                round_trip_byte,
                field_context: self.get_field_context_for_offset(offset),
            });
        }
    }
    differences
}
/// Map a byte offset to the name of the schema field whose span contains
/// it, walking fields in declaration order as a contiguous layout.
fn get_field_context_for_offset(&self, offset: usize) -> Option<String> {
    let mut field_start = 0usize;
    self.schema.fields.iter().find_map(|field| {
        let field_end = field_start + Self::get_field_size(field);
        let hit = (field_start..field_end).contains(&offset);
        field_start = field_end;
        if hit { Some(field.name.clone()) } else { None }
    })
}
fn get_field_size(field: &Field) -> usize {
match &field.kind {
FieldKind::Group => {
field.len as usize
}
_ => field.len as usize,
}
}
/// Compare one field's raw bytes between the original and round-trip
/// buffers and classify the result.
///
/// Byte-identical fields are `Perfect`. Differing numeric fields are run
/// through precision analysis and judged against the configured tolerances;
/// differing non-numeric fields fail outright with a zeroed analysis.
///
/// Fix: the result's `precision_analysis` field was previously hard-coded
/// to `None` even when an analysis had been computed; it is now populated.
fn validate_single_field_integrity(
    &self,
    field: &Field,
    original_data: &[u8],
    round_trip_data: &[u8],
    decoded_json: &JsonValue,
) -> FieldFidelityResult {
    let field_offset = Self::get_field_offset(field);
    let field_size = Self::get_field_size(field);
    // Slice the field's span from each buffer; a span past the end of the
    // buffer (truncated record) yields an empty vector.
    let slice_field = |data: &[u8]| -> Vec<u8> {
        data.get(field_offset..field_offset + field_size)
            .map_or_else(Vec::new, <[u8]>::to_vec)
    };
    let original_field_data = slice_field(original_data);
    let round_trip_field_data = slice_field(round_trip_data);
    let mut precision_analysis = None;
    let fidelity_status = if original_field_data == round_trip_field_data {
        FieldFidelityStatus::Perfect
    } else if Self::is_numeric_field(field) {
        let analysis =
            Self::analyze_field_precision(field, original_data, round_trip_data, decoded_json);
        precision_analysis = Some(analysis.clone());
        if self.is_within_precision_tolerance(&analysis, field) {
            FieldFidelityStatus::WithinTolerance(analysis)
        } else {
            FieldFidelityStatus::Failed(analysis)
        }
    } else {
        // Non-numeric mismatch: precision metrics do not apply, so report
        // a zeroed analysis with a Minor business impact.
        FieldFidelityStatus::Failed(PrecisionAnalysis {
            original_value: 0.0,
            round_trip_value: 0.0,
            absolute_difference: 0.0,
            relative_difference_percent: 0.0,
            precision_loss_magnitude: 0,
            business_impact: BusinessImpact::Minor,
        })
    };
    FieldFidelityResult {
        field_name: field.name.clone(),
        original_data: original_field_data,
        round_trip_data: round_trip_field_data,
        fidelity_status,
        precision_analysis,
    }
}
/// Byte offset of the field within the record, as recorded by the parser.
fn get_field_offset(field: &Field) -> usize {
field.offset as usize
}
/// Whether the field stores a numeric value subject to precision checks.
/// Edited numerics are excluded: their stored form is display text.
fn is_numeric_field(field: &Field) -> bool {
    matches!(
        field.kind,
        FieldKind::ZonedDecimal { .. }
            | FieldKind::PackedDecimal { .. }
            | FieldKind::BinaryInt { .. }
            | FieldKind::FloatSingle
            | FieldKind::FloatDouble
    )
}
/// Analyze numeric precision drift for a single field.
///
/// NOTE(review): this is a placeholder — it ignores all four arguments and
/// returns a fixed analysis (123.45 / 123.45, zero drift, no impact), so
/// every numeric field currently appears precision-perfect. TODO: decode
/// the field from both buffers and compute the real differences.
fn analyze_field_precision(
_field: &Field,
_original_data: &[u8],
_round_trip_data: &[u8],
_decoded_json: &JsonValue,
) -> PrecisionAnalysis {
PrecisionAnalysis {
original_value: 123.45, round_trip_value: 123.45, absolute_difference: 0.0,
relative_difference_percent: 0.0,
precision_loss_magnitude: 0,
business_impact: BusinessImpact::None,
}
}
/// Whether a precision analysis falls inside the tolerance configured for
/// the field's COBOL storage class.
///
/// A storage class with no configured tolerance fails closed (returns
/// `false`), so missing configuration cannot mask precision loss.
fn is_within_precision_tolerance(&self, analysis: &PrecisionAnalysis, field: &Field) -> bool {
    let field_type = Self::get_cobol_field_type(field);
    self.precision_config
        .tolerance_settings
        .get(&field_type)
        .map_or(false, |tolerance| {
            analysis.absolute_difference <= tolerance.absolute_tolerance
                && analysis.relative_difference_percent <= tolerance.relative_tolerance_percent
        })
}
/// Map a schema field kind onto the COBOL storage class used to look up
/// precision tolerances.
fn get_cobol_field_type(field: &Field) -> CobolFieldType {
    match &field.kind {
        FieldKind::PackedDecimal { .. } => CobolFieldType::Comp3,
        FieldKind::BinaryInt { .. } | FieldKind::FloatSingle | FieldKind::FloatDouble => {
            CobolFieldType::Binary
        }
        // Only signed zoned decimals use the dedicated Zoned tolerance;
        // unsigned ones share the strict Display tolerance.
        FieldKind::ZonedDecimal { signed: true, .. } => CobolFieldType::Zoned,
        FieldKind::ZonedDecimal { signed: false, .. }
        | FieldKind::Alphanum { .. }
        | FieldKind::Group
        | FieldKind::Condition { .. }
        | FieldKind::Renames { .. }
        | FieldKind::EditedNumeric { .. } => CobolFieldType::Display,
    }
}
/// Placeholder for field-boundary alignment checks; records no issues yet.
/// TODO: verify field spans against schema offsets in both buffers.
fn check_field_boundary_alignment(
_original: &[u8],
_round_trip: &[u8],
_issues: &mut Vec<String>,
) {
}
/// Placeholder for COBOL-specific format checks (e.g. sign nibbles,
/// padding conventions); records no issues yet.
fn check_cobol_format_preservation(
_original: &[u8],
_round_trip: &[u8],
_issues: &mut Vec<String>,
) {
}
/// Build named synthetic payloads matching the original record's length:
/// all zero bytes, all 0xFF bytes, and an alternating 0xAA/0x55 pattern.
fn generate_edge_cases(original_data: &[u8]) -> Vec<(String, Vec<u8>)> {
    let len = original_data.len();
    let alternating: Vec<u8> = (0..len)
        .map(|i| if i % 2 == 0 { 0xAA } else { 0x55 })
        .collect();
    vec![
        ("zeros".to_string(), vec![0u8; len]),
        ("max_values".to_string(), vec![0xFFu8; len]),
        ("alternating".to_string(), alternating),
    ]
}
/// Round-trip one synthetic payload through decode then encode; an error
/// from either codec step fails the edge case.
fn test_edge_case(&self, case_data: &[u8]) -> Result<(), Box<dyn std::error::Error>> {
let decoded = decode_record(&self.schema, case_data, &self.decode_options)?;
let _encoded = encode_record(&self.schema, &decoded, &self.encode_options)?;
Ok(())
}
/// Count differing byte positions between the two buffers; any length
/// difference contributes one mismatch per extra byte.
fn count_different_bytes(original: &[u8], round_trip: &[u8]) -> usize {
    let overlap_mismatches = original
        .iter()
        .zip(round_trip.iter())
        .filter(|(a, b)| a != b)
        .count();
    overlap_mismatches + original.len().abs_diff(round_trip.len())
}
/// Fold the four sub-check results into one overall verdict.
///
/// Priority order matters: all-pass → `Perfect`; format inconsistency →
/// `Failed(IntegrityViolation)`; precision failure →
/// `Failed(PrecisionViolation)`; anything else → `WithinTolerance`.
///
/// NOTE(review): a lossless or field-integrity failure alone (with format
/// and precision passing) lands in `WithinTolerance`, and the deviation
/// score is a fixed 0.1 — confirm this is the intended policy.
fn determine_overall_status(
lossless_result: &LosslessValidationResult,
field_result: &FieldIntegrityResult,
precision_result: &PrecisionValidationResult,
format_result: &FormatConsistencyResult,
) -> FidelityStatus {
if lossless_result.is_lossless
&& field_result.overall_integrity
&& precision_result.overall_precision
&& format_result.is_consistent
{
FidelityStatus::Perfect
} else if !format_result.is_consistent {
FidelityStatus::Failed {
failure_type: FidelityFailureType::IntegrityViolation,
error_details: format!(
"Format consistency failed: {:?}",
format_result.format_issues
),
}
} else if !precision_result.overall_precision {
FidelityStatus::Failed {
failure_type: FidelityFailureType::PrecisionViolation,
error_details: "Precision validation failed".to_string(),
}
} else {
// Remaining deviations are tolerated; summarize them per field.
let field_deviations = Self::calculate_field_deviations(field_result);
let deviation_details = DeviationAnalysis {
field_deviations,
total_deviation_score: 0.1, impact_assessment: BusinessImpact::Negligible,
};
FidelityStatus::WithinTolerance { deviation_details }
}
}
/// Derive a deviation entry for every field that was not byte-perfect.
/// All deviations are currently reported as low-severity precision loss.
fn calculate_field_deviations(field_result: &FieldIntegrityResult) -> Vec<FieldDeviation> {
    field_result
        .field_results
        .iter()
        .filter(|result| !matches!(result.fidelity_status, FieldFidelityStatus::Perfect))
        .map(|result| FieldDeviation {
            field_name: result.field_name.clone(),
            deviation_type: DeviationType::PrecisionLoss,
            severity: DeviationSeverity::Low,
        })
        .collect()
}
/// Rough upper bound on the extra memory one validation needs (decoded
/// JSON + re-encoded buffer + diff bookkeeping), approximated as 3x the
/// record size. Uses `saturating_mul` so a pathologically large length
/// saturates instead of overflowing (which would panic in debug builds).
fn estimate_memory_overhead(original_data: &[u8]) -> usize {
    original_data.len().saturating_mul(3)
}
/// Estimate validation overhead as a percentage of a nominal one-second
/// processing budget (1 s of validation time == 100%).
///
/// The previous form converted seconds to milliseconds and immediately
/// divided by 1000 again; this computes the same quantity directly.
fn calculate_throughput_impact(validation_time: std::time::Duration) -> f64 {
    validation_time.as_secs_f64() * 100.0
}
}
impl Default for PrecisionConfig {
    /// Default tolerances: exact match required for DISPLAY data, then
    /// progressively looser bounds for COMP-3, zoned, and binary storage.
    /// Financial and inventory precision rules are enabled by default;
    /// statistical precision is not.
    #[inline]
    fn default() -> Self {
        let tolerance_settings = HashMap::from([
            (
                CobolFieldType::Display,
                PrecisionTolerance {
                    absolute_tolerance: 0.0,
                    relative_tolerance_percent: 0.0,
                    scale_tolerance: 0,
                },
            ),
            (
                CobolFieldType::Comp3,
                PrecisionTolerance {
                    absolute_tolerance: 0.01,
                    relative_tolerance_percent: 0.001,
                    scale_tolerance: 2,
                },
            ),
            (
                CobolFieldType::Binary,
                PrecisionTolerance {
                    absolute_tolerance: 1.0,
                    relative_tolerance_percent: 0.01,
                    scale_tolerance: 0,
                },
            ),
            (
                CobolFieldType::Zoned,
                PrecisionTolerance {
                    absolute_tolerance: 0.1,
                    relative_tolerance_percent: 0.01,
                    scale_tolerance: 1,
                },
            ),
        ]);
        Self {
            tolerance_settings,
            business_rules: BusinessRuleConfig {
                financial_precision_required: true,
                inventory_precision_required: true,
                statistical_precision_required: false,
            },
        }
    }
}
impl Default for CodecOptions {
/// Default decode and encode options from the codec crate.
#[inline]
fn default() -> Self {
Self {
decode_options: DecodeOptions::default(),
encode_options: EncodeOptions::default(),
}
}
}
/// Convenience constructors and batch helpers for fidelity validation.
pub mod utils {
use super::{
BinaryFidelityValidator, CobolFieldType, CodecOptions, FidelityResult, FidelityStatus,
PrecisionConfig, Schema,
};
/// Validator with default codec options and default precision tolerances.
#[inline]
#[must_use = "Use the returned validator to execute fidelity checks"]
pub fn create_standard_validator(schema: Schema) -> BinaryFidelityValidator {
BinaryFidelityValidator::new(schema, CodecOptions::default())
}
/// Validator tuned for financial data: tightens the COMP-3 tolerances by
/// an order of magnitude and requires financial precision.
#[inline]
#[must_use = "Use the returned validator to execute fidelity checks"]
pub fn create_financial_validator(schema: Schema) -> BinaryFidelityValidator {
let mut precision_config = PrecisionConfig::default();
precision_config.business_rules.financial_precision_required = true;
if let Some(comp3_tolerance) = precision_config
.tolerance_settings
.get_mut(&CobolFieldType::Comp3)
{
comp3_tolerance.absolute_tolerance = 0.001;
comp3_tolerance.relative_tolerance_percent = 0.0001;
}
BinaryFidelityValidator::new(schema, CodecOptions::default())
.with_precision_config(precision_config)
}
/// Validate a batch of records sequentially, stopping at the first codec
/// error.
///
/// # Errors
/// Propagates the first decode/encode error encountered.
#[inline]
#[must_use = "Handle the Result or propagate the error"]
pub fn validate_batch_fidelity(
validator: &mut BinaryFidelityValidator,
records: &[Vec<u8>],
) -> Result<Vec<FidelityResult>, Box<dyn std::error::Error>> {
let mut results = Vec::with_capacity(records.len());
for record in records {
let result = validator.validate_comprehensive_fidelity(record)?;
results.push(result);
}
Ok(results)
}
/// Summarize a batch of results into counts, a perfect-record rate, and
/// the mean validation time (integer milliseconds; 0 for an empty batch).
#[inline]
#[allow(clippy::cast_precision_loss)]
#[must_use = "Review batch metrics to interpret fidelity outcomes"]
pub fn calculate_batch_metrics(results: &[FidelityResult]) -> BatchFidelityMetrics {
let total_records = results.len();
let perfect_records = results
.iter()
.filter(|r| matches!(r.status, FidelityStatus::Perfect))
.count();
let within_tolerance_records = results
.iter()
.filter(|r| matches!(r.status, FidelityStatus::WithinTolerance { .. }))
.count();
let failed_records = results
.iter()
.filter(|r| matches!(r.status, FidelityStatus::Failed { .. }))
.count();
// Records without performance data simply don't contribute time.
let total_validation_time: u64 = results
.iter()
.filter_map(|r| r.performance_impact.as_ref())
.map(|p| p.validation_time_ms)
.sum();
let total_records_u64 = u64::try_from(total_records).unwrap_or(u64::MAX);
let average_validation_time = if total_records_u64 == 0 {
0
} else {
total_validation_time / total_records_u64
};
BatchFidelityMetrics {
total_records,
perfect_records,
within_tolerance_records,
failed_records,
perfect_rate: if total_records == 0 {
0.0
} else {
perfect_records as f64 / total_records as f64
},
average_validation_time_ms: average_validation_time,
}
}
/// Aggregate fidelity statistics for a batch of validated records.
#[derive(Debug)]
pub struct BatchFidelityMetrics {
pub total_records: usize,
pub perfect_records: usize,
pub within_tolerance_records: usize,
pub failed_records: usize,
/// Fraction of records with `FidelityStatus::Perfect` (0.0 when empty).
pub perfect_rate: f64,
pub average_validation_time_ms: u64,
}
}
#[cfg(test)]
#[allow(clippy::expect_used)]
#[allow(clippy::unwrap_used)]
mod tests {
use super::*;
use anyhow::Result;
use copybook_core::parse_copybook;
// Validator construction from a minimal one-field copybook.
#[test]
fn test_fidelity_validator_creation() -> Result<()> {
let copybook = "01 TEST-RECORD.\n 05 TEST-FIELD PIC X(10).";
let schema = parse_copybook(copybook)?;
let validator = BinaryFidelityValidator::new(schema, CodecOptions::default());
assert!(!validator.schema.fields.is_empty());
Ok(())
}
// Hashing is deterministic and produces 64 hex characters.
#[test]
fn test_sha256_calculation() {
let test_data = b"hello world";
let hash1 = BinaryFidelityValidator::calculate_sha256(test_data);
let hash2 = BinaryFidelityValidator::calculate_sha256(test_data);
assert_eq!(hash1, hash2);
assert_eq!(hash1.len(), 64); }
// A single changed byte yields exactly one reported difference.
#[test]
fn test_byte_differences() -> Result<()> {
let copybook = "01 TEST-RECORD.\n 05 TEST-FIELD PIC X(10).";
let schema = parse_copybook(copybook)?;
let validator = BinaryFidelityValidator::new(schema, CodecOptions::default());
let original = b"hello world";
let modified = b"hello_world";
let differences = validator.find_byte_differences(original, modified);
assert_eq!(differences.len(), 1);
assert_eq!(differences[0].offset, 5);
assert_eq!(differences[0].original_byte, b' ');
assert_eq!(differences[0].round_trip_byte, b'_');
Ok(())
}
// Default config covers the storage classes and enables financial rules.
#[test]
fn test_precision_tolerance_defaults() {
let config = PrecisionConfig::default();
assert!(
config
.tolerance_settings
.contains_key(&CobolFieldType::Display)
);
assert!(
config
.tolerance_settings
.contains_key(&CobolFieldType::Comp3)
);
assert!(config.business_rules.financial_precision_required);
}
// The utils constructor produces a usable validator.
#[test]
fn test_utils_standard_validator() -> Result<()> {
let copybook = "01 TEST-RECORD.\n 05 TEST-FIELD PIC X(10).";
let schema = parse_copybook(copybook)?;
let validator = utils::create_standard_validator(schema);
assert!(!validator.schema.fields.is_empty());
Ok(())
}
// Parsed fields report a positive byte size.
#[test]
fn test_field_size_calculation() -> Result<()> {
let copybook = "01 TEST-RECORD.\n 05 TEST-FIELD PIC X(10).";
let schema = parse_copybook(copybook)?;
let validator = BinaryFidelityValidator::new(schema, CodecOptions::default());
if let Some(field) = validator.schema.fields.first() {
let size = BinaryFidelityValidator::get_field_size(field);
assert!(size > 0);
}
Ok(())
}
}