use crate::core::types::{TrackingError, TrackingResult};
use std::fmt;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::{Duration, Instant};

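/// Errors raised by the export pipeline, with enough context (shard, resource,
/// validation target, stage, and so on) to diagnose and report each failure.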
#[derive(Debug, Clone)]
pub enum ExportError {
    ParallelProcessingError {
        shard_index: usize,
        thread_id: String,
        error_message: String,
        partial_results: Option<Vec<u8>>,
    },
    ResourceLimitExceeded {
        resource_type: ResourceType,
        limit: u64,
        actual: u64,
        suggested_action: String,
    },
    DataQualityError {
        validation_type: ValidationType,
        expected: String,
        actual: String,
        affected_records: usize,
    },
    PerformanceThresholdExceeded {
        metric: PerformanceMetric,
        threshold: f64,
        actual: f64,
        stage: ExportStage,
    },
    ConcurrencyConflict {
        operation: String,
        conflict_type: ConflictType,
        retry_count: usize,
    },
    DataCorruption {
        corruption_type: CorruptionType,
        affected_data: String,
        recovery_possible: bool,
    },
    InsufficientResources {
        required_memory: usize,
        available_memory: usize,
        required_disk: usize,
        available_disk: usize,
    },
    ExportInterrupted {
        stage: ExportStage,
        progress_percentage: f64,
        partial_output_path: Option<String>,
    },
}

#[derive(Debug, Clone, PartialEq)]
pub enum ResourceType {
    Memory,
    Disk,
    CPU,
    FileHandles,
    ThreadPool,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ValidationType {
    JsonStructure,
    DataIntegrity,
    AllocationCount,
    FileSize,
    Encoding,
}

#[derive(Debug, Clone, PartialEq)]
pub enum PerformanceMetric {
    ExportTime,
    MemoryUsage,
    ThroughputRate,
    ErrorRate,
    ResponseTime,
}

#[derive(Debug, Clone, PartialEq)]
pub enum ExportStage {
    Initialization,
    DataLocalization,
    ParallelProcessing,
    Writing,
    Validation,
    Finalization,
}

#[derive(Debug, Clone, PartialEq)]
pub enum ConflictType {
    LockContention,
    DataRace,
    ResourceContention,
    ThreadPoolExhaustion,
}

#[derive(Debug, Clone, PartialEq)]
pub enum CorruptionType {
    IncompleteData,
    InvalidFormat,
    ChecksumMismatch,
    StructuralDamage,
}

impl fmt::Display for ExportError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ExportError::ParallelProcessingError {
                shard_index,
                thread_id,
                error_message,
                ..
            } => {
                write!(f, "parallel processing error - shard {shard_index} (thread {thread_id}): {error_message}")
            }
            ExportError::ResourceLimitExceeded {
                resource_type,
                limit,
                actual,
                suggested_action,
            } => {
                write!(f, "resource limit exceeded - {resource_type:?}: limit {limit}, actual {actual}. suggested action: {suggested_action}")
            }
            ExportError::DataQualityError {
                validation_type,
                expected,
                actual,
                affected_records,
            } => {
                write!(f, "data quality error - {validation_type:?}: expected {expected}, actual {actual}, affected records {affected_records}")
            }
            ExportError::PerformanceThresholdExceeded {
                metric,
                threshold,
                actual,
                stage,
            } => {
                write!(f, "performance threshold exceeded - {metric:?} in {stage:?}: threshold {threshold}, actual {actual}")
            }
            ExportError::ConcurrencyConflict {
                operation,
                conflict_type,
                retry_count,
            } => {
                write!(f, "concurrency conflict - operation {operation}, type {conflict_type:?}, retry count {retry_count}")
            }
            ExportError::DataCorruption {
                corruption_type,
                affected_data,
                recovery_possible,
            } => {
                write!(f, "data corruption - type {corruption_type:?}, affected data {affected_data}, recovery possible: {recovery_possible}")
            }
            ExportError::InsufficientResources {
                required_memory,
                available_memory,
                required_disk,
                available_disk,
            } => {
                write!(f, "insufficient resources - required memory {required_memory}MB, available {available_memory}MB, required disk {required_disk}MB, available {available_disk}MB")
            }
            ExportError::ExportInterrupted {
                stage,
                progress_percentage,
                partial_output_path,
            } => {
                write!(f, "export interrupted - stage {stage:?}, progress {progress_percentage:.1}%, partial output: {partial_output_path:?}")
            }
        }
    }
}

impl std::error::Error for ExportError {}

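/// Errors raised while validating exported files (access, parsing, timeouts,
/// cancellation, configuration, or internal faults).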
#[derive(Debug, Clone)]
pub enum ValidationError {
    FileAccessError {
        file_path: String,
        error: String,
    },
    JsonParsingError {
        file_path: String,
        error: String,
    },
    TimeoutError {
        file_path: String,
        timeout_duration: std::time::Duration,
    },
    CancelledError {
        file_path: String,
        reason: String,
    },
    ConfigurationError {
        error: String,
    },
    InternalError {
        error: String,
    },
}

impl fmt::Display for ValidationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ValidationError::FileAccessError { file_path, error } => {
                write!(f, "file access error for {file_path}: {error}")
            }
            ValidationError::JsonParsingError { file_path, error } => {
                write!(f, "JSON parsing error in {file_path}: {error}")
            }
            ValidationError::TimeoutError {
                file_path,
                timeout_duration,
            } => {
                write!(
                    f,
                    "validation timeout for {file_path} after {timeout_duration:?}",
                )
            }
            ValidationError::CancelledError { file_path, reason } => {
                write!(f, "validation cancelled for {file_path}: {reason}")
            }
            ValidationError::ConfigurationError { error } => {
                write!(f, "validation configuration error: {error}")
            }
            ValidationError::InternalError { error } => {
                write!(f, "internal validation error: {error}")
            }
        }
    }
}

impl std::error::Error for ValidationError {}

impl From<ValidationError> for TrackingError {
    fn from(error: ValidationError) -> Self {
        TrackingError::ExportError(error.to_string())
    }
}

impl From<ExportError> for TrackingError {
    fn from(error: ExportError) -> Self {
        TrackingError::ExportError(error.to_string())
    }
}

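/// Thread-safe logger that records export operations, performance metrics, and
/// per-category error counts, and can summarize them as a [`PerformanceReport`].
///
/// A minimal usage sketch (illustrative only; the exact import path depends on
/// where this module lives in the crate):
///
/// ```ignore
/// let logger = PerformanceLogger::new(LogLevel::Info);
/// logger.log_operation_start("export", "writing snapshot");
/// logger.log_operation_success("export", std::time::Duration::from_millis(42), "done");
/// let report = logger.generate_performance_report();
/// assert_eq!(report.successful_operations, 1);
/// report.print_detailed_report();
/// ```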
#[derive(Debug)]
pub struct PerformanceLogger {
    log_level: LogLevel,
    metrics_collector: Arc<MetricsCollector>,
    error_stats: Arc<ErrorStatistics>,
    start_time: Instant,
}

#[derive(Debug, Clone, PartialEq)]
pub enum LogLevel {
    Error,
    Warn,
    Info,
    Debug,
    Trace,
}

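/// Lock-free counters for operation outcomes, processing time, and memory usage.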
#[derive(Debug)]
pub struct MetricsCollector {
    total_operations: AtomicUsize,
    successful_operations: AtomicUsize,
    failed_operations: AtomicUsize,
    total_processing_time_ms: AtomicUsize,
    peak_memory_usage: AtomicUsize,
    current_memory_usage: AtomicUsize,
}

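/// Per-variant counters for every [`ExportError`] observed by the logger.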
#[derive(Debug)]
pub struct ErrorStatistics {
    parallel_processing_errors: AtomicUsize,
    resource_limit_errors: AtomicUsize,
    data_quality_errors: AtomicUsize,
    performance_threshold_errors: AtomicUsize,
    concurrency_conflict_errors: AtomicUsize,
    data_corruption_errors: AtomicUsize,
    insufficient_resources_errors: AtomicUsize,
    export_interrupted_errors: AtomicUsize,
}

impl PerformanceLogger {
    pub fn new(log_level: LogLevel) -> Self {
        Self {
            log_level,
            metrics_collector: Arc::new(MetricsCollector::new()),
            error_stats: Arc::new(ErrorStatistics::new()),
            start_time: Instant::now(),
        }
    }

    pub fn log_operation_start(&self, operation: &str, details: &str) {
        if self.should_log(LogLevel::Info) {
            tracing::info!(
                "🚀 [{}] start operation: {} - {}",
                self.format_timestamp(),
                operation,
                details
            );
        }
        self.metrics_collector
            .total_operations
            .fetch_add(1, Ordering::Relaxed);
    }

    pub fn log_operation_success(&self, operation: &str, duration: Duration, details: &str) {
        if self.should_log(LogLevel::Info) {
            tracing::info!(
                "✅ [{}] operation success: {} ({:?}) - {}",
                self.format_timestamp(),
                operation,
                duration,
                details
            );
        }
        self.metrics_collector
            .successful_operations
            .fetch_add(1, Ordering::Relaxed);
        self.metrics_collector
            .total_processing_time_ms
            .fetch_add(duration.as_millis() as usize, Ordering::Relaxed);
    }

    pub fn log_operation_failure(&self, operation: &str, error: &ExportError, duration: Duration) {
        if self.should_log(LogLevel::Error) {
            tracing::error!(
                "❌ [{}] operation failure: {} ({:?}) - {}",
                self.format_timestamp(),
                operation,
                duration,
                error
            );
        }
        self.metrics_collector
            .failed_operations
            .fetch_add(1, Ordering::Relaxed);
        self.update_error_statistics(error);
    }

    pub fn log_performance_metric(
        &self,
        metric: PerformanceMetric,
        value: f64,
        threshold: Option<f64>,
    ) {
        if self.should_log(LogLevel::Debug) {
            let threshold_info = if let Some(t) = threshold {
                format!(" (threshold: {t})")
            } else {
                String::new()
            };
            tracing::debug!(
                "📊 [{}] performance metric - {metric:?}: {value}{threshold_info}",
                self.format_timestamp()
            );
        }

        if let Some(threshold) = threshold {
            if value > threshold {
                let error = ExportError::PerformanceThresholdExceeded {
                    metric,
                    threshold,
                    actual: value,
                    stage: ExportStage::ParallelProcessing,
                };
                self.log_warning(&format!("performance threshold exceeded: {error}"));
                self.update_error_statistics(&error);
            }
        }
    }

    pub fn log_memory_usage(&self, current_usage: usize, peak_usage: usize) {
        if self.should_log(LogLevel::Debug) {
            tracing::debug!(
                "💾 [{}] memory usage - current: {:.2}MB, peak: {:.2}MB",
                self.format_timestamp(),
                current_usage as f64 / 1024.0 / 1024.0,
                peak_usage as f64 / 1024.0 / 1024.0
            );
        }

        self.metrics_collector
            .current_memory_usage
            .store(current_usage, Ordering::Relaxed);

        let current_peak = self
            .metrics_collector
            .peak_memory_usage
            .load(Ordering::Relaxed);
        if peak_usage > current_peak {
            self.metrics_collector
                .peak_memory_usage
                .store(peak_usage, Ordering::Relaxed);
        }
    }

    pub fn log_warning(&self, message: &str) {
        if self.should_log(LogLevel::Warn) {
            tracing::warn!("⚠️ [{}] warning: {}", self.format_timestamp(), message);
        }
    }

    pub fn log_debug(&self, message: &str) {
        if self.should_log(LogLevel::Debug) {
            tracing::debug!("🔍 [{}] debug: {}", self.format_timestamp(), message);
        }
    }

    pub fn log_error(&self, error: &ExportError) {
        if self.should_log(LogLevel::Error) {
            tracing::error!("💥 [{}] error: {}", self.format_timestamp(), error);
        }
        self.update_error_statistics(error);
    }

    pub fn generate_performance_report(&self) -> PerformanceReport {
        let total_time = self.start_time.elapsed();
        let total_ops = self
            .metrics_collector
            .total_operations
            .load(Ordering::Relaxed);
        let successful_ops = self
            .metrics_collector
            .successful_operations
            .load(Ordering::Relaxed);
        let failed_ops = self
            .metrics_collector
            .failed_operations
            .load(Ordering::Relaxed);
        let total_processing_time = self
            .metrics_collector
            .total_processing_time_ms
            .load(Ordering::Relaxed);
        let peak_memory = self
            .metrics_collector
            .peak_memory_usage
            .load(Ordering::Relaxed);
        let current_memory = self
            .metrics_collector
            .current_memory_usage
            .load(Ordering::Relaxed);

        let success_rate = if total_ops > 0 {
            (successful_ops as f64 / total_ops as f64) * 100.0
        } else {
            0.0
        };

        let avg_processing_time = if successful_ops > 0 {
            total_processing_time as f64 / successful_ops as f64
        } else {
            0.0
        };

        PerformanceReport {
            total_runtime: total_time,
            total_operations: total_ops,
            successful_operations: successful_ops,
            failed_operations: failed_ops,
            success_rate,
            average_processing_time_ms: avg_processing_time,
            peak_memory_usage_mb: peak_memory as f64 / 1024.0 / 1024.0,
            current_memory_usage_mb: current_memory as f64 / 1024.0 / 1024.0,
            error_breakdown: self.get_error_breakdown(),
        }
    }

    fn should_log(&self, level: LogLevel) -> bool {
        matches!(
            (&self.log_level, &level),
            (LogLevel::Error, LogLevel::Error)
                | (LogLevel::Warn, LogLevel::Error | LogLevel::Warn)
                | (
                    LogLevel::Info,
                    LogLevel::Error | LogLevel::Warn | LogLevel::Info
                )
                | (
                    LogLevel::Debug,
                    LogLevel::Error | LogLevel::Warn | LogLevel::Info | LogLevel::Debug
                )
                | (
                    LogLevel::Trace,
                    LogLevel::Error
                        | LogLevel::Warn
                        | LogLevel::Info
                        | LogLevel::Debug
                        | LogLevel::Trace
                )
        )
    }

    fn format_timestamp(&self) -> String {
        let elapsed = self.start_time.elapsed();
        format!("{:>8.3}s", elapsed.as_secs_f64())
    }

    fn update_error_statistics(&self, error: &ExportError) {
        match error {
            ExportError::ParallelProcessingError { .. } => {
                self.error_stats
                    .parallel_processing_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::ResourceLimitExceeded { .. } => {
                self.error_stats
                    .resource_limit_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::DataQualityError { .. } => {
                self.error_stats
                    .data_quality_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::PerformanceThresholdExceeded { .. } => {
                self.error_stats
                    .performance_threshold_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::ConcurrencyConflict { .. } => {
                self.error_stats
                    .concurrency_conflict_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::DataCorruption { .. } => {
                self.error_stats
                    .data_corruption_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::InsufficientResources { .. } => {
                self.error_stats
                    .insufficient_resources_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
            ExportError::ExportInterrupted { .. } => {
                self.error_stats
                    .export_interrupted_errors
                    .fetch_add(1, Ordering::Relaxed);
            }
        }
    }

    fn get_error_breakdown(&self) -> ErrorBreakdown {
        ErrorBreakdown {
            parallel_processing_errors: self
                .error_stats
                .parallel_processing_errors
                .load(Ordering::Relaxed),
            resource_limit_errors: self
                .error_stats
                .resource_limit_errors
                .load(Ordering::Relaxed),
            data_quality_errors: self.error_stats.data_quality_errors.load(Ordering::Relaxed),
            performance_threshold_errors: self
                .error_stats
                .performance_threshold_errors
                .load(Ordering::Relaxed),
            concurrency_conflict_errors: self
                .error_stats
                .concurrency_conflict_errors
                .load(Ordering::Relaxed),
            data_corruption_errors: self
                .error_stats
                .data_corruption_errors
                .load(Ordering::Relaxed),
            insufficient_resources_errors: self
                .error_stats
                .insufficient_resources_errors
                .load(Ordering::Relaxed),
            export_interrupted_errors: self
                .error_stats
                .export_interrupted_errors
                .load(Ordering::Relaxed),
        }
    }
}

impl MetricsCollector {
    fn new() -> Self {
        Self {
            total_operations: AtomicUsize::new(0),
            successful_operations: AtomicUsize::new(0),
            failed_operations: AtomicUsize::new(0),
            total_processing_time_ms: AtomicUsize::new(0),
            peak_memory_usage: AtomicUsize::new(0),
            current_memory_usage: AtomicUsize::new(0),
        }
    }
}

impl ErrorStatistics {
    fn new() -> Self {
        Self {
            parallel_processing_errors: AtomicUsize::new(0),
            resource_limit_errors: AtomicUsize::new(0),
            data_quality_errors: AtomicUsize::new(0),
            performance_threshold_errors: AtomicUsize::new(0),
            concurrency_conflict_errors: AtomicUsize::new(0),
            data_corruption_errors: AtomicUsize::new(0),
            insufficient_resources_errors: AtomicUsize::new(0),
            export_interrupted_errors: AtomicUsize::new(0),
        }
    }
}

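/// Snapshot of logger activity: runtime, operation counts, success rate, memory
/// high-water marks, and a per-category error breakdown.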
#[derive(Debug, Clone)]
pub struct PerformanceReport {
    pub total_runtime: Duration,
    pub total_operations: usize,
    pub successful_operations: usize,
    pub failed_operations: usize,
    pub success_rate: f64,
    pub average_processing_time_ms: f64,
    pub peak_memory_usage_mb: f64,
    pub current_memory_usage_mb: f64,
    pub error_breakdown: ErrorBreakdown,
}

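/// Error counts grouped by [`ExportError`] variant, as reported in a [`PerformanceReport`].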
#[derive(Debug, Clone)]
pub struct ErrorBreakdown {
    pub parallel_processing_errors: usize,
    pub resource_limit_errors: usize,
    pub data_quality_errors: usize,
    pub performance_threshold_errors: usize,
    pub concurrency_conflict_errors: usize,
    pub data_corruption_errors: usize,
    pub insufficient_resources_errors: usize,
    pub export_interrupted_errors: usize,
}

impl PerformanceReport {
    pub fn print_detailed_report(&self) {
        tracing::info!("\n📈 detailed performance report");
        tracing::info!("================");

        tracing::info!("⏱️ runtime: {:?}", self.total_runtime);
        tracing::info!("🔢 total operations: {}", self.total_operations);
        tracing::info!(
            "✅ successful operations: {} ({:.1}%)",
            self.successful_operations,
            self.success_rate
        );
        tracing::info!("❌ failed operations: {}", self.failed_operations);
        tracing::info!(
            "⚡ average processing time: {:.2}ms",
            self.average_processing_time_ms
        );
        tracing::info!("💾 peak memory usage: {:.2}MB", self.peak_memory_usage_mb);
        tracing::info!(
            "💾 current memory usage: {:.2}MB",
            self.current_memory_usage_mb
        );

        tracing::info!("\n🚨 error breakdown:");
        tracing::info!(
            " parallel processing errors: {}",
            self.error_breakdown.parallel_processing_errors
        );
        tracing::info!(
            " resource limit errors: {}",
            self.error_breakdown.resource_limit_errors
        );
        tracing::info!(
            " data quality errors: {}",
            self.error_breakdown.data_quality_errors
        );
        tracing::info!(
            " performance threshold errors: {}",
            self.error_breakdown.performance_threshold_errors
        );
        tracing::info!(
            " concurrency conflict errors: {}",
            self.error_breakdown.concurrency_conflict_errors
        );
        tracing::info!(
            " data corruption errors: {}",
            self.error_breakdown.data_corruption_errors
        );
        tracing::info!(
            " insufficient resources errors: {}",
            self.error_breakdown.insufficient_resources_errors
        );
        tracing::info!(
            " export interrupted errors: {}",
            self.error_breakdown.export_interrupted_errors
        );
    }
}

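/// Checks memory, disk, and CPU usage against configured limits and returns an
/// [`ExportError::ResourceLimitExceeded`] naming the offending [`ResourceType`]
/// when a limit is crossed.
///
/// A minimal sketch of how it is meant to be driven (illustrative only):
///
/// ```ignore
/// let monitor = ResourceMonitor::new(1024, 2048, 80.0); // 1 GiB RAM, 2 GiB disk, 80% CPU
/// match monitor.check_resource_usage() {
///     Ok(usage) => println!("memory at {:.1}%", usage.memory_usage_percentage()),
///     Err(e) => eprintln!("resource check failed: {e}"),
/// }
/// ```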
#[derive(Debug)]
pub struct ResourceMonitor {
    memory_limit: usize,
    disk_limit: usize,
    cpu_limit: f64,
}

impl ResourceMonitor {
    pub fn new(memory_limit_mb: usize, disk_limit_mb: usize, cpu_limit_percent: f64) -> Self {
        Self {
            memory_limit: memory_limit_mb * 1024 * 1024,
            disk_limit: disk_limit_mb * 1024 * 1024,
            cpu_limit: cpu_limit_percent,
        }
    }

    pub fn check_resource_usage(&self) -> TrackingResult<ResourceUsage> {
        let memory_usage = self.get_memory_usage()?;
        let disk_usage = self.get_disk_usage()?;
        let cpu_usage = self.get_cpu_usage()?;

        if memory_usage > self.memory_limit {
            return Err(ExportError::ResourceLimitExceeded {
                resource_type: ResourceType::Memory,
                limit: self.memory_limit as u64,
                actual: memory_usage as u64,
                suggested_action: "reduce parallelism or enable streaming processing".to_string(),
            }
            .into());
        }

        if disk_usage > self.disk_limit {
            return Err(ExportError::ResourceLimitExceeded {
                resource_type: ResourceType::Disk,
                limit: self.disk_limit as u64,
                actual: disk_usage as u64,
                suggested_action: "clean up temporary files or select other output location"
                    .to_string(),
            }
            .into());
        }

        if cpu_usage > self.cpu_limit {
            return Err(ExportError::ResourceLimitExceeded {
                resource_type: ResourceType::CPU,
                limit: (self.cpu_limit * 100.0) as u64,
                actual: (cpu_usage * 100.0) as u64,
                suggested_action: "reduce thread count or lower processing priority".to_string(),
            }
            .into());
        }

        Ok(ResourceUsage {
            memory_usage,
            disk_usage,
            cpu_usage,
            memory_limit: self.memory_limit,
            disk_limit: self.disk_limit,
            cpu_limit: self.cpu_limit,
        })
    }

    fn get_memory_usage(&self) -> TrackingResult<usize> {
        // Placeholder: a real implementation would query the OS for process memory usage.
        Ok(0)
    }

    fn get_disk_usage(&self) -> TrackingResult<usize> {
        // Placeholder: a real implementation would inspect disk usage at the output location.
        Ok(0)
    }

    fn get_cpu_usage(&self) -> TrackingResult<f64> {
        // Placeholder: a real implementation would sample process CPU utilization.
        Ok(0.0)
    }
}

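/// Measured resource consumption together with the limits it was checked against.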
#[derive(Debug, Clone)]
pub struct ResourceUsage {
    pub memory_usage: usize,
    pub disk_usage: usize,
    pub cpu_usage: f64,
    pub memory_limit: usize,
    pub disk_limit: usize,
    pub cpu_limit: f64,
}

impl ResourceUsage {
    pub fn memory_usage_percentage(&self) -> f64 {
        if self.memory_limit > 0 {
            (self.memory_usage as f64 / self.memory_limit as f64) * 100.0
        } else {
            0.0
        }
    }

    pub fn disk_usage_percentage(&self) -> f64 {
        if self.disk_limit > 0 {
            (self.disk_usage as f64 / self.disk_limit as f64) * 100.0
        } else {
            0.0
        }
    }

    pub fn cpu_usage_percentage(&self) -> f64 {
        self.cpu_usage * 100.0
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_export_error_display() {
        let error = ExportError::ParallelProcessingError {
            shard_index: 5,
            thread_id: "thread-1".to_string(),
            error_message: "serialization failed".to_string(),
            partial_results: None,
        };

        let display = format!("{error}");
        assert!(display.contains("parallel processing error"));
        assert!(display.contains("shard 5"));
        assert!(display.contains("thread-1"));
    }

    #[test]
    fn test_performance_logger() {
        let logger = PerformanceLogger::new(LogLevel::Info);

        logger.log_operation_start("test operation", "test details");
        logger.log_operation_success("test operation", Duration::from_millis(100), "success");

        let report = logger.generate_performance_report();
        assert_eq!(report.total_operations, 1);
        assert_eq!(report.successful_operations, 1);
        assert_eq!(report.failed_operations, 0);
        assert_eq!(report.success_rate, 100.0);
    }

    #[test]
    fn test_resource_monitor() {
        let monitor = ResourceMonitor::new(1024, 2048, 80.0);

        let result = monitor.check_resource_usage();
        assert!(result.is_ok());

        let usage = result.expect("resource usage check should succeed");
        assert_eq!(usage.memory_limit, 1024 * 1024 * 1024);
        assert_eq!(usage.disk_limit, 2048 * 1024 * 1024);
        assert_eq!(usage.cpu_limit, 80.0);
    }

    #[test]
    fn test_resource_usage_percentages() {
        let usage = ResourceUsage {
            memory_usage: 512 * 1024 * 1024,  // 512 MB used
            disk_usage: 1024 * 1024 * 1024,   // 1 GB used
            cpu_usage: 0.6,                   // 60% CPU
            memory_limit: 1024 * 1024 * 1024, // 1 GB limit
            disk_limit: 2048 * 1024 * 1024,   // 2 GB limit
            cpu_limit: 0.8,                   // 80% CPU limit
        };

        assert_eq!(usage.memory_usage_percentage(), 50.0);
        assert_eq!(usage.disk_usage_percentage(), 50.0);
        assert_eq!(usage.cpu_usage_percentage(), 60.0);
    }

    #[test]
    fn test_all_export_error_variants_display() {
        let resource_error = ExportError::ResourceLimitExceeded {
            resource_type: ResourceType::Memory,
            limit: 1024,
            actual: 2048,
            suggested_action: "reduce memory usage".to_string(),
        };
        let display = format!("{resource_error}");
        assert!(display.contains("resource limit exceeded"));
        assert!(display.contains("Memory"));
        assert!(display.contains("reduce memory usage"));

        let quality_error = ExportError::DataQualityError {
            validation_type: ValidationType::JsonStructure,
            expected: "valid JSON".to_string(),
            actual: "malformed JSON".to_string(),
            affected_records: 100,
        };
        let display = format!("{quality_error}");
        assert!(display.contains("data quality error"));
        assert!(display.contains("JsonStructure"));
        assert!(display.contains("affected records 100"));

        let perf_error = ExportError::PerformanceThresholdExceeded {
            metric: PerformanceMetric::ExportTime,
            threshold: 10.0,
            actual: 15.5,
            stage: ExportStage::Writing,
        };
        let display = format!("{perf_error}");
        assert!(display.contains("performance threshold exceeded"));
        assert!(display.contains("ExportTime"));
        assert!(display.contains("Writing"));

        let concurrency_error = ExportError::ConcurrencyConflict {
            operation: "write_data".to_string(),
            conflict_type: ConflictType::LockContention,
            retry_count: 3,
        };
        let display = format!("{concurrency_error}");
        assert!(display.contains("concurrency conflict"));
        assert!(display.contains("write_data"));
        assert!(display.contains("LockContention"));

        let corruption_error = ExportError::DataCorruption {
            corruption_type: CorruptionType::ChecksumMismatch,
            affected_data: "allocation_data.json".to_string(),
            recovery_possible: true,
        };
        let display = format!("{corruption_error}");
        assert!(display.contains("data corruption"));
        assert!(display.contains("ChecksumMismatch"));
        assert!(display.contains("recovery possible: true"));

        let insufficient_error = ExportError::InsufficientResources {
            required_memory: 2048,
            available_memory: 1024,
            required_disk: 4096,
            available_disk: 2048,
        };
        let display = format!("{insufficient_error}");
        assert!(display.contains("insufficient resources"));
        assert!(display.contains("required memory 2048MB"));
        assert!(display.contains("available 1024MB"));

        let interrupted_error = ExportError::ExportInterrupted {
            stage: ExportStage::ParallelProcessing,
            progress_percentage: 75.5,
            partial_output_path: Some("temp_output.json".to_string()),
        };
        let display = format!("{interrupted_error}");
        assert!(display.contains("export interrupted"));
        assert!(display.contains("ParallelProcessing"));
        assert!(display.contains("progress 75.5%"));
    }

    #[test]
    fn test_validation_error_variants_display() {
        let file_error = ValidationError::FileAccessError {
            file_path: "/path/to/file.json".to_string(),
            error: "permission denied".to_string(),
        };
        let display = format!("{file_error}");
        assert!(display.contains("file access error"));
        assert!(display.contains("/path/to/file.json"));
        assert!(display.contains("permission denied"));

        let json_error = ValidationError::JsonParsingError {
            file_path: "data.json".to_string(),
            error: "unexpected token".to_string(),
        };
        let display = format!("{json_error}");
        assert!(display.contains("JSON parsing error"));
        assert!(display.contains("data.json"));
        assert!(display.contains("unexpected token"));

        let timeout_error = ValidationError::TimeoutError {
            file_path: "large_file.json".to_string(),
            timeout_duration: Duration::from_secs(30),
        };
        let display = format!("{timeout_error}");
        assert!(display.contains("validation timeout"));
        assert!(display.contains("large_file.json"));
        assert!(display.contains("30s"));

        let cancelled_error = ValidationError::CancelledError {
            file_path: "cancelled_file.json".to_string(),
            reason: "user requested cancellation".to_string(),
        };
        let display = format!("{cancelled_error}");
        assert!(display.contains("validation cancelled"));
        assert!(display.contains("cancelled_file.json"));
        assert!(display.contains("user requested cancellation"));

        let config_error = ValidationError::ConfigurationError {
            error: "invalid timeout value".to_string(),
        };
        let display = format!("{config_error}");
        assert!(display.contains("validation configuration error"));
        assert!(display.contains("invalid timeout value"));

        let internal_error = ValidationError::InternalError {
            error: "unexpected internal state".to_string(),
        };
        let display = format!("{internal_error}");
        assert!(display.contains("internal validation error"));
        assert!(display.contains("unexpected internal state"));
    }

    #[test]
    fn test_error_conversion_to_tracking_error() {
        let validation_error = ValidationError::FileAccessError {
            file_path: "test.json".to_string(),
            error: "file not found".to_string(),
        };
        let tracking_error: TrackingError = validation_error.into();
        match tracking_error {
            TrackingError::ExportError(msg) => {
                assert!(msg.contains("file access error"));
                assert!(msg.contains("test.json"));
            }
            _ => panic!("Expected ExportError variant"),
        }

        let export_error = ExportError::DataQualityError {
            validation_type: ValidationType::DataIntegrity,
            expected: "valid data".to_string(),
            actual: "corrupted data".to_string(),
            affected_records: 50,
        };
        let tracking_error: TrackingError = export_error.into();
        match tracking_error {
            TrackingError::ExportError(msg) => {
                assert!(msg.contains("data quality error"));
                assert!(msg.contains("DataIntegrity"));
            }
            _ => panic!("Expected ExportError variant"),
        }
    }

    #[test]
    fn test_enum_variants_equality() {
        assert_eq!(ResourceType::Memory, ResourceType::Memory);
        assert_ne!(ResourceType::Memory, ResourceType::Disk);

        use std::collections::HashMap;
        let mut validation_map = HashMap::new();
        validation_map.insert(ValidationType::JsonStructure, 1);
        validation_map.insert(ValidationType::DataIntegrity, 2);
        assert_eq!(validation_map.get(&ValidationType::JsonStructure), Some(&1));
        assert_eq!(validation_map.get(&ValidationType::DataIntegrity), Some(&2));

        assert_eq!(PerformanceMetric::ExportTime, PerformanceMetric::ExportTime);
        assert_ne!(
            PerformanceMetric::ExportTime,
            PerformanceMetric::MemoryUsage
        );

        assert_eq!(ExportStage::Initialization, ExportStage::Initialization);
        assert_ne!(ExportStage::Initialization, ExportStage::Writing);

        assert_eq!(ConflictType::LockContention, ConflictType::LockContention);
        assert_ne!(ConflictType::LockContention, ConflictType::DataRace);

        assert_eq!(
            CorruptionType::IncompleteData,
            CorruptionType::IncompleteData
        );
        assert_ne!(
            CorruptionType::IncompleteData,
            CorruptionType::InvalidFormat
        );
    }

    #[test]
    fn test_performance_logger_different_log_levels() {
        let error_logger = PerformanceLogger::new(LogLevel::Error);
        assert!(error_logger.should_log(LogLevel::Error));
        assert!(!error_logger.should_log(LogLevel::Warn));
        assert!(!error_logger.should_log(LogLevel::Info));

        let warn_logger = PerformanceLogger::new(LogLevel::Warn);
        assert!(warn_logger.should_log(LogLevel::Error));
        assert!(warn_logger.should_log(LogLevel::Warn));
        assert!(!warn_logger.should_log(LogLevel::Info));

        let info_logger = PerformanceLogger::new(LogLevel::Info);
        assert!(info_logger.should_log(LogLevel::Error));
        assert!(info_logger.should_log(LogLevel::Warn));
        assert!(info_logger.should_log(LogLevel::Info));
        assert!(!info_logger.should_log(LogLevel::Debug));

        let debug_logger = PerformanceLogger::new(LogLevel::Debug);
        assert!(debug_logger.should_log(LogLevel::Error));
        assert!(debug_logger.should_log(LogLevel::Warn));
        assert!(debug_logger.should_log(LogLevel::Info));
        assert!(debug_logger.should_log(LogLevel::Debug));
        assert!(!debug_logger.should_log(LogLevel::Trace));

        let trace_logger = PerformanceLogger::new(LogLevel::Trace);
        assert!(trace_logger.should_log(LogLevel::Error));
        assert!(trace_logger.should_log(LogLevel::Warn));
        assert!(trace_logger.should_log(LogLevel::Info));
        assert!(trace_logger.should_log(LogLevel::Debug));
        assert!(trace_logger.should_log(LogLevel::Trace));
    }

    #[test]
    fn test_performance_logger_error_statistics() {
        let logger = PerformanceLogger::new(LogLevel::Debug);

        let parallel_error = ExportError::ParallelProcessingError {
            shard_index: 1,
            thread_id: "thread-1".to_string(),
            error_message: "test error".to_string(),
            partial_results: None,
        };
        logger.log_operation_failure("test_op", &parallel_error, Duration::from_millis(50));

        let resource_error = ExportError::ResourceLimitExceeded {
            resource_type: ResourceType::Memory,
            limit: 1000,
            actual: 1500,
            suggested_action: "reduce usage".to_string(),
        };
        logger.log_operation_failure("test_op2", &resource_error, Duration::from_millis(25));

        let report = logger.generate_performance_report();
        assert_eq!(report.failed_operations, 2);
        assert_eq!(report.error_breakdown.parallel_processing_errors, 1);
        assert_eq!(report.error_breakdown.resource_limit_errors, 1);
        assert_eq!(report.error_breakdown.data_quality_errors, 0);
    }

    #[test]
    fn test_performance_logger_memory_tracking() {
        let logger = PerformanceLogger::new(LogLevel::Debug);

        // Record an initial reading, then a higher current value, then a higher peak;
        // only the peak should ratchet upward while current reflects the last call.
        logger.log_memory_usage(1024 * 1024, 2048 * 1024);
        logger.log_memory_usage(1536 * 1024, 2048 * 1024);
        logger.log_memory_usage(512 * 1024, 3072 * 1024);

        let report = logger.generate_performance_report();
        assert_eq!(report.current_memory_usage_mb, 0.5);
        assert_eq!(report.peak_memory_usage_mb, 3.0);
    }

    #[test]
    fn test_performance_logger_metric_threshold_checking() {
        let logger = PerformanceLogger::new(LogLevel::Debug);

        // No threshold supplied: nothing should be recorded as an error.
        logger.log_performance_metric(PerformanceMetric::ExportTime, 5.0, None);

        // Value below the threshold: still no error.
        logger.log_performance_metric(PerformanceMetric::MemoryUsage, 80.0, Some(100.0));

        // Value above the threshold: exactly one threshold error should be counted.
        logger.log_performance_metric(PerformanceMetric::ThroughputRate, 150.0, Some(100.0));

        let report = logger.generate_performance_report();
        assert_eq!(report.error_breakdown.performance_threshold_errors, 1);
    }

    #[test]
    fn test_performance_logger_comprehensive_operations() {
        let logger = PerformanceLogger::new(LogLevel::Info);

        for i in 0..5 {
            logger.log_operation_start(&format!("operation_{}", i), "test details");
            if i < 3 {
                logger.log_operation_success(
                    &format!("operation_{}", i),
                    Duration::from_millis(100 + i as u64 * 10),
                    "success",
                );
            } else {
                let error = ExportError::DataQualityError {
                    validation_type: ValidationType::AllocationCount,
                    expected: "1000".to_string(),
                    actual: "999".to_string(),
                    affected_records: 1,
                };
                logger.log_operation_failure(
                    &format!("operation_{}", i),
                    &error,
                    Duration::from_millis(50),
                );
            }
        }

        let report = logger.generate_performance_report();
        assert_eq!(report.total_operations, 5);
        assert_eq!(report.successful_operations, 3);
        assert_eq!(report.failed_operations, 2);
        assert_eq!(report.success_rate, 60.0);
        assert!(report.average_processing_time_ms > 0.0);
        assert_eq!(report.error_breakdown.data_quality_errors, 2);
    }

    #[test]
    fn test_performance_logger_timestamp_formatting() {
        let logger = PerformanceLogger::new(LogLevel::Debug);

        let timestamp = logger.format_timestamp();
        assert!(timestamp.contains("s"));
        assert!(!timestamp.is_empty());
    }

    #[test]
    fn test_performance_report_detailed_output() {
        let logger = PerformanceLogger::new(LogLevel::Info);

        logger.log_operation_start("test", "details");
        logger.log_operation_success("test", Duration::from_millis(100), "success");
        logger.log_memory_usage(1024 * 1024, 2048 * 1024);

        let report = logger.generate_performance_report();

        report.print_detailed_report();

        assert!(report.total_runtime.as_nanos() > 0);
        assert_eq!(report.total_operations, 1);
        assert_eq!(report.successful_operations, 1);
        assert_eq!(report.success_rate, 100.0);
    }

    #[test]
    fn test_resource_usage_edge_cases() {
        let usage_zero_limits = ResourceUsage {
            memory_usage: 100,
            disk_usage: 200,
            cpu_usage: 0.5,
            memory_limit: 0,
            disk_limit: 0,
            cpu_limit: 0.0,
        };
        assert_eq!(usage_zero_limits.memory_usage_percentage(), 0.0);
        assert_eq!(usage_zero_limits.disk_usage_percentage(), 0.0);
        assert_eq!(usage_zero_limits.cpu_usage_percentage(), 50.0);

        let usage_max = ResourceUsage {
            memory_usage: 1024,
            disk_usage: 2048,
            cpu_usage: 1.0,
            memory_limit: 1024,
            disk_limit: 2048,
            cpu_limit: 1.0,
        };
        assert_eq!(usage_max.memory_usage_percentage(), 100.0);
        assert_eq!(usage_max.disk_usage_percentage(), 100.0);
        assert_eq!(usage_max.cpu_usage_percentage(), 100.0);

        let usage_over = ResourceUsage {
            memory_usage: 2048,
            disk_usage: 4096,
            cpu_usage: 1.5,
            memory_limit: 1024,
            disk_limit: 2048,
            cpu_limit: 1.0,
        };
        assert_eq!(usage_over.memory_usage_percentage(), 200.0);
        assert_eq!(usage_over.disk_usage_percentage(), 200.0);
        assert_eq!(usage_over.cpu_usage_percentage(), 150.0);
    }

    #[test]
    fn test_resource_monitor_creation() {
        let monitor = ResourceMonitor::new(512, 1024, 75.0);

        assert_eq!(monitor.memory_limit, 512 * 1024 * 1024);
        assert_eq!(monitor.disk_limit, 1024 * 1024 * 1024);
        assert_eq!(monitor.cpu_limit, 75.0);
    }

    #[test]
    fn test_metrics_collector_creation() {
        let collector = MetricsCollector::new();

        assert_eq!(collector.total_operations.load(Ordering::Relaxed), 0);
        assert_eq!(collector.successful_operations.load(Ordering::Relaxed), 0);
        assert_eq!(collector.failed_operations.load(Ordering::Relaxed), 0);
        assert_eq!(
            collector.total_processing_time_ms.load(Ordering::Relaxed),
            0
        );
        assert_eq!(collector.peak_memory_usage.load(Ordering::Relaxed), 0);
        assert_eq!(collector.current_memory_usage.load(Ordering::Relaxed), 0);
    }

    #[test]
    fn test_error_statistics_creation() {
        let stats = ErrorStatistics::new();

        assert_eq!(stats.parallel_processing_errors.load(Ordering::Relaxed), 0);
        assert_eq!(stats.resource_limit_errors.load(Ordering::Relaxed), 0);
        assert_eq!(stats.data_quality_errors.load(Ordering::Relaxed), 0);
        assert_eq!(
            stats.performance_threshold_errors.load(Ordering::Relaxed),
            0
        );
        assert_eq!(stats.concurrency_conflict_errors.load(Ordering::Relaxed), 0);
        assert_eq!(stats.data_corruption_errors.load(Ordering::Relaxed), 0);
        assert_eq!(
            stats.insufficient_resources_errors.load(Ordering::Relaxed),
            0
        );
        assert_eq!(stats.export_interrupted_errors.load(Ordering::Relaxed), 0);
    }

    #[test]
    fn test_all_error_types_statistics_update() {
        let logger = PerformanceLogger::new(LogLevel::Error);

        let errors = vec![
            ExportError::ParallelProcessingError {
                shard_index: 1,
                thread_id: "thread-1".to_string(),
                error_message: "test".to_string(),
                partial_results: None,
            },
            ExportError::ResourceLimitExceeded {
                resource_type: ResourceType::Memory,
                limit: 1000,
                actual: 1500,
                suggested_action: "reduce".to_string(),
            },
            ExportError::DataQualityError {
                validation_type: ValidationType::JsonStructure,
                expected: "valid".to_string(),
                actual: "invalid".to_string(),
                affected_records: 1,
            },
            ExportError::PerformanceThresholdExceeded {
                metric: PerformanceMetric::ExportTime,
                threshold: 10.0,
                actual: 15.0,
                stage: ExportStage::Writing,
            },
            ExportError::ConcurrencyConflict {
                operation: "test".to_string(),
                conflict_type: ConflictType::LockContention,
                retry_count: 1,
            },
            ExportError::DataCorruption {
                corruption_type: CorruptionType::ChecksumMismatch,
                affected_data: "test.json".to_string(),
                recovery_possible: true,
            },
            ExportError::InsufficientResources {
                required_memory: 2048,
                available_memory: 1024,
                required_disk: 4096,
                available_disk: 2048,
            },
            ExportError::ExportInterrupted {
                stage: ExportStage::Finalization,
                progress_percentage: 90.0,
                partial_output_path: None,
            },
        ];

        for (i, error) in errors.iter().enumerate() {
            logger.log_operation_failure(&format!("op_{}", i), error, Duration::from_millis(10));
        }

        let report = logger.generate_performance_report();
        let breakdown = &report.error_breakdown;

        assert_eq!(breakdown.parallel_processing_errors, 1);
        assert_eq!(breakdown.resource_limit_errors, 1);
        assert_eq!(breakdown.data_quality_errors, 1);
        assert_eq!(breakdown.performance_threshold_errors, 1);
        assert_eq!(breakdown.concurrency_conflict_errors, 1);
        assert_eq!(breakdown.data_corruption_errors, 1);
        assert_eq!(breakdown.insufficient_resources_errors, 1);
        assert_eq!(breakdown.export_interrupted_errors, 1);
        assert_eq!(report.failed_operations, 8);
    }

    #[test]
    fn test_log_level_equality() {
        assert_eq!(LogLevel::Error, LogLevel::Error);
        assert_ne!(LogLevel::Error, LogLevel::Warn);
        assert_eq!(LogLevel::Info, LogLevel::Info);
        assert_ne!(LogLevel::Debug, LogLevel::Trace);
    }

    #[test]
    fn test_export_error_with_partial_results() {
        let error = ExportError::ParallelProcessingError {
            shard_index: 2,
            thread_id: "thread-2".to_string(),
            error_message: "partial failure".to_string(),
            partial_results: Some(vec![1, 2, 3, 4, 5]),
        };

        match error {
            ExportError::ParallelProcessingError {
                partial_results, ..
            } => {
                assert!(partial_results.is_some());
                assert_eq!(partial_results.unwrap(), vec![1, 2, 3, 4, 5]);
            }
            _ => panic!("Expected ParallelProcessingError"),
        }
    }

    #[test]
    fn test_performance_logger_warning_and_debug_logging() {
        let logger = PerformanceLogger::new(LogLevel::Debug);

        logger.log_warning("This is a test warning");

        logger.log_debug("This is a test debug message");

        let error = ExportError::DataCorruption {
            corruption_type: CorruptionType::StructuralDamage,
            affected_data: "critical_data.json".to_string(),
            recovery_possible: false,
        };
        logger.log_error(&error);

        let report = logger.generate_performance_report();
        assert_eq!(report.error_breakdown.data_corruption_errors, 1);
    }
}