use clap::ArgMatches;
use rayon::prelude::*;
use serde_json::Value;
use std::collections::HashMap;
use std::error::Error;
use std::fs::{self, File};
use std::io::BufReader;
use std::path::Path;
use std::time::Instant;

mod data_integrator;
pub mod data_normalizer;
pub mod debug_logger;
pub mod error_handler;
pub mod json_file_discovery;
pub mod large_file_optimizer;

use data_integrator::DataIntegrator;
use data_normalizer::DataNormalizer;
use debug_logger::{DebugConfig, DebugLogger, LogLevel};
use error_handler::{ErrorRecoveryContext, HtmlErrorHandler};
use json_file_discovery::{JsonFileConfig, JsonFileDiscovery};
use large_file_optimizer::{LargeFileConfig, LargeFileOptimizer};

pub mod direct_json_template;

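/// Generate an interactive HTML report from previously exported JSON files.
///
/// Arguments are resolved from `matches`:
/// - `input-dir` (required): directory containing the JSON export files
/// - `output`: output HTML path; required unless `validate-only` is set
/// - `base-name`: file-name prefix of the exports (defaults to "snapshot")
/// - `validate-only`, `verbose`, `debug`, `performance`: boolean flags
///
/// A hypothetical CLI invocation (binary and subcommand names are
/// illustrative; only the flags are defined here):
/// `memtool html-from-json --input-dir ./out --output report.html`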
pub fn run_html_from_json(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {
    let input_dir = matches
        .get_one::<String>("input-dir")
        .ok_or("Input directory is required")?;
    let validate_only = matches.get_flag("validate-only");
    let default_output = "validation_only.html".to_string();
    let output_file = if validate_only {
        matches
            .get_one::<String>("output")
            .unwrap_or(&default_output)
    } else {
        matches
            .get_one::<String>("output")
            .ok_or("Output HTML file is required")?
    };
    let base_name = matches
        .get_one::<String>("base-name")
        .map(|s| s.as_str())
        .unwrap_or("snapshot");

    let verbose = matches.get_flag("verbose");
    let debug_mode = matches.get_flag("debug");
    let performance_mode = matches.get_flag("performance");

    let debug_config = DebugConfig {
        log_level: if debug_mode {
            LogLevel::Debug
        } else {
            LogLevel::Info
        },
        enable_timing: performance_mode || verbose,
        enable_progress: verbose || debug_mode,
        enable_memory_tracking: performance_mode || debug_mode,
        enable_file_ops: debug_mode,
        enable_json_details: debug_mode,
        progress_interval_ms: if debug_mode { 500 } else { 2000 },
        include_timestamps: debug_mode || performance_mode,
    };

    let logger = DebugLogger::with_config(debug_config);

    logger.info("Generating HTML report from JSON files...");
    logger.info(&format!("Input directory: {input_dir}"));
    logger.info(&format!("Output file: {output_file}"));
    logger.info(&format!("Base name: {base_name}"));

    if verbose {
        logger.info(&format!(
            "Debug mode: {debug_mode}, Verbose: {verbose}, Performance: {performance_mode}",
        ));
    }

    logger.start_progress(5, "Initializing HTML generation");

    let discovery_timing = logger.start_timing("json_file_discovery");
    logger.next_progress_step("Loading JSON files", 1);
    let json_data = load_json_files_with_logging(input_dir, base_name, &logger)?;
    let discovery_time = logger.end_timing(&discovery_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.discovery_time_ms = discovery_time);

    logger.next_progress_step("Normalizing data", 1);
    let normalization_timing = logger.start_timing("data_normalization");
    let normalizer = DataNormalizer::new();
    let mut unified_data = normalizer.normalize(&json_data)?;
    let normalization_time = logger.end_timing(&normalization_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.normalization_time_ms = normalization_time);
    logger.debug(&format!(
        "Normalized {} allocations",
        unified_data.allocations.len()
    ));

    if validate_only {
        logger.info("JSON validation completed successfully!");
        logger.info("Validation results:");
        logger.info(&format!(" - Files loaded: {}", json_data.len()));
        logger.info(&format!(
            " - Allocations found: {}",
            unified_data.allocations.len()
        ));
        logger.info(&format!(
            " - Lifecycle events: {}",
            unified_data.lifecycle.lifecycle_events.len()
        ));
        logger.info(" - Performance data: Available");
        logger.info(&format!(
            " - Security violations: {}",
            unified_data.security.total_violations
        ));
        logger.info(&format!(
            " - Complex types: {}",
            unified_data.complex_types.summary.total_complex_types
        ));
        logger.info(&format!(
            " - Active memory: {} bytes",
            unified_data.stats.active_memory
        ));
        logger.info(&format!(
            " - Peak memory: {} bytes",
            unified_data.stats.peak_memory
        ));
        return Ok(());
    }

    logger.next_progress_step("Integrating data sources", 1);
    let integration_timing = logger.start_timing("data_integration");
    let integrator = DataIntegrator::new();
    let integration_stats = integrator.integrate(&mut unified_data)?;
    let integration_time = logger.end_timing(&integration_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.integration_time_ms = integration_time);

    logger.debug(&format!(
        "Integration completed: {} cross-references, {} conflicts resolved",
        integration_stats.cross_references_found, integration_stats.conflicts_resolved
    ));

    tracing::info!("Integration Statistics:");
    tracing::info!(
        " Cross-references found: {}",
        integration_stats.cross_references_found
    );
    tracing::info!(
        " Conflicts resolved: {}",
        integration_stats.conflicts_resolved
    );
    tracing::info!(
        " Data enrichments: {}",
        integration_stats.enrichments_performed
    );
    tracing::info!(
        " Index build time: {}ms",
        integration_stats.index_build_time_ms
    );
    tracing::info!(
        " Total integration time: {}ms",
        integration_stats.integration_time_ms
    );

    {
        logger.next_progress_step("Generating HTML template", 1);
        let template_timing = logger.start_timing("template_generation");
        logger.info("Using direct JSON data template with charts...");
        let html_content = direct_json_template::generate_direct_html(&json_data)?;
        let template_time = logger.end_timing(&template_timing).unwrap_or(0);
        logger.update_stats(|stats| stats.template_time_ms = template_time);

        let template_size_bytes = html_content.len();
        let generation_time_ms = template_time;

        tracing::info!("Template Generation Statistics:");
        tracing::info!(
            " Template size: {:.1} KB",
            template_size_bytes as f64 / 1024.0
        );
        tracing::info!(" Total generation time: {}ms", generation_time_ms);

        let output_path = if Path::new(output_file).is_absolute() || output_file.contains('/') {
            output_file.to_string()
        } else {
            format!("{}/{}", input_dir.trim_end_matches('/'), output_file)
        };

        logger.next_progress_step("Writing HTML file", 1);
        let write_timing = logger.start_timing("file_write");
        logger.info(&format!("Writing HTML file to: {output_path}"));

        fs::write(&output_path, &html_content)?;
        logger.end_timing(&write_timing);

        logger.log_file_operation("written", &output_path, Some(html_content.len()));

        logger.info("HTML report generated successfully!");
        logger.info(&format!(
            "Open {output_path} in your browser to view the interactive report",
        ));

        if performance_mode || debug_mode {
            logger.print_performance_report();
            logger.print_memory_info();
        }
    }

    Ok(())
}

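/// Outcome of loading a single JSON export file: the file-type `suffix`,
/// whether parsing succeeded, the parsed value or an error message, and
/// basic size/timing metadata.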
#[derive(Debug)]
pub struct JsonLoadResult {
    pub suffix: String,
    pub success: bool,
    pub data: Option<Value>,
    pub error: Option<String>,
    pub file_size: usize,
    pub load_time_ms: u64,
}

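/// Aggregate statistics for one batch of JSON file loads.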
#[derive(Debug)]
pub struct JsonLoadStats {
    pub total_files_attempted: usize,
    pub files_loaded: usize,
    pub files_skipped: usize,
    pub files_failed: usize,
    pub total_size_bytes: usize,
    pub total_load_time_ms: u64,
    pub parallel_loading_used: bool,
}

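/// Loaded JSON documents keyed by file-type suffix (e.g. "memory_analysis").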
type JsonDataCollection = HashMap<String, Value>;

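/// Parse a JSON file through a `BufReader`, streaming bytes into the parser
/// instead of reading the whole file into an intermediate `String`.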
fn load_json_streaming_safe(file_path: &str) -> Result<Value, Box<dyn Error>> {
    let file = File::open(file_path)?;
    let reader = BufReader::new(file);

    let json_value: Value = serde_json::from_reader(reader)?;
    Ok(json_value)
}

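/// Discover and load all JSON export files for `base_name` under `input_dir`.
///
/// Pipeline: file discovery (with error recovery), per-file loading
/// (parallel or sequential), then aggregation into a suffix-keyed map.
/// Fails only if no file at all could be loaded.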
fn load_json_files_with_logging(
    input_dir: &str,
    base_name: &str,
    logger: &DebugLogger,
) -> Result<JsonDataCollection, Box<dyn Error>> {
    let start_time = Instant::now();

    logger.debug("Starting optimized JSON file loading with comprehensive error handling...");
    logger.debug(&format!("Directory: {input_dir}"));
    logger.debug(&format!("Base name: {base_name}"));

    let recovery_context = ErrorRecoveryContext {
        attempt_recovery: true,
        max_retries: 3,
        allow_partial_data: true,
        use_fallbacks: true,
        verbose_errors: false,
    };
    let mut error_handler = HtmlErrorHandler::with_context(recovery_context);

    let discovery_timing = logger.start_timing("file_discovery");
    let discovery = JsonFileDiscovery::new(input_dir.to_string(), base_name.to_string());
    let discovery_result = match discovery.discover_files() {
        Ok(result) => {
            logger.debug(&format!(
                "Discovery completed: {} files found",
                result.found_files.len()
            ));
            result
        }
        Err(e) => {
            match error_handler.handle_file_discovery_error(input_dir, base_name, Box::new(e)) {
                Ok(alternatives) => {
                    logger.warn(&format!(
                        "Found alternative directories: {alternatives:?}",
                    ));
                    return Err("JSON file discovery failed after attempting recovery".into());
                }
                Err(handled_error) => {
                    logger.error(&format!("{handled_error}"));
                    return Err(handled_error.into());
                }
            }
        }
    };
    logger.end_timing(&discovery_timing);

    let mut valid_files = Vec::new();
    let mut total_size = 0usize;

    for file_info in &discovery_result.found_files {
        let file_path = file_info.path.to_string_lossy().to_string();
        let file_size = file_info.size_bytes as usize;

        logger.log_file_operation("discovered", &file_path, Some(file_size));

        total_size += file_size;
        valid_files.push((file_info.config.clone(), file_path, file_size));
    }

    if valid_files.is_empty() {
        let error_msg =
            "No valid JSON files found! Please check the input directory and base name.";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    logger.info(&format!(
        "Found {} valid files, total size: {:.1} MB",
        valid_files.len(),
        total_size as f64 / 1024.0 / 1024.0
    ));

    logger.update_progress_items(valid_files.len());

    let has_large_files = valid_files
        .iter()
        .any(|(_, _, size)| *size > 20 * 1024 * 1024);
    // Parallel loading is currently disabled: files are always loaded
    // sequentially, regardless of `has_large_files`.
    let use_parallel = false;
    if use_parallel {
        logger.info(&format!(
            "Using parallel loading for {} files (total: {:.1} MB, has large files: {})",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0,
            has_large_files
        ));
    } else {
        logger.info(&format!(
            "Using sequential loading for {} files (total: {:.1} MB)",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0
        ));
    }

    let loading_timing = logger.start_timing("file_loading");
    let results = if use_parallel {
        load_files_parallel_with_logging(&valid_files, logger)?
    } else {
        load_files_sequential_with_logging(&valid_files, logger)?
    };
    let loading_time = logger.end_timing(&loading_timing).unwrap_or(0);

    let mut data = JsonDataCollection::new();
    let mut stats = JsonLoadStats {
        total_files_attempted: valid_files.len(),
        files_loaded: 0,
        files_skipped: 0,
        files_failed: 0,
        total_size_bytes: 0,
        total_load_time_ms: start_time.elapsed().as_millis() as u64,
        parallel_loading_used: use_parallel,
    };

    for result in results {
        if result.success {
            if let Some(json_data) = result.data {
                data.insert(result.suffix.clone(), json_data);
                stats.files_loaded += 1;
                stats.total_size_bytes += result.file_size;
                logger.debug(&format!(
                    "Loaded {} ({:.1} KB in {}ms)",
                    result.suffix,
                    result.file_size as f64 / 1024.0,
                    result.load_time_ms
                ));
            }
        } else {
            stats.files_failed += 1;
            logger.error(&format!(
                "Failed to load {}: {}",
                result.suffix,
                result.error.unwrap_or_else(|| "Unknown error".to_string())
            ));
        }
    }

    print_load_statistics_with_logging(&stats, logger);

    error_handler.print_recovery_summary();

    logger.update_stats(|perf_stats| {
        perf_stats.loading_time_ms = loading_time;
        perf_stats.files_processed = stats.files_loaded;
        perf_stats.data_size_bytes = stats.total_size_bytes;
    });

    if data.is_empty() {
        let error_msg = "No JSON files were successfully loaded!";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    Ok(data)
}

#[allow(dead_code)]
fn load_json_files(input_dir: &str, base_name: &str) -> Result<JsonDataCollection, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_json_files_with_logging(input_dir, base_name, &logger)
}

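/// Load the discovered files concurrently via rayon's `par_iter`, timing and
/// logging each file as it is processed.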
fn load_files_parallel_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting parallel file loading");

    let results: Vec<JsonLoadResult> = files
        .par_iter()
        .enumerate()
        .map(|(index, (config, file_path, file_size))| {
            let file_timing = format!("load_file_{index}");
            let timing_id = logger.start_timing(&file_timing);

            logger.log_file_operation("loading", file_path, Some(*file_size));
            let result = load_single_file_with_recovery(config, file_path, *file_size);

            logger.end_timing(&timing_id);
            result
        })
        .collect();

    logger.debug("Parallel file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_parallel(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_parallel_with_logging(files, &logger)
}

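/// Load the discovered files one at a time, updating the progress counter
/// after each file.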
fn load_files_sequential_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting sequential file loading");
    let mut results = Vec::new();

    for (index, (config, file_path, file_size)) in files.iter().enumerate() {
        let file_timing = format!("load_file_{index}");
        let timing_id = logger.start_timing(&file_timing);

        logger.log_file_operation("loading", file_path, Some(*file_size));
        let result = load_single_file_with_recovery(config, file_path, *file_size);

        logger.end_timing(&timing_id);
        results.push(result);

        logger.update_progress_items(index + 1);
    }

    logger.debug("Sequential file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_sequential(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_sequential_with_logging(files, &logger)
}

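/// Load one JSON file, converting any error into a failed `JsonLoadResult`
/// instead of propagating it, so a single bad file cannot abort the batch.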
fn load_single_file_with_recovery(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
) -> JsonLoadResult {
    let mut local_error_handler = HtmlErrorHandler::new();

    match load_single_file_internal(config, file_path, file_size, &mut local_error_handler) {
        Ok(result) => result,
        Err(e) => JsonLoadResult {
            suffix: config.suffix.to_string(),
            success: false,
            data: None,
            error: Some(e.to_string()),
            file_size,
            load_time_ms: 0,
        },
    }
}

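/// Core per-file loading logic. Files larger than 50 MB, or larger than half
/// the configured per-type size limit, go through `LargeFileOptimizer`;
/// everything else is parsed with the streaming reader and then structurally
/// validated. Both paths fall back to `HtmlErrorHandler` recovery on failure.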
fn load_single_file_internal(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
    error_handler: &mut HtmlErrorHandler,
) -> Result<JsonLoadResult, Box<dyn Error>> {
    let start_time = Instant::now();

    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .is_some_and(|max_mb| file_size > max_mb * 1024 * 1024 / 2);

    if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024, // 256 MB memory budget
            stream_chunk_size: 128 * 1024,       // 128 KB read chunks
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                tracing::info!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                Ok(JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                })
            }
            Err(e) => {
                let file_path_buf = std::path::PathBuf::from(file_path);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(e),
                ) {
                    Ok(Some(recovered_data)) => {
                        tracing::info!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    } else {
        match load_json_streaming_safe(file_path) {
            Ok(json_value) => {
                if let Err(validation_error) = validate_json_structure(&json_value, config.suffix) {
                    let validation_err = error_handler.handle_validation_error(
                        std::path::PathBuf::from(file_path),
                        config.suffix,
                        &validation_error,
                        &json_value,
                    );

                    tracing::error!("{}", validation_err);

                    let allow_partial = {
                        let stats = error_handler.get_stats();
                        // Tolerate invalid data only while the error count stays low.
                        stats.total_errors < 5
                    };
                    if allow_partial {
                        tracing::info!(
                            "Continuing with potentially invalid data for {}",
                            config.suffix
                        );
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(json_value),
                            error: Some(format!("Validation warning: {validation_error}")),
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    } else {
                        Err(validation_err.into())
                    }
                } else {
                    Ok(JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: true,
                        data: Some(json_value),
                        error: None,
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    })
                }
            }
            Err(e) => {
                let parsing_err = error_handler
                    .handle_json_parsing_error(std::path::PathBuf::from(file_path), &e.to_string());

                tracing::error!("{}", parsing_err);

                let file_path_buf = std::path::PathBuf::from(file_path);
                let error_msg = e.to_string();
                let simple_error = std::io::Error::other(error_msg);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(simple_error),
                ) {
                    Ok(Some(recovered_data)) => {
                        tracing::info!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    }
}

#[allow(dead_code)]
fn load_single_file(config: &JsonFileConfig, file_path: &str, file_size: usize) -> JsonLoadResult {
    let start_time = Instant::now();

    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .is_some_and(|max_mb| file_size > max_mb * 1024 * 1024 / 2);

    let result = if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024, // 256 MB memory budget
            stream_chunk_size: 128 * 1024,       // 128 KB read chunks
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                tracing::info!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                }
            }
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("Large file processing error: {e}")),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    } else {
        match load_json_streaming_safe(file_path) {
            Ok(json_value) => {
                if let Err(validation_error) = validate_json_structure(&json_value, config.suffix) {
                    JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: false,
                        data: None,
                        error: Some(format!("Validation error: {validation_error}")),
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    }
                } else {
                    JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: true,
                        data: Some(json_value),
                        error: None,
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    }
                }
            }
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("JSON loading error: {e}")),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    };

    result
}

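/// Shallow structural validation of a parsed JSON document:
/// `memory_analysis` and `performance` files must be JSON objects, while any
/// other file type may be either an object or an array. A minimal sketch of
/// the contract (kept out of doctests since the function is private):
///
/// ```ignore
/// assert!(validate_json_structure(&serde_json::json!({}), "performance").is_ok());
/// assert!(validate_json_structure(&serde_json::json!(42), "other").is_err());
/// ```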
fn validate_json_structure(json: &Value, file_type: &str) -> Result<(), String> {
    match file_type {
        "memory_analysis" => {
            if !json.is_object() {
                return Err("Memory analysis JSON must be an object".to_string());
            }
        }
        "performance" => {
            if !json.is_object() {
                return Err("Performance JSON must be an object".to_string());
            }
        }
        _ => {
            if !json.is_object() && !json.is_array() {
                return Err("JSON must be an object or array".to_string());
            }
        }
    }
    Ok(())
}

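/// Log a human-readable summary of `stats`: counts, total size, timing,
/// derived throughput, and a rough memory-efficiency estimate.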
fn print_load_statistics_with_logging(stats: &JsonLoadStats, logger: &DebugLogger) {
    logger.info("\nJSON Loading Statistics:");
    logger.info(&format!(
        " Files attempted: {}",
        stats.total_files_attempted
    ));
    logger.info(&format!(" Files loaded: {}", stats.files_loaded));
    logger.info(&format!(" Files failed: {}", stats.files_failed));
    logger.info(&format!(
        " Total size: {:.1} MB",
        stats.total_size_bytes as f64 / 1024.0 / 1024.0
    ));
    logger.info(&format!(" Total time: {}ms", stats.total_load_time_ms));
    logger.info(&format!(
        " Parallel loading: {}",
        if stats.parallel_loading_used {
            "Yes"
        } else {
            "No"
        }
    ));

    if stats.files_loaded > 0 {
        let avg_time = stats.total_load_time_ms / stats.files_loaded as u64;
        let throughput = if stats.total_load_time_ms > 0 {
            (stats.total_size_bytes as f64 / 1024.0 / 1024.0)
                / (stats.total_load_time_ms as f64 / 1000.0)
        } else {
            0.0
        };
        logger.info(&format!(" Average time per file: {avg_time}ms"));
        logger.info(&format!(" Throughput: {throughput:.1} MB/s"));

        let memory_efficiency = if stats.total_size_bytes > 0 {
            // Rough estimate: assume peak memory usage of about 1.5x the raw data size.
            let estimated_peak_memory = stats.total_size_bytes as f64 * 1.5;
            let efficiency = (stats.total_size_bytes as f64 / estimated_peak_memory) * 100.0;
            format!("{efficiency:.1}%")
        } else {
            "N/A".to_string()
        };
        logger.info(&format!(" Memory efficiency: {memory_efficiency}"));
    }
    logger.info("");
}

#[cfg(test)]
mod tests {
    use super::*;
    use clap::{Arg, Command};
    use serde_json::json;
    use std::fs;
    use tempfile::TempDir;

    fn create_test_json_files(temp_dir: &TempDir, base_name: &str) -> Result<(), Box<dyn Error>> {
        let dir_path = temp_dir.path();

        let memory_analysis = json!({
            "allocations": [
                {
                    "address": "0x1000",
                    "size": 64,
                    "variable_name": "test_var",
                    "type_name": "String"
                }
            ],
            "stats": {
                "total_allocations": 1,
                "active_memory": 64,
                "peak_memory": 64
            }
        });
        fs::write(
            dir_path.join(format!("{base_name}_memory_analysis.json")),
            serde_json::to_string_pretty(&memory_analysis)?,
        )?;

        let performance = json!({
            "metrics": {
                "allocation_rate": 1000,
                "deallocation_rate": 950,
                "peak_memory_usage": 1024
            },
            "timeline": []
        });
        fs::write(
            dir_path.join(format!("{base_name}_performance.json")),
            serde_json::to_string_pretty(&performance)?,
        )?;

        let lifecycle = json!({
            "lifecycle_events": [
                {
                    "timestamp": 1000,
                    "event_type": "allocation",
                    "address": "0x1000"
                }
            ]
        });
        fs::write(
            dir_path.join(format!("{base_name}_lifecycle.json")),
            serde_json::to_string_pretty(&lifecycle)?,
        )?;

        Ok(())
    }

    fn create_test_args(
        input_dir: &str,
        output_file: &str,
        base_name: &str,
        validate_only: bool,
    ) -> ArgMatches {
        let cmd = Command::new("test")
            .arg(
                Arg::new("input-dir")
                    .long("input-dir")
                    .value_name("DIR")
                    .required(true),
            )
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("verbose")
                    .long("verbose")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("debug")
                    .long("debug")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("performance")
                    .long("performance")
                    .action(clap::ArgAction::SetTrue),
            );

        let mut args = vec!["test", "--input-dir", input_dir];
        if !validate_only {
            args.extend_from_slice(&["--output", output_file]);
        } else {
            args.push("--validate-only");
        }
        args.extend_from_slice(&["--base-name", base_name]);

        cmd.try_get_matches_from(args).unwrap()
    }

    #[test]
    fn test_validate_json_structure() {
        let valid_memory = json!({
            "allocations": [],
            "stats": {}
        });
        assert!(validate_json_structure(&valid_memory, "memory_analysis").is_ok());

        let invalid_memory = json!([1, 2, 3]);
        assert!(validate_json_structure(&invalid_memory, "memory_analysis").is_err());

        let valid_performance = json!({
            "metrics": {}
        });
        assert!(validate_json_structure(&valid_performance, "performance").is_ok());

        let invalid_performance = json!("string");
        assert!(validate_json_structure(&invalid_performance, "performance").is_err());

        let valid_generic = json!({
            "data": "value"
        });
        assert!(validate_json_structure(&valid_generic, "other").is_ok());

        let valid_array = json!([1, 2, 3]);
        assert!(validate_json_structure(&valid_array, "other").is_ok());

        let invalid_generic = json!(42);
        assert!(validate_json_structure(&invalid_generic, "other").is_err());
    }

    #[test]
    fn test_json_load_result_creation() {
        let result = JsonLoadResult {
            suffix: "test".to_string(),
            success: true,
            data: Some(json!({"test": "data"})),
            error: None,
            file_size: 100,
            load_time_ms: 50,
        };

        assert_eq!(result.suffix, "test");
        assert!(result.success);
        assert!(result.data.is_some());
        assert!(result.error.is_none());
        assert_eq!(result.file_size, 100);
        assert_eq!(result.load_time_ms, 50);
    }

    #[test]
    fn test_json_load_stats_creation() {
        let stats = JsonLoadStats {
            total_files_attempted: 5,
            files_loaded: 4,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 1024,
            total_load_time_ms: 100,
            parallel_loading_used: true,
        };

        assert_eq!(stats.total_files_attempted, 5);
        assert_eq!(stats.files_loaded, 4);
        assert_eq!(stats.files_failed, 1);
        assert_eq!(stats.total_size_bytes, 1024);
        assert!(stats.parallel_loading_used);
    }

    #[test]
    fn test_load_json_files_with_valid_data() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let logger = DebugLogger::new();
        let result =
            load_json_files_with_logging(temp_dir.path().to_str().unwrap(), base_name, &logger);

        assert!(result.is_ok());
        let data = result.unwrap();

        assert!(!data.is_empty());
        assert!(
            data.contains_key("memory_analysis")
                || data.contains_key("performance")
                || data.contains_key("lifecycle")
        );

        Ok(())
    }

    #[test]
    fn test_load_json_files_empty_directory() {
        let temp_dir = TempDir::new().unwrap();
        let logger = DebugLogger::new();

        let result =
            load_json_files_with_logging(temp_dir.path().to_str().unwrap(), "nonexistent", &logger);

        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(
            error_msg.contains("No valid JSON files found")
                || error_msg.contains("discovery failed")
                || error_msg.contains("Missing required")
                || error_msg.contains("Directory not found")
        );
    }

    #[test]
    fn test_run_html_from_json_validate_only() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let matches = create_test_args(
            temp_dir.path().to_str().unwrap(),
            "output.html",
            base_name,
            true, // validate_only
        );

        let result = run_html_from_json(&matches);
        assert!(result.is_ok());

        Ok(())
    }

    #[test]
    fn test_run_html_from_json_full_generation() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let output_file = "test_output.html";
        let matches = create_test_args(
            temp_dir.path().to_str().unwrap(),
            output_file,
            base_name,
            false, // validate_only
        );

        let result = run_html_from_json(&matches);
        assert!(result.is_ok());

        let expected_output_path = temp_dir.path().join(output_file);
        assert!(expected_output_path.exists());

        let html_content = fs::read_to_string(&expected_output_path)?;
        assert!(!html_content.is_empty());
        assert!(html_content.contains("<!DOCTYPE html") || html_content.contains("<html"));

        Ok(())
    }

    #[test]
    fn test_run_html_from_json_missing_input_dir() {
        let matches = Command::new("test")
            .arg(Arg::new("input-dir").long("input-dir").value_name("DIR"))
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .try_get_matches_from(vec!["test", "--output", "test.html"])
            .unwrap();

        let result = run_html_from_json(&matches);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Input directory is required"));
    }

    #[test]
    fn test_run_html_from_json_missing_output_file() {
        let temp_dir = TempDir::new().unwrap();

        let matches = Command::new("test")
            .arg(Arg::new("input-dir").long("input-dir").value_name("DIR"))
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .try_get_matches_from(vec![
                "test",
                "--input-dir",
                temp_dir.path().to_str().unwrap(),
            ])
            .unwrap();

        let result = run_html_from_json(&matches);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Output HTML file is required"));
    }

    #[test]
    fn test_load_single_file_with_recovery_valid_file() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let file_path = temp_dir.path().join("test.json");

        let test_data = json!({
            "test": "data",
            "number": 42
        });
        fs::write(&file_path, serde_json::to_string_pretty(&test_data)?)?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, file_path.to_str().unwrap(), 100);

        assert!(result.success);
        assert!(result.data.is_some());
        assert!(result.error.is_none());
        assert_eq!(result.suffix, "test");

        Ok(())
    }

    #[test]
    fn test_load_single_file_with_recovery_invalid_json() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let file_path = temp_dir.path().join("invalid.json");

        fs::write(&file_path, "{ invalid json content")?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, file_path.to_str().unwrap(), 100);

        assert!(!result.success);
        assert!(result.data.is_none());
        assert!(result.error.is_some());
        let error_msg = result.error.unwrap();
        assert!(
            error_msg.contains("JSON parsing")
                || error_msg.contains("parsing")
                || error_msg.contains("error")
                || error_msg.contains("invalid")
        );

        Ok(())
    }

    #[test]
    fn test_load_single_file_with_recovery_nonexistent_file() {
        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, "/nonexistent/path/file.json", 100);

        assert!(!result.success);
        assert!(result.data.is_none());
        assert!(result.error.is_some());
    }

    #[test]
    fn test_print_load_statistics_with_logging() {
        let stats = JsonLoadStats {
            total_files_attempted: 3,
            files_loaded: 2,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 2048,
            total_load_time_ms: 150,
            parallel_loading_used: true,
        };

        let logger = DebugLogger::new();

        print_load_statistics_with_logging(&stats, &logger);

        let empty_stats = JsonLoadStats {
            total_files_attempted: 1,
            files_loaded: 0,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 0,
            total_load_time_ms: 50,
            parallel_loading_used: false,
        };

        print_load_statistics_with_logging(&empty_stats, &logger);
    }

    #[test]
    fn test_load_files_sequential_with_logging() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;

        let file_path = temp_dir.path().join("test.json");
        let test_data = json!({"test": "data"});
        fs::write(&file_path, serde_json::to_string_pretty(&test_data)?)?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let files = vec![(config, file_path.to_string_lossy().to_string(), 100)];
        let logger = DebugLogger::new();

        let result = load_files_sequential_with_logging(&files, &logger);
        assert!(result.is_ok());

        let results = result.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].success);

        Ok(())
    }
}