use clap::ArgMatches;
use rayon::prelude::*;
use serde_json::Value;
use std::collections::HashMap;
use std::error::Error;
use std::fs::{self, File};
use std::io::BufReader;
use std::path::Path;
use std::time::Instant;

mod data_integrator;
pub mod data_normalizer;
pub mod debug_logger;
pub mod error_handler;
pub mod json_file_discovery;
pub mod large_file_optimizer;

use data_integrator::DataIntegrator;
use data_normalizer::DataNormalizer;
use debug_logger::{DebugConfig, DebugLogger, LogLevel};
use error_handler::{ErrorRecoveryContext, HtmlErrorHandler};
use json_file_discovery::{JsonFileConfig, JsonFileDiscovery};
use large_file_optimizer::{LargeFileConfig, LargeFileOptimizer};

pub mod html;
pub mod js;

pub mod direct_json_template;

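/// Generates an interactive HTML report from previously exported JSON files.
///
/// Reads the CLI flags consumed below (`input-dir`, `output`, `base-name`,
/// `validate-only`, `verbose`, `debug`, `performance`), loads and normalizes
/// the JSON exports, integrates them into a unified dataset, and either stops
/// after validation (`--validate-only`) or renders the report via
/// `direct_json_template::generate_direct_html` and writes it to disk.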
pub fn run_html_from_json(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {
    let input_dir = matches
        .get_one::<String>("input-dir")
        .ok_or("Input directory is required")?;
    let validate_only = matches.get_flag("validate-only");
    let default_output = "validation_only.html".to_string();
    let output_file = if validate_only {
        matches
            .get_one::<String>("output")
            .unwrap_or(&default_output)
    } else {
        matches
            .get_one::<String>("output")
            .ok_or("Output HTML file is required")?
    };
    let base_name = matches
        .get_one::<String>("base-name")
        .map(|s| s.as_str())
        .unwrap_or("snapshot");

    let verbose = matches.get_flag("verbose");
    let debug_mode = matches.get_flag("debug");
    let performance_mode = matches.get_flag("performance");

    let debug_config = DebugConfig {
        log_level: if debug_mode {
            LogLevel::Debug
        } else {
            LogLevel::Info
        },
        enable_timing: performance_mode || verbose,
        enable_progress: verbose || debug_mode,
        enable_memory_tracking: performance_mode || debug_mode,
        enable_file_ops: debug_mode,
        enable_json_details: debug_mode,
        progress_interval_ms: if debug_mode { 500 } else { 2000 },
        include_timestamps: debug_mode || performance_mode,
    };

    let logger = DebugLogger::with_config(debug_config);

    logger.info("Generating HTML report from JSON files...");
    logger.info(&format!("Input directory: {input_dir}"));
    logger.info(&format!("Output file: {output_file}"));
    logger.info(&format!("Base name: {base_name}"));

    if verbose {
        logger.info(&format!(
            "Debug mode: {debug_mode}, Verbose: {verbose}, Performance: {performance_mode}",
        ));
    }

    logger.start_progress(5, "Initializing HTML generation");

    let discovery_timing = logger.start_timing("json_file_discovery");
    logger.next_progress_step("Loading JSON files", 1);
    let json_data = load_json_files_with_logging(input_dir, base_name, &logger)?;
    let discovery_time = logger.end_timing(&discovery_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.discovery_time_ms = discovery_time);

    logger.next_progress_step("Normalizing data", 1);
    let normalization_timing = logger.start_timing("data_normalization");
    let normalizer = DataNormalizer::new();
    let mut unified_data = normalizer.normalize(&json_data)?;
    let normalization_time = logger.end_timing(&normalization_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.normalization_time_ms = normalization_time);
    logger.debug(&format!(
        "Normalized {} allocations",
        unified_data.allocations.len()
    ));

    if validate_only {
        logger.info("JSON validation completed successfully!");
        logger.info("Validation results:");
        logger.info(&format!(" - Files loaded: {}", json_data.len()));
        logger.info(&format!(
            " - Allocations found: {}",
            unified_data.allocations.len()
        ));
        logger.info(&format!(
            " - Lifecycle events: {}",
            unified_data.lifecycle.lifecycle_events.len()
        ));
        logger.info(" - Performance data: Available");
        logger.info(&format!(
            " - Security violations: {}",
            unified_data.security.total_violations
        ));
        logger.info(&format!(
            " - Complex types: {}",
            unified_data.complex_types.summary.total_complex_types
        ));
        logger.info(&format!(
            " - Active memory: {} bytes",
            unified_data.stats.active_memory
        ));
        logger.info(&format!(
            " - Peak memory: {} bytes",
            unified_data.stats.peak_memory
        ));
        return Ok(());
    }

    logger.next_progress_step("Integrating data sources", 1);
    let integration_timing = logger.start_timing("data_integration");
    let integrator = DataIntegrator::new();
    let integration_stats = integrator.integrate(&mut unified_data)?;
    let integration_time = logger.end_timing(&integration_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.integration_time_ms = integration_time);

    logger.debug(&format!(
        "Integration completed: {} cross-references, {} conflicts resolved",
        integration_stats.cross_references_found, integration_stats.conflicts_resolved
    ));

    tracing::info!("Integration Statistics:");
    tracing::info!(
        "  Cross-references found: {}",
        integration_stats.cross_references_found
    );
    tracing::info!(
        "  Conflicts resolved: {}",
        integration_stats.conflicts_resolved
    );
    tracing::info!(
        "  Data enrichments: {}",
        integration_stats.enrichments_performed
    );
    tracing::info!(
        "  Index build time: {}ms",
        integration_stats.index_build_time_ms
    );
    tracing::info!(
        "  Total integration time: {}ms",
        integration_stats.integration_time_ms
    );

    {
        logger.next_progress_step("Generating HTML template", 1);
        let template_timing = logger.start_timing("template_generation");
        logger.info("Using direct JSON data template with charts...");
        let html_content = direct_json_template::generate_direct_html(&json_data)?;
        let template_time = logger.end_timing(&template_timing).unwrap_or(0);
        logger.update_stats(|stats| stats.template_time_ms = template_time);

        let template_size_bytes = html_content.len();
        let generation_time_ms = template_time;

        tracing::info!("Template Generation Statistics:");
        tracing::info!(
            "  Template size: {:.1} KB",
            template_size_bytes as f64 / 1024.0
        );
        tracing::info!("  Total generation time: {}ms", generation_time_ms);

        // Absolute paths and paths that already contain a separator are used
        // as-is; bare file names are written into the input directory.
        let output_path = if Path::new(output_file).is_absolute() || output_file.contains('/') {
            output_file.to_string()
        } else {
            format!("{}/{}", input_dir.trim_end_matches('/'), output_file)
        };

        logger.next_progress_step("Writing HTML file", 1);
        let write_timing = logger.start_timing("file_write");
        logger.info(&format!("Writing HTML file to: {output_path}"));

        fs::write(&output_path, &html_content)?;
        logger.end_timing(&write_timing);

        logger.log_file_operation("written", &output_path, Some(html_content.len()));

        logger.info("HTML report generated successfully!");
        logger.info(&format!(
            "Open {output_path} in your browser to view the interactive report",
        ));

        if performance_mode || debug_mode {
            logger.print_performance_report();
            logger.print_memory_info();
        }
    }

    Ok(())
}

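/// Outcome of loading a single JSON export file.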
#[derive(Debug)]
pub struct JsonLoadResult {
    /// File-type suffix this result belongs to (e.g. "memory_analysis").
    pub suffix: String,
    /// Whether the file was loaded successfully (possibly via fallback recovery).
    pub success: bool,
    /// Parsed JSON payload, present only on success.
    pub data: Option<Value>,
    /// Error or validation-warning message, if any.
    pub error: Option<String>,
    /// Size of the source file in bytes.
    pub file_size: usize,
    /// Wall-clock time spent loading this file, in milliseconds.
    pub load_time_ms: u64,
}

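/// Aggregate statistics for one JSON loading pass.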
#[derive(Debug)]
pub struct JsonLoadStats {
    /// Number of files the loader attempted to read.
    pub total_files_attempted: usize,
    /// Files parsed successfully.
    pub files_loaded: usize,
    /// Files skipped before loading.
    pub files_skipped: usize,
    /// Files that failed to load or validate.
    pub files_failed: usize,
    /// Combined size of the successfully loaded files, in bytes.
    pub total_size_bytes: usize,
    /// Total wall-clock time for the whole loading pass, in milliseconds.
    pub total_load_time_ms: u64,
    /// Whether the parallel loading path was used.
    pub parallel_loading_used: bool,
}

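/// Loaded JSON documents keyed by file-type suffix (e.g. "memory_analysis").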
type JsonDataCollection = HashMap<String, Value>;

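/// Parses a JSON file through a `BufReader` so the raw bytes are not buffered
/// into one large `String` before parsing.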
fn load_json_streaming_safe(file_path: &str) -> Result<Value, Box<dyn Error>> {
    let file = File::open(file_path)?;
    let reader = BufReader::new(file);

    let json_value: Value = serde_json::from_reader(reader)?;
    Ok(json_value)
}

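/// Discovers and loads all JSON exports for `base_name` under `input_dir`.
///
/// Files are discovered via `JsonFileDiscovery`, loaded (currently always
/// sequentially; see `use_parallel` below), and collected into a
/// `JsonDataCollection`. Failures go through `HtmlErrorHandler`, which may
/// recover partial data before the function gives up.
///
/// A minimal usage sketch (the paths are illustrative, not part of the API):
///
/// ```ignore
/// let logger = DebugLogger::new();
/// let data = load_json_files_with_logging("./snapshots", "snapshot", &logger)?;
/// ```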
fn load_json_files_with_logging(
    input_dir: &str,
    base_name: &str,
    logger: &DebugLogger,
) -> Result<JsonDataCollection, Box<dyn Error>> {
    let start_time = Instant::now();

    logger.debug("Starting optimized JSON file loading with comprehensive error handling...");
    logger.debug(&format!("Directory: {input_dir}"));
    logger.debug(&format!("Base name: {base_name}"));

    let recovery_context = ErrorRecoveryContext {
        attempt_recovery: true,
        max_retries: 3,
        allow_partial_data: true,
        use_fallbacks: true,
        verbose_errors: false,
    };
    let mut error_handler = HtmlErrorHandler::with_context(recovery_context);

    let discovery_timing = logger.start_timing("file_discovery");
    let discovery = JsonFileDiscovery::new(input_dir.to_string(), base_name.to_string());
    let discovery_result = match discovery.discover_files() {
        Ok(result) => {
            logger.debug(&format!(
                "Discovery completed: {} files found",
                result.found_files.len()
            ));
            result
        }
        Err(e) => {
            match error_handler.handle_file_discovery_error(input_dir, base_name, Box::new(e)) {
                Ok(alternatives) => {
                    logger.warn(&format!(
                        "Found alternative directories: {alternatives:?}",
                    ));
                    return Err("JSON file discovery failed after attempting recovery".into());
                }
                Err(handled_error) => {
                    logger.error(&format!("{handled_error}"));
                    return Err(handled_error.into());
                }
            }
        }
    };
    logger.end_timing(&discovery_timing);

    let mut valid_files = Vec::new();
    let mut total_size = 0usize;

    for file_info in &discovery_result.found_files {
        let file_path = file_info.path.to_string_lossy().to_string();
        let file_size = file_info.size_bytes as usize;

        logger.log_file_operation("discovered", &file_path, Some(file_size));

        total_size += file_size;
        valid_files.push((file_info.config.clone(), file_path, file_size));
    }

    if valid_files.is_empty() {
        let error_msg =
            "No valid JSON files found! Please check the input directory and base name.";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    logger.info(&format!(
        "Found {} valid files, total size: {:.1} MB",
        valid_files.len(),
        total_size as f64 / 1024.0 / 1024.0
    ));

    logger.update_progress_items(valid_files.len());

    let has_large_files = valid_files
        .iter()
        .any(|(_, _, size)| *size > 20 * 1024 * 1024);
    // Parallel loading is currently disabled; all files take the sequential path.
    let use_parallel = false;

    if use_parallel {
        logger.info(&format!(
            "Using parallel loading for {} files (total: {:.1} MB, has large files: {})",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0,
            has_large_files
        ));
    } else {
        logger.info(&format!(
            "Using sequential loading for {} files (total: {:.1} MB)",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0
        ));
    }

    let loading_timing = logger.start_timing("file_loading");
    let results = if use_parallel {
        load_files_parallel_with_logging(&valid_files, logger)?
    } else {
        load_files_sequential_with_logging(&valid_files, logger)?
    };
    let loading_time = logger.end_timing(&loading_timing).unwrap_or(0);

    let mut data = JsonDataCollection::new();
    let mut stats = JsonLoadStats {
        total_files_attempted: valid_files.len(),
        files_loaded: 0,
        files_skipped: 0,
        files_failed: 0,
        total_size_bytes: 0,
        total_load_time_ms: start_time.elapsed().as_millis() as u64,
        parallel_loading_used: use_parallel,
    };

    for result in results {
        if result.success {
            if let Some(json_data) = result.data {
                data.insert(result.suffix.clone(), json_data);
                stats.files_loaded += 1;
                stats.total_size_bytes += result.file_size;
                logger.debug(&format!(
                    "Loaded {} ({:.1} KB in {}ms)",
                    result.suffix,
                    result.file_size as f64 / 1024.0,
                    result.load_time_ms
                ));
            }
        } else {
            stats.files_failed += 1;
            logger.error(&format!(
                "Failed to load {}: {}",
                result.suffix,
                result.error.unwrap_or_else(|| "Unknown error".to_string())
            ));
        }
    }

    print_load_statistics_with_logging(&stats, logger);

    error_handler.print_recovery_summary();

    logger.update_stats(|perf_stats| {
        perf_stats.loading_time_ms = loading_time;
        perf_stats.files_processed = stats.files_loaded;
        perf_stats.data_size_bytes = stats.total_size_bytes;
    });

    if data.is_empty() {
        let error_msg = "No JSON files were successfully loaded!";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    Ok(data)
}

#[allow(dead_code)]
fn load_json_files(input_dir: &str, base_name: &str) -> Result<JsonDataCollection, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_json_files_with_logging(input_dir, base_name, &logger)
}

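/// Loads files concurrently with rayon's `par_iter`; each file gets its own
/// timing entry and a recovery-aware load via `load_single_file_with_recovery`.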
fn load_files_parallel_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting parallel file loading");

    let results: Vec<JsonLoadResult> = files
        .par_iter()
        .enumerate()
        .map(|(index, (config, file_path, file_size))| {
            let file_timing = format!("load_file_{index}");
            let timing_id = logger.start_timing(&file_timing);

            logger.log_file_operation("loading", file_path, Some(*file_size));
            let result = load_single_file_with_recovery(config, file_path, *file_size);

            logger.end_timing(&timing_id);
            result
        })
        .collect();

    logger.debug("Parallel file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_parallel(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_parallel_with_logging(files, &logger)
}

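/// Loads files one at a time, updating the logger's progress counter after
/// each file; this is the path currently taken by `load_json_files_with_logging`.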
fn load_files_sequential_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting sequential file loading");
    let mut results = Vec::new();

    for (index, (config, file_path, file_size)) in files.iter().enumerate() {
        let file_timing = format!("load_file_{index}");
        let timing_id = logger.start_timing(&file_timing);

        logger.log_file_operation("loading", file_path, Some(*file_size));
        let result = load_single_file_with_recovery(config, file_path, *file_size);

        logger.end_timing(&timing_id);
        results.push(result);

        logger.update_progress_items(index + 1);
    }

    logger.debug("Sequential file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_sequential(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_sequential_with_logging(files, &logger)
}

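/// Wraps `load_single_file_internal` with a per-file `HtmlErrorHandler` and
/// flattens any error into a failed `JsonLoadResult` instead of propagating it.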
fn load_single_file_with_recovery(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
) -> JsonLoadResult {
    let mut local_error_handler = HtmlErrorHandler::new();

    match load_single_file_internal(config, file_path, file_size, &mut local_error_handler) {
        Ok(result) => result,
        Err(e) => JsonLoadResult {
            suffix: config.suffix.to_string(),
            success: false,
            data: None,
            error: Some(e.to_string()),
            file_size,
            load_time_ms: 0,
        },
    }
}

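/// Loads and validates one JSON file, choosing between the streaming
/// `LargeFileOptimizer` (files over 50 MB, or over half the configured
/// per-file limit) and plain buffered parsing, with fallback recovery through
/// the shared `error_handler` on failure.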
fn load_single_file_internal(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
    error_handler: &mut HtmlErrorHandler,
) -> Result<JsonLoadResult, Box<dyn Error>> {
    let start_time = Instant::now();

    // Use the streaming optimizer for files above 50 MB, or above half of the
    // configured per-file size limit.
    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .is_some_and(|max_mb| file_size > max_mb * 1024 * 1024 / 2);

    if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024,
            stream_chunk_size: 128 * 1024,
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                tracing::info!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                Ok(JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                })
            }
            Err(e) => {
                let file_path_buf = std::path::PathBuf::from(file_path);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(e),
                ) {
                    Ok(Some(recovered_data)) => {
                        tracing::info!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    } else {
        match load_json_streaming_safe(file_path) {
            Ok(json_value) => {
                if let Err(validation_error) = validate_json_structure(&json_value, config.suffix) {
                    let validation_err = error_handler.handle_validation_error(
                        std::path::PathBuf::from(file_path),
                        config.suffix,
                        &validation_error,
                        &json_value,
                    );

                    tracing::error!("{}", validation_err);

                    // Tolerate validation failures until the handler has seen
                    // too many errors overall.
                    let allow_partial = {
                        let stats = error_handler.get_stats();
                        stats.total_errors < 5
                    };

                    if allow_partial {
                        tracing::info!(
                            "Continuing with potentially invalid data for {}",
                            config.suffix
                        );
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(json_value),
                            error: Some(format!("Validation warning: {validation_error}")),
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    } else {
                        Err(validation_err.into())
                    }
                } else {
                    Ok(JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: true,
                        data: Some(json_value),
                        error: None,
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    })
                }
            }
            Err(e) => {
                let parsing_err = error_handler
                    .handle_json_parsing_error(std::path::PathBuf::from(file_path), &e.to_string());

                tracing::error!("{}", parsing_err);

                let file_path_buf = std::path::PathBuf::from(file_path);
                let error_msg = e.to_string();
                let simple_error = std::io::Error::other(error_msg);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(simple_error),
                ) {
                    Ok(Some(recovered_data)) => {
                        tracing::info!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    }
}

#[allow(dead_code)]
fn load_single_file(config: &JsonFileConfig, file_path: &str, file_size: usize) -> JsonLoadResult {
    let start_time = Instant::now();

    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .is_some_and(|max_mb| file_size > max_mb * 1024 * 1024 / 2);

    if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024,
            stream_chunk_size: 128 * 1024,
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                tracing::info!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                }
            }
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("Large file processing error: {e}")),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    } else {
        match load_json_streaming_safe(file_path) {
            Ok(json_value) => {
                if let Err(validation_error) = validate_json_structure(&json_value, config.suffix) {
                    JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: false,
                        data: None,
                        error: Some(format!("Validation error: {validation_error}")),
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    }
                } else {
                    JsonLoadResult {
                        suffix: config.suffix.to_string(),
                        success: true,
                        data: Some(json_value),
                        error: None,
                        file_size,
                        load_time_ms: start_time.elapsed().as_millis() as u64,
                    }
                }
            }
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("JSON loading error: {e}")),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    }
}

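/// Performs a shallow structural check on a parsed JSON document:
/// `memory_analysis` and `performance` files must be objects; any other file
/// type may be an object or an array.
///
/// A sketch of the accepted shapes (mirrors the unit tests below):
///
/// ```ignore
/// use serde_json::json;
/// assert!(validate_json_structure(&json!({"stats": {}}), "memory_analysis").is_ok());
/// assert!(validate_json_structure(&json!([1, 2, 3]), "memory_analysis").is_err());
/// assert!(validate_json_structure(&json!([1, 2, 3]), "other").is_ok());
/// ```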
fn validate_json_structure(json: &Value, file_type: &str) -> Result<(), String> {
    match file_type {
        "memory_analysis" => {
            if !json.is_object() {
                return Err("Memory analysis JSON must be an object".to_string());
            }
        }
        "performance" => {
            if !json.is_object() {
                return Err("Performance JSON must be an object".to_string());
            }
        }
        _ => {
            if !json.is_object() && !json.is_array() {
                return Err("JSON must be an object or array".to_string());
            }
        }
    }
    Ok(())
}

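/// Logs a human-readable summary of a `JsonLoadStats`, including derived
/// values (average time per file, throughput, and a rough memory-efficiency
/// estimate) when at least one file loaded.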
fn print_load_statistics_with_logging(stats: &JsonLoadStats, logger: &DebugLogger) {
    logger.info("\nJSON Loading Statistics:");
    logger.info(&format!(
        "  Files attempted: {}",
        stats.total_files_attempted
    ));
    logger.info(&format!("  Files loaded: {}", stats.files_loaded));
    logger.info(&format!("  Files failed: {}", stats.files_failed));
    logger.info(&format!(
        "  Total size: {:.1} MB",
        stats.total_size_bytes as f64 / 1024.0 / 1024.0
    ));
    logger.info(&format!("  Total time: {}ms", stats.total_load_time_ms));
    logger.info(&format!(
        "  Parallel loading: {}",
        if stats.parallel_loading_used {
            "Yes"
        } else {
            "No"
        }
    ));

    if stats.files_loaded > 0 {
        let avg_time = stats.total_load_time_ms / stats.files_loaded as u64;
        let throughput = if stats.total_load_time_ms > 0 {
            (stats.total_size_bytes as f64 / 1024.0 / 1024.0)
                / (stats.total_load_time_ms as f64 / 1000.0)
        } else {
            0.0
        };
        logger.info(&format!("  Average time per file: {avg_time}ms"));
        logger.info(&format!("  Throughput: {throughput:.1} MB/s"));

        let memory_efficiency = if stats.total_size_bytes > 0 {
            // Rough estimate assuming peak memory is ~1.5x the raw JSON size
            // during parsing.
            let estimated_peak_memory = stats.total_size_bytes as f64 * 1.5;
            let efficiency = (stats.total_size_bytes as f64 / estimated_peak_memory) * 100.0;
            format!("{efficiency:.1}%")
        } else {
            "N/A".to_string()
        };
        logger.info(&format!("  Memory efficiency: {memory_efficiency}"));
    }
    logger.info("");
}

#[cfg(test)]
mod tests {
    use super::*;
    use clap::{Arg, Command};
    use serde_json::json;
    use std::fs;
    use tempfile::TempDir;

    fn create_test_json_files(temp_dir: &TempDir, base_name: &str) -> Result<(), Box<dyn Error>> {
        let dir_path = temp_dir.path();

        let memory_analysis = json!({
            "allocations": [
                {
                    "address": "0x1000",
                    "size": 64,
                    "variable_name": "test_var",
                    "type_name": "String"
                }
            ],
            "stats": {
                "total_allocations": 1,
                "active_memory": 64,
                "peak_memory": 64
            }
        });
        fs::write(
            dir_path.join(format!("{base_name}_memory_analysis.json")),
            serde_json::to_string_pretty(&memory_analysis)?,
        )?;

        let performance = json!({
            "metrics": {
                "allocation_rate": 1000,
                "deallocation_rate": 950,
                "peak_memory_usage": 1024
            },
            "timeline": []
        });
        fs::write(
            dir_path.join(format!("{base_name}_performance.json")),
            serde_json::to_string_pretty(&performance)?,
        )?;

        let lifecycle = json!({
            "lifecycle_events": [
                {
                    "timestamp": 1000,
                    "event_type": "allocation",
                    "address": "0x1000"
                }
            ]
        });
        fs::write(
            dir_path.join(format!("{base_name}_lifecycle.json")),
            serde_json::to_string_pretty(&lifecycle)?,
        )?;

        Ok(())
    }

    fn create_test_args(
        input_dir: &str,
        output_file: &str,
        base_name: &str,
        validate_only: bool,
    ) -> ArgMatches {
        let cmd = Command::new("test")
            .arg(
                Arg::new("input-dir")
                    .long("input-dir")
                    .value_name("DIR")
                    .required(true),
            )
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("verbose")
                    .long("verbose")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("debug")
                    .long("debug")
                    .action(clap::ArgAction::SetTrue),
            )
            .arg(
                Arg::new("performance")
                    .long("performance")
                    .action(clap::ArgAction::SetTrue),
            );

        let mut args = vec!["test", "--input-dir", input_dir];
        if !validate_only {
            args.extend_from_slice(&["--output", output_file]);
        } else {
            args.push("--validate-only");
        }
        args.extend_from_slice(&["--base-name", base_name]);

        cmd.try_get_matches_from(args).unwrap()
    }

    #[test]
    fn test_validate_json_structure() {
        let valid_memory = json!({
            "allocations": [],
            "stats": {}
        });
        assert!(validate_json_structure(&valid_memory, "memory_analysis").is_ok());

        let invalid_memory = json!([1, 2, 3]);
        assert!(validate_json_structure(&invalid_memory, "memory_analysis").is_err());

        let valid_performance = json!({
            "metrics": {}
        });
        assert!(validate_json_structure(&valid_performance, "performance").is_ok());

        let invalid_performance = json!("string");
        assert!(validate_json_structure(&invalid_performance, "performance").is_err());

        let valid_generic = json!({
            "data": "value"
        });
        assert!(validate_json_structure(&valid_generic, "other").is_ok());

        let valid_array = json!([1, 2, 3]);
        assert!(validate_json_structure(&valid_array, "other").is_ok());

        let invalid_generic = json!(42);
        assert!(validate_json_structure(&invalid_generic, "other").is_err());
    }

    #[test]
    fn test_json_load_result_creation() {
        let result = JsonLoadResult {
            suffix: "test".to_string(),
            success: true,
            data: Some(json!({"test": "data"})),
            error: None,
            file_size: 100,
            load_time_ms: 50,
        };

        assert_eq!(result.suffix, "test");
        assert!(result.success);
        assert!(result.data.is_some());
        assert!(result.error.is_none());
        assert_eq!(result.file_size, 100);
        assert_eq!(result.load_time_ms, 50);
    }

    #[test]
    fn test_json_load_stats_creation() {
        let stats = JsonLoadStats {
            total_files_attempted: 5,
            files_loaded: 4,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 1024,
            total_load_time_ms: 100,
            parallel_loading_used: true,
        };

        assert_eq!(stats.total_files_attempted, 5);
        assert_eq!(stats.files_loaded, 4);
        assert_eq!(stats.files_failed, 1);
        assert_eq!(stats.total_size_bytes, 1024);
        assert!(stats.parallel_loading_used);
    }

    #[test]
    fn test_load_json_files_with_valid_data() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let logger = DebugLogger::new();
        let result =
            load_json_files_with_logging(temp_dir.path().to_str().unwrap(), base_name, &logger);

        assert!(result.is_ok());
        let data = result.unwrap();

        assert!(!data.is_empty());
        assert!(
            data.contains_key("memory_analysis")
                || data.contains_key("performance")
                || data.contains_key("lifecycle")
        );

        Ok(())
    }

    #[test]
    fn test_load_json_files_empty_directory() {
        let temp_dir = TempDir::new().unwrap();
        let logger = DebugLogger::new();

        let result =
            load_json_files_with_logging(temp_dir.path().to_str().unwrap(), "nonexistent", &logger);

        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(
            error_msg.contains("No valid JSON files found")
                || error_msg.contains("discovery failed")
                || error_msg.contains("Missing required")
                || error_msg.contains("Directory not found")
        );
    }

    #[test]
    fn test_run_html_from_json_validate_only() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let matches = create_test_args(
            temp_dir.path().to_str().unwrap(),
            "output.html",
            base_name,
            true, // validate_only
        );

        let result = run_html_from_json(&matches);
        assert!(result.is_ok());

        Ok(())
    }

    #[test]
    fn test_run_html_from_json_full_generation() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let base_name = "test_snapshot";

        create_test_json_files(&temp_dir, base_name)?;

        let output_file = "test_output.html";
        let matches = create_test_args(
            temp_dir.path().to_str().unwrap(),
            output_file,
            base_name,
            false, // validate_only
        );

        let result = run_html_from_json(&matches);
        assert!(result.is_ok());

        let expected_output_path = temp_dir.path().join(output_file);
        assert!(expected_output_path.exists());

        let html_content = fs::read_to_string(&expected_output_path)?;
        assert!(!html_content.is_empty());
        assert!(html_content.contains("<!DOCTYPE html") || html_content.contains("<html"));

        Ok(())
    }

    #[test]
    fn test_run_html_from_json_missing_input_dir() {
        let matches = Command::new("test")
            .arg(Arg::new("input-dir").long("input-dir").value_name("DIR"))
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .try_get_matches_from(vec!["test", "--output", "test.html"])
            .unwrap();

        let result = run_html_from_json(&matches);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Input directory is required"));
    }

    #[test]
    fn test_run_html_from_json_missing_output_file() {
        let temp_dir = TempDir::new().unwrap();

        let matches = Command::new("test")
            .arg(Arg::new("input-dir").long("input-dir").value_name("DIR"))
            .arg(Arg::new("output").long("output").value_name("FILE"))
            .arg(Arg::new("base-name").long("base-name").value_name("NAME"))
            .arg(
                Arg::new("validate-only")
                    .long("validate-only")
                    .action(clap::ArgAction::SetTrue),
            )
            .try_get_matches_from(vec![
                "test",
                "--input-dir",
                temp_dir.path().to_str().unwrap(),
            ])
            .unwrap();

        let result = run_html_from_json(&matches);
        assert!(result.is_err());
        let error_msg = result.unwrap_err().to_string();
        assert!(error_msg.contains("Output HTML file is required"));
    }

    #[test]
    fn test_load_single_file_with_recovery_valid_file() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let file_path = temp_dir.path().join("test.json");

        let test_data = json!({
            "test": "data",
            "number": 42
        });
        fs::write(&file_path, serde_json::to_string_pretty(&test_data)?)?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, file_path.to_str().unwrap(), 100);

        assert!(result.success);
        assert!(result.data.is_some());
        assert!(result.error.is_none());
        assert_eq!(result.suffix, "test");

        Ok(())
    }

    #[test]
    fn test_load_single_file_with_recovery_invalid_json() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;
        let file_path = temp_dir.path().join("invalid.json");

        fs::write(&file_path, "{ invalid json content")?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, file_path.to_str().unwrap(), 100);

        assert!(!result.success);
        assert!(result.data.is_none());
        assert!(result.error.is_some());
        let error_msg = result.error.unwrap();
        assert!(
            error_msg.contains("JSON parsing")
                || error_msg.contains("parsing")
                || error_msg.contains("error")
                || error_msg.contains("invalid")
        );

        Ok(())
    }

    #[test]
    fn test_load_single_file_with_recovery_nonexistent_file() {
        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let result = load_single_file_with_recovery(&config, "/nonexistent/path/file.json", 100);

        assert!(!result.success);
        assert!(result.data.is_none());
        assert!(result.error.is_some());
    }

    #[test]
    fn test_print_load_statistics_with_logging() {
        let stats = JsonLoadStats {
            total_files_attempted: 3,
            files_loaded: 2,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 2048,
            total_load_time_ms: 150,
            parallel_loading_used: true,
        };

        let logger = DebugLogger::new();

        print_load_statistics_with_logging(&stats, &logger);

        let empty_stats = JsonLoadStats {
            total_files_attempted: 1,
            files_loaded: 0,
            files_skipped: 0,
            files_failed: 1,
            total_size_bytes: 0,
            total_load_time_ms: 50,
            parallel_loading_used: false,
        };

        print_load_statistics_with_logging(&empty_stats, &logger);
    }

    #[test]
    fn test_load_files_sequential_with_logging() -> Result<(), Box<dyn Error>> {
        let temp_dir = TempDir::new()?;

        let file_path = temp_dir.path().join("test.json");
        let test_data = json!({"test": "data"});
        fs::write(&file_path, serde_json::to_string_pretty(&test_data)?)?;

        let config = JsonFileConfig {
            suffix: "test",
            description: "Test file",
            required: false,
            max_size_mb: Some(10),
        };

        let files = vec![(config, file_path.to_string_lossy().to_string(), 100)];
        let logger = DebugLogger::new();

        let result = load_files_sequential_with_logging(&files, &logger);
        assert!(result.is_ok());

        let results = result.unwrap();
        assert_eq!(results.len(), 1);
        assert!(results[0].success);

        Ok(())
    }
}