use clap::ArgMatches;
use rayon::prelude::*;
use serde_json::Value;
use std::collections::HashMap;
use std::error::Error;
use std::fs;
use std::path::Path;
use std::time::Instant;

mod data_integrator;
pub mod data_normalizer;
pub mod debug_logger;
pub mod error_handler;
pub mod json_file_discovery;
pub mod large_file_optimizer;
pub mod template_generator;

use data_integrator::DataIntegrator;
use data_normalizer::DataNormalizer;
use debug_logger::{DebugConfig, DebugLogger, LogLevel};
use error_handler::{ErrorRecoveryContext, HtmlErrorHandler};
use json_file_discovery::{JsonFileConfig, JsonFileDiscovery};
use large_file_optimizer::{LargeFileConfig, LargeFileOptimizer};

pub mod direct_json_template;

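/// Generate an HTML report from previously exported JSON files, or stop after
/// validation when the `validate-only` flag is set.
///
/// The clap argument ids consumed here are `input-dir`, `output`, `base-name`,
/// and the flags `validate-only`, `verbose`, `debug`, and `performance`.
///
/// A minimal invocation sketch; the `Command` definition below is an
/// illustrative assumption for documentation purposes, not this crate's
/// actual CLI definition:
///
/// ```no_run
/// use clap::{Arg, ArgAction, Command};
///
/// // Hypothetical CLI wiring matching the argument ids used by this function.
/// let matches = Command::new("html-from-json")
///     .arg(Arg::new("input-dir").long("input-dir").required(true))
///     .arg(Arg::new("output").long("output"))
///     .arg(Arg::new("base-name").long("base-name"))
///     .arg(Arg::new("validate-only").long("validate-only").action(ArgAction::SetTrue))
///     .arg(Arg::new("verbose").long("verbose").action(ArgAction::SetTrue))
///     .arg(Arg::new("debug").long("debug").action(ArgAction::SetTrue))
///     .arg(Arg::new("performance").long("performance").action(ArgAction::SetTrue))
///     .get_matches_from(["html-from-json", "--input-dir", "./json_out", "--output", "report.html"]);
/// // run_html_from_json(&matches)?;
/// ```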
pub fn run_html_from_json(matches: &ArgMatches) -> Result<(), Box<dyn Error>> {
    let input_dir = matches
        .get_one::<String>("input-dir")
        .ok_or("Input directory is required")?;
    let validate_only = matches.get_flag("validate-only");
    let default_output = "validation_only.html".to_string();
    let output_file = if validate_only {
        matches
            .get_one::<String>("output")
            .unwrap_or(&default_output)
    } else {
        matches
            .get_one::<String>("output")
            .ok_or("Output HTML file is required")?
    };
    let base_name = matches
        .get_one::<String>("base-name")
        .map(|s| s.as_str())
        .unwrap_or("snapshot");

    let verbose = matches.get_flag("verbose");
    let debug_mode = matches.get_flag("debug");
    let performance_mode = matches.get_flag("performance");

    let debug_config = DebugConfig {
        log_level: if debug_mode {
            LogLevel::Debug
        } else {
            LogLevel::Info
        },
        enable_timing: performance_mode || verbose,
        enable_progress: verbose || debug_mode,
        enable_memory_tracking: performance_mode || debug_mode,
        enable_file_ops: debug_mode,
        enable_json_details: debug_mode,
        progress_interval_ms: if debug_mode { 500 } else { 2000 },
        include_timestamps: debug_mode || performance_mode,
    };

    let logger = DebugLogger::with_config(debug_config);

    logger.info("Generating HTML report from JSON files...");
    logger.info(&format!("Input directory: {}", input_dir));
    logger.info(&format!("Output file: {}", output_file));
    logger.info(&format!("Base name: {}", base_name));

    if verbose {
        logger.info(&format!(
            "Debug mode: {}, Verbose: {}, Performance: {}",
            debug_mode, verbose, performance_mode
        ));
    }

    logger.start_progress(5, "Initializing HTML generation");

    let discovery_timing = logger.start_timing("json_file_discovery");
    logger.next_progress_step("Loading JSON files", 1);
    let json_data = load_json_files_with_logging(input_dir, base_name, &logger)?;
    let discovery_time = logger.end_timing(&discovery_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.discovery_time_ms = discovery_time);

    logger.next_progress_step("Normalizing data", 1);
    let normalization_timing = logger.start_timing("data_normalization");
    let normalizer = DataNormalizer::new();
    let mut unified_data = normalizer.normalize(&json_data)?;
    let normalization_time = logger.end_timing(&normalization_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.normalization_time_ms = normalization_time);
    logger.debug(&format!(
        "Normalized {} allocations",
        unified_data.allocations.len()
    ));

    if validate_only {
        logger.info("JSON validation completed successfully!");
        logger.info("Validation results:");
        logger.info(&format!(" - Files loaded: {}", json_data.len()));
        logger.info(&format!(
            " - Allocations found: {}",
            unified_data.allocations.len()
        ));
        logger.info(&format!(
            " - Lifecycle events: {}",
            unified_data.lifecycle.lifecycle_events.len()
        ));
        logger.info(" - Performance data: Available");
        logger.info(&format!(
            " - Security violations: {}",
            unified_data.security.total_violations
        ));
        logger.info(&format!(
            " - Complex types: {}",
            unified_data.complex_types.summary.total_complex_types
        ));
        logger.info(&format!(
            " - Active memory: {} bytes",
            unified_data.stats.active_memory
        ));
        logger.info(&format!(
            " - Peak memory: {} bytes",
            unified_data.stats.peak_memory
        ));
        return Ok(());
    }

    logger.next_progress_step("Integrating data sources", 1);
    let integration_timing = logger.start_timing("data_integration");
    let integrator = DataIntegrator::new();
    let integration_stats = integrator.integrate(&mut unified_data)?;
    let integration_time = logger.end_timing(&integration_timing).unwrap_or(0);
    logger.update_stats(|stats| stats.integration_time_ms = integration_time);

    logger.debug(&format!(
        "Integration completed: {} cross-references, {} conflicts resolved",
        integration_stats.cross_references_found, integration_stats.conflicts_resolved
    ));

    println!("Integration Statistics:");
    println!(
        " Cross-references found: {}",
        integration_stats.cross_references_found
    );
    println!(
        " Conflicts resolved: {}",
        integration_stats.conflicts_resolved
    );
    println!(
        " Data enrichments: {}",
        integration_stats.enrichments_performed
    );
    println!(
        " Index build time: {}ms",
        integration_stats.index_build_time_ms
    );
    println!(
        " Total integration time: {}ms",
        integration_stats.integration_time_ms
    );

    {
        logger.next_progress_step("Generating HTML template", 1);
        let template_timing = logger.start_timing("template_generation");
        logger.info("Using direct JSON data template with charts...");
        let html_content = direct_json_template::generate_direct_html(&json_data)?;
        let template_time = logger.end_timing(&template_timing).unwrap_or(0);
        logger.update_stats(|stats| stats.template_time_ms = template_time);

        let template_stats =
            crate::cli::commands::html_from_json::template_generator::TemplateStats {
                template_size_bytes: html_content.len(),
                css_processing_time_ms: 0,
                js_processing_time_ms: 0,
                serialization_time_ms: 0,
                generation_time_ms: template_time, // measured above, not a placeholder
                cache_hit_rate: 0.0,
                compression_ratio: Some(1.0),
            };

        println!("Template Generation Statistics:");
        println!(
            " Template size: {:.1} KB",
            template_stats.template_size_bytes as f64 / 1024.0
        );
        println!(
            " CSS processing: {}ms",
            template_stats.css_processing_time_ms
        );
        println!(
            " JS processing: {}ms",
            template_stats.js_processing_time_ms
        );
        println!(
            " Data serialization: {}ms",
            template_stats.serialization_time_ms
        );
        println!(
            " Total generation time: {}ms",
            template_stats.generation_time_ms
        );
        println!(" Cache hit rate: {:.1}%", template_stats.cache_hit_rate);

        let output_path = if Path::new(output_file).is_absolute() || output_file.contains('/') {
            output_file.to_string()
        } else {
            format!("{}/{}", input_dir.trim_end_matches('/'), output_file)
        };

        logger.next_progress_step("Writing HTML file", 1);
        let write_timing = logger.start_timing("file_write");
        logger.info(&format!("Writing HTML file to: {}", output_path));

        fs::write(&output_path, &html_content)?;
        logger.end_timing(&write_timing);

        logger.log_file_operation("written", &output_path, Some(html_content.len()));

        logger.info("HTML report generated successfully!");
        logger.info(&format!(
            "Open {} in your browser to view the interactive report",
            output_path
        ));

        if performance_mode || debug_mode {
            logger.print_performance_report();
            logger.print_memory_info();
        }
    }

    Ok(())
}

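/// Outcome of attempting to load and parse one JSON export file.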
#[derive(Debug)]
pub struct JsonLoadResult {
    /// File-type suffix this result belongs to (e.g. "memory_analysis").
    pub suffix: String,
    /// Whether the file was loaded and parsed successfully.
    pub success: bool,
    /// The parsed JSON document, when loading succeeded.
    pub data: Option<Value>,
    /// Error or warning message, when anything went wrong.
    pub error: Option<String>,
    /// Size of the source file in bytes.
    pub file_size: usize,
    /// Wall-clock time spent loading this file, in milliseconds.
    pub load_time_ms: u64,
}

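/// Aggregate statistics for a whole JSON loading pass, summarized at the end
/// by `print_load_statistics_with_logging`.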
#[derive(Debug)]
pub struct JsonLoadStats {
    /// Number of files discovered and attempted.
    pub total_files_attempted: usize,
    /// Files loaded and parsed successfully.
    pub files_loaded: usize,
    /// Files skipped during the pass.
    pub files_skipped: usize,
    /// Files that failed to load or parse.
    pub files_failed: usize,
    /// Combined size of all successfully loaded files, in bytes.
    pub total_size_bytes: usize,
    /// Total wall-clock loading time, in milliseconds.
    pub total_load_time_ms: u64,
    /// Whether the parallel loading path was used.
    pub parallel_loading_used: bool,
}

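/// Parsed JSON documents keyed by file-type suffix.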
type JsonDataCollection = HashMap<String, Value>;

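/// Discover, load, and parse every JSON export for `base_name` under
/// `input_dir`. Chooses between sequential and parallel loading based on file
/// count, total size, and the presence of large files, and reports progress,
/// timing, and recovery information through `logger`.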
fn load_json_files_with_logging(
    input_dir: &str,
    base_name: &str,
    logger: &DebugLogger,
) -> Result<JsonDataCollection, Box<dyn Error>> {
    let start_time = Instant::now();

    logger.debug("Starting optimized JSON file loading with comprehensive error handling...");
    logger.debug(&format!("Directory: {}", input_dir));
    logger.debug(&format!("Base name: {}", base_name));

    let recovery_context = ErrorRecoveryContext {
        attempt_recovery: true,
        max_retries: 3,
        allow_partial_data: true,
        use_fallbacks: true,
        verbose_errors: false,
    };
    let mut error_handler = HtmlErrorHandler::with_context(recovery_context);

    let discovery_timing = logger.start_timing("file_discovery");
    let discovery = JsonFileDiscovery::new(input_dir.to_string(), base_name.to_string());
    let discovery_result = match discovery.discover_files() {
        Ok(result) => {
            logger.debug(&format!(
                "Discovery completed: {} files found",
                result.found_files.len()
            ));
            result
        }
        Err(e) => {
            match error_handler.handle_file_discovery_error(input_dir, base_name, Box::new(e)) {
                Ok(alternatives) => {
                    // Alternative directories were found, but recovery is not
                    // attempted automatically; report them and fail.
                    logger.warn(&format!(
                        "Found alternative directories: {:?}",
                        alternatives
                    ));
                    return Err("JSON file discovery failed after attempting recovery".into());
                }
                Err(handled_error) => {
                    logger.error(&format!("{}", handled_error));
                    return Err(handled_error.into());
                }
            }
        }
    };
    logger.end_timing(&discovery_timing);

    let mut valid_files = Vec::new();
    let mut total_size = 0usize;

    for file_info in &discovery_result.found_files {
        let file_path = file_info.path.to_string_lossy().to_string();
        let file_size = file_info.size_bytes as usize;

        logger.log_file_operation("discovered", &file_path, Some(file_size));

        total_size += file_size;
        valid_files.push((file_info.config.clone(), file_path, file_size));
    }

    if valid_files.is_empty() {
        let error_msg =
            "No valid JSON files found! Please check the input directory and base name.";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    logger.info(&format!(
        "Found {} valid files, total size: {:.1} MB",
        valid_files.len(),
        total_size as f64 / 1024.0 / 1024.0
    ));

    logger.update_progress_items(valid_files.len());

    let has_large_files = valid_files
        .iter()
        .any(|(_, _, size)| *size > 20 * 1024 * 1024);
    let use_parallel = valid_files.len() >= 3 || total_size >= 10 * 1024 * 1024 || has_large_files;

    if use_parallel {
        logger.info(&format!(
            "Using parallel loading for {} files (total: {:.1} MB, has large files: {})",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0,
            has_large_files
        ));
    } else {
        logger.info(&format!(
            "Using sequential loading for {} files (total: {:.1} MB)",
            valid_files.len(),
            total_size as f64 / 1024.0 / 1024.0
        ));
    }

    let loading_timing = logger.start_timing("file_loading");
    let results = if use_parallel {
        load_files_parallel_with_logging(&valid_files, logger)?
    } else {
        load_files_sequential_with_logging(&valid_files, logger)?
    };
    let loading_time = logger.end_timing(&loading_timing).unwrap_or(0);

    let mut data = JsonDataCollection::new();
    let mut stats = JsonLoadStats {
        total_files_attempted: valid_files.len(),
        files_loaded: 0,
        files_skipped: 0,
        files_failed: 0,
        total_size_bytes: 0,
        total_load_time_ms: start_time.elapsed().as_millis() as u64,
        parallel_loading_used: use_parallel,
    };

    for result in results {
        if result.success {
            if let Some(json_data) = result.data {
                data.insert(result.suffix.clone(), json_data);
                stats.files_loaded += 1;
                stats.total_size_bytes += result.file_size;
                logger.debug(&format!(
                    "Loaded {} ({:.1} KB in {}ms)",
                    result.suffix,
                    result.file_size as f64 / 1024.0,
                    result.load_time_ms
                ));
            }
        } else {
            stats.files_failed += 1;
            logger.error(&format!(
                "Failed to load {}: {}",
                result.suffix,
                result.error.unwrap_or_else(|| "Unknown error".to_string())
            ));
        }
    }

    print_load_statistics_with_logging(&stats, logger);

    error_handler.print_recovery_summary();

    logger.update_stats(|perf_stats| {
        perf_stats.loading_time_ms = loading_time;
        perf_stats.files_processed = stats.files_loaded;
        perf_stats.data_size_bytes = stats.total_size_bytes;
    });

    if data.is_empty() {
        let error_msg = "No JSON files were successfully loaded!";
        logger.error(error_msg);
        return Err(error_msg.into());
    }

    Ok(data)
}

#[allow(dead_code)]
fn load_json_files(input_dir: &str, base_name: &str) -> Result<JsonDataCollection, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_json_files_with_logging(input_dir, base_name, &logger)
}

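/// Load files concurrently using rayon's parallel iterator; per-file timing
/// and file operations are still reported through the shared `logger`.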
fn load_files_parallel_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting parallel file loading");

    let results: Vec<JsonLoadResult> = files
        .par_iter()
        .enumerate()
        .map(|(index, (config, file_path, file_size))| {
            let file_timing = format!("load_file_{}", index);
            let timing_id = logger.start_timing(&file_timing);

            logger.log_file_operation("loading", file_path, Some(*file_size));
            let result = load_single_file_with_recovery(config, file_path, *file_size);

            logger.end_timing(&timing_id);
            result
        })
        .collect();

    logger.debug("Parallel file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_parallel(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_parallel_with_logging(files, &logger)
}

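/// Load files one at a time, updating the progress counter after each file;
/// used when the file set is too small for parallel loading to pay off.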
fn load_files_sequential_with_logging(
    files: &[(JsonFileConfig, String, usize)],
    logger: &DebugLogger,
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    logger.debug("Starting sequential file loading");
    let mut results = Vec::new();

    for (index, (config, file_path, file_size)) in files.iter().enumerate() {
        let file_timing = format!("load_file_{}", index);
        let timing_id = logger.start_timing(&file_timing);

        logger.log_file_operation("loading", file_path, Some(*file_size));
        let result = load_single_file_with_recovery(config, file_path, *file_size);

        logger.end_timing(&timing_id);
        results.push(result);

        logger.update_progress_items(index + 1);
    }

    logger.debug("Sequential file loading completed");
    Ok(results)
}

#[allow(dead_code)]
fn load_files_sequential(
    files: &[(JsonFileConfig, String, usize)],
) -> Result<Vec<JsonLoadResult>, Box<dyn Error>> {
    let logger = DebugLogger::new();
    load_files_sequential_with_logging(files, &logger)
}

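/// Wrapper around `load_single_file_internal` that converts any error into a
/// failed `JsonLoadResult`, so a single bad file cannot abort a whole batch.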
fn load_single_file_with_recovery(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
) -> JsonLoadResult {
    let mut local_error_handler = HtmlErrorHandler::new();

    match load_single_file_internal(config, file_path, file_size, &mut local_error_handler) {
        Ok(result) => result,
        Err(e) => JsonLoadResult {
            suffix: config.suffix.to_string(),
            success: false,
            data: None,
            error: Some(e.to_string()),
            file_size,
            load_time_ms: 0,
        },
    }
}

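/// Load and parse a single JSON file. Files larger than 50 MB (or more than
/// half their configured per-type limit) go through the streaming
/// `LargeFileOptimizer`; smaller files are read into memory, parsed, and
/// checked with `validate_json_structure`. Failures are routed through the
/// provided `HtmlErrorHandler`, which may supply fallback data.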
fn load_single_file_internal(
    config: &JsonFileConfig,
    file_path: &str,
    file_size: usize,
    error_handler: &mut HtmlErrorHandler,
) -> Result<JsonLoadResult, Box<dyn Error>> {
    let start_time = Instant::now();

    // Stream files over 50 MB, or over half of the configured per-type limit.
    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .map_or(false, |max_mb| file_size > max_mb * 1024 * 1024 / 2);

    if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024, // 256 MB memory budget
            stream_chunk_size: 128 * 1024,       // 128 KB read chunks
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                println!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                Ok(JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                })
            }
            Err(e) => {
                let file_path_buf = std::path::PathBuf::from(file_path);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(e),
                ) {
                    Ok(Some(recovered_data)) => {
                        println!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    } else {
        match std::fs::read_to_string(file_path) {
            Ok(content) => {
                match serde_json::from_str::<Value>(&content) {
                    Ok(json_value) => {
                        if let Err(validation_error) =
                            validate_json_structure(&json_value, config.suffix)
                        {
                            let validation_err = error_handler.handle_validation_error(
                                std::path::PathBuf::from(file_path),
                                config.suffix,
                                &validation_error,
                                &json_value,
                            );

                            eprintln!("{}", validation_err);

                            // Tolerate invalid data until too many errors accumulate.
                            let allow_partial = {
                                let stats = error_handler.get_stats();
                                stats.total_errors < 5
                            };
                            if allow_partial {
                                println!(
                                    "Continuing with potentially invalid data for {}",
                                    config.suffix
                                );
                                Ok(JsonLoadResult {
                                    suffix: config.suffix.to_string(),
                                    success: true,
                                    data: Some(json_value),
                                    error: Some(format!("Validation warning: {validation_error}")),
                                    file_size,
                                    load_time_ms: start_time.elapsed().as_millis() as u64,
                                })
                            } else {
                                Err(validation_err.into())
                            }
                        } else {
                            Ok(JsonLoadResult {
                                suffix: config.suffix.to_string(),
                                success: true,
                                data: Some(json_value),
                                error: None,
                                file_size,
                                load_time_ms: start_time.elapsed().as_millis() as u64,
                            })
                        }
                    }
                    Err(e) => {
                        let parsing_err = error_handler.handle_json_parsing_error(
                            std::path::PathBuf::from(file_path),
                            &e.to_string(),
                        );

                        eprintln!("{}", parsing_err);
                        Err(parsing_err.into())
                    }
                }
            }
            Err(e) => {
                let file_path_buf = std::path::PathBuf::from(file_path);
                match error_handler.handle_file_loading_error(
                    file_path_buf,
                    config.suffix,
                    file_size,
                    Box::new(e),
                ) {
                    Ok(Some(recovered_data)) => {
                        println!("Recovered data for {} using fallback", config.suffix);
                        Ok(JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(recovered_data),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        })
                    }
                    Ok(None) => Err(format!(
                        "Failed to load {} and no fallback available",
                        config.suffix
                    )
                    .into()),
                    Err(handled_error) => Err(handled_error.into()),
                }
            }
        }
    }
}

#[allow(dead_code)]
fn load_single_file(config: &JsonFileConfig, file_path: &str, file_size: usize) -> JsonLoadResult {
    let start_time = Instant::now();

    let use_large_file_optimizer = file_size > 50 * 1024 * 1024
        || config
            .max_size_mb
            .map_or(false, |max_mb| file_size > max_mb * 1024 * 1024 / 2);

    if use_large_file_optimizer {
        let large_file_config = LargeFileConfig {
            max_memory_bytes: 256 * 1024 * 1024,
            stream_chunk_size: 128 * 1024,
            enable_memory_monitoring: true,
            enable_progress_reporting: true,
            max_file_size_bytes: config.max_size_mb.unwrap_or(500) * 1024 * 1024,
        };

        let optimizer = LargeFileOptimizer::new(large_file_config);

        match optimizer.process_file(file_path, config.suffix) {
            Ok((json_value, processing_stats)) => {
                println!(
                    "Large file processing stats for {}: {:.1} MB/s, {} objects, streaming: {}",
                    config.suffix,
                    processing_stats.throughput_mb_per_sec,
                    processing_stats.objects_processed,
                    processing_stats.streaming_mode_used
                );

                JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: true,
                    data: Some(json_value),
                    error: None,
                    file_size,
                    load_time_ms: processing_stats.processing_time_ms,
                }
            }
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("Large file processing error: {}", e)),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    } else {
        match fs::read_to_string(file_path) {
            Ok(content) => match serde_json::from_str::<Value>(&content) {
                Ok(json_value) => {
                    if let Err(validation_error) =
                        validate_json_structure(&json_value, config.suffix)
                    {
                        JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: false,
                            data: None,
                            error: Some(format!("Validation error: {validation_error}")),
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        }
                    } else {
                        JsonLoadResult {
                            suffix: config.suffix.to_string(),
                            success: true,
                            data: Some(json_value),
                            error: None,
                            file_size,
                            load_time_ms: start_time.elapsed().as_millis() as u64,
                        }
                    }
                }
                Err(e) => JsonLoadResult {
                    suffix: config.suffix.to_string(),
                    success: false,
                    data: None,
                    error: Some(format!("JSON parsing error: {e}")),
                    file_size,
                    load_time_ms: start_time.elapsed().as_millis() as u64,
                },
            },
            Err(e) => JsonLoadResult {
                suffix: config.suffix.to_string(),
                success: false,
                data: None,
                error: Some(format!("File read error: {e}")),
                file_size,
                load_time_ms: start_time.elapsed().as_millis() as u64,
            },
        }
    }
}

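/// Structural sanity check on a parsed document: `memory_analysis` and
/// `performance` files must be JSON objects; any other file type may be
/// either an object or an array.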
fn validate_json_structure(json: &Value, file_type: &str) -> Result<(), String> {
    match file_type {
        "memory_analysis" => {
            if !json.is_object() {
                return Err("Memory analysis JSON must be an object".to_string());
            }
        }
        "performance" => {
            if !json.is_object() {
                return Err("Performance JSON must be an object".to_string());
            }
        }
        _ => {
            if !json.is_object() && !json.is_array() {
                return Err("JSON must be an object or array".to_string());
            }
        }
    }
    Ok(())
}

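/// Log a human-readable summary of a loading pass: counts, total size,
/// timing, throughput, and a rough memory-efficiency estimate.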
fn print_load_statistics_with_logging(stats: &JsonLoadStats, logger: &DebugLogger) {
    logger.info("\nJSON Loading Statistics:");
    logger.info(&format!(
        " Files attempted: {}",
        stats.total_files_attempted
    ));
    logger.info(&format!(" Files loaded: {}", stats.files_loaded));
    logger.info(&format!(" Files failed: {}", stats.files_failed));
    logger.info(&format!(
        " Total size: {:.1} MB",
        stats.total_size_bytes as f64 / 1024.0 / 1024.0
    ));
    logger.info(&format!(" Total time: {}ms", stats.total_load_time_ms));
    logger.info(&format!(
        " Parallel loading: {}",
        if stats.parallel_loading_used {
            "Yes"
        } else {
            "No"
        }
    ));

    if stats.files_loaded > 0 {
        let avg_time = stats.total_load_time_ms / stats.files_loaded as u64;
        let throughput = if stats.total_load_time_ms > 0 {
            (stats.total_size_bytes as f64 / 1024.0 / 1024.0)
                / (stats.total_load_time_ms as f64 / 1000.0)
        } else {
            0.0
        };
        logger.info(&format!(" Average time per file: {}ms", avg_time));
        logger.info(&format!(" Throughput: {:.1} MB/s", throughput));

        let memory_efficiency = if stats.total_size_bytes > 0 {
            // Rough estimate: assume peak memory is about 1.5x the raw JSON size.
            let estimated_peak_memory = stats.total_size_bytes as f64 * 1.5;
            let efficiency = (stats.total_size_bytes as f64 / estimated_peak_memory) * 100.0;
            format!("{:.1}%", efficiency)
        } else {
            "N/A".to_string()
        };
        logger.info(&format!(" Memory efficiency: {}", memory_efficiency));
    }
    logger.info("");
}