1use crate::export::binary::batch_processor::{BatchProcessor, BatchProcessorConfig};
8use crate::export::binary::cache::{IndexCache, IndexCacheConfig};
9use crate::export::binary::error::BinaryExportError;
10use crate::export::binary::field_parser::{FieldParser, PartialAllocationInfo};
11use crate::export::binary::filter_engine::FilterEngine;
12
13use crate::export::binary::selective_reader::{
14 AllocationField, AllocationFilter, SelectiveReadOptionsBuilder,
15};
16use crate::export::binary::streaming_json_writer::{
17 SelectiveSerializationOptions, StreamingJsonStats, StreamingJsonWriter,
18 StreamingJsonWriterConfig,
19};
20use std::collections::HashSet;
21use std::fs::File;
22use std::io::BufWriter;
23use std::path::Path;
24use std::time::Instant;
25
/// Configuration for [`SelectiveJsonExporter`], aggregating the settings of
/// every subsystem involved in a selective JSON export.
#[derive(Debug, Clone)]
pub struct SelectiveJsonExportConfig {
    /// Settings for the streaming JSON writer (buffering, formatting).
    pub json_writer_config: StreamingJsonWriterConfig,

    /// Settings for batched record reading from the binary file.
    pub batch_processor_config: BatchProcessorConfig,

    /// Settings for the binary-index cache.
    pub index_cache_config: IndexCacheConfig,

    /// Options controlling how selected fields are serialized.
    pub serialization_options: SelectiveSerializationOptions,

    /// Whether multi-file exports may be processed "in parallel".
    /// NOTE(review): `export_multiple_json_types` currently runs sequentially
    /// even when this is set — confirm whether real parallelism is planned.
    pub enable_parallel_processing: bool,

    /// Upper bound on concurrent exports (used as the chunk size when
    /// grouping multi-file exports).
    pub max_concurrent_exports: usize,

    /// Whether to attempt recovery from per-file export errors.
    /// NOTE(review): not consulted anywhere in this file — confirm it is read
    /// elsewhere.
    pub enable_error_recovery: bool,

    /// Whether to collect performance statistics during exports.
    /// NOTE(review): not consulted anywhere in this file — confirm it is read
    /// elsewhere.
    pub enable_performance_monitoring: bool,
}
53
54impl Default for SelectiveJsonExportConfig {
55 fn default() -> Self {
56 Self {
57 json_writer_config: StreamingJsonWriterConfig::default(),
58 batch_processor_config: BatchProcessorConfig::default(),
59 index_cache_config: IndexCacheConfig::default(),
60 serialization_options: SelectiveSerializationOptions::default(),
61 enable_parallel_processing: true,
62 max_concurrent_exports: 4,
63 enable_error_recovery: true,
64 enable_performance_monitoring: true,
65 }
66 }
67}
68
/// Cumulative statistics gathered across all exports performed by a single
/// [`SelectiveJsonExporter`] instance.
#[derive(Debug, Clone, Default)]
pub struct SelectiveJsonExportStats {
    /// Statistics from the streaming JSON writer (overwritten by each export).
    pub json_writer_stats: StreamingJsonStats,

    /// Total time spent across all exports, in microseconds (accumulated).
    pub total_export_time_us: u64,

    /// Number of binary files exported so far.
    pub files_processed: u32,

    /// Total allocation records written across all exports.
    pub total_allocations_exported: u64,

    /// Total bytes written across all exports.
    pub total_bytes_written: u64,

    /// Index-cache hit counter, mirrored from the cache's own cumulative stats.
    pub index_cache_hits: u64,

    /// Index-cache miss counter, mirrored from the cache's own cumulative stats.
    pub index_cache_misses: u64,

    /// Number of errors recovered during export.
    /// NOTE(review): never incremented in this file — confirm it is updated
    /// elsewhere.
    pub errors_recovered: u32,

    /// Derived metric: allocations exported per second (see
    /// `SelectiveJsonExporter::update_derived_stats`).
    pub avg_export_throughput: f64,

    /// Derived metric: average bytes written per exported allocation.
    pub memory_efficiency: f64,
}
102
103impl SelectiveJsonExportStats {
104 pub fn cache_hit_rate(&self) -> f64 {
106 let total_requests = self.index_cache_hits + self.index_cache_misses;
107 if total_requests == 0 {
108 0.0
109 } else {
110 (self.index_cache_hits as f64 / total_requests as f64) * 100.0
111 }
112 }
113
114 pub fn export_efficiency(&self) -> f64 {
116 if self.total_export_time_us == 0 {
117 0.0
118 } else {
119 (self.files_processed as f64 * 1_000_000.0) / self.total_export_time_us as f64
120 }
121 }
122
123 pub fn compression_ratio(&self) -> f64 {
125 if self.total_allocations_exported == 0 {
126 0.0
127 } else {
128 let estimated_full_size = self.total_allocations_exported * 500; if estimated_full_size == 0 {
131 0.0
132 } else {
133 (self.total_bytes_written as f64 / estimated_full_size as f64) * 100.0
134 }
135 }
136 }
137}
138
/// High-level exporter that streams selected allocation fields from a binary
/// capture file into JSON, combining index caching, batched reads, filtering,
/// and streaming serialization.
pub struct SelectiveJsonExporter {
    /// Aggregated configuration for all subsystems.
    config: SelectiveJsonExportConfig,

    /// Cache of per-file binary indexes (avoids rebuilding on repeat exports).
    index_cache: IndexCache,

    /// Batched reader for allocation records.
    batch_processor: BatchProcessor,

    /// Index-based filter evaluator.
    /// NOTE(review): constructed over an empty placeholder index in
    /// `with_config` — confirm its candidate pre-filtering is meaningful
    /// against that index.
    filter_engine: FilterEngine,

    /// Field-level parser; currently unused in this file (kept for later use).
    #[allow(dead_code)]
    field_parser: FieldParser,

    /// Cumulative export statistics.
    stats: SelectiveJsonExportStats,
}
160
impl SelectiveJsonExporter {
    /// Creates an exporter with the default configuration.
    ///
    /// # Errors
    /// Returns an error if the index cache cannot be initialized.
    pub fn new() -> Result<Self, BinaryExportError> {
        Self::with_config(SelectiveJsonExportConfig::default())
    }

    /// Creates an exporter from an explicit configuration.
    ///
    /// # Errors
    /// Returns an error if the index cache cannot be initialized.
    pub fn with_config(config: SelectiveJsonExportConfig) -> Result<Self, BinaryExportError> {
        let index_cache = IndexCache::new(config.index_cache_config.clone())?;
        let batch_processor = BatchProcessor::with_config(config.batch_processor_config.clone());
        // The filter engine requires an index at construction time, but no file
        // has been opened yet, so it is seeded with an empty placeholder index.
        // NOTE(review): `filter_candidates()` later runs against this empty
        // index — confirm it behaves as a pass-through in that case.
        let dummy_index = std::sync::Arc::new(crate::export::binary::index::BinaryIndex::new(
            std::path::PathBuf::new(),
            0,
            0,
            crate::export::binary::format::FileHeader::new_legacy(0),
        ));
        let filter_engine = FilterEngine::new(dummy_index);
        let field_parser = FieldParser::new();

        Ok(Self {
            config,
            index_cache,
            batch_processor,
            filter_engine,
            field_parser,
            stats: SelectiveJsonExportStats::default(),
        })
    }
190
    /// Exports allocations from `binary_path` to `json_path`, writing only
    /// `requested_fields` for records that pass `filters`.
    ///
    /// Pipeline per batch: index offsets → index-level candidate filtering →
    /// batched partial read → promotion to full records for precise filtering →
    /// conversion back to partial records → streaming serialization.
    ///
    /// Returns a snapshot of the cumulative exporter statistics.
    ///
    /// # Errors
    /// Propagates index, I/O, filtering, and serialization errors.
    pub fn export_to_json_selective<P: AsRef<Path>, Q: AsRef<Path>>(
        &mut self,
        binary_path: P,
        json_path: Q,
        requested_fields: &HashSet<AllocationField>,
        filters: &[AllocationFilter],
    ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
        let export_start = Instant::now();

        let index = self.get_or_build_index(&binary_path)?;

        // NOTE(review): the built options are never used below — presumably the
        // `build()?` call exists to validate the field/filter combination
        // up-front; confirm before removing.
        let _read_options = SelectiveReadOptionsBuilder::new()
            .with_fields(requested_fields.clone())
            .filters(filters.to_vec())
            .build()?;

        let mut binary_file = File::open(&binary_path)?;

        let json_file = File::create(&json_path)?;
        let buffered_writer = BufWriter::new(json_file);
        let mut json_writer = StreamingJsonWriter::with_config(
            buffered_writer,
            self.config.json_writer_config.clone(),
        )?;

        json_writer.write_header(index.record_count() as u64)?;

        let mut processed_count = 0;
        let batch_size = self.config.batch_processor_config.batch_size;

        // Walk the index in fixed-size batches of record offsets.
        for batch_start in (0..index.record_count() as usize).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(index.record_count() as usize);
            let batch_offsets: Vec<u64> = (batch_start..batch_end)
                .filter_map(|i| index.get_record_offset(i))
                .collect();

            if batch_offsets.is_empty() {
                continue;
            }

            // Index-level candidate pre-filtering.
            // NOTE(review): `filters` does not change between iterations, so
            // this call is loop-invariant and could be hoisted out of the
            // loop — confirm `filter_candidates` is side-effect-free first.
            let filtered_indices = self.filter_engine.filter_candidates(filters)?;
            let filtered_offsets: Vec<u64> = batch_offsets
                .into_iter()
                .enumerate()
                .filter(|(i, _)| filtered_indices.contains(&(batch_start + i)))
                .map(|(_, offset)| offset)
                .collect();

            if filtered_offsets.is_empty() {
                continue;
            }

            // Read only the requested fields for the surviving offsets.
            let records = self.batch_processor.process_batch(
                &mut binary_file,
                &filtered_offsets,
                requested_fields,
            )?;

            // Promote partial records to full AllocationInfo so record-level
            // ("precise") filters can be evaluated.
            let full_records: Vec<crate::core::types::AllocationInfo> = records
                .records
                .iter()
                .map(|partial| partial.clone().to_full_allocation())
                .collect();

            let filtered_full_records = self
                .filter_engine
                .apply_precise_filters(full_records, filters)?;

            // Convert back to partial records for selective serialization.
            let filtered_records: Vec<PartialAllocationInfo> = filtered_full_records
                .iter()
                .map(|full| PartialAllocationInfo {
                    ptr: Some(full.ptr),
                    size: Some(full.size),
                    var_name: Some(full.var_name.clone()),
                    type_name: Some(full.type_name.clone()),
                    scope_name: Some(full.scope_name.clone()),
                    timestamp_alloc: Some(full.timestamp_alloc),
                    timestamp_dealloc: Some(full.timestamp_dealloc),
                    thread_id: Some(full.thread_id.clone()),
                    borrow_count: Some(full.borrow_count),
                    stack_trace: Some(full.stack_trace.clone()),
                    is_leaked: Some(full.is_leaked),
                    lifetime_ms: Some(full.lifetime_ms),
                    borrow_info: full.borrow_info.clone(),
                    clone_info: full.clone_info.clone(),
                    ownership_history_available: Some(full.ownership_history_available),
                })
                .collect();

            json_writer.write_allocation_batch_with_options(
                &filtered_records,
                requested_fields,
                &self.config.serialization_options,
            )?;

            processed_count += filtered_records.len();
        }

        // Finalize the JSON document and fold the results into the
        // cumulative statistics.
        let json_stats = json_writer.finalize()?;

        self.stats.json_writer_stats = json_stats;
        self.stats.total_export_time_us += export_start.elapsed().as_micros() as u64;
        self.stats.files_processed += 1;
        self.stats.total_allocations_exported += processed_count as u64;
        self.stats.total_bytes_written += self.stats.json_writer_stats.bytes_written;

        self.update_derived_stats();

        Ok(self.stats.clone())
    }
318
    /// Exports several (binary, json) path pairs with the same field set and
    /// filters, returning one statistics snapshot per file.
    ///
    /// Note: even when `enable_parallel_processing` is set, the files are
    /// currently processed sequentially — the chunking below only groups work
    /// by `max_concurrent_exports` without spawning any concurrency.
    ///
    /// # Errors
    /// Stops at and returns the first failing export.
    pub fn export_multiple_json_types<P: AsRef<Path>>(
        &mut self,
        binary_files: &[(P, P)], requested_fields: &HashSet<AllocationField>,
        filters: &[AllocationFilter],
    ) -> Result<Vec<SelectiveJsonExportStats>, BinaryExportError> {
        // Sequential path: parallelism disabled or nothing to parallelize.
        if !self.config.enable_parallel_processing || binary_files.len() <= 1 {
            let mut results = Vec::new();
            for (binary_path, json_path) in binary_files {
                let stats = self.export_to_json_selective(
                    binary_path,
                    json_path,
                    requested_fields,
                    filters,
                )?;
                results.push(stats);
            }
            return Ok(results);
        }

        // "Parallel" path — behaviorally identical to the sequential path:
        // chunks are still iterated in order on this thread.
        let mut results = Vec::new();
        let chunk_size = self.config.max_concurrent_exports;

        for chunk in binary_files.chunks(chunk_size) {
            for (binary_path, json_path) in chunk {
                let stats = self.export_to_json_selective(
                    binary_path,
                    json_path,
                    requested_fields,
                    filters,
                )?;
                results.push(stats);
            }
        }

        Ok(results)
    }
360
361 pub fn export_memory_analysis_json<P: AsRef<Path>, Q: AsRef<Path>>(
363 &mut self,
364 binary_path: P,
365 json_path: Q,
366 ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
367 let fields = AllocationField::memory_analysis_fields();
369
370 self.export_to_json_selective(binary_path, json_path, &fields, &[])
371 }
372
    /// Exports a lifetime view: every allocation is written as an entry of a
    /// top-level "lifecycle_events" array using a fixed six-field projection.
    ///
    /// Unlike `export_to_json_selective`, no filters are applied here.
    ///
    /// # Errors
    /// Propagates index, I/O, and serialization errors.
    pub fn export_lifetime_json<P: AsRef<Path>, Q: AsRef<Path>>(
        &mut self,
        binary_path: P,
        json_path: Q,
    ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
        let export_start = Instant::now();

        let index = self.get_or_build_index(&binary_path)?;

        let mut binary_file = File::open(&binary_path)?;

        let json_file = File::create(&json_path)?;
        let buffered_writer = BufWriter::new(json_file);
        let mut json_writer = StreamingJsonWriter::with_config(
            buffered_writer,
            self.config.json_writer_config.clone(),
        )?;

        // Top-level array is named "lifecycle_events" instead of the default.
        json_writer
            .write_header_with_array_name(index.record_count() as u64, "lifecycle_events")?;

        // Fixed projection for the lifetime view (collected into a field set).
        let fields = [
            AllocationField::Ptr,
            AllocationField::ScopeName,
            AllocationField::Size,
            AllocationField::TimestampAlloc,
            AllocationField::TypeName,
            AllocationField::VarName,
        ]
        .into_iter()
        .collect();

        let mut processed_count = 0;
        let batch_size = self.config.batch_processor_config.batch_size;

        // Stream records batch by batch; every record becomes one event.
        for batch_start in (0..index.record_count() as usize).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(index.record_count() as usize);
            let batch_offsets: Vec<u64> = (batch_start..batch_end)
                .filter_map(|i| index.get_record_offset(i))
                .collect();

            if batch_offsets.is_empty() {
                continue;
            }

            let records =
                self.batch_processor
                    .process_batch(&mut binary_file, &batch_offsets, &fields)?;

            // Only "allocation" events are emitted; no deallocation events are
            // generated by this export.
            for record in &records.records {
                json_writer.write_lifecycle_event(record, "allocation")?;
            }

            processed_count += records.records.len();
        }

        let json_stats = json_writer.finalize()?;

        self.stats.json_writer_stats = json_stats;
        self.stats.total_export_time_us += export_start.elapsed().as_micros() as u64;
        self.stats.files_processed += 1;
        self.stats.total_allocations_exported += processed_count as u64;
        self.stats.total_bytes_written += self.stats.json_writer_stats.bytes_written;

        self.update_derived_stats();

        Ok(self.stats.clone())
    }
451
452 pub fn export_performance_json<P: AsRef<Path>, Q: AsRef<Path>>(
454 &mut self,
455 binary_path: P,
456 json_path: Q,
457 ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
458 let fields = [
459 AllocationField::BorrowCount,
460 AllocationField::Ptr,
461 AllocationField::Size,
462 AllocationField::ThreadId,
463 AllocationField::TimestampAlloc,
464 AllocationField::TypeName,
465 AllocationField::VarName,
466 ]
467 .into_iter()
468 .collect();
469
470 self.export_to_json_selective(binary_path, json_path, &fields, &[])
471 }
472
    /// Exports an unsafe/FFI view: a JSON object containing an (always empty)
    /// "boundary_events" array and an "enhanced_ffi_data" array with one entry
    /// per allocation, using a fixed seven-field projection.
    ///
    /// The surrounding JSON object is written manually via `write_raw`.
    /// NOTE(review): `finalize()` runs after the closing brace has already
    /// been written — confirm it does not append further structure.
    ///
    /// # Errors
    /// Propagates index, I/O, and serialization errors.
    pub fn export_unsafe_ffi_json<P: AsRef<Path>, Q: AsRef<Path>>(
        &mut self,
        binary_path: P,
        json_path: Q,
    ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
        let export_start = Instant::now();

        let index = self.get_or_build_index(&binary_path)?;

        let mut binary_file = File::open(&binary_path)?;

        let json_file = File::create(&json_path)?;
        let buffered_writer = BufWriter::new(json_file);
        let mut json_writer = StreamingJsonWriter::with_config(
            buffered_writer,
            self.config.json_writer_config.clone(),
        )?;

        // Hand-written envelope: open the object and both arrays.
        json_writer.write_raw("{\n")?;
        json_writer.write_raw(" \"boundary_events\": [],\n")?;
        json_writer.write_raw(" \"enhanced_ffi_data\": [\n")?;

        // Fixed projection for the unsafe/FFI view.
        let fields = [
            AllocationField::Ptr,
            AllocationField::Size,
            AllocationField::StackTrace,
            AllocationField::ThreadId,
            AllocationField::TimestampAlloc,
            AllocationField::TypeName,
            AllocationField::VarName,
        ]
        .into_iter()
        .collect();

        let mut processed_count = 0;
        let batch_size = self.config.batch_processor_config.batch_size;

        for batch_start in (0..index.record_count() as usize).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(index.record_count() as usize);
            let batch_offsets: Vec<u64> = (batch_start..batch_end)
                .filter_map(|i| index.get_record_offset(i))
                .collect();

            if batch_offsets.is_empty() {
                continue;
            }

            let records =
                self.batch_processor
                    .process_batch(&mut binary_file, &batch_offsets, &fields)?;

            for record in &records.records {
                json_writer.write_unsafe_ffi_allocation(record)?;
            }

            processed_count += records.records.len();
        }

        // Close the hand-written envelope.
        json_writer.write_raw("\n ]\n")?;
        json_writer.write_raw("}\n")?;

        let json_stats = json_writer.finalize()?;

        self.stats.json_writer_stats = json_stats;
        self.stats.total_export_time_us += export_start.elapsed().as_micros() as u64;
        self.stats.files_processed += 1;
        self.stats.total_allocations_exported += processed_count as u64;
        self.stats.total_bytes_written += self.stats.json_writer_stats.bytes_written;

        self.update_derived_stats();

        Ok(self.stats.clone())
    }
557
    /// Exports a complex-types view: a JSON object with a "categorized_types"
    /// map whose only category is "primitive".
    ///
    /// NOTE(review): every record is written into the "primitive" array
    /// regardless of its actual type — confirm whether per-category
    /// classification is still pending.
    ///
    /// The surrounding JSON object is written manually via `write_raw`.
    ///
    /// # Errors
    /// Propagates index, I/O, and serialization errors.
    pub fn export_complex_types_json<P: AsRef<Path>, Q: AsRef<Path>>(
        &mut self,
        binary_path: P,
        json_path: Q,
    ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
        let export_start = Instant::now();

        let index = self.get_or_build_index(&binary_path)?;

        let mut binary_file = File::open(&binary_path)?;

        let json_file = File::create(&json_path)?;
        let buffered_writer = BufWriter::new(json_file);
        let mut json_writer = StreamingJsonWriter::with_config(
            buffered_writer,
            self.config.json_writer_config.clone(),
        )?;

        // Hand-written envelope: object → "categorized_types" → "primitive".
        json_writer.write_raw("{\n")?;
        json_writer.write_raw(" \"categorized_types\": {\n")?;
        json_writer.write_raw(" \"primitive\": [\n")?;

        // Minimal projection for the complex-types view.
        let fields = [
            AllocationField::Ptr,
            AllocationField::Size,
            AllocationField::TypeName,
            AllocationField::VarName,
        ]
        .into_iter()
        .collect();

        let mut processed_count = 0;
        let batch_size = self.config.batch_processor_config.batch_size;

        for batch_start in (0..index.record_count() as usize).step_by(batch_size) {
            let batch_end = (batch_start + batch_size).min(index.record_count() as usize);
            let batch_offsets: Vec<u64> = (batch_start..batch_end)
                .filter_map(|i| index.get_record_offset(i))
                .collect();

            if batch_offsets.is_empty() {
                continue;
            }

            let records =
                self.batch_processor
                    .process_batch(&mut binary_file, &batch_offsets, &fields)?;

            for record in &records.records {
                json_writer.write_complex_types_allocation(record)?;
            }

            processed_count += records.records.len();
        }

        // Close the hand-written envelope.
        json_writer.write_raw("\n ]\n")?;
        json_writer.write_raw(" }\n")?;
        json_writer.write_raw("}\n")?;

        let json_stats = json_writer.finalize()?;

        self.stats.json_writer_stats = json_stats;
        self.stats.total_export_time_us += export_start.elapsed().as_micros() as u64;
        self.stats.files_processed += 1;
        self.stats.total_allocations_exported += processed_count as u64;
        self.stats.total_bytes_written += self.stats.json_writer_stats.bytes_written;

        self.update_derived_stats();

        Ok(self.stats.clone())
    }
640
641 pub fn export_all_standard_json_types<P: AsRef<Path>, Q: AsRef<Path>>(
643 &mut self,
644 binary_path: P,
645 output_dir: Q,
646 base_name: &str,
647 ) -> Result<Vec<SelectiveJsonExportStats>, BinaryExportError> {
648 let output_dir = output_dir.as_ref();
649 let mut results = Vec::new();
650
651 let memory_path = output_dir.join(format!("{base_name}_memory_analysis.json"));
653 results.push(self.export_memory_analysis_json(&binary_path, &memory_path)?);
654
655 let lifetime_path = output_dir.join(format!("{base_name}_lifetime.json"));
657 results.push(self.export_lifetime_json(&binary_path, &lifetime_path)?);
658
659 let performance_path = output_dir.join(format!("{base_name}_performance.json"));
661 results.push(self.export_performance_json(&binary_path, &performance_path)?);
662
663 let unsafe_ffi_path = output_dir.join(format!("{base_name}_unsafe_ffi.json"));
665 results.push(self.export_unsafe_ffi_json(&binary_path, &unsafe_ffi_path)?);
666
667 let complex_types_path = output_dir.join(format!("{base_name}_complex_types.json"));
669 results.push(self.export_complex_types_json(&binary_path, &complex_types_path)?);
670
671 Ok(results)
672 }
673
674 pub fn export_with_auto_field_selection<P: AsRef<Path>>(
676 &mut self,
677 binary_path: P,
678 json_path: P,
679 optimization_level: OptimizationLevel,
680 ) -> Result<SelectiveJsonExportStats, BinaryExportError> {
681 let index = self.get_or_build_index(&binary_path)?;
683 let auto_fields = self.analyze_optimal_fields(&index, optimization_level)?;
684 let auto_filters = self.analyze_optimal_filters(&index, optimization_level)?;
685
686 self.export_to_json_selective(&binary_path, &json_path, &auto_fields, &auto_filters)
687 }
688
    /// Returns a reference to the cumulative export statistics.
    pub fn get_stats(&self) -> &SelectiveJsonExportStats {
        &self.stats
    }

    /// Resets all cumulative statistics to their zero/default values.
    pub fn reset_stats(&mut self) {
        self.stats = SelectiveJsonExportStats::default();
    }

    /// Clears both the index cache and the batch processor's cache.
    /// A failure to clear the index cache is deliberately ignored
    /// (best-effort cleanup).
    pub fn clear_caches(&mut self) {
        let _ = self.index_cache.clear();
        self.batch_processor.clear_cache();
    }
704
    /// Fetches the binary index for `binary_path` from the cache, building and
    /// caching it first if necessary, and mirrors the cache's hit/miss
    /// counters into the exporter statistics.
    ///
    /// # Errors
    /// Propagates cache and index-building errors.
    fn get_or_build_index<P: AsRef<Path>>(
        &mut self,
        binary_path: P,
    ) -> Result<crate::export::binary::index::BinaryIndex, BinaryExportError> {
        let path = binary_path.as_ref();

        let index_builder = crate::export::binary::index_builder::BinaryIndexBuilder::new();
        let index = self.index_cache.get_or_build_index(path, &index_builder)?;

        // Copy (not accumulate) the counters: they are already cumulative on
        // the cache side.
        let cache_stats = self.index_cache.get_stats();
        self.stats.index_cache_hits = cache_stats.cache_hits;
        self.stats.index_cache_misses = cache_stats.cache_misses;

        Ok(index)
    }
725
726 fn analyze_optimal_fields(
728 &self,
729 index: &crate::export::binary::index::BinaryIndex,
730 optimization_level: OptimizationLevel,
731 ) -> Result<HashSet<AllocationField>, BinaryExportError> {
732 let mut fields = HashSet::new();
733
734 fields.insert(AllocationField::Ptr);
736 fields.insert(AllocationField::Size);
737 fields.insert(AllocationField::TimestampAlloc);
738
739 match optimization_level {
740 OptimizationLevel::Minimal => {
741 }
743 OptimizationLevel::Balanced => {
744 fields.insert(AllocationField::VarName);
746 fields.insert(AllocationField::TypeName);
747 fields.insert(AllocationField::ThreadId);
748 fields.insert(AllocationField::IsLeaked);
749 }
750 OptimizationLevel::Comprehensive => {
751 fields.extend(AllocationField::all_fields());
753 }
754 }
755
756 let available_fields = self.analyze_available_fields(index)?;
758 fields.retain(|field| available_fields.contains(field));
759
760 Ok(fields)
761 }
762
763 fn analyze_optimal_filters(
765 &self,
766 _index: &crate::export::binary::index::BinaryIndex,
767 optimization_level: OptimizationLevel,
768 ) -> Result<Vec<AllocationFilter>, BinaryExportError> {
769 let mut filters = Vec::new();
770
771 match optimization_level {
772 OptimizationLevel::Minimal => {
773 }
775 OptimizationLevel::Balanced => {
776 filters.push(AllocationFilter::SizeRange(32, usize::MAX));
778 }
779 OptimizationLevel::Comprehensive => {
780 filters.push(AllocationFilter::SizeRange(16, usize::MAX));
782 }
783 }
784
785 Ok(filters)
786 }
787
    /// Determines which fields are present in the indexed binary file.
    ///
    /// Currently a placeholder: reports every field as available regardless of
    /// the index contents.
    fn analyze_available_fields(
        &self,
        _index: &crate::export::binary::index::BinaryIndex,
    ) -> Result<HashSet<AllocationField>, BinaryExportError> {
        Ok(AllocationField::all_fields())
    }
797
798 fn update_derived_stats(&mut self) {
800 if self.stats.total_export_time_us > 0 {
801 self.stats.avg_export_throughput = (self.stats.total_allocations_exported as f64
802 * 1_000_000.0)
803 / self.stats.total_export_time_us as f64;
804 }
805
806 if self.stats.total_allocations_exported > 0 {
807 self.stats.memory_efficiency = self.stats.total_bytes_written as f64
808 / self.stats.total_allocations_exported as f64;
809 }
810 }
811}
812
impl Default for SelectiveJsonExporter {
    /// Creates an exporter with the default configuration.
    ///
    /// # Panics
    /// Panics if construction fails (e.g. cache initialization); prefer
    /// [`SelectiveJsonExporter::new`] to handle the error instead.
    fn default() -> Self {
        Self::new().expect("Failed to create default SelectiveJsonExporter")
    }
}
818
/// Preset levels controlling how aggressively fields and filters are chosen
/// by [`SelectiveJsonExporter::export_with_auto_field_selection`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum OptimizationLevel {
    /// Core fields only (ptr, size, allocation timestamp); no filters.
    Minimal,
    /// Core fields plus naming/threading/leak info; allocations under
    /// 32 bytes are filtered out.
    Balanced,
    /// All available fields; allocations under 16 bytes are filtered out.
    Comprehensive,
}
829
/// Fluent builder for [`SelectiveJsonExportConfig`].
pub struct SelectiveJsonExportConfigBuilder {
    // Configuration being assembled; starts from the defaults.
    config: SelectiveJsonExportConfig,
}
834
835impl SelectiveJsonExportConfigBuilder {
836 pub fn new() -> Self {
838 Self {
839 config: SelectiveJsonExportConfig::default(),
840 }
841 }
842
843 pub fn json_writer_config(mut self, config: StreamingJsonWriterConfig) -> Self {
845 self.config.json_writer_config = config;
846 self
847 }
848
849 pub fn batch_processor_config(mut self, config: BatchProcessorConfig) -> Self {
851 self.config.batch_processor_config = config;
852 self
853 }
854
855 pub fn index_cache_config(mut self, config: IndexCacheConfig) -> Self {
857 self.config.index_cache_config = config;
858 self
859 }
860
861 pub fn serialization_options(mut self, options: SelectiveSerializationOptions) -> Self {
863 self.config.serialization_options = options;
864 self
865 }
866
867 pub fn parallel_processing(mut self, enabled: bool) -> Self {
869 self.config.enable_parallel_processing = enabled;
870 self
871 }
872
873 pub fn max_concurrent_exports(mut self, max: usize) -> Self {
875 self.config.max_concurrent_exports = max;
876 self
877 }
878
879 pub fn error_recovery(mut self, enabled: bool) -> Self {
881 self.config.enable_error_recovery = enabled;
882 self
883 }
884
885 pub fn performance_monitoring(mut self, enabled: bool) -> Self {
887 self.config.enable_performance_monitoring = enabled;
888 self
889 }
890
891 pub fn build(self) -> SelectiveJsonExportConfig {
893 self.config
894 }
895}
896
impl Default for SelectiveJsonExportConfigBuilder {
    /// Equivalent to [`SelectiveJsonExportConfigBuilder::new`].
    fn default() -> Self {
        Self::new()
    }
}
902
#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashSet;
    use tempfile::TempDir;

    /// Builds an exporter backed by a temp-dir cache with parallelism
    /// disabled, for deterministic unit tests.
    ///
    /// NOTE(review): the `TempDir` guard is dropped when this function
    /// returns, which removes the cache directory while the exporter still
    /// holds its path — confirm the cache tolerates a missing directory.
    fn create_test_exporter() -> SelectiveJsonExporter {
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let cache_config = IndexCacheConfig {
            cache_directory: temp_dir.path().to_path_buf(),
            max_entries: 100,
            max_age_seconds: 3600,
            enable_compression: false,
        };

        let config = SelectiveJsonExportConfig {
            index_cache_config: cache_config,
            enable_parallel_processing: false, max_concurrent_exports: 1,
            enable_error_recovery: true,
            enable_performance_monitoring: true,
            ..Default::default()
        };

        SelectiveJsonExporter::with_config(config).expect("Failed to create test exporter")
    }
929
    /// Exporter construction with an explicit temp-dir cache should succeed.
    #[test]
    fn test_selective_json_exporter_creation() {
        let temp_dir = tempfile::TempDir::new().expect("Failed to get test value");
        let cache_config = IndexCacheConfig {
            cache_directory: temp_dir.path().to_path_buf(),
            max_entries: 1000,
            max_age_seconds: 3600,
            enable_compression: false,
        };

        let config = SelectiveJsonExportConfig {
            index_cache_config: cache_config,
            ..Default::default()
        };

        let exporter = SelectiveJsonExporter::with_config(config);
        assert!(
            exporter.is_ok(),
            "Failed to create SelectiveJsonExporter: {:?}",
            exporter.err()
        );
    }

    /// Smoke test: `new()` may succeed or fail depending on the environment
    /// (default cache location); both outcomes are accepted, so this only
    /// verifies the call does not panic.
    #[test]
    fn test_selective_json_exporter_new() {
        let result = SelectiveJsonExporter::new();
        match result {
            Ok(_exporter) => {
                // Construction succeeded — nothing further to verify.
            }
            Err(_) => {
                // Environment-dependent failure is tolerated.
            }
        }
    }

    /// Smoke test: `Default` panics on construction failure, so the panic is
    /// caught and both outcomes are accepted.
    #[test]
    fn test_selective_json_exporter_default() {
        let result = std::panic::catch_unwind(|| {
            let _exporter = SelectiveJsonExporter::default();
        });
        match result {
            Ok(_) => {
                // Default construction succeeded.
            }
            Err(_) => {
                // Panic from a failed construction is tolerated.
            }
        }
    }
985
    /// The builder applies the setters it is given and leaves the rest alone.
    #[test]
    fn test_config_builder() {
        let config = SelectiveJsonExportConfigBuilder::new()
            .parallel_processing(false)
            .max_concurrent_exports(2)
            .error_recovery(false)
            .build();

        assert!(!config.enable_parallel_processing);
        assert_eq!(config.max_concurrent_exports, 2);
        assert!(!config.enable_error_recovery);
    }

    /// Exercises every builder setter and checks the resulting config fields.
    #[test]
    fn test_config_builder_all_methods() {
        let json_writer_config = StreamingJsonWriterConfig::default();
        let batch_processor_config = BatchProcessorConfig::default();
        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let index_cache_config = IndexCacheConfig {
            cache_directory: temp_dir.path().to_path_buf(),
            max_entries: 500,
            max_age_seconds: 1800,
            enable_compression: true,
        };
        let serialization_options = SelectiveSerializationOptions::default();

        let config = SelectiveJsonExportConfigBuilder::new()
            .json_writer_config(json_writer_config.clone())
            .batch_processor_config(batch_processor_config.clone())
            .index_cache_config(index_cache_config.clone())
            .serialization_options(serialization_options.clone())
            .parallel_processing(true)
            .max_concurrent_exports(8)
            .error_recovery(true)
            .performance_monitoring(false)
            .build();

        // Sub-configs are compared via representative fields rather than
        // whole-struct equality.
        assert_eq!(
            config.json_writer_config.buffer_size,
            json_writer_config.buffer_size
        );
        assert_eq!(
            config.batch_processor_config.batch_size,
            batch_processor_config.batch_size
        );
        assert_eq!(
            config.index_cache_config.max_entries,
            index_cache_config.max_entries
        );
        assert!(config.enable_parallel_processing);
        assert_eq!(config.max_concurrent_exports, 8);
        assert!(config.enable_error_recovery);
        assert!(!config.enable_performance_monitoring);
    }

    /// `Builder::default()` must be equivalent to `Builder::new()`.
    #[test]
    fn test_config_builder_default() {
        let builder1 = SelectiveJsonExportConfigBuilder::new();
        let builder2 = SelectiveJsonExportConfigBuilder::default();

        let config1 = builder1.build();
        let config2 = builder2.build();

        assert_eq!(
            config1.enable_parallel_processing,
            config2.enable_parallel_processing
        );
        assert_eq!(
            config1.max_concurrent_exports,
            config2.max_concurrent_exports
        );
        assert_eq!(config1.enable_error_recovery, config2.enable_error_recovery);
    }
1059
    /// Pins the documented default values of the export configuration.
    #[test]
    fn test_selective_json_export_config_default() {
        let config = SelectiveJsonExportConfig::default();

        assert!(config.enable_parallel_processing);
        assert_eq!(config.max_concurrent_exports, 4);
        assert!(config.enable_error_recovery);
        assert!(config.enable_performance_monitoring);
    }

    /// `OptimizationLevel` equality/inequality across all variant pairs.
    #[test]
    fn test_optimization_levels() {
        assert_eq!(OptimizationLevel::Minimal, OptimizationLevel::Minimal);
        assert_eq!(OptimizationLevel::Balanced, OptimizationLevel::Balanced);
        assert_eq!(
            OptimizationLevel::Comprehensive,
            OptimizationLevel::Comprehensive
        );

        assert_ne!(OptimizationLevel::Minimal, OptimizationLevel::Balanced);
        assert_ne!(
            OptimizationLevel::Balanced,
            OptimizationLevel::Comprehensive
        );
        assert_ne!(OptimizationLevel::Minimal, OptimizationLevel::Comprehensive);
    }

    /// A default stats struct must start with every counter/metric at zero.
    #[test]
    fn test_export_stats_default() {
        let stats = SelectiveJsonExportStats::default();

        assert_eq!(stats.total_export_time_us, 0);
        assert_eq!(stats.files_processed, 0);
        assert_eq!(stats.total_allocations_exported, 0);
        assert_eq!(stats.total_bytes_written, 0);
        assert_eq!(stats.index_cache_hits, 0);
        assert_eq!(stats.index_cache_misses, 0);
        assert_eq!(stats.errors_recovered, 0);
        assert_eq!(stats.avg_export_throughput, 0.0);
        assert_eq!(stats.memory_efficiency, 0.0);
    }
1101
    /// Derived-metric math: 8/10 hits → 80% hit rate; 5 files in 1 s →
    /// 5.0 files/sec; 50 000 bytes vs. est. 500 000 (1000 × 500) → ratio > 0.
    #[test]
    fn test_export_stats_calculations() {
        let stats = SelectiveJsonExportStats {
            index_cache_hits: 8,
            index_cache_misses: 2,
            total_export_time_us: 1_000_000, files_processed: 5,
            total_allocations_exported: 1000,
            total_bytes_written: 50000,
            ..Default::default()
        };

        assert_eq!(stats.cache_hit_rate(), 80.0);
        assert_eq!(stats.export_efficiency(), 5.0); assert!(stats.compression_ratio() > 0.0);
    }

    /// Zero-division guards plus the 0% and 100% hit-rate extremes.
    #[test]
    fn test_export_stats_edge_cases() {
        let stats = SelectiveJsonExportStats::default();

        // All-zero stats: every derived metric falls back to 0.0.
        assert_eq!(stats.cache_hit_rate(), 0.0);
        assert_eq!(stats.export_efficiency(), 0.0);
        assert_eq!(stats.compression_ratio(), 0.0);

        // All misses → 0% hit rate; other metrics still compute normally.
        let stats = SelectiveJsonExportStats {
            index_cache_hits: 0,
            index_cache_misses: 10,
            total_export_time_us: 1_000_000,
            files_processed: 1,
            total_allocations_exported: 100,
            total_bytes_written: 5000,
            ..Default::default()
        };

        assert_eq!(stats.cache_hit_rate(), 0.0);
        assert_eq!(stats.export_efficiency(), 1.0);
        assert!(stats.compression_ratio() > 0.0);

        // All hits → 100% hit rate.
        let stats = SelectiveJsonExportStats {
            index_cache_hits: 10,
            index_cache_misses: 0,
            total_export_time_us: 1_000_000,
            files_processed: 1,
            total_allocations_exported: 100,
            total_bytes_written: 5000,
            ..Default::default()
        };

        assert_eq!(stats.cache_hit_rate(), 100.0);
    }
1156
    /// A fresh exporter starts with zeroed stats, and resetting keeps them zero.
    #[test]
    fn test_field_analysis() {
        let mut exporter = create_test_exporter();

        let stats = exporter.get_stats();
        assert_eq!(stats.files_processed, 0);
        assert_eq!(stats.total_allocations_exported, 0);

        exporter.reset_stats();
        assert_eq!(exporter.get_stats().files_processed, 0);
    }

    /// `clear_caches` on a fresh exporter must not panic or alter counters.
    #[test]
    fn test_cache_operations() {
        let mut exporter = create_test_exporter();

        exporter.clear_caches();

        let stats = exporter.get_stats();
        assert_eq!(stats.index_cache_hits, 0);
        assert_eq!(stats.index_cache_misses, 0);
    }

    /// `reset_stats` zeroes previously populated counters.
    #[test]
    fn test_stats_reset() {
        let mut exporter = create_test_exporter();

        // Populate the stats directly (tests live in the same module, so
        // private fields are accessible).
        exporter.stats.files_processed = 5;
        exporter.stats.total_allocations_exported = 1000;
        exporter.stats.total_bytes_written = 50000;

        assert_eq!(exporter.get_stats().files_processed, 5);
        assert_eq!(exporter.get_stats().total_allocations_exported, 1000);

        exporter.reset_stats();

        assert_eq!(exporter.get_stats().files_processed, 0);
        assert_eq!(exporter.get_stats().total_allocations_exported, 0);
        assert_eq!(exporter.get_stats().total_bytes_written, 0);
    }
1204
    /// An empty input slice must yield an empty result list.
    #[test]
    fn test_export_multiple_json_types_empty() {
        let mut exporter = create_test_exporter();
        let binary_files: Vec<(&str, &str)> = vec![];
        let fields = HashSet::new();
        let filters = vec![];

        let result = exporter.export_multiple_json_types(&binary_files, &fields, &filters);
        assert!(result.is_ok());

        let results = result.unwrap();
        assert_eq!(results.len(), 0);
    }

    /// A file of junk bytes is not a valid binary capture, so the export is
    /// expected to fail rather than produce output.
    #[test]
    fn test_export_multiple_json_types_single_file() {
        let mut exporter = create_test_exporter();

        let temp_dir = TempDir::new().expect("Failed to create temp dir");
        let binary_path = temp_dir.path().join("test.bin");
        let json_path = temp_dir.path().join("test.json");

        std::fs::write(&binary_path, b"dummy binary data").expect("Failed to write test file");

        let binary_files = vec![(&binary_path, &json_path)];
        let fields = HashSet::new();
        let filters = vec![];

        let result = exporter.export_multiple_json_types(&binary_files, &fields, &filters);
        assert!(result.is_err());
    }
1240
1241 #[test]
1242 fn test_analyze_optimal_fields() {
1243 let exporter = create_test_exporter();
1244
1245 let temp_dir = TempDir::new().expect("Failed to create temp dir");
1247 let dummy_path = temp_dir.path().join("dummy.bin");
1248 let header = crate::export::binary::format::FileHeader::new_legacy(0);
1249 let index = crate::export::binary::index::BinaryIndex::new(dummy_path, 0, 0, header);
1250
1251 let fields = exporter.analyze_optimal_fields(&index, OptimizationLevel::Minimal);
1253 assert!(fields.is_ok());
1254 let fields = fields.unwrap();
1255 assert!(fields.contains(&AllocationField::Ptr));
1256 assert!(fields.contains(&AllocationField::Size));
1257 assert!(fields.contains(&AllocationField::TimestampAlloc));
1258
1259 let fields = exporter.analyze_optimal_fields(&index, OptimizationLevel::Balanced);
1261 assert!(fields.is_ok());
1262 let fields = fields.unwrap();
1263 assert!(fields.contains(&AllocationField::Ptr));
1264 assert!(fields.contains(&AllocationField::VarName));
1265 assert!(fields.contains(&AllocationField::TypeName));
1266
1267 let fields = exporter.analyze_optimal_fields(&index, OptimizationLevel::Comprehensive);
1269 assert!(fields.is_ok());
1270 let fields = fields.unwrap();
1271 assert!(!fields.is_empty());
1272 }
1273
1274 #[test]
1275 fn test_analyze_optimal_filters() {
1276 let exporter = create_test_exporter();
1277
1278 let temp_dir = TempDir::new().expect("Failed to create temp dir");
1280 let dummy_path = temp_dir.path().join("dummy.bin");
1281 let header = crate::export::binary::format::FileHeader::new_legacy(0);
1282 let index = crate::export::binary::index::BinaryIndex::new(dummy_path, 0, 0, header);
1283
1284 let filters = exporter.analyze_optimal_filters(&index, OptimizationLevel::Minimal);
1286 assert!(filters.is_ok());
1287 let filters = filters.unwrap();
1288 assert_eq!(filters.len(), 0);
1289
1290 let filters = exporter.analyze_optimal_filters(&index, OptimizationLevel::Balanced);
1292 assert!(filters.is_ok());
1293 let filters = filters.unwrap();
1294 assert_eq!(filters.len(), 1);
1295 if let AllocationFilter::SizeRange(min, max) = &filters[0] {
1296 assert_eq!(*min, 32);
1297 assert_eq!(*max, usize::MAX);
1298 } else {
1299 panic!("Expected SizeRange filter");
1300 }
1301
1302 let filters = exporter.analyze_optimal_filters(&index, OptimizationLevel::Comprehensive);
1304 assert!(filters.is_ok());
1305 let filters = filters.unwrap();
1306 assert_eq!(filters.len(), 1);
1307 if let AllocationFilter::SizeRange(min, max) = &filters[0] {
1308 assert_eq!(*min, 16);
1309 assert_eq!(*max, usize::MAX);
1310 } else {
1311 panic!("Expected SizeRange filter");
1312 }
1313 }
1314
1315 #[test]
1316 fn test_analyze_available_fields() {
1317 let exporter = create_test_exporter();
1318
1319 let temp_dir = TempDir::new().expect("Failed to create temp dir");
1321 let dummy_path = temp_dir.path().join("dummy.bin");
1322 let header = crate::export::binary::format::FileHeader::new_legacy(0);
1323 let index = crate::export::binary::index::BinaryIndex::new(dummy_path, 0, 0, header);
1324
1325 let available_fields = exporter.analyze_available_fields(&index);
1326 assert!(available_fields.is_ok());
1327
1328 let fields = available_fields.unwrap();
1329 assert!(!fields.is_empty());
1330 assert!(fields.contains(&AllocationField::Ptr));
1332 assert!(fields.contains(&AllocationField::Size));
1333 }
1334
1335 #[test]
1336 fn test_update_derived_stats() {
1337 let mut exporter = create_test_exporter();
1338
1339 exporter.stats.total_export_time_us = 1_000_000; exporter.stats.total_allocations_exported = 1000;
1342 exporter.stats.total_bytes_written = 50000;
1343
1344 exporter.update_derived_stats();
1346
1347 assert!(exporter.stats.avg_export_throughput > 0.0);
1349 assert!(exporter.stats.memory_efficiency > 0.0);
1350
1351 let original_throughput = exporter.stats.avg_export_throughput;
1353 exporter.stats.total_export_time_us = 0;
1354 exporter.update_derived_stats();
1355 assert_eq!(exporter.stats.avg_export_throughput, original_throughput);
1357
1358 let original_efficiency = exporter.stats.memory_efficiency;
1360 exporter.stats.total_export_time_us = 1_000_000;
1361 exporter.stats.total_allocations_exported = 0;
1362 exporter.update_derived_stats();
1363 assert_eq!(exporter.stats.memory_efficiency, original_efficiency);
1365 }
1366
1367 #[test]
1368 fn test_compression_ratio_calculation() {
1369 let stats = SelectiveJsonExportStats {
1370 total_allocations_exported: 100,
1371 total_bytes_written: 25000, ..Default::default()
1373 };
1374
1375 let ratio = stats.compression_ratio();
1376 assert_eq!(ratio, 50.0);
1378
1379 let stats = SelectiveJsonExportStats {
1381 total_allocations_exported: 0,
1382 total_bytes_written: 1000,
1383 ..Default::default()
1384 };
1385
1386 let ratio = stats.compression_ratio();
1387 assert_eq!(ratio, 0.0);
1388 }
1389
1390 #[test]
1391 fn test_export_efficiency_calculation() {
1392 let stats = SelectiveJsonExportStats {
1393 total_export_time_us: 2_000_000, files_processed: 10,
1395 ..Default::default()
1396 };
1397
1398 let efficiency = stats.export_efficiency();
1399 assert_eq!(efficiency, 5.0);
1401
1402 let stats = SelectiveJsonExportStats {
1404 total_export_time_us: 0,
1405 files_processed: 10,
1406 ..Default::default()
1407 };
1408
1409 let efficiency = stats.export_efficiency();
1410 assert_eq!(efficiency, 0.0);
1411 }
1412
1413 #[test]
1414 fn test_cache_hit_rate_calculation() {
1415 let stats = SelectiveJsonExportStats {
1417 index_cache_hits: 75,
1418 index_cache_misses: 25,
1419 ..Default::default()
1420 };
1421
1422 let hit_rate = stats.cache_hit_rate();
1423 assert_eq!(hit_rate, 75.0);
1424
1425 let stats = SelectiveJsonExportStats {
1427 index_cache_hits: 0,
1428 index_cache_misses: 0,
1429 ..Default::default()
1430 };
1431
1432 let hit_rate = stats.cache_hit_rate();
1433 assert_eq!(hit_rate, 0.0);
1434
1435 let stats = SelectiveJsonExportStats {
1437 index_cache_hits: 100,
1438 index_cache_misses: 0,
1439 ..Default::default()
1440 };
1441
1442 let hit_rate = stats.cache_hit_rate();
1443 assert_eq!(hit_rate, 100.0);
1444 }
1445
1446 #[test]
1447 fn test_debug_implementations() {
1448 let config = SelectiveJsonExportConfig::default();
1449 let debug_str = format!("{:?}", config);
1450 assert!(debug_str.contains("SelectiveJsonExportConfig"));
1451
1452 let stats = SelectiveJsonExportStats::default();
1453 let debug_str = format!("{:?}", stats);
1454 assert!(debug_str.contains("SelectiveJsonExportStats"));
1455
1456 let optimization_level = OptimizationLevel::Balanced;
1457 let debug_str = format!("{:?}", optimization_level);
1458 assert!(debug_str.contains("Balanced"));
1459 }
1460
1461 #[test]
1462 fn test_clone_implementations() {
1463 let config = SelectiveJsonExportConfig::default();
1464 let cloned_config = config.clone();
1465 assert_eq!(
1466 config.enable_parallel_processing,
1467 cloned_config.enable_parallel_processing
1468 );
1469 assert_eq!(
1470 config.max_concurrent_exports,
1471 cloned_config.max_concurrent_exports
1472 );
1473
1474 let stats = SelectiveJsonExportStats::default();
1475 let cloned_stats = stats.clone();
1476 assert_eq!(stats.files_processed, cloned_stats.files_processed);
1477 assert_eq!(
1478 stats.total_allocations_exported,
1479 cloned_stats.total_allocations_exported
1480 );
1481
1482 let optimization_level = OptimizationLevel::Comprehensive;
1483 let cloned_level = optimization_level;
1484 assert_eq!(optimization_level, cloned_level);
1485 }
1486}