//! JSON vs binary format comparison example.
//!
//! Serializes several data shapes with both formats provided by
//! `train_station` and walks through the size, speed, and debuggability
//! trade-offs between them.

use std::{collections::HashMap, fs, time::Instant};

use train_station::serialization::{
    FieldValue, FromFieldValue, SerializationError, SerializationResult, StructDeserializer,
    StructSerializable, StructSerializer, ToFieldValue,
};

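/// Timing and resource statistics for a single benchmarked operation.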
#[derive(Debug, Clone, PartialEq)]
pub struct PerformanceMetrics {
    pub operation: String,
    pub duration_micros: u64,
    pub memory_usage_bytes: usize,
    pub cpu_usage_percent: f32,
    pub throughput_ops_per_sec: f64,
    pub metadata: HashMap<String, String>,
}

impl StructSerializable for PerformanceMetrics {
    fn to_serializer(&self) -> StructSerializer {
        StructSerializer::new()
            .field("operation", &self.operation)
            .field("duration_micros", &self.duration_micros)
            .field("memory_usage_bytes", &self.memory_usage_bytes)
            .field("cpu_usage_percent", &self.cpu_usage_percent)
            .field("throughput_ops_per_sec", &self.throughput_ops_per_sec)
            .field("metadata", &self.metadata)
    }

    fn from_deserializer(deserializer: &mut StructDeserializer) -> SerializationResult<Self> {
        let operation = deserializer.field("operation")?;
        let duration_micros = deserializer.field("duration_micros")?;
        let memory_usage_bytes = deserializer.field("memory_usage_bytes")?;
        let cpu_usage_percent = deserializer.field("cpu_usage_percent")?;
        let throughput_ops_per_sec = deserializer.field("throughput_ops_per_sec")?;
        let metadata = deserializer.field("metadata")?;

        Ok(PerformanceMetrics {
            operation,
            duration_micros,
            memory_usage_bytes,
            cpu_usage_percent,
            throughput_ops_per_sec,
            metadata,
        })
    }
}

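// Nested-struct support: the whole struct is embedded as a single JSON object
// `FieldValue` on the way out, and either a JSON or binary object is accepted
// on the way back in.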
impl ToFieldValue for PerformanceMetrics {
    fn to_field_value(&self) -> FieldValue {
        match self.to_json() {
            Ok(json_str) => FieldValue::from_json_object(json_str),
            Err(_) => FieldValue::from_string("serialization_error".to_string()),
        }
    }
}

impl FromFieldValue for PerformanceMetrics {
    fn from_field_value(value: FieldValue, field_name: &str) -> SerializationResult<Self> {
        // Prefer the JSON object representation when it is available.
        if let Ok(json_data) = value.as_json_object() {
            return Self::from_json(json_data).map_err(|e| SerializationError::ValidationFailed {
                field: field_name.to_string(),
                message: format!("Failed to deserialize PerformanceMetrics from JSON: {}", e),
            });
        }

        // Otherwise fall back to the binary object representation.
        if let Ok(binary_data) = value.as_binary_object() {
            return Self::from_binary(binary_data).map_err(|e| {
                SerializationError::ValidationFailed {
                    field: field_name.to_string(),
                    message: format!(
                        "Failed to deserialize PerformanceMetrics from binary: {}",
                        e
                    ),
                }
            });
        }

        Err(SerializationError::ValidationFailed {
            field: field_name.to_string(),
            message: format!(
                "Expected JsonObject or BinaryObject for PerformanceMetrics, found {}",
                value.type_name()
            ),
        })
    }
}

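/// A numeric-heavy dataset that shows where the binary format pays off.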
#[derive(Debug, Clone, PartialEq)]
pub struct LargeDataset {
    pub name: String,
    pub values: Vec<f32>,
    pub labels: Vec<String>,
    pub feature_count: usize,
    pub feature_dimension: usize,
    pub metadata: HashMap<String, String>,
    pub timestamp_count: usize,
}

impl StructSerializable for LargeDataset {
    fn to_serializer(&self) -> StructSerializer {
        StructSerializer::new()
            .field("name", &self.name)
            .field("values", &self.values)
            .field("labels", &self.labels)
            .field("feature_count", &self.feature_count)
            .field("feature_dimension", &self.feature_dimension)
            .field("metadata", &self.metadata)
            .field("timestamp_count", &self.timestamp_count)
    }

    fn from_deserializer(deserializer: &mut StructDeserializer) -> SerializationResult<Self> {
        let name = deserializer.field("name")?;
        let values = deserializer.field("values")?;
        let labels = deserializer.field("labels")?;
        let feature_count = deserializer.field("feature_count")?;
        let feature_dimension = deserializer.field("feature_dimension")?;
        let metadata = deserializer.field("metadata")?;
        let timestamp_count = deserializer.field("timestamp_count")?;

        Ok(LargeDataset {
            name,
            values,
            labels,
            feature_count,
            feature_dimension,
            metadata,
            timestamp_count,
        })
    }
}

impl ToFieldValue for LargeDataset {
    fn to_field_value(&self) -> FieldValue {
        match self.to_json() {
            Ok(json_str) => FieldValue::from_json_object(json_str),
            Err(_) => FieldValue::from_string("serialization_error".to_string()),
        }
    }
}

impl FromFieldValue for LargeDataset {
    fn from_field_value(value: FieldValue, field_name: &str) -> SerializationResult<Self> {
        // Prefer the JSON object representation when it is available.
        if let Ok(json_data) = value.as_json_object() {
            return Self::from_json(json_data).map_err(|e| SerializationError::ValidationFailed {
                field: field_name.to_string(),
                message: format!("Failed to deserialize LargeDataset from JSON: {}", e),
            });
        }

        // Otherwise fall back to the binary object representation.
        if let Ok(binary_data) = value.as_binary_object() {
            return Self::from_binary(binary_data).map_err(|e| {
                SerializationError::ValidationFailed {
                    field: field_name.to_string(),
                    message: format!("Failed to deserialize LargeDataset from binary: {}", e),
                }
            });
        }

        Err(SerializationError::ValidationFailed {
            field: field_name.to_string(),
            message: format!(
                "Expected JsonObject or BinaryObject for LargeDataset, found {}",
                value.type_name()
            ),
        })
    }
}

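/// A small, text-heavy settings struct where JSON readability matters most.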
#[derive(Debug, Clone, PartialEq)]
pub struct Configuration {
    pub version: String,
    pub debug_enabled: bool,
    pub log_level: String,
    pub database_settings: HashMap<String, String>,
    pub feature_flags_enabled: bool,
    pub max_connections: f32,
    pub timeout_seconds: f32,
}

impl StructSerializable for Configuration {
    fn to_serializer(&self) -> StructSerializer {
        StructSerializer::new()
            .field("version", &self.version)
            .field("debug_enabled", &self.debug_enabled)
            .field("log_level", &self.log_level)
            .field("database_settings", &self.database_settings)
            .field("feature_flags_enabled", &self.feature_flags_enabled)
            .field("max_connections", &self.max_connections)
            .field("timeout_seconds", &self.timeout_seconds)
    }

    fn from_deserializer(deserializer: &mut StructDeserializer) -> SerializationResult<Self> {
        let version = deserializer.field("version")?;
        let debug_enabled = deserializer.field("debug_enabled")?;
        let log_level = deserializer.field("log_level")?;
        let database_settings = deserializer.field("database_settings")?;
        let feature_flags_enabled = deserializer.field("feature_flags_enabled")?;
        let max_connections = deserializer.field("max_connections")?;
        let timeout_seconds = deserializer.field("timeout_seconds")?;

        Ok(Configuration {
            version,
            debug_enabled,
            log_level,
            database_settings,
            feature_flags_enabled,
            max_connections,
            timeout_seconds,
        })
    }
}

impl ToFieldValue for Configuration {
    fn to_field_value(&self) -> FieldValue {
        match self.to_json() {
            Ok(json_str) => FieldValue::from_json_object(json_str),
            Err(_) => FieldValue::from_string("serialization_error".to_string()),
        }
    }
}

impl FromFieldValue for Configuration {
    fn from_field_value(value: FieldValue, field_name: &str) -> SerializationResult<Self> {
        // Prefer the JSON object representation when it is available.
        if let Ok(json_data) = value.as_json_object() {
            return Self::from_json(json_data).map_err(|e| SerializationError::ValidationFailed {
                field: field_name.to_string(),
                message: format!("Failed to deserialize Configuration from JSON: {}", e),
            });
        }

        // Otherwise fall back to the binary object representation.
        if let Ok(binary_data) = value.as_binary_object() {
            return Self::from_binary(binary_data).map_err(|e| {
                SerializationError::ValidationFailed {
                    field: field_name.to_string(),
                    message: format!("Failed to deserialize Configuration from binary: {}", e),
                }
            });
        }

        Err(SerializationError::ValidationFailed {
            field: field_name.to_string(),
            message: format!(
                "Expected JsonObject or BinaryObject for Configuration, found {}",
                value.type_name()
            ),
        })
    }
}

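/// Size and timing results for one data type, collected across both formats.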
#[derive(Debug)]
pub struct FormatComparison {
    pub data_type: String,
    pub json_size_bytes: u64,
    pub binary_size_bytes: u64,
    pub json_serialize_micros: u64,
    pub binary_serialize_micros: u64,
    pub json_deserialize_micros: u64,
    pub binary_deserialize_micros: u64,
    pub size_ratio: f64,
    pub serialize_speed_ratio: f64,
    pub deserialize_speed_ratio: f64,
}

impl FormatComparison {
    fn new(data_type: String) -> Self {
        Self {
            data_type,
            json_size_bytes: 0,
            binary_size_bytes: 0,
            json_serialize_micros: 0,
            binary_serialize_micros: 0,
            json_deserialize_micros: 0,
            binary_deserialize_micros: 0,
            size_ratio: 0.0,
            serialize_speed_ratio: 0.0,
            deserialize_speed_ratio: 0.0,
        }
    }

    fn calculate_ratios(&mut self) {
        // All ratios are JSON relative to binary, so values above 1.0 favor
        // the binary format. The speed ratios divide JSON time by binary time
        // to match the "binary relative speed" reporting below. `.max(1)`
        // guards against division by zero when a fast run rounds down to
        // zero microseconds.
        self.size_ratio = self.json_size_bytes as f64 / self.binary_size_bytes.max(1) as f64;
        self.serialize_speed_ratio =
            self.json_serialize_micros as f64 / self.binary_serialize_micros.max(1) as f64;
        self.deserialize_speed_ratio =
            self.json_deserialize_micros as f64 / self.binary_deserialize_micros.max(1) as f64;
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("=== JSON vs Binary Format Comparison Example ===\n");

    demonstrate_format_characteristics()?;
    demonstrate_size_comparisons()?;
    demonstrate_performance_benchmarks()?;
    demonstrate_use_case_recommendations()?;
    demonstrate_debugging_capabilities()?;
    cleanup_temp_files()?;

    println!("\n=== Example completed successfully! ===");
    Ok(())
}

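/// Serializes one value in both formats and contrasts their observable
/// properties: size, readability, and roundtrip fidelity.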
fn demonstrate_format_characteristics() -> Result<(), Box<dyn std::error::Error>> {
    println!("--- Format Characteristics ---");

    let mut metadata = HashMap::new();
    metadata.insert("operation_type".to_string(), "benchmark".to_string());
    metadata.insert("system".to_string(), "train_station".to_string());

    let metrics = PerformanceMetrics {
        operation: "tensor_multiplication".to_string(),
        duration_micros: 1234,
        memory_usage_bytes: 8192,
        cpu_usage_percent: 75.5,
        throughput_ops_per_sec: 1000.0,
        metadata,
    };

    println!("Format characteristics analysis:");

    // JSON: human-readable, self-describing text.
    let json_data = metrics.to_json()?;
    let json_lines = json_data.lines().count();
    let json_chars = json_data.chars().count();

    println!("\nJSON Format:");
    println!("  Size: {} bytes", json_data.len());
    println!("  Characters: {}", json_chars);
    println!("  Lines: {}", json_lines);
    println!("  Human readable: Yes");
    println!("  Self-describing: Yes");
    println!("  Cross-platform: Yes");
    println!("  Compression ratio: Variable (depends on content)");

    println!("  Sample output:");
    for line in json_data.lines().take(3) {
        println!("    {}", line);
    }
    if json_lines > 3 {
        println!("    ... ({} more lines)", json_lines - 3);
    }

    // Binary: compact, schema-dependent bytes.
    let binary_data = metrics.to_binary()?;

    println!("\nBinary Format:");
    println!("  Size: {} bytes", binary_data.len());
    println!("  Human readable: No");
    println!("  Self-describing: No (requires schema)");
    println!("  Cross-platform: Yes (with proper endianness handling)");
    println!("  Compression ratio: High (efficient encoding)");

    println!("  Sample output (first 32 bytes as hex):");
    print!("    ");
    for (i, byte) in binary_data.iter().take(32).enumerate() {
        if i > 0 && i % 16 == 0 {
            println!();
            print!("    ");
        }
        print!("{:02x} ", byte);
    }
    if binary_data.len() > 32 {
        println!("\n    ... ({} more bytes)", binary_data.len() - 32);
    } else {
        println!();
    }

    // Both formats must reproduce the original value exactly.
    let json_parsed = PerformanceMetrics::from_json(&json_data)?;
    let binary_parsed = PerformanceMetrics::from_binary(&binary_data)?;

    assert_eq!(metrics, json_parsed);
    assert_eq!(metrics, binary_parsed);
    println!("\nRoundtrip verification: PASSED");

    Ok(())
}

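/// Compares encoded sizes for a small text-heavy config and a large
/// numeric-heavy dataset.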
fn demonstrate_size_comparisons() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n--- Size Comparison Analysis ---");

    let mut db_settings = HashMap::new();
    db_settings.insert("host".to_string(), "localhost".to_string());
    db_settings.insert("port".to_string(), "5432".to_string());
    db_settings.insert("database".to_string(), "myapp".to_string());

    let config = Configuration {
        version: "1.2.3".to_string(),
        debug_enabled: true,
        log_level: "info".to_string(),
        database_settings: db_settings,
        feature_flags_enabled: true,
        max_connections: 100.0,
        timeout_seconds: 30.0,
    };

    let large_dataset = LargeDataset {
        name: "ML Training Data".to_string(),
        values: (0..1000).map(|i| i as f32 * 0.1).collect(),
        labels: (0..1000).map(|i| format!("label_{}", i)).collect(),
        feature_count: 100,
        feature_dimension: 50,
        timestamp_count: 1000,
        metadata: HashMap::new(),
    };

    println!("Size comparison results:");

    let config_json = config.to_json()?;
    let config_binary = config.to_binary()?;

    println!("\nConfiguration Data (small, text-heavy):");
    println!("  JSON: {} bytes", config_json.len());
    println!("  Binary: {} bytes", config_binary.len());
    println!(
        "  Ratio (JSON/Binary): {:.2}x",
        config_json.len() as f64 / config_binary.len() as f64
    );
    println!("  Recommendation: JSON (human readable, small size difference)");

    let dataset_json = large_dataset.to_json()?;
    let dataset_binary = large_dataset.to_binary()?;

    println!("\nLarge Numeric Dataset (1000 values, 100x50 matrix):");
    println!(
        "  JSON: {} bytes ({:.1} KB)",
        dataset_json.len(),
        dataset_json.len() as f64 / 1024.0
    );
    println!(
        "  Binary: {} bytes ({:.1} KB)",
        dataset_binary.len(),
        dataset_binary.len() as f64 / 1024.0
    );
    println!(
        "  Ratio (JSON/Binary): {:.2}x",
        dataset_json.len() as f64 / dataset_binary.len() as f64
    );
    if dataset_json.len() > dataset_binary.len() {
        println!(
            "  Space saved with binary: {} bytes ({:.1} KB)",
            dataset_json.len() - dataset_binary.len(),
            (dataset_json.len() - dataset_binary.len()) as f64 / 1024.0
        );
        println!("  Recommendation: Binary (significant size reduction)");
    } else {
        println!(
            "  Binary overhead: {} bytes ({:.1} KB)",
            dataset_binary.len() - dataset_json.len(),
            (dataset_binary.len() - dataset_json.len()) as f64 / 1024.0
        );
        println!("  Recommendation: JSON (binary overhead not justified for this size)");
    }

    println!("\nContent Type Analysis:");

    // Rough character counts that illustrate where JSON spends its bytes.
    let json_numbers = dataset_json.matches(char::is_numeric).count();
    let json_punctuation = dataset_json.matches('[').count()
        + dataset_json.matches(']').count()
        + dataset_json.matches(',').count();
    let json_quotes = dataset_json.matches('"').count();

    println!("  JSON overhead sources:");
    println!("    Numeric characters: ~{}", json_numbers);
    println!("    Brackets and commas: ~{}", json_punctuation);
    println!("    Quote marks: {}", json_quotes);
    println!("    Formatting/whitespace: Varies");

    println!("  Binary advantages:");
    println!("    Direct numeric encoding: 4-8 bytes per number");
    println!("    No formatting overhead: Zero bytes");
    println!("    Efficient length encoding: Minimal bytes");

    Ok(())
}

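/// Times serialization and deserialization in both formats for two data
/// shapes and reports size and speed ratios.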
fn demonstrate_performance_benchmarks() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n--- Performance Benchmark Analysis ---");

    let small_config = Configuration {
        version: "1.0.0".to_string(),
        debug_enabled: false,
        log_level: "warn".to_string(),
        database_settings: HashMap::new(),
        feature_flags_enabled: false,
        max_connections: 100.0,
        timeout_seconds: 30.0,
    };

    let large_dataset = LargeDataset {
        name: "Large Dataset".to_string(),
        values: (0..5000).map(|i| i as f32 * 0.001).collect(),
        labels: (0..5000).map(|i| format!("large_item_{}", i)).collect(),
        feature_count: 200,
        feature_dimension: 25,
        timestamp_count: 5000,
        metadata: HashMap::new(),
    };

    println!("Performance benchmark results:");

    let dataset_names = ["Small Config", "Large Dataset"];

    for (i, name) in dataset_names.iter().enumerate() {
        let mut comparison = FormatComparison::new(name.to_string());

        // Time JSON serialization and record the encoded size.
        let start = Instant::now();
        let json_data = match i {
            0 => small_config.to_json()?,
            _ => large_dataset.to_json()?,
        };
        comparison.json_serialize_micros = start.elapsed().as_micros() as u64;
        comparison.json_size_bytes = json_data.len() as u64;

        // Time JSON deserialization for the matching type.
        if *name == "Small Config" {
            let start = Instant::now();
            let _parsed = Configuration::from_json(&json_data)?;
            comparison.json_deserialize_micros = start.elapsed().as_micros() as u64;
        } else {
            let start = Instant::now();
            let _parsed = LargeDataset::from_json(&json_data)?;
            comparison.json_deserialize_micros = start.elapsed().as_micros() as u64;
        }

        // Time binary serialization and record the encoded size.
        let start = Instant::now();
        let binary_data = match i {
            0 => small_config.to_binary()?,
            _ => large_dataset.to_binary()?,
        };
        comparison.binary_serialize_micros = start.elapsed().as_micros() as u64;
        comparison.binary_size_bytes = binary_data.len() as u64;

        // Time binary deserialization for the matching type.
        if *name == "Small Config" {
            let start = Instant::now();
            let _parsed = Configuration::from_binary(&binary_data)?;
            comparison.binary_deserialize_micros = start.elapsed().as_micros() as u64;
        } else {
            let start = Instant::now();
            let _parsed = LargeDataset::from_binary(&binary_data)?;
            comparison.binary_deserialize_micros = start.elapsed().as_micros() as u64;
        }

        comparison.calculate_ratios();

        println!("\n{}:", name);
        println!(
            "  Size - JSON: {} bytes, Binary: {} bytes (ratio: {:.2}x)",
            comparison.json_size_bytes, comparison.binary_size_bytes, comparison.size_ratio
        );
        println!(
            "  Serialize - JSON: {}μs, Binary: {}μs (binary relative speed: {:.2}x)",
            comparison.json_serialize_micros,
            comparison.binary_serialize_micros,
            comparison.serialize_speed_ratio
        );
        println!(
            "  Deserialize - JSON: {}μs, Binary: {}μs (binary relative speed: {:.2}x)",
            comparison.json_deserialize_micros,
            comparison.binary_deserialize_micros,
            comparison.deserialize_speed_ratio
        );
    }

    println!("\nPerformance Summary:");
    println!("  - Binary typically uses less storage, especially for numeric-heavy data");
    println!("  - Performance differences vary by data type and size");
    println!("  - Larger datasets show more significant binary advantages");
    println!("  - JSON parsing overhead increases with structure complexity");

    Ok(())
}

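/// Prints guidance on when each format is the better choice, with small
/// worked examples for a config file and a data export.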
fn demonstrate_use_case_recommendations() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n--- Use Case Recommendations ---");

    println!("JSON Format - Recommended for:");
    println!("  ✓ Configuration files (human-editable)");
    println!("  ✓ API responses (web compatibility)");
    println!("  ✓ Debugging and development (readability)");
    println!("  ✓ Small data structures (minimal overhead)");
    println!("  ✓ Cross-language interoperability");
    println!("  ✓ Schema evolution (self-describing)");
    println!("  ✓ Text-heavy data with few numbers");

    println!("\nBinary Format - Recommended for:");
    println!("  ✓ Large datasets (memory/storage efficiency)");
    println!("  ✓ High-performance applications (speed critical)");
    println!("  ✓ Numeric-heavy data (ML models, matrices)");
    println!("  ✓ Network transmission (bandwidth limited)");
    println!("  ✓ Embedded systems (resource constrained)");
    println!("  ✓ Long-term storage (space efficiency)");
    println!("  ✓ Frequent serialization/deserialization");

    println!("\nDecision Matrix Example:");

    let scenarios = vec![
        (
            "Web API Configuration",
            "JSON",
            "Human readable, web standard, small size",
        ),
        (
            "ML Model Weights",
            "Binary",
            "Large numeric data, performance critical",
        ),
        (
            "User Preferences",
            "JSON",
            "Human editable, self-documenting",
        ),
        (
            "Real-time Telemetry",
            "Binary",
            "High frequency, bandwidth limited",
        ),
        (
            "Application Settings",
            "JSON",
            "Developer accessible, version control friendly",
        ),
        (
            "Scientific Dataset",
            "Binary",
            "Large arrays, storage efficiency critical",
        ),
    ];

    for (scenario, recommendation, reason) in scenarios {
        println!("  {} -> {} ({})", scenario, recommendation, reason);
    }

    println!("\nPractical Examples:");

    let config = Configuration {
        version: "2.1.0".to_string(),
        debug_enabled: false,
        log_level: "info".to_string(),
        database_settings: {
            let mut map = HashMap::new();
            map.insert("url".to_string(), "postgresql://localhost/app".to_string());
            map.insert("pool_size".to_string(), "10".to_string());
            map
        },
        feature_flags_enabled: true,
        max_connections: 100.0,
        timeout_seconds: 30.0,
    };

    config.save_json("temp_config_example.json")?;
    let config_content = fs::read_to_string("temp_config_example.json")?;

    println!("\nConfiguration File (JSON) - Human readable:");
    for line in config_content.lines().take(5) {
        println!("  {}", line);
    }
    println!("  ... (easily editable by developers)");

    let export_data = LargeDataset {
        name: "Training Export".to_string(),
        values: (0..1000).map(|i| (i as f32).sin()).collect(),
        labels: (0..1000).map(|i| format!("sample_{:04}", i)).collect(),
        feature_count: 50,
        feature_dimension: 20,
        timestamp_count: 1000,
        metadata: HashMap::new(),
    };

    export_data.save_binary("temp_export_example.bin")?;
    let export_size = fs::metadata("temp_export_example.bin")?.len();

    println!("\nData Export (Binary) - Efficient storage:");
    println!(
        "  File size: {} bytes ({:.1} KB)",
        export_size,
        export_size as f64 / 1024.0
    );
    println!("  1000 numeric values + 50x20 matrix + metadata");
    println!("  Compact encoding saves significant space vs JSON");

    Ok(())
}

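/// Shows how each format behaves during debugging: direct inspection for
/// JSON versus hex dumps and custom tooling for binary.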
fn demonstrate_debugging_capabilities() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n--- Debugging Capabilities ---");

    let mut metadata = HashMap::new();
    metadata.insert("debug_session".to_string(), "session_123".to_string());
    metadata.insert("error_code".to_string(), "E001".to_string());

    let debug_metrics = PerformanceMetrics {
        operation: "debug_test".to_string(),
        duration_micros: 5432,
        memory_usage_bytes: 16384,
        cpu_usage_percent: 42.7,
        throughput_ops_per_sec: 750.0,
        metadata,
    };

    println!("Debugging Comparison:");

    let json_data = debug_metrics.to_json()?;
    println!("\nJSON Format - Debugging Advantages:");
    println!("  ✓ Human readable without tools");
    println!("  ✓ Can inspect values directly");
    println!("  ✓ Text editors show structure");
    println!("  ✓ Diff tools work naturally");
    println!("  ✓ Version control friendly");

    println!("\n  Sample JSON output for debugging:");
    for line in json_data.lines().take(5) {
        println!("    {}", line);
    }

    let binary_data = debug_metrics.to_binary()?;
    println!("\nBinary Format - Debugging Limitations:");
    println!("  ✗ Requires special tools to inspect");
    println!("  ✗ Not human readable");
    println!("  ✗ Difficult to debug data corruption");
    println!("  ✗ Version control shows as binary diff");

    println!("\n  Binary data (hex dump for debugging):");
    print!("    ");
    for (i, byte) in binary_data.iter().take(40).enumerate() {
        if i > 0 && i % 16 == 0 {
            println!();
            print!("    ");
        }
        print!("{:02x} ", byte);
    }
    println!("\n    (requires hex editor or custom tools)");

    println!("\nDevelopment Workflow Impact:");

    println!("\nJSON Workflow:");
    println!("  1. Save data to JSON file");
    println!("  2. Open in any text editor");
    println!("  3. Inspect values directly");
    println!("  4. Make manual edits if needed");
    println!("  5. Version control tracks changes");

    println!("\nBinary Workflow:");
    println!("  1. Save data to binary file");
    println!("  2. Write debugging code to load and print");
    println!("  3. Use hex editor for low-level inspection");
    println!("  4. Cannot make manual edits easily");
    println!("  5. Version control shows binary changes only");

    println!("\nHybrid Approach for Development:");
    println!("  - Use JSON during development/debugging");
    println!("  - Switch to binary for production deployment");
    println!("  - Provide debugging tools that export binary to JSON");
    println!("  - Include format conversion utilities");

    println!("\nDebugging Scenario Example:");
    println!("  Problem: Performance metrics show unexpected values");

    // Persist the same value in both formats so the workflows can be compared.
    debug_metrics.save_json("temp_debug_metrics.json")?;
    debug_metrics.save_binary("temp_debug_metrics.bin")?;

    println!("  JSON approach: Open temp_debug_metrics.json in editor");
    println!("    -> Immediately see cpu_usage_percent: 42.7");
    println!("    -> Compare with expected range");
    println!("    -> Check metadata for debug_session: 'session_123'");

    println!("  Binary approach: Write debugging code");
    println!("    -> Load binary file programmatically");
    println!("    -> Print values to console");
    println!("    -> Additional development time required");

    Ok(())
}

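/// Removes the temporary files created by the example runs above.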
fn cleanup_temp_files() -> Result<(), Box<dyn std::error::Error>> {
    println!("\n--- Cleanup ---");

    let files_to_remove = [
        "temp_config_example.json",
        "temp_export_example.bin",
        "temp_debug_metrics.json",
        "temp_debug_metrics.bin",
    ];

    for file in &files_to_remove {
        if fs::metadata(file).is_ok() {
            fs::remove_file(file)?;
            println!("Removed: {}", file);
        }
    }

    println!("Cleanup completed");
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_performance_metrics_serialization() {
        let mut metadata = HashMap::new();
        metadata.insert("test".to_string(), "value".to_string());

        let metrics = PerformanceMetrics {
            operation: "test_op".to_string(),
            duration_micros: 1000,
            memory_usage_bytes: 2048,
            cpu_usage_percent: 50.0,
            throughput_ops_per_sec: 100.0,
            metadata,
        };

        let json_data = metrics.to_json().unwrap();
        let binary_data = metrics.to_binary().unwrap();

        let json_parsed = PerformanceMetrics::from_json(&json_data).unwrap();
        let binary_parsed = PerformanceMetrics::from_binary(&binary_data).unwrap();

        assert_eq!(metrics, json_parsed);
        assert_eq!(metrics, binary_parsed);
    }

    #[test]
    fn test_format_size_comparison() {
        // Use enough numeric data that the binary encoding is reliably
        // smaller than the JSON text.
        let dataset = LargeDataset {
            name: "Test".to_string(),
            values: (0..100).map(|i| i as f32).collect(),
            labels: (0..100).map(|i| format!("label_{}", i)).collect(),
            feature_count: 10,
            feature_dimension: 10,
            metadata: HashMap::new(),
            timestamp_count: 100,
        };

        let json_data = dataset.to_json().unwrap();
        let binary_data = dataset.to_binary().unwrap();

        assert!(binary_data.len() < json_data.len());
    }

    #[test]
    fn test_configuration_human_readability() {
        let config = Configuration {
            version: "1.0.0".to_string(),
            debug_enabled: true,
            log_level: "debug".to_string(),
            database_settings: HashMap::new(),
            feature_flags_enabled: true,
            max_connections: 50.0,
            timeout_seconds: 60.0,
        };

        let json_data = config.to_json().unwrap();

        // JSON output should contain field names and values verbatim.
        assert!(json_data.contains("version"));
        assert!(json_data.contains("1.0.0"));
        assert!(json_data.contains("debug_enabled"));
        assert!(json_data.contains("true"));
    }
}