#[cfg(test)]
mod bench_tests {
use proofmode::crypto::hash::calculate_hash;
use proofmode::generate::core::{PlatformCallbacks, ProofGenerator};
use proofmode::generate_types::*;
use std::collections::HashMap;
use std::time::{Duration, Instant};
/// Zero-sized stub implementation of `PlatformCallbacks` returning canned
/// values, so benchmarks measure proof generation rather than platform I/O.
struct BenchCallbacks;
impl PlatformCallbacks for BenchCallbacks {
    /// Canned device metadata used for every benchmarked proof.
    fn get_device_info(&self) -> Option<DeviceData> {
        let device = DeviceData {
            manufacturer: "BenchDevice".to_string(),
            model: "BenchModel".to_string(),
            os_version: "Bench 1.0".to_string(),
            device_id: Some("bench-123".to_string()),
        };
        Some(device)
    }

    /// Canned GPS fix (lower Manhattan coordinates) used for every proof.
    fn get_location_info(&self) -> Option<LocationData> {
        let location = LocationData {
            latitude: 40.7128,
            longitude: -74.0060,
            altitude: Some(10.0),
            accuracy: Some(5.0),
            provider: Some("GPS".to_string()),
        };
        Some(location)
    }

    /// Canned WiFi network info used for every proof.
    fn get_network_info(&self) -> Option<NetworkData> {
        let network = NetworkData {
            network_type: "WiFi".to_string(),
            wifi_ssid: Some("BenchNetwork".to_string()),
            cell_info: None,
        };
        Some(network)
    }

    /// Persisting binary data is a no-op: benchmarks must not hit the filesystem.
    fn save_data(
        &self,
        _hash: &str,
        _filename: &str,
        _data: &[u8],
    ) -> proofmode::generate_error::Result<()> {
        Ok(())
    }

    /// Persisting text is likewise a no-op.
    fn save_text(
        &self,
        _hash: &str,
        _filename: &str,
        _text: &str,
    ) -> proofmode::generate_error::Result<()> {
        Ok(())
    }

    /// Returns a fixed four-byte fake signature so signing cost is constant.
    fn sign_data(&self, _data: &[u8]) -> proofmode::generate_error::Result<Option<Vec<u8>>> {
        Ok(Some(vec![0x01, 0x02, 0x03, 0x04]))
    }

    /// Notarization is disabled for benchmarks.
    fn notarize_hash(
        &self,
        _hash: &str,
    ) -> proofmode::generate_error::Result<Option<NotarizationData>> {
        Ok(None)
    }

    /// Progress messages are discarded.
    fn report_progress(&self, _message: &str) {}
}
/// Builds the configuration shared by every benchmark: all local tracking is
/// enabled, while notarization and C2PA embedding are off so no external or
/// network work distorts the timings.
fn create_benchmark_config() -> ProofModeConfig {
    ProofModeConfig {
        embed_c2pa: false,
        auto_notarize: false,
        add_credentials: true,
        track_location: true,
        track_device_id: true,
        track_network: true,
    }
}
#[test]
/// Times `calculate_hash` across payload sizes from 1 KB to 10 MB, printing
/// per-size throughput and sanity-checking the digest format.
fn benchmark_hash_calculation() {
    // Fixed literal list: an array avoids a needless heap allocation
    // (clippy::useless_vec).
    let test_sizes = [
        ("1KB", 1024),
        ("10KB", 10 * 1024),
        ("100KB", 100 * 1024),
        ("1MB", 1024 * 1024),
        ("10MB", 10 * 1024 * 1024),
    ];
    println!("\n=== Hash Calculation Benchmarks ===");
    for (name, size) in test_sizes {
        let data = vec![0u8; size];
        let start = Instant::now();
        let hash = calculate_hash(&data);
        let duration = start.elapsed();
        println!(
            "{}: {} in {:?} ({:.2} MB/s)",
            name,
            hash,
            duration,
            (size as f64 / (1024.0 * 1024.0)) / duration.as_secs_f64()
        );
        // A SHA-256 digest renders as exactly 64 hex characters; check the
        // content as well as the length.
        assert_eq!(hash.len(), 64);
        assert!(hash.chars().all(|c| c.is_ascii_hexdigit()));
    }
}
#[test]
/// Times a full end-to-end proof generation for several payload sizes and
/// enforces a coarse 5-second upper bound per proof.
fn benchmark_proof_generation() {
    let config = create_benchmark_config();
    let generator = ProofGenerator::new(config);
    let callbacks = BenchCallbacks;
    // Fixed literal list: an array avoids a needless heap allocation
    // (clippy::useless_vec).
    let test_sizes = [
        ("Small (1KB)", 1024),
        ("Medium (100KB)", 100 * 1024),
        ("Large (1MB)", 1024 * 1024),
    ];
    println!("\n=== Proof Generation Benchmarks ===");
    for (name, size) in test_sizes {
        let data = vec![0u8; size];
        let metadata = HashMap::new();
        let start = Instant::now();
        let result = generator.generate_proof(&data, metadata, &callbacks);
        let duration = start.elapsed();
        // `expect` replaces the assert!(is_ok()) + unwrap() pair: same panic
        // on failure, one lookup instead of two.
        let hash = result.expect("proof generation failed");
        println!("{}: Generated proof {} in {:?}", name, hash, duration);
        assert!(
            duration < Duration::from_secs(5),
            "Proof generation took too long: {:?}",
            duration
        );
    }
}
#[test]
/// Generates many small proofs back-to-back, checking batch throughput and
/// that every produced hash is unique.
fn benchmark_multiple_proofs() {
    let generator = ProofGenerator::new(create_benchmark_config());
    let callbacks = BenchCallbacks;
    // `u32` so the total duration divides evenly into a per-proof average.
    let num_proofs: u32 = 100;
    let data_size = 1024;
    println!("\n=== Multiple Proof Generation Benchmark ===");
    let start = Instant::now();
    let mut hashes = Vec::new();
    for i in 0..num_proofs {
        // Each payload uses a distinct fill byte, so inputs never repeat.
        let data = vec![i as u8; data_size];
        let result = generator.generate_proof(&data, HashMap::new(), &callbacks);
        assert!(result.is_ok());
        hashes.push(result.unwrap());
    }
    let duration = start.elapsed();
    let avg_duration = duration / num_proofs;
    println!(
        "Generated {} proofs in {:?} (avg: {:?} per proof)",
        num_proofs, duration, avg_duration
    );
    // Sort + dedup: any shrink in length means two inputs hashed identically.
    hashes.sort();
    hashes.dedup();
    assert_eq!(hashes.len(), num_proofs as usize, "Not all hashes were unique");
    assert!(
        avg_duration < Duration::from_millis(100),
        "Average proof generation too slow: {:?}",
        avg_duration
    );
}
#[test]
/// Hashes the same 1 MiB buffer repeatedly: every run must match the first,
/// and the loop doubles as a throughput measurement.
fn benchmark_hash_consistency() {
    let data = vec![42u8; 1024 * 1024];
    let iterations: u32 = 1000;
    println!("\n=== Hash Consistency Benchmark ===");
    let start = Instant::now();
    let first_hash = calculate_hash(&data);
    // `first_hash` already counts as iteration zero, so loop the remainder.
    for _ in 1..iterations {
        assert_eq!(calculate_hash(&data), first_hash, "Hash inconsistency detected");
    }
    let duration = start.elapsed();
    let avg_duration = duration / iterations;
    println!(
        "Computed {} hashes in {:?} (avg: {:?} per hash)",
        iterations, duration, avg_duration
    );
    let total_mb = data.len() as f64 * iterations as f64 / (1024.0 * 1024.0);
    println!("Throughput: {:.2} MB/s", total_mb / duration.as_secs_f64());
}
#[test]
/// Measures how proof generation scales with the number of user metadata
/// fields attached to a fixed payload.
fn benchmark_metadata_processing() {
    let config = create_benchmark_config();
    let generator = ProofGenerator::new(config);
    let callbacks = BenchCallbacks;
    let data = b"test data";
    // Fixed literal list: an array avoids a needless heap allocation
    // (clippy::useless_vec).
    let metadata_sizes = [("Small", 10), ("Medium", 100), ("Large", 1000)];
    println!("\n=== Metadata Processing Benchmarks ===");
    for (name, num_fields) in metadata_sizes {
        // Pre-size the map so filling it never rehashes — map construction
        // should not leak into the measured interval's setup cost.
        let mut metadata = HashMap::with_capacity(num_fields);
        for i in 0..num_fields {
            metadata.insert(format!("key_{}", i), format!("value_{}", i));
        }
        let start = Instant::now();
        let result = generator.generate_proof(data, metadata, &callbacks);
        let duration = start.elapsed();
        assert!(result.is_ok());
        println!("{} metadata ({} fields): {:?}", name, num_fields, duration);
    }
}
#[test]
/// Round-trips a fully populated `ProofData` (1000 metadata fields) through
/// serde_json 1000 times and reports the average latency.
fn benchmark_json_serialization() {
    // `generate_types` is already glob-imported at module scope; the earlier
    // duplicate `use proofmode::generate_types::*;` here was redundant.
    use chrono::Utc;
    let proof_data = ProofData {
        file_hash_sha256: "benchmark_hash".to_string(),
        metadata: {
            // Pre-sized so building the 1000-entry map never rehashes.
            let mut map = HashMap::with_capacity(1000);
            for i in 0..1000 {
                map.insert(format!("key_{}", i), format!("value_{}", i));
            }
            map
        },
        location: Some(LocationData {
            latitude: 40.7128,
            longitude: -74.0060,
            altitude: Some(10.0),
            accuracy: Some(5.0),
            provider: Some("GPS".to_string()),
        }),
        device: Some(DeviceData {
            manufacturer: "BenchCorp".to_string(),
            model: "BenchDevice".to_string(),
            os_version: "Bench 1.0".to_string(),
            device_id: Some("bench123".to_string()),
        }),
        network: Some(NetworkData {
            network_type: "WiFi".to_string(),
            wifi_ssid: Some("BenchWiFi".to_string()),
            cell_info: None,
        }),
        timestamps: TimestampData {
            created_at: Utc::now(),
            modified_at: None,
            proof_generated_at: Utc::now(),
        },
        signature: Some("benchmark_signature".to_string()),
        notarization: None,
    };
    println!("\n=== JSON Serialization Benchmark ===");
    let iterations: u32 = 1000;
    let start = Instant::now();
    for _ in 0..iterations {
        // Serialize then deserialize, failing fast on either leg.
        let json_str = serde_json::to_string(&proof_data).expect("serialization failed");
        let deserialized: Result<ProofData, _> = serde_json::from_str(&json_str);
        assert!(deserialized.is_ok());
    }
    let duration = start.elapsed();
    let avg_duration = duration / iterations;
    println!(
        "Serialized/deserialized {} times in {:?} (avg: {:?})",
        iterations, duration, avg_duration
    );
}
#[test]
/// Hammers one shared `ProofGenerator` from several threads at once and
/// verifies every produced hash across all threads is distinct.
fn stress_test_concurrent_operations() {
    use std::sync::Arc;
    use std::thread;
    let generator = Arc::new(ProofGenerator::new(create_benchmark_config()));
    let num_threads = 4;
    let operations_per_thread = 25;
    println!("\n=== Concurrent Operations Stress Test ===");
    let start = Instant::now();
    let workers: Vec<_> = (0..num_threads)
        .map(|thread_id| {
            let generator = Arc::clone(&generator);
            thread::spawn(move || {
                let callbacks = BenchCallbacks;
                let mut produced = Vec::new();
                for op in 0..operations_per_thread {
                    // Payload is unique per (thread, operation) pair.
                    let payload =
                        format!("thread_{}_operation_{}", thread_id, op).into_bytes();
                    let outcome = generator.generate_proof(&payload, HashMap::new(), &callbacks);
                    assert!(
                        outcome.is_ok(),
                        "Thread {} operation {} failed",
                        thread_id,
                        op
                    );
                    produced.push(outcome.unwrap());
                }
                produced
            })
        })
        .collect();
    let mut all_hashes = Vec::new();
    for worker in workers {
        all_hashes.extend(worker.join().expect("Thread panicked"));
    }
    let duration = start.elapsed();
    let total_operations = num_threads * operations_per_thread;
    println!(
        "Completed {} operations across {} threads in {:?}",
        total_operations, num_threads, duration
    );
    // Sort + dedup must leave the length unchanged if all hashes differ.
    let original_len = all_hashes.len();
    all_hashes.sort();
    all_hashes.dedup();
    assert_eq!(
        all_hashes.len(),
        original_len,
        "Duplicate hashes found in concurrent operations"
    );
    println!("All {} hashes are unique", all_hashes.len());
}
}