use cs::parse::JsonParser;
use std::path::Path;
use std::time::Instant;
#[test]
fn test_json_parser_performance_large_file() {
    let fixture = Path::new("tests/fixtures/performance/large-en.json");
    if !fixture.exists() {
        println!("Skipping performance test - large JSON file not found");
        println!("Run 'node generate_large_translation.js' to create test file");
        return;
    }

    println!("Testing JSON parser performance on large file...");

    // Run a query-filtered parse, returning the entries plus wall-clock time.
    // The unwrap is timed too, mirroring how the full parse below is measured.
    let timed_query = |query: &str, failure_msg: &str| {
        let clock = Instant::now();
        let results = JsonParser::parse_file_with_query(fixture, Some(query)).expect(failure_msg);
        (results, clock.elapsed())
    };

    // Baseline: parse the entire file with no query filter.
    let clock = Instant::now();
    let entries = JsonParser::parse_file(fixture).expect("Failed to parse large JSON file");
    let parse_duration = clock.elapsed();
    println!(
        "Parse entire file: {} entries in {:?}",
        entries.len(),
        parse_duration
    );
    assert!(!entries.is_empty(), "Should find translation entries");

    // Query expected to match a specific known value.
    let (filtered_entries, query_duration) =
        timed_query("Click here", "Failed to parse with query");
    println!(
        "Parse with query 'Click here': {} entries in {:?}",
        filtered_entries.len(),
        query_duration
    );
    assert!(
        !filtered_entries.is_empty(),
        "Should find entries matching 'Click here'"
    );

    // Query expected to match many entries (timing printed for information only).
    let (common_entries, common_duration) =
        timed_query("error", "Failed to parse with common query");
    println!(
        "Parse with query 'error': {} entries in {:?}",
        common_entries.len(),
        common_duration
    );

    // Query expected to match nothing at all.
    let (rare_entries, rare_duration) =
        timed_query("nonexistent", "Failed to parse with rare query");
    println!(
        "Parse with query 'nonexistent': {} entries in {:?}",
        rare_entries.len(),
        rare_duration
    );
    assert_eq!(
        rare_entries.len(),
        0,
        "Should find no entries for nonexistent query"
    );

    // Loose upper bounds: fail only on gross performance regressions.
    assert!(
        parse_duration.as_millis() < 5000,
        "Full parse should complete within 5 seconds"
    );
    assert!(
        query_duration.as_millis() < 5000,
        "Query parse should complete within 5 seconds"
    );

    // Spot-check a known key/value pair to confirm content parsed correctly.
    let sample_entry = entries
        .iter()
        .find(|e| e.key.contains("auth.labels.create"))
        .unwrap();
    assert_eq!(sample_entry.value, "Click here to continue");

    println!("JSON parser performance test completed successfully!");
}
#[test]
fn test_json_parser_memory_usage() {
    let fixture = Path::new("tests/fixtures/performance/large-en.json");
    if !fixture.exists() {
        println!("Skipping memory test - large JSON file not found");
        return;
    }

    println!("Testing JSON parser memory usage...");

    // Parse the same file repeatedly: a leak or unbounded cache would show up
    // as progressively slower iterations or inconsistent entry counts.
    for attempt in 1..=5 {
        let clock = Instant::now();
        let parsed = JsonParser::parse_file(fixture).expect("Failed to parse");
        let elapsed = clock.elapsed();
        println!("Parse #{}: {} entries in {:?}", attempt, parsed.len(), elapsed);
        assert!(parsed.len() > 7000, "Should consistently find many entries");
        assert!(elapsed.as_millis() < 5000, "Each parse should be reasonably fast");
    }

    println!("Memory usage test completed - no significant degradation detected");
}
#[test]
fn test_json_parser_nested_structure_performance() {
    let fixture = Path::new("tests/fixtures/performance/large-en.json");
    if !fixture.exists() {
        println!("Skipping nested structure test - large JSON file not found");
        return;
    }

    println!("Testing JSON parser on deeply nested structures...");

    let clock = Instant::now();
    let entries = JsonParser::parse_file(fixture).expect("Failed to parse");
    let duration = clock.elapsed();

    // Nesting depth of a flattened key == number of '.' separators it contains.
    let depths: Vec<usize> = entries.iter().map(|e| e.key.matches('.').count()).collect();
    let max_depth = depths.iter().copied().max().unwrap_or(0);
    // Integer sum converted once; exact for these small counts.
    let avg_depth = depths.iter().sum::<usize>() as f64 / entries.len() as f64;

    println!("Parsed {} entries in {:?}", entries.len(), duration);
    println!("Max nesting depth: {} levels", max_depth);
    println!("Average nesting depth: {:.2} levels", avg_depth);
    assert!(max_depth >= 3, "Should handle at least 3 levels of nesting");
    assert!(avg_depth >= 2.0, "Should have reasonable average nesting");

    // The fixture embeds "_details" sub-objects; make sure they survived parsing.
    let nested_detail_count = entries.iter().filter(|e| e.key.contains("_details")).count();
    println!("Found {} nested detail entries", nested_detail_count);
    assert!(nested_detail_count > 0, "Should find nested detail structures");

    // And one specific deeply nested key must resolve to its known value.
    let detail_entry = entries
        .iter()
        .find(|e| e.key == "auth.labels.create_details.long")
        .expect("Should find specific nested entry");
    assert!(detail_entry.value.contains("additional detailed information"));

    println!("Nested structure performance test completed successfully!");
}
#[test]
fn test_json_parser_query_filtering_accuracy() {
    let fixture = Path::new("tests/fixtures/performance/large-en.json");
    if !fixture.exists() {
        println!("Skipping filtering accuracy test - large JSON file not found");
        return;
    }

    println!("Testing JSON parser query filtering accuracy...");

    // Case-insensitivity: the same query in different cases must yield the
    // same number of matches.
    let entries_lower = JsonParser::parse_file_with_query(fixture, Some("click"))
        .expect("Failed to parse with lowercase query");
    let entries_upper = JsonParser::parse_file_with_query(fixture, Some("CLICK"))
        .expect("Failed to parse with uppercase query");
    println!("Lowercase 'click': {} entries", entries_lower.len());
    println!("Uppercase 'CLICK': {} entries", entries_upper.len());
    assert_eq!(
        entries_lower.len(),
        entries_upper.len(),
        "Case-insensitive filtering should work"
    );
    assert!(!entries_lower.is_empty(), "Should find entries containing 'click'");

    // Precision: every entry the filter returns must actually contain the
    // query substring in its value.
    let partial_entries = JsonParser::parse_file_with_query(fixture, Some("error"))
        .expect("Failed to parse with partial query");
    println!("Partial match 'error': {} entries", partial_entries.len());
    for entry in partial_entries.iter() {
        assert!(
            entry.value.to_lowercase().contains("error"),
            "Entry '{}' should contain 'error'",
            entry.value
        );
    }

    // Multi-word phrases should match as a whole, not per word.
    let phrase_entries = JsonParser::parse_file_with_query(fixture, Some("additional detailed"))
        .expect("Failed to parse with phrase query");
    println!(
        "Phrase match 'additional detailed': {} entries",
        phrase_entries.len()
    );
    assert!(!phrase_entries.is_empty(), "Should find entries with phrase");

    println!("Query filtering accuracy test completed successfully!");
}