use oxidize_pdf::graphics::Color;
use oxidize_pdf::memory::MemoryOptions;
use oxidize_pdf::text::Font;
use oxidize_pdf::{Document, Page, Result};
use std::sync::Arc;
use std::thread;
use std::time::{Duration, Instant};
use tempfile::TempDir;
#[test]
fn test_large_image_processing() -> Result<()> {
    // Build a 10-page document where each page carries a 2x5 grid of
    // colored rectangles standing in for images, then time the save
    // and report the resulting file size.
    let mut doc = Document::new();
    doc.set_title("Large Image Processing Test");

    for page_num in 0..10 {
        let mut page = Page::a4();
        page.text()
            .set_font(Font::Helvetica, 14.0)
            .at(50.0, 750.0)
            .write(&format!("Page {} - Image Gallery", page_num + 1))?;

        for img_num in 0..10 {
            // Lay the placeholders out on a 5-column, 2-row grid.
            let x = 50.0 + (img_num % 5) as f64 * 100.0;
            let y = 600.0 - (img_num / 5) as f64 * 200.0;
            // Cycle fills through red, green, blue.
            let color = if img_num % 3 == 0 {
                Color::rgb(1.0, 0.0, 0.0)
            } else if img_num % 3 == 1 {
                Color::rgb(0.0, 1.0, 0.0)
            } else {
                Color::rgb(0.0, 0.0, 1.0)
            };
            page.graphics()
                .set_fill_color(color)
                .rectangle(x, y, 80.0, 80.0)
                .fill();
            // Caption each placeholder with a globally unique image id.
            page.text()
                .set_font(Font::Helvetica, 8.0)
                .at(x, y - 10.0)
                .write(&format!("IMG_{:03}", page_num * 10 + img_num))?;
        }
        doc.add_page(page);
    }

    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("large_images.pdf");
    let start = Instant::now();
    doc.save(&file_path)?;
    let duration = start.elapsed();
    let metadata = std::fs::metadata(&file_path)?;
    println!(
        "Large image document: {} bytes in {:?}",
        metadata.len(),
        duration
    );
    Ok(())
}
#[test]
fn test_stream_processing_large_pdf() -> Result<()> {
    // Generate a 100-page source PDF, then rebuild a 100-page output in
    // chunks of 10 pages to simulate streamed processing under
    // large-file memory options.
    let temp_dir = TempDir::new().unwrap();
    let large_pdf_path = temp_dir.path().join("large_source.pdf");
    create_large_test_pdf(&large_pdf_path, 100)?;
    // Kept to document intent; the options are not yet wired into saving.
    let _memory_opts = MemoryOptions::large_file();

    let output_path = temp_dir.path().join("processed_large.pdf");
    let start = Instant::now();
    let mut output_doc = Document::new();

    for chunk_start in (0..100).step_by(10) {
        // BUG FIX: the original bound was
        // `chunk_start..chunk_start.min(chunk_start + 10).min(100)`, which
        // simplifies to `chunk_start..chunk_start` — an always-empty range,
        // so the "100-page" output contained no pages at all. Clamp the
        // chunk END instead, yielding pages chunk_start..chunk_start+10.
        for page_num in chunk_start..(chunk_start + 10).min(100) {
            let mut page = Page::a4();
            page.text()
                .set_font(Font::Helvetica, 12.0)
                .at(50.0, 700.0)
                .write(&format!("Processed page {}", page_num + 1))?;
            output_doc.add_page(page);
        }
        // Brief pause between chunks to mimic streaming backpressure.
        std::thread::sleep(Duration::from_millis(10));
    }

    output_doc.save(&output_path)?;
    let duration = start.elapsed();
    println!("Streamed 100-page PDF in {duration:?}");
    Ok(())
}
#[test]
fn test_concurrent_pdf_operations() -> Result<()> {
    // Spawn 50 worker threads, each writing its own 5-page PDF into a
    // shared temp directory, then verify every file materialized.
    let temp_dir = Arc::new(TempDir::new().unwrap());
    let start = Instant::now();
    let timeout = Duration::from_secs(60);

    let mut workers = Vec::with_capacity(50);
    for thread_id in 0..50 {
        let temp_dir = Arc::clone(&temp_dir);
        workers.push(thread::spawn(move || -> Result<()> {
            let mut doc = Document::new();
            doc.set_title(format!("Concurrent Document {thread_id}"));
            for page_num in 0..5 {
                let mut page = Page::a4();
                page.text()
                    .set_font(Font::Helvetica, 12.0)
                    .at(100.0, 700.0)
                    .write(&format!("Thread {} - Page {}", thread_id, page_num + 1))?;
                // Stroke color derived from the thread id so each thread's
                // output is visually distinguishable.
                page.graphics()
                    .set_stroke_color(Color::rgb(
                        ((thread_id as f32 * 0.02) % 1.0) as f64,
                        0.5,
                        (1.0 - (thread_id as f32 * 0.02) % 1.0) as f64,
                    ))
                    .rectangle(100.0, 500.0, 400.0, 100.0)
                    .stroke();
                doc.add_page(page);
            }
            let file_path = temp_dir.path().join(format!("concurrent_{thread_id}.pdf"));
            doc.save(&file_path)?;
            Ok(())
        }));
    }

    let mut completed = 0;
    for worker in workers {
        // NOTE(review): this only checks elapsed time *between* joins; a
        // single hung worker would still block indefinitely inside join().
        if start.elapsed() > timeout {
            panic!("Timeout: Concurrent operations took too long");
        }
        worker.join().unwrap()?;
        completed += 1;
    }
    println!(
        "Completed {} concurrent PDF operations in {:?}",
        completed,
        start.elapsed()
    );

    // Every thread must have produced its file.
    for i in 0..50 {
        let file_path = temp_dir.path().join(format!("concurrent_{i}.pdf"));
        assert!(file_path.exists());
    }
    Ok(())
}
#[test]
fn test_memory_leak_detection() -> Result<()> {
    // Run 1000 build/save cycles while recycling only ten output file
    // names; steadily-growing per-document state would surface as a
    // slowdown or memory exhaustion across cycles.
    let temp_dir = TempDir::new().unwrap();

    for cycle in 0..1000 {
        let mut doc = Document::new();
        doc.set_title(format!("Memory Leak Test {cycle}"));

        let mut page = Page::a4();
        page.text()
            .set_font(Font::Helvetica, 12.0)
            .at(100.0, 700.0)
            .write(&format!("Cycle {cycle}"))?;
        // A row of ten filled circles, 40pt apart.
        for i in 0..10 {
            let cx = 100.0 + f64::from(i) * 40.0;
            page.graphics().circle(cx, 500.0, 20.0).fill();
        }
        doc.add_page(page);

        // Reuse ten file names so the temp dir never holds 1000 files.
        let file_path = temp_dir
            .path()
            .join(format!("leak_test_{}.pdf", cycle % 10));
        doc.save(&file_path)?;

        // Periodic breather plus a progress marker every 100 cycles.
        if cycle % 100 == 0 {
            std::thread::sleep(Duration::from_millis(10));
            println!("Completed {cycle} cycles");
        }
    }

    println!("Memory leak test completed: 1000 cycles");
    Ok(())
}
#[test]
fn test_cache_thrashing() -> Result<()> {
    // Rotate through all 14 standard PDF fonts line-by-line across 50
    // pages so any per-font cache is constantly evicted and refilled.
    //
    // Cleanups vs. the original: the dead `_font_names` string array was
    // removed (it was never read), and the redundant
    // `(0..40).enumerate()` — whose index always equaled the loop value —
    // was collapsed to a plain range loop. Output is unchanged.
    let mut doc = Document::new();

    for page_num in 0..50 {
        let mut page = Page::a4();
        let mut y = 750.0;
        for line in 0..40 {
            // Select a font from the global line index modulo the 14
            // built-in fonts; 40 lines/page is not a multiple of 14, so
            // the rotation never aligns with page boundaries.
            let font = match (page_num * 40 + line) % 14 {
                0 => Font::Helvetica,
                1 => Font::HelveticaBold,
                2 => Font::HelveticaOblique,
                3 => Font::HelveticaBoldOblique,
                4 => Font::TimesRoman,
                5 => Font::TimesBold,
                6 => Font::TimesItalic,
                7 => Font::TimesBoldItalic,
                8 => Font::Courier,
                9 => Font::CourierBold,
                10 => Font::CourierOblique,
                11 => Font::CourierBoldOblique,
                12 => Font::Symbol,
                _ => Font::ZapfDingbats,
            };
            page.text()
                .set_font(font, 8.0)
                .at(50.0, y)
                .write(&format!("Cache thrash test - Page {page_num} Line {line}"))?;
            y -= 15.0;
        }
        doc.add_page(page);
    }

    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("cache_thrash.pdf");
    let start = Instant::now();
    doc.save(&file_path)?;
    let duration = start.elapsed();
    println!("Cache thrashing test completed in {duration:?}");
    Ok(())
}
#[test]
fn test_object_deduplication_stress() -> Result<()> {
    // Write the same long string and the same rectangle thousands of
    // times across 100 pages; a writer that deduplicates/compresses
    // shared content should keep the file well under 10 MB.
    let mut doc = Document::new();
    let repeated_text = "This is a repeated string that should be deduplicated. ";
    let repeated_text_long = repeated_text.repeat(10);

    for _page_num in 0..100 {
        let mut page = Page::a4();
        // Twenty identical text lines, 30pt apart.
        for row in 0..20 {
            let baseline = 750.0 - f64::from(row) * 30.0;
            page.text()
                .set_font(Font::Helvetica, 10.0)
                .at(50.0, baseline)
                .write(&repeated_text_long)?;
        }
        // Ten identical stroked rectangles stacked on the same spot.
        for _ in 0..10 {
            page.graphics()
                .rectangle(100.0, 100.0, 100.0, 100.0)
                .stroke();
        }
        doc.add_page(page);
    }

    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("deduplication_test.pdf");
    doc.save(&file_path)?;
    let metadata = std::fs::metadata(&file_path)?;
    let size_mb = metadata.len() as f64 / 1_048_576.0;
    println!("Deduplication test: {size_mb:.2} MB (should be optimized)");
    assert!(metadata.len() < 10_000_000);
    Ok(())
}
#[test]
fn test_unicode_font_subsetting_stress() -> Result<()> {
    // Render samples from several Unicode blocks on a single page to
    // stress font subsetting across different scripts.
    let mut doc = Document::new();
    let mut page = Page::a4();
    let unicode_tests = vec![
        (0x0020..0x007F, "Basic Latin"),
        (0x00A0..0x00FF, "Latin-1 Supplement"),
        (0x0100..0x017F, "Latin Extended-A"),
        (0x0370..0x03FF, "Greek"),
        (0x0400..0x04FF, "Cyrillic"),
        (0x0590..0x05FF, "Hebrew"),
        (0x0600..0x06FF, "Arabic"),
        (0x4E00..0x4E20, "CJK Sample"),
    ];
    let mut y = 750.0;
    for (range, name) in unicode_tests {
        page.text()
            .set_font(Font::HelveticaBold, 12.0)
            .at(50.0, y)
            .write(&format!("{name} Test:"))?;
        y -= 20.0;

        // Collect up to 50 CHARACTERS from the block, skipping code
        // points that are not valid scalar values (e.g. surrogates).
        //
        // BUG FIX: the original capped on `test_string.len() >= 50`,
        // which counts UTF-8 BYTES, so multi-byte scripts (Greek,
        // Cyrillic, Hebrew, Arabic, CJK) were truncated well short of
        // 50 characters — defeating the subsetting stress. Count chars.
        let mut test_string = String::new();
        let mut char_count = 0usize;
        for code_point in range {
            if let Some(ch) = char::from_u32(code_point) {
                test_string.push(ch);
                char_count += 1;
                if char_count >= 50 {
                    break;
                }
            }
        }
        if !test_string.is_empty() {
            page.text()
                .set_font(Font::Helvetica, 10.0)
                .at(50.0, y)
                .write(&test_string)?;
            y -= 20.0;
        }
        // Stop before running off the bottom of the page.
        if y < 100.0 {
            break;
        }
    }
    doc.add_page(page);
    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("unicode_subsetting.pdf");
    doc.save(&file_path)?;
    Ok(())
}
#[test]
fn test_compression_stress() -> Result<()> {
    // Save the same 50-page document once with compression enabled and
    // once without, alternating highly-redundant and mixed-case text,
    // and report the resulting sizes and timings for comparison.
    for compressed in [true, false] {
        let label = if compressed { "ON" } else { "OFF" };
        let suffix = if compressed { "on" } else { "off" };

        let mut doc = Document::new();
        doc.set_compress(compressed);
        doc.set_title(format!("Compression Test - {label}"));

        let redundant_text = "AAAAAAAAAA";
        let random_text = "AbCdEfGhIj";

        for page_num in 0..50 {
            let mut page = Page::a4();
            // Even pages: maximally compressible; odd pages: mixed case.
            let text = match page_num % 2 {
                0 => redundant_text.repeat(100),
                _ => random_text.repeat(100),
            };
            let mut baseline = 750.0;
            for _ in 0..60 {
                page.text()
                    .set_font(Font::Courier, 10.0)
                    .at(50.0, baseline)
                    .write(&text)?;
                baseline -= 12.0;
            }
            doc.add_page(page);
        }

        let temp_dir = TempDir::new().unwrap();
        let file_path = temp_dir.path().join(format!("compression_{suffix}.pdf"));
        let start = Instant::now();
        doc.save(&file_path)?;
        let duration = start.elapsed();
        let metadata = std::fs::metadata(&file_path)?;
        let size_mb = metadata.len() as f64 / 1_048_576.0;
        println!("Compression {label}: {size_mb:.2} MB in {duration:?}");
    }
    Ok(())
}
#[test]
fn test_page_tree_balancing_stress() -> Result<()> {
    // Build 1000 pages in 100 groups of 10 — each page pushing and
    // popping a small stack of graphics states — then time the save of
    // the resulting page tree.
    let mut doc = Document::new();
    let start = Instant::now();

    for group in 0..100 {
        for page_in_group in 0..10 {
            let mut page = Page::a4();
            page.text()
                .set_font(Font::Helvetica, 10.0)
                .at(50.0, 700.0)
                .write(&format!("Group {group} - Page {page_in_group}"))?;

            // Push five nested graphics states, draw, then unwind them.
            let gfx = page.graphics();
            for _ in 0..5 {
                gfx.save_state();
            }
            gfx.circle(300.0, 400.0, 50.0).fill();
            for _ in 0..5 {
                gfx.restore_state();
            }
            doc.add_page(page);
        }
        // Bail out if page construction itself is pathologically slow.
        if start.elapsed() > Duration::from_secs(30) {
            panic!("Timeout: Page tree balancing took too long");
        }
    }

    let temp_dir = TempDir::new().unwrap();
    let file_path = temp_dir.path().join("page_tree_stress.pdf");
    let save_start = Instant::now();
    doc.save(&file_path)?;
    let save_duration = save_start.elapsed();
    println!("Saved 1000-page unbalanced tree in {save_duration:?}");
    Ok(())
}
/// Build a simple `page_count`-page PDF at `path` for use as stress-test input.
///
/// Each page carries a large page-number label and a stroked circle.
///
/// Generalized to accept anything path-like (`&Path`, `&PathBuf`, `&str`)
/// via `impl AsRef<Path>`; existing `&Path` callers are unaffected.
///
/// # Errors
/// Propagates any error from page text rendering or `Document::save`.
fn create_large_test_pdf(path: impl AsRef<std::path::Path>, page_count: usize) -> Result<()> {
    let mut doc = Document::new();
    for i in 0..page_count {
        let mut page = Page::a4();
        page.text()
            .set_font(Font::Helvetica, 24.0)
            .at(200.0, 400.0)
            .write(&format!("Page {}", i + 1))?;
        page.graphics().circle(300.0, 400.0, 100.0).stroke();
        doc.add_page(page);
    }
    doc.save(path.as_ref())?;
    Ok(())
}
/// Allocate and immediately drop roughly 100 MB of zeroed heap memory,
/// simulating a transient memory-pressure spike during stress runs.
#[allow(dead_code)]
fn simulate_memory_pressure() {
    // The binding keeps the buffer alive to end of scope; dropping it on
    // return exercises a full allocate/free cycle.
    let _large_vec = vec![0u8; 100_000_000];
}