use crate::config::FidelityTier;
use crate::error::Pdf2MdError;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::fs;
use std::io::Read;
use std::path::{Path, PathBuf};
use tracing::{debug, info, warn};
/// Per-page conversion metrics captured alongside a page checkpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PageStats {
    /// Token count sent to the model for this page.
    pub input_tokens: usize,
    /// Token count produced by the model for this page.
    pub output_tokens: usize,
    /// Wall-clock time spent converting this page, in milliseconds.
    pub duration_ms: u64,
    /// Number of retries performed before the page succeeded.
    pub retries: u8,
}
/// A single converted page as persisted to (and restored from) a checkpoint file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointedPage {
    /// Page number this checkpoint belongs to (tests use 1-based numbering).
    pub page_number: usize,
    /// Converted markdown content for the page.
    pub markdown: String,
    /// Conversion metrics recorded for this page.
    pub stats: PageStats,
}
/// Conversion-level metadata stored next to the page checkpoints, recording
/// the settings the checkpoints were produced under.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointMeta {
    /// ID derived from the PDF contents and conversion settings
    /// (see `compute_conversion_id`).
    pub conversion_id: String,
    /// Source PDF path, stored as a string for serialization.
    pub pdf_path: String,
    /// LLM provider name (e.g. "openai").
    pub provider_name: String,
    /// Model identifier used for the conversion.
    pub model_name: String,
    /// Fidelity tier as a string (e.g. "tier2").
    pub fidelity: String,
    /// DPI setting used for the conversion.
    pub dpi: u32,
    /// Maintain-format flag passed to the conversion
    /// (semantics defined by the caller — not visible from this module).
    pub maintain_format: bool,
    /// Creation timestamp; format is chosen by the caller
    /// (tests store an RFC 3339 string).
    pub created_at: String,
}
/// Persistence interface for per-page conversion checkpoints, allowing an
/// interrupted conversion to be resumed without redoing finished pages.
pub trait CheckpointStore: Send + Sync {
    /// Persists one converted page's markdown and stats under `conversion_id`.
    fn save_page_checkpoint(
        &self,
        conversion_id: &str,
        page_number: usize,
        markdown: &str,
        stats: &PageStats,
    ) -> Result<(), Pdf2MdError>;
    /// Returns the stored page, or `None` when no usable checkpoint exists.
    fn load_page_checkpoint(
        &self,
        conversion_id: &str,
        page_number: usize,
    ) -> Result<Option<CheckpointedPage>, Pdf2MdError>;
    /// Returns the page numbers that already have a usable checkpoint.
    fn list_completed_pages(&self, conversion_id: &str) -> Result<Vec<usize>, Pdf2MdError>;
    /// Removes all checkpoints stored for `conversion_id`.
    fn clear_checkpoints(&self, conversion_id: &str) -> Result<(), Pdf2MdError>;
    /// Persists conversion-level metadata for `conversion_id`.
    fn save_meta(&self, conversion_id: &str, meta: &CheckpointMeta) -> Result<(), Pdf2MdError>;
}
/// Checkpoint store that persists nothing — used when checkpointing is disabled.
pub struct NoopCheckpointStore;

impl CheckpointStore for NoopCheckpointStore {
    /// Discards the page; always succeeds.
    fn save_page_checkpoint(
        &self,
        _conversion_id: &str,
        _page_number: usize,
        _markdown: &str,
        _stats: &PageStats,
    ) -> Result<(), Pdf2MdError> {
        Ok(())
    }

    /// Nothing is ever stored, so every lookup misses.
    fn load_page_checkpoint(
        &self,
        _conversion_id: &str,
        _page_number: usize,
    ) -> Result<Option<CheckpointedPage>, Pdf2MdError> {
        Ok(None)
    }

    /// No pages are ever recorded as complete.
    fn list_completed_pages(&self, _conversion_id: &str) -> Result<Vec<usize>, Pdf2MdError> {
        Ok(Vec::new())
    }

    /// Nothing to clear; always succeeds.
    fn clear_checkpoints(&self, _conversion_id: &str) -> Result<(), Pdf2MdError> {
        Ok(())
    }

    /// Metadata is dropped; always succeeds.
    fn save_meta(&self, _conversion_id: &str, _meta: &CheckpointMeta) -> Result<(), Pdf2MdError> {
        Ok(())
    }
}
/// Checkpoint store backed by JSON files laid out as
/// `base_dir/<conversion_id>/page_<nnnn>.json` plus a per-conversion `meta.json`.
pub struct FileCheckpointStore {
    /// Root directory holding one subdirectory per conversion.
    base_dir: PathBuf,
}

impl FileCheckpointStore {
    /// Creates a store rooted at `base_dir`. The directory itself is only
    /// created when something is first saved.
    pub fn new(base_dir: impl Into<PathBuf>) -> Self {
        let base_dir = base_dir.into();
        Self { base_dir }
    }

    /// Directory holding all checkpoint files for one conversion.
    fn conv_dir(&self, conversion_id: &str) -> PathBuf {
        self.base_dir.join(conversion_id)
    }

    /// Path of the JSON checkpoint for a single page; the page number is
    /// zero-padded to four digits so lexicographic order matches page order.
    fn page_path(&self, conversion_id: &str, page_number: usize) -> PathBuf {
        let mut path = self.conv_dir(conversion_id);
        path.push(format!("page_{:04}.json", page_number));
        path
    }

    /// Path of the conversion-level metadata file.
    fn meta_path(&self, conversion_id: &str) -> PathBuf {
        let mut path = self.conv_dir(conversion_id);
        path.push("meta.json");
        path
    }
}
impl CheckpointStore for FileCheckpointStore {
    /// Atomically persists one page's markdown + stats as pretty-printed JSON.
    ///
    /// The payload is written to a `.tmp` sibling first and then renamed over
    /// the final path, so a crash mid-write cannot leave a truncated `.json`.
    ///
    /// # Errors
    /// Returns `Pdf2MdError::CheckpointError` when the directory cannot be
    /// created, serialization fails, or the write/rename fails.
    fn save_page_checkpoint(
        &self,
        conversion_id: &str,
        page_number: usize,
        markdown: &str,
        stats: &PageStats,
    ) -> Result<(), Pdf2MdError> {
        let dir = self.conv_dir(conversion_id);
        fs::create_dir_all(&dir).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!("Failed to create checkpoint dir '{}': {}", dir.display(), e),
        })?;
        let page = CheckpointedPage {
            page_number,
            markdown: markdown.to_string(),
            stats: stats.clone(),
        };
        let json =
            serde_json::to_string_pretty(&page).map_err(|e| Pdf2MdError::CheckpointError {
                detail: format!("Failed to serialize page {} checkpoint: {}", page_number, e),
            })?;
        let final_path = self.page_path(conversion_id, page_number);
        // `with_extension` swaps `.json` for `.tmp`; page numbers are unique
        // within a conversion, so tmp names cannot collide across pages.
        let tmp_path = final_path.with_extension("tmp");
        fs::write(&tmp_path, &json).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!(
                "Failed to write checkpoint file '{}': {}",
                tmp_path.display(),
                e
            ),
        })?;
        fs::rename(&tmp_path, &final_path).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!(
                "Failed to rename checkpoint '{}' → '{}': {}",
                tmp_path.display(),
                final_path.display(),
                e
            ),
        })?;
        debug!(
            "Checkpoint saved: page {} → {}",
            page_number,
            final_path.display()
        );
        Ok(())
    }

    /// Loads a page checkpoint, treating every local failure mode as a miss.
    ///
    /// Unreadable files, invalid JSON, and empty markdown all yield `Ok(None)`
    /// (with a warning) instead of an error, so a damaged checkpoint merely
    /// forces the page to be re-converted rather than aborting the resume.
    fn load_page_checkpoint(
        &self,
        conversion_id: &str,
        page_number: usize,
    ) -> Result<Option<CheckpointedPage>, Pdf2MdError> {
        let path = self.page_path(conversion_id, page_number);
        if !path.exists() {
            return Ok(None);
        }
        // Read failures are downgraded to "missing". (Previously an error
        // value was built here via map_err and then immediately discarded —
        // dead code; a plain match makes the actual behavior explicit.)
        let content = match fs::read_to_string(&path) {
            Ok(c) => c,
            Err(e) => {
                warn!(
                    "Checkpoint file '{}' unreadable (treating as missing): {}",
                    path.display(),
                    e
                );
                return Ok(None);
            }
        };
        let page: CheckpointedPage = match serde_json::from_str(&content) {
            Ok(p) => p,
            Err(e) => {
                warn!(
                    "Checkpoint file '{}' has invalid JSON (treating as missing): {}",
                    path.display(),
                    e
                );
                return Ok(None);
            }
        };
        // An all-whitespace page is useless for resume; force re-conversion.
        if page.markdown.trim().is_empty() {
            warn!(
                "Checkpoint for page {} has empty markdown (treating as missing)",
                page_number
            );
            return Ok(None);
        }
        debug!(
            "Checkpoint loaded: page {} from {}",
            page_number,
            path.display()
        );
        Ok(Some(page))
    }

    /// Lists the page numbers that have a valid (loadable, non-empty)
    /// checkpoint, sorted ascending and deduplicated.
    ///
    /// # Errors
    /// Returns `Pdf2MdError::CheckpointError` only when the conversion
    /// directory exists but cannot be read.
    fn list_completed_pages(&self, conversion_id: &str) -> Result<Vec<usize>, Pdf2MdError> {
        let dir = self.conv_dir(conversion_id);
        if !dir.exists() {
            return Ok(vec![]);
        }
        let mut pages = Vec::new();
        let entries = fs::read_dir(&dir).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!("Failed to read checkpoint dir '{}': {}", dir.display(), e),
        })?;
        for entry in entries {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => continue,
            };
            let name = entry.file_name();
            let name_str = name.to_string_lossy();
            // Only names shaped like `page_<n>.json` count; `meta.json` and
            // any leftover `.tmp` files fail the prefix/suffix checks.
            if let Some(rest) = name_str.strip_prefix("page_") {
                if let Some(num_str) = rest.strip_suffix(".json") {
                    if let Ok(page_num) = num_str.parse::<usize>() {
                        // Re-validate through the loader so corrupt or empty
                        // checkpoints are not reported as completed.
                        if let Ok(Some(_)) = self.load_page_checkpoint(conversion_id, page_num) {
                            pages.push(page_num);
                        }
                    }
                }
            }
        }
        pages.sort_unstable();
        pages.dedup();
        Ok(pages)
    }

    /// Best-effort removal of a conversion's checkpoint directory.
    ///
    /// Deletion failures are logged and swallowed on purpose — stale
    /// checkpoints must never abort the conversion that is cleaning up.
    fn clear_checkpoints(&self, conversion_id: &str) -> Result<(), Pdf2MdError> {
        let dir = self.conv_dir(conversion_id);
        if !dir.exists() {
            return Ok(());
        }
        match fs::remove_dir_all(&dir) {
            Ok(()) => {
                info!("Checkpoints cleared: {}", dir.display());
                Ok(())
            }
            Err(e) => {
                warn!("Failed to clear checkpoints at '{}': {}", dir.display(), e);
                Ok(())
            }
        }
    }

    /// Writes the conversion metadata as pretty-printed JSON to `meta.json`.
    ///
    /// # Errors
    /// Returns `Pdf2MdError::CheckpointError` when the directory cannot be
    /// created, serialization fails, or the write fails.
    fn save_meta(&self, conversion_id: &str, meta: &CheckpointMeta) -> Result<(), Pdf2MdError> {
        let dir = self.conv_dir(conversion_id);
        fs::create_dir_all(&dir).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!("Failed to create checkpoint dir '{}': {}", dir.display(), e),
        })?;
        let json =
            serde_json::to_string_pretty(meta).map_err(|e| Pdf2MdError::CheckpointError {
                detail: format!("Failed to serialize checkpoint metadata: {}", e),
            })?;
        let path = self.meta_path(conversion_id);
        fs::write(&path, &json).map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!(
                "Failed to write checkpoint metadata '{}': {}",
                path.display(),
                e
            ),
        })?;
        debug!("Checkpoint metadata saved: {}", path.display());
        Ok(())
    }
}
/// Number of leading PDF bytes hashed into the conversion ID.
///
/// Hashing only a 64 KiB prefix keeps ID computation fast for large files; the
/// exact file size is mixed into the hash as well, so two PDFs must share both
/// their first 64 KiB and their byte length to collide.
const PDF_HASH_PREFIX_SIZE: usize = 64 * 1024;

/// Derives a stable 16-hex-char conversion ID from the PDF contents and the
/// conversion parameters (provider, model, fidelity tier, DPI).
///
/// Identical inputs always produce the same ID, which is what lets an
/// interrupted run find its earlier checkpoints; changing any parameter
/// yields a different ID.
///
/// # Errors
/// Returns `Pdf2MdError::CheckpointError` if the PDF cannot be opened, its
/// metadata cannot be read, or the prefix read fails.
pub fn compute_conversion_id(
    pdf_path: &Path,
    provider_name: &str,
    model_name: &str,
    fidelity: FidelityTier,
    dpi: u32,
) -> Result<String, Pdf2MdError> {
    let mut file = fs::File::open(pdf_path).map_err(|e| Pdf2MdError::CheckpointError {
        detail: format!(
            "Failed to open PDF for hashing '{}': {}",
            pdf_path.display(),
            e
        ),
    })?;
    let file_size = file
        .metadata()
        .map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!(
                "Failed to read PDF metadata '{}': {}",
                pdf_path.display(),
                e
            ),
        })?
        .len();
    // Take the min in u64 space BEFORE casting: the old `file_size as usize`
    // could silently truncate on 32-bit targets and change the prefix length.
    // After the min the value is at most 64 KiB, so the cast is always safe.
    let prefix_len = file_size.min(PDF_HASH_PREFIX_SIZE as u64) as usize;
    let mut prefix = vec![0u8; prefix_len];
    file.read_exact(&mut prefix)
        .map_err(|e| Pdf2MdError::CheckpointError {
            detail: format!("Failed to read PDF prefix '{}': {}", pdf_path.display(), e),
        })?;
    let fidelity_str = match fidelity {
        FidelityTier::Tier1 => "tier1",
        FidelityTier::Tier2 => "tier2",
        FidelityTier::Tier3 => "tier3",
    };
    let mut hasher = Sha256::new();
    hasher.update(&prefix);
    // Fixed little-endian encodings keep the hash stable across platforms.
    hasher.update(file_size.to_le_bytes());
    hasher.update(provider_name.as_bytes());
    hasher.update(model_name.as_bytes());
    hasher.update(fidelity_str.as_bytes());
    hasher.update(dpi.to_le_bytes());
    let hash = hasher.finalize();
    // SHA-256 hex is 64 ASCII chars; the first 16 (64 bits) are enough to
    // avoid accidental collisions between conversions, and byte slicing is
    // safe because every char is ASCII.
    let hex = format!("{:x}", hash);
    Ok(hex[..16].to_string())
}
// Unit tests: the no-op store's always-empty behavior, the file-backed store's
// save/load round-trips and corruption handling, and conversion-ID hashing.
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Write;

    // --- NoopCheckpointStore: everything is a silent no-op ------------------

    #[test]
    fn noop_store_save_and_load_returns_none() {
        let store = NoopCheckpointStore;
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("test-id", 1, "# Hello", &stats)
            .unwrap();
        let result = store.load_page_checkpoint("test-id", 1).unwrap();
        assert!(result.is_none());
    }

    #[test]
    fn noop_store_list_completed_returns_empty() {
        let store = NoopCheckpointStore;
        let pages = store.list_completed_pages("test-id").unwrap();
        assert!(pages.is_empty());
    }

    #[test]
    fn noop_store_clear_succeeds() {
        let store = NoopCheckpointStore;
        store.clear_checkpoints("test-id").unwrap();
    }

    // --- FileCheckpointStore: round-trips, corruption, isolation ------------

    #[test]
    fn file_store_save_and_load_page() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 1500,
            output_tokens: 800,
            duration_ms: 3200,
            retries: 1,
        };
        store
            .save_page_checkpoint("conv-abc", 1, "# Page 1\n\nSome content.", &stats)
            .unwrap();
        let loaded = store
            .load_page_checkpoint("conv-abc", 1)
            .unwrap()
            .expect("checkpoint should exist");
        assert_eq!(loaded.page_number, 1);
        assert_eq!(loaded.markdown, "# Page 1\n\nSome content.");
        assert_eq!(loaded.stats.input_tokens, 1500);
        assert_eq!(loaded.stats.output_tokens, 800);
        assert_eq!(loaded.stats.duration_ms, 3200);
        assert_eq!(loaded.stats.retries, 1);
    }

    #[test]
    fn file_store_load_nonexistent_returns_none() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let result = store.load_page_checkpoint("conv-abc", 42).unwrap();
        assert!(result.is_none());
    }

    // Corrupt JSON must degrade to a cache miss, not an error.
    #[test]
    fn file_store_load_corrupt_json_returns_none() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let conv_dir = dir.path().join("conv-bad");
        fs::create_dir_all(&conv_dir).unwrap();
        fs::write(conv_dir.join("page_0001.json"), "not valid json {{").unwrap();
        let result = store.load_page_checkpoint("conv-bad", 1).unwrap();
        assert!(result.is_none(), "Corrupt JSON should return None");
    }

    // Whitespace-only markdown is treated as an unusable checkpoint.
    #[test]
    fn file_store_load_empty_markdown_returns_none() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let conv_dir = dir.path().join("conv-empty");
        fs::create_dir_all(&conv_dir).unwrap();
        let page = CheckpointedPage {
            page_number: 1,
            markdown: " ".to_string(),
            stats: PageStats {
                input_tokens: 0,
                output_tokens: 0,
                duration_ms: 0,
                retries: 0,
            },
        };
        let json = serde_json::to_string_pretty(&page).unwrap();
        fs::write(conv_dir.join("page_0001.json"), json).unwrap();
        let result = store.load_page_checkpoint("conv-empty", 1).unwrap();
        assert!(
            result.is_none(),
            "Empty-markdown checkpoint should return None"
        );
    }

    // Pages saved out of order must come back sorted.
    #[test]
    fn file_store_list_completed_pages() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-list", 3, "# Page 3", &stats)
            .unwrap();
        store
            .save_page_checkpoint("conv-list", 1, "# Page 1", &stats)
            .unwrap();
        store
            .save_page_checkpoint("conv-list", 5, "# Page 5", &stats)
            .unwrap();
        let pages = store.list_completed_pages("conv-list").unwrap();
        assert_eq!(pages, vec![1, 3, 5]);
    }

    #[test]
    fn file_store_list_empty_conversion() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let pages = store.list_completed_pages("nonexistent").unwrap();
        assert!(pages.is_empty());
    }

    #[test]
    fn file_store_clear_checkpoints() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-clear", 1, "# Page 1", &stats)
            .unwrap();
        store
            .save_page_checkpoint("conv-clear", 2, "# Page 2", &stats)
            .unwrap();
        assert!(store.conv_dir("conv-clear").exists());
        store.clear_checkpoints("conv-clear").unwrap();
        assert!(!store.conv_dir("conv-clear").exists());
        let pages = store.list_completed_pages("conv-clear").unwrap();
        assert!(pages.is_empty());
    }

    #[test]
    fn file_store_clear_nonexistent_succeeds() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        store.clear_checkpoints("nonexistent").unwrap();
    }

    #[test]
    fn file_store_save_meta() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let meta = CheckpointMeta {
            conversion_id: "conv-meta".to_string(),
            pdf_path: "/tmp/test.pdf".to_string(),
            provider_name: "openai".to_string(),
            model_name: "gpt-4.1-nano".to_string(),
            fidelity: "tier2".to_string(),
            dpi: 150,
            maintain_format: false,
            created_at: "2026-03-01T12:00:00Z".to_string(),
        };
        store.save_meta("conv-meta", &meta).unwrap();
        let meta_path = store.meta_path("conv-meta");
        assert!(meta_path.exists());
        let content = fs::read_to_string(&meta_path).unwrap();
        let loaded: CheckpointMeta = serde_json::from_str(&content).unwrap();
        assert_eq!(loaded.conversion_id, "conv-meta");
        assert_eq!(loaded.provider_name, "openai");
    }

    // The atomic write-then-rename must not leave `.tmp` debris behind.
    #[test]
    fn file_store_no_leftover_tmp_files() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-tmp", 1, "# Page 1", &stats)
            .unwrap();
        let conv_dir = store.conv_dir("conv-tmp");
        let tmp_files: Vec<_> = fs::read_dir(&conv_dir)
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| e.file_name().to_string_lossy().ends_with(".tmp"))
            .collect();
        assert!(
            tmp_files.is_empty(),
            "No .tmp files should remain after save, found: {:?}",
            tmp_files.iter().map(|e| e.file_name()).collect::<Vec<_>>()
        );
    }

    #[test]
    fn file_store_overwrite_existing_checkpoint() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats1 = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        let stats2 = PageStats {
            input_tokens: 200,
            output_tokens: 100,
            duration_ms: 2000,
            retries: 1,
        };
        store
            .save_page_checkpoint("conv-ow", 1, "# First", &stats1)
            .unwrap();
        store
            .save_page_checkpoint("conv-ow", 1, "# Second", &stats2)
            .unwrap();
        let loaded = store.load_page_checkpoint("conv-ow", 1).unwrap().unwrap();
        assert_eq!(loaded.markdown, "# Second");
        assert_eq!(loaded.stats.input_tokens, 200);
    }

    // Clearing one conversion must not disturb another's checkpoints.
    #[test]
    fn file_store_multiple_conversions_isolated() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-a", 1, "# Conv A Page 1", &stats)
            .unwrap();
        store
            .save_page_checkpoint("conv-b", 1, "# Conv B Page 1", &stats)
            .unwrap();
        let a = store.load_page_checkpoint("conv-a", 1).unwrap().unwrap();
        let b = store.load_page_checkpoint("conv-b", 1).unwrap().unwrap();
        assert_eq!(a.markdown, "# Conv A Page 1");
        assert_eq!(b.markdown, "# Conv B Page 1");
        store.clear_checkpoints("conv-a").unwrap();
        assert!(store.load_page_checkpoint("conv-a", 1).unwrap().is_none());
        assert!(store.load_page_checkpoint("conv-b", 1).unwrap().is_some());
    }

    // --- compute_conversion_id: determinism and input sensitivity -----------

    #[test]
    fn conversion_id_is_deterministic() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        let mut f = fs::File::create(&pdf_path).unwrap();
        f.write_all(b"%PDF-1.7\nsome pdf content that is long enough to hash")
            .unwrap();
        let id1 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        let id2 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        assert_eq!(id1, id2, "Same inputs must produce same conversion ID");
        assert_eq!(id1.len(), 16, "Conversion ID must be 16 hex chars");
    }

    #[test]
    fn conversion_id_changes_with_model() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        fs::write(&pdf_path, b"%PDF-1.7\nsome content").unwrap();
        let id1 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        let id2 = compute_conversion_id(&pdf_path, "openai", "gpt-4.1", FidelityTier::Tier2, 150)
            .unwrap();
        assert_ne!(id1, id2, "Different model must produce different ID");
    }

    #[test]
    fn conversion_id_changes_with_provider() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        fs::write(&pdf_path, b"%PDF-1.7\nsome content").unwrap();
        let id1 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        let id2 = compute_conversion_id(
            &pdf_path,
            "anthropic",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        assert_ne!(id1, id2, "Different provider must produce different ID");
    }

    #[test]
    fn conversion_id_changes_with_dpi() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        fs::write(&pdf_path, b"%PDF-1.7\nsome content").unwrap();
        let id1 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        let id2 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            300,
        )
        .unwrap();
        assert_ne!(id1, id2, "Different DPI must produce different ID");
    }

    #[test]
    fn conversion_id_changes_with_fidelity() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        fs::write(&pdf_path, b"%PDF-1.7\nsome content").unwrap();
        let id1 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier1,
            150,
        )
        .unwrap();
        let id2 = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier3,
            150,
        )
        .unwrap();
        assert_ne!(id1, id2, "Different fidelity must produce different ID");
    }

    #[test]
    fn conversion_id_changes_with_pdf_content() {
        let dir = tempfile::tempdir().unwrap();
        let pdf1 = dir.path().join("a.pdf");
        let pdf2 = dir.path().join("b.pdf");
        fs::write(&pdf1, b"%PDF-1.7\ncontent A").unwrap();
        fs::write(&pdf2, b"%PDF-1.7\ncontent B different").unwrap();
        let id1 = compute_conversion_id(&pdf1, "openai", "gpt-4.1-nano", FidelityTier::Tier2, 150)
            .unwrap();
        let id2 = compute_conversion_id(&pdf2, "openai", "gpt-4.1-nano", FidelityTier::Tier2, 150)
            .unwrap();
        assert_ne!(id1, id2, "Different PDF content must produce different ID");
    }

    #[test]
    fn conversion_id_hex_chars_only() {
        let dir = tempfile::tempdir().unwrap();
        let pdf_path = dir.path().join("test.pdf");
        fs::write(&pdf_path, b"%PDF-1.7\nsome content").unwrap();
        let id = compute_conversion_id(
            &pdf_path,
            "openai",
            "gpt-4.1-nano",
            FidelityTier::Tier2,
            150,
        )
        .unwrap();
        assert!(
            id.chars().all(|c| c.is_ascii_hexdigit()),
            "Conversion ID must contain only hex chars, got: {}",
            id
        );
    }

    // --- Edge cases: big page numbers, big payloads, non-ASCII text ---------

    // Page numbers beyond the 4-digit zero-padding must still round-trip.
    #[test]
    fn file_store_high_page_numbers() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-high", 999, "# Page 999", &stats)
            .unwrap();
        store
            .save_page_checkpoint("conv-high", 1500, "# Page 1500", &stats)
            .unwrap();
        let loaded = store
            .load_page_checkpoint("conv-high", 999)
            .unwrap()
            .unwrap();
        assert_eq!(loaded.page_number, 999);
        let loaded = store
            .load_page_checkpoint("conv-high", 1500)
            .unwrap()
            .unwrap();
        assert_eq!(loaded.page_number, 1500);
        let pages = store.list_completed_pages("conv-high").unwrap();
        assert_eq!(pages, vec![999, 1500]);
    }

    #[test]
    fn file_store_large_markdown_roundtrip() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let large_md = "# Big Page\n\n".to_string() + &"Lorem ipsum dolor sit amet. ".repeat(2000);
        let stats = PageStats {
            input_tokens: 5000,
            output_tokens: 3000,
            duration_ms: 8000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-large", 1, &large_md, &stats)
            .unwrap();
        let loaded = store
            .load_page_checkpoint("conv-large", 1)
            .unwrap()
            .unwrap();
        assert_eq!(loaded.markdown, large_md);
        assert_eq!(loaded.stats.input_tokens, 5000);
    }

    #[test]
    fn file_store_unicode_markdown_roundtrip() {
        let dir = tempfile::tempdir().unwrap();
        let store = FileCheckpointStore::new(dir.path());
        let unicode_md = "# 日本語テスト\n\n数学: $\\sum_{i=1}^{n} x_i$ ñ é ü ö ä 🎉 emoji test\n";
        let stats = PageStats {
            input_tokens: 100,
            output_tokens: 50,
            duration_ms: 1000,
            retries: 0,
        };
        store
            .save_page_checkpoint("conv-unicode", 1, unicode_md, &stats)
            .unwrap();
        let loaded = store
            .load_page_checkpoint("conv-unicode", 1)
            .unwrap()
            .unwrap();
        assert_eq!(loaded.markdown, unicode_md);
    }
}