1use serde::{Deserialize, Serialize};
12use sha2::{Digest, Sha256};
13use std::fs;
14use std::io::Read;
15use std::path::{Path, PathBuf};
16
/// Gate for Pro-tier features. Currently always succeeds; kept as a
/// `Result` so call sites stay unchanged once licensing checks land.
fn require_pro() -> Result<(), String> {
    Ok(())
}
20
/// Substrings of destructive commands that warrant an automatic checkpoint.
/// Matched case-insensitively as plain substrings of the command line.
const AUTO_TRIGGER_PATTERNS: &[&str] = &[
    "rm -rf",
    "rm -f",
    "rm -fr",
    "git reset --hard",
    "git checkout .",
    "git clean -fd",
    "git clean -f",
];

/// Returns true when `command` looks destructive enough to checkpoint first:
/// it contains one of [`AUTO_TRIGGER_PATTERNS`], or invokes `mv` either at
/// the start of the line or as a later word.
pub fn should_auto_checkpoint(command: &str) -> bool {
    let normalized = command.to_lowercase();
    if AUTO_TRIGGER_PATTERNS
        .iter()
        .any(|pattern| normalized.contains(pattern))
    {
        return true;
    }
    normalized.starts_with("mv ") || normalized.contains(" mv ")
}
44
/// Metadata stored alongside each checkpoint as `meta.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointMeta {
    /// Unique checkpoint identifier (UUID v4 string; also the directory name).
    pub id: String,
    /// Creation time as an RFC 3339 timestamp (UTC).
    pub created_at: String,
    /// The command that triggered the checkpoint, if any.
    pub trigger_command: Option<String>,
    /// The path arguments the checkpoint was created from.
    pub paths: Vec<String>,
    /// Sum of the sizes of all backed-up files, in bytes.
    pub total_bytes: u64,
    /// Number of manifest entries (files) captured.
    pub file_count: usize,
}
55
/// One backed-up file as recorded in a checkpoint's `manifest.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestEntry {
    /// Path of the file at backup time; also the restore destination.
    pub original_path: String,
    /// Lowercase hex SHA-256 of the file contents; doubles as the blob
    /// filename under the checkpoint's `files/` directory.
    pub sha256: String,
    /// File size in bytes.
    pub size: u64,
    /// True for directory entries; restore and diff skip these.
    pub is_dir: bool,
}
64
/// Summary row returned by [`list`]: the subset of [`CheckpointMeta`]
/// needed to display a checkpoint without loading its manifest.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointListEntry {
    /// Checkpoint identifier (directory name).
    pub id: String,
    /// RFC 3339 creation timestamp, used for sorting and age-based purge.
    pub created_at: String,
    /// Command that triggered the checkpoint, if recorded.
    pub trigger_command: Option<String>,
    /// Number of files in the checkpoint.
    pub file_count: usize,
    /// Total recorded size of the checkpoint in bytes.
    pub total_bytes: u64,
}
74
/// Retention limits applied by [`purge`]. Each field falls back to its
/// `default_*` function when absent from a deserialized document.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointConfig {
    /// Maximum number of checkpoints to keep (oldest removed first).
    #[serde(default = "default_max_count")]
    pub max_count: usize,
    /// Checkpoints older than this many days are removed.
    #[serde(default = "default_max_age_days")]
    pub max_age_days: u32,
    /// Cap on the combined recorded size of all checkpoints, in bytes.
    /// A value of 0 disables the size cap (see [`purge`]).
    #[serde(default = "default_max_total_bytes")]
    pub max_total_bytes: u64,
}
85
/// Serde default: keep at most 50 checkpoints.
fn default_max_count() -> usize {
    50
}

/// Serde default: expire checkpoints after 30 days.
fn default_max_age_days() -> u32 {
    30
}

/// Serde default: cap total checkpoint storage at 500 MiB.
fn default_max_total_bytes() -> u64 {
    500 * 1024 * 1024
}
95
96impl Default for CheckpointConfig {
97 fn default() -> Self {
98 Self {
99 max_count: default_max_count(),
100 max_age_days: default_max_age_days(),
101 max_total_bytes: default_max_total_bytes(),
102 }
103 }
104}
105
106pub fn checkpoints_dir() -> PathBuf {
108 match crate::policy::state_dir() {
109 Some(d) => d.join("checkpoints"),
110 None => {
111 eprintln!("tirith: WARNING: state dir unavailable, using /tmp/tirith (world-readable)");
112 PathBuf::from("/tmp/tirith").join("checkpoints")
113 }
114 }
115}
116
/// Creates a new checkpoint backing up `paths` (files and/or directories).
///
/// On-disk layout: `<checkpoints_dir>/<uuid>/meta.json`, `manifest.json`,
/// and content-addressed blobs under `files/<sha256>`.
///
/// Nonexistent paths are skipped silently (callers may pass the arguments
/// of a destructive command verbatim); per-file backup errors are logged to
/// stderr and skipped. Errors if the checkpoint directory cannot be
/// created, if nothing at all was backed up, or if metadata cannot be
/// serialized/written.
pub fn create(paths: &[&str], trigger_command: Option<&str>) -> Result<CheckpointMeta, String> {
    require_pro()?;
    let base_dir = checkpoints_dir();
    let id = uuid::Uuid::new_v4().to_string();
    let cp_dir = base_dir.join(&id);
    let files_dir = cp_dir.join("files");

    fs::create_dir_all(&files_dir).map_err(|e| format!("create checkpoint dir: {e}"))?;

    let mut manifest: Vec<ManifestEntry> = Vec::new();
    let mut total_bytes: u64 = 0;

    for path_str in paths {
        let path = Path::new(path_str);
        if !path.exists() {
            continue;
        }

        if path.is_file() {
            match backup_file(path, &files_dir) {
                Ok(entry) => {
                    total_bytes += entry.size;
                    manifest.push(entry);
                }
                Err(e) => {
                    // Best-effort: one unreadable file must not abort the
                    // whole checkpoint.
                    eprintln!("tirith: checkpoint: skip {path_str}: {e}");
                }
            }
        } else if path.is_dir() {
            match backup_dir(path, &files_dir) {
                Ok(entries) => {
                    for entry in entries {
                        total_bytes += entry.size;
                        manifest.push(entry);
                    }
                }
                Err(e) => {
                    eprintln!("tirith: checkpoint: skip dir {path_str}: {e}");
                }
            }
        }
    }

    // Nothing captured: remove the now-empty checkpoint dir and report
    // failure instead of leaving an inert checkpoint behind.
    if manifest.is_empty() {
        let _ = fs::remove_dir_all(&cp_dir);
        return Err("no files to checkpoint".to_string());
    }

    let now = chrono::Utc::now().to_rfc3339();
    let meta = CheckpointMeta {
        id: id.clone(),
        created_at: now,
        trigger_command: trigger_command.map(|s| s.to_string()),
        paths: paths.iter().map(|s| s.to_string()).collect(),
        total_bytes,
        file_count: manifest.len(),
    };

    let meta_json = serde_json::to_string_pretty(&meta).map_err(|e| format!("serialize: {e}"))?;
    fs::write(cp_dir.join("meta.json"), meta_json).map_err(|e| format!("write meta: {e}"))?;

    let manifest_json =
        serde_json::to_string_pretty(&manifest).map_err(|e| format!("serialize: {e}"))?;
    fs::write(cp_dir.join("manifest.json"), manifest_json)
        .map_err(|e| format!("write manifest: {e}"))?;

    Ok(meta)
}
189
/// Lists all checkpoints, newest first.
///
/// Scans every subdirectory of [`checkpoints_dir`] for a `meta.json`.
/// Entries that cannot be read or parsed are logged to stderr and skipped,
/// so one corrupt checkpoint does not hide the rest. Returns an empty
/// vector when the checkpoints directory does not exist yet.
pub fn list() -> Result<Vec<CheckpointListEntry>, String> {
    let base_dir = checkpoints_dir();
    if !base_dir.exists() {
        return Ok(Vec::new());
    }

    let mut entries = Vec::new();

    for entry in fs::read_dir(&base_dir).map_err(|e| format!("read dir: {e}"))? {
        let entry = match entry {
            Ok(e) => e,
            Err(e) => {
                eprintln!("tirith: checkpoint list: cannot read entry: {e}");
                continue;
            }
        };
        // Directories without meta.json are not checkpoints (or are
        // half-written); ignore them.
        let meta_path = entry.path().join("meta.json");
        if !meta_path.exists() {
            continue;
        }
        let meta_str = match fs::read_to_string(&meta_path) {
            Ok(s) => s,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint list: cannot read {}: {e}",
                    meta_path.display()
                );
                continue;
            }
        };
        let meta: CheckpointMeta = match serde_json::from_str(&meta_str) {
            Ok(m) => m,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint list: corrupt {}: {e}",
                    meta_path.display()
                );
                continue;
            }
        };
        entries.push(CheckpointListEntry {
            id: meta.id,
            created_at: meta.created_at,
            trigger_command: meta.trigger_command,
            file_count: meta.file_count,
            total_bytes: meta.total_bytes,
        });
    }

    // RFC 3339 timestamps sort chronologically as plain strings; descending
    // comparison puts the newest checkpoint first.
    entries.sort_by(|a, b| b.created_at.cmp(&a.created_at));
    Ok(entries)
}
244
/// Validates that a manifest-supplied restore destination is a plain
/// relative path, so a tampered manifest cannot direct writes outside the
/// current working directory.
///
/// Rejects absolute paths, any `..` component, and (on Windows) any path
/// prefix such as `C:` — drive-relative paths like `C:foo` are not
/// `is_absolute` but still escape the working directory.
fn validate_restore_path(path: &str) -> Result<(), String> {
    let p = Path::new(path);
    // `starts_with('/')` is redundant with `is_absolute()` on Unix but also
    // rejects rooted paths like `/foo` on Windows, where they are not
    // considered absolute.
    if p.is_absolute() || path.starts_with('/') {
        return Err(format!("restore path is absolute: {path}"));
    }
    for component in p.components() {
        match component {
            std::path::Component::ParentDir => {
                return Err(format!("restore path contains '..': {path}"));
            }
            std::path::Component::Prefix(_) => {
                return Err(format!("restore path has a prefix: {path}"));
            }
            _ => {}
        }
    }
    Ok(())
}
261
/// Validates that a manifest-supplied blob name is a well-formed lowercase
/// hex SHA-256 (exactly 64 chars of `0-9a-f`), so it can never name a file
/// outside the checkpoint's `files/` directory.
fn validate_sha256_filename(sha: &str) -> Result<(), String> {
    let well_formed = sha.len() == 64
        && sha
            .bytes()
            .all(|b| matches!(b, b'0'..=b'9' | b'a'..=b'f'));
    if well_formed {
        Ok(())
    } else {
        Err(format!("invalid sha256 in manifest: {sha}"))
    }
}
273
/// Restores every file recorded in checkpoint `checkpoint_id` to its
/// original path, returning the list of paths that were written.
///
/// Each manifest entry is validated before use: the destination must be a
/// relative path free of `..` components, and the blob name must be a
/// well-formed lowercase SHA-256, so a tampered manifest cannot write
/// outside the working directory or read outside `files/`.
///
/// Directory entries are skipped; a missing blob is logged to stderr and
/// skipped. The first failed copy aborts the restore with an error.
pub fn restore(checkpoint_id: &str) -> Result<Vec<String>, String> {
    require_pro()?;
    let cp_dir = checkpoints_dir().join(checkpoint_id);
    if !cp_dir.exists() {
        return Err(format!("checkpoint not found: {checkpoint_id}"));
    }

    let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
        .map_err(|e| format!("read manifest: {e}"))?;
    let manifest: Vec<ManifestEntry> =
        serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;

    let files_dir = cp_dir.join("files");
    let mut restored = Vec::new();

    for entry in &manifest {
        if entry.is_dir {
            continue;
        }

        // Reject absolute/traversal destinations and malformed blob names
        // before touching the filesystem.
        validate_restore_path(&entry.original_path)?;
        validate_sha256_filename(&entry.sha256)?;

        let src = files_dir.join(&entry.sha256);
        if !src.exists() {
            eprintln!(
                "tirith: checkpoint restore: missing data for {}",
                entry.original_path
            );
            continue;
        }

        // Recreate parent directories that may have been deleted since the
        // checkpoint was taken.
        let dst = Path::new(&entry.original_path);
        if let Some(parent) = dst.parent() {
            fs::create_dir_all(parent).map_err(|e| {
                format!(
                    "restore {}: cannot create parent dir: {e}",
                    entry.original_path
                )
            })?;
        }

        fs::copy(&src, dst).map_err(|e| format!("restore {}: {e}", entry.original_path))?;
        restored.push(entry.original_path.clone());
    }

    Ok(restored)
}
323
324pub fn diff(checkpoint_id: &str) -> Result<Vec<DiffEntry>, String> {
326 require_pro()?;
327 let cp_dir = checkpoints_dir().join(checkpoint_id);
328 if !cp_dir.exists() {
329 return Err(format!("checkpoint not found: {checkpoint_id}"));
330 }
331
332 let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
333 .map_err(|e| format!("read manifest: {e}"))?;
334 let manifest: Vec<ManifestEntry> =
335 serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;
336
337 let files_dir = cp_dir.join("files");
338 let mut diffs = Vec::new();
339 let mut classified_paths: std::collections::HashSet<String> = std::collections::HashSet::new();
342
343 for entry in &manifest {
344 if entry.is_dir {
345 continue;
346 }
347
348 let backup = files_dir.join(&entry.sha256);
349 if !backup.exists() {
350 diffs.push(DiffEntry {
351 path: entry.original_path.clone(),
352 status: DiffStatus::BackupCorrupt,
353 checkpoint_sha256: entry.sha256.clone(),
354 current_sha256: None,
355 });
356 classified_paths.insert(entry.original_path.clone());
357 continue;
358 }
359
360 let current_path = Path::new(&entry.original_path);
361 if !current_path.exists() {
362 diffs.push(DiffEntry {
363 path: entry.original_path.clone(),
364 status: DiffStatus::Deleted,
365 checkpoint_sha256: entry.sha256.clone(),
366 current_sha256: None,
367 });
368 classified_paths.insert(entry.original_path.clone());
369 continue;
370 }
371
372 match sha256_file(current_path) {
373 Ok(current_sha) => {
374 if current_sha != entry.sha256 {
375 diffs.push(DiffEntry {
376 path: entry.original_path.clone(),
377 status: DiffStatus::Modified,
378 checkpoint_sha256: entry.sha256.clone(),
379 current_sha256: Some(current_sha),
380 });
381 classified_paths.insert(entry.original_path.clone());
382 }
383 }
384 Err(e) => {
385 eprintln!(
386 "tirith: checkpoint diff: cannot read {}: {e}",
387 entry.original_path
388 );
389 diffs.push(DiffEntry {
390 path: entry.original_path.clone(),
391 status: DiffStatus::Modified,
392 checkpoint_sha256: entry.sha256.clone(),
393 current_sha256: None,
394 });
395 classified_paths.insert(entry.original_path.clone());
396 }
397 }
398 }
399
400 let _ = &classified_paths;
401
402 Ok(diffs)
403}
404
/// Applies the retention policy in `config`: first removes checkpoints
/// older than `max_age_days`, then trims to at most `max_count` entries,
/// then removes oldest entries until the recorded total size fits under
/// `max_total_bytes` (0 disables the size cap). [`list`] returns newest
/// first, so `pop()` always yields the oldest remaining checkpoint.
///
/// Removal failures are logged to stderr; the age pass keeps such entries
/// and continues, while the count/size passes break out to avoid spinning
/// on a persistent error. Freed bytes are computed from each checkpoint's
/// recorded metadata, not by re-measuring the directory.
pub fn purge(config: &CheckpointConfig) -> Result<PurgeResult, String> {
    require_pro()?;
    let base_dir = checkpoints_dir();
    if !base_dir.exists() {
        return Ok(PurgeResult {
            removed_count: 0,
            freed_bytes: 0,
        });
    }

    let mut all = list()?;
    let mut removed_count = 0;
    let mut freed_bytes: u64 = 0;

    // Pass 1: age limit. The retain closure deletes directories as a side
    // effect and returns false only when the removal actually succeeded;
    // entries with unparseable timestamps are kept rather than deleted.
    let now = chrono::Utc::now();
    let max_age = chrono::Duration::days(config.max_age_days as i64);
    all.retain(|e| {
        if let Ok(created) = chrono::DateTime::parse_from_rfc3339(&e.created_at) {
            let age = now.signed_duration_since(created);
            if age > max_age {
                let cp_dir = base_dir.join(&e.id);
                match fs::remove_dir_all(&cp_dir) {
                    Ok(()) => {
                        freed_bytes += e.total_bytes;
                        removed_count += 1;
                        return false;
                    }
                    Err(err) => {
                        eprintln!("tirith: checkpoint purge: failed to remove {}: {err}", e.id);
                        return true;
                    }
                }
            }
        }
        true
    });

    // Pass 2: count limit — drop the oldest until at most max_count remain.
    while all.len() > config.max_count {
        if let Some(oldest) = all.pop() {
            let cp_dir = base_dir.join(&oldest.id);
            match fs::remove_dir_all(&cp_dir) {
                Ok(()) => {
                    freed_bytes += oldest.total_bytes;
                    removed_count += 1;
                }
                Err(e) => {
                    eprintln!(
                        "tirith: checkpoint purge: failed to remove {}: {e}",
                        oldest.id
                    );
                    break;
                }
            }
        }
    }

    // Pass 3: size budget — drop the oldest until under max_total_bytes.
    let mut total: u64 = all.iter().map(|e| e.total_bytes).sum();
    while config.max_total_bytes > 0 && total > config.max_total_bytes && !all.is_empty() {
        if let Some(oldest) = all.pop() {
            let cp_dir = base_dir.join(&oldest.id);
            match fs::remove_dir_all(&cp_dir) {
                Ok(()) => {
                    total -= oldest.total_bytes;
                    freed_bytes += oldest.total_bytes;
                    removed_count += 1;
                }
                Err(e) => {
                    eprintln!(
                        "tirith: checkpoint purge: failed to remove {}: {e}",
                        oldest.id
                    );
                    break;
                }
            }
        }
    }

    Ok(PurgeResult {
        removed_count,
        freed_bytes,
    })
}
492
/// One difference reported by [`diff`] between a checkpoint and the
/// current filesystem state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiffEntry {
    /// Original path of the file, as recorded in the manifest.
    pub path: String,
    /// How the current state differs from the checkpoint.
    pub status: DiffStatus,
    /// SHA-256 recorded in the checkpoint manifest.
    pub checkpoint_sha256: String,
    /// SHA-256 of the file as it exists now; `None` when the file is
    /// deleted, unreadable, or the backup blob itself is missing.
    pub current_sha256: Option<String>,
}
501
/// Classification of a [`DiffEntry`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum DiffStatus {
    /// The original file no longer exists.
    Deleted,
    /// The current content hash differs from the checkpointed one (or the
    /// current file could not be hashed).
    Modified,
    /// The checkpoint's stored blob for this file is missing.
    BackupCorrupt,
}
508
/// Outcome of a [`purge`] run.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PurgeResult {
    /// Number of checkpoint directories removed.
    pub removed_count: usize,
    /// Bytes reclaimed, summed from each removed checkpoint's recorded
    /// `total_bytes` metadata.
    pub freed_bytes: u64,
}
515
516pub fn create_and_purge(paths: &[&str], trigger_command: Option<&str>) -> Result<(), String> {
520 create(paths, trigger_command)?;
521 let config = CheckpointConfig::default();
522 purge(&config)?;
523 Ok(())
524}
525
526fn backup_file(path: &Path, files_dir: &Path) -> Result<ManifestEntry, String> {
528 let sha = sha256_file(path)?;
529 let dst = files_dir.join(&sha);
530
531 if !dst.exists() {
534 fs::copy(path, &dst).map_err(|e| format!("copy: {e}"))?;
535 }
536
537 let size = match path.metadata() {
538 Ok(m) => m.len(),
539 Err(e) => {
540 eprintln!(
541 "tirith: checkpoint: cannot read metadata for {}: {e}",
542 path.display()
543 );
544 0
545 }
546 };
547
548 Ok(ManifestEntry {
549 original_path: path.to_string_lossy().to_string(),
550 sha256: sha,
551 size,
552 is_dir: false,
553 })
554}
555
556fn backup_dir(dir: &Path, files_dir: &Path) -> Result<Vec<ManifestEntry>, String> {
563 let mut entries = Vec::new();
564 const MAX_FILES: usize = 10_000;
565 const MAX_SINGLE_FILE: u64 = 100 * 1024 * 1024; backup_dir_recursive(dir, files_dir, &mut entries, MAX_FILES, MAX_SINGLE_FILE)?;
568 Ok(entries)
569}
570
/// Depth-first walk of `dir`, backing up regular files into `files_dir`.
///
/// Stops once `entries` holds `max_files` manifest entries and skips any
/// single file larger than `max_single_file` bytes. Symlinks are never
/// followed (`symlink_metadata` does not traverse the link). Hidden
/// directories (name starting with '.') are skipped; note that hidden
/// *files* are still backed up.
///
/// Unreadable entries are logged to stderr and skipped; only a failure to
/// read a directory listing itself aborts with an error.
fn backup_dir_recursive(
    dir: &Path,
    files_dir: &Path,
    entries: &mut Vec<ManifestEntry>,
    max_files: usize,
    max_single_file: u64,
) -> Result<(), String> {
    // Global cap already reached by a sibling subtree: nothing to do.
    if entries.len() >= max_files {
        return Ok(());
    }

    let read_dir = fs::read_dir(dir).map_err(|e| format!("read dir {}: {e}", dir.display()))?;

    for entry in read_dir {
        if entries.len() >= max_files {
            break;
        }
        let entry = match entry {
            Ok(e) => e,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint: skip unreadable entry in {}: {e}",
                    dir.display()
                );
                continue;
            }
        };
        let path = entry.path();

        // symlink_metadata stats the link itself, letting us detect and
        // skip symlinks instead of following them.
        let meta = match path.symlink_metadata() {
            Ok(m) => m,
            Err(e) => {
                eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
                continue;
            }
        };

        if meta.file_type().is_symlink() {
            continue;
        }

        if meta.file_type().is_file() {
            let size = meta.len();
            if size > max_single_file {
                eprintln!(
                    "tirith: checkpoint: skip large file {} ({} bytes)",
                    path.display(),
                    size
                );
                continue;
            }
            match backup_file(&path, files_dir) {
                Ok(e) => entries.push(e),
                Err(e) => {
                    eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
                }
            }
        } else if path.is_dir() {
            // Skip hidden directories (e.g. .git) — typically large and
            // reproducible.
            if path
                .file_name()
                .and_then(|n| n.to_str())
                .map(|n| n.starts_with('.'))
                .unwrap_or(false)
            {
                continue;
            }
            backup_dir_recursive(&path, files_dir, entries, max_files, max_single_file)?;
        }
    }

    Ok(())
}
647
648fn sha256_file(path: &Path) -> Result<String, String> {
650 let mut file = fs::File::open(path).map_err(|e| format!("open {}: {e}", path.display()))?;
651 let mut hasher = Sha256::new();
652 let mut buf = [0u8; 8192];
653 loop {
654 let n = file.read(&mut buf).map_err(|e| format!("read: {e}"))?;
655 if n == 0 {
656 break;
657 }
658 hasher.update(&buf[..n]);
659 }
660 Ok(format!("{:x}", hasher.finalize()))
661}
662
#[cfg(test)]
mod tests {
    use super::*;

    // Destructive commands must trigger an automatic checkpoint;
    // read-only commands must not.
    #[test]
    fn test_should_auto_checkpoint() {
        assert!(should_auto_checkpoint("rm -rf /tmp/myproject"));
        assert!(should_auto_checkpoint("rm -f important.txt"));
        assert!(should_auto_checkpoint("git reset --hard HEAD~3"));
        assert!(should_auto_checkpoint("git checkout ."));
        assert!(should_auto_checkpoint("git clean -fd"));
        assert!(should_auto_checkpoint("sudo rm -rf /"));
        assert!(!should_auto_checkpoint("ls -la"));
        assert!(!should_auto_checkpoint("echo hello"));
        assert!(!should_auto_checkpoint("git status"));
    }

    // Default::default must agree with the serde default_* functions.
    #[test]
    fn test_checkpoint_config_defaults() {
        let config = CheckpointConfig::default();
        assert_eq!(config.max_count, 50);
        assert_eq!(config.max_age_days, 30);
        assert_eq!(config.max_total_bytes, 500 * 1024 * 1024);
    }

    // backup_file stores the blob under its sha256 with the content intact.
    #[test]
    fn test_backup_and_sha256() {
        let tmp = tempfile::tempdir().unwrap();
        let test_file = tmp.path().join("test.txt");
        fs::write(&test_file, "hello world").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entry = backup_file(&test_file, &files_dir).unwrap();
        assert!(!entry.sha256.is_empty());
        assert_eq!(entry.size, 11);
        assert!(!entry.is_dir);

        let backup_path = files_dir.join(&entry.sha256);
        assert!(backup_path.exists());
        let content = fs::read_to_string(&backup_path).unwrap();
        assert_eq!(content, "hello world");
    }

    // Directory backup walks into subdirectories.
    #[test]
    fn test_backup_dir_recursive() {
        let tmp = tempfile::tempdir().unwrap();
        let dir = tmp.path().join("project");
        fs::create_dir_all(dir.join("src")).unwrap();
        fs::write(dir.join("README.md"), "# Hello").unwrap();
        fs::write(dir.join("src/main.rs"), "fn main() {}").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entries = backup_dir(&dir, &files_dir).unwrap();
        assert_eq!(entries.len(), 2, "should backup 2 files: {entries:?}");
    }

    #[test]
    fn test_backup_nonexistent_file() {
        let tmp = tempfile::tempdir().unwrap();
        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let result = backup_file(Path::new("/nonexistent/file.txt"), &files_dir);
        assert!(result.is_err());
    }

    // Manifest-supplied restore destinations must stay inside the working
    // directory: no '..', no absolute paths.
    #[test]
    fn test_validate_restore_path_rejects_traversal() {
        assert!(validate_restore_path("../../etc/passwd").is_err());
        assert!(validate_restore_path("/tmp/../etc/evil").is_err());
        assert!(validate_restore_path("normal/path/file.txt").is_ok());
        assert!(
            validate_restore_path("/absolute/path/file.txt").is_err(),
            "absolute paths should be rejected"
        );
        assert!(
            validate_restore_path("/etc/passwd").is_err(),
            "absolute paths should be rejected"
        );
    }

    // Blob names must be exactly 64 lowercase hex chars.
    #[test]
    fn test_validate_sha256_filename() {
        let valid = "a".repeat(64);
        assert!(validate_sha256_filename(&valid).is_ok());
        assert!(validate_sha256_filename("short").is_err());
        assert!(validate_sha256_filename("../../etc/passwd").is_err());
        assert!(validate_sha256_filename(&"g".repeat(64)).is_err());
    }

    // DiffStatus must round-trip through serde.
    #[test]
    fn test_diff_status_serde() {
        let entry = DiffEntry {
            path: "/tmp/test.txt".to_string(),
            status: DiffStatus::Deleted,
            checkpoint_sha256: "abc123".to_string(),
            current_sha256: None,
        };
        let json = serde_json::to_string(&entry).unwrap();
        let parsed: DiffEntry = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.status, DiffStatus::Deleted);
    }

    // End-to-end: a pre-seeded 60-day-old checkpoint must be removed by the
    // default 30-day policy while the freshly created one survives.
    // XDG_STATE_HOME is redirected under TEST_ENV_LOCK because env vars are
    // process-global state shared across test threads.
    #[test]
    fn test_create_and_purge_removes_expired() {
        let _guard = crate::TEST_ENV_LOCK
            .lock()
            .unwrap_or_else(|e| e.into_inner());

        let tmpdir = tempfile::tempdir().unwrap();
        let workdir = tmpdir.path().join("project");
        fs::create_dir_all(&workdir).unwrap();
        fs::write(workdir.join("file.txt"), "content").unwrap();

        let state_dir = tmpdir.path().join("state");

        let prev = std::env::var("XDG_STATE_HOME").ok();
        unsafe { std::env::set_var("XDG_STATE_HOME", &state_dir) };

        // Seed an expired checkpoint directly on disk.
        let cp_base = state_dir.join("tirith/checkpoints");
        let old_cp = cp_base.join("old-expired");
        let old_files = old_cp.join("files");
        fs::create_dir_all(&old_files).unwrap();

        let old_time = chrono::Utc::now() - chrono::Duration::days(60);
        let meta_json = serde_json::json!({
            "id": "old-expired",
            "created_at": old_time.to_rfc3339(),
            "trigger_command": "rm -rf old",
            "paths": ["/tmp/old"],
            "total_bytes": 8,
            "file_count": 1
        });
        fs::write(old_cp.join("meta.json"), meta_json.to_string()).unwrap();
        fs::write(old_files.join("dummy"), "old data").unwrap();
        let manifest = serde_json::json!([{
            "original_path": "old.txt",
            "sha256": "dummy",
            "size": 8,
            "is_dir": false
        }]);
        fs::write(old_cp.join("manifest.json"), manifest.to_string()).unwrap();
        assert!(old_cp.exists());

        let work_str = workdir.to_str().unwrap();
        let result = create_and_purge(&[work_str], Some("rm -rf tempstuff"));

        // Restore the env var before asserting so a failure doesn't leak
        // the redirected state dir into other tests.
        match prev {
            Some(val) => unsafe { std::env::set_var("XDG_STATE_HOME", val) },
            None => unsafe { std::env::remove_var("XDG_STATE_HOME") },
        }

        assert!(result.is_ok(), "create_and_purge failed: {result:?}");
        assert!(
            !old_cp.exists(),
            "expired checkpoint should have been purged"
        );
        let remaining: Vec<_> = fs::read_dir(&cp_base)
            .unwrap()
            .filter_map(|e| e.ok())
            .collect();
        assert_eq!(
            remaining.len(),
            1,
            "exactly one new checkpoint should remain"
        );
    }
}