1use serde::{Deserialize, Serialize};
12use sha2::{Digest, Sha256};
13use std::fs;
14use std::io::Read;
15use std::path::{Path, PathBuf};
16
17fn require_pro() -> Result<(), String> {
20 let tier = crate::license::current_tier();
21 if tier >= crate::license::Tier::Pro {
22 Ok(())
23 } else {
24 Err(format!(
25 "Checkpoint features require a Pro license (current tier: {tier})."
26 ))
27 }
28}
29
/// Lowercase substrings of known-destructive shell commands; any command
/// containing one of these triggers an automatic checkpoint.
const AUTO_TRIGGER_PATTERNS: &[&str] = &[
    "rm -rf",
    "rm -f",
    "rm -fr",
    "git reset --hard",
    "git checkout .",
    "git clean -fd",
    "git clean -f",
];

/// Decide whether `command` warrants an automatic checkpoint before it runs.
///
/// Matching is case-insensitive. A command triggers when it contains any
/// pattern from `AUTO_TRIGGER_PATTERNS`, or when it invokes `mv` as the
/// first word or after a space.
pub fn should_auto_checkpoint(command: &str) -> bool {
    let lower = command.to_lowercase();
    if AUTO_TRIGGER_PATTERNS.iter().any(|pat| lower.contains(pat)) {
        return true;
    }
    lower.starts_with("mv ") || lower.contains(" mv ")
}
53
/// Metadata describing a single checkpoint, persisted as `meta.json`
/// inside the checkpoint's directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointMeta {
    // Unique checkpoint identifier (a UUID string; see `create`).
    pub id: String,
    // RFC 3339 creation timestamp (produced via chrono Utc::now()).
    pub created_at: String,
    // The command that triggered this checkpoint, if any.
    pub trigger_command: Option<String>,
    // The input paths that were requested for backup.
    pub paths: Vec<String>,
    // Total size in bytes of all backed-up files.
    pub total_bytes: u64,
    // Number of file entries recorded in the manifest.
    pub file_count: usize,
}
64
/// One backed-up file (or directory marker) inside a checkpoint's
/// `manifest.json`. The backing blob lives at `files/<sha256>`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestEntry {
    // Path the file originally lived at; restore targets this path.
    pub original_path: String,
    // Lowercase hex SHA-256 of the file contents; doubles as the blob name.
    pub sha256: String,
    // File size in bytes (0 if metadata could not be read at backup time).
    pub size: u64,
    // True for directory entries, which carry no blob.
    pub is_dir: bool,
}
73
/// Summary row returned by `list()` — a trimmed-down view of
/// `CheckpointMeta` without the backed-up path list.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointListEntry {
    // Checkpoint UUID.
    pub id: String,
    // RFC 3339 creation timestamp, used for newest-first sorting.
    pub created_at: String,
    // The command that triggered the checkpoint, if any.
    pub trigger_command: Option<String>,
    // Number of files in the checkpoint.
    pub file_count: usize,
    // Total backed-up size in bytes.
    pub total_bytes: u64,
}
83
/// Retention policy for `purge()`. Each field falls back to its
/// `default_*` function when absent from deserialized config.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointConfig {
    // Maximum number of checkpoints to keep (oldest trimmed first).
    #[serde(default = "default_max_count")]
    pub max_count: usize,
    // Checkpoints older than this many days are removed.
    #[serde(default = "default_max_age_days")]
    pub max_age_days: u32,
    // Cap on combined checkpoint size in bytes; 0 disables the size cap.
    #[serde(default = "default_max_total_bytes")]
    pub max_total_bytes: u64,
}
94
/// Default cap on the number of retained checkpoints.
fn default_max_count() -> usize {
    50
}

/// Default maximum checkpoint age, in days.
fn default_max_age_days() -> u32 {
    30
}

/// Default cap on total checkpoint storage: 500 MiB.
fn default_max_total_bytes() -> u64 {
    500 * 1024 * 1024
}
104
105impl Default for CheckpointConfig {
106 fn default() -> Self {
107 Self {
108 max_count: default_max_count(),
109 max_age_days: default_max_age_days(),
110 max_total_bytes: default_max_total_bytes(),
111 }
112 }
113}
114
115pub fn checkpoints_dir() -> PathBuf {
117 match crate::policy::state_dir() {
118 Some(d) => d.join("checkpoints"),
119 None => {
120 eprintln!("tirith: WARNING: state dir unavailable, using /tmp/tirith (world-readable)");
121 PathBuf::from("/tmp/tirith").join("checkpoints")
122 }
123 }
124}
125
/// Create a new checkpoint backing up the given `paths`.
///
/// Requires a Pro license. Regular files are backed up individually and
/// directories recursively (subject to the limits in `backup_dir`);
/// unreadable inputs are skipped with a warning rather than failing the
/// whole checkpoint. On success the checkpoint directory contains
/// `meta.json`, `manifest.json`, and a content-addressed `files/` store.
///
/// Errors if the checkpoint directory cannot be created, if nothing at
/// all was backed up, or if the metadata/manifest cannot be written.
pub fn create(paths: &[&str], trigger_command: Option<&str>) -> Result<CheckpointMeta, String> {
    require_pro()?;
    let base_dir = checkpoints_dir();
    // Each checkpoint lives in its own UUID-named directory.
    let id = uuid::Uuid::new_v4().to_string();
    let cp_dir = base_dir.join(&id);
    let files_dir = cp_dir.join("files");

    fs::create_dir_all(&files_dir).map_err(|e| format!("create checkpoint dir: {e}"))?;

    let mut manifest: Vec<ManifestEntry> = Vec::new();
    let mut total_bytes: u64 = 0;

    for path_str in paths {
        let path = Path::new(path_str);
        // Nonexistent inputs are silently ignored.
        if !path.exists() {
            continue;
        }

        if path.is_file() {
            match backup_file(path, &files_dir) {
                Ok(entry) => {
                    total_bytes += entry.size;
                    manifest.push(entry);
                }
                Err(e) => {
                    // Best effort: skip the unreadable file, keep the rest.
                    eprintln!("tirith: checkpoint: skip {path_str}: {e}");
                }
            }
        } else if path.is_dir() {
            match backup_dir(path, &files_dir) {
                Ok(entries) => {
                    for entry in entries {
                        total_bytes += entry.size;
                        manifest.push(entry);
                    }
                }
                Err(e) => {
                    eprintln!("tirith: checkpoint: skip dir {path_str}: {e}");
                }
            }
        }
    }

    // Nothing usable was found: remove the empty checkpoint directory
    // (best effort) and report failure.
    if manifest.is_empty() {
        let _ = fs::remove_dir_all(&cp_dir);
        return Err("no files to checkpoint".to_string());
    }

    let now = chrono::Utc::now().to_rfc3339();
    let meta = CheckpointMeta {
        id: id.clone(),
        created_at: now,
        trigger_command: trigger_command.map(|s| s.to_string()),
        paths: paths.iter().map(|s| s.to_string()).collect(),
        total_bytes,
        file_count: manifest.len(),
    };

    let meta_json = serde_json::to_string_pretty(&meta).map_err(|e| format!("serialize: {e}"))?;
    fs::write(cp_dir.join("meta.json"), meta_json).map_err(|e| format!("write meta: {e}"))?;

    let manifest_json =
        serde_json::to_string_pretty(&manifest).map_err(|e| format!("serialize: {e}"))?;
    fs::write(cp_dir.join("manifest.json"), manifest_json)
        .map_err(|e| format!("write manifest: {e}"))?;

    Ok(meta)
}
198
/// List all checkpoints on disk, newest first.
///
/// Read-only, so no license gate. Directories without a `meta.json`, or
/// with unreadable/corrupt metadata, are skipped with a warning rather
/// than failing the whole listing.
pub fn list() -> Result<Vec<CheckpointListEntry>, String> {
    let base_dir = checkpoints_dir();
    if !base_dir.exists() {
        return Ok(Vec::new());
    }

    let mut entries = Vec::new();

    for entry in fs::read_dir(&base_dir).map_err(|e| format!("read dir: {e}"))? {
        let entry = match entry {
            Ok(e) => e,
            Err(e) => {
                eprintln!("tirith: checkpoint list: cannot read entry: {e}");
                continue;
            }
        };
        // A directory only counts as a checkpoint if it has meta.json.
        let meta_path = entry.path().join("meta.json");
        if !meta_path.exists() {
            continue;
        }
        let meta_str = match fs::read_to_string(&meta_path) {
            Ok(s) => s,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint list: cannot read {}: {e}",
                    meta_path.display()
                );
                continue;
            }
        };
        let meta: CheckpointMeta = match serde_json::from_str(&meta_str) {
            Ok(m) => m,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint list: corrupt {}: {e}",
                    meta_path.display()
                );
                continue;
            }
        };
        entries.push(CheckpointListEntry {
            id: meta.id,
            created_at: meta.created_at,
            trigger_command: meta.trigger_command,
            file_count: meta.file_count,
            total_bytes: meta.total_bytes,
        });
    }

    // Timestamps are written uniformly by Utc::now().to_rfc3339(), so
    // lexicographic descending order is newest-first. Purge relies on this.
    entries.sort_by(|a, b| b.created_at.cmp(&a.created_at));
    Ok(entries)
}
253
/// Reject restore destinations that could escape the working tree.
///
/// Absolute paths and any path containing a `..` component are refused;
/// everything else passes.
fn validate_restore_path(path: &str) -> Result<(), String> {
    let candidate = Path::new(path);
    // Belt and braces: `is_absolute` plus an explicit leading-slash check.
    if candidate.is_absolute() || path.starts_with('/') {
        return Err(format!("restore path is absolute: {path}"));
    }
    let has_parent_component = candidate
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir));
    if has_parent_component {
        return Err(format!("restore path contains '..': {path}"));
    }
    Ok(())
}
270
/// Ensure a manifest `sha256` field is a plausible lowercase hex digest,
/// so it is safe to use directly as a file name inside `files/`
/// (no traversal sequences, no unexpected characters).
fn validate_sha256_filename(sha: &str) -> Result<(), String> {
    let well_formed =
        sha.len() == 64 && sha.bytes().all(|b| matches!(b, b'0'..=b'9' | b'a'..=b'f'));
    if well_formed {
        Ok(())
    } else {
        Err(format!("invalid sha256 in manifest: {sha}"))
    }
}
282
/// Restore every file from a checkpoint back to its original path.
///
/// Requires a Pro license. Each manifest entry is validated before use —
/// the destination must be relative with no `..` components, and the hash
/// must be plain lowercase hex — so a tampered manifest cannot write
/// outside the tree or address arbitrary blobs. Files whose backup blob
/// is missing are skipped with a warning; validation and copy failures
/// abort the restore. Returns the list of paths actually restored.
pub fn restore(checkpoint_id: &str) -> Result<Vec<String>, String> {
    require_pro()?;
    let cp_dir = checkpoints_dir().join(checkpoint_id);
    if !cp_dir.exists() {
        return Err(format!("checkpoint not found: {checkpoint_id}"));
    }

    let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
        .map_err(|e| format!("read manifest: {e}"))?;
    let manifest: Vec<ManifestEntry> =
        serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;

    let files_dir = cp_dir.join("files");
    let mut restored = Vec::new();

    for entry in &manifest {
        // Directory entries carry no blob; parents are recreated below.
        if entry.is_dir {
            continue;
        }

        // Validate the destination before touching the filesystem.
        validate_restore_path(&entry.original_path)?;

        // Validate the hash before using it as a file name under files/.
        validate_sha256_filename(&entry.sha256)?;

        let src = files_dir.join(&entry.sha256);
        if !src.exists() {
            eprintln!(
                "tirith: checkpoint restore: missing data for {}",
                entry.original_path
            );
            continue;
        }

        let dst = Path::new(&entry.original_path);
        if let Some(parent) = dst.parent() {
            fs::create_dir_all(parent).map_err(|e| {
                format!(
                    "restore {}: cannot create parent dir: {e}",
                    entry.original_path
                )
            })?;
        }

        fs::copy(&src, dst).map_err(|e| format!("restore {}: {e}", entry.original_path))?;
        restored.push(entry.original_path.clone());
    }

    Ok(restored)
}
336
337pub fn diff(checkpoint_id: &str) -> Result<Vec<DiffEntry>, String> {
339 require_pro()?;
340 let cp_dir = checkpoints_dir().join(checkpoint_id);
341 if !cp_dir.exists() {
342 return Err(format!("checkpoint not found: {checkpoint_id}"));
343 }
344
345 let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
346 .map_err(|e| format!("read manifest: {e}"))?;
347 let manifest: Vec<ManifestEntry> =
348 serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;
349
350 let files_dir = cp_dir.join("files");
351 let mut diffs = Vec::new();
352 let mut classified_paths: std::collections::HashSet<String> = std::collections::HashSet::new();
354
355 for entry in &manifest {
356 if entry.is_dir {
357 continue;
358 }
359
360 let backup = files_dir.join(&entry.sha256);
362 if !backup.exists() {
363 diffs.push(DiffEntry {
364 path: entry.original_path.clone(),
365 status: DiffStatus::BackupCorrupt,
366 checkpoint_sha256: entry.sha256.clone(),
367 current_sha256: None,
368 });
369 classified_paths.insert(entry.original_path.clone());
370 continue;
371 }
372
373 let current_path = Path::new(&entry.original_path);
374 if !current_path.exists() {
375 diffs.push(DiffEntry {
376 path: entry.original_path.clone(),
377 status: DiffStatus::Deleted,
378 checkpoint_sha256: entry.sha256.clone(),
379 current_sha256: None,
380 });
381 classified_paths.insert(entry.original_path.clone());
382 continue;
383 }
384
385 match sha256_file(current_path) {
387 Ok(current_sha) => {
388 if current_sha != entry.sha256 {
389 diffs.push(DiffEntry {
390 path: entry.original_path.clone(),
391 status: DiffStatus::Modified,
392 checkpoint_sha256: entry.sha256.clone(),
393 current_sha256: Some(current_sha),
394 });
395 classified_paths.insert(entry.original_path.clone());
396 }
397 }
398 Err(e) => {
399 eprintln!(
400 "tirith: checkpoint diff: cannot read {}: {e}",
401 entry.original_path
402 );
403 diffs.push(DiffEntry {
404 path: entry.original_path.clone(),
405 status: DiffStatus::Modified,
406 checkpoint_sha256: entry.sha256.clone(),
407 current_sha256: None,
408 });
409 classified_paths.insert(entry.original_path.clone());
410 }
411 }
412 }
413
414 let _ = &classified_paths;
416
417 Ok(diffs)
418}
419
/// Enforce the retention policy in three passes: expire by age, trim to
/// `max_count`, then trim until total size fits `max_total_bytes`.
///
/// Requires a Pro license. `list()` returns entries newest-first, so
/// popping from the back of the vector always removes the oldest
/// checkpoint. Removal failures are logged; the age pass keeps a failed
/// entry, while the count/size passes break out of the loop to avoid
/// spinning on an undeletable directory.
pub fn purge(config: &CheckpointConfig) -> Result<PurgeResult, String> {
    require_pro()?;
    let base_dir = checkpoints_dir();
    if !base_dir.exists() {
        return Ok(PurgeResult {
            removed_count: 0,
            freed_bytes: 0,
        });
    }

    let mut all = list()?;
    let mut removed_count = 0;
    let mut freed_bytes: u64 = 0;

    // Pass 1: age-based expiry. The retain closure deletes from disk as a
    // side effect and drops successfully-removed entries from `all`.
    let now = chrono::Utc::now();
    let max_age = chrono::Duration::days(config.max_age_days as i64);
    all.retain(|e| {
        // Entries with unparseable timestamps are kept: age is unknowable.
        if let Ok(created) = chrono::DateTime::parse_from_rfc3339(&e.created_at) {
            let age = now.signed_duration_since(created);
            if age > max_age {
                let cp_dir = base_dir.join(&e.id);
                match fs::remove_dir_all(&cp_dir) {
                    Ok(()) => {
                        freed_bytes += e.total_bytes;
                        removed_count += 1;
                        return false;
                    }
                    Err(err) => {
                        eprintln!("tirith: checkpoint purge: failed to remove {}: {err}", e.id);
                        return true;
                    }
                }
            }
        }
        true
    });

    // Pass 2: count cap — drop the oldest (last element) until within limit.
    while all.len() > config.max_count {
        if let Some(oldest) = all.pop() {
            let cp_dir = base_dir.join(&oldest.id);
            match fs::remove_dir_all(&cp_dir) {
                Ok(()) => {
                    freed_bytes += oldest.total_bytes;
                    removed_count += 1;
                }
                Err(e) => {
                    eprintln!(
                        "tirith: checkpoint purge: failed to remove {}: {e}",
                        oldest.id
                    );
                    // Give up rather than retry the same undeletable entry.
                    break;
                }
            }
        }
    }

    // Pass 3: total-size cap (0 disables it), again dropping oldest first.
    let mut total: u64 = all.iter().map(|e| e.total_bytes).sum();
    while config.max_total_bytes > 0 && total > config.max_total_bytes && !all.is_empty() {
        if let Some(oldest) = all.pop() {
            let cp_dir = base_dir.join(&oldest.id);
            match fs::remove_dir_all(&cp_dir) {
                Ok(()) => {
                    total -= oldest.total_bytes;
                    freed_bytes += oldest.total_bytes;
                    removed_count += 1;
                }
                Err(e) => {
                    eprintln!(
                        "tirith: checkpoint purge: failed to remove {}: {e}",
                        oldest.id
                    );
                    break;
                }
            }
        }
    }

    Ok(PurgeResult {
        removed_count,
        freed_bytes,
    })
}
510
/// One difference reported by `diff()` between a checkpoint and the
/// current filesystem state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiffEntry {
    // Original path of the file (as recorded in the manifest).
    pub path: String,
    // How the file differs; see `DiffStatus`.
    pub status: DiffStatus,
    // Hash recorded at checkpoint time.
    pub checkpoint_sha256: String,
    // Current hash, when the file exists and was readable; None otherwise.
    pub current_sha256: Option<String>,
}
519
/// Classification of a difference found by `diff()`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum DiffStatus {
    // The file existed at checkpoint time but is gone now.
    Deleted,
    // The file's current hash differs (or the file is unreadable).
    Modified,
    // The checkpoint's own backup blob is missing from files/.
    BackupCorrupt,
}
526
/// Outcome summary returned by `purge()`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PurgeResult {
    // Number of checkpoints deleted across all three purge passes.
    pub removed_count: usize,
    // Sum of `total_bytes` of the deleted checkpoints.
    pub freed_bytes: u64,
}
533
534pub fn create_and_purge(paths: &[&str], trigger_command: Option<&str>) -> Result<(), String> {
538 create(paths, trigger_command)?;
539 let config = CheckpointConfig::default();
540 purge(&config)?;
541 Ok(())
542}
543
544fn backup_file(path: &Path, files_dir: &Path) -> Result<ManifestEntry, String> {
550 let sha = sha256_file(path)?;
551 let dst = files_dir.join(&sha);
552
553 if !dst.exists() {
555 fs::copy(path, &dst).map_err(|e| format!("copy: {e}"))?;
556 }
557
558 let size = match path.metadata() {
559 Ok(m) => m.len(),
560 Err(e) => {
561 eprintln!(
562 "tirith: checkpoint: cannot read metadata for {}: {e}",
563 path.display()
564 );
565 0
566 }
567 };
568
569 Ok(ManifestEntry {
570 original_path: path.to_string_lossy().to_string(),
571 sha256: sha,
572 size,
573 is_dir: false,
574 })
575}
576
577fn backup_dir(dir: &Path, files_dir: &Path) -> Result<Vec<ManifestEntry>, String> {
584 let mut entries = Vec::new();
585 const MAX_FILES: usize = 10_000;
586 const MAX_SINGLE_FILE: u64 = 100 * 1024 * 1024; backup_dir_recursive(dir, files_dir, &mut entries, MAX_FILES, MAX_SINGLE_FILE)?;
589 Ok(entries)
590}
591
/// Depth-first walk that backs up regular files under `dir`.
///
/// Traversal rules: symlinks are never followed (detected via
/// `symlink_metadata`), hidden directories (name starting with '.') are
/// skipped entirely, files larger than `max_single_file` are skipped,
/// and the walk stops once `entries` holds `max_files` items. Unreadable
/// entries are logged and skipped; only an unreadable directory listing
/// itself aborts with an error.
fn backup_dir_recursive(
    dir: &Path,
    files_dir: &Path,
    entries: &mut Vec<ManifestEntry>,
    max_files: usize,
    max_single_file: u64,
) -> Result<(), String> {
    // Global cap already reached — nothing more to do at any depth.
    if entries.len() >= max_files {
        return Ok(());
    }

    let read_dir = fs::read_dir(dir).map_err(|e| format!("read dir {}: {e}", dir.display()))?;

    for entry in read_dir {
        if entries.len() >= max_files {
            break;
        }
        let entry = match entry {
            Ok(e) => e,
            Err(e) => {
                eprintln!(
                    "tirith: checkpoint: skip unreadable entry in {}: {e}",
                    dir.display()
                );
                continue;
            }
        };
        let path = entry.path();

        // symlink_metadata does not follow links, so symlinks stay visible.
        let meta = match path.symlink_metadata() {
            Ok(m) => m,
            Err(e) => {
                eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
                continue;
            }
        };

        // Never follow symlinks: avoids cycles and escaping the tree.
        if meta.file_type().is_symlink() {
            continue;
        }

        if meta.file_type().is_file() {
            let size = meta.len();
            if size > max_single_file {
                eprintln!(
                    "tirith: checkpoint: skip large file {} ({} bytes)",
                    path.display(),
                    size
                );
                continue;
            }
            match backup_file(&path, files_dir) {
                Ok(e) => entries.push(e),
                Err(e) => {
                    eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
                }
            }
        } else if path.is_dir() {
            // Skip hidden directories (e.g. .git) entirely.
            if path
                .file_name()
                .and_then(|n| n.to_str())
                .map(|n| n.starts_with('.'))
                .unwrap_or(false)
            {
                continue;
            }
            backup_dir_recursive(&path, files_dir, entries, max_files, max_single_file)?;
        }
    }

    Ok(())
}
666
667fn sha256_file(path: &Path) -> Result<String, String> {
669 let mut file = fs::File::open(path).map_err(|e| format!("open {}: {e}", path.display()))?;
670 let mut hasher = Sha256::new();
671 let mut buf = [0u8; 8192];
672 loop {
673 let n = file.read(&mut buf).map_err(|e| format!("read: {e}"))?;
674 if n == 0 {
675 break;
676 }
677 hasher.update(&buf[..n]);
678 }
679 Ok(format!("{:x}", hasher.finalize()))
680}
681
#[cfg(test)]
mod tests {
    use super::*;

    // Destructive commands must trigger an automatic checkpoint;
    // read-only commands must not.
    #[test]
    fn test_should_auto_checkpoint() {
        assert!(should_auto_checkpoint("rm -rf /tmp/myproject"));
        assert!(should_auto_checkpoint("rm -f important.txt"));
        assert!(should_auto_checkpoint("git reset --hard HEAD~3"));
        assert!(should_auto_checkpoint("git checkout ."));
        assert!(should_auto_checkpoint("git clean -fd"));
        assert!(should_auto_checkpoint("sudo rm -rf /"));
        assert!(!should_auto_checkpoint("ls -la"));
        assert!(!should_auto_checkpoint("echo hello"));
        assert!(!should_auto_checkpoint("git status"));
    }

    // Default::default() must agree with the serde default functions.
    #[test]
    fn test_checkpoint_config_defaults() {
        let config = CheckpointConfig::default();
        assert_eq!(config.max_count, 50);
        assert_eq!(config.max_age_days, 30);
        assert_eq!(config.max_total_bytes, 500 * 1024 * 1024);
    }

    // backup_file must store the file under files/<sha256> with the
    // correct size and identical contents.
    #[test]
    fn test_backup_and_sha256() {
        let tmp = tempfile::tempdir().unwrap();
        let test_file = tmp.path().join("test.txt");
        fs::write(&test_file, "hello world").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entry = backup_file(&test_file, &files_dir).unwrap();
        assert!(!entry.sha256.is_empty());
        // "hello world" is 11 bytes.
        assert_eq!(entry.size, 11);
        assert!(!entry.is_dir);

        let backup_path = files_dir.join(&entry.sha256);
        assert!(backup_path.exists());
        let content = fs::read_to_string(&backup_path).unwrap();
        assert_eq!(content, "hello world");
    }

    // The directory walk must pick up nested regular files.
    #[test]
    fn test_backup_dir_recursive() {
        let tmp = tempfile::tempdir().unwrap();
        let dir = tmp.path().join("project");
        fs::create_dir_all(dir.join("src")).unwrap();
        fs::write(dir.join("README.md"), "# Hello").unwrap();
        fs::write(dir.join("src/main.rs"), "fn main() {}").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entries = backup_dir(&dir, &files_dir).unwrap();
        assert_eq!(entries.len(), 2, "should backup 2 files: {entries:?}");
    }

    // Backing up a nonexistent path must fail (open error from hashing).
    #[test]
    fn test_backup_nonexistent_file() {
        let tmp = tempfile::tempdir().unwrap();
        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let result = backup_file(Path::new("/nonexistent/file.txt"), &files_dir);
        assert!(result.is_err());
    }

    // Restore destinations must reject traversal and absolute paths.
    #[test]
    fn test_validate_restore_path_rejects_traversal() {
        assert!(validate_restore_path("../../etc/passwd").is_err());
        assert!(validate_restore_path("/tmp/../etc/evil").is_err());
        assert!(validate_restore_path("normal/path/file.txt").is_ok());
        assert!(
            validate_restore_path("/absolute/path/file.txt").is_err(),
            "absolute paths should be rejected"
        );
        assert!(
            validate_restore_path("/etc/passwd").is_err(),
            "absolute paths should be rejected"
        );
    }

    // Only 64-char lowercase hex strings are acceptable blob names.
    #[test]
    fn test_validate_sha256_filename() {
        let valid = "a".repeat(64);
        assert!(validate_sha256_filename(&valid).is_ok());
        assert!(validate_sha256_filename("short").is_err());
        assert!(validate_sha256_filename("../../etc/passwd").is_err());
        // 'g' is not a hex digit even at the right length.
        assert!(validate_sha256_filename(&"g".repeat(64)).is_err());
    }

    // DiffStatus must round-trip through serde_json.
    #[test]
    fn test_diff_status_serde() {
        let entry = DiffEntry {
            path: "/tmp/test.txt".to_string(),
            status: DiffStatus::Deleted,
            checkpoint_sha256: "abc123".to_string(),
            current_sha256: None,
        };
        let json = serde_json::to_string(&entry).unwrap();
        let parsed: DiffEntry = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.status, DiffStatus::Deleted);
    }

    // End-to-end: a hand-crafted 60-day-old checkpoint must be purged by
    // create_and_purge, leaving only the freshly created one.
    #[test]
    fn test_create_and_purge_removes_expired() {
        // Serialize env-var mutation across tests (XDG_STATE_HOME is
        // process-global state).
        let _guard = crate::TEST_ENV_LOCK.lock().unwrap();

        let tmpdir = tempfile::tempdir().unwrap();
        let workdir = tmpdir.path().join("project");
        fs::create_dir_all(&workdir).unwrap();
        fs::write(workdir.join("file.txt"), "content").unwrap();

        let state_dir = tmpdir.path().join("state");

        // Point the state dir at the tempdir, remembering the old value.
        let prev = std::env::var("XDG_STATE_HOME").ok();
        unsafe { std::env::set_var("XDG_STATE_HOME", &state_dir) };

        // Fabricate an expired checkpoint on disk (60 days > 30-day default).
        let cp_base = state_dir.join("tirith/checkpoints");
        let old_cp = cp_base.join("old-expired");
        let old_files = old_cp.join("files");
        fs::create_dir_all(&old_files).unwrap();

        let old_time = chrono::Utc::now() - chrono::Duration::days(60);
        let meta_json = serde_json::json!({
            "id": "old-expired",
            "created_at": old_time.to_rfc3339(),
            "trigger_command": "rm -rf old",
            "paths": ["/tmp/old"],
            "total_bytes": 8,
            "file_count": 1
        });
        fs::write(old_cp.join("meta.json"), meta_json.to_string()).unwrap();
        fs::write(old_files.join("dummy"), "old data").unwrap();
        let manifest = serde_json::json!([{
            "original_path": "old.txt",
            "sha256": "dummy",
            "size": 8,
            "is_dir": false
        }]);
        fs::write(old_cp.join("manifest.json"), manifest.to_string()).unwrap();
        assert!(old_cp.exists());

        let work_str = workdir.to_str().unwrap();
        let result = create_and_purge(&[work_str], Some("rm -rf tempstuff"));

        // Restore the env var before asserting, so failures don't leak state.
        match prev {
            Some(val) => unsafe { std::env::set_var("XDG_STATE_HOME", val) },
            None => unsafe { std::env::remove_var("XDG_STATE_HOME") },
        }

        assert!(result.is_ok(), "create_and_purge failed: {result:?}");
        assert!(
            !old_cp.exists(),
            "expired checkpoint should have been purged"
        );
        let remaining: Vec<_> = fs::read_dir(&cp_base)
            .unwrap()
            .filter_map(|e| e.ok())
            .collect();
        assert_eq!(
            remaining.len(),
            1,
            "exactly one new checkpoint should remain"
        );
    }
}