// tirith_core/checkpoint.rs

1//! Checkpoint/rollback system for protecting against destructive operations.
2//!
3//! Creates file-level snapshots before destructive commands (`rm -rf`, `git reset --hard`, etc.)
4//! so users can recover accidentally destroyed work.
5//!
6//! Storage: `$XDG_STATE_HOME/tirith/checkpoints/<uuid>/`
7//!   - `meta.json`: checkpoint metadata (timestamp, paths, trigger command)
8//!   - `files/`: preserved file contents (original directory structure flattened to SHA-256 names)
9//!   - `manifest.json`: path → SHA-256 mapping for restore
10
11use serde::{Deserialize, Serialize};
12use sha2::{Digest, Sha256};
13use std::fs;
14use std::io::Read;
15use std::path::{Path, PathBuf};
16
/// Gate for Pro-tier functionality. Currently a no-op stub that always
/// succeeds; it is called at the top of every mutating entry point
/// (`create`, `restore`, `diff`, `purge`) so the licensing seam is already
/// in place at each call site.
// NOTE(review): presumably this will eventually check a license/entitlement —
// confirm the intended gating behavior; today it never fails.
fn require_pro() -> Result<(), String> {
    Ok(())
}
20
/// Commands that trigger automatic checkpointing.
const AUTO_TRIGGER_PATTERNS: &[&str] = &[
    "rm -rf",
    "rm -f",
    "rm -fr",
    "git reset --hard",
    "git checkout .",
    "git clean -fd",
    "git clean -f",
];

/// Check if a command should trigger auto-checkpointing.
///
/// Matching is case-insensitive substring search over the known destructive
/// patterns, plus a blanket rule for `mv`.
pub fn should_auto_checkpoint(command: &str) -> bool {
    let normalized = command.to_lowercase();

    if AUTO_TRIGGER_PATTERNS
        .iter()
        .any(|pattern| normalized.contains(pattern))
    {
        return true;
    }

    // Also catch `mv` — intentionally triggers on ALL mv commands, not just
    // overwrites, because statically determining whether the destination exists
    // is not possible. False positives are acceptable here: checkpoints are cheap
    // and it's better to have an unnecessary snapshot than to miss a destructive move.
    normalized.starts_with("mv ") || normalized.contains(" mv ")
}
44
/// Checkpoint metadata stored alongside backed up files (`meta.json`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointMeta {
    /// Unique checkpoint id (UUID v4); also the checkpoint directory name.
    pub id: String,
    /// Creation timestamp, RFC 3339 (UTC).
    pub created_at: String,
    /// The destructive command that triggered this checkpoint, if any.
    pub trigger_command: Option<String>,
    /// The paths requested for backup, exactly as supplied by the caller.
    pub paths: Vec<String>,
    /// Sum of the sizes of all backed-up files, in bytes.
    pub total_bytes: u64,
    /// Number of file entries captured in the manifest.
    pub file_count: usize,
}
55
/// File manifest entry: original path → SHA-256 of content (`manifest.json`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestEntry {
    /// Path of the file at checkpoint time; used as the restore destination.
    pub original_path: String,
    /// Lowercase hex SHA-256 of the content; also the blob filename under `files/`.
    pub sha256: String,
    /// File size in bytes (0 if metadata could not be read at backup time).
    pub size: u64,
    /// Directory marker; directory entries are skipped by restore/diff.
    pub is_dir: bool,
}
64
/// Summary row returned by `list()` — the subset of `CheckpointMeta`
/// needed to display or purge a checkpoint.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointListEntry {
    /// Checkpoint id (directory name under the checkpoints dir).
    pub id: String,
    /// Creation timestamp, RFC 3339; used for newest-first sorting.
    pub created_at: String,
    /// Command that triggered the checkpoint, if recorded.
    pub trigger_command: Option<String>,
    /// Number of files captured.
    pub file_count: usize,
    /// Total backed-up bytes, as recorded in the metadata.
    pub total_bytes: u64,
}
74
/// Checkpoint retention configuration (limits enforced by `purge`).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CheckpointConfig {
    /// Maximum number of checkpoints to keep (newest retained). Default: 50.
    #[serde(default = "default_max_count")]
    pub max_count: usize,
    /// Maximum checkpoint age in days before removal. Default: 30.
    #[serde(default = "default_max_age_days")]
    pub max_age_days: u32,
    /// Maximum total bytes across all checkpoints. Default: 500 MiB.
    #[serde(default = "default_max_total_bytes")]
    pub max_total_bytes: u64,
}
85
// Serde default providers for `CheckpointConfig`; keep the values in sync
// with the struct's field documentation and `Default` impl.
fn default_max_count() -> usize {
    50
}
fn default_max_age_days() -> u32 {
    30
}
fn default_max_total_bytes() -> u64 {
    500 * 1024 * 1024 // 500 MiB
}
95
96impl Default for CheckpointConfig {
97    fn default() -> Self {
98        Self {
99            max_count: default_max_count(),
100            max_age_days: default_max_age_days(),
101            max_total_bytes: default_max_total_bytes(),
102        }
103    }
104}
105
106/// Get the checkpoints directory.
107pub fn checkpoints_dir() -> PathBuf {
108    match crate::policy::state_dir() {
109        Some(d) => d.join("checkpoints"),
110        None => {
111            eprintln!("tirith: WARNING: state dir unavailable, using /tmp/tirith (world-readable)");
112            PathBuf::from("/tmp/tirith").join("checkpoints")
113        }
114    }
115}
116
117/// Create a checkpoint of the given paths.
118pub fn create(paths: &[&str], trigger_command: Option<&str>) -> Result<CheckpointMeta, String> {
119    require_pro()?;
120    let base_dir = checkpoints_dir();
121    let id = uuid::Uuid::new_v4().to_string();
122    let cp_dir = base_dir.join(&id);
123    let files_dir = cp_dir.join("files");
124
125    fs::create_dir_all(&files_dir).map_err(|e| format!("create checkpoint dir: {e}"))?;
126
127    let mut manifest: Vec<ManifestEntry> = Vec::new();
128    let mut total_bytes: u64 = 0;
129
130    for path_str in paths {
131        let path = Path::new(path_str);
132        if !path.exists() {
133            continue;
134        }
135
136        if path.is_file() {
137            match backup_file(path, &files_dir) {
138                Ok(entry) => {
139                    total_bytes += entry.size;
140                    manifest.push(entry);
141                }
142                Err(e) => {
143                    eprintln!("tirith: checkpoint: skip {path_str}: {e}");
144                }
145            }
146        } else if path.is_dir() {
147            match backup_dir(path, &files_dir) {
148                Ok(entries) => {
149                    for entry in entries {
150                        total_bytes += entry.size;
151                        manifest.push(entry);
152                    }
153                }
154                Err(e) => {
155                    eprintln!("tirith: checkpoint: skip dir {path_str}: {e}");
156                }
157            }
158        }
159    }
160
161    if manifest.is_empty() {
162        // Clean up empty checkpoint dir
163        let _ = fs::remove_dir_all(&cp_dir);
164        return Err("no files to checkpoint".to_string());
165    }
166
167    let now = chrono::Utc::now().to_rfc3339();
168    let meta = CheckpointMeta {
169        id: id.clone(),
170        created_at: now,
171        trigger_command: trigger_command.map(|s| s.to_string()),
172        paths: paths.iter().map(|s| s.to_string()).collect(),
173        total_bytes,
174        file_count: manifest.len(),
175    };
176
177    // Write metadata
178    let meta_json = serde_json::to_string_pretty(&meta).map_err(|e| format!("serialize: {e}"))?;
179    fs::write(cp_dir.join("meta.json"), meta_json).map_err(|e| format!("write meta: {e}"))?;
180
181    // Write manifest
182    let manifest_json =
183        serde_json::to_string_pretty(&manifest).map_err(|e| format!("serialize: {e}"))?;
184    fs::write(cp_dir.join("manifest.json"), manifest_json)
185        .map_err(|e| format!("write manifest: {e}"))?;
186
187    Ok(meta)
188}
189
190/// List all checkpoints, newest first.
191pub fn list() -> Result<Vec<CheckpointListEntry>, String> {
192    let base_dir = checkpoints_dir();
193    if !base_dir.exists() {
194        return Ok(Vec::new());
195    }
196
197    let mut entries = Vec::new();
198
199    for entry in fs::read_dir(&base_dir).map_err(|e| format!("read dir: {e}"))? {
200        let entry = match entry {
201            Ok(e) => e,
202            Err(e) => {
203                eprintln!("tirith: checkpoint list: cannot read entry: {e}");
204                continue;
205            }
206        };
207        let meta_path = entry.path().join("meta.json");
208        if !meta_path.exists() {
209            continue;
210        }
211        let meta_str = match fs::read_to_string(&meta_path) {
212            Ok(s) => s,
213            Err(e) => {
214                eprintln!(
215                    "tirith: checkpoint list: cannot read {}: {e}",
216                    meta_path.display()
217                );
218                continue;
219            }
220        };
221        let meta: CheckpointMeta = match serde_json::from_str(&meta_str) {
222            Ok(m) => m,
223            Err(e) => {
224                eprintln!(
225                    "tirith: checkpoint list: corrupt {}: {e}",
226                    meta_path.display()
227                );
228                continue;
229            }
230        };
231        entries.push(CheckpointListEntry {
232            id: meta.id,
233            created_at: meta.created_at,
234            trigger_command: meta.trigger_command,
235            file_count: meta.file_count,
236            total_bytes: meta.total_bytes,
237        });
238    }
239
240    // Sort newest first
241    entries.sort_by(|a, b| b.created_at.cmp(&a.created_at));
242    Ok(entries)
243}
244
/// Validate that a restore path cannot escape the working directory:
/// rejects absolute paths and any path containing a `..` component.
fn validate_restore_path(path: &str) -> Result<(), String> {
    let p = Path::new(path);
    // Path::is_absolute() is platform-specific (Windows requires drive letter),
    // so also reject Unix-style absolute paths explicitly on all platforms.
    if p.is_absolute() || path.starts_with('/') {
        return Err(format!("restore path is absolute: {path}"));
    }
    let has_parent_component = p
        .components()
        .any(|c| matches!(c, std::path::Component::ParentDir));
    if has_parent_component {
        return Err(format!("restore path contains '..': {path}"));
    }
    Ok(())
}
261
/// Validate that a manifest sha256 is exactly 64 lowercase hex characters.
/// Anything else (wrong length, uppercase, path separators) is rejected so a
/// crafted manifest can never address files outside the `files/` directory.
fn validate_sha256_filename(sha: &str) -> Result<(), String> {
    let well_formed = sha.len() == 64
        && sha
            .bytes()
            .all(|b| matches!(b, b'0'..=b'9' | b'a'..=b'f'));
    if well_formed {
        Ok(())
    } else {
        Err(format!("invalid sha256 in manifest: {sha}"))
    }
}
273
/// Restore files from a checkpoint.
///
/// Copies each preserved blob back to its `original_path`, creating parent
/// directories as needed. Returns the list of paths actually restored.
///
/// Failure behavior:
/// - invalid manifest entries (traversal paths, malformed sha256) abort the
///   whole restore with an error;
/// - a missing backing blob is logged to stderr and that entry is skipped;
/// - a failed directory creation or copy aborts with an error.
///
/// NOTE(review): `validate_restore_path` rejects absolute paths, but
/// `backup_file` records `original_path` exactly as the caller supplied it —
/// which is absolute whenever `create()` was invoked with absolute paths.
/// Such manifests would make this function fail on the first entry. Confirm
/// whether manifests are expected to contain only relative paths.
pub fn restore(checkpoint_id: &str) -> Result<Vec<String>, String> {
    require_pro()?;
    let cp_dir = checkpoints_dir().join(checkpoint_id);
    if !cp_dir.exists() {
        return Err(format!("checkpoint not found: {checkpoint_id}"));
    }

    let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
        .map_err(|e| format!("read manifest: {e}"))?;
    let manifest: Vec<ManifestEntry> =
        serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;

    let files_dir = cp_dir.join("files");
    let mut restored = Vec::new();

    for entry in &manifest {
        if entry.is_dir {
            continue; // Directories are created implicitly
        }

        // CR-1: Validate original_path against path traversal
        validate_restore_path(&entry.original_path)?;

        // CR-2: Validate sha256 field is a proper hex filename
        validate_sha256_filename(&entry.sha256)?;

        let src = files_dir.join(&entry.sha256);
        if !src.exists() {
            eprintln!(
                "tirith: checkpoint restore: missing data for {}",
                entry.original_path
            );
            continue;
        }

        let dst = Path::new(&entry.original_path);
        // SF-3: Propagate create_dir_all failure with clear message
        if let Some(parent) = dst.parent() {
            fs::create_dir_all(parent).map_err(|e| {
                format!(
                    "restore {}: cannot create parent dir: {e}",
                    entry.original_path
                )
            })?;
        }

        fs::copy(&src, dst).map_err(|e| format!("restore {}: {e}", entry.original_path))?;
        restored.push(entry.original_path.clone());
    }

    Ok(restored)
}
327
328/// Get diff between checkpoint and current filesystem state.
329pub fn diff(checkpoint_id: &str) -> Result<Vec<DiffEntry>, String> {
330    require_pro()?;
331    let cp_dir = checkpoints_dir().join(checkpoint_id);
332    if !cp_dir.exists() {
333        return Err(format!("checkpoint not found: {checkpoint_id}"));
334    }
335
336    let manifest_str = fs::read_to_string(cp_dir.join("manifest.json"))
337        .map_err(|e| format!("read manifest: {e}"))?;
338    let manifest: Vec<ManifestEntry> =
339        serde_json::from_str(&manifest_str).map_err(|e| format!("parse manifest: {e}"))?;
340
341    let files_dir = cp_dir.join("files");
342    let mut diffs = Vec::new();
343    // CR-9: Track paths already classified to avoid duplicates
344    let mut classified_paths: std::collections::HashSet<String> = std::collections::HashSet::new();
345
346    for entry in &manifest {
347        if entry.is_dir {
348            continue;
349        }
350
351        // Check backup integrity first (merged with main loop to avoid CR-9 duplicates)
352        let backup = files_dir.join(&entry.sha256);
353        if !backup.exists() {
354            diffs.push(DiffEntry {
355                path: entry.original_path.clone(),
356                status: DiffStatus::BackupCorrupt,
357                checkpoint_sha256: entry.sha256.clone(),
358                current_sha256: None,
359            });
360            classified_paths.insert(entry.original_path.clone());
361            continue;
362        }
363
364        let current_path = Path::new(&entry.original_path);
365        if !current_path.exists() {
366            diffs.push(DiffEntry {
367                path: entry.original_path.clone(),
368                status: DiffStatus::Deleted,
369                checkpoint_sha256: entry.sha256.clone(),
370                current_sha256: None,
371            });
372            classified_paths.insert(entry.original_path.clone());
373            continue;
374        }
375
376        // SF-6: Handle sha256_file failure explicitly instead of unwrap_or_default
377        match sha256_file(current_path) {
378            Ok(current_sha) => {
379                if current_sha != entry.sha256 {
380                    diffs.push(DiffEntry {
381                        path: entry.original_path.clone(),
382                        status: DiffStatus::Modified,
383                        checkpoint_sha256: entry.sha256.clone(),
384                        current_sha256: Some(current_sha),
385                    });
386                    classified_paths.insert(entry.original_path.clone());
387                }
388            }
389            Err(e) => {
390                eprintln!(
391                    "tirith: checkpoint diff: cannot read {}: {e}",
392                    entry.original_path
393                );
394                diffs.push(DiffEntry {
395                    path: entry.original_path.clone(),
396                    status: DiffStatus::Modified,
397                    checkpoint_sha256: entry.sha256.clone(),
398                    current_sha256: None,
399                });
400                classified_paths.insert(entry.original_path.clone());
401            }
402        }
403    }
404
405    // classified_paths used to ensure no duplicates (CR-9 fix applied above by merging loops)
406    let _ = &classified_paths;
407
408    Ok(diffs)
409}
410
411/// Purge old checkpoints based on configuration limits.
412pub fn purge(config: &CheckpointConfig) -> Result<PurgeResult, String> {
413    require_pro()?;
414    let base_dir = checkpoints_dir();
415    if !base_dir.exists() {
416        return Ok(PurgeResult {
417            removed_count: 0,
418            freed_bytes: 0,
419        });
420    }
421
422    let mut all = list()?;
423    let mut removed_count = 0;
424    let mut freed_bytes: u64 = 0;
425
426    // Remove by age
427    let now = chrono::Utc::now();
428    let max_age = chrono::Duration::days(config.max_age_days as i64);
429    all.retain(|e| {
430        if let Ok(created) = chrono::DateTime::parse_from_rfc3339(&e.created_at) {
431            let age = now.signed_duration_since(created);
432            if age > max_age {
433                let cp_dir = base_dir.join(&e.id);
434                match fs::remove_dir_all(&cp_dir) {
435                    Ok(()) => {
436                        freed_bytes += e.total_bytes;
437                        removed_count += 1;
438                        return false; // Successfully removed, drop from list
439                    }
440                    Err(err) => {
441                        eprintln!("tirith: checkpoint purge: failed to remove {}: {err}", e.id);
442                        return true; // Failed to remove, keep in list
443                    }
444                }
445            }
446        }
447        true
448    });
449
450    // Remove by count (keep newest)
451    while all.len() > config.max_count {
452        if let Some(oldest) = all.pop() {
453            let cp_dir = base_dir.join(&oldest.id);
454            match fs::remove_dir_all(&cp_dir) {
455                Ok(()) => {
456                    freed_bytes += oldest.total_bytes;
457                    removed_count += 1;
458                }
459                Err(e) => {
460                    eprintln!(
461                        "tirith: checkpoint purge: failed to remove {}: {e}",
462                        oldest.id
463                    );
464                    // Failed to remove — stop trying to shrink by count
465                    // to avoid infinite loop with a stuck entry.
466                    break;
467                }
468            }
469        }
470    }
471
472    // Remove by total size (keep newest)
473    let mut total: u64 = all.iter().map(|e| e.total_bytes).sum();
474    while config.max_total_bytes > 0 && total > config.max_total_bytes && !all.is_empty() {
475        if let Some(oldest) = all.pop() {
476            let cp_dir = base_dir.join(&oldest.id);
477            match fs::remove_dir_all(&cp_dir) {
478                Ok(()) => {
479                    total -= oldest.total_bytes;
480                    freed_bytes += oldest.total_bytes;
481                    removed_count += 1;
482                }
483                Err(e) => {
484                    eprintln!(
485                        "tirith: checkpoint purge: failed to remove {}: {e}",
486                        oldest.id
487                    );
488                    // Failed to remove — stop trying to shrink by size
489                    // to avoid infinite loop with a stuck entry.
490                    break;
491                }
492            }
493        }
494    }
495
496    Ok(PurgeResult {
497        removed_count,
498        freed_bytes,
499    })
500}
501
/// One file's difference between checkpoint state and the current filesystem.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiffEntry {
    /// Original path of the file, as recorded in the manifest.
    pub path: String,
    /// Classification of the change; see [`DiffStatus`].
    pub status: DiffStatus,
    /// SHA-256 recorded at checkpoint time.
    pub checkpoint_sha256: String,
    /// SHA-256 of the file now; `None` when the file is deleted, unreadable,
    /// or the backup blob itself is missing.
    pub current_sha256: Option<String>,
}

/// Diff status for a file between checkpoint and current state.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub enum DiffStatus {
    /// File existed at checkpoint time but no longer exists.
    Deleted,
    /// File content differs from the checkpointed hash (or is unreadable).
    Modified,
    /// The backing blob under `files/` is missing — backup data lost.
    BackupCorrupt,
}

/// Result of a purge operation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PurgeResult {
    /// Number of checkpoints removed.
    pub removed_count: usize,
    /// Sum of `total_bytes` of removed checkpoints (as recorded in metadata).
    pub freed_bytes: u64,
}
524
525/// Create a checkpoint and then purge old ones with default limits.
526/// Convenience wrapper used in tests; CLI calls `create()` then `purge()` directly
527/// for distinct error messages.
528pub fn create_and_purge(paths: &[&str], trigger_command: Option<&str>) -> Result<(), String> {
529    create(paths, trigger_command)?;
530    let config = CheckpointConfig::default();
531    purge(&config)?;
532    Ok(())
533}
534
535// ---------------------------------------------------------------------------
536// Internal helpers
537// ---------------------------------------------------------------------------
538
539/// Backup a single file to the checkpoint files directory.
540fn backup_file(path: &Path, files_dir: &Path) -> Result<ManifestEntry, String> {
541    let sha = sha256_file(path)?;
542    let dst = files_dir.join(&sha);
543
544    // Only copy if not already stored (dedup by content hash)
545    if !dst.exists() {
546        fs::copy(path, &dst).map_err(|e| format!("copy: {e}"))?;
547    }
548
549    let size = match path.metadata() {
550        Ok(m) => m.len(),
551        Err(e) => {
552            eprintln!(
553                "tirith: checkpoint: cannot read metadata for {}: {e}",
554                path.display()
555            );
556            0
557        }
558    };
559
560    Ok(ManifestEntry {
561        original_path: path.to_string_lossy().to_string(),
562        sha256: sha,
563        size,
564        is_dir: false,
565    })
566}
567
568/// Backup a directory recursively.
569///
570/// NOTE: Empty directories are not recorded in the manifest. Only files are backed up.
571/// This means `restore()` will not recreate empty directories that existed at checkpoint
572/// time. Parent directories of restored files are created implicitly. Tracking empty
573/// directories would require manifest format changes and corresponding restore logic.
574fn backup_dir(dir: &Path, files_dir: &Path) -> Result<Vec<ManifestEntry>, String> {
575    let mut entries = Vec::new();
576    const MAX_FILES: usize = 10_000;
577    const MAX_SINGLE_FILE: u64 = 100 * 1024 * 1024; // 100 MiB per file
578
579    backup_dir_recursive(dir, files_dir, &mut entries, MAX_FILES, MAX_SINGLE_FILE)?;
580    Ok(entries)
581}
582
583fn backup_dir_recursive(
584    dir: &Path,
585    files_dir: &Path,
586    entries: &mut Vec<ManifestEntry>,
587    max_files: usize,
588    max_single_file: u64,
589) -> Result<(), String> {
590    if entries.len() >= max_files {
591        return Ok(());
592    }
593
594    let read_dir = fs::read_dir(dir).map_err(|e| format!("read dir {}: {e}", dir.display()))?;
595
596    for entry in read_dir {
597        if entries.len() >= max_files {
598            break;
599        }
600        let entry = match entry {
601            Ok(e) => e,
602            Err(e) => {
603                eprintln!(
604                    "tirith: checkpoint: skip unreadable entry in {}: {e}",
605                    dir.display()
606                );
607                continue;
608            }
609        };
610        let path = entry.path();
611
612        // Use symlink_metadata to avoid TOCTOU race between is_symlink() and later reads
613        let meta = match path.symlink_metadata() {
614            Ok(m) => m,
615            Err(e) => {
616                eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
617                continue;
618            }
619        };
620
621        if meta.file_type().is_symlink() {
622            continue; // Skip symlinks for safety
623        }
624
625        if meta.file_type().is_file() {
626            let size = meta.len();
627            if size > max_single_file {
628                eprintln!(
629                    "tirith: checkpoint: skip large file {} ({} bytes)",
630                    path.display(),
631                    size
632                );
633                continue;
634            }
635            match backup_file(&path, files_dir) {
636                Ok(e) => entries.push(e),
637                Err(e) => {
638                    eprintln!("tirith: checkpoint: skip {}: {e}", path.display());
639                }
640            }
641        } else if path.is_dir() {
642            // Skip hidden directories (like .git)
643            if path
644                .file_name()
645                .and_then(|n| n.to_str())
646                .map(|n| n.starts_with('.'))
647                .unwrap_or(false)
648            {
649                continue;
650            }
651            backup_dir_recursive(&path, files_dir, entries, max_files, max_single_file)?;
652        }
653    }
654
655    Ok(())
656}
657
658/// Compute SHA-256 of a file.
659fn sha256_file(path: &Path) -> Result<String, String> {
660    let mut file = fs::File::open(path).map_err(|e| format!("open {}: {e}", path.display()))?;
661    let mut hasher = Sha256::new();
662    let mut buf = [0u8; 8192];
663    loop {
664        let n = file.read(&mut buf).map_err(|e| format!("read: {e}"))?;
665        if n == 0 {
666            break;
667        }
668        hasher.update(&buf[..n]);
669    }
670    Ok(format!("{:x}", hasher.finalize()))
671}
672
#[cfg(test)]
mod tests {
    use super::*;

    // Trigger detection: destructive commands match, benign ones don't.
    #[test]
    fn test_should_auto_checkpoint() {
        assert!(should_auto_checkpoint("rm -rf /tmp/myproject"));
        assert!(should_auto_checkpoint("rm -f important.txt"));
        assert!(should_auto_checkpoint("git reset --hard HEAD~3"));
        assert!(should_auto_checkpoint("git checkout ."));
        assert!(should_auto_checkpoint("git clean -fd"));
        assert!(should_auto_checkpoint("sudo rm -rf /"));
        assert!(!should_auto_checkpoint("ls -la"));
        assert!(!should_auto_checkpoint("echo hello"));
        assert!(!should_auto_checkpoint("git status"));
    }

    // Defaults must stay in sync with the serde default provider functions.
    #[test]
    fn test_checkpoint_config_defaults() {
        let config = CheckpointConfig::default();
        assert_eq!(config.max_count, 50);
        assert_eq!(config.max_age_days, 30);
        assert_eq!(config.max_total_bytes, 500 * 1024 * 1024);
    }

    // A backed-up file is stored content-addressed and byte-identical.
    #[test]
    fn test_backup_and_sha256() {
        let tmp = tempfile::tempdir().unwrap();
        let test_file = tmp.path().join("test.txt");
        fs::write(&test_file, "hello world").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entry = backup_file(&test_file, &files_dir).unwrap();
        assert!(!entry.sha256.is_empty());
        assert_eq!(entry.size, 11); // "hello world" = 11 bytes
        assert!(!entry.is_dir);

        // Verify the backed up file exists
        let backup_path = files_dir.join(&entry.sha256);
        assert!(backup_path.exists());
        let content = fs::read_to_string(&backup_path).unwrap();
        assert_eq!(content, "hello world");
    }

    // Recursion picks up files in nested subdirectories.
    #[test]
    fn test_backup_dir_recursive() {
        let tmp = tempfile::tempdir().unwrap();
        let dir = tmp.path().join("project");
        fs::create_dir_all(dir.join("src")).unwrap();
        fs::write(dir.join("README.md"), "# Hello").unwrap();
        fs::write(dir.join("src/main.rs"), "fn main() {}").unwrap();

        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let entries = backup_dir(&dir, &files_dir).unwrap();
        assert_eq!(entries.len(), 2, "should backup 2 files: {entries:?}");
    }

    // Backing up a nonexistent file must fail (sha256_file cannot open it).
    #[test]
    fn test_backup_nonexistent_file() {
        let tmp = tempfile::tempdir().unwrap();
        let files_dir = tmp.path().join("files");
        fs::create_dir_all(&files_dir).unwrap();

        let result = backup_file(Path::new("/nonexistent/file.txt"), &files_dir);
        assert!(result.is_err());
    }

    // Restore-path validation rejects traversal and absolute paths.
    #[test]
    fn test_validate_restore_path_rejects_traversal() {
        assert!(validate_restore_path("../../etc/passwd").is_err());
        assert!(validate_restore_path("/tmp/../etc/evil").is_err());
        assert!(validate_restore_path("normal/path/file.txt").is_ok());
        // Unix-style absolute paths must be rejected on all platforms
        assert!(
            validate_restore_path("/absolute/path/file.txt").is_err(),
            "absolute paths should be rejected"
        );
        assert!(
            validate_restore_path("/etc/passwd").is_err(),
            "absolute paths should be rejected"
        );
    }

    // Manifest sha256 values must be exactly 64 lowercase hex chars.
    #[test]
    fn test_validate_sha256_filename() {
        let valid = "a".repeat(64);
        assert!(validate_sha256_filename(&valid).is_ok());
        assert!(validate_sha256_filename("short").is_err());
        assert!(validate_sha256_filename("../../etc/passwd").is_err());
        assert!(validate_sha256_filename(&"g".repeat(64)).is_err()); // non-hex
    }

    // DiffEntry round-trips through serde_json without losing the status.
    #[test]
    fn test_diff_status_serde() {
        let entry = DiffEntry {
            path: "/tmp/test.txt".to_string(),
            status: DiffStatus::Deleted,
            checkpoint_sha256: "abc123".to_string(),
            current_sha256: None,
        };
        let json = serde_json::to_string(&entry).unwrap();
        let parsed: DiffEntry = serde_json::from_str(&json).unwrap();
        assert_eq!(parsed.status, DiffStatus::Deleted);
    }

    #[test]
    fn test_create_and_purge_removes_expired() {
        // Verify create_and_purge() creates a new checkpoint AND purges
        // age-expired ones in a single call.
        let _guard = crate::TEST_ENV_LOCK
            .lock()
            .unwrap_or_else(|e| e.into_inner());

        let tmpdir = tempfile::tempdir().unwrap();
        let workdir = tmpdir.path().join("project");
        fs::create_dir_all(&workdir).unwrap();
        fs::write(workdir.join("file.txt"), "content").unwrap();

        let state_dir = tmpdir.path().join("state");

        let prev = std::env::var("XDG_STATE_HOME").ok();
        // SAFETY: serialized by crate::TEST_ENV_LOCK across all modules.
        unsafe { std::env::set_var("XDG_STATE_HOME", &state_dir) };

        // Seed an ancient checkpoint (60 days old, exceeds 30-day default)
        let cp_base = state_dir.join("tirith/checkpoints");
        let old_cp = cp_base.join("old-expired");
        let old_files = old_cp.join("files");
        fs::create_dir_all(&old_files).unwrap();

        let old_time = chrono::Utc::now() - chrono::Duration::days(60);
        let meta_json = serde_json::json!({
            "id": "old-expired",
            "created_at": old_time.to_rfc3339(),
            "trigger_command": "rm -rf old",
            "paths": ["/tmp/old"],
            "total_bytes": 8,
            "file_count": 1
        });
        fs::write(old_cp.join("meta.json"), meta_json.to_string()).unwrap();
        fs::write(old_files.join("dummy"), "old data").unwrap();
        let manifest = serde_json::json!([{
            "original_path": "old.txt",
            "sha256": "dummy",
            "size": 8,
            "is_dir": false
        }]);
        fs::write(old_cp.join("manifest.json"), manifest.to_string()).unwrap();
        assert!(old_cp.exists());

        // Act
        let work_str = workdir.to_str().unwrap();
        let result = create_and_purge(&[work_str], Some("rm -rf tempstuff"));

        // Restore env before assertions (so cleanup runs even on failure)
        match prev {
            Some(val) => unsafe { std::env::set_var("XDG_STATE_HOME", val) },
            None => unsafe { std::env::remove_var("XDG_STATE_HOME") },
        }

        assert!(result.is_ok(), "create_and_purge failed: {result:?}");
        assert!(
            !old_cp.exists(),
            "expired checkpoint should have been purged"
        );
        let remaining: Vec<_> = fs::read_dir(&cp_base)
            .unwrap()
            .filter_map(|e| e.ok())
            .collect();
        assert_eq!(
            remaining.len(),
            1,
            "exactly one new checkpoint should remain"
        );
    }
}