// sr_ai/cache/mod.rs
1pub mod fingerprint;
2pub mod store;
3
4use crate::commands::commit::CommitPlan;
5use std::collections::BTreeMap;
6use std::path::{Path, PathBuf};
7
8use fingerprint::{compute_fingerprints, sha256_hex};
9use store::{CacheEntry, cache_dir, list_entries, read_entry, write_entry};
10
/// Result of a cache lookup, in decreasing order of usefulness.
pub enum CacheLookup {
    /// Exact fingerprint match — use cached plan directly.
    ExactHit(CommitPlan),
    /// Partial match — provides previous plan + delta summary as hints.
    IncrementalHit {
        /// The plan stored by the closest earlier run.
        previous_plan: CommitPlan,
        /// Human-readable description of which files changed since that run.
        delta_summary: String,
    },
    /// No useful cached data.
    Miss,
}
23
/// Coordinates fingerprint computation, state-key derivation, and
/// cache reads/writes for a single repository.
pub struct CacheManager {
    /// Root of the repository this cache belongs to.
    repo_root: PathBuf,
    /// Directory where cache entries for this repo are stored.
    dir: PathBuf,
    /// Per-file content fingerprints (path -> hash); BTreeMap keeps
    /// iteration deterministic (sorted by path).
    fingerprints: BTreeMap<String, String>,
    /// Hash over fingerprints + invocation parameters; keys exact hits.
    state_key: String,
}
30
31impl CacheManager {
32    /// Build a new CacheManager, computing fingerprints and state key.
33    /// Returns None if cache dir can't be resolved (graceful degradation).
34    pub fn new(
35        repo_root: &Path,
36        staged_only: bool,
37        user_message: Option<&str>,
38        backend: &str,
39        model: &str,
40    ) -> Option<Self> {
41        let dir = cache_dir(repo_root)?;
42        let fingerprints = compute_fingerprints(repo_root, staged_only);
43
44        let state_key = compute_state_key(&fingerprints, staged_only, user_message, backend, model);
45
46        Some(Self {
47            repo_root: repo_root.to_path_buf(),
48            dir,
49            fingerprints,
50            state_key,
51        })
52    }
53
54    /// Look up the cache. Returns ExactHit, IncrementalHit, or Miss.
55    pub fn lookup(&self) -> CacheLookup {
56        // Tier 1: exact match
57        let exact_path = store::entry_path(&self.dir, &self.state_key);
58        if let Ok(entry) = read_entry(&exact_path) {
59            return CacheLookup::ExactHit(entry.plan);
60        }
61
62        // Tier 2: find best incremental candidate
63        let entries = match list_entries(&self.dir) {
64            Ok(e) => e,
65            Err(_) => return CacheLookup::Miss,
66        };
67
68        if entries.is_empty() {
69            return CacheLookup::Miss;
70        }
71
72        // Pick the most recent entry as the incremental candidate
73        let candidate = &entries[0];
74        let delta = compute_delta(&candidate.fingerprints, &self.fingerprints);
75
76        // Only use incremental if ≤50% of files changed
77        let total = self.fingerprints.len().max(candidate.fingerprints.len());
78        let changed = delta.changed.len() + delta.added.len() + delta.removed.len();
79
80        if total == 0 || changed * 2 > total {
81            return CacheLookup::Miss;
82        }
83
84        let summary = format_delta_summary(&delta);
85        CacheLookup::IncrementalHit {
86            previous_plan: candidate.plan.clone(),
87            delta_summary: summary,
88        }
89    }
90
91    /// Store a plan in the cache.
92    pub fn store(&self, plan: &CommitPlan, backend: &str, model: &str) {
93        let entry = CacheEntry {
94            state_key: self.state_key.clone(),
95            fingerprints: self.fingerprints.clone(),
96            plan: plan.clone(),
97            created_at: store::now_secs(),
98            backend: backend.to_string(),
99            model: model.to_string(),
100        };
101
102        if let Err(e) = write_entry(&self.dir, &entry) {
103            eprintln!("Warning: failed to write cache: {e}");
104        }
105    }
106
107    /// Clear cache for this repo.
108    pub fn clear(&self) -> anyhow::Result<usize> {
109        store::clear(&self.dir)
110    }
111
112    #[allow(dead_code)]
113    pub fn dir(&self) -> &Path {
114        &self.dir
115    }
116
117    #[allow(dead_code)]
118    pub fn repo_root(&self) -> &Path {
119        &self.repo_root
120    }
121}
122
123/// Compute the full state key from fingerprints + parameters.
124fn compute_state_key(
125    fingerprints: &BTreeMap<String, String>,
126    staged_only: bool,
127    user_message: Option<&str>,
128    backend: &str,
129    model: &str,
130) -> String {
131    let mut data = String::new();
132    for (file, hash) in fingerprints {
133        data.push_str(file);
134        data.push(':');
135        data.push_str(hash);
136        data.push('\n');
137    }
138    data.push_str(&format!("staged:{staged_only}\n"));
139    if let Some(msg) = user_message {
140        data.push_str(&format!("message:{msg}\n"));
141    }
142    data.push_str(&format!("backend:{backend}\n"));
143    data.push_str(&format!("model:{model}\n"));
144
145    sha256_hex(data.as_bytes())
146}
147
/// File-level difference between two fingerprint maps (old vs. new).
struct FileDelta {
    // Present in both maps with identical hashes.
    unchanged: Vec<String>,
    // Present in both maps with differing hashes.
    changed: Vec<String>,
    // Present only in the new map.
    added: Vec<String>,
    // Present only in the old map.
    removed: Vec<String>,
}
154
155fn compute_delta(old: &BTreeMap<String, String>, new: &BTreeMap<String, String>) -> FileDelta {
156    let mut unchanged = Vec::new();
157    let mut changed = Vec::new();
158    let mut added = Vec::new();
159    let mut removed = Vec::new();
160
161    for (file, new_hash) in new {
162        match old.get(file) {
163            Some(old_hash) if old_hash == new_hash => unchanged.push(file.clone()),
164            Some(_) => changed.push(file.clone()),
165            None => added.push(file.clone()),
166        }
167    }
168
169    for file in old.keys() {
170        if !new.contains_key(file) {
171            removed.push(file.clone());
172        }
173    }
174
175    FileDelta {
176        unchanged,
177        changed,
178        added,
179        removed,
180    }
181}
182
183fn format_delta_summary(delta: &FileDelta) -> String {
184    let mut parts = Vec::new();
185
186    if !delta.unchanged.is_empty() {
187        parts.push(format!(
188            "Unchanged files (keep previous groupings): {}",
189            delta.unchanged.join(", ")
190        ));
191    }
192    if !delta.changed.is_empty() {
193        parts.push(format!(
194            "Modified files (re-analyze): {}",
195            delta.changed.join(", ")
196        ));
197    }
198    if !delta.added.is_empty() {
199        parts.push(format!("New files: {}", delta.added.join(", ")));
200    }
201    if !delta.removed.is_empty() {
202        parts.push(format!(
203            "Removed files (drop from plan): {}",
204            delta.removed.join(", ")
205        ));
206    }
207
208    parts.join("\n")
209}