polykit_core/remote_cache/filesystem.rs

//! Filesystem backend for remote cache.
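//!
//! On-disk layout (illustrative; hash and key values are made up):
//!
//! ```text
//! <cache_dir>/remote/<repo-root-hash>/<first 2 key chars>/<rest of key>.zst
//! ```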

use std::fs;
use std::path::{Path, PathBuf};

use async_trait::async_trait;

use crate::error::{Error, Result};

use super::artifact::Artifact;
use super::backend::RemoteCacheBackend;
use super::cache_key::CacheKey;

/// Filesystem backend for remote cache.
///
/// Stores artifacts in a local directory with git worktree support.
/// Multiple worktrees of the same repository share a single cache directory,
/// keyed by the repository root.
pub struct FilesystemBackend {
    cache_dir: PathBuf,
}

impl FilesystemBackend {
    /// Creates a new filesystem backend.
    ///
    /// # Arguments
    ///
    /// * `cache_dir` - Base directory for cache storage
    ///
    /// # Errors
    ///
    /// Returns an error if the cache directory cannot be created or accessed.
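    ///
    /// # Examples
    ///
    /// A minimal sketch; it assumes this module is publicly reachable at
    /// `polykit_core::remote_cache::filesystem` from outside the crate.
    ///
    /// ```no_run
    /// # use polykit_core::remote_cache::filesystem::FilesystemBackend;
    /// let backend = FilesystemBackend::new(".polykit/cache")?;
    /// # Ok::<(), polykit_core::error::Error>(())
    /// ```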
    pub fn new(cache_dir: impl AsRef<Path>) -> Result<Self> {
        let cache_dir = cache_dir.as_ref().to_path_buf();

        // Resolve git repository root if possible
        let repo_root = Self::find_repo_root(&cache_dir)?;
        let cache_dir = if let Some(repo_root) = repo_root {
            // Use repository root for stable cache paths across worktrees
            let repo_hash = Self::hash_path(&repo_root)?;
            cache_dir.join("remote").join(repo_hash)
        } else {
            // Fall back to the provided cache directory
            cache_dir.join("remote")
        };

        // Create cache directory
        fs::create_dir_all(&cache_dir).map_err(Error::Io)?;

        Ok(Self { cache_dir })
    }

    /// Finds the root of the git repository containing the given path, if any.
    ///
    /// Uses `git rev-parse --git-common-dir` so that linked worktrees resolve
    /// to the main repository's `.git` directory and therefore share a single
    /// cache location (plain `--git-dir` would point each linked worktree at
    /// its own private directory under `.git/worktrees/`).
    fn find_repo_root(start: &Path) -> Result<Option<PathBuf>> {
        use std::process::Command;

        let output = Command::new("git")
            .arg("rev-parse")
            .arg("--git-common-dir")
            .current_dir(start)
            .output();

        match output {
            Ok(output) if output.status.success() => {
                let git_dir = String::from_utf8_lossy(&output.stdout).trim().to_string();
                let git_path = PathBuf::from(&git_dir);
                // The repository root is the parent of the `.git` directory
                if git_path.is_absolute() {
                    Ok(Some(git_path.parent().unwrap_or(&git_path).to_path_buf()))
                } else {
                    // `git` may print a relative path (e.g. `.git`); resolve it
                    // against the starting directory first
                    Ok(Some(start.join(&git_dir).parent().unwrap_or(start).to_path_buf()))
                }
            }
            // Not inside a git repository, or `git` is not installed
            _ => Ok(None),
        }
    }

    /// Computes a stable hash of a path for use in cache directory names.
    fn hash_path(path: &Path) -> Result<String> {
        use sha2::{Digest, Sha256};

        let path_str = path.to_string_lossy();
        let mut hasher = Sha256::new();
        hasher.update(path_str.as_bytes());
        let hash = hasher.finalize();
        // Keep the first 16 hex characters (64 bits): short, but still
        // effectively collision-free across a handful of repository paths
        Ok(format!("{:x}", hash)[..16].to_string())
    }

    /// Gets the cache path for a given cache key.
    fn cache_path(&self, key: &CacheKey) -> PathBuf {
        let key_str = key.as_string();
        // Use first 2 chars for directory structure to avoid too many files in one dir
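        // e.g. a key "abcdef..." lands at `<cache_dir>/ab/cdef....zst`; this
        // sharding assumes keys are hex digests at least two characters long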
        let dir = &key_str[..2];
        let file = &key_str[2..];
        self.cache_dir.join(dir).join(format!("{}.zst", file))
    }

    /// Ensures the parent directory exists for a cache path.
    async fn ensure_parent_dir(&self, path: &Path) -> Result<()> {
        if let Some(parent) = path.parent() {
            tokio::fs::create_dir_all(parent).await.map_err(Error::Io)?;
        }
        Ok(())
    }
}

#[async_trait]
impl RemoteCacheBackend for FilesystemBackend {
    async fn upload_artifact(&self, key: &CacheKey, artifact: &Artifact) -> Result<()> {
        let cache_path = self.cache_path(key);
        self.ensure_parent_dir(&cache_path).await?;

        // Write atomically: stage into a process-unique temp file so readers
        // never observe a partially written artifact
        let temp_path = cache_path.with_extension(format!("tmp.{}", std::process::id()));
        tokio::fs::write(&temp_path, artifact.compressed_data())
            .await
            .map_err(Error::Io)?;

        // Atomic rename into the final location; if two writers race on the
        // same key, the last rename wins
        if let Err(e) = tokio::fs::rename(&temp_path, &cache_path).await {
            // Clean up temp file on error
            let _ = tokio::fs::remove_file(&temp_path).await;
            return Err(Error::Io(e));
        }

        Ok(())
    }

    async fn fetch_artifact(&self, key: &CacheKey) -> Result<Option<Artifact>> {
        let cache_path = self.cache_path(key);

        // Read the compressed data, treating a missing file as a cache miss;
        // checking existence first would race with concurrent writers
        let data = match tokio::fs::read(&cache_path).await {
            Ok(data) => data,
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
            Err(e) => return Err(Error::Io(e)),
        };

        // Parse artifact
        let artifact = Artifact::from_compressed(data)?;

        Ok(Some(artifact))
    }

    async fn has_artifact(&self, key: &CacheKey) -> Result<bool> {
        let cache_path = self.cache_path(key);
        // `try_exists` distinguishes "not there" from I/O errors such as
        // permission failures, and avoids blocking the async executor
        tokio::fs::try_exists(&cache_path).await.map_err(Error::Io)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::remote_cache::artifact::Artifact;
    use std::collections::BTreeMap;
    use std::path::PathBuf;
    use tempfile::TempDir;

    #[tokio::test]
    async fn test_filesystem_backend() {
        let temp_dir = TempDir::new().unwrap();
        let backend = FilesystemBackend::new(temp_dir.path()).unwrap();

        let mut output_files = BTreeMap::new();
        output_files.insert(PathBuf::from("file.txt"), b"content".to_vec());

        let artifact = Artifact::new(
            "test".to_string(),
            "build".to_string(),
            "echo".to_string(),
            "hash123".to_string(),
            output_files,
        )
        .unwrap();

        let key = CacheKey::builder()
            .package_id("test")
            .task_name("build")
            .command("echo")
            .dependency_graph_hash("abc")
            .toolchain_version("node-v20")
            .build()
            .unwrap();

        // Upload
        backend.upload_artifact(&key, &artifact).await.unwrap();

        // Check exists
        assert!(backend.has_artifact(&key).await.unwrap());

        // Fetch
        let fetched = backend.fetch_artifact(&key).await.unwrap();
        assert!(fetched.is_some());
    }
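
    // A sketch of the negative path: a key that was never uploaded should
    // report absent and fetch as `None`. The builder values are illustrative.
    #[tokio::test]
    async fn test_missing_artifact() {
        let temp_dir = TempDir::new().unwrap();
        let backend = FilesystemBackend::new(temp_dir.path()).unwrap();

        let key = CacheKey::builder()
            .package_id("absent")
            .task_name("build")
            .command("echo")
            .dependency_graph_hash("xyz")
            .toolchain_version("node-v20")
            .build()
            .unwrap();

        assert!(!backend.has_artifact(&key).await.unwrap());
        assert!(backend.fetch_artifact(&key).await.unwrap().is_none());
    }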
}