1use std::path::{Path, PathBuf};
14use std::process::Command;
15
16use crate::error::{SandboxError, SandlockError};
17
/// Default location for cached image rootfs extractions:
/// `$HOME/.cache/sandlock/images`.
fn default_cache_dir() -> PathBuf {
    // Fall back to /tmp when HOME is unset (e.g. minimal containers, CI).
    let base = std::env::var("HOME").unwrap_or_else(|_| String::from("/tmp"));
    Path::new(&base).join(".cache/sandlock/images")
}
23
/// Derives a stable 16-hex-digit cache key from an image reference.
///
/// NOTE: the key is based on the image *name*, not its content digest, so a
/// re-pulled tag (e.g. `alpine:latest`) reuses the old cache entry.
fn cache_key(image: &str) -> String {
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    let mut hasher = DefaultHasher::new();
    image.hash(&mut hasher);
    let digest = hasher.finish();
    format!("{:016x}", digest)
}
32
33pub fn extract(image: &str, cache_dir: Option<&Path>) -> Result<PathBuf, SandlockError> {
40 let cache = cache_dir
41 .map(PathBuf::from)
42 .unwrap_or_else(default_cache_dir);
43 let key = cache_key(image);
44 let rootfs = cache.join(&key).join("rootfs");
45
46 if rootfs.is_dir() {
48 if let Ok(mut entries) = std::fs::read_dir(&rootfs) {
49 if entries.next().is_some() {
50 return Ok(rootfs);
51 }
52 }
53 }
54
55 let output = Command::new("docker")
57 .args(["create", image, "/bin/true"])
58 .output()
59 .map_err(|e| SandboxError::Child(format!("docker not found: {}", e)))?;
60
61 if !output.status.success() {
62 let stderr = String::from_utf8_lossy(&output.stderr);
63 return Err(SandboxError::Child(
64 format!("docker create failed: {}", stderr.trim()),
65 ).into());
66 }
67
68 let container_id = String::from_utf8_lossy(&output.stdout).trim().to_string();
69
70 let result = extract_container(&container_id, &rootfs);
72
73 let _ = Command::new("docker")
75 .args(["rm", &container_id])
76 .stdout(std::process::Stdio::null())
77 .stderr(std::process::Stdio::null())
78 .status();
79
80 result?;
81 Ok(rootfs)
82}
83
84fn extract_container(container_id: &str, rootfs: &Path) -> Result<(), SandlockError> {
86 std::fs::create_dir_all(rootfs)
87 .map_err(|e| SandboxError::Io(e))?;
88
89 let mut child = Command::new("docker")
91 .args(["export", container_id])
92 .stdout(std::process::Stdio::piped())
93 .stderr(std::process::Stdio::piped())
94 .spawn()
95 .map_err(|e| SandboxError::Child(format!("docker export: {}", e)))?;
96
97 let stdout = child.stdout.take().unwrap();
98
99 let tar_status = Command::new("tar")
101 .args(["xf", "-", "-C"])
102 .arg(rootfs)
103 .stdin(stdout)
104 .stdout(std::process::Stdio::null())
105 .stderr(std::process::Stdio::piped())
106 .status()
107 .map_err(|e| SandboxError::Child(format!("tar extract: {}", e)))?;
108
109 let docker_status = child.wait()
110 .map_err(|e| SandboxError::Child(format!("docker export wait: {}", e)))?;
111
112 if !docker_status.success() {
113 let _ = std::fs::remove_dir_all(rootfs);
115 return Err(SandboxError::Child("docker export failed".into()).into());
116 }
117
118 if !tar_status.success() {
119 let _ = std::fs::remove_dir_all(rootfs);
120 return Err(SandboxError::Child("tar extraction failed".into()).into());
121 }
122
123 Ok(())
124}
125
126pub fn inspect_cmd(image: &str) -> Result<Vec<String>, SandlockError> {
130 let output = Command::new("docker")
131 .args([
132 "inspect", "--format",
133 "{{json .Config.Entrypoint}}|{{json .Config.Cmd}}",
134 image,
135 ])
136 .output()
137 .map_err(|_| SandboxError::Child("docker inspect failed".into()))?;
138
139 if !output.status.success() {
140 return Ok(vec!["/bin/sh".into()]);
141 }
142
143 let raw = String::from_utf8_lossy(&output.stdout).trim().to_string();
144 let parts: Vec<&str> = raw.splitn(2, '|').collect();
145
146 let entrypoint = parts.first().and_then(|s| parse_json_string_array(s));
147 let cmd = parts.get(1).and_then(|s| parse_json_string_array(s));
148
149 match (entrypoint, cmd) {
150 (Some(ep), Some(c)) => Ok([ep, c].concat()),
151 (Some(ep), None) => Ok(ep),
152 (None, Some(c)) => Ok(c),
153 (None, None) => Ok(vec!["/bin/sh".into()]),
154 }
155}
156
/// Parses a flat JSON array of strings (as produced by
/// `docker inspect --format '{{json …}}'`) into a `Vec<String>`.
///
/// Returns `None` for `null`, empty input, non-array input, or a malformed
/// array; returns `Some(vec![])` for `[]`. The common JSON escapes
/// (`\"`, `\\`, `\/`, `\n`, `\r`, `\t`) are decoded; unrecognized escapes are
/// kept verbatim.
///
/// This walks characters instead of splitting on ',' so that commas and
/// escape sequences *inside* elements are handled correctly — the previous
/// split-based version silently dropped any element containing a comma
/// (e.g. `["sh","-c","echo a,b"]`).
fn parse_json_string_array(s: &str) -> Option<Vec<String>> {
    let s = s.trim();
    if s == "null" || s.is_empty() {
        return None;
    }
    if !s.starts_with('[') || !s.ends_with(']') {
        return None;
    }
    let inner = &s[1..s.len() - 1];
    if inner.trim().is_empty() {
        return Some(Vec::new());
    }

    let mut result = Vec::new();
    let mut chars = inner.chars().peekable();
    loop {
        // Skip whitespace before an element.
        while matches!(chars.peek(), Some(c) if c.is_whitespace()) {
            chars.next();
        }
        // Every element must be a double-quoted string.
        if chars.next() != Some('"') {
            return None;
        }
        let mut item = String::new();
        loop {
            match chars.next() {
                Some('"') => break, // closing quote
                Some('\\') => match chars.next() {
                    Some('"') => item.push('"'),
                    Some('\\') => item.push('\\'),
                    Some('/') => item.push('/'),
                    Some('n') => item.push('\n'),
                    Some('r') => item.push('\r'),
                    Some('t') => item.push('\t'),
                    // Unknown escape (e.g. \uXXXX): keep it verbatim rather
                    // than rejecting the whole array.
                    Some(other) => {
                        item.push('\\');
                        item.push(other);
                    }
                    None => return None, // dangling backslash
                },
                Some(c) => item.push(c),
                None => return None, // unterminated string
            }
        }
        result.push(item);

        // Skip whitespace after the element; expect ',' or end of array.
        while matches!(chars.peek(), Some(c) if c.is_whitespace()) {
            chars.next();
        }
        match chars.next() {
            Some(',') => continue, // another element follows
            None => break,         // end of array
            Some(_) => return None, // unexpected trailing character
        }
    }
    Some(result)
}
179
#[cfg(test)]
mod tests {
    use super::*;

    /// Hashing the same image reference twice must yield the same key.
    #[test]
    fn test_cache_key_deterministic() {
        assert_eq!(cache_key("python:3.12-slim"), cache_key("python:3.12-slim"));
    }

    /// Different image references must map to different cache keys.
    #[test]
    fn test_cache_key_different() {
        assert_ne!(cache_key("python:3.12-slim"), cache_key("alpine:latest"));
    }

    /// The default cache directory lives under `sandlock/images`.
    #[test]
    fn test_default_cache_dir() {
        let dir = default_cache_dir();
        assert!(dir.to_str().unwrap().contains("sandlock/images"));
    }

    #[test]
    fn test_parse_json_array() {
        let expected: Vec<String> =
            vec!["python3".into(), "-c".into(), "print(1)".into()];
        assert_eq!(
            parse_json_string_array(r#"["python3","-c","print(1)"]"#),
            Some(expected)
        );
    }

    #[test]
    fn test_parse_json_null() {
        assert!(parse_json_string_array("null").is_none());
    }

    #[test]
    fn test_parse_json_empty_array() {
        assert_eq!(parse_json_string_array("[]"), Some(Vec::new()));
    }

    #[test]
    fn test_parse_json_single() {
        let expected = vec!["/bin/sh".to_string()];
        assert_eq!(parse_json_string_array(r#"["/bin/sh"]"#), Some(expected));
    }
}