use std::{
    collections::{BTreeMap, HashSet},
    path::{Path, PathBuf},
};

use fs_err as fs;
use memchr::memmem;
use memmap2::Mmap;
use miette::{Context, IntoDiagnostic};
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};

use crate::{
    env_vars,
    metadata::{build_reindexed_channels, Output},
    packaging::{contains_prefix_binary, contains_prefix_text, content_type, Files},
    recipe::parser::{Dependency, Requirements},
    render::resolved_dependencies::{
        install_environments, resolve_dependencies, FinalizedDependencies,
    },
    source::copy_dir::{copy_file, create_symlink, CopyOptions},
};

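/// Error that can occur while computing or serializing the cache key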
#[derive(Debug, thiserror::Error)]
pub enum CacheKeyError {
    #[error("No cache key available")]
    NoCacheKeyAvailable,
    #[error("Error serializing cache: {0}")]
    Serde(#[from] serde_json::Error),
}

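/// On-disk metadata describing a cached cache build, serialized to `cache.json`
/// next to the copied prefix files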
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Cache {
    /// The requirements that were used to build the cache
    pub requirements: Requirements,
    /// The finalized dependencies that were resolved for the cache build
    pub finalized_dependencies: FinalizedDependencies,
    /// The files in the prefix (as relative paths) and whether they contain the prefix
    pub prefix_files: Vec<(PathBuf, bool)>,
    /// The prefix that the cache was built in
    pub prefix: PathBuf,
}

impl Output {
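    /// Computes the cache key for this output: a SHA-256 hash over the recipe's cache
    /// section, the variant values selected for its unpinned build-time requirements,
    /// and the host and build platforms. Fails if the recipe has no `cache` section.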
    pub fn cache_key(&self) -> Result<String, CacheKeyError> {
        if let Some(cache) = &self.recipe.cache {
            let requirement_names = cache
                .requirements
                .build_time()
                .filter_map(|x| {
                    if let Dependency::Spec(spec) = x {
                        if spec.version.is_none() && spec.build.is_none() {
                            if let Some(name) = spec.name.as_ref() {
                                return Some(name.as_normalized().to_string());
                            }
                        }
                    }
                    None
                })
                .collect::<HashSet<_>>();

            let mut selected_variant = BTreeMap::new();
            for key in requirement_names.iter() {
                if let Some(value) = self.variant().get(key) {
                    selected_variant.insert(key.as_ref(), value.clone());
                }
            }
            // the host and build platforms are always part of the cache key
            selected_variant.insert("host_platform", self.host_platform().platform.to_string());
            selected_variant.insert(
                "build_platform",
                self.build_configuration.build_platform.platform.to_string(),
            );

            let cache_key = (cache, selected_variant);
            let mut hasher = Sha256::new();
            let serialized = serde_json::to_string(&cache_key)?;
            hasher.update(serialized.as_bytes());
            let result = hasher.finalize();
            Ok(format!("{:x}", result))
        } else {
            Err(CacheKeyError::NoCacheKeyAvailable)
        }
    }

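    /// Restores a previously built cache from `cache_dir` into the current prefix,
    /// rewriting symlinks and embedded prefix strings that still point at the old prefix.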
    async fn restore_cache(&self, cache_dir: PathBuf) -> Result<Output, miette::Error> {
        let cache: Cache = serde_json::from_str(
            &fs::read_to_string(cache_dir.join("cache.json")).into_diagnostic()?,
        )
        .into_diagnostic()?;
        let copy_options = CopyOptions {
            skip_exist: true,
            ..Default::default()
        };
        let cache_prefix = cache.prefix;

        let mut paths_created = HashSet::new();
        for (file, has_prefix) in &cache.prefix_files {
            tracing::info!("Restoring from cache: {:?}", file);
            let dest = self.prefix().join(file);
            let source = &cache_dir.join("prefix").join(file);
            copy_file(source, &dest, &mut paths_created, &copy_options).into_diagnostic()?;

            // if the symlink points into the old prefix, rewrite it to point into the new prefix
            if source.is_symlink() {
                let symlink_target = fs::read_link(source).into_diagnostic()?;
                if let Ok(rest) = symlink_target.strip_prefix(&cache_prefix) {
                    let new_symlink_target = self.prefix().join(rest);
                    fs::remove_file(&dest).into_diagnostic()?;
                    create_symlink(&new_symlink_target, &dest).into_diagnostic()?;
                }
            }

            // replace the old prefix with the new one in files that embed it
            if *has_prefix {
                replace_prefix(&dest, &cache_prefix, self.prefix())?;
            }
        }

        Ok(Output {
            finalized_cache_dependencies: Some(cache.finalized_dependencies.clone()),
            ..self.clone()
        })
    }

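    /// Builds the cache for this output, or restores it from disk if a build with the
    /// same cache key already exists. If the recipe has no `cache` section, the output
    /// is returned unchanged.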
    pub(crate) async fn build_or_fetch_cache(
        &self,
        tool_configuration: &crate::tool_configuration::Configuration,
    ) -> Result<Self, miette::Error> {
        let span = tracing::info_span!("Running cache build");
        let _enter = span.enter();

        let target_platform = self.build_configuration.target_platform;
        let mut env_vars = env_vars::vars(self, "BUILD");
        env_vars.extend(env_vars::os_vars(self.prefix(), &target_platform));

        if let Some(cache) = &self.recipe.cache {
            tracing::info!("Cache key: {:?}", self.cache_key().into_diagnostic()?);
            let cache_key = format!("bld_{}", self.cache_key().into_diagnostic()?);

            let cache_dir = self
                .build_configuration
                .directories
                .cache_dir
                .join(cache_key);

            // restore the cache if it was already built with the same cache key
            if cache_dir.exists() {
                tracing::info!("Restoring cache from {:?}", cache_dir);
                return self.restore_cache(cache_dir).await;
            }

            let channels = build_reindexed_channels(&self.build_configuration, tool_configuration)
                .into_diagnostic()
                .context("failed to reindex output channel")?;

            let finalized_dependencies =
                resolve_dependencies(&cache.requirements, self, &channels, tool_configuration)
                    .await
                    .unwrap();

            install_environments(self, &finalized_dependencies, tool_configuration)
                .await
                .into_diagnostic()?;

            cache
                .build
                .script()
                .run_script(
                    env_vars,
                    &self.build_configuration.directories.work_dir,
                    &self.build_configuration.directories.recipe_dir,
                    &self.build_configuration.directories.host_prefix,
                    Some(&self.build_configuration.directories.build_prefix),
                    None,
                )
                .await
                .into_diagnostic()?;

            let new_files = Files::from_prefix(
                self.prefix(),
                cache.build.always_include_files(),
                cache.build.files(),
            )
            .into_diagnostic()?;

            let prefix_cache_dir = cache_dir.join("prefix");
            fs::create_dir_all(&prefix_cache_dir).into_diagnostic()?;

            let mut creation_cache = HashSet::new();
            let mut copied_files = Vec::new();
            let copy_options = CopyOptions::default();
            for file in &new_files.new_files {
                // skip directories (they are created implicitly when their files are copied)
                if file.is_dir() && !file.is_symlink() {
                    continue;
                }
                let stripped = file
                    .strip_prefix(self.prefix())
                    .expect("File should be in prefix");
                let dest = &prefix_cache_dir.join(stripped);
                copy_file(file, dest, &mut creation_cache, &copy_options).into_diagnostic()?;

                if !file.is_symlink() {
                    // record whether the file contains the prefix (as text or binary)
                    let content_type = content_type(file).into_diagnostic()?;
                    let has_prefix = if content_type.map(|c| c.is_text()).unwrap_or(false) {
                        contains_prefix_text(file, self.prefix(), self.target_platform())
                    } else {
                        contains_prefix_binary(file, self.prefix())
                    }
                    .into_diagnostic()?;
                    copied_files.push((stripped.to_path_buf(), has_prefix));
                } else {
                    copied_files.push((stripped.to_path_buf(), false));
                }
            }

            let cache = Cache {
                requirements: cache.requirements.clone(),
                finalized_dependencies: finalized_dependencies.clone(),
                prefix_files: copied_files,
                prefix: self.prefix().to_path_buf(),
            };

            let cache_file = cache_dir.join("cache.json");
            fs::write(cache_file, serde_json::to_string(&cache).unwrap()).into_diagnostic()?;

            Ok(Output {
                finalized_cache_dependencies: Some(finalized_dependencies),
                ..self.clone()
            })
        } else {
            Ok(self.clone())
        }
    }
}

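/// Replaces all occurrences of `old_prefix` with `new_prefix` in the given file.
/// Both prefixes must have the same byte length; identical prefixes are a no-op.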
fn replace_prefix(file: &Path, old_prefix: &Path, new_prefix: &Path) -> Result<(), miette::Error> {
    // memory-map the file and replace every occurrence of the old prefix with the new one
    let output = {
        let map_file = fs::File::open(file).into_diagnostic()?;
        let mmap = unsafe { Mmap::map(&map_file).into_diagnostic()? };
        let new_prefix_bytes = new_prefix.as_os_str().as_encoded_bytes();
        let old_prefix_bytes = old_prefix.as_os_str().as_encoded_bytes();

        // nothing to do if the prefixes are identical
        if old_prefix == new_prefix {
            return Ok(());
        }

        assert_eq!(
            new_prefix_bytes.len(),
            old_prefix_bytes.len(),
            "Prefixes must have the same length: {:?} != {:?}",
            new_prefix,
            old_prefix
        );

        let mut output = Vec::with_capacity(mmap.len());
        let mut last_match_end = 0;
        let finder = memmem::Finder::new(old_prefix_bytes);

        while let Some(index) = finder.find(&mmap[last_match_end..]) {
            let absolute_index = last_match_end + index;
            output.extend_from_slice(&mmap[last_match_end..absolute_index]);
            output.extend_from_slice(new_prefix_bytes);
            last_match_end = absolute_index + new_prefix_bytes.len();
        }
        output.extend_from_slice(&mmap[last_match_end..]);
        output
    };

    // overwrite the file with the patched content
    fs::write(file, output).into_diagnostic()
}