1use crate::{
4 buildinfo::RawBuildInfo,
5 compilers::{Compiler, CompilerSettings, Language},
6 output::Builds,
7 resolver::GraphEdges,
8 ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
9 ProjectPaths, ProjectPathsConfig, SourceCompilationKind,
10};
11use foundry_compilers_artifacts::{
12 sources::{Source, Sources},
13 Settings,
14};
15use foundry_compilers_core::{
16 error::{Result, SolcError},
17 utils::{self, strip_prefix},
18};
19use semver::Version;
20use serde::{de::DeserializeOwned, Deserialize, Serialize};
21use std::{
22 collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
23 fs,
24 path::{Path, PathBuf},
25 time::{Duration, UNIX_EPOCH},
26};
27
/// Version identifier written to the `_format` field of the cache file.
const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4";

/// The file name of the default cache file.
pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
37
/// The on-disk compiler cache: tracks cached source files, their artifacts, the build
/// infos that produced them, and the settings profiles they were compiled with.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CompilerCache<S = Settings> {
    /// Cache format version identifier (serialized as `_format`).
    #[serde(rename = "_format")]
    pub format: String,
    /// The project paths layout this cache was created with.
    pub paths: ProjectPaths,
    /// Cache entries keyed by source file path.
    pub files: BTreeMap<PathBuf, CacheEntry>,
    /// Ids of all build infos known to this cache.
    pub builds: BTreeSet<String>,
    /// Compiler settings keyed by profile name.
    pub profiles: BTreeMap<String, S>,
}
49
50impl<S> CompilerCache<S> {
51 pub fn new(format: String, paths: ProjectPaths) -> Self {
52 Self {
53 format,
54 paths,
55 files: Default::default(),
56 builds: Default::default(),
57 profiles: Default::default(),
58 }
59 }
60}
61
62impl<S: CompilerSettings> CompilerCache<S> {
63 pub fn is_empty(&self) -> bool {
64 self.files.is_empty()
65 }
66
67 pub fn remove(&mut self, file: &Path) -> Option<CacheEntry> {
69 self.files.remove(file)
70 }
71
72 pub fn len(&self) -> usize {
74 self.files.len()
75 }
76
77 pub fn artifacts_len(&self) -> usize {
79 self.entries().map(|entry| entry.artifacts().count()).sum()
80 }
81
82 pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
84 self.files.values()
85 }
86
87 pub fn entry(&self, file: &Path) -> Option<&CacheEntry> {
89 self.files.get(file)
90 }
91
92 pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> {
94 self.files.get_mut(file)
95 }
96
97 #[instrument(skip_all, name = "sol-files-cache::read")]
115 pub fn read(path: &Path) -> Result<Self> {
116 trace!("reading solfiles cache at {}", path.display());
117 let cache: Self = utils::read_json_file(path)?;
118 trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
119 Ok(cache)
120 }
121
122 pub fn read_joined<L>(paths: &ProjectPathsConfig<L>) -> Result<Self> {
139 let mut cache = Self::read(&paths.cache)?;
140 cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
141 Ok(cache)
142 }
143
144 pub fn write(&self, path: &Path) -> Result<()> {
146 trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
147 utils::create_parent_dir_all(path)?;
148 utils::write_json_file(self, path, 128 * 1024)?;
149 trace!("cache file located: \"{}\"", path.display());
150 Ok(())
151 }
152
153 pub fn remove_outdated_builds(&mut self) {
155 let mut outdated = Vec::new();
156 for build_id in &self.builds {
157 if !self
158 .entries()
159 .flat_map(|e| e.artifacts.values())
160 .flat_map(|a| a.values())
161 .flat_map(|a| a.values())
162 .any(|a| a.build_id == *build_id)
163 {
164 outdated.push(build_id.to_owned());
165 }
166 }
167
168 for build_id in outdated {
169 self.builds.remove(&build_id);
170 let path = self.paths.build_infos.join(build_id).with_extension("json");
171 let _ = std::fs::remove_file(path);
172 }
173 }
174
175 pub fn join_entries(&mut self, root: &Path) -> &mut Self {
177 self.files = std::mem::take(&mut self.files)
178 .into_iter()
179 .map(|(path, entry)| (root.join(path), entry))
180 .collect();
181 self
182 }
183
184 pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
186 self.files = std::mem::take(&mut self.files)
187 .into_iter()
188 .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
189 .collect();
190 self
191 }
192
193 pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
195 self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
196 self
197 }
198
199 pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
201 self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
202 self
203 }
204
205 pub fn remove_missing_files(&mut self) {
209 trace!("remove non existing files from cache");
210 self.files.retain(|file, _| {
211 let exists = file.exists();
212 if !exists {
213 trace!("remove {} from cache", file.display());
214 }
215 exists
216 })
217 }
218
219 pub fn all_artifacts_exist(&self) -> bool {
221 self.files.values().all(|entry| entry.all_artifacts_exist())
222 }
223
224 pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
246 self.files = self
247 .files
248 .into_iter()
249 .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
250 .collect();
251 self
252 }
253
254 pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> {
266 let entry = self.entry(contract_file)?;
267 entry.find_artifact_path(contract_name)
268 }
269
270 pub fn read_artifact<Artifact: DeserializeOwned>(
289 &self,
290 contract_file: &Path,
291 contract_name: &str,
292 ) -> Result<Artifact> {
293 let artifact_path =
294 self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
295 SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
296 })?;
297 utils::read_json_file(artifact_path)
298 }
299
300 pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
315 &self,
316 ) -> Result<Artifacts<Artifact>> {
317 use rayon::prelude::*;
318
319 let artifacts = self
320 .files
321 .par_iter()
322 .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files)))
323 .collect::<Result<ArtifactsMap<_>>>()?;
324 Ok(Artifacts(artifacts))
325 }
326
327 pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
332 use rayon::prelude::*;
333
334 self.builds
335 .par_iter()
336 .map(|build_id| {
337 utils::read_json_file(&build_info_dir.join(build_id).with_extension("json"))
338 .map(|b| (build_id.clone(), b))
339 })
340 .collect::<Result<_>>()
341 .map(|b| Builds(b))
342 }
343}
344
#[cfg(feature = "async")]
impl<S: CompilerSettings> CompilerCache<S> {
    /// Async version of [`Self::read`], executed on a blocking task.
    pub async fn async_read(path: &Path) -> Result<Self> {
        let path = path.to_owned();
        Self::asyncify(move || Self::read(&path)).await
    }

    /// Async version of [`Self::write`].
    pub async fn async_write(&self, path: &Path) -> Result<()> {
        let content = serde_json::to_vec(self)?;
        // Mirror the sync `write` path: ensure the parent directory exists so a
        // fresh project does not fail on a missing cache directory.
        utils::create_parent_dir_all(path)?;
        tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))
    }

    /// Runs the blocking closure `f` on a dedicated blocking task and awaits its
    /// result, mapping a panicked/cancelled task to an I/O error.
    async fn asyncify<F, T>(f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T> + Send + 'static,
        T: Send + 'static,
    {
        match tokio::task::spawn_blocking(f).await {
            Ok(res) => res,
            Err(_) => Err(SolcError::io(
                std::io::Error::new(std::io::ErrorKind::Other, "background task failed"),
                "",
            )),
        }
    }
}
371
372impl<S> Default for CompilerCache<S> {
373 fn default() -> Self {
374 Self {
375 format: ETHERS_FORMAT_VERSION.to_string(),
376 builds: Default::default(),
377 files: Default::default(),
378 paths: Default::default(),
379 profiles: Default::default(),
380 }
381 }
382}
383
384impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache<S> {
385 fn from(config: &'a ProjectPathsConfig) -> Self {
386 let paths = config.paths_relative();
387 Self::new(Default::default(), paths)
388 }
389}
390
/// A single cached artifact: where it lives on disk and which build produced it.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CachedArtifact {
    /// Path to the artifact file.
    pub path: PathBuf,
    /// Id of the build that produced this artifact.
    pub build_id: String,
}

/// Cached artifacts keyed by contract name, then compiler version, then profile name.
pub type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;
401
/// A cache entry for a single source file.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    /// Last modification time of the source file, in milliseconds since the Unix epoch.
    pub last_modification_date: u64,
    /// Hash of the source file's content.
    pub content_hash: String,
    /// Path of the source file, relative to the project root.
    pub source_name: PathBuf,
    /// Paths of the files this source imports.
    pub imports: BTreeSet<PathBuf>,
    /// The version requirement derived for this file, if any.
    pub version_requirement: Option<String>,
    /// All artifacts produced for this file, keyed by name, version and profile.
    pub artifacts: CachedArtifacts,
    /// Whether this file has been passed to the compiler at least once.
    pub seen_by_compiler: bool,
}
441
442impl CacheEntry {
443 pub fn last_modified(&self) -> Duration {
445 Duration::from_millis(self.last_modification_date)
446 }
447
448 pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> {
463 self.artifacts
464 .get(contract_name)?
465 .iter()
466 .next()
467 .and_then(|(_, a)| a.iter().next())
468 .map(|(_, p)| p.path.as_path())
469 }
470
471 pub fn read_last_modification_date(file: &Path) -> Result<u64> {
473 let last_modification_date = fs::metadata(file)
474 .map_err(|err| SolcError::io(err, file.to_path_buf()))?
475 .modified()
476 .map_err(|err| SolcError::io(err, file.to_path_buf()))?
477 .duration_since(UNIX_EPOCH)
478 .map_err(SolcError::msg)?
479 .as_millis() as u64;
480 Ok(last_modification_date)
481 }
482
483 fn read_artifact_files<Artifact: DeserializeOwned>(
487 &self,
488 ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
489 let mut artifacts = BTreeMap::new();
490 for (artifact_name, versioned_files) in self.artifacts.iter() {
491 let mut files = Vec::with_capacity(versioned_files.len());
492 for (version, cached_artifact) in versioned_files {
493 for (profile, cached_artifact) in cached_artifact {
494 let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?;
495 files.push(ArtifactFile {
496 artifact,
497 file: cached_artifact.path.clone(),
498 version: version.clone(),
499 build_id: cached_artifact.build_id.clone(),
500 profile: profile.clone(),
501 });
502 }
503 }
504 artifacts.insert(artifact_name.clone(), files);
505 }
506 Ok(artifacts)
507 }
508
509 pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
510 where
511 I: IntoIterator<Item = (&'a String, A)>,
512 A: IntoIterator<Item = &'a ArtifactFile<T>>,
513 {
514 for (name, artifacts) in artifacts.into_iter() {
515 for artifact in artifacts {
516 self.artifacts
517 .entry(name.clone())
518 .or_default()
519 .entry(artifact.version.clone())
520 .or_default()
521 .insert(
522 artifact.profile.clone(),
523 CachedArtifact {
524 build_id: artifact.build_id.clone(),
525 path: artifact.file.clone(),
526 },
527 );
528 }
529 }
530 }
531
532 pub fn contains(&self, version: &Version, profile: &str) -> bool {
534 self.artifacts.values().any(|artifacts| {
535 artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some()
536 })
537 }
538
539 pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &str, &CachedArtifact)> {
541 self.artifacts
542 .values()
543 .flatten()
544 .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a)))
545 }
546
547 pub fn find_artifact(
549 &self,
550 contract: &str,
551 version: &Version,
552 profile: &str,
553 ) -> Option<&CachedArtifact> {
554 self.artifacts
555 .get(contract)
556 .and_then(|files| files.get(version))
557 .and_then(|files| files.get(profile))
558 }
559
560 pub fn artifacts_for_version<'a>(
562 &'a self,
563 version: &'a Version,
564 ) -> impl Iterator<Item = &'a CachedArtifact> + 'a {
565 self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file))
566 }
567
568 pub fn artifacts(&self) -> impl Iterator<Item = &CachedArtifact> {
570 self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values)
571 }
572
573 pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut CachedArtifact> {
575 self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut)
576 }
577
578 pub fn all_artifacts_exist(&self) -> bool {
580 self.artifacts().all(|a| a.path.exists())
581 }
582
583 pub fn join_artifacts_files(&mut self, base: &Path) {
585 self.artifacts_mut().for_each(|a| a.path = base.join(&a.path))
586 }
587
588 pub fn strip_artifact_files_prefixes(&mut self, base: &Path) {
590 self.artifacts_mut().for_each(|a| {
591 if let Ok(rem) = a.path.strip_prefix(base) {
592 a.path = rem.to_path_buf();
593 }
594 })
595 }
596}
597
/// Tracks which compiler versions each source file was compiled with.
#[derive(Clone, Debug, Default)]
pub struct GroupedSources {
    /// The set of versions seen per file.
    pub inner: HashMap<PathBuf, HashSet<Version>>,
}
603
604impl GroupedSources {
605 pub fn insert(&mut self, file: PathBuf, version: Version) {
607 match self.inner.entry(file) {
608 hash_map::Entry::Occupied(mut entry) => {
609 entry.get_mut().insert(version);
610 }
611 hash_map::Entry::Vacant(entry) => {
612 entry.insert(HashSet::from([version]));
613 }
614 }
615 }
616
617 pub fn contains(&self, file: &Path, version: &Version) -> bool {
619 self.inner.get(file).is_some_and(|versions| versions.contains(version))
620 }
621}
622
/// Bookkeeping state used while compiling with an enabled cache.
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// The loaded cache file, updated in place during compilation.
    pub cache: CompilerCache<C::Settings>,

    /// Artifacts read from disk for previously compiled files.
    pub cached_artifacts: Artifacts<T::Artifact>,

    /// Build infos read from disk for cached builds.
    pub cached_builds: Builds<C::Language>,

    /// Dependency graph edges of all resolved sources.
    pub edges: GraphEdges<C::ParsedSource>,

    /// The project being compiled.
    pub project: &'a Project<C, T>,

    /// Files determined to be dirty, i.e. requiring recompilation.
    pub dirty_sources: HashSet<PathBuf>,

    /// All (file, version) pairs that took part in this compilation run.
    pub sources_in_scope: GroupedSources,

    /// Content hashes of sources, filled on demand by `fill_hashes`.
    pub content_hashes: HashMap<PathBuf, String>,
}
658
impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCacheInner<'_, T, C>
{
    /// Creates a fresh cache entry for `file`, recording its content hash, imports
    /// (relative to the project root) and version requirement. Artifacts start empty
    /// and are merged in after compilation.
    fn create_cache_entry(&mut self, file: PathBuf, source: &Source) {
        let imports = self
            .edges
            .imports(&file)
            .into_iter()
            .map(|import| strip_prefix(import, self.project.root()).into())
            .collect();

        let entry = CacheEntry {
            // A file that cannot be stat'ed gets timestamp 0 rather than failing.
            last_modification_date: CacheEntry::read_last_modification_date(&file)
                .unwrap_or_default(),
            content_hash: source.content_hash(),
            source_name: strip_prefix(&file, self.project.root()).into(),
            imports,
            version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()),
            artifacts: Default::default(),
            seen_by_compiler: false,
        };

        self.cache.files.insert(file, entry);
    }

    /// Filters `sources` down to those that must be compiled for `version`/`profile`,
    /// marking each kept source as `Complete` (missing artifacts) or `Optimized`
    /// (an import of a `Complete` source).
    fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        // Sources that need a full compilation because artifacts are missing.
        let mut compile_complete = HashSet::new();
        // Imports of complete sources, compiled in optimized mode.
        let mut compile_optimized = HashSet::new();

        for (file, source) in sources.iter() {
            // Record that this (file, version) pair is part of the current run.
            self.sources_in_scope.insert(file.clone(), version.clone());

            if self.is_missing_artifacts(file, version, profile) {
                compile_complete.insert(file.clone());
            }

            // Make sure every source we've seen has a cache entry.
            if !self.cache.files.contains_key(file) {
                self.create_cache_entry(file.clone(), source);
            }
        }

        // Every import of a fully compiled source that is not itself fully compiled
        // is compiled in optimized mode.
        for source in &compile_complete {
            for import in self.edges.imports(source) {
                if !compile_complete.contains(import) {
                    compile_optimized.insert(import.clone());
                }
            }
        }

        // Keep only sources selected above and tag their compilation kind.
        sources.retain(|file, source| {
            source.kind = if compile_complete.contains(file) {
                SourceCompilationKind::Complete
            } else if compile_optimized.contains(file) {
                SourceCompilationKind::Optimized
            } else {
                return false;
            };
            true
        });
    }

    /// Returns `true` if artifacts for `file` at `version`/`profile` are missing
    /// from the cache or from disk.
    #[instrument(level = "trace", skip(self))]
    fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool {
        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        // The compiler has already processed this file and produced no artifacts:
        // nothing is considered missing.
        if entry.seen_by_compiler && entry.artifacts.is_empty() {
            trace!("no artifacts");
            return false;
        }

        // No artifact recorded for this (version, profile) combination.
        if !entry.contains(version, profile) {
            trace!("missing linked artifacts");
            return true;
        }

        // An artifact is recorded but its file was not read from disk.
        if entry.artifacts_for_version(version).any(|artifact| {
            let missing_artifact = !self.cached_artifacts.has_artifact(&artifact.path);
            if missing_artifact {
                trace!("missing artifact \"{}\"", artifact.path.display());
            }
            missing_artifact
        }) {
            return true;
        }

        false
    }

    /// Determines all dirty sources (changed content, dirty profiles, unreadable
    /// files, or importers of dirty files) and removes them from the cache.
    fn find_and_remove_dirty(&mut self) {
        /// Recursively marks all transitive importers of `file` as dirty.
        fn populate_dirty_files<D>(
            file: &Path,
            dirty_files: &mut HashSet<PathBuf>,
            edges: &GraphEdges<D>,
        ) {
            for file in edges.importers(file) {
                // Guard against cycles: only recurse into newly marked files.
                if !dirty_files.contains(file) {
                    dirty_files.insert(file.to_path_buf());
                    populate_dirty_files(file, dirty_files, edges);
                }
            }
        }

        let existing_profiles = self.project.settings_profiles().collect::<BTreeMap<_, _>>();

        // A profile is dirty when it no longer exists or its cached settings are
        // not reusable with the current settings.
        let mut dirty_profiles = HashSet::new();
        for (profile, settings) in &self.cache.profiles {
            if !existing_profiles.get(profile.as_str()).is_some_and(|p| p.can_use_cached(settings))
            {
                trace!("dirty profile: {}", profile);
                dirty_profiles.insert(profile.clone());
            }
        }

        for profile in &dirty_profiles {
            self.cache.profiles.remove(profile);
        }

        // Drop all artifacts belonging to dirty profiles; entries left without any
        // artifacts are removed entirely (entries that never had artifacts are kept).
        self.cache.files.retain(|_, entry| {
            if entry.artifacts.is_empty() {
                return true;
            }
            entry.artifacts.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|profile, _| !dirty_profiles.contains(profile));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });
            !entry.artifacts.is_empty()
        });

        // Store the current settings for any profile not yet in the cache.
        for (profile, settings) in existing_profiles {
            if !self.cache.profiles.contains_key(profile) {
                self.cache.profiles.insert(profile.to_string(), settings.clone());
            }
        }

        let files = self.cache.files.keys().cloned().collect::<HashSet<_>>();

        let mut sources = Sources::new();

        // Read all cached sources; unreadable files are dirty by definition.
        for file in &files {
            let Ok(source) = Source::read(file) else {
                self.dirty_sources.insert(file.clone());
                continue;
            };
            sources.insert(file.clone(), source);
        }

        if let Ok(graph) = Graph::<C::ParsedSource>::resolve_sources(&self.project.paths, sources) {
            let (sources, edges) = graph.into_sources();

            self.fill_hashes(&sources);

            for file in sources.keys() {
                if self.is_dirty_impl(file) {
                    self.dirty_sources.insert(file.clone());
                }
            }

            // Propagate dirtiness to all transitive importers of dirty files.
            // Clone the current set so we can extend it while iterating.
            for file in self.dirty_sources.clone().iter() {
                populate_dirty_files(file, &mut self.dirty_sources, &edges);
            }
        } else {
            // If the graph cannot be resolved, conservatively treat every cached
            // file as dirty.
            self.dirty_sources.extend(files);
        }

        for file in &self.dirty_sources {
            debug!("removing dirty file from cache: {}", file.display());
            self.cache.remove(file);
        }
    }

    /// Returns `true` if `file` itself is dirty: missing hash/entry, changed content,
    /// or a cached artifact the artifacts handler reports as dirty.
    fn is_dirty_impl(&self, file: &Path) -> bool {
        let Some(hash) = self.content_hashes.get(file) else {
            trace!("missing content hash");
            return true;
        };

        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        if entry.content_hash != *hash {
            trace!("content hash changed");
            return true;
        }

        // NOTE(review): this scans the cached artifacts of ALL files, not just
        // `file` — any dirty artifact anywhere marks this file dirty. Confirm this
        // global check is intended.
        for artifacts in self.cached_artifacts.values() {
            for artifacts in artifacts.values() {
                for artifact_file in artifacts {
                    // Errors from the handler are treated as dirty.
                    if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) {
                        return true;
                    }
                }
            }
        }

        false
    }

    /// Computes and stores content hashes for `sources`, keeping any hash that was
    /// already recorded.
    fn fill_hashes(&mut self, sources: &Sources) {
        for (file, source) in sources {
            if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) {
                entry.insert(source.content_hash());
            }
        }
    }
}
910
/// Compilation cache handle: either caching is disabled (`Ephemeral`) or backed by
/// the on-disk cache state (`Cached`).
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// Caching is disabled; only the graph edges and the project are retained.
    Ephemeral(GraphEdges<C::ParsedSource>, &'a Project<C, T>),
    /// Caching is enabled, backed by [`ArtifactsCacheInner`].
    Cached(ArtifactsCacheInner<'a, T, C>),
}
924
impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCache<'a, T, C>
{
    /// Creates the cache for `project`: a `Cached` variant (loading the on-disk
    /// cache, artifacts and build infos) when `project.cached` is set, otherwise
    /// an `Ephemeral` one.
    pub fn new(project: &'a Project<C, T>, edges: GraphEdges<C::ParsedSource>) -> Result<Self> {
        /// Returns the existing on-disk cache if it is usable, otherwise a fresh
        /// empty cache.
        fn get_cache<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>(
            project: &Project<C, T>,
            invalidate_cache: bool,
        ) -> CompilerCache<C::Settings> {
            // The cache file stores paths relative to the project root.
            let paths = project.paths.paths_relative();

            // Reuse the existing cache only if it is not invalidated, readable,
            // and was created with the same project paths.
            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
                    if cache.paths == paths {
                        return cache;
                    }
                }
            }

            CompilerCache::new(Default::default(), paths)
        }

        let cache = if project.cached {
            // Any unresolved import invalidates the entire cache.
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            let mut cache = get_cache(project, invalidate_cache);

            cache.remove_missing_files();

            // Read all cached artifacts from disk if the artifacts dir exists;
            // a read failure falls back to an empty set rather than erroring.
            let mut cached_artifacts = if project.paths.artifacts.exists() {
                trace!("reading artifacts from cache...");
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            trace!("reading build infos from cache...");
            let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default();

            // Drop any cached artifact whose build info could not be loaded.
            cached_artifacts.0.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                cached_builds,
                edges,
                project,
                dirty_sources: Default::default(),
                content_hashes: Default::default(),
                sources_in_scope: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    /// Returns the graph edges of the resolved sources.
    pub fn graph(&self) -> &GraphEdges<C::ParsedSource> {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    /// Returns the inner cache state, if caching is enabled. Test helper only.
    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> {
        match self {
            ArtifactsCache::Ephemeral(..) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    /// Returns the output context derived from the cache (empty when ephemeral).
    pub fn output_ctx(&self) -> OutputContext<'_> {
        match self {
            ArtifactsCache::Ephemeral(..) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    /// Returns the project this cache belongs to.
    pub fn project(&self) -> &'a Project<C, T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    /// Finds and evicts all dirty sources from the cache. No-op when ephemeral.
    pub fn remove_dirty_sources(&mut self) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.find_and_remove_dirty(),
        }
    }

    /// Filters `sources` down to those needing compilation for `version`/`profile`.
    /// No-op when ephemeral (everything is compiled).
    pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile),
        }
    }

    /// Consumes the cache, reconciling it with the freshly written output, and
    /// returns the still-valid cached artifacts and builds. When `write_to_disk`
    /// is set, the updated cache file is persisted (with paths stored relative).
    pub fn consume<A>(
        self,
        written_artifacts: &Artifacts<A>,
        written_build_infos: &Vec<RawBuildInfo<C::Language>>,
        write_to_disk: bool,
    ) -> Result<(Artifacts<A>, Builds<C::Language>)>
    where
        T: ArtifactOutput<Artifact = A>,
    {
        let ArtifactsCache::Cached(cache) = self else {
            trace!("no cache configured, ephemeral");
            return Ok(Default::default());
        };

        let ArtifactsCacheInner {
            mut cache,
            mut cached_artifacts,
            cached_builds,
            dirty_sources,
            sources_in_scope,
            project,
            ..
        } = cache;

        // Keep a cached artifact only if its file was in scope for this run, the
        // source was not dirty, and the artifact was not just rewritten.
        cached_artifacts.0.retain(|file, artifacts| {
            let file = Path::new(file);
            artifacts.retain(|name, artifacts| {
                artifacts.retain(|artifact| {
                    let version = &artifact.version;

                    if !sources_in_scope.contains(file, version) {
                        return false;
                    }
                    if dirty_sources.contains(file) {
                        return false;
                    }
                    if written_artifacts.find_artifact(file, name, version).is_some() {
                        return false;
                    }
                    true
                });
                !artifacts.is_empty()
            });
            !artifacts.is_empty()
        });

        // Merge freshly written artifacts into their cache entries.
        for (file, artifacts) in written_artifacts.as_ref() {
            let file_path = Path::new(file);
            if let Some(entry) = cache.files.get_mut(file_path) {
                entry.merge_artifacts(artifacts);
            }
        }

        // Record the new build ids.
        for build_info in written_build_infos {
            cache.builds.insert(build_info.id.clone());
        }

        if write_to_disk {
            cache.remove_outdated_builds();
            // The cache file stores all paths relative to root/artifacts dir.
            cache
                .strip_entries_prefix(project.root())
                .strip_artifact_files_prefixes(project.artifacts_path());
            cache.write(project.cache_path())?;
        }

        Ok((cached_artifacts, cached_builds))
    }

    /// Marks `file` as having been processed by the compiler.
    pub fn compiler_seen(&mut self, file: &Path) {
        if let ArtifactsCache::Cached(cache) = self {
            if let Some(entry) = cache.cache.entry_mut(file) {
                entry.seen_by_compiler = true;
            }
        }
    }
}
1145}