use crate::{
    buildinfo::RawBuildInfo,
    compilers::{Compiler, CompilerSettings, Language},
    output::Builds,
    resolver::GraphEdges,
    ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
    ProjectPaths, ProjectPathsConfig, SourceCompilationKind,
};
use foundry_compilers_artifacts::{
    sources::{Source, Sources},
    Settings,
};
use foundry_compilers_core::{
    error::{Result, SolcError},
    utils::{self, strip_prefix},
};
use semver::Version;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use std::{
    collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
    fs,
    path::{Path, PathBuf},
    time::{Duration, UNIX_EPOCH},
};

mod iface;
use iface::interface_repr_hash;

const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4";

pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";

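/// The cache file persisted between compiler runs.
///
/// Tracks a [`CacheEntry`] per source file, the ids of all build-info files produced so far, the
/// settings profiles they were compiled with, and, when preprocessing is enabled, the set of
/// known mock files.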
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CompilerCache<S = Settings> {
    #[serde(rename = "_format")]
    pub format: String,
    pub paths: ProjectPaths,
    pub files: BTreeMap<PathBuf, CacheEntry>,
    pub builds: BTreeSet<String>,
    pub profiles: BTreeMap<String, S>,
    pub preprocessed: bool,
    pub mocks: HashSet<PathBuf>,
}

impl<S> CompilerCache<S> {
    pub fn new(format: String, paths: ProjectPaths, preprocessed: bool) -> Self {
        Self {
            format,
            paths,
            files: Default::default(),
            builds: Default::default(),
            profiles: Default::default(),
            preprocessed,
            mocks: Default::default(),
        }
    }
}

impl<S: CompilerSettings> CompilerCache<S> {
    pub fn is_empty(&self) -> bool {
        self.files.is_empty()
    }

    pub fn remove(&mut self, file: &Path) -> Option<CacheEntry> {
        self.files.remove(file)
    }

    pub fn len(&self) -> usize {
        self.files.len()
    }

    pub fn artifacts_len(&self) -> usize {
        self.entries().map(|entry| entry.artifacts().count()).sum()
    }

    pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
        self.files.values()
    }

    pub fn entry(&self, file: &Path) -> Option<&CacheEntry> {
        self.files.get(file)
    }

    pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> {
        self.files.get_mut(file)
    }

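    /// Reads the cache entries from the JSON file at `path` as-is, without joining the stored
    /// relative paths; see [`Self::read_joined()`] for reading a project's cache file.
    ///
    /// Illustrative sketch only; the `solc::SolcSettings` import path and the cache location are
    /// assumptions:
    ///
    /// ```no_run
    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings};
    /// use std::path::Path;
    ///
    /// // Path is illustrative.
    /// let cache = CompilerCache::<SolcSettings>::read(Path::new("./cache/solidity-files-cache.json"))?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```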
    #[instrument(name = "CompilerCache::read", skip_all)]
    pub fn read(path: &Path) -> Result<Self> {
        trace!("reading solfiles cache at {}", path.display());
        let cache: Self = utils::read_json_file(path)?;
        trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
        Ok(cache)
    }

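    /// Reads the cache file for the given project paths and joins the relative entry paths with
    /// the project root and the artifact paths with the artifacts directory.
    ///
    /// Illustrative sketch only; the import paths and builder defaults are assumptions:
    ///
    /// ```no_run
    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
    ///
    /// // Assumes a default project layout and compiler.
    /// let project = Project::builder().build(Default::default())?;
    /// let cache: CompilerCache<SolcSettings> = CompilerCache::read_joined(&project.paths)?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```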
    pub fn read_joined<L>(paths: &ProjectPathsConfig<L>) -> Result<Self> {
        let mut cache = Self::read(&paths.cache)?;
        cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
        Ok(cache)
    }

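    /// Writes the cache as JSON to the given path, creating any missing parent directories.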
    #[instrument(name = "CompilerCache::write", skip_all)]
    pub fn write(&self, path: &Path) -> Result<()> {
        trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
        utils::create_parent_dir_all(path)?;
        utils::write_json_file(self, path, 128 * 1024)?;
        trace!("cache file located: \"{}\"", path.display());
        Ok(())
    }

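    /// Removes build ids that are no longer referenced by any cached artifact and deletes the
    /// corresponding build-info files from disk.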
    #[instrument(skip_all)]
    pub fn remove_outdated_builds(&mut self) {
        let mut outdated = Vec::new();
        for build_id in &self.builds {
            if !self
                .entries()
                .flat_map(|e| e.artifacts.values())
                .flat_map(|a| a.values())
                .flat_map(|a| a.values())
                .any(|a| a.build_id == *build_id)
            {
                outdated.push(build_id.to_owned());
            }
        }

        for build_id in outdated {
            self.builds.remove(&build_id);
            let path = self.paths.build_infos.join(build_id).with_extension("json");
            let _ = std::fs::remove_file(path);
        }
    }

    #[instrument(skip_all)]
    pub fn join_entries(&mut self, root: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (root.join(path), entry))
            .collect();
        self
    }

    #[instrument(skip_all)]
    pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
            .collect();
        self
    }

    #[instrument(skip_all)]
    pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
        self
    }

    #[instrument(skip_all)]
    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
        self
    }

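    /// Removes all entries whose source file no longer exists on disk.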
    #[instrument(skip_all)]
    pub fn remove_missing_files(&mut self) {
        trace!("remove non existing files from cache");
        self.files.retain(|file, _| {
            let exists = file.exists();
            if !exists {
                trace!("remove {} from cache", file.display());
            }
            exists
        })
    }

    pub fn all_artifacts_exist(&self) -> bool {
        self.files.values().all(|entry| entry.all_artifacts_exist())
    }

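    /// Consumes the cache and returns it with all cached file paths stripped of the `base`
    /// prefix where applicable.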
    pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
        self.files = self
            .files
            .into_iter()
            .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
            .collect();
        self
    }

    pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> {
        let entry = self.entry(contract_file)?;
        entry.find_artifact_path(contract_name)
    }

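    /// Reads the artifact identified by `contract_file` and `contract_name` from its cached
    /// artifact file, returning [`SolcError::ArtifactNotFound`] if no matching artifact is
    /// cached.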
    #[instrument(skip_all)]
    pub fn read_artifact<Artifact: DeserializeOwned>(
        &self,
        contract_file: &Path,
        contract_name: &str,
    ) -> Result<Artifact> {
        let artifact_path =
            self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
                SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
            })?;
        utils::read_json_file(artifact_path)
    }

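    /// Reads all cached artifact files from disk in parallel.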
    #[instrument(skip_all)]
    pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
        &self,
    ) -> Result<Artifacts<Artifact>> {
        use rayon::prelude::*;

        let artifacts = self
            .files
            .par_iter()
            .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files)))
            .collect::<Result<ArtifactsMap<_>>>()?;
        Ok(Artifacts(artifacts))
    }

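    /// Reads all build-info files referenced by this cache from `build_info_dir` in parallel.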
    #[instrument(skip_all)]
    pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
        use rayon::prelude::*;

        self.builds
            .par_iter()
            .map(|build_id| {
                utils::read_json_file(&build_info_dir.join(build_id).with_extension("json"))
                    .map(|b| (build_id.clone(), b))
            })
            .collect::<Result<_>>()
            .map(|b| Builds(b))
    }
}

#[cfg(feature = "async")]
impl<S: CompilerSettings> CompilerCache<S> {
    pub async fn async_read(path: &Path) -> Result<Self> {
        let path = path.to_owned();
        Self::asyncify(move || Self::read(&path)).await
    }

    pub async fn async_write(&self, path: &Path) -> Result<()> {
        let content = serde_json::to_vec(self)?;
        tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))
    }

    async fn asyncify<F, T>(f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T> + Send + 'static,
        T: Send + 'static,
    {
        match tokio::task::spawn_blocking(f).await {
            Ok(res) => res,
            Err(_) => Err(SolcError::io(std::io::Error::other("background task failed"), "")),
        }
    }
}

impl<S> Default for CompilerCache<S> {
    fn default() -> Self {
        Self {
            format: ETHERS_FORMAT_VERSION.to_string(),
            builds: Default::default(),
            files: Default::default(),
            paths: Default::default(),
            profiles: Default::default(),
            preprocessed: false,
            mocks: Default::default(),
        }
    }
}

impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache<S> {
    fn from(config: &'a ProjectPathsConfig) -> Self {
        let paths = config.paths_relative();
        Self::new(Default::default(), paths, false)
    }
}

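/// A cached artifact: the path to the artifact file and the id of the build that produced it.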
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CachedArtifact {
    pub path: PathBuf,
    pub build_id: String,
}

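/// Cached artifacts keyed by contract name, then compiler version, then settings profile.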
pub type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;

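/// A cache entry for a single source file.
///
/// Stores the file's last modification time, its content hash (and, for preprocessed projects,
/// the hash of its interface representation), its source name and imports, the detected version
/// requirement, and all artifacts produced from it.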
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    pub last_modification_date: u64,
    pub content_hash: String,
    pub interface_repr_hash: Option<String>,
    pub source_name: PathBuf,
    pub imports: BTreeSet<PathBuf>,
    pub version_requirement: Option<String>,
    pub artifacts: CachedArtifacts,
    pub seen_by_compiler: bool,
}

impl CacheEntry {
    pub fn last_modified(&self) -> Duration {
        Duration::from_millis(self.last_modification_date)
    }

    pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> {
        self.artifacts
            .get(contract_name)?
            .iter()
            .next()
            .and_then(|(_, a)| a.iter().next())
            .map(|(_, p)| p.path.as_path())
    }

    pub fn read_last_modification_date(file: &Path) -> Result<u64> {
        let last_modification_date = fs::metadata(file)
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .modified()
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .duration_since(UNIX_EPOCH)
            .map_err(SolcError::msg)?
            .as_millis() as u64;
        Ok(last_modification_date)
    }

    #[instrument(skip_all)]
    fn read_artifact_files<Artifact: DeserializeOwned>(
        &self,
    ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
        let mut artifacts = BTreeMap::new();
        for (artifact_name, versioned_files) in self.artifacts.iter() {
            let mut files = Vec::with_capacity(versioned_files.len());
            for (version, cached_artifact) in versioned_files {
                for (profile, cached_artifact) in cached_artifact {
                    let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?;
                    files.push(ArtifactFile {
                        artifact,
                        file: cached_artifact.path.clone(),
                        version: version.clone(),
                        build_id: cached_artifact.build_id.clone(),
                        profile: profile.clone(),
                    });
                }
            }
            artifacts.insert(artifact_name.clone(), files);
        }
        Ok(artifacts)
    }

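    /// Merges the given newly written artifacts into this entry, keyed by contract name,
    /// compiler version and settings profile.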
    #[instrument(skip_all)]
    pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
    where
        I: IntoIterator<Item = (&'a String, A)>,
        A: IntoIterator<Item = &'a ArtifactFile<T>>,
    {
        for (name, artifacts) in artifacts.into_iter() {
            for artifact in artifacts {
                self.artifacts
                    .entry(name.clone())
                    .or_default()
                    .entry(artifact.version.clone())
                    .or_default()
                    .insert(
                        artifact.profile.clone(),
                        CachedArtifact {
                            build_id: artifact.build_id.clone(),
                            path: artifact.file.clone(),
                        },
                    );
            }
        }
    }

    pub fn contains(&self, version: &Version, profile: &str) -> bool {
        self.artifacts.values().any(|artifacts| {
            artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some()
        })
    }

    pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &str, &CachedArtifact)> {
        self.artifacts
            .values()
            .flatten()
            .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a)))
    }

    pub fn find_artifact(
        &self,
        contract: &str,
        version: &Version,
        profile: &str,
    ) -> Option<&CachedArtifact> {
        self.artifacts
            .get(contract)
            .and_then(|files| files.get(version))
            .and_then(|files| files.get(profile))
    }

    pub fn artifacts_for_version<'a>(
        &'a self,
        version: &'a Version,
    ) -> impl Iterator<Item = &'a CachedArtifact> + 'a {
        self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file))
    }

    pub fn artifacts(&self) -> impl Iterator<Item = &CachedArtifact> {
        self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values)
    }

    pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut CachedArtifact> {
        self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut)
    }

    pub fn all_artifacts_exist(&self) -> bool {
        self.artifacts().all(|a| a.path.exists())
    }

    pub fn join_artifacts_files(&mut self, base: &Path) {
        self.artifacts_mut().for_each(|a| a.path = base.join(&a.path))
    }

    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) {
        self.artifacts_mut().for_each(|a| {
            if let Ok(rem) = a.path.strip_prefix(base) {
                a.path = rem.to_path_buf();
            }
        })
    }
}

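/// Helper type that tracks, for every source file, the compiler versions it is compiled with.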
#[derive(Clone, Debug, Default)]
pub struct GroupedSources {
    pub inner: HashMap<PathBuf, HashSet<Version>>,
}

impl GroupedSources {
    pub fn insert(&mut self, file: PathBuf, version: Version) {
        match self.inner.entry(file) {
            hash_map::Entry::Occupied(mut entry) => {
                entry.get_mut().insert(version);
            }
            hash_map::Entry::Vacant(entry) => {
                entry.insert(HashSet::from([version]));
            }
        }
    }

    pub fn contains(&self, file: &Path, version: &Version) -> bool {
        self.inner.get(file).is_some_and(|versions| versions.contains(version))
    }
}

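/// The mutable state of a cached compile run: the deserialized cache file, the artifacts and
/// build infos read from disk, the resolved import graph, and the bookkeeping (dirty sources,
/// sources in scope, content and interface hashes) used to decide what must be recompiled.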
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    pub cache: CompilerCache<C::Settings>,

    pub cached_artifacts: Artifacts<T::Artifact>,

    pub cached_builds: Builds<C::Language>,

    pub edges: GraphEdges<C::ParsedSource>,

    pub project: &'a Project<C, T>,

    pub dirty_sources: HashSet<PathBuf>,

    pub sources_in_scope: GroupedSources,

    pub content_hashes: HashMap<PathBuf, String>,

    pub interface_repr_hashes: HashMap<PathBuf, String>,
}

impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCacheInner<'_, T, C>
{
    fn is_source_file(&self, file: &Path) -> bool {
        self.project.paths.is_source_file(file)
    }

    fn create_cache_entry(&mut self, file: PathBuf, source: &Source) {
        let imports = self
            .edges
            .imports(&file)
            .into_iter()
            .map(|import| strip_prefix(import, self.project.root()).into())
            .collect();

        let interface_repr_hash = (self.cache.preprocessed && self.is_source_file(&file))
            .then(|| self.interface_repr_hash(source, &file).to_string());

        let entry = CacheEntry {
            last_modification_date: CacheEntry::read_last_modification_date(&file)
                .unwrap_or_default(),
            content_hash: source.content_hash(),
            interface_repr_hash,
            source_name: strip_prefix(&file, self.project.root()).into(),
            imports,
            version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()),
            artifacts: Default::default(),
            seen_by_compiler: false,
        };

        self.cache.files.insert(file, entry);
    }

    fn content_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.content_hashes.entry(file.to_path_buf()).or_insert_with(|| source.content_hash())
    }

    fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| {
            if let Some(r) = interface_repr_hash(&source.content, file) {
                return r;
            }
            self.content_hashes
                .entry(file.to_path_buf())
                .or_insert_with(|| source.content_hash())
                .clone()
        })
    }

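    /// Marks which of the given sources must be compiled for the given version and profile.
    ///
    /// Files with missing artifacts are compiled with [`SourceCompilationKind::Complete`]; their
    /// imports that are otherwise clean are compiled with [`SourceCompilationKind::Optimized`];
    /// all remaining files are removed from the compilation job.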
    fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        let mut compile_complete = HashSet::new();
        let mut compile_optimized = HashSet::new();

        for (file, source) in sources.iter() {
            self.sources_in_scope.insert(file.clone(), version.clone());

            if self.is_missing_artifacts(file, version, profile) {
                compile_complete.insert(file.to_path_buf());
            }

            if !self.cache.files.contains_key(file) {
                self.create_cache_entry(file.clone(), source);
            }
        }

        for source in &compile_complete {
            for import in self.edges.imports(source) {
                if !compile_complete.contains(import) {
                    compile_optimized.insert(import);
                }
            }
        }

        sources.retain(|file, source| {
            source.kind = if compile_complete.contains(file.as_path()) {
                SourceCompilationKind::Complete
            } else if compile_optimized.contains(file.as_path()) {
                SourceCompilationKind::Optimized
            } else {
                return false;
            };
            true
        });
    }

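    /// Returns whether artifacts for the given file, version and profile are missing from the
    /// cache or from disk, i.e. whether the file must be compiled.
    ///
    /// A file the compiler has already seen but that produced no artifacts is not considered
    /// missing; a cached artifact whose extra output files are reported dirty by the artifacts
    /// handler is.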
    #[instrument(level = "trace", skip(self))]
    fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool {
        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        if entry.seen_by_compiler && entry.artifacts.is_empty() {
            trace!("no artifacts");
            return false;
        }

        if !entry.contains(version, profile) {
            trace!("missing linked artifacts");
            return true;
        }

        if entry.artifacts_for_version(version).any(|artifact| {
            let missing_artifact = !self.cached_artifacts.has_artifact(&artifact.path);
            if missing_artifact {
                trace!("missing artifact \"{}\"", artifact.path.display());
            }
            missing_artifact
        }) {
            return true;
        }

        self.missing_extra_files()
    }

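    /// Finds dirty settings profiles and dirty source files and evicts them from the cache.
    ///
    /// A profile is dirty when its cached settings can no longer be reused for the project's
    /// current settings; a source is dirty when it cannot be read or its content (or interface
    /// representation) hash changed, and dirtiness is propagated through the import graph, with
    /// the exact propagation depending on whether preprocessing is enabled.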
    fn find_and_remove_dirty(&mut self) {
        fn populate_dirty_files<D>(
            file: &Path,
            dirty_files: &mut HashSet<PathBuf>,
            edges: &GraphEdges<D>,
        ) {
            for file in edges.importers(file) {
                if !dirty_files.contains(file) {
                    dirty_files.insert(file.to_path_buf());
                    populate_dirty_files(file, dirty_files, edges);
                }
            }
        }

        let existing_profiles = self.project.settings_profiles().collect::<BTreeMap<_, _>>();

        let mut dirty_profiles = HashSet::new();
        for (profile, settings) in &self.cache.profiles {
            if !existing_profiles.get(profile.as_str()).is_some_and(|p| p.can_use_cached(settings))
            {
                trace!("dirty profile: {}", profile);
                dirty_profiles.insert(profile.clone());
            }
        }

        for profile in &dirty_profiles {
            self.cache.profiles.remove(profile);
        }

        self.cache.files.retain(|_, entry| {
            if entry.artifacts.is_empty() {
                return true;
            }
            entry.artifacts.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|profile, _| !dirty_profiles.contains(profile));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });
            !entry.artifacts.is_empty()
        });

        for (profile, settings) in existing_profiles {
            if !self.cache.profiles.contains_key(profile) {
                self.cache.profiles.insert(profile.to_string(), settings.clone());
            }
        }

        let files = self.cache.files.keys().cloned().collect::<HashSet<_>>();

        let mut sources = Sources::new();

        for file in &files {
            let Ok(source) = Source::read(file) else {
                self.dirty_sources.insert(file.clone());
                continue;
            };
            sources.insert(file.clone(), source);
        }

        if let Ok(graph) = Graph::<C::ParsedSource>::resolve_sources(&self.project.paths, sources) {
            let (sources, edges) = graph.into_sources();

            self.fill_hashes(&sources);

            for file in sources.keys() {
                if self.is_dirty_impl(file, false) {
                    self.dirty_sources.insert(file.clone());
                }
            }

            if !self.cache.preprocessed {
                for file in self.dirty_sources.clone().iter() {
                    populate_dirty_files(file, &mut self.dirty_sources, &edges);
                }
            } else {
                for file in sources.keys() {
                    if self.dirty_sources.contains(file) {
                        continue;
                    }
                    let is_src = self.is_source_file(file);
                    for import in edges.imports(file) {
                        if is_src && self.dirty_sources.contains(import) {
                            self.dirty_sources.insert(file.clone());
                            break;
                        } else if !is_src
                            && self.dirty_sources.contains(import)
                            && (!self.is_source_file(import)
                                || self.is_dirty_impl(import, true)
                                || self.cache.mocks.contains(file))
                        {
                            if self.cache.mocks.contains(file) {
                                populate_dirty_files(file, &mut self.dirty_sources, &edges);
                            } else {
                                self.dirty_sources.insert(file.clone());
                            }
                        }
                    }
                }
            }
        } else {
            self.dirty_sources.extend(files);
        }

        for file in &self.dirty_sources {
            debug!("removing dirty file from cache: {}", file.display());
            self.cache.remove(file);
        }
    }

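    /// Returns whether the given file is considered dirty, comparing either its interface
    /// representation hash (when `use_interface_repr` is set and preprocessing is enabled) or
    /// its content hash against the cached entry.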
    fn is_dirty_impl(&self, file: &Path, use_interface_repr: bool) -> bool {
        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        if use_interface_repr && self.cache.preprocessed {
            let Some(interface_hash) = self.interface_repr_hashes.get(file) else {
                trace!("missing interface hash");
                return true;
            };

            if entry.interface_repr_hash.as_ref() != Some(interface_hash) {
                trace!("interface hash changed");
                return true;
            };
        } else {
            let Some(content_hash) = self.content_hashes.get(file) else {
                trace!("missing content hash");
                return true;
            };

            if entry.content_hash != *content_hash {
                trace!("content hash changed");
                return true;
            }
        }

        false
    }

    fn fill_hashes(&mut self, sources: &Sources) {
        for (file, source) in sources {
            let _ = self.content_hash(source, file);

            if self.cache.preprocessed && self.project.paths.is_source_file(file) {
                let _ = self.interface_repr_hash(source, file);
            }
        }
    }

    fn missing_extra_files(&self) -> bool {
        for artifacts in self.cached_artifacts.values() {
            for artifacts in artifacts.values() {
                for artifact_file in artifacts {
                    if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) {
                        return true;
                    }
                }
            }
        }
        false
    }
}

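/// Abstraction over the project's cache that is either fully ephemeral (caching disabled for the
/// project) or backed by a [`CompilerCache`] read from disk.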
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    Ephemeral(GraphEdges<C::ParsedSource>, &'a Project<C, T>),
    Cached(ArtifactsCacheInner<'a, T, C>),
}

impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCache<'a, T, C>
{
    #[instrument(name = "ArtifactsCache::new", skip(project, edges))]
    pub fn new(
        project: &'a Project<C, T>,
        edges: GraphEdges<C::ParsedSource>,
        preprocessed: bool,
    ) -> Result<Self> {
        fn get_cache<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>(
            project: &Project<C, T>,
            invalidate_cache: bool,
            preprocessed: bool,
        ) -> CompilerCache<C::Settings> {
            let paths = project.paths.paths_relative();

            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
                    if cache.paths == paths && preprocessed == cache.preprocessed {
                        return cache;
                    }
                }
            }

            trace!(invalidate_cache, "cache invalidated");

            CompilerCache::new(Default::default(), paths, preprocessed)
        }

        let cache = if project.cached {
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            let mut cache = get_cache(project, invalidate_cache, preprocessed);

            cache.remove_missing_files();

            let mut cached_artifacts = if project.paths.artifacts.exists() {
                trace!("reading artifacts from cache...");
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            trace!("reading build infos from cache...");
            let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default();

            cached_artifacts.0.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                cached_builds,
                edges,
                project,
                dirty_sources: Default::default(),
                content_hashes: Default::default(),
                sources_in_scope: Default::default(),
                interface_repr_hashes: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    pub fn graph(&self) -> &GraphEdges<C::ParsedSource> {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> {
        match self {
            ArtifactsCache::Ephemeral(..) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    pub fn output_ctx(&self) -> OutputContext<'_> {
        match self {
            ArtifactsCache::Ephemeral(..) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    pub fn project(&self) -> &'a Project<C, T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    #[instrument(skip_all)]
    pub fn remove_dirty_sources(&mut self) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.find_and_remove_dirty(),
        }
    }

    pub fn update_mocks(&mut self, mocks: HashSet<PathBuf>) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.cache.mocks = mocks,
        }
    }

    pub fn mocks(&self) -> HashSet<PathBuf> {
        match self {
            ArtifactsCache::Ephemeral(..) => HashSet::default(),
            ArtifactsCache::Cached(cache) => cache.cache.mocks.clone(),
        }
    }

    #[instrument(name = "ArtifactsCache::filter", skip_all)]
    pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile),
        }
    }

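    /// Consumes the cache after compilation: drops cached artifacts that are out of scope, dirty,
    /// or superseded by newly written ones, merges the written artifacts and build ids into the
    /// cache, and, if `write_to_disk` is set, persists the cache file with paths stripped
    /// relative to the project root and artifacts directory.
    ///
    /// Returns the remaining cached artifacts and build infos.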
    #[instrument(name = "ArtifactsCache::consume", skip_all)]
    pub fn consume<A>(
        self,
        written_artifacts: &Artifacts<A>,
        written_build_infos: &Vec<RawBuildInfo<C::Language>>,
        write_to_disk: bool,
    ) -> Result<(Artifacts<A>, Builds<C::Language>)>
    where
        T: ArtifactOutput<Artifact = A>,
    {
        let ArtifactsCache::Cached(cache) = self else {
            trace!("no cache configured, ephemeral");
            return Ok(Default::default());
        };

        let ArtifactsCacheInner {
            mut cache,
            mut cached_artifacts,
            cached_builds,
            dirty_sources,
            sources_in_scope,
            project,
            ..
        } = cache;

        cached_artifacts.0.retain(|file, artifacts| {
            let file = Path::new(file);
            artifacts.retain(|name, artifacts| {
                artifacts.retain(|artifact| {
                    let version = &artifact.version;

                    if !sources_in_scope.contains(file, version) {
                        return false;
                    }
                    if dirty_sources.contains(file) {
                        return false;
                    }
                    if written_artifacts.find_artifact(file, name, version).is_some() {
                        return false;
                    }
                    true
                });
                !artifacts.is_empty()
            });
            !artifacts.is_empty()
        });

        for (file, artifacts) in written_artifacts.as_ref() {
            let file_path = Path::new(file);
            if let Some(entry) = cache.files.get_mut(file_path) {
                entry.merge_artifacts(artifacts);
            }
        }

        for build_info in written_build_infos {
            cache.builds.insert(build_info.id.clone());
        }

        if write_to_disk {
            cache.remove_outdated_builds();
            cache
                .strip_entries_prefix(project.root())
                .strip_artifact_files_prefixes(project.artifacts_path());
            cache.write(project.cache_path())?;
        }

        Ok((cached_artifacts, cached_builds))
    }

    pub fn compiler_seen(&mut self, file: &Path) {
        if let ArtifactsCache::Cached(cache) = self {
            if let Some(entry) = cache.cache.entry_mut(file) {
                entry.seen_by_compiler = true;
            }
        }
    }
}