1use crate::{
4 buildinfo::RawBuildInfo,
5 compilers::{Compiler, CompilerSettings, Language},
6 output::Builds,
7 resolver::GraphEdges,
8 ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
9 ProjectPaths, ProjectPathsConfig, SourceCompilationKind, SourceParser,
10};
11use foundry_compilers_artifacts::{
12 sources::{Source, Sources},
13 Settings,
14};
15use foundry_compilers_core::{
16 error::{Result, SolcError},
17 utils::{self, strip_prefix},
18};
19use semver::Version;
20use serde::{de::DeserializeOwned, Deserialize, Serialize};
21use std::{
22 collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
23 fs,
24 path::{Path, PathBuf},
25 time::{Duration, UNIX_EPOCH},
26};
27
28mod iface;
29use iface::interface_repr_hash;
30
/// The format version of the cache file, serialized into the `_format` field
/// (see [`CompilerCache`]); used to detect incompatible cache layouts.
const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4";

/// The file name of the default cache file.
pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
40
/// The deserialized cache file: tracks, per source file, its metadata and the
/// artifacts produced from it, plus project-wide bookkeeping.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CompilerCache<S = Settings> {
    /// Cache format version marker, serialized as `_format`.
    #[serde(rename = "_format")]
    pub format: String,
    /// The project paths the cached entry paths are resolved against.
    pub paths: ProjectPaths,
    /// One cache entry per source file path.
    pub files: BTreeMap<PathBuf, CacheEntry>,
    /// Ids of builds for which `<id>.json` build-info files exist in
    /// `paths.build_infos` (see `read_builds` / `remove_outdated_builds`).
    pub builds: BTreeSet<String>,
    /// Compiler settings, keyed by profile name.
    pub profiles: BTreeMap<String, S>,
    /// Whether this cache was produced with preprocessing (interface-repr
    /// hashing) enabled; affects dirtiness propagation.
    pub preprocessed: bool,
    /// Paths of files treated as mocks; a dirty mock re-dirties its importers
    /// (see `ArtifactsCacheInner::remove_dirty_sources`).
    pub mocks: HashSet<PathBuf>,
}
54
55impl<S> CompilerCache<S> {
56 pub fn new(format: String, paths: ProjectPaths, preprocessed: bool) -> Self {
58 Self {
59 format,
60 paths,
61 files: Default::default(),
62 builds: Default::default(),
63 profiles: Default::default(),
64 preprocessed,
65 mocks: Default::default(),
66 }
67 }
68}
69
impl<S: CompilerSettings> CompilerCache<S> {
    /// Returns `true` if the cache has no file entries.
    pub fn is_empty(&self) -> bool {
        self.files.is_empty()
    }

    /// Removes and returns the cache entry for `file`, if present.
    pub fn remove(&mut self, file: &Path) -> Option<CacheEntry> {
        self.files.remove(file)
    }

    /// Returns the number of file entries in the cache.
    pub fn len(&self) -> usize {
        self.files.len()
    }

    /// Returns the total number of cached artifacts across all entries.
    pub fn artifacts_len(&self) -> usize {
        self.entries().map(|entry| entry.artifacts().count()).sum()
    }

    /// Returns an iterator over all cache entries.
    pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
        self.files.values()
    }

    /// Returns the cache entry for `file`, if present.
    pub fn entry(&self, file: &Path) -> Option<&CacheEntry> {
        self.files.get(file)
    }

    /// Returns a mutable reference to the cache entry for `file`, if present.
    pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> {
        self.files.get_mut(file)
    }

    /// Reads and deserializes the cache JSON file at `path`.
    ///
    /// # Errors
    ///
    /// Fails if the file cannot be read or deserialized.
    #[instrument(name = "CompilerCache::read", err)]
    pub fn read(path: &Path) -> Result<Self> {
        let cache: Self = utils::read_json_file(path)?;
        trace!(cache.format, cache.files = cache.files.len(), "read cache");
        Ok(cache)
    }

    /// Reads the cache file located at `paths.cache` and converts the stored
    /// relative paths back to absolute ones: entry keys are joined with the
    /// project root, artifact files with the artifacts dir.
    pub fn read_joined<L>(paths: &ProjectPathsConfig<L>) -> Result<Self> {
        let mut cache = Self::read(&paths.cache)?;
        cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
        Ok(cache)
    }

    /// Serializes the cache as JSON and writes it to `path`, creating parent
    /// directories as needed.
    #[instrument(name = "CompilerCache::write", skip_all)]
    pub fn write(&self, path: &Path) -> Result<()> {
        trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
        utils::create_parent_dir_all(path)?;
        // 128 KiB write buffer for the JSON serializer.
        utils::write_json_file(self, path, 128 * 1024)?;
        trace!("cache file located: \"{}\"", path.display());
        Ok(())
    }

    /// Removes build ids (and their on-disk `<id>.json` build-info files) that
    /// are no longer referenced by any cached artifact.
    #[instrument(skip_all)]
    pub fn remove_outdated_builds(&mut self) {
        let mut outdated = Vec::new();
        for build_id in &self.builds {
            // NOTE(review): rescans every artifact per build id
            // (O(builds * artifacts)); fine for typical cache sizes.
            if !self
                .entries()
                .flat_map(|e| e.artifacts.values())
                .flat_map(|a| a.values())
                .flat_map(|a| a.values())
                .any(|a| a.build_id == *build_id)
            {
                outdated.push(build_id.to_owned());
            }
        }

        for build_id in outdated {
            self.builds.remove(&build_id);
            let path = self.paths.build_infos.join(build_id).with_extension("json");
            // Best effort: a missing build-info file is not an error.
            let _ = std::fs::remove_file(path);
        }
    }

    /// Joins all entry path keys with `root`, turning relative keys into
    /// absolute paths.
    #[instrument(skip_all)]
    pub fn join_entries(&mut self, root: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (root.join(path), entry))
            .collect();
        self
    }

    /// Strips `base` from all entry path keys; keys that are not under `base`
    /// are kept unchanged.
    #[instrument(skip_all)]
    pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
            .collect();
        self
    }

    /// Joins all artifact file paths of every entry with `base`.
    #[instrument(skip_all)]
    pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
        self
    }

    /// Strips `base` from all artifact file paths of every entry.
    #[instrument(skip_all)]
    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
        self
    }

    /// Removes all cache entries whose source file no longer exists on disk.
    #[instrument(skip_all)]
    pub fn remove_missing_files(&mut self) {
        trace!("remove non existing files from cache");
        self.files.retain(|file, _| {
            let exists = file.exists();
            if !exists {
                trace!("remove {} from cache", file.display());
            }
            exists
        })
    }

    /// Returns `true` if every artifact file of every entry exists on disk.
    pub fn all_artifacts_exist(&self) -> bool {
        self.files.values().all(|entry| entry.all_artifacts_exist())
    }

    /// Returns a copy of the cache with `base` stripped from all entry path
    /// keys (via [`utils::source_name`]).
    pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
        self.files = self
            .files
            .into_iter()
            .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
            .collect();
        self
    }

    /// Returns the path of the first artifact cached for `contract_name` in
    /// `contract_file`, if any.
    pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> {
        let entry = self.entry(contract_file)?;
        entry.find_artifact_path(contract_name)
    }

    /// Reads the cached artifact JSON for `contract_name` in `contract_file`
    /// from disk and deserializes it into `Artifact`.
    ///
    /// # Errors
    ///
    /// Returns [`SolcError::ArtifactNotFound`] if no artifact path is cached,
    /// or an IO/deserialization error if reading the file fails.
    #[instrument(skip_all)]
    pub fn read_artifact<Artifact: DeserializeOwned>(
        &self,
        contract_file: &Path,
        contract_name: &str,
    ) -> Result<Artifact> {
        let artifact_path =
            self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
                SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
            })?;
        utils::read_json_file(artifact_path)
    }

    /// Reads all cached artifacts from disk, in parallel (rayon).
    #[instrument(skip_all)]
    pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
        &self,
    ) -> Result<Artifacts<Artifact>> {
        use rayon::prelude::*;

        let artifacts = self
            .files
            .par_iter()
            .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files)))
            .collect::<Result<ArtifactsMap<_>>>()?;
        Ok(Artifacts(artifacts))
    }

    /// Reads all cached build-info files (`<build_id>.json` inside
    /// `build_info_dir`) from disk, in parallel (rayon).
    #[instrument(skip_all)]
    pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
        use rayon::prelude::*;

        self.builds
            .par_iter()
            .map(|build_id| {
                utils::read_json_file(&build_info_dir.join(build_id).with_extension("json"))
                    .map(|b| (build_id.clone(), b))
            })
            .collect::<Result<_>>()
            .map(|b| Builds(b))
    }
}
361
#[cfg(feature = "async")]
impl<S: CompilerSettings> CompilerCache<S> {
    /// Reads and deserializes the cache JSON file at `path` on a blocking
    /// background task.
    pub async fn async_read(path: &Path) -> Result<Self> {
        let path = path.to_owned();
        Self::asyncify(move || Self::read(&path)).await
    }

    /// Serializes the cache as JSON and writes it to `path` asynchronously.
    ///
    /// Creates the parent directory first, mirroring the sync
    /// [`CompilerCache::write`] (which calls `utils::create_parent_dir_all`),
    /// so writing into a not-yet-existing cache dir does not fail.
    pub async fn async_write(&self, path: &Path) -> Result<()> {
        if let Some(parent) = path.parent() {
            tokio::fs::create_dir_all(parent)
                .await
                .map_err(|err| SolcError::io(err, parent))?;
        }
        let content = serde_json::to_vec(self)?;
        tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))
    }

    /// Runs the blocking closure `f` on tokio's blocking thread pool and
    /// forwards its result; a panicked/cancelled task is mapped to an IO error.
    async fn asyncify<F, T>(f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T> + Send + 'static,
        T: Send + 'static,
    {
        match tokio::task::spawn_blocking(f).await {
            Ok(res) => res,
            Err(_) => Err(SolcError::io(std::io::Error::other("background task failed"), "")),
        }
    }
}
385
386impl<S> Default for CompilerCache<S> {
387 fn default() -> Self {
388 Self {
389 format: ETHERS_FORMAT_VERSION.to_string(),
390 builds: Default::default(),
391 files: Default::default(),
392 paths: Default::default(),
393 profiles: Default::default(),
394 preprocessed: false,
395 mocks: Default::default(),
396 }
397 }
398}
399
400impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache<S> {
401 fn from(config: &'a ProjectPathsConfig) -> Self {
402 let paths = config.paths_relative();
403 Self::new(Default::default(), paths, false)
404 }
405}
406
/// A cached artifact: where it lives on disk and which build produced it.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CachedArtifact {
    /// Path to the artifact JSON file.
    pub path: PathBuf,
    /// Id of the build that produced this artifact; must match an entry in
    /// [`CompilerCache::builds`] to be considered valid.
    pub build_id: String,
}
415
/// Artifact name -> compiler version -> profile name -> cached artifact.
pub type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;
417
/// A cache entry for a single source file.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    /// Mtime of the source file, in milliseconds since the unix epoch.
    pub last_modification_date: u64,
    /// Hash of the source file's content.
    pub content_hash: String,
    /// Hash of the file's interface representation; only populated for files
    /// in the sources dir when preprocessing is enabled.
    pub interface_repr_hash: Option<String>,
    /// Source file path, relative to the project root.
    pub source_name: PathBuf,
    /// Resolved imports of this file, relative to the project root.
    pub imports: BTreeSet<PathBuf>,
    /// Version requirement resolved for this file by the graph, if any.
    pub version_requirement: Option<String>,
    /// Artifact name -> version -> profile -> artifact produced from this file.
    pub artifacts: CachedArtifacts,
    /// Whether the compiler processed this file at least once; a seen file
    /// with no artifacts is treated as legitimately artifact-free (see
    /// `ArtifactsCacheInner::is_missing_artifacts_impl`).
    pub seen_by_compiler: bool,
}
459
impl CacheEntry {
    /// Returns the stored mtime as a [`Duration`] since the unix epoch
    /// (the stored value is in milliseconds).
    pub fn last_modified(&self) -> Duration {
        Duration::from_millis(self.last_modification_date)
    }

    /// Returns the artifact path of the first (version, profile) artifact
    /// cached for `contract_name`, if any.
    pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> {
        self.artifacts
            .get(contract_name)?
            .iter()
            .next()
            .and_then(|(_, a)| a.iter().next())
            .map(|(_, p)| p.path.as_path())
    }

    /// Reads `file`'s filesystem mtime and converts it to milliseconds since
    /// the unix epoch.
    ///
    /// # Errors
    ///
    /// Returns an IO error if the metadata cannot be read, or an error if the
    /// mtime predates the unix epoch.
    pub fn read_last_modification_date(file: &Path) -> Result<u64> {
        let last_modification_date = fs::metadata(file)
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .modified()
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .duration_since(UNIX_EPOCH)
            .map_err(SolcError::msg)?
            .as_millis() as u64;
        Ok(last_modification_date)
    }

    /// Reads all artifact files of this entry from disk, grouped by artifact
    /// name; each file is deserialized into `Artifact`.
    #[instrument(skip_all)]
    fn read_artifact_files<Artifact: DeserializeOwned>(
        &self,
    ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
        let mut artifacts = BTreeMap::new();
        for (artifact_name, versioned_files) in self.artifacts.iter() {
            let mut files = Vec::with_capacity(versioned_files.len());
            for (version, cached_artifact) in versioned_files {
                for (profile, cached_artifact) in cached_artifact {
                    let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?;
                    files.push(ArtifactFile {
                        artifact,
                        file: cached_artifact.path.clone(),
                        version: version.clone(),
                        build_id: cached_artifact.build_id.clone(),
                        profile: profile.clone(),
                    });
                }
            }
            artifacts.insert(artifact_name.clone(), files);
        }
        Ok(artifacts)
    }

    /// Merges freshly written artifact files into this entry's
    /// name -> version -> profile mapping, overwriting existing slots.
    #[instrument(skip_all)]
    pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
    where
        I: IntoIterator<Item = (&'a String, A)>,
        A: IntoIterator<Item = &'a ArtifactFile<T>>,
    {
        for (name, artifacts) in artifacts.into_iter() {
            for artifact in artifacts {
                self.artifacts
                    .entry(name.clone())
                    .or_default()
                    .entry(artifact.version.clone())
                    .or_default()
                    .insert(
                        artifact.profile.clone(),
                        CachedArtifact {
                            build_id: artifact.build_id.clone(),
                            path: artifact.file.clone(),
                        },
                    );
            }
        }
    }

    /// Returns `true` if any artifact of this entry was compiled with the
    /// given `version` under `profile`.
    pub fn contains(&self, version: &Version, profile: &str) -> bool {
        self.artifacts.values().any(|artifacts| {
            artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some()
        })
    }

    /// Iterates over all (version, profile, artifact) triples of this entry.
    pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &str, &CachedArtifact)> {
        self.artifacts
            .values()
            .flatten()
            .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a)))
    }

    /// Returns the artifact cached for `contract` with the given `version`
    /// under `profile`, if any.
    pub fn find_artifact(
        &self,
        contract: &str,
        version: &Version,
        profile: &str,
    ) -> Option<&CachedArtifact> {
        self.artifacts
            .get(contract)
            .and_then(|files| files.get(version))
            .and_then(|files| files.get(profile))
    }

    /// Iterates over all artifacts compiled with the given `version`
    /// (any profile).
    pub fn artifacts_for_version<'a>(
        &'a self,
        version: &'a Version,
    ) -> impl Iterator<Item = &'a CachedArtifact> + 'a {
        self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file))
    }

    /// Iterates over all artifacts of this entry, across versions and
    /// profiles.
    pub fn artifacts(&self) -> impl Iterator<Item = &CachedArtifact> {
        self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values)
    }

    /// Mutable variant of [`CacheEntry::artifacts`].
    pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut CachedArtifact> {
        self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut)
    }

    /// Returns `true` if every artifact file of this entry exists on disk.
    pub fn all_artifacts_exist(&self) -> bool {
        self.artifacts().all(|a| a.path.exists())
    }

    /// Joins all artifact file paths with `base`.
    pub fn join_artifacts_files(&mut self, base: &Path) {
        self.artifacts_mut().for_each(|a| a.path = base.join(&a.path))
    }

    /// Strips `base` from all artifact file paths; paths not under `base` are
    /// kept unchanged.
    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) {
        self.artifacts_mut().for_each(|a| {
            if let Ok(rem) = a.path.strip_prefix(base) {
                a.path = rem.to_path_buf();
            }
        })
    }
}
617
/// Helper mapping a set of source files to the compiler versions they are
/// (to be) compiled with.
#[derive(Clone, Debug, Default)]
pub struct GroupedSources {
    /// file path -> set of versions the file is compiled with.
    pub inner: HashMap<PathBuf, HashSet<Version>>,
}
623
624impl GroupedSources {
625 pub fn insert(&mut self, file: PathBuf, version: Version) {
627 match self.inner.entry(file) {
628 hash_map::Entry::Occupied(mut entry) => {
629 entry.get_mut().insert(version);
630 }
631 hash_map::Entry::Vacant(entry) => {
632 entry.insert(HashSet::from([version]));
633 }
634 }
635 }
636
637 pub fn contains(&self, file: &Path, version: &Version) -> bool {
639 self.inner.get(file).is_some_and(|versions| versions.contains(version))
640 }
641}
642
/// Mutable state of a cached compilation run: the deserialized cache file,
/// previously compiled artifacts/builds read back from disk, and bookkeeping
/// used for dirtiness tracking.
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// The deserialized cache file.
    pub cache: CompilerCache<C::Settings>,

    /// All artifacts read back from disk for the cached entries.
    pub cached_artifacts: Artifacts<T::Artifact>,

    /// All build infos read back from disk.
    pub cached_builds: Builds<C::Language>,

    /// Resolved import graph of the current compilation's sources.
    pub edges: GraphEdges<C::Parser>,

    /// The project this cache belongs to.
    pub project: &'a Project<C, T>,

    /// Files determined to be dirty, i.e. in need of recompilation.
    pub dirty_sources: HashSet<PathBuf>,

    /// All files (and the versions they compile with) seen while filtering.
    pub sources_in_scope: GroupedSources,

    /// Lazily computed content hashes, keyed by file path.
    pub content_hashes: HashMap<PathBuf, String>,

    /// Lazily computed interface-representation hashes, keyed by file path.
    pub interface_repr_hashes: HashMap<PathBuf, String>,
}
681
impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCacheInner<'_, T, C>
{
    /// Returns `true` if `file` is located inside the project's sources dir.
    fn is_source_file(&self, file: &Path) -> bool {
        self.project.paths.is_source_file(file)
    }

    /// Creates and inserts a fresh cache entry for `file`, with all stored
    /// paths made relative to the project root.
    fn create_cache_entry(&mut self, file: PathBuf, source: &Source) {
        let imports = self
            .edges
            .imports(&file)
            .into_iter()
            .map(|import| strip_prefix(import, self.project.root()).into())
            .collect();

        // Interface-repr hashes are only tracked for files in the sources dir
        // and only when preprocessing is enabled.
        let interface_repr_hash = (self.cache.preprocessed && self.is_source_file(&file))
            .then(|| self.interface_repr_hash(source, &file).to_string());

        let entry = CacheEntry {
            // An unreadable mtime falls back to 0 rather than failing.
            last_modification_date: CacheEntry::read_last_modification_date(&file)
                .unwrap_or_default(),
            content_hash: source.content_hash(),
            interface_repr_hash,
            source_name: strip_prefix(&file, self.project.root()).into(),
            imports,
            version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()),
            artifacts: Default::default(),
            seen_by_compiler: false,
        };

        self.cache.files.insert(file, entry);
    }

    /// Returns the content hash of `source`, computing and memoizing it on
    /// first use.
    fn content_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.content_hashes.entry(file.to_path_buf()).or_insert_with(|| source.content_hash())
    }

    /// Returns the interface-representation hash of `source`, computing and
    /// memoizing it on first use; falls back to the (memoized) content hash
    /// when no interface representation can be derived.
    fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| {
            if let Some(r) = interface_repr_hash(&source.content, file) {
                return r;
            }
            self.content_hashes
                .entry(file.to_path_buf())
                .or_insert_with(|| source.content_hash())
                .clone()
        })
    }

    /// Filters out of `sources` everything that does not need to be compiled
    /// for `version`/`profile`; the survivors are marked as `Complete` builds
    /// or — for imports of dirty files — `Optimized` builds.
    fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        // Files that must be fully (re)compiled.
        let mut compile_complete = HashSet::new();
        // Imports of those files: compiled too, but in optimized form.
        let mut compile_optimized = HashSet::new();

        for (file, source) in sources.iter() {
            self.sources_in_scope.insert(file.clone(), version.clone());

            if self.is_missing_artifacts(file, version, profile) {
                compile_complete.insert(file.to_path_buf());
            }

            // Ensure every seen file has a cache entry.
            if !self.cache.files.contains_key(file) {
                self.create_cache_entry(file.clone(), source);
            }
        }

        for source in &compile_complete {
            for import in self.edges.imports(source) {
                if !compile_complete.contains(import) {
                    compile_optimized.insert(import);
                }
            }
        }

        sources.retain(|file, source| {
            source.kind = if compile_complete.contains(file.as_path()) {
                SourceCompilationKind::Complete
            } else if compile_optimized.contains(file.as_path()) {
                SourceCompilationKind::Optimized
            } else {
                return false;
            };
            true
        });
    }

    /// Returns `true` if any expected artifact for `file` is missing for the
    /// given `version`/`profile`.
    fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool {
        self.is_missing_artifacts_impl(file, version, profile).is_err()
    }

    /// Like [`Self::is_missing_artifacts`], but returns the reason as an
    /// error string (captured via tracing's `ret`).
    #[instrument(level = "trace", name = "is_missing_artifacts", skip(self), ret)]
    fn is_missing_artifacts_impl(
        &self,
        file: &Path,
        version: &Version,
        profile: &str,
    ) -> Result<(), &'static str> {
        let Some(entry) = self.cache.entry(file) else {
            return Err("missing cache entry");
        };

        // A file the compiler has already seen but that produced no artifacts
        // is legitimately artifact-free.
        if entry.seen_by_compiler && entry.artifacts.is_empty() {
            return Ok(());
        }

        if !entry.contains(version, profile) {
            return Err("missing linked artifacts");
        }

        if entry
            .artifacts_for_version(version)
            .any(|artifact| !self.cached_artifacts.has_artifact(&artifact.path))
        {
            return Err("missing artifact");
        }

        // NOTE(review): this checks extra files across *all* cached artifacts,
        // not only this file's — confirm whether that is intended.
        if self.missing_extra_files() {
            return Err("missing extra files");
        }

        Ok(())
    }

    /// Determines the set of dirty sources and removes them (and, depending
    /// on the preprocessing mode, their importers) from the cache.
    fn remove_dirty_sources(&mut self) {
        // Transitively marks all importers of `file` as dirty.
        fn populate_dirty_files<P: SourceParser>(
            file: &Path,
            dirty_files: &mut HashSet<PathBuf>,
            edges: &GraphEdges<P>,
        ) {
            for file in edges.importers(file) {
                if !dirty_files.contains(file) {
                    dirty_files.insert(file.to_path_buf());
                    populate_dirty_files(file, dirty_files, edges);
                }
            }
        }

        self.update_profiles();

        let files = self.cache.files.keys().cloned().collect::<HashSet<_>>();

        let mut sources = Sources::new();

        // Files that can no longer be read are dirty by definition.
        for file in &files {
            let Ok(source) = Source::read(file) else {
                self.dirty_sources.insert(file.clone());
                continue;
            };
            sources.insert(file.clone(), source);
        }

        if let Ok(graph) = Graph::<C::Parser>::resolve_sources(&self.project.paths, sources) {
            let (sources, edges) = graph.into_sources();

            self.fill_hashes(&sources);

            // First pass: content-hash dirtiness per file.
            for file in sources.keys() {
                if self.is_dirty(file, false) {
                    self.dirty_sources.insert(file.clone());
                }
            }

            if !self.cache.preprocessed {
                // Without preprocessing, dirtiness propagates transitively to
                // every importer.
                for file in self.dirty_sources.clone().iter() {
                    populate_dirty_files(file, &mut self.dirty_sources, &edges);
                }
            } else {
                // With preprocessing, propagation is limited: it is gated by
                // interface-repr hashes and by mock handling.
                for file in sources.keys() {
                    if self.dirty_sources.contains(file) {
                        continue;
                    }
                    let is_src = self.is_source_file(file);
                    for import in edges.imports(file) {
                        if is_src && self.dirty_sources.contains(import) {
                            self.dirty_sources.insert(file.clone());
                            break;
                        } else if !is_src
                            && self.dirty_sources.contains(import)
                            && (!self.is_source_file(import)
                                || self.is_dirty(import, true)
                                || self.cache.mocks.contains(file))
                        {
                            if self.cache.mocks.contains(file) {
                                // Mocks re-dirty their own importers as well.
                                populate_dirty_files(file, &mut self.dirty_sources, &edges);
                            } else {
                                self.dirty_sources.insert(file.clone());
                            }
                        }
                    }
                }
            }
        } else {
            // The graph could not be resolved: consider everything dirty.
            self.dirty_sources.extend(files);
        }

        for file in &self.dirty_sources {
            debug!("removing dirty file from cache: {}", file.display());
            self.cache.remove(file);
        }
    }

    /// Syncs the cached profiles with the project's current settings profiles
    /// and drops artifacts that were built with outdated profiles.
    fn update_profiles(&mut self) {
        let existing_profiles = self.project.settings_profiles().collect::<BTreeMap<_, _>>();

        // A cached profile is dirty if it no longer exists or its settings are
        // no longer compatible with the cached ones.
        let mut dirty_profiles = HashSet::new();
        for (profile, settings) in &self.cache.profiles {
            if !existing_profiles.get(profile.as_str()).is_some_and(|p| p.can_use_cached(settings))
            {
                dirty_profiles.insert(profile.clone());
            }
        }

        for profile in &dirty_profiles {
            trace!(profile, "removing dirty profile and artifacts");
            self.cache.profiles.remove(profile);
        }

        for (profile, settings) in existing_profiles {
            if !self.cache.profiles.contains_key(profile) {
                trace!(profile, "adding new profile");
                self.cache.profiles.insert(profile.to_string(), settings.clone());
            }
        }

        // Drop artifacts built with dirty profiles; entries without artifacts
        // are kept, entries that end up artifact-less are removed.
        self.cache.files.retain(|_, entry| {
            if entry.artifacts.is_empty() {
                return true;
            }
            entry.artifacts.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|profile, _| !dirty_profiles.contains(profile));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });
            !entry.artifacts.is_empty()
        });
    }

    /// Returns `true` if `file`'s (content or interface-repr) hash differs
    /// from the cached one.
    fn is_dirty(&self, file: &Path, use_interface_repr: bool) -> bool {
        self.is_dirty_impl(file, use_interface_repr).is_err()
    }

    /// Like [`Self::is_dirty`], but returns the reason as an error string
    /// (captured via tracing's `ret`).
    #[instrument(level = "trace", name = "is_dirty", skip(self), ret)]
    fn is_dirty_impl(&self, file: &Path, use_interface_repr: bool) -> Result<(), &'static str> {
        let Some(entry) = self.cache.entry(file) else {
            return Err("missing cache entry");
        };

        if use_interface_repr && self.cache.preprocessed {
            let Some(interface_hash) = self.interface_repr_hashes.get(file) else {
                return Err("missing interface hash");
            };

            if entry.interface_repr_hash.as_ref() != Some(interface_hash) {
                return Err("interface hash changed");
            }
        } else {
            let Some(content_hash) = self.content_hashes.get(file) else {
                return Err("missing content hash");
            };

            if entry.content_hash != *content_hash {
                return Err("content hash changed");
            }
        }

        Ok(())
    }

    /// Pre-computes (memoizes) content hashes — and, when preprocessing is
    /// enabled, interface-repr hashes for source files — for all `sources`.
    fn fill_hashes(&mut self, sources: &Sources) {
        for (file, source) in sources {
            let _ = self.content_hash(source, file);

            if self.cache.preprocessed && self.project.paths.is_source_file(file) {
                let _ = self.interface_repr_hash(source, file);
            }
        }
    }

    /// Returns `true` if the artifact handler considers any cached artifact
    /// file dirty (e.g. a configured extra output file is missing); an error
    /// from the handler counts as dirty.
    fn missing_extra_files(&self) -> bool {
        for artifacts in self.cached_artifacts.values() {
            for artifacts in artifacts.values() {
                for artifact_file in artifacts {
                    if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) {
                        return true;
                    }
                }
            }
        }
        false
    }
}
1028
/// Abstraction over the caching strategy of a compilation run: either a
/// transient run without a cache file, or a run backed by
/// [`ArtifactsCacheInner`].
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// Caching is disabled for this project; only the import graph is kept.
    Ephemeral(GraphEdges<C::Parser>, &'a Project<C, T>),
    /// Caching is enabled.
    Cached(ArtifactsCacheInner<'a, T, C>),
}
1042
impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCache<'a, T, C>
{
    /// Creates the cache for a compilation run: [`ArtifactsCache::Cached`]
    /// when `project.cached` is set, otherwise [`ArtifactsCache::Ephemeral`].
    #[instrument(name = "ArtifactsCache::new", skip(project, edges))]
    pub fn new(
        project: &'a Project<C, T>,
        edges: GraphEdges<C::Parser>,
        preprocessed: bool,
    ) -> Result<Self> {
        // Returns the on-disk cache if it exists and is usable (paths and
        // preprocessing mode match, not invalidated); otherwise a fresh one.
        fn get_cache<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>(
            project: &Project<C, T>,
            invalidate_cache: bool,
            preprocessed: bool,
        ) -> CompilerCache<C::Settings> {
            let paths = project.paths.paths_relative();

            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
                    if cache.paths == paths && preprocessed == cache.preprocessed {
                        return cache;
                    }
                }
            }

            trace!(invalidate_cache, "cache invalidated");

            CompilerCache::new(Default::default(), paths, preprocessed)
        }

        let cache = if project.cached {
            // Unresolved imports make cached state unreliable.
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            let mut cache = get_cache(project, invalidate_cache, preprocessed);

            cache.remove_missing_files();

            let mut cached_artifacts = if project.paths.artifacts.exists() {
                trace!("reading artifacts from cache...");
                // Unreadable artifacts simply fall back to an empty set.
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            trace!("reading build infos from cache...");
            let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default();

            // Drop artifacts whose build info no longer exists on disk.
            cached_artifacts.0.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                cached_builds,
                edges,
                project,
                dirty_sources: Default::default(),
                content_hashes: Default::default(),
                sources_in_scope: Default::default(),
                interface_repr_hashes: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    /// Returns the import graph edges of the current compilation.
    pub fn graph(&self) -> &GraphEdges<C::Parser> {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    /// Test helper exposing the inner cached state, if caching is enabled.
    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> {
        match self {
            ArtifactsCache::Ephemeral(..) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    /// Returns the [`OutputContext`] derived from the cache (empty for
    /// ephemeral runs).
    pub fn output_ctx(&self) -> OutputContext<'_> {
        match self {
            ArtifactsCache::Ephemeral(..) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    /// Returns the project this cache was created for.
    pub fn project(&self) -> &'a Project<C, T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    /// Removes dirty sources from the cache; no-op for ephemeral runs.
    #[instrument(skip_all)]
    pub fn remove_dirty_sources(&mut self) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.remove_dirty_sources(),
        }
    }

    /// Replaces the set of mock files stored in the cache; no-op for
    /// ephemeral runs.
    pub fn update_mocks(&mut self, mocks: HashSet<PathBuf>) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.cache.mocks = mocks,
        }
    }

    /// Returns the set of mock files stored in the cache (empty for ephemeral
    /// runs).
    pub fn mocks(&self) -> HashSet<PathBuf> {
        match self {
            ArtifactsCache::Ephemeral(..) => HashSet::default(),
            ArtifactsCache::Cached(cache) => cache.cache.mocks.clone(),
        }
    }

    /// Filters out sources that don't need recompilation; no-op for ephemeral
    /// runs (everything is compiled).
    #[instrument(name = "ArtifactsCache::filter", skip_all)]
    pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile),
        }
    }

    /// Consumes the cache after compilation: reconciles cached state with the
    /// freshly written output and optionally persists the cache file.
    ///
    /// Returns the surviving cached artifacts, cached builds, and the import
    /// graph edges.
    #[instrument(name = "ArtifactsCache::consume", skip_all)]
    #[allow(clippy::type_complexity)]
    pub fn consume<A>(
        self,
        written_artifacts: &Artifacts<A>,
        written_build_infos: &Vec<RawBuildInfo<C::Language>>,
        write_to_disk: bool,
    ) -> Result<(Artifacts<A>, Builds<C::Language>, GraphEdges<C::Parser>)>
    where
        T: ArtifactOutput<Artifact = A>,
    {
        let cache = match self {
            ArtifactsCache::Ephemeral(edges, _project) => {
                trace!("no cache configured, ephemeral");
                return Ok((Default::default(), Default::default(), edges));
            }
            ArtifactsCache::Cached(cache) => cache,
        };

        let ArtifactsCacheInner {
            mut cache,
            mut cached_artifacts,
            cached_builds,
            dirty_sources,
            sources_in_scope,
            project,
            edges,
            content_hashes: _,
            interface_repr_hashes: _,
        } = cache;

        // Keep a cached artifact only if its file/version was in scope, the
        // source wasn't dirty, and a fresh artifact didn't replace it.
        cached_artifacts.0.retain(|file, artifacts| {
            let file = Path::new(file);
            artifacts.retain(|name, artifacts| {
                artifacts.retain(|artifact| {
                    let version = &artifact.version;

                    if !sources_in_scope.contains(file, version) {
                        return false;
                    }
                    if dirty_sources.contains(file) {
                        return false;
                    }
                    if written_artifacts.find_artifact(file, name, version).is_some() {
                        return false;
                    }
                    true
                });
                !artifacts.is_empty()
            });
            !artifacts.is_empty()
        });

        // Merge freshly written artifacts into their cache entries.
        for (file, artifacts) in written_artifacts.as_ref() {
            let file_path = Path::new(file);
            if let Some(entry) = cache.files.get_mut(file_path) {
                entry.merge_artifacts(artifacts);
            }
        }

        for build_info in written_build_infos {
            cache.builds.insert(build_info.id.clone());
        }

        if write_to_disk {
            cache.remove_outdated_builds();
            // Store all paths relative to the project root/artifacts dir.
            cache
                .strip_entries_prefix(project.root())
                .strip_artifact_files_prefixes(project.artifacts_path());
            cache.write(project.cache_path())?;
        }

        Ok((cached_artifacts, cached_builds, edges))
    }

    /// Marks `file` as seen by the compiler (so an artifact-less entry is not
    /// treated as missing output); no-op for ephemeral runs.
    pub fn compiler_seen(&mut self, file: &Path) {
        if let ArtifactsCache::Cached(cache) = self {
            if let Some(entry) = cache.cache.entry_mut(file) {
                entry.seen_by_compiler = true;
            }
        }
    }
}
1299}