1use crate::{
4 buildinfo::RawBuildInfo,
5 compilers::{Compiler, CompilerSettings, Language},
6 output::Builds,
7 resolver::GraphEdges,
8 ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
9 ProjectPaths, ProjectPathsConfig, SourceCompilationKind,
10};
11use foundry_compilers_artifacts::{
12 sources::{Source, Sources},
13 Settings,
14};
15use foundry_compilers_core::{
16 error::{Result, SolcError},
17 utils::{self, strip_prefix},
18};
19use semver::Version;
20use serde::{de::DeserializeOwned, Deserialize, Serialize};
21use std::{
22 collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
23 fs,
24 path::{Path, PathBuf},
25 time::{Duration, UNIX_EPOCH},
26};
27
28mod iface;
29use iface::interface_repr_hash;
30
/// The current cache-file format version identifier, stored in the `_format` field.
const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4";

/// The file name of the default cache file.
pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
40
/// A cache file mapping source files to their cache entries, plus the builds and settings
/// profiles referenced by those entries.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CompilerCache<S = Settings> {
    /// Cache format version identifier, serialized as `_format`.
    #[serde(rename = "_format")]
    pub format: String,
    /// The project paths the cache was created with (stored relative to the project root;
    /// see `join_entries`/`strip_entries_prefix`).
    pub paths: ProjectPaths,
    /// All cached source files, keyed by their path.
    pub files: BTreeMap<PathBuf, CacheEntry>,
    /// Ids of the build-info files referenced by cached artifacts.
    pub builds: BTreeSet<String>,
    /// Compiler settings keyed by profile name.
    pub profiles: BTreeMap<String, S>,
    /// Whether sources were preprocessed (enables interface-representation hashing).
    pub preprocessed: bool,
    /// Paths of files known to be mocks; used during preprocessed dirty-checking.
    pub mocks: HashSet<PathBuf>,
}
54
55impl<S> CompilerCache<S> {
56 pub fn new(format: String, paths: ProjectPaths, preprocessed: bool) -> Self {
57 Self {
58 format,
59 paths,
60 files: Default::default(),
61 builds: Default::default(),
62 profiles: Default::default(),
63 preprocessed,
64 mocks: Default::default(),
65 }
66 }
67}
68
69impl<S: CompilerSettings> CompilerCache<S> {
70 pub fn is_empty(&self) -> bool {
71 self.files.is_empty()
72 }
73
74 pub fn remove(&mut self, file: &Path) -> Option<CacheEntry> {
76 self.files.remove(file)
77 }
78
79 pub fn len(&self) -> usize {
81 self.files.len()
82 }
83
84 pub fn artifacts_len(&self) -> usize {
86 self.entries().map(|entry| entry.artifacts().count()).sum()
87 }
88
89 pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
91 self.files.values()
92 }
93
94 pub fn entry(&self, file: &Path) -> Option<&CacheEntry> {
96 self.files.get(file)
97 }
98
99 pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> {
101 self.files.get_mut(file)
102 }
103
104 #[instrument(skip_all, name = "sol-files-cache::read")]
122 pub fn read(path: &Path) -> Result<Self> {
123 trace!("reading solfiles cache at {}", path.display());
124 let cache: Self = utils::read_json_file(path)?;
125 trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
126 Ok(cache)
127 }
128
129 pub fn read_joined<L>(paths: &ProjectPathsConfig<L>) -> Result<Self> {
146 let mut cache = Self::read(&paths.cache)?;
147 cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
148 Ok(cache)
149 }
150
151 pub fn write(&self, path: &Path) -> Result<()> {
153 trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
154 utils::create_parent_dir_all(path)?;
155 utils::write_json_file(self, path, 128 * 1024)?;
156 trace!("cache file located: \"{}\"", path.display());
157 Ok(())
158 }
159
160 pub fn remove_outdated_builds(&mut self) {
162 let mut outdated = Vec::new();
163 for build_id in &self.builds {
164 if !self
165 .entries()
166 .flat_map(|e| e.artifacts.values())
167 .flat_map(|a| a.values())
168 .flat_map(|a| a.values())
169 .any(|a| a.build_id == *build_id)
170 {
171 outdated.push(build_id.to_owned());
172 }
173 }
174
175 for build_id in outdated {
176 self.builds.remove(&build_id);
177 let path = self.paths.build_infos.join(build_id).with_extension("json");
178 let _ = std::fs::remove_file(path);
179 }
180 }
181
182 pub fn join_entries(&mut self, root: &Path) -> &mut Self {
184 self.files = std::mem::take(&mut self.files)
185 .into_iter()
186 .map(|(path, entry)| (root.join(path), entry))
187 .collect();
188 self
189 }
190
191 pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
193 self.files = std::mem::take(&mut self.files)
194 .into_iter()
195 .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
196 .collect();
197 self
198 }
199
200 pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
202 self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
203 self
204 }
205
206 pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
208 self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
209 self
210 }
211
212 pub fn remove_missing_files(&mut self) {
216 trace!("remove non existing files from cache");
217 self.files.retain(|file, _| {
218 let exists = file.exists();
219 if !exists {
220 trace!("remove {} from cache", file.display());
221 }
222 exists
223 })
224 }
225
226 pub fn all_artifacts_exist(&self) -> bool {
228 self.files.values().all(|entry| entry.all_artifacts_exist())
229 }
230
231 pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
253 self.files = self
254 .files
255 .into_iter()
256 .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
257 .collect();
258 self
259 }
260
261 pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> {
273 let entry = self.entry(contract_file)?;
274 entry.find_artifact_path(contract_name)
275 }
276
277 pub fn read_artifact<Artifact: DeserializeOwned>(
296 &self,
297 contract_file: &Path,
298 contract_name: &str,
299 ) -> Result<Artifact> {
300 let artifact_path =
301 self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
302 SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
303 })?;
304 utils::read_json_file(artifact_path)
305 }
306
307 pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
322 &self,
323 ) -> Result<Artifacts<Artifact>> {
324 use rayon::prelude::*;
325
326 let artifacts = self
327 .files
328 .par_iter()
329 .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files)))
330 .collect::<Result<ArtifactsMap<_>>>()?;
331 Ok(Artifacts(artifacts))
332 }
333
334 pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
339 use rayon::prelude::*;
340
341 self.builds
342 .par_iter()
343 .map(|build_id| {
344 utils::read_json_file(&build_info_dir.join(build_id).with_extension("json"))
345 .map(|b| (build_id.clone(), b))
346 })
347 .collect::<Result<_>>()
348 .map(|b| Builds(b))
349 }
350}
351
#[cfg(feature = "async")]
impl<S: CompilerSettings> CompilerCache<S> {
    /// Reads the cache file at `path` on a blocking background task.
    pub async fn async_read(path: &Path) -> Result<Self> {
        let path = path.to_owned();
        Self::asyncify(move || Self::read(&path)).await
    }

    /// Asynchronously serializes the cache as JSON to `path`.
    ///
    /// NOTE(review): unlike the sync `write`, this does not create missing parent
    /// directories before writing — confirm this asymmetry is intended.
    pub async fn async_write(&self, path: &Path) -> Result<()> {
        let content = serde_json::to_vec(self)?;
        tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))
    }

    /// Runs the blocking closure `f` on tokio's blocking thread pool.
    async fn asyncify<F, T>(f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T> + Send + 'static,
        T: Send + 'static,
    {
        match tokio::task::spawn_blocking(f).await {
            Ok(res) => res,
            // The background task panicked or was cancelled; surface a generic I/O error.
            Err(_) => Err(SolcError::io(std::io::Error::other("background task failed"), "")),
        }
    }
}
375
376impl<S> Default for CompilerCache<S> {
377 fn default() -> Self {
378 Self {
379 format: ETHERS_FORMAT_VERSION.to_string(),
380 builds: Default::default(),
381 files: Default::default(),
382 paths: Default::default(),
383 profiles: Default::default(),
384 preprocessed: false,
385 mocks: Default::default(),
386 }
387 }
388}
389
390impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache<S> {
391 fn from(config: &'a ProjectPathsConfig) -> Self {
392 let paths = config.paths_relative();
393 Self::new(Default::default(), paths, false)
394 }
395}
396
/// A cached artifact: its location on disk and the id of the build that produced it.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CachedArtifact {
    /// Path to the artifact file.
    pub path: PathBuf,
    /// Id of the build-info entry that produced this artifact.
    pub build_id: String,
}
405
/// Cached artifacts grouped by artifact name, compiler version and settings profile.
pub type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;
407
/// A cache entry for a single source file.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    /// Last modification time of the file, in milliseconds since the Unix epoch.
    pub last_modification_date: u64,
    /// Hash of the file's raw content.
    pub content_hash: String,
    /// Hash of the file's interface representation; only set for source files when
    /// preprocessing is enabled.
    pub interface_repr_hash: Option<String>,
    /// The file's path relative to the project root.
    pub source_name: PathBuf,
    /// Paths of all files this file imports, relative to the project root.
    pub imports: BTreeSet<PathBuf>,
    /// The compiler version requirement resolved for this file, if any.
    pub version_requirement: Option<String>,
    /// All artifacts produced from this file, grouped by name, version and profile.
    pub artifacts: CachedArtifacts,
    /// Marker set once the compiler has processed this file (see
    /// `ArtifactsCache::compiler_seen`).
    pub seen_by_compiler: bool,
}
449
450impl CacheEntry {
451 pub fn last_modified(&self) -> Duration {
453 Duration::from_millis(self.last_modification_date)
454 }
455
456 pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> {
471 self.artifacts
472 .get(contract_name)?
473 .iter()
474 .next()
475 .and_then(|(_, a)| a.iter().next())
476 .map(|(_, p)| p.path.as_path())
477 }
478
479 pub fn read_last_modification_date(file: &Path) -> Result<u64> {
481 let last_modification_date = fs::metadata(file)
482 .map_err(|err| SolcError::io(err, file.to_path_buf()))?
483 .modified()
484 .map_err(|err| SolcError::io(err, file.to_path_buf()))?
485 .duration_since(UNIX_EPOCH)
486 .map_err(SolcError::msg)?
487 .as_millis() as u64;
488 Ok(last_modification_date)
489 }
490
491 fn read_artifact_files<Artifact: DeserializeOwned>(
495 &self,
496 ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
497 let mut artifacts = BTreeMap::new();
498 for (artifact_name, versioned_files) in self.artifacts.iter() {
499 let mut files = Vec::with_capacity(versioned_files.len());
500 for (version, cached_artifact) in versioned_files {
501 for (profile, cached_artifact) in cached_artifact {
502 let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?;
503 files.push(ArtifactFile {
504 artifact,
505 file: cached_artifact.path.clone(),
506 version: version.clone(),
507 build_id: cached_artifact.build_id.clone(),
508 profile: profile.clone(),
509 });
510 }
511 }
512 artifacts.insert(artifact_name.clone(), files);
513 }
514 Ok(artifacts)
515 }
516
517 pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
518 where
519 I: IntoIterator<Item = (&'a String, A)>,
520 A: IntoIterator<Item = &'a ArtifactFile<T>>,
521 {
522 for (name, artifacts) in artifacts.into_iter() {
523 for artifact in artifacts {
524 self.artifacts
525 .entry(name.clone())
526 .or_default()
527 .entry(artifact.version.clone())
528 .or_default()
529 .insert(
530 artifact.profile.clone(),
531 CachedArtifact {
532 build_id: artifact.build_id.clone(),
533 path: artifact.file.clone(),
534 },
535 );
536 }
537 }
538 }
539
540 pub fn contains(&self, version: &Version, profile: &str) -> bool {
542 self.artifacts.values().any(|artifacts| {
543 artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some()
544 })
545 }
546
547 pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &str, &CachedArtifact)> {
549 self.artifacts
550 .values()
551 .flatten()
552 .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a)))
553 }
554
555 pub fn find_artifact(
557 &self,
558 contract: &str,
559 version: &Version,
560 profile: &str,
561 ) -> Option<&CachedArtifact> {
562 self.artifacts
563 .get(contract)
564 .and_then(|files| files.get(version))
565 .and_then(|files| files.get(profile))
566 }
567
568 pub fn artifacts_for_version<'a>(
570 &'a self,
571 version: &'a Version,
572 ) -> impl Iterator<Item = &'a CachedArtifact> + 'a {
573 self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file))
574 }
575
576 pub fn artifacts(&self) -> impl Iterator<Item = &CachedArtifact> {
578 self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values)
579 }
580
581 pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut CachedArtifact> {
583 self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut)
584 }
585
586 pub fn all_artifacts_exist(&self) -> bool {
588 self.artifacts().all(|a| a.path.exists())
589 }
590
591 pub fn join_artifacts_files(&mut self, base: &Path) {
593 self.artifacts_mut().for_each(|a| a.path = base.join(&a.path))
594 }
595
596 pub fn strip_artifact_files_prefixes(&mut self, base: &Path) {
598 self.artifacts_mut().for_each(|a| {
599 if let Ok(rem) = a.path.strip_prefix(base) {
600 a.path = rem.to_path_buf();
601 }
602 })
603 }
604}
605
/// Tracks the set of compiler versions each source file is associated with.
#[derive(Clone, Debug, Default)]
pub struct GroupedSources {
    /// Maps each file to the versions it was registered with.
    pub inner: HashMap<PathBuf, HashSet<Version>>,
}
611
612impl GroupedSources {
613 pub fn insert(&mut self, file: PathBuf, version: Version) {
615 match self.inner.entry(file) {
616 hash_map::Entry::Occupied(mut entry) => {
617 entry.get_mut().insert(version);
618 }
619 hash_map::Entry::Vacant(entry) => {
620 entry.insert(HashSet::from([version]));
621 }
622 }
623 }
624
625 pub fn contains(&self, file: &Path, version: &Version) -> bool {
627 self.inner.get(file).is_some_and(|versions| versions.contains(version))
628 }
629}
630
/// Inner state of a cached [`ArtifactsCache`]: the deserialized cache file plus
/// bookkeeping collected during the current compilation run.
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// The deserialized cache file.
    pub cache: CompilerCache<C::Settings>,
    /// Artifacts read from the cache that are still backed by an existing build.
    pub cached_artifacts: Artifacts<T::Artifact>,
    /// Build infos read from the cache.
    pub cached_builds: Builds<C::Language>,
    /// The resolved import graph edges.
    pub edges: GraphEdges<C::ParsedSource>,
    /// The project this cache belongs to.
    pub project: &'a Project<C, T>,
    /// Files determined to be dirty (changed, or affected by changes) in this run.
    pub dirty_sources: HashSet<PathBuf>,
    /// Every (file, version) pair that took part in this compilation run.
    pub sources_in_scope: GroupedSources,
    /// Memoized content hashes, keyed by file path.
    pub content_hashes: HashMap<PathBuf, String>,
    /// Memoized interface-representation hashes, keyed by file path.
    pub interface_repr_hashes: HashMap<PathBuf, String>,
}
669
impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCacheInner<'_, T, C>
{
    /// Returns `true` if the given file lies in the project's sources directory.
    fn is_source_file(&self, file: &Path) -> bool {
        self.project.paths.is_source_file(file)
    }

    /// Creates and inserts a new cache entry for `file`, derived from the resolved graph
    /// edges and the file's on-disk metadata.
    fn create_cache_entry(&mut self, file: PathBuf, source: &Source) {
        // Store imports relative to the project root.
        let imports = self
            .edges
            .imports(&file)
            .into_iter()
            .map(|import| strip_prefix(import, self.project.root()).into())
            .collect();

        // Interface-representation hashes are only tracked for source files when
        // preprocessing is enabled.
        let interface_repr_hash = (self.cache.preprocessed && self.is_source_file(&file))
            .then(|| self.interface_repr_hash(source, &file).to_string());

        let entry = CacheEntry {
            last_modification_date: CacheEntry::read_last_modification_date(&file)
                .unwrap_or_default(),
            content_hash: source.content_hash(),
            interface_repr_hash,
            source_name: strip_prefix(&file, self.project.root()).into(),
            imports,
            version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()),
            artifacts: Default::default(),
            seen_by_compiler: false,
        };

        self.cache.files.insert(file, entry);
    }

    /// Returns the content hash for `file`, computing and memoizing it if missing.
    fn content_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.content_hashes.entry(file.to_path_buf()).or_insert_with(|| source.content_hash())
    }

    /// Returns the memoized interface-representation hash for `file`, falling back to the
    /// plain content hash when no interface representation can be produced.
    fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| {
            if let Some(r) = interface_repr_hash(&source.content, file) {
                return r;
            }
            self.content_hashes
                .entry(file.to_path_buf())
                .or_insert_with(|| source.content_hash())
                .clone()
        })
    }

    /// Partitions `sources` into those needing full compilation
    /// ([`SourceCompilationKind::Complete`]) and those only needed as imports
    /// ([`SourceCompilationKind::Optimized`]); everything else is dropped from `sources`.
    fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        // Files whose artifacts are missing for this (version, profile) pair.
        let mut compile_complete = HashSet::new();
        // Imports of the above that only need optimized compilation.
        let mut compile_optimized = HashSet::new();

        for (file, source) in sources.iter() {
            self.sources_in_scope.insert(file.clone(), version.clone());

            if self.is_missing_artifacts(file, version, profile) {
                compile_complete.insert(file.to_path_buf());
            }

            // Ensure a cache entry exists for every source we have seen.
            if !self.cache.files.contains_key(file) {
                self.create_cache_entry(file.clone(), source);
            }
        }

        // Every import of a fully compiled file must at least be compiled optimized.
        for source in &compile_complete {
            for import in self.edges.imports(source) {
                if !compile_complete.contains(import) {
                    compile_optimized.insert(import);
                }
            }
        }

        sources.retain(|file, source| {
            source.kind = if compile_complete.contains(file.as_path()) {
                SourceCompilationKind::Complete
            } else if compile_optimized.contains(file.as_path()) {
                SourceCompilationKind::Optimized
            } else {
                return false;
            };
            true
        });
    }

    /// Returns `true` if cached artifacts for `file` under the given (version, profile)
    /// are missing or incomplete, i.e. the file needs to be compiled.
    #[instrument(level = "trace", skip(self))]
    fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool {
        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        // The compiler has already processed this file and produced no artifacts, so
        // there is nothing that could be missing.
        if entry.seen_by_compiler && entry.artifacts.is_empty() {
            trace!("no artifacts");
            return false;
        }

        if !entry.contains(version, profile) {
            trace!("missing linked artifacts");
            return true;
        }

        if entry.artifacts_for_version(version).any(|artifact| {
            let missing_artifact = !self.cached_artifacts.has_artifact(&artifact.path);
            if missing_artifact {
                trace!("missing artifact \"{}\"", artifact.path.display());
            }
            missing_artifact
        }) {
            return true;
        }

        // Also recompile if the artifacts handler reports any cached artifact as dirty.
        self.missing_extra_files()
    }

    /// Determines which sources are dirty and removes them (and stale profile artifacts)
    /// from the cache so they get recompiled.
    fn find_and_remove_dirty(&mut self) {
        /// Recursively marks all transitive importers of `file` as dirty.
        fn populate_dirty_files<D>(
            file: &Path,
            dirty_files: &mut HashSet<PathBuf>,
            edges: &GraphEdges<D>,
        ) {
            for file in edges.importers(file) {
                if !dirty_files.contains(file) {
                    dirty_files.insert(file.to_path_buf());
                    populate_dirty_files(file, dirty_files, edges);
                }
            }
        }

        let existing_profiles = self.project.settings_profiles().collect::<BTreeMap<_, _>>();

        // A cached profile is dirty when it no longer exists or its cached settings are
        // incompatible with the current ones.
        let mut dirty_profiles = HashSet::new();
        for (profile, settings) in &self.cache.profiles {
            if !existing_profiles.get(profile.as_str()).is_some_and(|p| p.can_use_cached(settings))
            {
                trace!("dirty profile: {}", profile);
                dirty_profiles.insert(profile.clone());
            }
        }

        for profile in &dirty_profiles {
            self.cache.profiles.remove(profile);
        }

        // Drop all artifacts compiled with a dirty profile; entries left without any
        // artifacts are removed entirely.
        self.cache.files.retain(|_, entry| {
            if entry.artifacts.is_empty() {
                return true;
            }
            entry.artifacts.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|profile, _| !dirty_profiles.contains(profile));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });
            !entry.artifacts.is_empty()
        });

        // Record any profiles not yet present in the cache.
        for (profile, settings) in existing_profiles {
            if !self.cache.profiles.contains_key(profile) {
                self.cache.profiles.insert(profile.to_string(), settings.clone());
            }
        }

        let files = self.cache.files.keys().cloned().collect::<HashSet<_>>();

        let mut sources = Sources::new();

        // Read all cached files; unreadable files are dirty by definition.
        for file in &files {
            let Ok(source) = Source::read(file) else {
                self.dirty_sources.insert(file.clone());
                continue;
            };
            sources.insert(file.clone(), source);
        }

        if let Ok(graph) = Graph::<C::ParsedSource>::resolve_sources(&self.project.paths, sources) {
            let (sources, edges) = graph.into_sources();

            self.fill_hashes(&sources);

            // First pass: per-file, content-based dirtiness.
            for file in sources.keys() {
                if self.is_dirty_impl(file, false) {
                    self.dirty_sources.insert(file.clone());
                }
            }

            if !self.cache.preprocessed {
                // Without preprocessing, dirtiness propagates to all transitive importers.
                for file in self.dirty_sources.clone().iter() {
                    populate_dirty_files(file, &mut self.dirty_sources, &edges);
                }
            } else {
                // With preprocessing, propagation is restricted: source files become dirty
                // if any of their imports is dirty; non-source files only when the dirty
                // import is a non-source, an interface-dirty source, or the file is a mock.
                for file in sources.keys() {
                    if self.dirty_sources.contains(file) {
                        continue;
                    }
                    let is_src = self.is_source_file(file);
                    for import in edges.imports(file) {
                        if is_src && self.dirty_sources.contains(import) {
                            self.dirty_sources.insert(file.clone());
                            break;
                        } else if !is_src
                            && self.dirty_sources.contains(import)
                            && (!self.is_source_file(import)
                                || self.is_dirty_impl(import, true)
                                || self.cache.mocks.contains(file))
                        {
                            if self.cache.mocks.contains(file) {
                                // Mocks propagate dirtiness to their importers as well.
                                populate_dirty_files(file, &mut self.dirty_sources, &edges);
                            } else {
                                self.dirty_sources.insert(file.clone());
                            }
                        }
                    }
                }
            }
        } else {
            // If the graph cannot be resolved, conservatively treat every file as dirty.
            self.dirty_sources.extend(files);
        }

        // Finally, evict all dirty files from the cache.
        for file in &self.dirty_sources {
            debug!("removing dirty file from cache: {}", file.display());
            self.cache.remove(file);
        }
    }

    /// Returns `true` if the cached hashes for `file` differ from the freshly computed
    /// ones. With `use_interface_repr` set (and preprocessing enabled), compares
    /// interface-representation hashes instead of raw content hashes.
    fn is_dirty_impl(&self, file: &Path, use_interface_repr: bool) -> bool {
        let Some(entry) = self.cache.entry(file) else {
            trace!("missing cache entry");
            return true;
        };

        if use_interface_repr && self.cache.preprocessed {
            let Some(interface_hash) = self.interface_repr_hashes.get(file) else {
                trace!("missing interface hash");
                return true;
            };

            if entry.interface_repr_hash.as_ref() != Some(interface_hash) {
                trace!("interface hash changed");
                return true;
            };
        } else {
            let Some(content_hash) = self.content_hashes.get(file) else {
                trace!("missing content hash");
                return true;
            };

            if entry.content_hash != *content_hash {
                trace!("content hash changed");
                return true;
            }
        }

        // Note: only hash-based dirtiness is checked here; profile/settings dirtiness is
        // handled separately in `find_and_remove_dirty`.
        false
    }

    /// Memoizes content hashes (and, when preprocessing is enabled, interface
    /// representation hashes for source files) for all given sources.
    fn fill_hashes(&mut self, sources: &Sources) {
        for (file, source) in sources {
            let _ = self.content_hash(source, file);

            // Interface-representation hashes are only tracked for source files.
            if self.cache.preprocessed && self.project.paths.is_source_file(file) {
                let _ = self.interface_repr_hash(source, file);
            }
        }
    }

    /// Returns `true` if the artifacts handler reports any cached artifact file as dirty.
    fn missing_extra_files(&self) -> bool {
        for artifacts in self.cached_artifacts.values() {
            for artifacts in artifacts.values() {
                for artifact_file in artifacts {
                    // Treat handler errors as dirty to stay on the safe side.
                    if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) {
                        return true;
                    }
                }
            }
        }
        false
    }
}
1001
/// Cache abstraction for a compilation run: either transient (no cache file involved) or
/// backed by the on-disk compiler cache.
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// Caching is disabled for the project; only the resolved graph edges are kept.
    Ephemeral(GraphEdges<C::ParsedSource>, &'a Project<C, T>),
    /// Caching is enabled; state lives in [`ArtifactsCacheInner`].
    Cached(ArtifactsCacheInner<'a, T, C>),
}
1015
impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCache<'a, T, C>
{
    /// Creates the cache for a compilation run of the given project.
    ///
    /// Returns [`ArtifactsCache::Cached`] when `project.cached` is set, otherwise an
    /// [`ArtifactsCache::Ephemeral`] carrying only the graph edges.
    pub fn new(
        project: &'a Project<C, T>,
        edges: GraphEdges<C::ParsedSource>,
        preprocessed: bool,
    ) -> Result<Self> {
        /// Returns the on-disk cache if it is present and usable, otherwise a fresh one.
        fn get_cache<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>(
            project: &Project<C, T>,
            invalidate_cache: bool,
            preprocessed: bool,
        ) -> CompilerCache<C::Settings> {
            let paths = project.paths.paths_relative();

            // Only reuse the existing cache file if it matches the current paths and
            // preprocessing setting.
            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
                    if cache.paths == paths && preprocessed == cache.preprocessed {
                        return cache;
                    }
                }
            }

            CompilerCache::new(Default::default(), paths, preprocessed)
        }

        let cache = if project.cached {
            // Invalidate the whole cache when there are unresolved imports.
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            let mut cache = get_cache(project, invalidate_cache, preprocessed);

            cache.remove_missing_files();

            let mut cached_artifacts = if project.paths.artifacts.exists() {
                trace!("reading artifacts from cache...");
                // Fall back to an empty set if any artifact fails to deserialize.
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            trace!("reading build infos from cache...");
            let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default();

            // Keep only artifacts whose build info could still be read.
            cached_artifacts.0.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                cached_builds,
                edges,
                project,
                dirty_sources: Default::default(),
                content_hashes: Default::default(),
                sources_in_scope: Default::default(),
                interface_repr_hashes: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    /// Returns the resolved import graph edges.
    pub fn graph(&self) -> &GraphEdges<C::ParsedSource> {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> {
        match self {
            ArtifactsCache::Ephemeral(..) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    /// Returns the [`OutputContext`] for this cache; empty for ephemeral caches.
    pub fn output_ctx(&self) -> OutputContext<'_> {
        match self {
            ArtifactsCache::Ephemeral(..) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    /// Returns the project this cache was created for.
    pub fn project(&self) -> &'a Project<C, T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    /// Finds and evicts all dirty sources from the cache; no-op for ephemeral caches.
    pub fn remove_dirty_sources(&mut self) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.find_and_remove_dirty(),
        }
    }

    /// Replaces the set of known mock files; no-op for ephemeral caches.
    pub fn update_mocks(&mut self, mocks: HashSet<PathBuf>) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.cache.mocks = mocks,
        }
    }

    /// Returns the set of known mock files; empty for ephemeral caches.
    pub fn mocks(&self) -> HashSet<PathBuf> {
        match self {
            ArtifactsCache::Ephemeral(..) => HashSet::default(),
            ArtifactsCache::Cached(cache) => cache.cache.mocks.clone(),
        }
    }

    /// Filters out sources that don't need recompilation; no-op for ephemeral caches.
    pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile),
        }
    }

    /// Consumes the cache after compilation, reconciling it with the freshly written
    /// artifacts and build infos and, if requested, persisting it to disk.
    ///
    /// Returns the surviving cached (not recompiled) artifacts and build infos.
    pub fn consume<A>(
        self,
        written_artifacts: &Artifacts<A>,
        written_build_infos: &Vec<RawBuildInfo<C::Language>>,
        write_to_disk: bool,
    ) -> Result<(Artifacts<A>, Builds<C::Language>)>
    where
        T: ArtifactOutput<Artifact = A>,
    {
        let ArtifactsCache::Cached(cache) = self else {
            trace!("no cache configured, ephemeral");
            return Ok(Default::default());
        };

        let ArtifactsCacheInner {
            mut cache,
            mut cached_artifacts,
            cached_builds,
            dirty_sources,
            sources_in_scope,
            project,
            ..
        } = cache;

        // Drop cached artifacts that are out of scope, dirty, or superseded by a freshly
        // written artifact.
        cached_artifacts.0.retain(|file, artifacts| {
            let file = Path::new(file);
            artifacts.retain(|name, artifacts| {
                artifacts.retain(|artifact| {
                    let version = &artifact.version;

                    if !sources_in_scope.contains(file, version) {
                        return false;
                    }
                    if dirty_sources.contains(file) {
                        return false;
                    }
                    if written_artifacts.find_artifact(file, name, version).is_some() {
                        return false;
                    }
                    true
                });
                !artifacts.is_empty()
            });
            !artifacts.is_empty()
        });

        // Merge the freshly written artifacts into their cache entries.
        for (file, artifacts) in written_artifacts.as_ref() {
            let file_path = Path::new(file);
            if let Some(entry) = cache.files.get_mut(file_path) {
                entry.merge_artifacts(artifacts);
            }
        }

        // Register all newly written build infos.
        for build_info in written_build_infos {
            cache.builds.insert(build_info.id.clone());
        }

        if write_to_disk {
            cache.remove_outdated_builds();
            // Store paths relative to the project root / artifacts dir so the cache file
            // stays relocatable.
            cache
                .strip_entries_prefix(project.root())
                .strip_artifact_files_prefixes(project.artifacts_path());
            cache.write(project.cache_path())?;
        }

        Ok((cached_artifacts, cached_builds))
    }

    /// Marks `file` as having been processed by the compiler; no-op for ephemeral caches.
    pub fn compiler_seen(&mut self, file: &Path) {
        if let ArtifactsCache::Cached(cache) = self {
            if let Some(entry) = cache.cache.entry_mut(file) {
                entry.seen_by_compiler = true;
            }
        }
    }
}