foundry_compilers/
cache.rs

1//! Support for compiling contracts.
2
3use crate::{
4    buildinfo::RawBuildInfo,
5    compilers::{Compiler, CompilerSettings, Language},
6    output::Builds,
7    resolver::GraphEdges,
8    ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, Graph, OutputContext, Project,
9    ProjectPaths, ProjectPathsConfig, SourceCompilationKind, SourceParser,
10};
11use foundry_compilers_artifacts::{
12    sources::{Source, Sources},
13    Settings,
14};
15use foundry_compilers_core::{
16    error::{Result, SolcError},
17    utils::{self, strip_prefix},
18};
19use semver::Version;
20use serde::{de::DeserializeOwned, Deserialize, Serialize};
21use std::{
22    collections::{btree_map::BTreeMap, hash_map, BTreeSet, HashMap, HashSet},
23    fs,
24    path::{Path, PathBuf},
25    time::{Duration, UNIX_EPOCH},
26};
27
28mod iface;
29use iface::interface_repr_hash;
30
31/// ethers-rs format version
32///
33/// `ethers-solc` uses a different format version id, but the actual format is consistent with
34/// hardhat This allows ethers-solc to detect if the cache file was written by hardhat or
35/// `ethers-solc`
36const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-4";
37
38/// The file name of the default cache file
39pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
40
41/// A multi version cache file
/// A multi version cache file
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CompilerCache<S = Settings> {
    /// Format version id of this cache file, used to detect cache files written by other tools
    /// (e.g. hardhat).
    #[serde(rename = "_format")]
    pub format: String,
    /// contains all directories used for the project
    pub paths: ProjectPaths,
    /// Cache entries, keyed by the source file path they describe.
    pub files: BTreeMap<PathBuf, CacheEntry>,
    /// Ids of all build info files referenced by cached artifacts.
    pub builds: BTreeSet<String>,
    /// Compiler settings, keyed by profile name.
    pub profiles: BTreeMap<String, S>,
    /// Whether interface representation hashes are tracked for source files
    /// (see `CacheEntry::interface_repr_hash`).
    pub preprocessed: bool,
    /// Paths of mock files.
    pub mocks: HashSet<PathBuf>,
}
54
55impl<S> CompilerCache<S> {
56    /// Creates a new empty cache.
57    pub fn new(format: String, paths: ProjectPaths, preprocessed: bool) -> Self {
58        Self {
59            format,
60            paths,
61            files: Default::default(),
62            builds: Default::default(),
63            profiles: Default::default(),
64            preprocessed,
65            mocks: Default::default(),
66        }
67    }
68}
69
impl<S: CompilerSettings> CompilerCache<S> {
    /// Returns `true` if the cache contains no entries.
    pub fn is_empty(&self) -> bool {
        self.files.is_empty()
    }

    /// Removes entry for the given file
    pub fn remove(&mut self, file: &Path) -> Option<CacheEntry> {
        self.files.remove(file)
    }

    /// How many entries the cache contains where each entry represents a source file
    pub fn len(&self) -> usize {
        self.files.len()
    }

    /// How many `Artifacts` this cache references, where a source file can have multiple artifacts
    pub fn artifacts_len(&self) -> usize {
        self.entries().map(|entry| entry.artifacts().count()).sum()
    }

    /// Returns an iterator over all `CacheEntry` this cache contains
    pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
        self.files.values()
    }

    /// Returns the corresponding `CacheEntry` for the file if it exists
    pub fn entry(&self, file: &Path) -> Option<&CacheEntry> {
        self.files.get(file)
    }

    /// Returns the corresponding mutable `CacheEntry` for the file if it exists
    pub fn entry_mut(&mut self, file: &Path) -> Option<&mut CacheEntry> {
        self.files.get_mut(file)
    }

    /// Reads the cache json file from the given path
    ///
    /// See also [`Self::read_joined()`]
    ///
    /// # Errors
    ///
    /// If the cache file does not exist
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let mut cache = CompilerCache::<SolcSettings>::read(project.cache_path())?;
    /// cache.join_artifacts_files(project.artifacts_path());
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    #[instrument(name = "CompilerCache::read", err)]
    pub fn read(path: &Path) -> Result<Self> {
        let cache: Self = utils::read_json_file(path)?;
        trace!(cache.format, cache.files = cache.files.len(), "read cache");
        Ok(cache)
    }

    /// Reads the cache json file from the given path and returns the cache with paths adjoined to
    /// the `ProjectPathsConfig`.
    ///
    /// This expects the `artifact` files to be relative to the artifacts dir of the `paths` and the
    /// `CacheEntry` paths to be relative to the root dir of the `paths`
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let cache: CompilerCache<SolcSettings> = CompilerCache::read_joined(&project.paths)?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    pub fn read_joined<L>(paths: &ProjectPathsConfig<L>) -> Result<Self> {
        let mut cache = Self::read(&paths.cache)?;
        cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
        Ok(cache)
    }

    /// Write the cache as json file to the given path
    #[instrument(name = "CompilerCache::write", skip_all)]
    pub fn write(&self, path: &Path) -> Result<()> {
        trace!("writing cache with {} entries to json file: \"{}\"", self.len(), path.display());
        utils::create_parent_dir_all(path)?;
        utils::write_json_file(self, path, 128 * 1024)?;
        trace!("cache file located: \"{}\"", path.display());
        Ok(())
    }

    /// Removes build infos which don't have any artifacts linked to them.
    ///
    /// Also deletes the corresponding build info files from disk (best effort).
    #[instrument(skip_all)]
    pub fn remove_outdated_builds(&mut self) {
        let mut outdated = Vec::new();
        for build_id in &self.builds {
            // A build is outdated when no cached artifact references its id anymore.
            if !self
                .entries()
                .flat_map(|e| e.artifacts.values())
                .flat_map(|a| a.values())
                .flat_map(|a| a.values())
                .any(|a| a.build_id == *build_id)
            {
                outdated.push(build_id.to_owned());
            }
        }

        for build_id in outdated {
            self.builds.remove(&build_id);
            let path = self.paths.build_infos.join(build_id).with_extension("json");
            // Ignore removal errors; the file may already be gone.
            let _ = std::fs::remove_file(path);
        }
    }

    /// Sets the `CacheEntry`'s file paths to `root` adjoined to `self.file`.
    #[instrument(skip_all)]
    pub fn join_entries(&mut self, root: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (root.join(path), entry))
            .collect();
        self
    }

    /// Removes `base` from all `CacheEntry` paths
    #[instrument(skip_all)]
    pub fn strip_entries_prefix(&mut self, base: &Path) -> &mut Self {
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
            .collect();
        self
    }

    /// Sets the artifact files location to `base` adjoined to the `CacheEntry`'s artifacts.
    #[instrument(skip_all)]
    pub fn join_artifacts_files(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
        self
    }

    /// Removes `base` from all artifact file paths
    #[instrument(skip_all)]
    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) -> &mut Self {
        self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
        self
    }

    /// Removes all `CacheEntry` which source files don't exist on disk
    ///
    /// **NOTE:** this assumes the `files` are absolute
    #[instrument(skip_all)]
    pub fn remove_missing_files(&mut self) {
        trace!("remove non existing files from cache");
        self.files.retain(|file, _| {
            let exists = file.exists();
            if !exists {
                trace!("remove {} from cache", file.display());
            }
            exists
        })
    }

    /// Checks if all artifact files exist
    pub fn all_artifacts_exist(&self) -> bool {
        self.files.values().all(|entry| entry.all_artifacts_exist())
    }

    /// Strips the given prefix from all `file` paths that identify a `CacheEntry` to make them
    /// relative to the given `base` argument
    ///
    /// In other words this sets the keys (the file path of a solidity file) relative to the `base`
    /// argument, so that the key `/Users/me/project/src/Greeter.sol` will be changed to
    /// `src/Greeter.sol` if `base` is `/Users/me/project`
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{
    ///     artifacts::contract::CompactContract, cache::CompilerCache, solc::SolcSettings, Project,
    /// };
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let cache: CompilerCache<SolcSettings> =
    ///     CompilerCache::read(project.cache_path())?.with_stripped_file_prefixes(project.root());
    /// let artifact: CompactContract = cache.read_artifact("src/Greeter.sol".as_ref(), "Greeter")?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    ///
    /// **Note:** this only affects the source files, see [`Self::strip_artifact_files_prefixes()`]
    pub fn with_stripped_file_prefixes(mut self, base: &Path) -> Self {
        self.files = self
            .files
            .into_iter()
            .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
            .collect();
        self
    }

    /// Returns the path to the artifact of the given `(file, contract)` pair
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{cache::CompilerCache, solc::SolcSettings, Project};
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let cache: CompilerCache<SolcSettings> = CompilerCache::read_joined(&project.paths)?;
    /// cache.find_artifact_path("/Users/git/myproject/src/Greeter.sol".as_ref(), "Greeter");
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    pub fn find_artifact_path(&self, contract_file: &Path, contract_name: &str) -> Option<&Path> {
        let entry = self.entry(contract_file)?;
        entry.find_artifact_path(contract_name)
    }

    /// Finds the path to the artifact of the given `(file, contract)` pair (see
    /// [`Self::find_artifact_path()`]) and deserializes the artifact file as JSON.
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{
    ///     artifacts::contract::CompactContract, cache::CompilerCache, solc::SolcSettings, Project,
    /// };
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let cache = CompilerCache::<SolcSettings>::read_joined(&project.paths)?;
    /// let artifact: CompactContract =
    ///     cache.read_artifact("/Users/git/myproject/src/Greeter.sol".as_ref(), "Greeter")?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    ///
    /// **NOTE**: unless the cache's `files` keys were modified `contract_file` is expected to be
    /// absolute.
    #[instrument(skip_all)]
    pub fn read_artifact<Artifact: DeserializeOwned>(
        &self,
        contract_file: &Path,
        contract_name: &str,
    ) -> Result<Artifact> {
        let artifact_path =
            self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
                SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
            })?;
        utils::read_json_file(artifact_path)
    }

    /// Reads all cached artifacts from disk using the given ArtifactOutput handler
    ///
    /// # Examples
    /// ```no_run
    /// use foundry_compilers::{
    ///     artifacts::contract::CompactContractBytecode, cache::CompilerCache, solc::SolcSettings,
    ///     Project,
    /// };
    ///
    /// let project = Project::builder().build(Default::default())?;
    /// let cache: CompilerCache<SolcSettings> = CompilerCache::read_joined(&project.paths)?;
    /// let artifacts = cache.read_artifacts::<CompactContractBytecode>()?;
    /// # Ok::<_, Box<dyn std::error::Error>>(())
    /// ```
    #[instrument(skip_all)]
    pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
        &self,
    ) -> Result<Artifacts<Artifact>> {
        use rayon::prelude::*;

        // Artifact files are read in parallel; the first error aborts the collection.
        let artifacts = self
            .files
            .par_iter()
            .map(|(file, entry)| entry.read_artifact_files().map(|files| (file.clone(), files)))
            .collect::<Result<ArtifactsMap<_>>>()?;
        Ok(Artifacts(artifacts))
    }

    /// Reads all cached [BuildContext]s from disk. [BuildContext] is inlined into [RawBuildInfo]
    /// objects, so we are basically just partially deserializing build infos here.
    ///
    /// [BuildContext]: crate::buildinfo::BuildContext
    #[instrument(skip_all)]
    pub fn read_builds<L: Language>(&self, build_info_dir: &Path) -> Result<Builds<L>> {
        use rayon::prelude::*;

        self.builds
            .par_iter()
            .map(|build_id| {
                utils::read_json_file(&build_info_dir.join(build_id).with_extension("json"))
                    .map(|b| (build_id.clone(), b))
            })
            .collect::<Result<_>>()
            .map(|b| Builds(b))
    }
}
361
#[cfg(feature = "async")]
impl<S: CompilerSettings> CompilerCache<S> {
    /// Asynchronously reads the cache json file from `path` by offloading the blocking read to a
    /// background task.
    pub async fn async_read(path: &Path) -> Result<Self> {
        let path = path.to_path_buf();
        Self::asyncify(move || Self::read(&path)).await
    }

    /// Asynchronously serializes the cache as JSON and writes it to `path`.
    ///
    /// NOTE(review): unlike [`Self::write`], this does not create missing parent directories and
    /// serializes with `serde_json::to_vec` (compact) — confirm the asymmetry is intentional.
    pub async fn async_write(&self, path: &Path) -> Result<()> {
        let content = serde_json::to_vec(self)?;
        tokio::fs::write(path, content).await.map_err(|err| SolcError::io(err, path))
    }

    /// Runs the blocking closure `f` on a blocking-capable background task and awaits its result,
    /// mapping a failed (panicked/cancelled) task to an I/O error.
    async fn asyncify<F, T>(f: F) -> Result<T>
    where
        F: FnOnce() -> Result<T> + Send + 'static,
        T: Send + 'static,
    {
        tokio::task::spawn_blocking(f).await.unwrap_or_else(|_| {
            Err(SolcError::io(std::io::Error::other("background task failed"), ""))
        })
    }
}
385
386impl<S> Default for CompilerCache<S> {
387    fn default() -> Self {
388        Self {
389            format: ETHERS_FORMAT_VERSION.to_string(),
390            builds: Default::default(),
391            files: Default::default(),
392            paths: Default::default(),
393            profiles: Default::default(),
394            preprocessed: false,
395            mocks: Default::default(),
396        }
397    }
398}
399
400impl<'a, S: CompilerSettings> From<&'a ProjectPathsConfig> for CompilerCache<S> {
401    fn from(config: &'a ProjectPathsConfig) -> Self {
402        let paths = config.paths_relative();
403        Self::new(Default::default(), paths, false)
404    }
405}
406
/// Cached artifact data.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct CachedArtifact {
    /// Path to the artifact file.
    ///
    /// May be relative or absolute depending on whether
    /// [`CacheEntry::join_artifacts_files`] / [`CacheEntry::strip_artifact_files_prefixes`]
    /// was applied.
    pub path: PathBuf,
    /// Build id which produced the given artifact.
    pub build_id: String,
}
415
/// All cached artifacts of a file, tracked as `contract name -> version -> profile -> artifact`.
pub type CachedArtifacts = BTreeMap<String, BTreeMap<Version, BTreeMap<String, CachedArtifact>>>;
417
/// A `CacheEntry` in the cache file represents a solidity file
///
/// A solidity file can contain several contracts, for every contract a separate `Artifact` is
/// emitted. so the `CacheEntry` tracks the artifacts by name. A file can be compiled with multiple
/// `solc` versions generating version specific artifacts.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    /// the last modification time of this file, in milliseconds since the unix epoch
    /// (see [`CacheEntry::read_last_modification_date`])
    pub last_modification_date: u64,
    /// hash to identify whether the content of the file changed
    pub content_hash: String,
    /// hash of the interface representation of the file, if it's a source file
    pub interface_repr_hash: Option<String>,
    /// identifier name see [`foundry_compilers_core::utils::source_name()`]
    pub source_name: PathBuf,
    /// fully resolved imports of the file
    ///
    /// all paths start relative from the project's root: `src/importedFile.sol`
    pub imports: BTreeSet<PathBuf>,
    /// The solidity version pragma
    pub version_requirement: Option<String>,
    /// all artifacts produced for this file
    ///
    /// In theory a file can be compiled by different solc versions:
    /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
    /// file `C` would be compiled twice, with `0.8.10` and `0.8.11`, producing two different
    /// artifacts.
    ///
    /// This map tracks the artifacts by `name -> (Version -> profile -> PathBuf)`.
    /// This mimics the default artifacts directory structure
    pub artifacts: CachedArtifacts,
    /// Whether this file was compiled at least once.
    ///
    /// If this is true and `artifacts` are empty, it means that given version of the file does
    /// not produce any artifacts and it should not be compiled again.
    ///
    /// If this is false, then artifacts are definitely empty and it should be compiled if we may
    /// need artifacts.
    pub seen_by_compiler: bool,
}
459
460impl CacheEntry {
461    /// Returns the last modified timestamp `Duration`
462    pub fn last_modified(&self) -> Duration {
463        Duration::from_millis(self.last_modification_date)
464    }
465
466    /// Returns the artifact path for the contract name.
467    ///
468    /// # Examples
469    ///
470    /// ```no_run
471    /// use foundry_compilers::cache::CacheEntry;
472    ///
473    /// # fn t(entry: CacheEntry) {
474    /// # stringify!(
475    /// let entry: CacheEntry = ...;
476    /// # );
477    /// entry.find_artifact_path("Greeter");
478    /// # }
479    /// ```
480    pub fn find_artifact_path(&self, contract_name: &str) -> Option<&Path> {
481        self.artifacts
482            .get(contract_name)?
483            .iter()
484            .next()
485            .and_then(|(_, a)| a.iter().next())
486            .map(|(_, p)| p.path.as_path())
487    }
488
489    /// Reads the last modification date from the file's metadata
490    pub fn read_last_modification_date(file: &Path) -> Result<u64> {
491        let last_modification_date = fs::metadata(file)
492            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
493            .modified()
494            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
495            .duration_since(UNIX_EPOCH)
496            .map_err(SolcError::msg)?
497            .as_millis() as u64;
498        Ok(last_modification_date)
499    }
500
501    /// Reads all artifact files associated with the `CacheEntry`
502    ///
503    /// **Note:** all artifact file paths should be absolute.
504    #[instrument(skip_all)]
505    fn read_artifact_files<Artifact: DeserializeOwned>(
506        &self,
507    ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
508        let mut artifacts = BTreeMap::new();
509        for (artifact_name, versioned_files) in self.artifacts.iter() {
510            let mut files = Vec::with_capacity(versioned_files.len());
511            for (version, cached_artifact) in versioned_files {
512                for (profile, cached_artifact) in cached_artifact {
513                    let artifact: Artifact = utils::read_json_file(&cached_artifact.path)?;
514                    files.push(ArtifactFile {
515                        artifact,
516                        file: cached_artifact.path.clone(),
517                        version: version.clone(),
518                        build_id: cached_artifact.build_id.clone(),
519                        profile: profile.clone(),
520                    });
521                }
522            }
523            artifacts.insert(artifact_name.clone(), files);
524        }
525        Ok(artifacts)
526    }
527
528    #[instrument(skip_all)]
529    pub(crate) fn merge_artifacts<'a, A, I, T: 'a>(&mut self, artifacts: I)
530    where
531        I: IntoIterator<Item = (&'a String, A)>,
532        A: IntoIterator<Item = &'a ArtifactFile<T>>,
533    {
534        for (name, artifacts) in artifacts.into_iter() {
535            for artifact in artifacts {
536                self.artifacts
537                    .entry(name.clone())
538                    .or_default()
539                    .entry(artifact.version.clone())
540                    .or_default()
541                    .insert(
542                        artifact.profile.clone(),
543                        CachedArtifact {
544                            build_id: artifact.build_id.clone(),
545                            path: artifact.file.clone(),
546                        },
547                    );
548            }
549        }
550    }
551
552    /// Returns `true` if the artifacts set contains the given version
553    pub fn contains(&self, version: &Version, profile: &str) -> bool {
554        self.artifacts.values().any(|artifacts| {
555            artifacts.get(version).and_then(|artifacts| artifacts.get(profile)).is_some()
556        })
557    }
558
559    /// Iterator that yields all artifact files and their version
560    pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &str, &CachedArtifact)> {
561        self.artifacts
562            .values()
563            .flatten()
564            .flat_map(|(v, a)| a.iter().map(move |(p, a)| (v, p.as_str(), a)))
565    }
566
567    /// Returns the artifact file for the contract and version pair
568    pub fn find_artifact(
569        &self,
570        contract: &str,
571        version: &Version,
572        profile: &str,
573    ) -> Option<&CachedArtifact> {
574        self.artifacts
575            .get(contract)
576            .and_then(|files| files.get(version))
577            .and_then(|files| files.get(profile))
578    }
579
580    /// Iterator that yields all artifact files and their version
581    pub fn artifacts_for_version<'a>(
582        &'a self,
583        version: &'a Version,
584    ) -> impl Iterator<Item = &'a CachedArtifact> + 'a {
585        self.artifacts_versions().filter_map(move |(ver, _, file)| (ver == version).then_some(file))
586    }
587
588    /// Iterator that yields all artifact files
589    pub fn artifacts(&self) -> impl Iterator<Item = &CachedArtifact> {
590        self.artifacts.values().flat_map(BTreeMap::values).flat_map(BTreeMap::values)
591    }
592
593    /// Mutable iterator over all artifact files
594    pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut CachedArtifact> {
595        self.artifacts.values_mut().flat_map(BTreeMap::values_mut).flat_map(BTreeMap::values_mut)
596    }
597
598    /// Checks if all artifact files exist
599    pub fn all_artifacts_exist(&self) -> bool {
600        self.artifacts().all(|a| a.path.exists())
601    }
602
603    /// Sets the artifact's paths to `base` adjoined to the artifact's `path`.
604    pub fn join_artifacts_files(&mut self, base: &Path) {
605        self.artifacts_mut().for_each(|a| a.path = base.join(&a.path))
606    }
607
608    /// Removes `base` from the artifact's path
609    pub fn strip_artifact_files_prefixes(&mut self, base: &Path) {
610        self.artifacts_mut().for_each(|a| {
611            if let Ok(rem) = a.path.strip_prefix(base) {
612                a.path = rem.to_path_buf();
613            }
614        })
615    }
616}
617
/// Collection of source file paths mapped to versions.
#[derive(Clone, Debug, Default)]
pub struct GroupedSources {
    /// For each file, the set of compiler versions it is included with.
    pub inner: HashMap<PathBuf, HashSet<Version>>,
}
623
624impl GroupedSources {
625    /// Inserts provided source and version into the collection.
626    pub fn insert(&mut self, file: PathBuf, version: Version) {
627        match self.inner.entry(file) {
628            hash_map::Entry::Occupied(mut entry) => {
629                entry.get_mut().insert(version);
630            }
631            hash_map::Entry::Vacant(entry) => {
632                entry.insert(HashSet::from([version]));
633            }
634        }
635    }
636
637    /// Returns true if the file was included with the given version.
638    pub fn contains(&self, file: &Path, version: &Version) -> bool {
639        self.inner.get(file).is_some_and(|versions| versions.contains(version))
640    }
641}
642
/// A helper abstraction over the [`CompilerCache`] used to determine what files need to compiled
/// and which `Artifacts` can be reused.
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// The preexisting cache file.
    pub cache: CompilerCache<C::Settings>,

    /// All already existing artifacts.
    pub cached_artifacts: Artifacts<T::Artifact>,

    /// All already existing build infos.
    pub cached_builds: Builds<C::Language>,

    /// Relationship between all the files.
    pub edges: GraphEdges<C::Parser>,

    /// The project.
    pub project: &'a Project<C, T>,

    /// Files that were invalidated and removed from cache.
    /// Those are not grouped by version and purged completely.
    pub dirty_sources: HashSet<PathBuf>,

    /// Artifact+version pairs which are in scope for each solc version.
    ///
    /// Only those files will be included into cached artifacts list for each version.
    pub sources_in_scope: GroupedSources,

    /// The file hashes, memoized by [`Self::content_hash`].
    pub content_hashes: HashMap<PathBuf, String>,

    /// The interface representation hashes for source files, memoized by
    /// [`Self::interface_repr_hash`].
    pub interface_repr_hashes: HashMap<PathBuf, String>,
}
681
682impl<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
683    ArtifactsCacheInner<'_, T, C>
684{
    /// Whether given file is a source file or a test/script file.
    ///
    /// Delegates to the project's path config; used to decide whether an interface representation
    /// hash should be tracked for the file.
    fn is_source_file(&self, file: &Path) -> bool {
        self.project.paths.is_source_file(file)
    }
689
690    /// Creates a new cache entry for the file
691    fn create_cache_entry(&mut self, file: PathBuf, source: &Source) {
692        let imports = self
693            .edges
694            .imports(&file)
695            .into_iter()
696            .map(|import| strip_prefix(import, self.project.root()).into())
697            .collect();
698
699        let interface_repr_hash = (self.cache.preprocessed && self.is_source_file(&file))
700            .then(|| self.interface_repr_hash(source, &file).to_string());
701
702        let entry = CacheEntry {
703            last_modification_date: CacheEntry::read_last_modification_date(&file)
704                .unwrap_or_default(),
705            content_hash: source.content_hash(),
706            interface_repr_hash,
707            source_name: strip_prefix(&file, self.project.root()).into(),
708            imports,
709            version_requirement: self.edges.version_requirement(&file).map(|v| v.to_string()),
710            // artifacts remain empty until we received the compiler output
711            artifacts: Default::default(),
712            seen_by_compiler: false,
713        };
714
715        self.cache.files.insert(file, entry);
716    }
717
    /// Gets or calculates the content hash for the given source file.
    ///
    /// The hash is memoized in `self.content_hashes`, keyed by file path, so hashing is performed
    /// at most once per file.
    fn content_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.content_hashes.entry(file.to_path_buf()).or_insert_with(|| source.content_hash())
    }
722
    /// Gets or calculates the interface representation hash for the given source file.
    ///
    /// Falls back to the plain content hash when an interface representation cannot be computed
    /// for the file. The result is memoized in `self.interface_repr_hashes`.
    fn interface_repr_hash(&mut self, source: &Source, file: &Path) -> &str {
        self.interface_repr_hashes.entry(file.to_path_buf()).or_insert_with(|| {
            // TODO: use `interface_representation_ast` directly with `edges.parser()`.
            if let Some(r) = interface_repr_hash(&source.content, file) {
                return r;
            }
            // Fall back to the content hash, memoizing it as well.
            // Equivalent to: self.content_hash(source, file).into()
            // (the method can't be called here while `interface_repr_hashes` is borrowed)
            self.content_hashes
                .entry(file.to_path_buf())
                .or_insert_with(|| source.content_hash())
                .clone()
        })
    }
737
738    /// Returns the set of [Source]s that need to be compiled to produce artifacts for requested
739    /// input.
740    ///
741    /// Source file may have one of the two [SourceCompilationKind]s:
742    /// 1. [SourceCompilationKind::Complete] - the file has been modified or compiled with different
743    ///    settings and its cache is invalidated. For such sources we request full data needed for
744    ///    artifact construction.
745    /// 2. [SourceCompilationKind::Optimized] - the file is not dirty, but is imported by a dirty
746    ///    file and thus will be processed by solc. For such files we don't need full data, so we
747    ///    are marking them as clean to optimize output selection later.
748    fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
749        // sources that should be passed to compiler.
750        let mut compile_complete = HashSet::new();
751        let mut compile_optimized = HashSet::new();
752
753        for (file, source) in sources.iter() {
754            self.sources_in_scope.insert(file.clone(), version.clone());
755
756            // If we are missing artifact for file, compile it.
757            if self.is_missing_artifacts(file, version, profile) {
758                compile_complete.insert(file.to_path_buf());
759            }
760
761            // Ensure that we have a cache entry for all sources.
762            if !self.cache.files.contains_key(file) {
763                self.create_cache_entry(file.clone(), source);
764            }
765        }
766
767        // Prepare optimization by collecting sources which are imported by files requiring complete
768        // compilation.
769        for source in &compile_complete {
770            for import in self.edges.imports(source) {
771                if !compile_complete.contains(import) {
772                    compile_optimized.insert(import);
773                }
774            }
775        }
776
777        sources.retain(|file, source| {
778            source.kind = if compile_complete.contains(file.as_path()) {
779                SourceCompilationKind::Complete
780            } else if compile_optimized.contains(file.as_path()) {
781                SourceCompilationKind::Optimized
782            } else {
783                return false;
784            };
785            true
786        });
787    }
788
    /// Returns whether we are missing artifacts for the given file and version.
    ///
    /// Thin boolean wrapper over [`Self::is_missing_artifacts_impl`], which carries the
    /// human-readable reason for tracing.
    fn is_missing_artifacts(&self, file: &Path, version: &Version, profile: &str) -> bool {
        self.is_missing_artifacts_impl(file, version, profile).is_err()
    }
793
794    /// Returns whether we are missing artifacts for the given file and version.
795    #[instrument(level = "trace", name = "is_missing_artifacts", skip(self), ret)]
796    fn is_missing_artifacts_impl(
797        &self,
798        file: &Path,
799        version: &Version,
800        profile: &str,
801    ) -> Result<(), &'static str> {
802        let Some(entry) = self.cache.entry(file) else {
803            return Err("missing cache entry");
804        };
805
806        // only check artifact's existence if the file generated artifacts.
807        // e.g. a solidity file consisting only of import statements (like interfaces that
808        // re-export) do not create artifacts
809        if entry.seen_by_compiler && entry.artifacts.is_empty() {
810            return Ok(());
811        }
812
813        if !entry.contains(version, profile) {
814            return Err("missing linked artifacts");
815        }
816
817        if entry
818            .artifacts_for_version(version)
819            .any(|artifact| !self.cached_artifacts.has_artifact(&artifact.path))
820        {
821            return Err("missing artifact");
822        }
823
824        // If any requested extra files are missing for any artifact, mark source as dirty to
825        // generate them
826        if self.missing_extra_files() {
827            return Err("missing extra files");
828        }
829
830        Ok(())
831    }
832
    /// Walks over all cache entries, detects dirty files and removes them from cache.
    fn remove_dirty_sources(&mut self) {
        /// Recursively marks every direct and transitive importer of `file` as dirty.
        fn populate_dirty_files<P: SourceParser>(
            file: &Path,
            dirty_files: &mut HashSet<PathBuf>,
            edges: &GraphEdges<P>,
        ) {
            for file in edges.importers(file) {
                // If file is marked as dirty we either have already visited it or it was marked as
                // dirty initially and will be visited at some point later.
                if !dirty_files.contains(file) {
                    dirty_files.insert(file.to_path_buf());
                    populate_dirty_files(file, dirty_files, edges);
                }
            }
        }

        // Drop cache data for settings profiles that are no longer valid before comparing files.
        self.update_profiles();

        // Iterate over existing cache entries.
        let files = self.cache.files.keys().cloned().collect::<HashSet<_>>();

        let mut sources = Sources::new();

        // Read all sources, marking entries as dirty on I/O errors.
        for file in &files {
            let Ok(source) = Source::read(file) else {
                self.dirty_sources.insert(file.clone());
                continue;
            };
            sources.insert(file.clone(), source);
        }

        // Build a temporary graph for walking imports. We need this because `self.edges`
        // only contains graph data for in-scope sources but we are operating on cache entries.
        if let Ok(graph) = Graph::<C::Parser>::resolve_sources(&self.project.paths, sources) {
            let (sources, edges) = graph.into_sources();

            // Calculate content hashes for later comparison.
            self.fill_hashes(&sources);

            // Pre-add all sources that are guaranteed to be dirty
            for file in sources.keys() {
                if self.is_dirty(file, false) {
                    self.dirty_sources.insert(file.clone());
                }
            }

            if !self.cache.preprocessed {
                // Perform DFS to find direct/indirect importers of dirty files.
                for file in self.dirty_sources.clone().iter() {
                    populate_dirty_files(file, &mut self.dirty_sources, &edges);
                }
            } else {
                // Mark sources as dirty based on their imports
                for file in sources.keys() {
                    if self.dirty_sources.contains(file) {
                        continue;
                    }
                    let is_src = self.is_source_file(file);
                    for import in edges.imports(file) {
                        // Any source file importing dirty source file is dirty.
                        if is_src && self.dirty_sources.contains(import) {
                            self.dirty_sources.insert(file.clone());
                            break;
                        // For non-src files we mark them as dirty only if they import dirty
                        // non-src file or src file for which interface representation changed.
                        // For identified mock contracts (non-src contracts that extends contracts
                        // from src file) we mark edges as dirty.
                        } else if !is_src
                            && self.dirty_sources.contains(import)
                            && (!self.is_source_file(import)
                                || self.is_dirty(import, true)
                                || self.cache.mocks.contains(file))
                        {
                            if self.cache.mocks.contains(file) {
                                // Mark all mock edges as dirty.
                                populate_dirty_files(file, &mut self.dirty_sources, &edges);
                            } else {
                                self.dirty_sources.insert(file.clone());
                            }
                        }
                    }
                }
            }
        } else {
            // Purge all sources on graph resolution error.
            self.dirty_sources.extend(files);
        }

        // Remove all dirty files from cache.
        for file in &self.dirty_sources {
            debug!("removing dirty file from cache: {}", file.display());
            self.cache.remove(file);
        }
    }
929
930    /// Updates the profiles in the cache, removing those which are dirty alongside their artifacts.
931    fn update_profiles(&mut self) {
932        let existing_profiles = self.project.settings_profiles().collect::<BTreeMap<_, _>>();
933
934        let mut dirty_profiles = HashSet::new();
935        for (profile, settings) in &self.cache.profiles {
936            if !existing_profiles.get(profile.as_str()).is_some_and(|p| p.can_use_cached(settings))
937            {
938                dirty_profiles.insert(profile.clone());
939            }
940        }
941
942        for profile in &dirty_profiles {
943            trace!(profile, "removing dirty profile and artifacts");
944            self.cache.profiles.remove(profile);
945        }
946
947        for (profile, settings) in existing_profiles {
948            if !self.cache.profiles.contains_key(profile) {
949                trace!(profile, "adding new profile");
950                self.cache.profiles.insert(profile.to_string(), settings.clone());
951            }
952        }
953
954        self.cache.files.retain(|_, entry| {
955            // keep entries which already had no artifacts
956            if entry.artifacts.is_empty() {
957                return true;
958            }
959            entry.artifacts.retain(|_, artifacts| {
960                artifacts.retain(|_, artifacts| {
961                    artifacts.retain(|profile, _| !dirty_profiles.contains(profile));
962                    !artifacts.is_empty()
963                });
964                !artifacts.is_empty()
965            });
966            !entry.artifacts.is_empty()
967        });
968    }
969
970    fn is_dirty(&self, file: &Path, use_interface_repr: bool) -> bool {
971        self.is_dirty_impl(file, use_interface_repr).is_err()
972    }
973
974    #[instrument(level = "trace", name = "is_dirty", skip(self), ret)]
975    fn is_dirty_impl(&self, file: &Path, use_interface_repr: bool) -> Result<(), &'static str> {
976        let Some(entry) = self.cache.entry(file) else {
977            return Err("missing cache entry");
978        };
979
980        if use_interface_repr && self.cache.preprocessed {
981            let Some(interface_hash) = self.interface_repr_hashes.get(file) else {
982                return Err("missing interface hash");
983            };
984
985            if entry.interface_repr_hash.as_ref() != Some(interface_hash) {
986                return Err("interface hash changed");
987            }
988        } else {
989            let Some(content_hash) = self.content_hashes.get(file) else {
990                return Err("missing content hash");
991            };
992
993            if entry.content_hash != *content_hash {
994                return Err("content hash changed");
995            }
996        }
997
998        // all things match, can be reused
999        Ok(())
1000    }
1001
1002    /// Adds the file's hashes to the set if not set yet
1003    fn fill_hashes(&mut self, sources: &Sources) {
1004        for (file, source) in sources {
1005            let _ = self.content_hash(source, file);
1006
1007            // Fill interface representation hashes for source files
1008            if self.cache.preprocessed && self.project.paths.is_source_file(file) {
1009                let _ = self.interface_repr_hash(source, file);
1010            }
1011        }
1012    }
1013
1014    /// Helper function to check if any requested extra files are missing for any artifact.
1015    fn missing_extra_files(&self) -> bool {
1016        for artifacts in self.cached_artifacts.values() {
1017            for artifacts in artifacts.values() {
1018                for artifact_file in artifacts {
1019                    if self.project.artifacts_handler().is_dirty(artifact_file).unwrap_or(true) {
1020                        return true;
1021                    }
1022                }
1023            }
1024        }
1025        false
1026    }
1027}
1028
/// Abstraction over configured caching which can be either non-existent or an already loaded cache
///
/// Constructed via [`ArtifactsCache::new`]: projects with caching disabled get the
/// [`Self::Ephemeral`] variant, all others operate on [`Self::Cached`].
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<
    'a,
    T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    C: Compiler,
> {
    /// Cache nothing on disk; only carries the resolved graph and the project reference
    Ephemeral(GraphEdges<C::Parser>, &'a Project<C, T>),
    /// Handles the actual cached artifacts, detects artifacts that can be reused
    Cached(ArtifactsCacheInner<'a, T, C>),
}
1042
impl<'a, T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>
    ArtifactsCache<'a, T, C>
{
    /// Create a new cache instance with the given files
    #[instrument(name = "ArtifactsCache::new", skip(project, edges))]
    pub fn new(
        project: &'a Project<C, T>,
        edges: GraphEdges<C::Parser>,
        preprocessed: bool,
    ) -> Result<Self> {
        /// Returns the [CompilerCache] to use
        ///
        /// Returns a new empty cache if the cache does not exist or `invalidate_cache` is set.
        fn get_cache<T: ArtifactOutput<CompilerContract = C::CompilerContract>, C: Compiler>(
            project: &Project<C, T>,
            invalidate_cache: bool,
            preprocessed: bool,
        ) -> CompilerCache<C::Settings> {
            // the currently configured paths
            let paths = project.paths.paths_relative();

            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = CompilerCache::read_joined(&project.paths) {
                    if cache.paths == paths && preprocessed == cache.preprocessed {
                        // unchanged project paths and same preprocess cache option
                        return cache;
                    }
                }
            }

            trace!(invalidate_cache, "cache invalidated");

            // new empty cache
            CompilerCache::new(Default::default(), paths, preprocessed)
        }

        let cache = if project.cached {
            // we only read the existing cache if we were able to resolve the entire graph
            // if we failed to resolve an import we invalidate the cache so don't get any false
            // positives
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            // read the cache file if it already exists
            let mut cache = get_cache(project, invalidate_cache, preprocessed);

            // drop cache entries for files that no longer exist on disk
            cache.remove_missing_files();

            // read all artifacts
            let mut cached_artifacts = if project.paths.artifacts.exists() {
                trace!("reading artifacts from cache...");
                // if we failed to read the whole set of artifacts we use an empty set
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            trace!("reading build infos from cache...");
            let cached_builds = cache.read_builds(&project.paths.build_infos).unwrap_or_default();

            // Remove artifacts for which we are missing a build info.
            cached_artifacts.0.retain(|_, artifacts| {
                artifacts.retain(|_, artifacts| {
                    artifacts.retain(|artifact| cached_builds.contains_key(&artifact.build_id));
                    !artifacts.is_empty()
                });
                !artifacts.is_empty()
            });

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                cached_builds,
                edges,
                project,
                dirty_sources: Default::default(),
                content_hashes: Default::default(),
                sources_in_scope: Default::default(),
                interface_repr_hashes: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            // nothing to cache
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    /// Returns the graph data for this project
    pub fn graph(&self) -> &GraphEdges<C::Parser> {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    // only useful for debugging purposes
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T, C>> {
        match self {
            ArtifactsCache::Ephemeral(..) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    /// Returns the [OutputContext] built from the underlying cache, or an empty one for
    /// ephemeral caches.
    pub fn output_ctx(&self) -> OutputContext<'_> {
        match self {
            ArtifactsCache::Ephemeral(..) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    /// Returns the project this cache operates on.
    pub fn project(&self) -> &'a Project<C, T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    /// Detects dirty sources and removes them, alongside their artifacts, from the
    /// underlying cache. No-op for ephemeral caches.
    #[instrument(skip_all)]
    pub fn remove_dirty_sources(&mut self) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.remove_dirty_sources(),
        }
    }

    /// Updates files with mock contracts identified in preprocess phase.
    pub fn update_mocks(&mut self, mocks: HashSet<PathBuf>) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.cache.mocks = mocks,
        }
    }

    /// Returns the set of files with mock contracts currently in cache.
    /// This set is passed to preprocessors and updated accordingly.
    /// Cache is then updated by using `update_mocks` call.
    pub fn mocks(&self) -> HashSet<PathBuf> {
        match self {
            ArtifactsCache::Ephemeral(..) => HashSet::default(),
            ArtifactsCache::Cached(cache) => cache.cache.mocks.clone(),
        }
    }

    /// Filters out those sources that don't need to be compiled
    #[instrument(name = "ArtifactsCache::filter", skip_all)]
    pub fn filter(&mut self, sources: &mut Sources, version: &Version, profile: &str) {
        match self {
            ArtifactsCache::Ephemeral(..) => {}
            ArtifactsCache::Cached(cache) => cache.filter(sources, version, profile),
        }
    }

    /// Consumes the `Cache`, rebuilds the `SolFileCache` by merging all artifacts that were
    /// filtered out in the previous step (`Cache::filtered`) and the artifacts that were just
    /// compiled and written to disk `written_artifacts`.
    ///
    /// Returns all the _cached_ artifacts.
    #[instrument(name = "ArtifactsCache::consume", skip_all)]
    #[allow(clippy::type_complexity)]
    pub fn consume<A>(
        self,
        written_artifacts: &Artifacts<A>,
        written_build_infos: &Vec<RawBuildInfo<C::Language>>,
        write_to_disk: bool,
    ) -> Result<(Artifacts<A>, Builds<C::Language>, GraphEdges<C::Parser>)>
    where
        T: ArtifactOutput<Artifact = A>,
    {
        let cache = match self {
            ArtifactsCache::Ephemeral(edges, _project) => {
                trace!("no cache configured, ephemeral");
                return Ok((Default::default(), Default::default(), edges));
            }
            ArtifactsCache::Cached(cache) => cache,
        };

        let ArtifactsCacheInner {
            mut cache,
            mut cached_artifacts,
            cached_builds,
            dirty_sources,
            sources_in_scope,
            project,
            edges,
            content_hashes: _,
            interface_repr_hashes: _,
        } = cache;

        // Remove cached artifacts which are out of scope, dirty or appear in `written_artifacts`.
        cached_artifacts.0.retain(|file, artifacts| {
            let file = Path::new(file);
            artifacts.retain(|name, artifacts| {
                artifacts.retain(|artifact| {
                    let version = &artifact.version;

                    if !sources_in_scope.contains(file, version) {
                        return false;
                    }
                    if dirty_sources.contains(file) {
                        return false;
                    }
                    // freshly written artifacts take precedence over cached ones
                    if written_artifacts.find_artifact(file, name, version).is_some() {
                        return false;
                    }
                    true
                });
                !artifacts.is_empty()
            });
            !artifacts.is_empty()
        });

        // Update cache entries with newly written artifacts. We update data for any artifacts as
        // `written_artifacts` always contain the most recent data.
        for (file, artifacts) in written_artifacts.as_ref() {
            let file_path = Path::new(file);
            // Only update data for existing entries, we should have entries for all in-scope files
            // by now.
            if let Some(entry) = cache.files.get_mut(file_path) {
                entry.merge_artifacts(artifacts);
            }
        }

        for build_info in written_build_infos {
            cache.builds.insert(build_info.id.clone());
        }

        // write to disk
        if write_to_disk {
            cache.remove_outdated_builds();
            // make all `CacheEntry` paths relative to the project root and all artifact
            // paths relative to the artifact's directory
            cache
                .strip_entries_prefix(project.root())
                .strip_artifact_files_prefixes(project.artifacts_path());
            cache.write(project.cache_path())?;
        }

        Ok((cached_artifacts, cached_builds, edges))
    }

    /// Marks the cached entry as seen by the compiler, if it's cached.
    pub fn compiler_seen(&mut self, file: &Path) {
        if let ArtifactsCache::Cached(cache) = self {
            if let Some(entry) = cache.cache.entry_mut(file) {
                entry.seen_by_compiler = true;
            }
        }
    }
}