foundry_compilers_artifacts_solc — sources.rs

1use foundry_compilers_core::error::{SolcError, SolcIoError};
2use serde::{Deserialize, Serialize};
3use std::{
4    collections::BTreeMap,
5    fs,
6    path::{Path, PathBuf},
7    sync::Arc,
8};
9
10#[cfg(feature = "walkdir")]
11use foundry_compilers_core::utils;
12
/// Underlying map of [`Sources`]: file path -> source contents, ordered by path
/// (`BTreeMap` keeps iteration deterministic).
type SourcesInner = BTreeMap<PathBuf, Source>;
14
/// An ordered list of files and their source.
///
/// Backed by a `BTreeMap`, so entries iterate in deterministic path order.
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Sources(pub SourcesInner);
18
19impl Sources {
20    /// Returns a new instance of [Sources].
21    pub fn new() -> Self {
22        Self::default()
23    }
24
25    /// Joins all paths relative to `root`.
26    pub fn make_absolute(&mut self, root: &Path) {
27        self.0 = std::mem::take(&mut self.0)
28            .into_iter()
29            .map(|(path, source)| (root.join(path), source))
30            .collect();
31    }
32
33    /// Returns `true` if no sources should have optimized output selection.
34    pub fn all_dirty(&self) -> bool {
35        self.0.values().all(|s| s.is_dirty())
36    }
37
38    /// Returns all entries that should not be optimized.
39    pub fn dirty(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
40        self.0.iter().filter(|(_, s)| s.is_dirty())
41    }
42
43    /// Returns all entries that should be optimized.
44    pub fn clean(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
45        self.0.iter().filter(|(_, s)| !s.is_dirty())
46    }
47
48    /// Returns all files that should not be optimized.
49    pub fn dirty_files(&self) -> impl Iterator<Item = &PathBuf> + '_ {
50        self.dirty().map(|(k, _)| k)
51    }
52}
53
/// Read-only access to the inner map so all `BTreeMap` methods are available on `Sources`.
impl std::ops::Deref for Sources {
    type Target = SourcesInner;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Mutable access to the inner map.
impl std::ops::DerefMut for Sources {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

/// Conversion from anything the inner map can be built from.
impl<I> From<I> for Sources
where
    SourcesInner: From<I>,
{
    fn from(value: I) -> Self {
        Self(From::from(value))
    }
}

/// Collects anything the inner map collects (e.g. `(PathBuf, Source)` pairs) into `Sources`.
impl<I> FromIterator<I> for Sources
where
    SourcesInner: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}

/// Consuming iteration over owned `(PathBuf, Source)` entries.
impl IntoIterator for Sources {
    type Item = <SourcesInner as IntoIterator>::Item;
    type IntoIter = <SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

/// Borrowing iteration, enabling `for (path, source) in &sources`.
impl<'a> IntoIterator for &'a Sources {
    type Item = <&'a SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

/// Mutably-borrowing iteration, enabling `for (path, source) in &mut sources`.
impl<'a> IntoIterator for &'a mut Sources {
    type Item = <&'a mut SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a mut SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}
112
/// Content of a solidity file
///
/// This contains the actual source code of a file
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
    /// Content of the file
    ///
    /// This is an `Arc` because it may be cloned. If the graph of the project contains multiple
    /// conflicting versions then the same [Source] may be required by conflicting versions and
    /// needs to be duplicated.
    pub content: Arc<String>,
    /// How this source should be compiled (complete vs. reduced output selection).
    ///
    /// Skipped by serde: not serialized, and restored as the default
    /// ([`SourceCompilationKind::Complete`]) on deserialization.
    #[serde(skip, default)]
    pub kind: SourceCompilationKind,
}
127
128impl Source {
129    /// Creates a new instance of [Source] with the given content.
130    pub fn new(content: impl Into<String>) -> Self {
131        Self { content: Arc::new(content.into()), kind: SourceCompilationKind::Complete }
132    }
133
134    /// Reads the file's content
135    #[instrument(name = "Source::read", skip_all, err)]
136    pub fn read(file: &Path) -> Result<Self, SolcIoError> {
137        trace!(file=%file.display());
138        let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?;
139
140        // Normalize line endings to ensure deterministic metadata.
141        if content.contains('\r') {
142            content = content.replace("\r\n", "\n");
143        }
144
145        Ok(Self::new(content))
146    }
147
148    /// [`read`](Self::read) + mapping error to [`SolcError`].
149    pub fn read_(file: &Path) -> Result<Self, SolcError> {
150        Self::read(file).map_err(|err| {
151            let exists = err.path().exists();
152            if !exists && err.path().is_symlink() {
153                return SolcError::ResolveBadSymlink(err);
154            }
155
156            // This is an additional check useful on OS that have case-sensitive paths,
157            // see also <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>
158            // check if there exists a file with different case
159            #[cfg(feature = "walkdir")]
160            if !exists
161                && let Some(existing_file) =
162                    foundry_compilers_core::utils::find_case_sensitive_existing_file(file)
163            {
164                return SolcError::ResolveCaseSensitiveFileName { error: err, existing_file };
165            }
166
167            SolcError::Resolve(err)
168        })
169    }
170
171    /// Returns `true` if the source should be compiled with full output selection.
172    pub const fn is_dirty(&self) -> bool {
173        self.kind.is_dirty()
174    }
175
176    /// Recursively finds all source files under the given dir path and reads them all
177    #[cfg(feature = "walkdir")]
178    pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result<Sources, SolcIoError> {
179        Self::read_all(utils::source_files_iter(dir, extensions))
180    }
181
182    /// Recursively finds all solidity and yul files under the given dir path and reads them all
183    #[cfg(feature = "walkdir")]
184    pub fn read_sol_yul_from(dir: &Path) -> Result<Sources, SolcIoError> {
185        Self::read_all_from(dir, utils::SOLC_EXTENSIONS)
186    }
187
188    /// Reads all source files of the given list.
189    pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
190        Self::read_all(files)
191    }
192
193    /// Reads all of the given files.
194    #[instrument(name = "Source::read_all", skip_all)]
195    pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
196    where
197        I: IntoIterator<Item = T>,
198        T: Into<PathBuf>,
199    {
200        files
201            .into_iter()
202            .map(Into::into)
203            .map(|file| Self::read(&file).map(|source| (file, source)))
204            .collect()
205    }
206
207    /// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
208    ///
209    /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
210    /// contains at least several paths or the files are rather large.
211    #[cfg(feature = "rayon")]
212    pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
213    where
214        I: IntoIterator<Item = T>,
215        <I as IntoIterator>::IntoIter: Send,
216        T: Into<PathBuf> + Send,
217    {
218        use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
219        files
220            .into_iter()
221            .par_bridge()
222            .map(Into::into)
223            .map(|file| Self::read(&file).map(|source| (file, source)))
224            .collect::<Result<BTreeMap<_, _>, _>>()
225            .map(Sources)
226    }
227
228    /// Generate a non-cryptographically secure checksum of the file's content.
229    #[cfg(feature = "checksum")]
230    pub fn content_hash(&self) -> String {
231        Self::content_hash_of(&self.content)
232    }
233
234    /// Generate a non-cryptographically secure checksum of the given source.
235    #[cfg(feature = "checksum")]
236    pub fn content_hash_of(src: &str) -> String {
237        foundry_compilers_core::utils::unique_hash(src)
238    }
239}
240
#[cfg(feature = "async")]
impl Source {
    /// async version of `Self::read`
    #[instrument(name = "Source::async_read", skip_all, err)]
    pub async fn async_read(file: &Path) -> Result<Self, SolcIoError> {
        let raw =
            tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?;

        // Normalize CRLF line endings to ensure deterministic metadata.
        let content = if raw.contains('\r') { raw.replace("\r\n", "\n") } else { raw };

        Ok(Self::new(content))
    }

    /// Finds all source files with the given extensions under `dir` and reads them concurrently.
    #[cfg(feature = "walkdir")]
    pub async fn async_read_all_from(
        dir: &Path,
        extensions: &[&str],
    ) -> Result<Sources, SolcIoError> {
        Self::async_read_all(utils::source_files(dir, extensions)).await
    }

    /// async version of `Self::read_all`: reads every file concurrently, failing on the first
    /// I/O error.
    pub async fn async_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        let reads = files.into_iter().map(Into::into).map(|file| async move {
            Self::async_read(&file).await.map(|source| (file, source))
        });
        // `join_all` preserves input order; collecting short-circuits on the first `Err`.
        futures_util::future::join_all(reads).await.into_iter().collect()
    }
}
283
/// Borrows the source text as a string slice.
impl AsRef<str> for Source {
    fn as_ref(&self) -> &str {
        &self.content
    }
}

/// Borrows the source text as raw bytes.
impl AsRef<[u8]> for Source {
    fn as_ref(&self) -> &[u8] {
        self.content.as_bytes()
    }
}
295
/// Represents the state of a filtered [`Source`].
///
/// `Complete` sources are considered "dirty"; `Optimized` sources are "clean".
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum SourceCompilationKind {
    /// We need a complete compilation output for the source.
    #[default]
    Complete,
    /// A source for which we don't need a complete output and want to optimize its compilation by
    /// reducing output selection.
    Optimized,
}
306
307impl SourceCompilationKind {
308    /// Whether this file should be compiled with full output selection
309    pub const fn is_dirty(&self) -> bool {
310        matches!(self, Self::Complete)
311    }
312}