foundry_compilers_artifacts_solc/
sources.rs

use foundry_compilers_core::error::{SolcError, SolcIoError};
use serde::{Deserialize, Serialize};
use std::{
    collections::BTreeMap,
    fs,
    path::{Path, PathBuf},
    sync::Arc,
};

#[cfg(feature = "walkdir")]
use foundry_compilers_core::utils;

type SourcesInner = BTreeMap<PathBuf, Source>;

/// An ordered map of files to their source content.
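///
/// # Examples
///
/// A minimal usage sketch; the import path assumes `Sources` and `Source` are re-exported at the
/// crate root, which may differ in your build:
///
/// ```ignore
/// use foundry_compilers_artifacts_solc::{Source, Sources};
/// use std::path::{Path, PathBuf};
///
/// // Build the ordered map from (path, content) pairs.
/// let mut sources: Sources = [
///     (PathBuf::from("src/A.sol"), Source::new("contract A {}")),
///     (PathBuf::from("src/B.sol"), Source::new("contract B {}")),
/// ]
/// .into_iter()
/// .collect();
///
/// // Rewrite the keys as absolute paths under the project root.
/// sources.make_absolute(Path::new("/project"));
///
/// // Fresh sources default to `SourceCompilationKind::Complete`, i.e. they are "dirty".
/// assert!(sources.all_dirty());
/// assert_eq!(sources.dirty_files().count(), 2);
/// ```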
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Sources(pub SourcesInner);

impl Sources {
    /// Returns a new instance of [Sources].
    pub fn new() -> Self {
        Self::default()
    }

    /// Joins all paths relative to `root`.
    pub fn make_absolute(&mut self, root: &Path) {
        self.0 = std::mem::take(&mut self.0)
            .into_iter()
            .map(|(path, source)| (root.join(path), source))
            .collect();
    }

    /// Returns `true` if no sources should have optimized output selection.
    pub fn all_dirty(&self) -> bool {
        self.0.values().all(|s| s.is_dirty())
    }

    /// Returns all entries that should not be optimized.
    pub fn dirty(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| s.is_dirty())
    }

    /// Returns all entries that should be optimized.
    pub fn clean(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| !s.is_dirty())
    }

    /// Returns all files that should not be optimized.
    pub fn dirty_files(&self) -> impl Iterator<Item = &PathBuf> + '_ {
        self.dirty().map(|(k, _)| k)
    }
}

impl std::ops::Deref for Sources {
    type Target = SourcesInner;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for Sources {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<I> From<I> for Sources
where
    SourcesInner: From<I>,
{
    fn from(value: I) -> Self {
        Self(From::from(value))
    }
}

impl<I> FromIterator<I> for Sources
where
    SourcesInner: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}

impl IntoIterator for Sources {
    type Item = <SourcesInner as IntoIterator>::Item;
    type IntoIter = <SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a> IntoIterator for &'a Sources {
    type Item = <&'a SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a> IntoIterator for &'a mut Sources {
    type Item = <&'a mut SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a mut SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

/// Content of a Solidity file.
///
/// This contains the actual source code of a file.
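///
/// # Examples
///
/// A small sketch of how cloning shares the underlying content via [`Arc`] (the import path is
/// an assumption; adjust to wherever [`Source`] is exported in your build):
///
/// ```ignore
/// use foundry_compilers_artifacts_solc::Source;
/// use std::sync::Arc;
///
/// let a = Source::new("contract A {}");
/// let b = a.clone();
/// // Cloning a `Source` does not copy the file contents; both handles point at the same string.
/// assert!(Arc::ptr_eq(&a.content, &b.content));
/// ```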
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
    /// Content of the file.
    ///
    /// This is an `Arc` because it may be cloned: if the project graph contains multiple
    /// conflicting versions, the same [Source] may be required by each of them, and the `Arc`
    /// lets it be shared instead of duplicated.
    pub content: Arc<String>,
    #[serde(skip, default)]
    pub kind: SourceCompilationKind,
}

impl Source {
    /// Creates a new instance of [Source] with the given content.
    pub fn new(content: impl Into<String>) -> Self {
        Self { content: Arc::new(content.into()), kind: SourceCompilationKind::Complete }
    }

    /// Reads the file's content.
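    ///
    /// # Examples
    ///
    /// A minimal sketch; the import path and the file path are placeholders, not guarantees:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    /// use std::path::Path;
    ///
    /// // `\r\n` line endings are normalized to `\n`, keeping downstream metadata deterministic.
    /// let source = Source::read(Path::new("src/Counter.sol")).expect("file is readable");
    /// assert!(source.is_dirty());
    /// ```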
    #[instrument(name = "Source::read", skip_all, err)]
    pub fn read(file: &Path) -> Result<Self, SolcIoError> {
        trace!(file=%file.display());
        let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Same as [`read`](Self::read), but maps the error to a [`SolcError`].
    pub fn read_(file: &Path) -> Result<Self, SolcError> {
        Self::read(file).map_err(|err| {
            let exists = err.path().exists();
            if !exists && err.path().is_symlink() {
                return SolcError::ResolveBadSymlink(err);
            }

            // This is an additional check that is useful on operating systems with
            // case-sensitive paths; see also
            // <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>.
            // Check whether a file with a different casing exists.
            #[cfg(feature = "walkdir")]
            if !exists {
                if let Some(existing_file) =
                    foundry_compilers_core::utils::find_case_sensitive_existing_file(file)
                {
                    return SolcError::ResolveCaseSensitiveFileName { error: err, existing_file };
                }
            }

            SolcError::Resolve(err)
        })
    }

    /// Returns `true` if the source should be compiled with full output selection.
    pub fn is_dirty(&self) -> bool {
        self.kind.is_dirty()
    }

    /// Recursively finds all source files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result<Sources, SolcIoError> {
        Self::read_all(utils::source_files_iter(dir, extensions))
    }

    /// Recursively finds all Solidity and Yul files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub fn read_sol_yul_from(dir: &Path) -> Result<Sources, SolcIoError> {
        Self::read_all_from(dir, utils::SOLC_EXTENSIONS)
    }

    /// Reads all source files of the given list.
    pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
        Self::read_all(files)
    }

    /// Reads all of the given files.
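    ///
    /// # Examples
    ///
    /// A brief sketch; the paths and the import are placeholders rather than files shipped with
    /// this crate:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Any iterator of path-like items works, since `T: Into<PathBuf>`.
    /// let sources = Source::read_all(["src/A.sol", "src/B.sol"]).expect("files are readable");
    /// assert_eq!(sources.len(), 2);
    /// ```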
    #[instrument(name = "Source::read_all", skip_all)]
    pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        files
            .into_iter()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect()
    }

    /// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
    ///
    /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
    /// contains at least several paths or the files are rather large.
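    ///
    /// # Examples
    ///
    /// A sketch of the call shape, assuming the `rayon` feature is enabled and the paths exist:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Files are read on rayon's thread pool and then collected into the ordered map.
    /// let sources = Source::par_read_all(["src/A.sol", "src/B.sol"]).expect("files are readable");
    /// assert_eq!(sources.len(), 2);
    /// ```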
    #[cfg(feature = "rayon")]
    pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        <I as IntoIterator>::IntoIter: Send,
        T: Into<PathBuf> + Send,
    {
        use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
        files
            .into_iter()
            .par_bridge()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect::<Result<BTreeMap<_, _>, _>>()
            .map(Sources)
    }

    /// Generate a non-cryptographically secure checksum of the file's content.
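    ///
    /// # Examples
    ///
    /// A sketch of using the hash for change detection, assuming the `checksum` feature is
    /// enabled (import path assumed):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// let before = Source::new("contract A {}").content_hash();
    /// let after = Source::new("contract A { uint256 x; }").content_hash();
    /// // Different content yields a different checksum, which is enough to invalidate caches.
    /// assert_ne!(before, after);
    /// ```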
    #[cfg(feature = "checksum")]
    pub fn content_hash(&self) -> String {
        Self::content_hash_of(&self.content)
    }

    /// Generate a non-cryptographically secure checksum of the given source.
    #[cfg(feature = "checksum")]
    pub fn content_hash_of(src: &str) -> String {
        foundry_compilers_core::utils::unique_hash(src)
    }
}

#[cfg(feature = "async")]
impl Source {
    /// async version of `Self::read`
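    ///
    /// # Examples
    ///
    /// A sketch that assumes the `async` feature, a tokio runtime, and a placeholder path:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    /// use std::path::Path;
    ///
    /// # async fn demo() -> Result<(), foundry_compilers_core::error::SolcIoError> {
    /// // The file is read asynchronously via `tokio::fs`; line endings are normalized as in `read`.
    /// let source = Source::async_read(Path::new("src/Counter.sol")).await?;
    /// assert!(!source.content.is_empty());
    /// # Ok(())
    /// # }
    /// ```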
    #[instrument(name = "Source::async_read", skip_all, err)]
    pub async fn async_read(file: &Path) -> Result<Self, SolcIoError> {
        let mut content =
            tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Finds all source files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub async fn async_read_all_from(
        dir: &Path,
        extensions: &[&str],
    ) -> Result<Sources, SolcIoError> {
        Self::async_read_all(utils::source_files(dir, extensions)).await
    }

    /// async version of `Self::read_all`
    pub async fn async_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        futures_util::future::join_all(
            files
                .into_iter()
                .map(Into::into)
                .map(|file| async { Self::async_read(&file).await.map(|source| (file, source)) }),
        )
        .await
        .into_iter()
        .collect()
    }
}

impl AsRef<str> for Source {
    fn as_ref(&self) -> &str {
        &self.content
    }
}

impl AsRef<[u8]> for Source {
    fn as_ref(&self) -> &[u8] {
        self.content.as_bytes()
    }
}

/// Represents the state of a filtered [`Source`].
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum SourceCompilationKind {
    /// We need a complete compilation output for the source.
    #[default]
    Complete,
    /// A source for which we don't need a complete output and want to optimize its compilation by
    /// reducing output selection.
    Optimized,
}

impl SourceCompilationKind {
    /// Whether this file should be compiled with full output selection
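    ///
    /// # Examples
    ///
    /// A small illustrative check (import path assumed):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::SourceCompilationKind;
    ///
    /// // `Complete` sources are "dirty" and get full output; `Optimized` ones get a reduced selection.
    /// assert!(SourceCompilationKind::Complete.is_dirty());
    /// assert!(!SourceCompilationKind::Optimized.is_dirty());
    /// ```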
    pub fn is_dirty(&self) -> bool {
        matches!(self, Self::Complete)
    }
}