foundry_compilers_artifacts_solc/
sources.rs

use foundry_compilers_core::error::SolcIoError;
use serde::{Deserialize, Serialize};
use std::{
    collections::BTreeMap,
    fs,
    path::{Path, PathBuf},
    sync::Arc,
};
use tracing::{instrument, trace};

#[cfg(feature = "walkdir")]
use foundry_compilers_core::utils;

type SourcesInner = BTreeMap<PathBuf, Source>;

/// An ordered map of source files to their content.
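///
/// # Examples
///
/// A minimal sketch of building a [`Sources`] map from in-memory content (assuming the crate
/// is available as `foundry_compilers_artifacts_solc` and re-exports these types at its root;
/// the path and contract body are illustrative):
///
/// ```ignore
/// use foundry_compilers_artifacts_solc::{Source, Sources};
/// use std::path::PathBuf;
///
/// // Illustrative path and contract body.
/// let sources: Sources =
///     [(PathBuf::from("src/Counter.sol"), Source::new("contract Counter {}"))]
///         .into_iter()
///         .collect();
/// assert!(sources.all_dirty());
/// assert_eq!(sources.dirty_files().count(), 1);
/// ```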
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Sources(pub SourcesInner);

impl Sources {
    /// Returns a new instance of [Sources].
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns `true` if no sources should have optimized output selection.
    pub fn all_dirty(&self) -> bool {
        self.0.values().all(|s| s.is_dirty())
    }

    /// Returns all entries that should not be optimized.
    pub fn dirty(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| s.is_dirty())
    }

    /// Returns all entries that should be optimized.
    pub fn clean(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| !s.is_dirty())
    }

    /// Returns all files that should not be optimized.
    pub fn dirty_files(&self) -> impl Iterator<Item = &PathBuf> + '_ {
        self.dirty().map(|(k, _)| k)
    }
}

impl std::ops::Deref for Sources {
    type Target = SourcesInner;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for Sources {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<I> From<I> for Sources
where
    SourcesInner: From<I>,
{
    fn from(value: I) -> Self {
        Self(From::from(value))
    }
}

impl<I> FromIterator<I> for Sources
where
    SourcesInner: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}

impl IntoIterator for Sources {
    type Item = <SourcesInner as IntoIterator>::Item;
    type IntoIter = <SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a> IntoIterator for &'a Sources {
    type Item = <&'a SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a> IntoIterator for &'a mut Sources {
    type Item = <&'a mut SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a mut SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

/// Content of a Solidity file.
///
/// This contains the actual source code of the file.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
    /// Content of the file.
    ///
    /// This is an `Arc` because it may be cloned: if the project's graph contains multiple
    /// conflicting versions, the same [Source] may be required by each of them and would
    /// otherwise have to be duplicated.
    pub content: Arc<String>,
    #[serde(skip, default)]
    pub kind: SourceCompilationKind,
}

impl Source {
    /// Creates a new instance of [Source] with the given content.
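    ///
    /// # Examples
    ///
    /// A minimal sketch (the crate path and contract body are illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::{Source, SourceCompilationKind};
    ///
    /// let source = Source::new("contract Counter {}");
    /// assert_eq!(source.kind, SourceCompilationKind::Complete);
    /// assert!(source.is_dirty());
    /// ```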
    pub fn new(content: impl Into<String>) -> Self {
        Self { content: Arc::new(content.into()), kind: SourceCompilationKind::Complete }
    }

    /// Reads the file's content
    #[instrument(name = "read_source", level = "debug", skip_all, err)]
    pub fn read(file: &Path) -> Result<Self, SolcIoError> {
        trace!(file=%file.display());
        let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Returns `true` if the source should be compiled with full output selection.
    pub fn is_dirty(&self) -> bool {
        self.kind.is_dirty()
    }

    /// Recursively finds all source files under the given dir path and reads them all
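    ///
    /// A minimal sketch (requires the `walkdir` feature; the directory and extension are
    /// illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    /// use std::path::Path;
    ///
    /// // Illustrative directory and extension list.
    /// let sources = Source::read_all_from(Path::new("contracts"), &["sol"])
    ///     .expect("failed to read sources");
    /// println!("read {} files", sources.len());
    /// ```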
    #[cfg(feature = "walkdir")]
    pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result<Sources, SolcIoError> {
        Self::read_all_files(utils::source_files(dir, extensions))
    }

    /// Recursively finds all Solidity and Yul files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub fn read_sol_yul_from(dir: &Path) -> Result<Sources, SolcIoError> {
        Self::read_all_from(dir, utils::SOLC_EXTENSIONS)
    }

    /// Reads all source files in the given vec.
    ///
    /// This currently delegates to [`Self::read_all`]; see `Self::par_read_all` (behind the
    /// `rayon` feature) for a parallelized alternative.
    pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
        Self::read_all(files)
    }

    /// Reads all files
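    ///
    /// A minimal sketch (the paths are illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Illustrative paths.
    /// let sources = Source::read_all(["src/A.sol", "src/B.sol"]).expect("failed to read sources");
    /// for (path, source) in &sources {
    ///     println!("{}: {} bytes", path.display(), source.content.len());
    /// }
    /// ```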
    pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        files
            .into_iter()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect()
    }

    /// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
    ///
    /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
    /// contains at least several paths or the files are rather large.
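    ///
    /// A minimal sketch (requires the `rayon` feature; the paths are illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Illustrative paths.
    /// let files = vec!["src/A.sol", "src/B.sol", "src/C.sol"];
    /// let sources = Source::par_read_all(files).expect("failed to read sources");
    /// assert_eq!(sources.len(), 3);
    /// ```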
    #[cfg(feature = "rayon")]
    pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        <I as IntoIterator>::IntoIter: Send,
        T: Into<PathBuf> + Send,
    {
        use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
        files
            .into_iter()
            .par_bridge()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect::<Result<BTreeMap<_, _>, _>>()
            .map(Sources)
    }

    /// Generate a non-cryptographically secure checksum of the file's content.
    #[cfg(feature = "checksum")]
    pub fn content_hash(&self) -> String {
        Self::content_hash_of(&self.content)
    }

    /// Generate a non-cryptographically secure checksum of the given source.
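    ///
    /// A minimal sketch (requires the `checksum` feature):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // The checksum is deterministic: identical content yields identical hashes.
    /// let a = Source::content_hash_of("contract A {}");
    /// let b = Source::content_hash_of("contract A {}");
    /// assert_eq!(a, b);
    /// ```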
    #[cfg(feature = "checksum")]
    pub fn content_hash_of(src: &str) -> String {
        foundry_compilers_core::utils::unique_hash(src)
    }
}

#[cfg(feature = "async")]
impl Source {
    /// async version of `Self::read`
    #[instrument(name = "async_read_source", level = "debug", skip_all, err)]
    pub async fn async_read(file: &Path) -> Result<Self, SolcIoError> {
        let mut content =
            tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Finds all source files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub async fn async_read_all_from(
        dir: &Path,
        extensions: &[&str],
    ) -> Result<Sources, SolcIoError> {
        Self::async_read_all(utils::source_files(dir, extensions)).await
    }

    /// async version of `Self::read_all`
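    ///
    /// A minimal sketch (requires the `async` feature and an async runtime such as Tokio;
    /// the paths are illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::{Source, Sources};
    /// use foundry_compilers_core::error::SolcIoError;
    ///
    /// async fn load() -> Result<Sources, SolcIoError> {
    ///     // Illustrative paths.
    ///     Source::async_read_all(["src/A.sol", "src/B.sol"]).await
    /// }
    /// ```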
    pub async fn async_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        futures_util::future::join_all(
            files
                .into_iter()
                .map(Into::into)
                .map(|file| async { Self::async_read(&file).await.map(|source| (file, source)) }),
        )
        .await
        .into_iter()
        .collect()
    }
}

impl AsRef<str> for Source {
    fn as_ref(&self) -> &str {
        &self.content
    }
}

impl AsRef<[u8]> for Source {
    fn as_ref(&self) -> &[u8] {
        self.content.as_bytes()
    }
}

/// Represents the state of a filtered [`Source`].
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum SourceCompilationKind {
    /// We need a complete compilation output for the source.
    #[default]
    Complete,
    /// A source for which we don't need a complete output and want to optimize its compilation by
    /// reducing output selection.
    Optimized,
}

impl SourceCompilationKind {
    /// Whether this file should be compiled with full output selection
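    ///
    /// A minimal sketch (the crate path is illustrative):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::SourceCompilationKind;
    ///
    /// assert!(SourceCompilationKind::Complete.is_dirty());
    /// assert!(!SourceCompilationKind::Optimized.is_dirty());
    /// ```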
    pub fn is_dirty(&self) -> bool {
        matches!(self, Self::Complete)
    }
}