foundry_compilers_artifacts_solc/
sources.rs

use foundry_compilers_core::error::SolcIoError;
use serde::{Deserialize, Serialize};
use std::{
    collections::BTreeMap,
    fs,
    path::{Path, PathBuf},
    sync::Arc,
};

#[cfg(feature = "walkdir")]
use foundry_compilers_core::utils;

type SourcesInner = BTreeMap<PathBuf, Source>;

/// An ordered map of source files and their content.
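///
/// # Examples
///
/// A minimal sketch of building and filtering a set of sources; the import path below is an
/// assumption and the file paths are hypothetical:
///
/// ```ignore
/// use std::path::PathBuf;
///
/// use foundry_compilers_artifacts_solc::{Source, Sources};
///
/// // Collect sources from an iterator of `(PathBuf, Source)` pairs.
/// let sources: Sources = [
///     (PathBuf::from("src/A.sol"), Source::new("contract A {}")),
///     (PathBuf::from("src/B.sol"), Source::new("contract B {}")),
/// ]
/// .into_iter()
/// .collect();
///
/// // Freshly created sources default to `SourceCompilationKind::Complete`, i.e. "dirty".
/// assert!(sources.all_dirty());
/// assert_eq!(sources.dirty_files().count(), 2);
/// ```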
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct Sources(pub SourcesInner);

impl Sources {
    /// Returns a new instance of [Sources].
    pub fn new() -> Self {
        Self::default()
    }

    /// Returns `true` if no sources should have optimized output selection.
    pub fn all_dirty(&self) -> bool {
        self.0.values().all(|s| s.is_dirty())
    }

    /// Returns all entries that should not be optimized.
    pub fn dirty(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| s.is_dirty())
    }

    /// Returns all entries that should be optimized.
    pub fn clean(&self) -> impl Iterator<Item = (&PathBuf, &Source)> + '_ {
        self.0.iter().filter(|(_, s)| !s.is_dirty())
    }

    /// Returns all files that should not be optimized.
    pub fn dirty_files(&self) -> impl Iterator<Item = &PathBuf> + '_ {
        self.dirty().map(|(k, _)| k)
    }
}

impl std::ops::Deref for Sources {
    type Target = SourcesInner;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl std::ops::DerefMut for Sources {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<I> From<I> for Sources
where
    SourcesInner: From<I>,
{
    fn from(value: I) -> Self {
        Self(From::from(value))
    }
}

impl<I> FromIterator<I> for Sources
where
    SourcesInner: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        Self(FromIterator::from_iter(iter))
    }
}

impl IntoIterator for Sources {
    type Item = <SourcesInner as IntoIterator>::Item;
    type IntoIter = <SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a> IntoIterator for &'a Sources {
    type Item = <&'a SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a> IntoIterator for &'a mut Sources {
    type Item = <&'a mut SourcesInner as IntoIterator>::Item;
    type IntoIter = <&'a mut SourcesInner as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.0.iter_mut()
    }
}

/// Content of a Solidity file.
///
/// This contains the actual source code of a file.
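///
/// # Examples
///
/// A minimal in-memory sketch; the import path is an assumption:
///
/// ```ignore
/// use foundry_compilers_artifacts_solc::Source;
///
/// // Build a source directly from a string instead of reading it from disk.
/// let source = Source::new("pragma solidity ^0.8.0;\ncontract Counter {}");
///
/// // The raw content is available via `AsRef<str>`.
/// let code: &str = source.as_ref();
/// assert!(code.contains("Counter"));
///
/// // New sources are compiled with full output selection by default.
/// assert!(source.is_dirty());
/// ```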
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Source {
    /// Content of the file.
    ///
    /// This is an `Arc` because it may need to be cloned: if the project's graph contains
    /// multiple conflicting versions, the same [Source] can be required by more than one version
    /// set and therefore has to be duplicated.
    pub content: Arc<String>,
    #[serde(skip, default)]
    pub kind: SourceCompilationKind,
}

impl Source {
    /// Creates a new instance of [Source] with the given content.
    pub fn new(content: impl Into<String>) -> Self {
        Self { content: Arc::new(content.into()), kind: SourceCompilationKind::Complete }
    }

    /// Reads the file's content
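    ///
    /// # Examples
    ///
    /// A sketch of reading a single file; the path and import path are illustrative assumptions:
    ///
    /// ```ignore
    /// use std::path::Path;
    ///
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // CRLF sequences are normalized to LF, so metadata stays deterministic.
    /// let source = Source::read(Path::new("src/Counter.sol")).expect("failed to read source");
    /// assert!(!source.content.contains("\r\n"));
    /// ```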
    #[instrument(name = "Source::read", skip_all, err)]
    pub fn read(file: &Path) -> Result<Self, SolcIoError> {
        trace!(file=%file.display());
        let mut content = fs::read_to_string(file).map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Returns `true` if the source should be compiled with full output selection.
    pub fn is_dirty(&self) -> bool {
        self.kind.is_dirty()
    }

    /// Recursively finds all source files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub fn read_all_from(dir: &Path, extensions: &[&str]) -> Result<Sources, SolcIoError> {
        Self::read_all_files(utils::source_files(dir, extensions))
    }

    /// Recursively finds all Solidity and Yul files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub fn read_sol_yul_from(dir: &Path) -> Result<Sources, SolcIoError> {
        Self::read_all_from(dir, utils::SOLC_EXTENSIONS)
    }

    /// Reads all source files from the given vector.
    ///
    /// This delegates to `Self::read_all`; see `Self::par_read_all` (behind the `rayon` feature)
    /// for a parallelized variant.
    pub fn read_all_files(files: Vec<PathBuf>) -> Result<Sources, SolcIoError> {
        Self::read_all(files)
    }

    /// Reads all files from the given iterator of paths.
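    ///
    /// # Examples
    ///
    /// A sketch over a couple of hypothetical paths (import path assumed):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Paths are read sequentially and collected into an ordered `Sources` map.
    /// let sources = Source::read_all(["src/A.sol", "src/B.sol"]).expect("failed to read sources");
    /// assert_eq!(sources.len(), 2);
    /// ```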
    #[instrument(name = "Source::read_all", skip_all)]
    pub fn read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        files
            .into_iter()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect()
    }

    /// Parallelized version of `Self::read_all` that reads all files using a parallel iterator
    ///
    /// NOTE: this is only expected to be faster than `Self::read_all` if the given iterator
    /// contains at least several paths or the files are rather large.
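    ///
    /// # Examples
    ///
    /// A sketch assuming the `rayon` feature is enabled; paths and import path are illustrative:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Files are read on rayon's thread pool; the result is still an ordered `Sources` map.
    /// let sources =
    ///     Source::par_read_all(["src/A.sol", "src/B.sol"]).expect("failed to read sources");
    /// assert_eq!(sources.len(), 2);
    /// ```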
    #[cfg(feature = "rayon")]
    pub fn par_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        <I as IntoIterator>::IntoIter: Send,
        T: Into<PathBuf> + Send,
    {
        use rayon::{iter::ParallelBridge, prelude::ParallelIterator};
        files
            .into_iter()
            .par_bridge()
            .map(Into::into)
            .map(|file| Self::read(&file).map(|source| (file, source)))
            .collect::<Result<BTreeMap<_, _>, _>>()
            .map(Sources)
    }

    /// Generate a non-cryptographically secure checksum of the file's content.
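    ///
    /// # Examples
    ///
    /// A sketch assuming the `checksum` feature is enabled (import path assumed); useful for
    /// cheap change detection:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// let a = Source::new("contract A {}");
    /// let b = Source::new("contract A {}");
    ///
    /// // Identical content yields the same checksum.
    /// assert_eq!(a.content_hash(), b.content_hash());
    /// // Any edit changes it.
    /// assert_ne!(a.content_hash(), Source::new("contract B {}").content_hash());
    /// ```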
    #[cfg(feature = "checksum")]
    pub fn content_hash(&self) -> String {
        Self::content_hash_of(&self.content)
    }

    /// Generate a non-cryptographically secure checksum of the given source.
    #[cfg(feature = "checksum")]
    pub fn content_hash_of(src: &str) -> String {
        foundry_compilers_core::utils::unique_hash(src)
    }
}

#[cfg(feature = "async")]
impl Source {
    /// Async version of `Self::read`.
    #[instrument(name = "Source::async_read", skip_all, err)]
    pub async fn async_read(file: &Path) -> Result<Self, SolcIoError> {
        let mut content =
            tokio::fs::read_to_string(file).await.map_err(|err| SolcIoError::new(err, file))?;

        // Normalize line endings to ensure deterministic metadata.
        if content.contains('\r') {
            content = content.replace("\r\n", "\n");
        }

        Ok(Self::new(content))
    }

    /// Finds all source files under the given dir path and reads them all
    #[cfg(feature = "walkdir")]
    pub async fn async_read_all_from(
        dir: &Path,
        extensions: &[&str],
    ) -> Result<Sources, SolcIoError> {
        Self::async_read_all(utils::source_files(dir, extensions)).await
    }

    /// Async version of `Self::read_all`.
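    ///
    /// # Examples
    ///
    /// A sketch assuming the `async` feature and a Tokio runtime; paths and import path are
    /// illustrative:
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::Source;
    ///
    /// // Inside some async context: all files are read concurrently via `join_all`.
    /// let sources = Source::async_read_all(["src/A.sol", "src/B.sol"])
    ///     .await
    ///     .expect("failed to read sources");
    /// assert_eq!(sources.len(), 2);
    /// ```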
    pub async fn async_read_all<T, I>(files: I) -> Result<Sources, SolcIoError>
    where
        I: IntoIterator<Item = T>,
        T: Into<PathBuf>,
    {
        futures_util::future::join_all(
            files
                .into_iter()
                .map(Into::into)
                .map(|file| async { Self::async_read(&file).await.map(|source| (file, source)) }),
        )
        .await
        .into_iter()
        .collect()
    }
}

impl AsRef<str> for Source {
    fn as_ref(&self) -> &str {
        &self.content
    }
}

impl AsRef<[u8]> for Source {
    fn as_ref(&self) -> &[u8] {
        self.content.as_bytes()
    }
}

/// Represents the state of a filtered [`Source`].
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum SourceCompilationKind {
    /// We need a complete compilation output for the source.
    #[default]
    Complete,
    /// A source for which we don't need a complete output and want to optimize its compilation by
    /// reducing output selection.
    Optimized,
}

impl SourceCompilationKind {
    /// Whether this file should be compiled with full output selection
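    ///
    /// # Examples
    ///
    /// A sketch of how the two variants map onto "dirty" (import path assumed):
    ///
    /// ```ignore
    /// use foundry_compilers_artifacts_solc::{Source, SourceCompilationKind};
    ///
    /// // `Complete` requests the full output selection; `Optimized` does not.
    /// assert!(SourceCompilationKind::Complete.is_dirty());
    /// assert!(!SourceCompilationKind::Optimized.is_dirty());
    ///
    /// // Marking a source as `Optimized` makes it "clean" for output selection purposes.
    /// let mut source = Source::new("contract A {}");
    /// source.kind = SourceCompilationKind::Optimized;
    /// assert!(!source.is_dirty());
    /// ```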
    pub fn is_dirty(&self) -> bool {
        matches!(self, Self::Complete)
    }
}