foundry_compilers/resolver/mod.rs

1//! Resolution of the entire dependency graph for a project.
2//!
3//! This module implements the core logic in taking all contracts of a project and creating a
4//! resolved graph with applied remappings for all source contracts.
5//!
6//! Some constraints we're working with when resolving contracts
7//!
8//!   1. Each file can contain several source units and can have any number of imports/dependencies
9//!      (using the term interchangeably). Each dependency can declare a version range that it is
10//!      compatible with via its Solidity version pragma.
11//!   2. A dependency can be imported from any directory; see `Remappings`.
12//!
13//! Finding all dependencies is fairly simple: we do a DFS starting from the source
14//! contracts.
15//!
16//! ## Solc version auto-detection
17//!
18//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution
19//! makes several assumptions and tries to find a version of "Solc" that is compatible with all
20//! source files.
21//!
22//! The algorithm employed here is fairly simple: we do a DFS over all the source files, find the
23//! set of Solc versions that each file and all of its imports are compatible with, and then try
24//! to find a single Solc version that is compatible with all the files. This is effectively the
25//! intersection of all version sets.
26//!
27//! We always try to activate the highest (installed) solc version first. An uninstalled solc
28//! version is only used if it is the only compatible version for a single file or for the
29//! intersection of all version sets.
30//!
31//! This leads to finding the optimal version, if there is one. If there is no single Solc version
32//! that is compatible with all sources and their imports, the problem becomes much harder, because
33//! it is no longer clear what the "best" solution would be. In this case, we just choose the
34//! latest (installed) Solc version and try to minimize the number of Solc versions used.
35//!
36//! ## Performance
37//!
38//! Note that this is a relatively performance-critical portion of the foundry-compilers
39//! preprocessing. The data that needs to be processed is proportional to the size of the
40//! dependency graph, which, depending on the project, can be quite large.
41//!
42//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
43//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
44//! files, since we're only interested in their
45//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
46//! which is defined on a per-source-file basis.
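//!
//! ## Example: version-set intersection
//!
//! The following is a minimal sketch of the intersection idea described above, written directly
//! against [`semver`] types; the requirements and available versions are hypothetical and this is
//! not the API of this module:
//!
//! ```ignore
//! use semver::{Version, VersionReq};
//!
//! // Version requirements extracted from the individual source files (hypothetical).
//! let reqs = [">=0.6.6", ">=0.4.23", "^0.8.10"].map(|r| VersionReq::parse(r).unwrap());
//! // The set of available compiler versions (hypothetical).
//! let available = ["0.7.6", "0.8.10", "0.8.26"].map(|v| Version::parse(v).unwrap());
//!
//! // A version usable for the whole set is one that matches *all* requirements;
//! // we prefer the highest such version.
//! let best = available.iter().filter(|v| reqs.iter().all(|r| r.matches(v))).max();
//! assert_eq!(best.map(|v| v.to_string()).as_deref(), Some("0.8.26"));
//! ```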
47
48use crate::{
49    compilers::{Compiler, CompilerVersion, ParsedSource},
50    project::VersionedSources,
51    resolver::parse::SolParser,
52    ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, SourceParser,
53};
54use core::fmt;
55use foundry_compilers_artifacts::sources::{Source, Sources};
56use foundry_compilers_core::{
57    error::{Result, SolcError},
58    utils,
59};
60use semver::{Version, VersionReq};
61use std::{
62    collections::{BTreeSet, HashMap, HashSet, VecDeque},
63    io,
64    path::{Path, PathBuf},
65};
66use yansi::{Color, Paint};
67
68pub mod parse;
69mod tree;
70
71pub use parse::SolImportAlias;
72pub use tree::{print, Charset, TreeOptions};
73
74/// Container for the result of version and profile resolution of the sources contained in a [`Graph`].
75#[derive(Debug)]
76pub struct ResolvedSources<'a, C: Compiler> {
77    /// Resolved set of sources.
78    ///
79    /// Mapping from language to a [`Vec`] of compiler inputs consisting of version, sources set
80    /// and settings.
81    pub sources: VersionedSources<'a, C::Language, C::Settings>,
82    /// A mapping from a source file path to the primary profile name selected for it.
83    ///
84    /// This is required because the same source file might be compiled with multiple different
85    /// profiles if it's present as a dependency for other sources. We want to keep a single name
86    /// of the profile which was chosen specifically for each source so that we can default to it.
87    /// Right now, this is used when generating artifact names; the "primary" artifact will never
88    /// have a profile suffix.
89    pub primary_profiles: HashMap<PathBuf, &'a str>,
90    /// Graph edges.
91    pub edges: GraphEdges<C::Parser>,
92}
93
94/// The underlying edges of the graph which only contain the raw relationship data.
95///
96/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
97/// set is determined.
98#[derive(Clone, Debug)]
99pub struct GraphEdges<P: SourceParser> {
100    /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
101    /// is the set of outgoing edges for `nodes[0]`.
102    edges: Vec<Vec<usize>>,
103    /// Reverse of `edges`. That is, `rev_edges[0]` is the set of incoming edges for `nodes[0]`.
104    rev_edges: Vec<Vec<usize>>,
105    /// maps a solidity file to an index, for fast lookup
106    indices: HashMap<PathBuf, usize>,
107    /// reverse of `indices` for reverse lookup
108    rev_indices: HashMap<usize, PathBuf>,
109    /// the identified version requirement of a file
110    versions: HashMap<usize, Option<VersionReq>>,
111    /// the extracted data from the source files
112    data: Vec<P::ParsedSource>,
113    /// The parser which parsed `data`.
114    parser: Option<P>,
115    /// the number of input files we started with; corresponds to `let input_files =
116    /// nodes[..num_input_files]`.
117    ///
118    /// Combined with the `indices`, this lets us determine whether a file was originally added to
119    /// the graph as an input or was added as a resolved import, see [`Self::is_input_file()`]
120    num_input_files: usize,
121    /// tracks all imports that we failed to resolve for a file
122    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
123    /// tracks additional include paths resolved by scanning all imports of the graph
124    ///
125    /// Absolute imports, like `import "src/Contract.sol"`, are possible, but this does not play
126    /// nicely with the standard-json import format, since the VFS won't be able to resolve
127    /// "src/Contract.sol" without help via `--include-path`.
128    resolved_solc_include_paths: BTreeSet<PathBuf>,
129}
130
131impl<P: SourceParser> Default for GraphEdges<P> {
132    fn default() -> Self {
133        Self {
134            edges: Default::default(),
135            rev_edges: Default::default(),
136            indices: Default::default(),
137            rev_indices: Default::default(),
138            versions: Default::default(),
139            data: Default::default(),
140            parser: Default::default(),
141            num_input_files: Default::default(),
142            unresolved_imports: Default::default(),
143            resolved_solc_include_paths: Default::default(),
144        }
145    }
146}
147
148impl<P: SourceParser> GraphEdges<P> {
149    /// Returns the parser used to parse the sources.
150    pub fn parser(&self) -> &P {
151        self.parser.as_ref().unwrap()
152    }
153
154    /// Returns the parser used to parse the sources.
155    pub fn parser_mut(&mut self) -> &mut P {
156        self.parser.as_mut().unwrap()
157    }
158
159    /// Returns the number of input (source) files.
160    pub fn num_source_files(&self) -> usize {
161        self.num_input_files
162    }
163
164    /// Returns an iterator over all file indices
165    pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
166        0..self.edges.len()
167    }
168
169    /// Returns an iterator over all source file indices
170    pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
171        0..self.num_input_files
172    }
173
174    /// Returns an iterator over all library files
175    pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
176        self.files().skip(self.num_input_files)
177    }
178
179    /// Returns all additional `--include-paths`
180    pub fn include_paths(&self) -> &BTreeSet<PathBuf> {
181        &self.resolved_solc_include_paths
182    }
183
184    /// Returns all imports that we failed to resolve
185    pub fn unresolved_imports(&self) -> &HashSet<(PathBuf, PathBuf)> {
186        &self.unresolved_imports
187    }
188
189    /// Returns the list of nodes that the given node index points to, i.e. its direct imports.
190    pub fn imported_nodes(&self, from: usize) -> &[usize] {
191        &self.edges[from]
192    }
193
194    /// Returns an iterator that yields all imports of a node and all their imports
195    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
196        NodesIter::new(from, self).skip(1)
197    }
198
199    /// Returns all files imported by the given file
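    ///
    /// A hedged usage sketch (assumes `edges` is a resolved [`GraphEdges`] and that the file was
    /// indexed under the given path; the paths mirror the dapp-sample test fixture below):
    ///
    /// ```ignore
    /// let deps = edges.imports(Path::new("src/Dapp.t.sol"));
    /// // `deps` now holds every transitively imported file, e.g. "src/Dapp.sol" and
    /// // "lib/ds-test/src/test.sol".
    /// ```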
200    pub fn imports(&self, file: &Path) -> HashSet<&Path> {
201        if let Some(start) = self.indices.get(file).copied() {
202            NodesIter::new(start, self).skip(1).map(move |idx| &*self.rev_indices[&idx]).collect()
203        } else {
204            HashSet::new()
205        }
206    }
207
208    /// Returns all files that import the given file
209    pub fn importers(&self, file: &Path) -> HashSet<&Path> {
210        if let Some(start) = self.indices.get(file).copied() {
211            self.rev_edges[start].iter().map(move |idx| &*self.rev_indices[idx]).collect()
212        } else {
213            HashSet::new()
214        }
215    }
216
217    /// Returns the id of the given file
218    pub fn node_id(&self, file: &Path) -> usize {
219        self.indices[file]
220    }
221
222    /// Returns the path of the given node
223    pub fn node_path(&self, id: usize) -> &Path {
224        &self.rev_indices[&id]
225    }
226
227    /// Returns true if the `file` was originally included when the graph was first created and not
228    /// added when all `imports` were resolved
229    pub fn is_input_file(&self, file: &Path) -> bool {
230        if let Some(idx) = self.indices.get(file).copied() {
231            idx < self.num_input_files
232        } else {
233            false
234        }
235    }
236
237    /// Returns the `VersionReq` for the given file
238    pub fn version_requirement(&self, file: &Path) -> Option<&VersionReq> {
239        self.indices.get(file).and_then(|idx| self.versions.get(idx)).and_then(Option::as_ref)
240    }
241
242    /// Returns the parsed source data for the given file
243    pub fn get_parsed_source(&self, file: &Path) -> Option<&P::ParsedSource>
244    where
245        P: SourceParser,
246    {
247        self.indices.get(file).and_then(|idx| self.data.get(*idx))
248    }
249}
250
251/// Represents a fully-resolved solidity dependency graph.
252///
253/// Each node in the graph is a file and edges represent dependencies between them.
254///
255/// See also <https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files>
256#[derive(Debug)]
257pub struct Graph<P: SourceParser = SolParser> {
258    /// all nodes in the project, a `Node` represents a single file
259    pub nodes: Vec<Node<P::ParsedSource>>,
260    /// relationship of the nodes
261    edges: GraphEdges<P>,
262    /// the root of the project this graph represents
263    root: PathBuf,
264}
265
266type L<P> = <<P as SourceParser>::ParsedSource as ParsedSource>::Language;
267
268impl<P: SourceParser> Graph<P> {
269    /// Returns the parser used to parse the sources.
270    pub fn parser(&self) -> &P {
271        self.edges.parser()
272    }
273
274    /// Print the graph to `StdOut`
275    pub fn print(&self) {
276        self.print_with_options(Default::default())
277    }
278
279    /// Print the graph to `StdOut` using the provided `TreeOptions`
280    pub fn print_with_options(&self, opts: TreeOptions) {
281        let stdout = io::stdout();
282        let mut out = stdout.lock();
283        tree::print(self, &opts, &mut out).expect("failed to write to stdout.")
284    }
285
286    /// Returns the list of nodes that the given node index points to, i.e. its direct imports.
287    pub fn imported_nodes(&self, from: usize) -> &[usize] {
288        self.edges.imported_nodes(from)
289    }
290
291    /// Returns an iterator that yields all imports of a node and all their imports
292    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
293        self.edges.all_imported_nodes(from)
294    }
295
296    /// Returns `true` if the given node has any outgoing edges.
297    pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool {
298        !self.edges.edges[index].is_empty()
299    }
300
301    /// Returns all the resolved files and their index in the graph.
302    pub fn files(&self) -> &HashMap<PathBuf, usize> {
303        &self.edges.indices
304    }
305
306    /// Returns `true` if the graph is empty.
307    pub fn is_empty(&self) -> bool {
308        self.nodes.is_empty()
309    }
310
311    /// Gets a node by index.
312    ///
313    /// # Panics
314    ///
315    /// if the `index` node id is not included in the graph
316    pub fn node(&self, index: usize) -> &Node<P::ParsedSource> {
317        &self.nodes[index]
318    }
319
320    pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, P::ParsedSource> {
321        DisplayNode { node: self.node(index), root: &self.root }
322    }
323
324    /// Returns an iterator that yields all nodes of the dependency tree that the given node id
325    /// spans, starting with the node itself.
326    ///
327    /// # Panics
328    ///
329    /// if the `start` node id is not included in the graph
330    pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
331        NodesIter::new(start, &self.edges)
332    }
333
334    /// Same as `Self::node_ids` but returns the actual `Node`
335    pub fn nodes(&self, start: usize) -> impl Iterator<Item = &Node<P::ParsedSource>> + '_ {
336        self.node_ids(start).map(move |idx| self.node(idx))
337    }
338
339    fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges<P>) {
340        let Self { nodes, mut edges, .. } = self;
341        // need to move the extracted data to the edges, essentially splitting the node so we have
342        // access to the data at a later stage in the compile pipeline
343        let mut sources = Vec::new();
344        for (idx, node) in nodes.into_iter().enumerate() {
345            let Node { path, source, data } = node;
346            sources.push((path, source));
347            let idx2 = edges.data.len();
348            edges.data.push(data);
349            assert_eq!(idx, idx2);
350        }
351
352        (sources, edges)
353    }
354
355    /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
356    /// returning the `nodes` converted to `Sources`
357    pub fn into_sources(self) -> (Sources, GraphEdges<P>) {
358        let (sources, edges) = self.split();
359        (sources.into_iter().collect(), edges)
360    }
361
362    /// Returns an iterator that yields only those nodes that represent input files.
363    /// See `Self::resolve_sources`.
364    /// This won't yield any resolved library nodes.
365    pub fn input_nodes(&self) -> impl Iterator<Item = &Node<P::ParsedSource>> {
366        self.nodes.iter().take(self.edges.num_input_files)
367    }
368
369    /// Returns all files imported by the given file
370    pub fn imports(&self, path: &Path) -> HashSet<&Path> {
371        self.edges.imports(path)
372    }
373
374    /// Resolves a number of sources within the given config
375    #[instrument(name = "Graph::resolve_sources", skip_all)]
376    pub fn resolve_sources(
377        paths: &ProjectPathsConfig<<P::ParsedSource as ParsedSource>::Language>,
378        mut sources: Sources,
379    ) -> Result<Self> {
380        /// checks if the given target path was already resolved; if so, it adds its id to the list
381        /// of resolved imports. If it hasn't been resolved yet, it queues the file for
382        /// processing
383        fn add_node<P: SourceParser>(
384            parser: &mut P,
385            unresolved: &mut VecDeque<(PathBuf, Node<P::ParsedSource>)>,
386            index: &mut HashMap<PathBuf, usize>,
387            resolved_imports: &mut Vec<usize>,
388            target: PathBuf,
389        ) -> Result<()> {
390            if let Some(idx) = index.get(&target).copied() {
391                resolved_imports.push(idx);
392            } else {
393                // imported file is not part of the input files
394                let node = parser.read(&target)?;
395                unresolved.push_back((target.clone(), node));
396                let idx = index.len();
397                index.insert(target, idx);
398                resolved_imports.push(idx);
399            }
400            Ok(())
401        }
402
403        // The cache uses absolute paths as cache keys, so make the source paths absolute first.
404        sources.make_absolute(&paths.root);
405
406        let mut parser = P::new(paths.with_language_ref());
407
408        // we start off by reading all input files, which includes all solidity files from the
409        // source and test folder
410        let mut unresolved: VecDeque<_> = parser.parse_sources(&mut sources)?.into();
411
412        // identifiers of all resolved files
413        let mut index: HashMap<_, _> =
414            unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();
415
416        let num_input_files = unresolved.len();
417
418        // contains the files and their dependencies
419        let mut nodes = Vec::with_capacity(unresolved.len());
420        let mut edges = Vec::with_capacity(unresolved.len());
421        let mut rev_edges = Vec::with_capacity(unresolved.len());
422
423        // tracks additional paths that should be used with `--include-path`, these are libraries
424        // that use absolute imports like `import "src/Contract.sol"`
425        let mut resolved_solc_include_paths = BTreeSet::new();
426        resolved_solc_include_paths.insert(paths.root.clone());
427
428        // keep track of all unique paths that we failed to resolve to not spam the reporter with
429        // the same path
430        let mut unresolved_imports = HashSet::new();
431
432        // now we need to resolve all imports for the source file and those imported from other
433        // locations
434        while let Some((path, node)) = unresolved.pop_front() {
435            let mut resolved_imports = Vec::new();
436            // parent directory of the current file
437            let cwd = match path.parent() {
438                Some(inner) => inner,
439                None => continue,
440            };
441
442            for import_path in node.data.resolve_imports(paths, &mut resolved_solc_include_paths)? {
443                if let Some(err) = match paths.resolve_import_and_include_paths(
444                    cwd,
445                    &import_path,
446                    &mut resolved_solc_include_paths,
447                ) {
448                    Ok(import) => add_node(
449                        &mut parser,
450                        &mut unresolved,
451                        &mut index,
452                        &mut resolved_imports,
453                        import,
454                    )
455                    .err(),
456                    Err(err) => Some(err),
457                } {
458                    unresolved_imports.insert((import_path.to_path_buf(), node.path.clone()));
459                    trace!("failed to resolve import component \"{:?}\" for {:?}", err, node.path)
460                }
461            }
462
463            nodes.push(node);
464            edges.push(resolved_imports);
465            // Will be populated later
466            rev_edges.push(Vec::new());
467        }
468
469        // Build `rev_edges`
470        for (idx, edges) in edges.iter().enumerate() {
471            for &edge in edges.iter() {
472                rev_edges[edge].push(idx);
473            }
474        }
475
476        if !unresolved_imports.is_empty() {
477            // notify on all unresolved imports
478            crate::report::unresolved_imports(
479                &unresolved_imports
480                    .iter()
481                    .map(|(i, f)| (i.as_path(), f.as_path()))
482                    .collect::<Vec<_>>(),
483                &paths.remappings,
484            );
485        }
486
487        let edges = GraphEdges {
488            edges,
489            rev_edges,
490            rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
491            indices: index,
492            num_input_files,
493            versions: nodes
494                .iter()
495                .enumerate()
496                .map(|(idx, node)| (idx, node.data.version_req().cloned()))
497                .collect(),
498            data: Default::default(),
499            parser: Some(parser),
500            unresolved_imports,
501            resolved_solc_include_paths,
502        };
503        Ok(Self { nodes, edges, root: paths.root.clone() })
504    }
505
506    /// Resolves the dependencies of a project's source contracts
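    ///
    /// A minimal usage sketch, assuming a dapptools-style project layout (the path is
    /// hypothetical; see the tests at the bottom of this file for real fixtures):
    ///
    /// ```ignore
    /// let paths = ProjectPathsConfig::dapptools(Path::new("path/to/project"))?;
    /// let graph = Graph::<SolParser>::resolve(&paths)?;
    /// println!("resolved {} files", graph.files().len());
    /// ```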
507    pub fn resolve(
508        paths: &ProjectPathsConfig<<P::ParsedSource as ParsedSource>::Language>,
509    ) -> Result<Self> {
510        Self::resolve_sources(paths, paths.read_input_files()?)
511    }
512
513    /// Consumes the nodes of the graph and returns all input files together with their appropriate
514    /// version and the edges of the graph.
515    ///
516    /// First we determine the compatible version for each input file (from the sources and test
517    /// folders, see `Self::resolve`) and then we add all resolved library imports.
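    ///
    /// A hedged usage sketch (`project` is assumed to be an already configured [`Project`]):
    ///
    /// ```ignore
    /// let graph = Graph::resolve(&project.paths)?;
    /// let ResolvedSources { sources, primary_profiles, edges } =
    ///     graph.into_sources_by_version(&project)?;
    /// for (language, versioned) in sources {
    ///     for (version, sources, (profile_name, _settings)) in versioned {
    ///         // hand `sources` to a compiler with `version` under `profile_name` ...
    ///     }
    /// }
    /// ```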
518    pub fn into_sources_by_version<C, T>(
519        self,
520        project: &Project<C, T>,
521    ) -> Result<ResolvedSources<'_, C>>
522    where
523        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
524        C: Compiler<Parser = P, Language = <P::ParsedSource as ParsedSource>::Language>,
525    {
526        /// Inserts the imports of the given node into the sources map.
527        /// There can be the following graph:
528        /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
529        /// where `C` is a library import, in which case we assign `C` only to the first input file.
530        /// However, it's not required to include them in the solc `CompilerInput` as they would get
531        /// picked up by solc otherwise, but we add them so we can create a corresponding
532        /// cache entry for them as well. This can be optimized, however.
533        fn insert_imports(
534            idx: usize,
535            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
536            sources: &mut Sources,
537            edges: &[Vec<usize>],
538            processed_sources: &mut HashSet<usize>,
539        ) {
540            // iterate over all dependencies not processed yet
541            for dep in edges[idx].iter().copied() {
542                // keep track of processed dependencies, if the dep was already in the set we have
543                // processed it already
544                if !processed_sources.insert(dep) {
545                    continue;
546                }
547
548                // library import
549                if let Some((path, source)) = all_nodes.get(&dep).cloned() {
550                    sources.insert(path, source);
551                    insert_imports(dep, all_nodes, sources, edges, processed_sources);
552                }
553            }
554        }
555
556        let versioned_nodes = self.get_input_node_versions(project)?;
557        let versioned_nodes = self.resolve_settings(project, versioned_nodes)?;
558        let (nodes, edges) = self.split();
559
560        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
561
562        let mut resulted_sources = HashMap::new();
563        let mut default_profiles = HashMap::new();
564
565        let profiles = project.settings_profiles().collect::<Vec<_>>();
566
567        // determine the `Sources` set for each solc version
568        for (language, versioned_nodes) in versioned_nodes {
569            let mut versioned_sources = Vec::with_capacity(versioned_nodes.len());
570
571            for (version, profile_to_nodes) in versioned_nodes {
572                for (profile_idx, input_node_indexes) in profile_to_nodes {
573                    let mut sources = Sources::new();
574
575                    // all input nodes will be processed
576                    let mut processed_sources = input_node_indexes.iter().copied().collect();
577
578                    // we only process input nodes (from sources, tests for example)
579                    for idx in input_node_indexes {
580                        // insert the input node into the sources set
582                        let (path, source) =
583                            all_nodes.get(&idx).cloned().expect("node is present. qed");
584
585                        default_profiles.insert(path.clone(), profiles[profile_idx].0);
586                        sources.insert(path, source);
587                        insert_imports(
588                            idx,
589                            &mut all_nodes,
590                            &mut sources,
591                            &edges.edges,
592                            &mut processed_sources,
593                        );
594                    }
595                    versioned_sources.push((version.clone(), sources, profiles[profile_idx]));
596                }
597            }
598
599            resulted_sources.insert(language, versioned_sources);
600        }
601
602        Ok(ResolvedSources { sources: resulted_sources, primary_profiles: default_profiles, edges })
603    }
604
605    /// Writes the list of imported files into the given formatter:
606    ///
607    /// ```text
608    /// path/to/a.sol (<version>) imports:
609    ///     path/to/b.sol (<version>)
610    ///     path/to/c.sol (<version>)
611    ///     ...
612    /// ```
613    fn format_imports_list<
614        C: Compiler,
615        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
616        W: std::fmt::Write,
617    >(
618        &self,
619        idx: usize,
620        incompatible: HashSet<usize>,
621        project: &Project<C, T>,
622        f: &mut W,
623    ) -> std::result::Result<(), std::fmt::Error> {
624        let format_node = |idx, f: &mut W| {
625            let node = self.node(idx);
626            let color = if incompatible.contains(&idx) { Color::Red } else { Color::White };
627
628            let mut line = utils::source_name(&node.path, &self.root).display().to_string();
629            if let Some(req) = self.version_requirement(idx, project) {
630                line.push_str(&format!(" {req}"));
631            }
632
633            write!(f, "{}", line.paint(color))
634        };
635        format_node(idx, f)?;
636        write!(f, " imports:")?;
637        for dep in self.node_ids(idx).skip(1) {
638            write!(f, "\n    ")?;
639            format_node(dep, f)?;
640        }
641
642        Ok(())
643    }
644
645    /// Combines the version requirement parsed from the file with the one from project restrictions.
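    ///
    /// A minimal sketch of the combination with hypothetical requirements (the comparators of
    /// both requirements are simply concatenated, mirroring the body below):
    ///
    /// ```ignore
    /// let parsed = VersionReq::parse(">=0.8.0").unwrap(); // from the version pragma
    /// let restriction = VersionReq::parse("<0.8.20").unwrap(); // from project restrictions
    /// let mut combined = parsed.clone();
    /// combined.comparators.extend(restriction.comparators.clone());
    /// assert!(combined.matches(&Version::new(0, 8, 19)));
    /// assert!(!combined.matches(&Version::new(0, 8, 20)));
    /// ```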
646    fn version_requirement<
647        C: Compiler,
648        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
649    >(
650        &self,
651        idx: usize,
652        project: &Project<C, T>,
653    ) -> Option<VersionReq> {
654        let node = self.node(idx);
655        let parsed_req = node.data.version_req();
656        let other_req = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref());
657
658        match (parsed_req, other_req) {
659            (Some(parsed_req), Some(other_req)) => {
660                let mut req = parsed_req.clone();
661                req.comparators.extend(other_req.comparators.clone());
662                Some(req)
663            }
664            (Some(parsed_req), None) => Some(parsed_req.clone()),
665            (None, Some(other_req)) => Some(other_req.clone()),
666            _ => None,
667        }
668    }
669
670    /// Checks that the file's version is even available.
671    ///
672    /// This returns an error if the file's version is invalid semver, or is not available, such as
673    /// `0.8.20` when the highest available version is `0.8.19`.
674    fn check_available_version<
675        C: Compiler,
676        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
677    >(
678        &self,
679        idx: usize,
680        all_versions: &[&CompilerVersion],
681        project: &Project<C, T>,
682    ) -> std::result::Result<(), SourceVersionError> {
683        let Some(req) = self.version_requirement(idx, project) else { return Ok(()) };
684
685        if !all_versions.iter().any(|v| req.matches(v.as_ref())) {
686            return if project.offline {
687                Err(SourceVersionError::NoMatchingVersionOffline(req))
688            } else {
689                Err(SourceVersionError::NoMatchingVersion(req))
690            };
691        }
692
693        Ok(())
694    }
695
696    /// Filters incompatible versions from the `candidates`. It iterates over the node's imports
697    /// and, if no compatible version remains, returns an error describing the conflict.
698    fn retain_compatible_versions<
699        C: Compiler,
700        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
701    >(
702        &self,
703        idx: usize,
704        candidates: &mut Vec<&CompilerVersion>,
705        project: &Project<C, T>,
706    ) -> Result<(), String> {
707        let mut all_versions = candidates.clone();
708
709        let nodes: Vec<_> = self.node_ids(idx).collect();
710        let mut failed_node_idx = None;
711        for node in nodes.iter() {
712            if let Some(req) = self.version_requirement(*node, project) {
713                candidates.retain(|v| req.matches(v.as_ref()));
714
715                if candidates.is_empty() {
716                    failed_node_idx = Some(*node);
717                    break;
718                }
719            }
720        }
721
722        let Some(failed_node_idx) = failed_node_idx else {
723            // everything is fine
724            return Ok(());
725        };
726
727        // This now keeps the node which was the last one before we ran out of candidates. It
728        // means that there is a node directly conflicting with it somewhere earlier in
729        // `nodes`.
730        let failed_node = self.node(failed_node_idx);
731
732        if let Err(version_err) =
733            self.check_available_version(failed_node_idx, &all_versions, project)
734        {
735            // check if the version is even valid
736            let f = utils::source_name(&failed_node.path, &self.root).display();
737            return Err(format!("Encountered invalid solc version in {f}: {version_err}"));
738        } else {
739            // if the node requirement makes sense, it means that there is at least one node
740            // whose requirement conflicts with it
741
742            // retain only versions compatible with the `failed_node`
743            if let Some(req) = self.version_requirement(failed_node_idx, project) {
744                all_versions.retain(|v| req.matches(v.as_ref()));
745            }
746
747            // iterate over all the nodes once again and find the one incompatible
748            for node in &nodes {
749                if self.check_available_version(*node, &all_versions, project).is_err() {
750                    let mut msg = "Found incompatible versions:\n".white().to_string();
751
752                    self.format_imports_list(
753                        idx,
754                        [*node, failed_node_idx].into(),
755                        project,
756                        &mut msg,
757                    )
758                    .unwrap();
759                    return Err(msg);
760                }
761            }
762        }
763
764        let mut msg = "Found incompatible versions:\n".white().to_string();
765        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
766        Err(msg)
767    }
768
769    /// Filters profiles incompatible with the given node and its imports.
770    fn retain_compatible_profiles<
771        C: Compiler,
772        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
773    >(
774        &self,
775        idx: usize,
776        project: &Project<C, T>,
777        candidates: &mut Vec<(usize, (&str, &C::Settings))>,
778    ) -> Result<(), String> {
779        let mut all_profiles = candidates.clone();
780
781        let nodes: Vec<_> = self.node_ids(idx).collect();
782        let mut failed_node_idx = None;
783        for node in nodes.iter() {
784            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
785                candidates.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
786                if candidates.is_empty() {
787                    failed_node_idx = Some(*node);
788                    break;
789                }
790            }
791        }
792
793        let Some(failed_node_idx) = failed_node_idx else {
794            // everything is fine
795            return Ok(());
796        };
797
798        let failed_node = self.node(failed_node_idx);
799
800        // retain only profiles compatible with the `failed_node`
801        if let Some(req) = project.restrictions.get(&failed_node.path) {
802            all_profiles.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
803        }
804
805        if all_profiles.is_empty() {
806            let f = utils::source_name(&failed_node.path, &self.root).display();
807            return Err(format!("Missing profile satisfying settings restrictions for {f}"));
808        }
809
810        // iterate over all the nodes once again and find the one incompatible
811        for node in &nodes {
812            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
813                if !all_profiles
814                    .iter()
815                    .any(|(_, (_, settings))| settings.satisfies_restrictions(&**req))
816                {
817                    let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
818
819                    self.format_imports_list(
820                        idx,
821                        [*node, failed_node_idx].into(),
822                        project,
823                        &mut msg,
824                    )
825                    .unwrap();
826                    return Err(msg);
827                }
828            }
829        }
830
831        let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
832        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
833        Err(msg)
834    }
835
836    fn input_nodes_by_language(&self) -> HashMap<L<P>, Vec<usize>> {
837        let mut nodes = HashMap::new();
838
839        for idx in 0..self.edges.num_input_files {
840            nodes.entry(self.nodes[idx].data.language()).or_insert_with(Vec::new).push(idx);
841        }
842
843        nodes
844    }
845
846    /// Returns a map of versions together with the input nodes that are compatible with that
847    /// version.
848    ///
849    /// This will essentially do a DFS on all input sources and their transitive imports and
850    /// check that all of them can be compiled with the version stated in the input file.
851    ///
852    /// Returns an error message with __all__ input files that don't have compatible imports.
853    ///
854    /// This also attempts to prefer local installations over remotely available versions.
855    /// If `offline` is set to `true`, then only already installed versions are considered.
856    #[allow(clippy::type_complexity)]
857    fn get_input_node_versions<
858        C: Compiler<Language = L<P>>,
859        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
860    >(
861        &self,
862        project: &Project<C, T>,
863    ) -> Result<HashMap<L<P>, HashMap<Version, Vec<usize>>>> {
864        trace!("resolving input node versions");
865
866        let mut resulted_nodes = HashMap::new();
867
868        for (language, nodes) in self.input_nodes_by_language() {
869            // this is likely called by an application and will eventually be printed, so we
870            // don't exit on the first error but instead gather all the errors and return a
871            // bundled error message
872            let mut errors = Vec::new();
873
874            // the sorted list of all versions
875            let all_versions = if project.offline {
876                project
877                    .compiler
878                    .available_versions(&language)
879                    .into_iter()
880                    .filter(|v| v.is_installed())
881                    .collect()
882            } else {
883                project.compiler.available_versions(&language)
884            };
885
886            if all_versions.is_empty() && !nodes.is_empty() {
887                return Err(SolcError::msg(format!(
888                    "Found {language} sources, but no compiler versions are available for it"
889                )));
890            }
891
892            // stores all versions and their nodes that can be compiled
893            let mut versioned_nodes = HashMap::new();
894
895            // stores all files and the versions they're compatible with
896            let mut all_candidates = Vec::with_capacity(self.edges.num_input_files);
897            // walking through the node's dep tree and filtering the versions along the way
898            for idx in nodes {
899                let mut candidates = all_versions.iter().collect::<Vec<_>>();
900                // remove all incompatible versions from the candidates list by checking the node
901                // and all its imports
902                if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, project) {
903                    errors.push(err);
904                } else {
905                    // found viable candidates, pick the most recent version that's already
906                    // installed
907                    let candidate =
908                        if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
909                            candidates[pos]
910                        } else {
911                            candidates.last().expect("not empty; qed.")
912                        }
913                        .clone();
914
915                    // also store all possible candidates to optimize the set
916                    all_candidates.push((idx, candidates.into_iter().collect::<HashSet<_>>()));
917
918                    versioned_nodes
919                        .entry(candidate)
920                        .or_insert_with(|| Vec::with_capacity(1))
921                        .push(idx);
922                }
923            }
924
925            // detected multiple versions but there might still exist a single version that
926            // satisfies all sources
927            if versioned_nodes.len() > 1 {
928                versioned_nodes = Self::resolve_multiple_versions(all_candidates);
929            }
930
931            if versioned_nodes.len() == 1 {
932                trace!(
933                    "found exact solc version for all sources  \"{}\"",
934                    versioned_nodes.keys().next().unwrap()
935                );
936            }
937
938            if errors.is_empty() {
939                trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys());
940                resulted_nodes.insert(
941                    language,
942                    versioned_nodes
943                        .into_iter()
944                        .map(|(v, nodes)| (Version::from(v), nodes))
945                        .collect(),
946                );
947            } else {
948                let s = errors.join("\n");
949                debug!("failed to resolve versions: {s}");
950                return Err(SolcError::msg(s));
951            }
952        }
953
954        Ok(resulted_nodes)
955    }
956
957    #[allow(clippy::complexity)]
958    fn resolve_settings<
959        C: Compiler<Language = L<P>>,
960        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
961    >(
962        &self,
963        project: &Project<C, T>,
964        input_nodes_versions: HashMap<L<P>, HashMap<Version, Vec<usize>>>,
965    ) -> Result<HashMap<L<P>, HashMap<Version, HashMap<usize, Vec<usize>>>>> {
966        let mut resulted_sources = HashMap::new();
967        let mut errors = Vec::new();
968        for (language, versions) in input_nodes_versions {
969            let mut versioned_sources = HashMap::new();
970            for (version, nodes) in versions {
971                let mut profile_to_nodes = HashMap::new();
972                for idx in nodes {
973                    let mut profile_candidates =
974                        project.settings_profiles().enumerate().collect::<Vec<_>>();
975                    if let Err(err) =
976                        self.retain_compatible_profiles(idx, project, &mut profile_candidates)
977                    {
978                        errors.push(err);
979                    } else {
980                        let (profile_idx, _) = profile_candidates.first().expect("exists");
981                        profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx);
982                    }
983                }
984                versioned_sources.insert(version, profile_to_nodes);
985            }
986            resulted_sources.insert(language, versioned_sources);
987        }
988
989        if errors.is_empty() {
990            Ok(resulted_sources)
991        } else {
992            let s = errors.join("\n");
993            debug!("failed to resolve settings: {s}");
994            Err(SolcError::msg(s))
995        }
996    }
997
998    /// Tries to find the "best" assignment of versions to nodes; see [Solc version
999    /// auto-detection](#solc-version-auto-detection).
1000    ///
1001    /// This is a bit inefficient but is fine: the max. number of versions is ~80 and there's
1002    /// a high chance that the number of source files is <50, even for larger projects.
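    ///
    /// A hedged illustration: if one input requires `^0.7.0` and another `^0.8.0`, there is no
    /// common version, so the result maps one (highest matching) version to each group of nodes,
    /// e.g. `{0.7.6 => [a], 0.8.26 => [b]}` (the concrete versions here are hypothetical).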
1003    fn resolve_multiple_versions(
1004        all_candidates: Vec<(usize, HashSet<&CompilerVersion>)>,
1005    ) -> HashMap<CompilerVersion, Vec<usize>> {
1006        // returns the intersection as a sorted list of versions
1007        fn intersection<'a>(
1008            mut sets: Vec<&HashSet<&'a CompilerVersion>>,
1009        ) -> Vec<&'a CompilerVersion> {
1010            if sets.is_empty() {
1011                return Vec::new();
1012            }
1013
1014            let mut result = sets.pop().cloned().expect("not empty; qed.");
1015            if !sets.is_empty() {
1016                result.retain(|item| sets.iter().all(|set| set.contains(item)));
1017            }
1018
1019            let mut v = result.into_iter().collect::<Vec<_>>();
1020            v.sort_unstable();
1021            v
1022        }
1023
1024        /// removes and returns the highest version that is installed;
1025        /// if the candidates set only contains uninstalled versions then this removes and returns
1026        /// the highest uninstalled version
1027        fn remove_candidate(candidates: &mut Vec<&CompilerVersion>) -> CompilerVersion {
1028            debug_assert!(!candidates.is_empty());
1029
1030            if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
1031                candidates.remove(pos)
1032            } else {
1033                candidates.pop().expect("not empty; qed.")
1034            }
1035            .clone()
1036        }
1037
1038        let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect();
1039
1040        // find all versions that satisfy all nodes
1041        let mut intersection = intersection(all_sets);
1042        if !intersection.is_empty() {
1043            let exact_version = remove_candidate(&mut intersection);
1044            let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect();
1045            trace!("resolved solc version compatible with all sources  \"{}\"", exact_version);
1046            return HashMap::from([(exact_version, all_nodes)]);
1047        }
1048
1049        // no version satisfies all nodes
1050        let mut versioned_nodes: HashMap<_, _> = HashMap::new();
1051
1052        // try to minimize the set of versions; this is guaranteed to lead to `versioned_nodes.len()
1053        // > 1` as no solc version exists that can satisfy all sources
1054        for (node, versions) in all_candidates {
1055            // need to sort them again
1056            let mut versions = versions.into_iter().collect::<Vec<_>>();
1057            versions.sort_unstable();
1058
1059            let candidate = if let Some(idx) =
1060                versions.iter().rposition(|v| versioned_nodes.contains_key(*v))
1061            {
1062                // use a version that's already in the set
1063                versions.remove(idx).clone()
1064            } else {
1065                // use the highest version otherwise
1066                remove_candidate(&mut versions)
1067            };
1068
1069            versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node);
1070        }
1071
1072        trace!(
1073            "no solc version can satisfy all source files, resolved multiple versions  \"{:?}\"",
1074            versioned_nodes.keys()
1075        );
1076
1077        versioned_nodes
1078    }
1079}
1080
1081/// An iterator over a node and its dependencies
1082#[derive(Debug)]
1083pub struct NodesIter<'a, P: SourceParser> {
1084    /// queue of nodes that still need to be visited (BFS order)
1085    stack: VecDeque<usize>,
1086    visited: HashSet<usize>,
1087    graph: &'a GraphEdges<P>,
1088}
1089
1090impl<'a, P: SourceParser> NodesIter<'a, P> {
1091    fn new(start: usize, graph: &'a GraphEdges<P>) -> Self {
1092        Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph }
1093    }
1094}
1095
1096impl<P: SourceParser> Iterator for NodesIter<'_, P> {
1097    type Item = usize;
1098    fn next(&mut self) -> Option<Self::Item> {
1099        let node = self.stack.pop_front()?;
1100
1101        if self.visited.insert(node) {
1102            // push the node's direct dependencies to the stack if we haven't visited it already
1103            self.stack.extend(self.graph.imported_nodes(node).iter().copied());
1104        }
1105        Some(node)
1106    }
1107}
1108
1109#[derive(Debug)]
1110pub struct Node<S> {
1111    /// path of the solidity file
1112    path: PathBuf,
1113    /// content of the solidity file
1114    source: Source,
1115    /// parsed data
1116    pub data: S,
1117}
1118
1119impl<S> Node<S> {
1120    pub fn new(path: PathBuf, source: Source, data: S) -> Self {
1121        Self { path, source, data }
1122    }
1123
1124    pub fn map_data<T>(self, f: impl FnOnce(S) -> T) -> Node<T> {
1125        Node::new(self.path, self.source, f(self.data))
1126    }
1127}
1128
1129impl<S: ParsedSource> Node<S> {
1130    /// Reads the content of the file and returns a [Node] containing relevant information
1131    pub fn read(file: &Path) -> Result<Self> {
1132        let source = Source::read_(file)?;
1133        Self::parse(file, source)
1134    }
1135
1136    pub fn parse(file: &Path, source: Source) -> Result<Self> {
1137        let data = S::parse(source.as_ref(), file)?;
1138        Ok(Self::new(file.to_path_buf(), source, data))
1139    }
1140
1141    /// Returns the path of the file.
1142    pub fn path(&self) -> &Path {
1143        &self.path
1144    }
1145
1146    /// Returns the contents of the file.
1147    pub fn content(&self) -> &str {
1148        &self.source.content
1149    }
1150
1151    pub fn unpack(&self) -> (&Path, &Source) {
1152        (&self.path, &self.source)
1153    }
1154}
1155
1156/// Helper type for formatting a node
1157pub(crate) struct DisplayNode<'a, S> {
1158    node: &'a Node<S>,
1159    root: &'a PathBuf,
1160}
1161
1162impl<S: ParsedSource> fmt::Display for DisplayNode<'_, S> {
1163    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1164        let path = utils::source_name(&self.node.path, self.root);
1165        write!(f, "{}", path.display())?;
1166        if let Some(v) = self.node.data.version_req() {
1167            write!(f, " {v}")?;
1168        }
1169        Ok(())
1170    }
1171}
1172
1173/// Errors thrown when checking the solc version of a file
1174#[derive(Debug, thiserror::Error)]
1175#[allow(dead_code)]
1176enum SourceVersionError {
1177    #[error("Failed to parse solidity version {0}: {1}")]
1178    InvalidVersion(String, SolcError),
1179    #[error("No solc version exists that matches the version requirement: {0}")]
1180    NoMatchingVersion(VersionReq),
1181    #[error("No solc version installed that matches the version requirement: {0}")]
1182    NoMatchingVersionOffline(VersionReq),
1183}
1184
1185#[cfg(test)]
1186mod tests {
1187    use super::*;
1188
1189    #[test]
1190    fn can_resolve_hardhat_dependency_graph() {
1191        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
1192        let paths = ProjectPathsConfig::hardhat(&root).unwrap();
1193
1194        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
1195
1196        assert_eq!(graph.edges.num_input_files, 1);
1197        assert_eq!(graph.files().len(), 2);
1198
1199        assert_eq!(
1200            graph.files().clone(),
1201            HashMap::from([
1202                (paths.sources.join("Greeter.sol"), 0),
1203                (paths.root.join("node_modules/hardhat/console.sol"), 1),
1204            ])
1205        );
1206    }
1207
1208    #[test]
1209    fn can_resolve_dapp_dependency_graph() {
1210        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
1211        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
1212
1213        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
1214
1215        assert_eq!(graph.edges.num_input_files, 2);
1216        assert_eq!(graph.files().len(), 3);
1217        assert_eq!(
1218            graph.files().clone(),
1219            HashMap::from([
1220                (paths.sources.join("Dapp.sol"), 0),
1221                (paths.sources.join("Dapp.t.sol"), 1),
1222                (paths.root.join("lib/ds-test/src/test.sol"), 2),
1223            ])
1224        );
1225
1226        let dapp_test = graph.node(1);
1227        assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
1228        assert_eq!(
1229            dapp_test.data.imports.iter().map(|i| i.data().path()).collect::<Vec<&Path>>(),
1230            vec![Path::new("ds-test/test.sol"), Path::new("./Dapp.sol")]
1231        );
1232        assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
1233    }
1234
1235    #[test]
1236    fn can_print_dapp_sample_graph() {
1237        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
1238        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
1239        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
1240        let mut out = Vec::<u8>::new();
1241        tree::print(&graph, &Default::default(), &mut out).unwrap();
1242
1243        if !cfg!(windows) {
1244            assert_eq!(
1245                "
1246src/Dapp.sol >=0.6.6
1247src/Dapp.t.sol >=0.6.6
1248├── lib/ds-test/src/test.sol >=0.4.23
1249└── src/Dapp.sol >=0.6.6
1250"
1251                .trim_start()
1252                .as_bytes()
1253                .to_vec(),
1254                out
1255            );
1256        }
1257
1258        graph.edges.parser().compiler.enter(|c| {
1259            assert_eq!(c.gcx().sources.len(), 3);
1260        });
1261    }
1262
1263    #[test]
1264    #[cfg(not(target_os = "windows"))]
1265    fn can_print_hardhat_sample_graph() {
1266        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
1267        let paths = ProjectPathsConfig::hardhat(&root).unwrap();
1268        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
1269        let mut out = Vec::<u8>::new();
1270        tree::print(&graph, &Default::default(), &mut out).unwrap();
1271        assert_eq!(
1272            "contracts/Greeter.sol >=0.6.0
1273└── node_modules/hardhat/console.sol >=0.4.22, <0.9.0
1274",
1275            String::from_utf8(out).unwrap()
1276        );
1277    }
1278
1279    #[test]
1280    #[cfg(feature = "svm-solc")]
1281    fn test_print_unresolved() {
1282        use crate::{solc::SolcCompiler, ProjectBuilder};
1283
1284        let root =
1285            Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas");
1286        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
1287        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
1288        let Err(SolcError::Message(err)) = graph.get_input_node_versions(
1289            &ProjectBuilder::<SolcCompiler>::default()
1290                .paths(paths)
1291                .build(SolcCompiler::AutoDetect)
1292                .unwrap(),
1293        ) else {
1294            panic!("expected error");
1295        };
1296
1297        snapbox::assert_data_eq!(
1298            err,
1299            snapbox::str![[r#"
1300Found incompatible versions:
1301src/A.sol =0.8.25 imports:
1302    src/B.sol
1303    src/C.sol =0.7.0
1304"#]]
1305        );
1306    }
1307
1308    #[cfg(target_os = "linux")]
1309    #[test]
1310    fn can_read_different_case() {
1311        use crate::resolver::parse::SolData;
1312        use std::fs::{self, create_dir_all};
1313        use utils::tempdir;
1314
1315        let tmp_dir = tempdir("out").unwrap();
1316        let path = tmp_dir.path().join("forge-std");
1317        create_dir_all(&path).unwrap();
1318        let existing = path.join("Test.sol");
1319        let non_existing = path.join("test.sol");
1320        fs::write(
1321            existing,
1322            "
1323pragma solidity ^0.8.10;
1324contract A {}
1325        ",
1326        )
1327        .unwrap();
1328
1329        assert!(!non_existing.exists());
1330
1331        let found = crate::resolver::Node::<SolData>::read(&non_existing).unwrap_err();
1332        assert!(matches!(found, SolcError::ResolveCaseSensitiveFileName { .. }));
1333    }
1334}