foundry_compilers/resolver/
mod.rs

1//! Resolution of the entire dependency graph for a project.
2//!
3//! This module implements the core logic in taking all contracts of a project and creating a
4//! resolved graph with applied remappings for all source contracts.
5//!
6//! Some constraints we're working with when resolving contracts
7//!
8//!   1. Each file can contain several source units and can have any number of imports/dependencies
//!      (using the terms interchangeably). Each dependency can declare a version range that it is
//!      compatible with via the Solidity version pragma.
11//!   2. A dependency can be imported from any directory, see `Remappings`
12//!
13//! Finding all dependencies is fairly simple, we're simply doing a DFS, starting from the source
14//! contracts
15//!
16//! ## Solc version auto-detection
17//!
18//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution
19//! makes several assumptions and tries to find a version of "Solc" that is compatible with all
20//! source files.
21//!
22//! The algorithm employed here is fairly simple, we simply do a DFS over all the source files and
23//! find the set of Solc versions that the file and all its imports are compatible with, and then we
24//! try to find a single Solc version that is compatible with all the files. This is effectively the
25//! intersection of all version sets.
26//!
27//! We always try to activate the highest (installed) solc version first. Uninstalled solc is only
28//! used if this version is the only compatible version for a single file or in the intersection of
29//! all version sets.
30//!
31//! This leads to finding the optimal version, if there is one. If there is no single Solc version
32//! that is compatible with all sources and their imports, then suddenly this becomes a very
//! difficult problem, because it is unclear what the "best" solution would be. In this case, just choose the
34//! latest (installed) Solc version and try to minimize the number of Solc versions used.
35//!
36//! ## Performance
37//!
38//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing.
39//! The data that needs to be processed is proportional to the size of the dependency
40//! graph, which can, depending on the project, often be quite large.
41//!
42//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
43//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
44//! files, since we're only interested in their
45//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
46//! which is defined on a per source file basis.
47
48use crate::{
49    compilers::{Compiler, CompilerVersion, ParsedSource},
50    project::VersionedSources,
51    resolver::parse::SolParser,
52    ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig, SourceParser,
53};
54use core::fmt;
55use foundry_compilers_artifacts::sources::{Source, Sources};
56use foundry_compilers_core::{
57    error::{Result, SolcError},
58    utils,
59};
60use semver::{Version, VersionReq};
61use std::{
62    collections::{BTreeSet, HashMap, HashSet, VecDeque},
63    io,
64    path::{Path, PathBuf},
65};
66use yansi::{Color, Paint};
67
68pub mod parse;
69mod tree;
70
71pub use parse::SolImportAlias;
72pub use tree::{print, Charset, TreeOptions};
73
/// Container for result of version and profile resolution of sources contained in [`Graph`].
#[derive(Debug)]
pub struct ResolvedSources<'a, C: Compiler> {
    /// Resolved set of sources.
    ///
    /// Mapping from language to a [`Vec`] of compiler inputs consisting of version, sources set
    /// and settings.
    pub sources: VersionedSources<'a, C::Language, C::Settings>,
    /// A mapping from a source file path to the primary profile name selected for it.
    ///
    /// This is required because the same source file might be compiled with multiple different
    /// profiles if it's present as a dependency for other sources. We want to keep a single name
    /// of the profile which was chosen specifically for each source so that we can default to it.
    /// Right now, this is used when generating artifact names; the "primary" artifact will never
    /// have a profile suffix.
    pub primary_profiles: HashMap<PathBuf, &'a str>,
    /// Graph edges, retained so import relationships remain queryable after resolution.
    pub edges: GraphEdges<C::Parser>,
}
93
/// The underlying edges of the graph which only contains the raw relationship data.
///
/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
/// set is determined.
#[derive(Clone, Debug)]
pub struct GraphEdges<P: SourceParser> {
    /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
    /// is the set of outgoing edges for `nodes[0]`.
    edges: Vec<Vec<usize>>,
    /// Reverse of `edges`. That is, `rev_edges[0]` is the set of incoming edges for `nodes[0]`.
    rev_edges: Vec<Vec<usize>>,
    /// Maps a solidity file path to its node index, for fast lookup.
    indices: HashMap<PathBuf, usize>,
    /// Reverse of `indices` for index-to-path lookup.
    rev_indices: HashMap<usize, PathBuf>,
    /// The identified version requirement of a file; `None` if the file declared none.
    versions: HashMap<usize, Option<VersionReq>>,
    /// The extracted data from the source files, indexed like `edges`.
    data: Vec<P::ParsedSource>,
    /// The parser which parsed `data`.
    ///
    /// `None` only for default-constructed edges; set by [`Graph::resolve_sources`].
    parser: Option<P>,
    /// With how many input files we started with, corresponds to `let input_files =
    /// nodes[..num_input_files]`.
    ///
    /// Combined with `indices`, this lets us determine if a file was originally added to the
    /// graph as input or was added as a resolved import, see [`Self::is_input_file()`]
    num_input_files: usize,
    /// Tracks all imports that we failed to resolve for a file, as
    /// `(import path, importing file)` pairs.
    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
    /// Tracks additional include paths resolved by scanning all imports of the graph.
    ///
    /// Absolute imports, like `import "src/Contract.sol"` are possible, but this does not play
    /// nice with the standard-json import format, since the VFS won't be able to resolve
    /// "src/Contract.sol" without help via `--include-path`
    resolved_solc_include_paths: BTreeSet<PathBuf>,
}
130
131impl<P: SourceParser> Default for GraphEdges<P> {
132    fn default() -> Self {
133        Self {
134            edges: Default::default(),
135            rev_edges: Default::default(),
136            indices: Default::default(),
137            rev_indices: Default::default(),
138            versions: Default::default(),
139            data: Default::default(),
140            parser: Default::default(),
141            num_input_files: Default::default(),
142            unresolved_imports: Default::default(),
143            resolved_solc_include_paths: Default::default(),
144        }
145    }
146}
147
148impl<P: SourceParser> GraphEdges<P> {
149    /// Returns the parser used to parse the sources.
150    pub fn parser(&self) -> &P {
151        self.parser.as_ref().unwrap()
152    }
153
154    /// Returns the parser used to parse the sources.
155    pub fn parser_mut(&mut self) -> &mut P {
156        self.parser.as_mut().unwrap()
157    }
158
159    /// How many files are source files
160    pub fn num_source_files(&self) -> usize {
161        self.num_input_files
162    }
163
164    /// Returns an iterator over all file indices
165    pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
166        0..self.edges.len()
167    }
168
169    /// Returns an iterator over all source file indices
170    pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
171        0..self.num_input_files
172    }
173
174    /// Returns an iterator over all library files
175    pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
176        self.files().skip(self.num_input_files)
177    }
178
179    /// Returns all additional `--include-paths`
180    pub fn include_paths(&self) -> &BTreeSet<PathBuf> {
181        &self.resolved_solc_include_paths
182    }
183
184    /// Returns all imports that we failed to resolve
185    pub fn unresolved_imports(&self) -> &HashSet<(PathBuf, PathBuf)> {
186        &self.unresolved_imports
187    }
188
189    /// Returns a list of nodes the given node index points to for the given kind.
190    pub fn imported_nodes(&self, from: usize) -> &[usize] {
191        &self.edges[from]
192    }
193
194    /// Returns an iterator that yields all imports of a node and all their imports
195    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
196        NodesIter::new(from, self).skip(1)
197    }
198
199    /// Returns all files imported by the given file
200    pub fn imports(&self, file: &Path) -> HashSet<&Path> {
201        if let Some(start) = self.indices.get(file).copied() {
202            NodesIter::new(start, self).skip(1).map(move |idx| &*self.rev_indices[&idx]).collect()
203        } else {
204            HashSet::new()
205        }
206    }
207
208    /// Returns all files that import the given file
209    pub fn importers(&self, file: &Path) -> HashSet<&Path> {
210        if let Some(start) = self.indices.get(file).copied() {
211            self.rev_edges[start].iter().map(move |idx| &*self.rev_indices[idx]).collect()
212        } else {
213            HashSet::new()
214        }
215    }
216
217    /// Returns the id of the given file
218    pub fn node_id(&self, file: &Path) -> usize {
219        self.indices[file]
220    }
221
222    /// Returns the path of the given node
223    pub fn node_path(&self, id: usize) -> &Path {
224        &self.rev_indices[&id]
225    }
226
227    /// Returns true if the `file` was originally included when the graph was first created and not
228    /// added when all `imports` were resolved
229    pub fn is_input_file(&self, file: &Path) -> bool {
230        if let Some(idx) = self.indices.get(file).copied() {
231            idx < self.num_input_files
232        } else {
233            false
234        }
235    }
236
237    /// Returns the `VersionReq` for the given file
238    pub fn version_requirement(&self, file: &Path) -> Option<&VersionReq> {
239        self.indices.get(file).and_then(|idx| self.versions.get(idx)).and_then(Option::as_ref)
240    }
241
242    /// Returns the parsed source data for the given file
243    pub fn get_parsed_source(&self, file: &Path) -> Option<&P::ParsedSource>
244    where
245        P: SourceParser,
246    {
247        self.indices.get(file).and_then(|idx| self.data.get(*idx))
248    }
249}
250
/// Represents a fully-resolved solidity dependency graph.
///
/// Each node in the graph is a file and edges represent dependencies between them.
///
/// See also <https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files>
#[derive(Debug)]
pub struct Graph<P: SourceParser = SolParser> {
    /// All nodes in the project; a `Node` represents a single file.
    pub nodes: Vec<Node<P::ParsedSource>>,
    /// Relationship of the nodes, indexed in lockstep with `nodes`.
    edges: GraphEdges<P>,
    /// The root of the project this graph represents.
    root: PathBuf,
}
265
/// Shorthand for the `Language` associated type of a parser's parsed source.
type L<P> = <<P as SourceParser>::ParsedSource as ParsedSource>::Language;
267
268impl<P: SourceParser> Graph<P> {
    /// Returns the parser used to parse the sources.
    ///
    /// # Panics
    ///
    /// Panics if the edges' parser was never set; it is set during graph resolution.
    pub fn parser(&self) -> &P {
        self.edges.parser()
    }
273
274    /// Print the graph to `StdOut`
275    pub fn print(&self) {
276        self.print_with_options(Default::default())
277    }
278
279    /// Print the graph to `StdOut` using the provided `TreeOptions`
280    pub fn print_with_options(&self, opts: TreeOptions) {
281        let stdout = io::stdout();
282        let mut out = stdout.lock();
283        tree::print(self, &opts, &mut out).expect("failed to write to stdout.")
284    }
285
    /// Returns a list of nodes the given node index points to for the given kind.
    ///
    /// # Panics
    ///
    /// Panics if `from` is not a valid node index.
    pub fn imported_nodes(&self, from: usize) -> &[usize] {
        self.edges.imported_nodes(from)
    }
290
291    /// Returns an iterator that yields all imports of a node and all their imports
292    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
293        self.edges.all_imported_nodes(from)
294    }
295
296    /// Returns `true` if the given node has any outgoing edges.
297    pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool {
298        !self.edges.edges[index].is_empty()
299    }
300
    /// Returns all the resolved files and their index in the graph.
    ///
    /// The index is the position of the file's node in `nodes`.
    pub fn files(&self) -> &HashMap<PathBuf, usize> {
        &self.edges.indices
    }
305
    /// Returns `true` if the graph is empty, i.e. no files were resolved.
    pub fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
310
    /// Gets a node by index.
    ///
    /// Valid indices can be obtained from [`Self::files`].
    ///
    /// # Panics
    ///
    /// if the `index` node id is not included in the graph
    pub fn node(&self, index: usize) -> &Node<P::ParsedSource> {
        &self.nodes[index]
    }
319
320    pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, P::ParsedSource> {
321        DisplayNode { node: self.node(index), root: &self.root }
322    }
323
    /// Returns an iterator that yields all nodes of the dependency tree that the given node id
    /// spans, starting with the node itself.
    ///
    /// # Panics
    ///
    /// if the `start` node id is not included in the graph
    pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
        // walks the dependency tree rooted at `start`
        NodesIter::new(start, &self.edges)
    }
333
334    /// Same as `Self::node_ids` but returns the actual `Node`
335    pub fn nodes(&self, start: usize) -> impl Iterator<Item = &Node<P::ParsedSource>> + '_ {
336        self.node_ids(start).map(move |idx| self.node(idx))
337    }
338
339    fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges<P>) {
340        let Self { nodes, mut edges, .. } = self;
341        // need to move the extracted data to the edges, essentially splitting the node so we have
342        // access to the data at a later stage in the compile pipeline
343        let mut sources = Vec::new();
344        for (idx, node) in nodes.into_iter().enumerate() {
345            let Node { path, source, data } = node;
346            sources.push((path, source));
347            let idx2 = edges.data.len();
348            edges.data.push(data);
349            assert_eq!(idx, idx2);
350        }
351
352        (sources, edges)
353    }
354
355    /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
356    /// returning the `nodes` converted to `Sources`
357    pub fn into_sources(self) -> (Sources, GraphEdges<P>) {
358        let (sources, edges) = self.split();
359        (sources.into_iter().collect(), edges)
360    }
361
362    /// Returns an iterator that yields only those nodes that represent input files.
363    /// See `Self::resolve_sources`
364    /// This won't yield any resolved library nodes
365    pub fn input_nodes(&self) -> impl Iterator<Item = &Node<P::ParsedSource>> {
366        self.nodes.iter().take(self.edges.num_input_files)
367    }
368
    /// Returns all files (transitively) imported by the given file.
    ///
    /// Returns an empty set if the file is not part of the graph.
    pub fn imports(&self, path: &Path) -> HashSet<&Path> {
        self.edges.imports(path)
    }
373
    /// Resolves a number of sources within the given config
    #[instrument(name = "Graph::resolve_sources", skip_all)]
    pub fn resolve_sources(
        paths: &ProjectPathsConfig<<P::ParsedSource as ParsedSource>::Language>,
        mut sources: Sources,
    ) -> Result<Self> {
        /// checks if the given target path was already resolved, if so it adds its id to the list
        /// of resolved imports. If it hasn't been resolved yet, it queues in the file for
        /// processing
        fn add_node<P: SourceParser>(
            parser: &mut P,
            unresolved: &mut VecDeque<(PathBuf, Node<P::ParsedSource>)>,
            index: &mut HashMap<PathBuf, usize>,
            resolved_imports: &mut Vec<usize>,
            target: PathBuf,
        ) -> Result<()> {
            if let Some(idx) = index.get(&target).copied() {
                resolved_imports.push(idx);
            } else {
                // imported file is not part of the input files
                let node = parser.read(&target)?;
                unresolved.push_back((target.clone(), node));
                // newly discovered files get the next free index
                let idx = index.len();
                index.insert(target, idx);
                resolved_imports.push(idx);
            }
            Ok(())
        }

        // The cache relies on the absolute paths relative to the project root as cache keys.
        sources.make_absolute(&paths.root);

        let mut parser = P::new(paths.with_language_ref());

        // we start off by reading all input files, which includes all solidity files from the
        // source and test folder
        let mut unresolved: VecDeque<_> = parser.parse_sources(&mut sources)?.into();

        // identifiers of all resolved files
        let mut index: HashMap<_, _> =
            unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();

        // everything queued so far is an input file; imports discovered below get higher indices
        let num_input_files = unresolved.len();

        // contains the files and their dependencies
        let mut nodes = Vec::with_capacity(unresolved.len());
        let mut edges = Vec::with_capacity(unresolved.len());
        let mut rev_edges = Vec::with_capacity(unresolved.len());

        // tracks additional paths that should be used with `--include-path`, these are libraries
        // that use absolute imports like `import "src/Contract.sol"`
        let mut resolved_solc_include_paths = BTreeSet::new();
        resolved_solc_include_paths.insert(paths.root.clone());

        // keep track of all unique paths that we failed to resolve to not spam the reporter with
        // the same path
        let mut unresolved_imports = HashSet::new();

        // now we need to resolve all imports for the source file and those imported from other
        // locations; this is a worklist traversal that drains the queue while `add_node` refills it
        while let Some((path, node)) = unresolved.pop_front() {
            let mut resolved_imports = Vec::new();
            // parent directory of the current file
            let cwd = match path.parent() {
                Some(inner) => inner,
                None => continue,
            };

            for import_path in node.data.resolve_imports(paths, &mut resolved_solc_include_paths)? {
                if let Some(err) = match paths.resolve_import_and_include_paths(
                    cwd,
                    &import_path,
                    &mut resolved_solc_include_paths,
                ) {
                    Ok(import) => add_node(
                        &mut parser,
                        &mut unresolved,
                        &mut index,
                        &mut resolved_imports,
                        import,
                    )
                    .err(),
                    Err(err) => Some(err),
                } {
                    // record the failure but keep resolving the remaining imports
                    unresolved_imports.insert((import_path.to_path_buf(), node.path.clone()));
                    trace!("failed to resolve import component \"{:?}\" for {:?}", err, node.path)
                }
            }

            nodes.push(node);
            edges.push(resolved_imports);
            // Will be populated later
            rev_edges.push(Vec::new());
        }

        // Build `rev_edges`
        for (idx, edges) in edges.iter().enumerate() {
            for &edge in edges.iter() {
                rev_edges[edge].push(idx);
            }
        }

        if !unresolved_imports.is_empty() {
            // notify on all unresolved imports
            crate::report::unresolved_imports(
                &unresolved_imports
                    .iter()
                    .map(|(i, f)| (i.as_path(), f.as_path()))
                    .collect::<Vec<_>>(),
                &paths.remappings,
            );
        }

        // give the parser a chance to post-process once the whole graph is known
        parser.finalize_imports(&mut nodes, &resolved_solc_include_paths)?;

        let edges = GraphEdges {
            edges,
            rev_edges,
            rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
            indices: index,
            num_input_files,
            versions: nodes
                .iter()
                .enumerate()
                .map(|(idx, node)| (idx, node.data.version_req().cloned()))
                .collect(),
            // `data` stays empty here; it is filled when `split` consumes the nodes
            data: Default::default(),
            parser: Some(parser),
            unresolved_imports,
            resolved_solc_include_paths,
        };
        Ok(Self { nodes, edges, root: paths.root.clone() })
    }
507
508    /// Resolves the dependencies of a project's source contracts
509    pub fn resolve(
510        paths: &ProjectPathsConfig<<P::ParsedSource as ParsedSource>::Language>,
511    ) -> Result<Self> {
512        Self::resolve_sources(paths, paths.read_input_files()?)
513    }
514
515    /// Consumes the nodes of the graph and returns all input files together with their appropriate
516    /// version and the edges of the graph
517    ///
518    /// First we determine the compatible version for each input file (from sources and test folder,
519    /// see `Self::resolve`) and then we add all resolved library imports.
520    pub fn into_sources_by_version<C, T>(
521        self,
522        project: &Project<C, T>,
523    ) -> Result<ResolvedSources<'_, C>>
524    where
525        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
526        C: Compiler<Parser = P, Language = <P::ParsedSource as ParsedSource>::Language>,
527    {
528        /// insert the imports of the given node into the sources map
529        /// There can be following graph:
530        /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
531        /// where `C` is a library import, in which case we assign `C` only to the first input file.
532        /// However, it's not required to include them in the solc `CompilerInput` as they would get
533        /// picked up by solc otherwise, but we add them, so we can create a corresponding
534        /// cache entry for them as well. This can be optimized however
535        fn insert_imports(
536            idx: usize,
537            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
538            sources: &mut Sources,
539            edges: &[Vec<usize>],
540            processed_sources: &mut HashSet<usize>,
541        ) {
542            // iterate over all dependencies not processed yet
543            for dep in edges[idx].iter().copied() {
544                // keep track of processed dependencies, if the dep was already in the set we have
545                // processed it already
546                if !processed_sources.insert(dep) {
547                    continue;
548                }
549
550                // library import
551                if let Some((path, source)) = all_nodes.get(&dep).cloned() {
552                    sources.insert(path, source);
553                    insert_imports(dep, all_nodes, sources, edges, processed_sources);
554                }
555            }
556        }
557
558        let versioned_nodes = self.get_input_node_versions(project)?;
559        let versioned_nodes = self.resolve_settings(project, versioned_nodes)?;
560        let (nodes, edges) = self.split();
561
562        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
563
564        let mut resulted_sources = HashMap::new();
565        let mut default_profiles = HashMap::new();
566
567        let profiles = project.settings_profiles().collect::<Vec<_>>();
568
569        // determine the `Sources` set for each solc version
570        for (language, versioned_nodes) in versioned_nodes {
571            let mut versioned_sources = Vec::with_capacity(versioned_nodes.len());
572
573            for (version, profile_to_nodes) in versioned_nodes {
574                for (profile_idx, input_node_indexes) in profile_to_nodes {
575                    let mut sources = Sources::new();
576
577                    // all input nodes will be processed
578                    let mut processed_sources = input_node_indexes.iter().copied().collect();
579
580                    // we only process input nodes (from sources, tests for example)
581                    for idx in input_node_indexes {
582                        // insert the input node in the sources set and remove it from the available
583                        // set
584                        let (path, source) =
585                            all_nodes.get(&idx).cloned().expect("node is preset. qed");
586
587                        default_profiles.insert(path.clone(), profiles[profile_idx].0);
588                        sources.insert(path, source);
589                        insert_imports(
590                            idx,
591                            &mut all_nodes,
592                            &mut sources,
593                            &edges.edges,
594                            &mut processed_sources,
595                        );
596                    }
597                    versioned_sources.push((version.clone(), sources, profiles[profile_idx]));
598                }
599            }
600
601            resulted_sources.insert(language, versioned_sources);
602        }
603
604        Ok(ResolvedSources { sources: resulted_sources, primary_profiles: default_profiles, edges })
605    }
606
    /// Writes the list of imported files into the given formatter:
    ///
    /// ```text
    /// path/to/a.sol (<version>) imports:
    ///     path/to/b.sol (<version>)
    ///     path/to/c.sol (<version>)
    ///     ...
    /// ```
    fn format_imports_list<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
        W: std::fmt::Write,
    >(
        &self,
        idx: usize,
        incompatible: HashSet<usize>,
        project: &Project<C, T>,
        f: &mut W,
    ) -> std::result::Result<(), std::fmt::Error> {
        // renders a single node as its root-relative path plus version requirement,
        // painted red when the node is part of the incompatible set
        let format_node = |idx, f: &mut W| {
            let node = self.node(idx);
            let color = if incompatible.contains(&idx) { Color::Red } else { Color::White };

            let mut line = utils::source_name(&node.path, &self.root).display().to_string();
            if let Some(req) = self.version_requirement(idx, project) {
                line.push_str(&format!(" {req}"));
            }

            write!(f, "{}", line.paint(color))
        };
        // root node first, then all of its transitive imports indented below it
        format_node(idx, f)?;
        write!(f, " imports:")?;
        for dep in self.node_ids(idx).skip(1) {
            write!(f, "\n    ")?;
            format_node(dep, f)?;
        }

        Ok(())
    }
646
647    /// Combines version requirement parsed from file and from project restrictions.
648    fn version_requirement<
649        C: Compiler,
650        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
651    >(
652        &self,
653        idx: usize,
654        project: &Project<C, T>,
655    ) -> Option<VersionReq> {
656        let node = self.node(idx);
657        let parsed_req = node.data.version_req();
658        let other_req = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref());
659
660        match (parsed_req, other_req) {
661            (Some(parsed_req), Some(other_req)) => {
662                let mut req = parsed_req.clone();
663                req.comparators.extend(other_req.comparators.clone());
664                Some(req)
665            }
666            (Some(parsed_req), None) => Some(parsed_req.clone()),
667            (None, Some(other_req)) => Some(other_req.clone()),
668            _ => None,
669        }
670    }
671
672    /// Checks that the file's version is even available.
673    ///
674    /// This returns an error if the file's version is invalid semver, or is not available such as
675    /// 0.8.20, if the highest available version is `0.8.19`
676    fn check_available_version<
677        C: Compiler,
678        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
679    >(
680        &self,
681        idx: usize,
682        all_versions: &[&CompilerVersion],
683        project: &Project<C, T>,
684    ) -> std::result::Result<(), SourceVersionError> {
685        let Some(req) = self.version_requirement(idx, project) else { return Ok(()) };
686
687        if !all_versions.iter().any(|v| req.matches(v.as_ref())) {
688            return if project.offline {
689                Err(SourceVersionError::NoMatchingVersionOffline(req))
690            } else {
691                Err(SourceVersionError::NoMatchingVersion(req))
692            };
693        }
694
695        Ok(())
696    }
697
    /// Filters incompatible versions from the `candidates`. It iterates over node imports and in
    /// case if there is no compatible version it returns the latest seen node id.
    fn retain_compatible_versions<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        candidates: &mut Vec<&CompilerVersion>,
        project: &Project<C, T>,
    ) -> Result<(), String> {
        // snapshot of the candidates before any filtering, used for error reporting below
        let mut all_versions = candidates.clone();

        // `idx` itself plus everything it (transitively) imports
        let nodes: Vec<_> = self.node_ids(idx).collect();
        let mut failed_node_idx = None;
        for node in nodes.iter() {
            if let Some(req) = self.version_requirement(*node, project) {
                candidates.retain(|v| req.matches(v.as_ref()));

                if candidates.is_empty() {
                    // remember which node emptied the candidate set
                    failed_node_idx = Some(*node);
                    break;
                }
            }
        }

        let Some(failed_node_idx) = failed_node_idx else {
            // everything is fine
            return Ok(());
        };

        // This now keeps data for the node which were the last one before we had no candidates
        // left. It means that there is a node directly conflicting with it in `nodes` coming
        // before.
        let failed_node = self.node(failed_node_idx);

        if let Err(version_err) =
            self.check_available_version(failed_node_idx, &all_versions, project)
        {
            // check if the version is even valid
            let f = utils::source_name(&failed_node.path, &self.root).display();
            return Err(format!("Encountered invalid solc version in {f}: {version_err}"));
        } else {
            // if the node requirement makes sense, it means that there is at least one node
            // which requirement conflicts with it

            // retain only versions compatible with the `failed_node`
            if let Some(req) = self.version_requirement(failed_node_idx, project) {
                all_versions.retain(|v| req.matches(v.as_ref()));
            }

            // iterate over all the nodes once again and find the one incompatible
            for node in &nodes {
                if self.check_available_version(*node, &all_versions, project).is_err() {
                    let mut msg = "Found incompatible versions:\n".white().to_string();

                    // highlight the two conflicting nodes in the rendered import list
                    self.format_imports_list(
                        idx,
                        [*node, failed_node_idx].into(),
                        project,
                        &mut msg,
                    )
                    .unwrap();
                    return Err(msg);
                }
            }
        }

        // fallback: no single conflicting pair was identified, report the whole import list
        let mut msg = "Found incompatible versions:\n".white().to_string();
        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
        Err(msg)
    }
770
    /// Filters profiles incompatible with the given node and its imports.
    ///
    /// Narrows `candidates` down to the settings profiles that satisfy the settings
    /// restrictions of node `idx` and all of its transitive imports.
    ///
    /// Returns `Ok(())` if at least one candidate survives. Otherwise returns an error
    /// message identifying the node(s) whose restrictions conflict, built via
    /// `format_imports_list`.
    fn retain_compatible_profiles<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        project: &Project<C, T>,
        candidates: &mut Vec<(usize, (&str, &C::Settings))>,
    ) -> Result<(), String> {
        // keep an untouched copy of the full profile list so we can re-filter from
        // scratch below when pinpointing which nodes conflict
        let mut all_profiles = candidates.clone();

        // node `idx` plus all of its transitive imports
        let nodes: Vec<_> = self.node_ids(idx).collect();
        let mut failed_node_idx = None;
        for node in nodes.iter() {
            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
                candidates.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
                if candidates.is_empty() {
                    // remember the node at which the candidate set became empty
                    failed_node_idx = Some(*node);
                    break;
                }
            }
        }

        let Some(failed_node_idx) = failed_node_idx else {
            // everything is fine
            return Ok(());
        };

        // `failed_node_idx` is the node whose restrictions emptied the candidate set, which
        // means some node earlier in `nodes` must be directly conflicting with it
        let failed_node = self.node(failed_node_idx);

        // retain only profiles compatible with the `failed_node`
        if let Some(req) = project.restrictions.get(&failed_node.path) {
            all_profiles.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
        }

        if all_profiles.is_empty() {
            // no configured profile can satisfy the failed node even on its own
            let f = utils::source_name(&failed_node.path, &self.root).display();
            return Err(format!("Missing profile satisfying settings restrictions for {f}"));
        }

        // iterate over all the nodes once again and find the one incompatible
        for node in &nodes {
            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
                if !all_profiles
                    .iter()
                    .any(|(_, (_, settings))| settings.satisfies_restrictions(&**req))
                {
                    let mut msg = "Found incompatible settings restrictions:\n".white().to_string();

                    self.format_imports_list(
                        idx,
                        [*node, failed_node_idx].into(),
                        project,
                        &mut msg,
                    )
                    .unwrap();
                    return Err(msg);
                }
            }
        }

        // fallback: no single conflicting pair could be identified, report all nodes
        let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
        Err(msg)
    }
837
838    fn input_nodes_by_language(&self) -> HashMap<L<P>, Vec<usize>> {
839        let mut nodes = HashMap::new();
840
841        for idx in 0..self.edges.num_input_files {
842            nodes.entry(self.nodes[idx].data.language()).or_insert_with(Vec::new).push(idx);
843        }
844
845        nodes
846    }
847
    /// Returns a map of versions together with the input nodes that are compatible with that
    /// version.
    ///
    /// This will essentially do a DFS on all input sources and their transitive imports and
    /// checking that all can be compiled with the version stated in the input file.
    ///
    /// Returns an error message with __all__ input files that don't have compatible imports.
    ///
    /// This also attempts to prefer local installations over remote available.
    /// If `offline` is set to `true` then only already installed versions are considered.
    #[allow(clippy::type_complexity)]
    fn get_input_node_versions<
        C: Compiler<Language = L<P>>,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        project: &Project<C, T>,
    ) -> Result<HashMap<L<P>, HashMap<Version, Vec<usize>>>> {
        trace!("resolving input node versions");

        let mut resulted_nodes = HashMap::new();

        for (language, nodes) in self.input_nodes_by_language() {
            // this is likely called by an application and will be eventually printed so we don't
            // exit on first error, instead gather all the errors and return a bundled
            // error message instead
            let mut errors = Vec::new();

            // the sorted list of all versions; in offline mode only versions that are
            // already installed locally are considered
            let all_versions = if project.offline {
                project
                    .compiler
                    .available_versions(&language)
                    .into_iter()
                    .filter(|v| v.is_installed())
                    .collect()
            } else {
                project.compiler.available_versions(&language)
            };

            if all_versions.is_empty() && !nodes.is_empty() {
                return Err(SolcError::msg(format!(
                    "Found {language} sources, but no compiler versions are available for it"
                )));
            }

            // stores all versions and their nodes that can be compiled
            let mut versioned_nodes = HashMap::new();

            // stores all files and the versions they're compatible with
            let mut all_candidates = Vec::with_capacity(self.edges.num_input_files);
            // walking through the node's dep tree and filtering the versions along the way
            for idx in nodes {
                let mut candidates = all_versions.iter().collect::<Vec<_>>();
                // remove all incompatible versions from the candidates list by checking the node
                // and all its imports
                if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, project) {
                    errors.push(err);
                } else {
                    // found viable candidates, pick the most recent version that's already
                    // installed
                    let candidate =
                        if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                            candidates[pos]
                        } else {
                            candidates.last().expect("not empty; qed.")
                        }
                        .clone();

                    // also store all possible candidates to optimize the set
                    all_candidates.push((idx, candidates.into_iter().collect::<HashSet<_>>()));

                    versioned_nodes
                        .entry(candidate)
                        .or_insert_with(|| Vec::with_capacity(1))
                        .push(idx);
                }
            }

            // detected multiple versions but there might still exist a single version that
            // satisfies all sources
            if versioned_nodes.len() > 1 {
                versioned_nodes = Self::resolve_multiple_versions(all_candidates);
            }

            if versioned_nodes.len() == 1 {
                trace!(
                    "found exact solc version for all sources  \"{}\"",
                    versioned_nodes.keys().next().unwrap()
                );
            }

            if errors.is_empty() {
                trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys());
                resulted_nodes.insert(
                    language,
                    versioned_nodes
                        .into_iter()
                        .map(|(v, nodes)| (Version::from(v), nodes))
                        .collect(),
                );
            } else {
                // any failure for any input file fails the whole resolution, with all
                // collected messages bundled together
                let s = errors.join("\n");
                debug!("failed to resolve versions: {s}");
                return Err(SolcError::msg(s));
            }
        }

        Ok(resulted_nodes)
    }
958
959    #[allow(clippy::complexity)]
960    fn resolve_settings<
961        C: Compiler<Language = L<P>>,
962        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
963    >(
964        &self,
965        project: &Project<C, T>,
966        input_nodes_versions: HashMap<L<P>, HashMap<Version, Vec<usize>>>,
967    ) -> Result<HashMap<L<P>, HashMap<Version, HashMap<usize, Vec<usize>>>>> {
968        let mut resulted_sources = HashMap::new();
969        let mut errors = Vec::new();
970        for (language, versions) in input_nodes_versions {
971            let mut versioned_sources = HashMap::new();
972            for (version, nodes) in versions {
973                let mut profile_to_nodes = HashMap::new();
974                for idx in nodes {
975                    let mut profile_candidates =
976                        project.settings_profiles().enumerate().collect::<Vec<_>>();
977                    if let Err(err) =
978                        self.retain_compatible_profiles(idx, project, &mut profile_candidates)
979                    {
980                        errors.push(err);
981                    } else {
982                        let (profile_idx, _) = profile_candidates.first().expect("exists");
983                        profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx);
984                    }
985                }
986                versioned_sources.insert(version, profile_to_nodes);
987            }
988            resulted_sources.insert(language, versioned_sources);
989        }
990
991        if errors.is_empty() {
992            Ok(resulted_sources)
993        } else {
994            let s = errors.join("\n");
995            debug!("failed to resolve settings: {s}");
996            Err(SolcError::msg(s))
997        }
998    }
999
    /// Tries to find the "best" set of versions to nodes, See [Solc version
    /// auto-detection](#solc-version-auto-detection)
    ///
    /// If a single version satisfies every node it is used for all of them; otherwise the
    /// number of distinct versions is greedily minimized by reusing versions already
    /// assigned to earlier nodes.
    ///
    /// This is a bit inefficient but is fine, the max. number of versions is ~80 and there's
    /// a high chance that the number of source files is <50, even for larger projects.
    fn resolve_multiple_versions(
        all_candidates: Vec<(usize, HashSet<&CompilerVersion>)>,
    ) -> HashMap<CompilerVersion, Vec<usize>> {
        // returns the intersection as sorted set of nodes
        fn intersection<'a>(
            mut sets: Vec<&HashSet<&'a CompilerVersion>>,
        ) -> Vec<&'a CompilerVersion> {
            if sets.is_empty() {
                return Vec::new();
            }

            // start from one set and keep only the items contained in all remaining sets
            let mut result = sets.pop().cloned().expect("not empty; qed.");
            if !sets.is_empty() {
                result.retain(|item| sets.iter().all(|set| set.contains(item)));
            }

            let mut v = result.into_iter().collect::<Vec<_>>();
            v.sort_unstable();
            v
        }

        /// returns the highest version that is installed
        /// if the candidates set only contains uninstalled versions then this returns the highest
        /// uninstalled version
        fn remove_candidate(candidates: &mut Vec<&CompilerVersion>) -> CompilerVersion {
            debug_assert!(!candidates.is_empty());

            // `candidates` is sorted ascending, so `rposition`/`pop` pick the highest
            if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                candidates.remove(pos)
            } else {
                candidates.pop().expect("not empty; qed.")
            }
            .clone()
        }

        let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect();

        // find all versions that satisfy all nodes
        let mut intersection = intersection(all_sets);
        if !intersection.is_empty() {
            // a single version works for every input file: assign it to all of them
            let exact_version = remove_candidate(&mut intersection);
            let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect();
            trace!("resolved solc version compatible with all sources  \"{}\"", exact_version);
            return HashMap::from([(exact_version, all_nodes)]);
        }

        // no version satisfies all nodes
        let mut versioned_nodes: HashMap<_, _> = HashMap::new();

        // try to minimize the set of versions, this is guaranteed to lead to `versioned_nodes.len()
        // > 1` as no solc version exists that can satisfy all sources
        for (node, versions) in all_candidates {
            // need to sort them again
            let mut versions = versions.into_iter().collect::<Vec<_>>();
            versions.sort_unstable();

            let candidate = if let Some(idx) =
                versions.iter().rposition(|v| versioned_nodes.contains_key(*v))
            {
                // use a version that's already in the set
                versions.remove(idx).clone()
            } else {
                // use the highest version otherwise
                remove_candidate(&mut versions)
            };

            versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node);
        }

        trace!(
            "no solc version can satisfy all source files, resolved multiple versions  \"{:?}\"",
            versioned_nodes.keys()
        );

        versioned_nodes
    }
1081}
1082
/// An iterator over a node and its dependencies
///
/// Traverses the graph breadth-first: nodes are popped from the front while newly
/// discovered imports are appended to the back (see the `Iterator` impl).
#[derive(Debug)]
pub struct NodesIter<'a, P: SourceParser> {
    /// stack of nodes
    ///
    /// NOTE: despite the name this is used as a FIFO queue (`pop_front` in `next`)
    stack: VecDeque<usize>,
    /// nodes already yielded, so each node's imports are expanded at most once
    visited: HashSet<usize>,
    /// the graph holding the import edges
    graph: &'a GraphEdges<P>,
}
1091
1092impl<'a, P: SourceParser> NodesIter<'a, P> {
1093    fn new(start: usize, graph: &'a GraphEdges<P>) -> Self {
1094        Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph }
1095    }
1096}
1097
1098impl<P: SourceParser> Iterator for NodesIter<'_, P> {
1099    type Item = usize;
1100    fn next(&mut self) -> Option<Self::Item> {
1101        let node = self.stack.pop_front()?;
1102
1103        if self.visited.insert(node) {
1104            // push the node's direct dependencies to the stack if we haven't visited it already
1105            self.stack.extend(self.graph.imported_nodes(node).iter().copied());
1106        }
1107        Some(node)
1108    }
1109}
1110
/// A single source file in the dependency graph: its path, raw content and parsed data.
#[derive(Debug)]
pub struct Node<S> {
    /// path of the solidity file
    path: PathBuf,
    /// content of the solidity file
    source: Source,
    /// parsed data
    pub data: S,
}
1120
1121impl<S> Node<S> {
1122    pub fn new(path: PathBuf, source: Source, data: S) -> Self {
1123        Self { path, source, data }
1124    }
1125
1126    pub fn map_data<T>(self, f: impl FnOnce(S) -> T) -> Node<T> {
1127        Node::new(self.path, self.source, f(self.data))
1128    }
1129}
1130
1131impl<S: ParsedSource> Node<S> {
1132    /// Reads the content of the file and returns a [Node] containing relevant information
1133    pub fn read(file: &Path) -> Result<Self> {
1134        let source = Source::read_(file)?;
1135        Self::parse(file, source)
1136    }
1137
1138    pub fn parse(file: &Path, source: Source) -> Result<Self> {
1139        let data = S::parse(source.as_ref(), file)?;
1140        Ok(Self::new(file.to_path_buf(), source, data))
1141    }
1142
1143    /// Returns the path of the file.
1144    pub fn path(&self) -> &Path {
1145        &self.path
1146    }
1147
1148    /// Returns the contents of the file.
1149    pub fn content(&self) -> &str {
1150        &self.source.content
1151    }
1152
1153    pub fn unpack(&self) -> (&Path, &Source) {
1154        (&self.path, &self.source)
1155    }
1156}
1157
/// Helper type for formatting a node
///
/// Renders the node's path relative to `root`, followed by its version requirement
/// when the parsed source declares one.
pub(crate) struct DisplayNode<'a, S> {
    /// the node to render
    node: &'a Node<S>,
    /// the project root the path is displayed relative to
    root: &'a PathBuf,
}
1163
1164impl<S: ParsedSource> fmt::Display for DisplayNode<'_, S> {
1165    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1166        let path = utils::source_name(&self.node.path, self.root);
1167        write!(f, "{}", path.display())?;
1168        if let Some(v) = self.node.data.version_req() {
1169            write!(f, " {v}")?;
1170        }
1171        Ok(())
1172    }
1173}
1174
/// Errors thrown when checking the solc version of a file
#[derive(Debug, thiserror::Error)]
// NOTE(review): variants appear unconstructed within this view — confirm usage elsewhere
#[allow(dead_code)]
enum SourceVersionError {
    /// The version pragma string could not be parsed as a semver requirement.
    #[error("Failed to parse solidity version {0}: {1}")]
    InvalidVersion(String, SolcError),
    /// No known solc release satisfies the requirement.
    #[error("No solc version exists that matches the version requirement: {0}")]
    NoMatchingVersion(VersionReq),
    /// A matching release may exist but none is installed locally.
    #[error("No solc version installed that matches the version requirement: {0}")]
    NoMatchingVersionOffline(VersionReq),
}
1186
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_resolve_hardhat_dependency_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(&root).unwrap();

        let graph = Graph::<SolParser>::resolve(&paths).unwrap();

        // one input source plus its single import resolved from node_modules
        assert_eq!(graph.edges.num_input_files, 1);
        assert_eq!(graph.files().len(), 2);

        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Greeter.sol"), 0),
                (paths.root.join("node_modules/hardhat/console.sol"), 1),
            ])
        );
    }

    #[test]
    fn can_resolve_dapp_dependency_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();

        let graph = Graph::<SolParser>::resolve(&paths).unwrap();

        // two input sources plus the shared ds-test lib dependency
        assert_eq!(graph.edges.num_input_files, 2);
        assert_eq!(graph.files().len(), 3);
        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Dapp.sol"), 0),
                (paths.sources.join("Dapp.t.sol"), 1),
                (paths.root.join("lib/ds-test/src/test.sol"), 2),
            ])
        );

        // verify the import edges of the test contract (node 1)
        let dapp_test = graph.node(1);
        assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
        assert_eq!(
            dapp_test.data.imports.iter().map(|i| i.data().path()).collect::<Vec<&Path>>(),
            vec![Path::new("ds-test/test.sol"), Path::new("./Dapp.sol")]
        );
        assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
    }

    #[test]
    fn can_print_dapp_sample_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();

        // tree drawing characters/paths differ on windows, so only check elsewhere
        if !cfg!(windows) {
            assert_eq!(
                "
src/Dapp.sol >=0.6.6
src/Dapp.t.sol >=0.6.6
├── lib/ds-test/src/test.sol >=0.4.23
└── src/Dapp.sol >=0.6.6
"
                .trim_start()
                .as_bytes()
                .to_vec(),
                out
            );
        }

        graph.edges.parser().compiler.enter(|c| {
            assert_eq!(c.gcx().sources.len(), 3);
        });
    }

    #[test]
    #[cfg(not(target_os = "windows"))]
    fn can_print_hardhat_sample_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(&root).unwrap();
        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();
        assert_eq!(
            "contracts/Greeter.sol >=0.6.0
└── node_modules/hardhat/console.sol >=0.4.22, <0.9.0
",
            String::from_utf8(out).unwrap()
        );
    }

    #[test]
    #[cfg(feature = "svm-solc")]
    fn test_print_unresolved() {
        use crate::{solc::SolcCompiler, ProjectBuilder};

        // sources with conflicting pragmas must fail version resolution with a
        // readable conflict report
        let root =
            Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
        let graph = Graph::<SolParser>::resolve(&paths).unwrap();
        let Err(SolcError::Message(err)) = graph.get_input_node_versions(
            &ProjectBuilder::<SolcCompiler>::default()
                .paths(paths)
                .build(SolcCompiler::AutoDetect)
                .unwrap(),
        ) else {
            panic!("expected error");
        };

        snapbox::assert_data_eq!(
            err,
            snapbox::str![[r#"
Found incompatible versions:
src/A.sol =0.8.25 imports:
    src/B.sol
    src/C.sol =0.7.0
"#]]
        );
    }

    #[cfg(target_os = "linux")]
    #[test]
    fn can_read_different_case() {
        use crate::resolver::parse::SolData;
        use std::fs::{self, create_dir_all};
        use utils::tempdir;

        // create Test.sol, then try to read it as test.sol — on case-sensitive
        // filesystems this must produce a resolve error rather than succeed
        let tmp_dir = tempdir("out").unwrap();
        let path = tmp_dir.path().join("forge-std");
        create_dir_all(&path).unwrap();
        let existing = path.join("Test.sol");
        let non_existing = path.join("test.sol");
        fs::write(
            existing,
            "
pragma solidity ^0.8.10;
contract A {}
        ",
        )
        .unwrap();

        assert!(!non_existing.exists());

        let found = crate::resolver::Node::<SolData>::read(&non_existing).unwrap_err();
        matches!(found, SolcError::ResolveCaseSensitiveFileName { .. });
    }
}