foundry_compilers/resolver/
mod.rs

//! Resolution of the entire dependency graph for a project.
//!
//! This module implements the core logic of taking all contracts of a project and creating a
//! resolved graph with remappings applied for all source contracts.
//!
//! Some constraints we're working with when resolving contracts:
//!
//!   1. Each file can contain several source units and can have any number of imports/dependencies
//!      (using the term interchangeably). Each dependency can declare a version range that it is
//!      compatible with via its solidity version pragma.
//!   2. A dependency can be imported from any directory, see `Remappings`.
//!
//! Finding all dependencies is fairly simple: we do a DFS starting from the source contracts.
//!
//!
//! ## Solc version auto-detection
//!
//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution
//! makes several assumptions and tries to find a version of "Solc" that is compatible with all
//! source files.
//!
//! The algorithm employed here is fairly simple: we do a DFS over all the source files, find the
//! set of Solc versions that each file and all its imports are compatible with, and then try to
//! find a single Solc version that is compatible with all the files. This is effectively the
//! intersection of all version sets.
//!
//! We always try to activate the highest (installed) solc version first. An uninstalled solc is
//! only used if it is the only compatible version for a single file or in the intersection of all
//! version sets.
//!
//! This leads to finding the optimal version, if there is one. If there is no single Solc version
//! that is compatible with all sources and their imports, this becomes a much harder problem,
//! because it is unclear what the "best" solution would be. In this case, we just choose the
//! latest (installed) Solc version for each source and try to minimize the number of Solc versions
//! used.
//!
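//! As an illustration, here is a minimal sketch of the intersection step with made-up
//! requirements; this is not the actual implementation, which lives in the version resolution
//! code of this module:
//!
//! ```ignore
//! use semver::{Version, VersionReq};
//!
//! // available solc versions and the version requirements of three files
//! let available = ["0.7.6", "0.8.19", "0.8.25"].map(|v| Version::parse(v).unwrap());
//! let reqs = ["^0.8.0", ">=0.8.19", "<0.9.0"].map(|r| VersionReq::parse(r).unwrap());
//!
//! // the versions usable for *all* files are the intersection of the per-file sets
//! let compatible: Vec<_> =
//!     available.iter().filter(|v| reqs.iter().all(|r| r.matches(v))).collect();
//!
//! // prefer the highest (ideally already installed) compatible version, here 0.8.25
//! let best = compatible.last().unwrap();
//! ```
//!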
//! ## Performance
//!
//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing.
//! The data that needs to be processed is proportional to the size of the dependency
//! graph, which can, depending on the project, often be quite large.
//!
//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
//! files, since we're only interested in their
//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
//! which is defined on a per source file basis.

use crate::{
    compilers::{Compiler, CompilerVersion, Language, ParsedSource},
    project::VersionedSources,
    ArtifactOutput, CompilerSettings, Project, ProjectPathsConfig,
};
use core::fmt;
use foundry_compilers_artifacts::sources::{Source, Sources};
use foundry_compilers_core::{
    error::{Result, SolcError},
    utils::{self, find_case_sensitive_existing_file},
};
use parse::SolData;
use rayon::prelude::*;
use semver::{Version, VersionReq};
use std::{
    collections::{BTreeSet, HashMap, HashSet, VecDeque},
    io,
    path::{Path, PathBuf},
};
use yansi::{Color, Paint};

pub mod parse;
mod tree;

pub use parse::SolImportAlias;
pub use tree::{print, Charset, TreeOptions};

/// Container for the result of version and profile resolution of the sources contained in a
/// [`Graph`].
#[derive(Debug)]
pub struct ResolvedSources<'a, C: Compiler> {
    /// Resolved set of sources.
    ///
    /// Mapping from language to a [`Vec`] of compiler inputs consisting of version, source set
    /// and settings.
    pub sources: VersionedSources<'a, C::Language, C::Settings>,
    /// A mapping from a source file path to the primary profile name selected for it.
    ///
    /// This is required because the same source file might be compiled with multiple different
    /// profiles if it's present as a dependency of other sources. We keep the single profile name
    /// that was chosen specifically for each source so that we can default to it.
    /// Right now, this is used when generating artifact names; the "primary" artifact will never
    /// have a profile suffix.
    pub primary_profiles: HashMap<PathBuf, &'a str>,
    /// Graph edges.
    pub edges: GraphEdges<C::ParsedSource>,
}

/// The underlying edges of the graph, which only contain the raw relationship data.
///
/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
/// set is determined.
#[derive(Debug)]
pub struct GraphEdges<D> {
    /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
    /// is the set of outgoing edges for `nodes[0]`.
    edges: Vec<Vec<usize>>,
    /// Reverse of `edges`. That is, `rev_edges[0]` is the set of incoming edges for `nodes[0]`.
    rev_edges: Vec<Vec<usize>>,
    /// Maps a solidity file to its index, for fast lookups.
    indices: HashMap<PathBuf, usize>,
    /// Reverse of `indices`, for reverse lookups.
    rev_indices: HashMap<usize, PathBuf>,
    /// The identified version requirement of a file.
    versions: HashMap<usize, Option<VersionReq>>,
    /// The extracted data from the source file.
    data: HashMap<usize, D>,
    /// The number of input files we started with; corresponds to
    /// `let input_files = nodes[..num_input_files]`.
    ///
    /// Combined with `indices`, this lets us determine whether a file was originally added to the
    /// graph as an input or was added as a resolved import, see [`Self::is_input_file()`].
    num_input_files: usize,
    /// Tracks all imports that we failed to resolve for a file.
    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
    /// Tracks additional include paths resolved by scanning all imports of the graph.
    ///
    /// Absolute imports, like `import "src/Contract.sol"`, are possible, but this does not play
    /// nice with the standard-json import format, since the VFS won't be able to resolve
    /// "src/Contract.sol" without help via `--include-path`.
    resolved_solc_include_paths: BTreeSet<PathBuf>,
}

impl<D> GraphEdges<D> {
    /// How many files are source files
    pub fn num_source_files(&self) -> usize {
        self.num_input_files
    }

    /// Returns an iterator over all file indices
    pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
        0..self.edges.len()
    }

    /// Returns an iterator over all source file indices
    pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
        0..self.num_input_files
    }

    /// Returns an iterator over all library files
    pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
        self.files().skip(self.num_input_files)
    }

    /// Returns all additional `--include-paths`
    pub fn include_paths(&self) -> &BTreeSet<PathBuf> {
        &self.resolved_solc_include_paths
    }

    /// Returns all imports that we failed to resolve
    pub fn unresolved_imports(&self) -> &HashSet<(PathBuf, PathBuf)> {
        &self.unresolved_imports
    }

    /// Returns a list of nodes the given node index points to.
    pub fn imported_nodes(&self, from: usize) -> &[usize] {
        &self.edges[from]
    }

    /// Returns an iterator that yields all imports of a node and all their imports
    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
        NodesIter::new(from, self).skip(1)
    }

    /// Returns all files imported by the given file
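    ///
    /// The returned set is transitive: it contains the direct imports of `file` as well as
    /// everything those imports pull in. A minimal sketch (the path is hypothetical):
    ///
    /// ```ignore
    /// let deps = edges.imports(Path::new("src/Contract.sol"));
    /// for dep in &deps {
    ///     println!("src/Contract.sol depends on {}", dep.display());
    /// }
    /// ```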
    pub fn imports(&self, file: &Path) -> HashSet<&PathBuf> {
        if let Some(start) = self.indices.get(file).copied() {
            NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect()
        } else {
            HashSet::new()
        }
    }

    /// Returns all files that import the given file
    pub fn importers(&self, file: &Path) -> HashSet<&PathBuf> {
        if let Some(start) = self.indices.get(file).copied() {
            self.rev_edges[start].iter().map(move |idx| &self.rev_indices[idx]).collect()
        } else {
            HashSet::new()
        }
    }

    /// Returns the id of the given file
    pub fn node_id(&self, file: &Path) -> usize {
        self.indices[file]
    }

    /// Returns the path of the given node
    pub fn node_path(&self, id: usize) -> &PathBuf {
        &self.rev_indices[&id]
    }

    /// Returns true if the `file` was originally included when the graph was first created and not
    /// added when all `imports` were resolved
    pub fn is_input_file(&self, file: &Path) -> bool {
        if let Some(idx) = self.indices.get(file).copied() {
            idx < self.num_input_files
        } else {
            false
        }
    }

    /// Returns the `VersionReq` for the given file
    pub fn version_requirement(&self, file: &Path) -> Option<&VersionReq> {
        self.indices.get(file).and_then(|idx| self.versions.get(idx)).and_then(Option::as_ref)
    }

    /// Returns the parsed source data for the given file
    pub fn get_parsed_source(&self, file: &Path) -> Option<&D> {
        self.indices.get(file).and_then(|idx| self.data.get(idx))
    }
}

/// Represents a fully-resolved solidity dependency graph.
///
/// Each node in the graph is a file and edges represent dependencies between them.
///
/// See also <https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files>
#[derive(Debug)]
pub struct Graph<D = SolData> {
    /// all nodes in the project, a `Node` represents a single file
    pub nodes: Vec<Node<D>>,
    /// relationship of the nodes
    edges: GraphEdges<D>,
    /// the root of the project this graph represents
    root: PathBuf,
}

impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
    /// Print the graph to `StdOut`
    pub fn print(&self) {
        self.print_with_options(Default::default())
    }

    /// Print the graph to `StdOut` using the provided `TreeOptions`
    pub fn print_with_options(&self, opts: TreeOptions) {
        let stdout = io::stdout();
        let mut out = stdout.lock();
        tree::print(self, &opts, &mut out).expect("failed to write to stdout.")
    }

    /// Returns a list of nodes the given node index points to.
    pub fn imported_nodes(&self, from: usize) -> &[usize] {
        self.edges.imported_nodes(from)
    }

    /// Returns an iterator that yields all imports of a node and all their imports
    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
        self.edges.all_imported_nodes(from)
    }

    /// Returns `true` if the given node has any outgoing edges.
    pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool {
        !self.edges.edges[index].is_empty()
    }

    /// Returns all the resolved files and their index in the graph.
    pub fn files(&self) -> &HashMap<PathBuf, usize> {
        &self.edges.indices
    }

    /// Returns `true` if the graph is empty.
    pub fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }

    /// Gets a node by index.
    ///
    /// # Panics
    ///
    /// if the `index` node id is not included in the graph
    pub fn node(&self, index: usize) -> &Node<D> {
        &self.nodes[index]
    }

    pub(crate) fn display_node(&self, index: usize) -> DisplayNode<'_, D> {
        DisplayNode { node: self.node(index), root: &self.root }
    }

    /// Returns an iterator that yields all nodes of the dependency tree that the given node id
    /// spans, starting with the node itself.
    ///
    /// # Panics
    ///
    /// if the `start` node id is not included in the graph
    pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
        NodesIter::new(start, &self.edges)
    }

    /// Same as `Self::node_ids` but returns the actual `Node`
    pub fn nodes(&self, start: usize) -> impl Iterator<Item = &Node<D>> + '_ {
        self.node_ids(start).map(move |idx| self.node(idx))
    }

    fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges<D>) {
        let Self { nodes, mut edges, .. } = self;
        // need to move the extracted data to the edges, essentially splitting the node so we have
        // access to the data at a later stage in the compile pipeline
        let mut sources = Vec::new();
        for (idx, node) in nodes.into_iter().enumerate() {
            let Node { path, source, data } = node;
            sources.push((path, source));
            edges.data.insert(idx, data);
        }

        (sources, edges)
    }

    /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
    /// returning the `nodes` converted to `Sources`
    pub fn into_sources(self) -> (Sources, GraphEdges<D>) {
        let (sources, edges) = self.split();
        (sources.into_iter().collect(), edges)
    }

    /// Returns an iterator that yields only those nodes that represent input files.
    /// See `Self::resolve_sources`. This won't yield any resolved library nodes.
    pub fn input_nodes(&self) -> impl Iterator<Item = &Node<D>> {
        self.nodes.iter().take(self.edges.num_input_files)
    }

    /// Returns all files imported by the given file
    pub fn imports(&self, path: &Path) -> HashSet<&PathBuf> {
        self.edges.imports(path)
    }

    /// Resolves a number of sources within the given config
    pub fn resolve_sources(
        paths: &ProjectPathsConfig<D::Language>,
        sources: Sources,
    ) -> Result<Self> {
        /// Checks if the given target path was already resolved; if so, it adds its id to the list
        /// of resolved imports. If it hasn't been resolved yet, it queues the file for
        /// processing.
        fn add_node<D: ParsedSource>(
            unresolved: &mut VecDeque<(PathBuf, Node<D>)>,
            index: &mut HashMap<PathBuf, usize>,
            resolved_imports: &mut Vec<usize>,
            target: PathBuf,
        ) -> Result<()> {
            if let Some(idx) = index.get(&target).copied() {
                resolved_imports.push(idx);
            } else {
                // imported file is not part of the input files
                let node = Node::read(&target)?;
                unresolved.push_back((target.clone(), node));
                let idx = index.len();
                index.insert(target, idx);
                resolved_imports.push(idx);
            }
            Ok(())
        }

        // we start off by parsing all input files, which include all solidity files from the
        // source and test folders
        let mut unresolved: VecDeque<_> = sources
            .0
            .into_par_iter()
            .map(|(path, source)| {
                let data = D::parse(source.as_ref(), &path)?;
                Ok((path.clone(), Node { path, source, data }))
            })
            .collect::<Result<_>>()?;

        // identifiers of all resolved files
        let mut index: HashMap<_, _> =
            unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();

        let num_input_files = unresolved.len();

        // contains the files and their dependencies
        let mut nodes = Vec::with_capacity(unresolved.len());
        let mut edges = Vec::with_capacity(unresolved.len());
        let mut rev_edges = Vec::with_capacity(unresolved.len());

        // tracks additional paths that should be used with `--include-path`, these are libraries
        // that use absolute imports like `import "src/Contract.sol"`
        let mut resolved_solc_include_paths = BTreeSet::new();
        resolved_solc_include_paths.insert(paths.root.clone());

        // keep track of all unique paths that we failed to resolve so we don't spam the reporter
        // with the same path
        let mut unresolved_imports = HashSet::new();

        // now we need to resolve all imports for the source file and those imported from other
        // locations
        while let Some((path, node)) = unresolved.pop_front() {
            let mut resolved_imports = Vec::new();
            // parent directory of the current file
            let cwd = match path.parent() {
                Some(inner) => inner,
                None => continue,
            };

            for import_path in node.data.resolve_imports(paths, &mut resolved_solc_include_paths)? {
                match paths.resolve_import_and_include_paths(
                    cwd,
                    &import_path,
                    &mut resolved_solc_include_paths,
                ) {
                    Ok(import) => {
                        add_node(&mut unresolved, &mut index, &mut resolved_imports, import)
                            .map_err(|err| {
                                match err {
                                    SolcError::ResolveCaseSensitiveFileName { .. }
                                    | SolcError::Resolve(_) => {
                                        // make the error more helpful by providing additional
                                        // context
                                        SolcError::FailedResolveImport(
                                            Box::new(err),
                                            node.path.clone(),
                                            import_path.clone(),
                                        )
                                    }
                                    _ => err,
                                }
                            })?
                    }
                    Err(err) => {
                        unresolved_imports.insert((import_path.to_path_buf(), node.path.clone()));
                        trace!(
                            "failed to resolve import component \"{:?}\" for {:?}",
                            err,
                            node.path
                        )
                    }
                };
            }

            nodes.push(node);
            edges.push(resolved_imports);
            // Will be populated later
            rev_edges.push(Vec::new());
        }

        // Build `rev_edges`
        for (idx, edges) in edges.iter().enumerate() {
            for &edge in edges.iter() {
                rev_edges[edge].push(idx);
            }
        }

        if !unresolved_imports.is_empty() {
            // notify on all unresolved imports
            crate::report::unresolved_imports(
                &unresolved_imports
                    .iter()
                    .map(|(i, f)| (i.as_path(), f.as_path()))
                    .collect::<Vec<_>>(),
                &paths.remappings,
            );
        }

        let edges = GraphEdges {
            edges,
            rev_edges,
            rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
            indices: index,
            num_input_files,
            versions: nodes
                .iter()
                .enumerate()
                .map(|(idx, node)| (idx, node.data.version_req().cloned()))
                .collect(),
            data: Default::default(),
            unresolved_imports,
            resolved_solc_include_paths,
        };
        Ok(Self { nodes, edges, root: paths.root.clone() })
    }

    /// Resolves the dependencies of a project's source contracts
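    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming a dapptools-style project layout; the path is hypothetical and
    /// the re-exports from the crate root are assumed to be available:
    ///
    /// ```ignore
    /// use foundry_compilers::{resolver::parse::SolData, Graph, ProjectPathsConfig};
    /// use std::path::Path;
    ///
    /// let paths = ProjectPathsConfig::dapptools(Path::new("/path/to/project"))?;
    /// let graph = Graph::<SolData>::resolve(&paths)?;
    /// // print the resolved import tree
    /// graph.print();
    /// ```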
    pub fn resolve(paths: &ProjectPathsConfig<D::Language>) -> Result<Self> {
        Self::resolve_sources(paths, paths.read_input_files()?)
    }
}

impl<L: Language, D: ParsedSource<Language = L>> Graph<D> {
    /// Consumes the nodes of the graph and returns all input files together with their appropriate
    /// version and the edges of the graph
    ///
    /// First we determine the compatible version for each input file (from sources and test
    /// folder, see `Self::resolve`) and then we add all resolved library imports.
    pub fn into_sources_by_version<C, T>(
        self,
        project: &Project<C, T>,
    ) -> Result<ResolvedSources<'_, C>>
    where
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
        C: Compiler<ParsedSource = D, Language = L>,
    {
        /// Inserts the imports of the given node into the sources map.
        ///
        /// The graph can look like this:
        /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`
        /// where `C` is a library import, in which case we assign `C` only to the first input file.
        /// Strictly speaking, it's not required to include such imports in the solc `CompilerInput`
        /// as solc would pick them up anyway, but we add them so we can create a corresponding
        /// cache entry for them as well. This could be optimized.
        fn insert_imports(
            idx: usize,
            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
            sources: &mut Sources,
            edges: &[Vec<usize>],
            processed_sources: &mut HashSet<usize>,
        ) {
            // iterate over all dependencies not processed yet
            for dep in edges[idx].iter().copied() {
                // keep track of processed dependencies, if the dep was already in the set we have
                // processed it already
                if !processed_sources.insert(dep) {
                    continue;
                }

                // library import
                if let Some((path, source)) = all_nodes.get(&dep).cloned() {
                    sources.insert(path, source);
                    insert_imports(dep, all_nodes, sources, edges, processed_sources);
                }
            }
        }

        let versioned_nodes = self.get_input_node_versions(project)?;
        let versioned_nodes = self.resolve_settings(project, versioned_nodes)?;
        let (nodes, edges) = self.split();

        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();

        let mut resulted_sources = HashMap::new();
        let mut default_profiles = HashMap::new();

        let profiles = project.settings_profiles().collect::<Vec<_>>();

        // determine the `Sources` set for each solc version
        for (language, versioned_nodes) in versioned_nodes {
            let mut versioned_sources = Vec::with_capacity(versioned_nodes.len());

            for (version, profile_to_nodes) in versioned_nodes {
                for (profile_idx, input_node_indices) in profile_to_nodes {
                    let mut sources = Sources::new();

                    // all input nodes will be processed
                    let mut processed_sources = input_node_indices.iter().copied().collect();

                    // we only process input nodes (from sources, tests for example)
                    for idx in input_node_indices {
                        // insert the input node into the sources set
                        let (path, source) =
                            all_nodes.get(&idx).cloned().expect("node is present. qed");

                        default_profiles.insert(path.clone(), profiles[profile_idx].0);
                        sources.insert(path, source);
                        insert_imports(
                            idx,
                            &mut all_nodes,
                            &mut sources,
                            &edges.edges,
                            &mut processed_sources,
                        );
                    }
                    versioned_sources.push((version.clone(), sources, profiles[profile_idx]));
                }
            }

            resulted_sources.insert(language, versioned_sources);
        }

        Ok(ResolvedSources { sources: resulted_sources, primary_profiles: default_profiles, edges })
    }

    /// Writes the list of imported files into the given formatter:
    ///
    /// ```text
    /// path/to/a.sol (<version>) imports:
    ///     path/to/b.sol (<version>)
    ///     path/to/c.sol (<version>)
    ///     ...
    /// ```
    fn format_imports_list<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
        W: std::fmt::Write,
    >(
        &self,
        idx: usize,
        incompatible: HashSet<usize>,
        project: &Project<C, T>,
        f: &mut W,
    ) -> std::result::Result<(), std::fmt::Error> {
        let format_node = |idx, f: &mut W| {
            let node = self.node(idx);
            let color = if incompatible.contains(&idx) { Color::Red } else { Color::White };

            let mut line = utils::source_name(&node.path, &self.root).display().to_string();
            if let Some(req) = self.version_requirement(idx, project) {
                line.push_str(&format!(" {req}"));
            }

            write!(f, "{}", line.paint(color))
        };
        format_node(idx, f)?;
        write!(f, " imports:")?;
        for dep in self.node_ids(idx).skip(1) {
            write!(f, "\n    ")?;
            format_node(dep, f)?;
        }

        Ok(())
    }

    /// Combines the version requirement parsed from the file with the project's restrictions.
    fn version_requirement<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        project: &Project<C, T>,
    ) -> Option<VersionReq> {
        let node = self.node(idx);
        let parsed_req = node.data.version_req();
        let other_req = project.restrictions.get(&node.path).and_then(|r| r.version.as_ref());

        match (parsed_req, other_req) {
            (Some(parsed_req), Some(other_req)) => {
                let mut req = parsed_req.clone();
                req.comparators.extend(other_req.comparators.clone());
                Some(req)
            }
            (Some(parsed_req), None) => Some(parsed_req.clone()),
            (None, Some(other_req)) => Some(other_req.clone()),
            _ => None,
        }
    }

    /// Checks that the file's version is even available.
    ///
    /// This returns an error if the file's version is invalid semver or is not available, such as
    /// `0.8.20` when the highest available version is `0.8.19`.
    fn check_available_version<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        all_versions: &[&CompilerVersion],
        project: &Project<C, T>,
    ) -> std::result::Result<(), SourceVersionError> {
        let Some(req) = self.version_requirement(idx, project) else { return Ok(()) };

        if !all_versions.iter().any(|v| req.matches(v.as_ref())) {
            return if project.offline {
                Err(SourceVersionError::NoMatchingVersionOffline(req))
            } else {
                Err(SourceVersionError::NoMatchingVersion(req))
            };
        }

        Ok(())
    }

    /// Filters incompatible versions from the `candidates`.
    ///
    /// It iterates over the node's imports and, if no compatible version is left, reports an error
    /// built around the last node seen before the candidates ran out.
    fn retain_compatible_versions<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        candidates: &mut Vec<&CompilerVersion>,
        project: &Project<C, T>,
    ) -> Result<(), String> {
        let mut all_versions = candidates.clone();

        let nodes: Vec<_> = self.node_ids(idx).collect();
        let mut failed_node_idx = None;
        for node in nodes.iter() {
            if let Some(req) = self.version_requirement(*node, project) {
                candidates.retain(|v| req.matches(v.as_ref()));

                if candidates.is_empty() {
                    failed_node_idx = Some(*node);
                    break;
                }
            }
        }

        let Some(failed_node_idx) = failed_node_idx else {
            // everything is fine
            return Ok(());
        };

        // This is the last node we processed before running out of candidates. It means that a
        // node coming before it in `nodes` directly conflicts with it.
        let failed_node = self.node(failed_node_idx);

        if let Err(version_err) =
            self.check_available_version(failed_node_idx, &all_versions, project)
        {
            // check if the version is even valid
            let f = utils::source_name(&failed_node.path, &self.root).display();
            return Err(format!("Encountered invalid solc version in {f}: {version_err}"));
        } else {
            // if the node requirement makes sense, it means that there is at least one node
            // whose requirement conflicts with it

            // retain only versions compatible with the `failed_node`
            if let Some(req) = self.version_requirement(failed_node_idx, project) {
                all_versions.retain(|v| req.matches(v.as_ref()));
            }

            // iterate over all the nodes once again and find the incompatible one
            for node in &nodes {
                if self.check_available_version(*node, &all_versions, project).is_err() {
                    let mut msg = "Found incompatible versions:\n".white().to_string();

                    self.format_imports_list(
                        idx,
                        [*node, failed_node_idx].into(),
                        project,
                        &mut msg,
                    )
                    .unwrap();
                    return Err(msg);
                }
            }
        }

        let mut msg = "Found incompatible versions:\n".white().to_string();
        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
        Err(msg)
    }

    /// Filters profiles incompatible with the given node and its imports.
    fn retain_compatible_profiles<
        C: Compiler,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        idx: usize,
        project: &Project<C, T>,
        candidates: &mut Vec<(usize, (&str, &C::Settings))>,
    ) -> Result<(), String> {
        let mut all_profiles = candidates.clone();

        let nodes: Vec<_> = self.node_ids(idx).collect();
        let mut failed_node_idx = None;
        for node in nodes.iter() {
            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
                candidates.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
                if candidates.is_empty() {
                    failed_node_idx = Some(*node);
                    break;
                }
            }
        }

        let Some(failed_node_idx) = failed_node_idx else {
            // everything is fine
            return Ok(());
        };

        let failed_node = self.node(failed_node_idx);

        // retain only profiles compatible with the `failed_node`
        if let Some(req) = project.restrictions.get(&failed_node.path) {
            all_profiles.retain(|(_, (_, settings))| settings.satisfies_restrictions(&**req));
        }

        if all_profiles.is_empty() {
            let f = utils::source_name(&failed_node.path, &self.root).display();
            return Err(format!("Missing profile satisfying settings restrictions for {f}"));
        }

        // iterate over all the nodes once again and find the incompatible one
        for node in &nodes {
            if let Some(req) = project.restrictions.get(&self.node(*node).path) {
                if !all_profiles
                    .iter()
                    .any(|(_, (_, settings))| settings.satisfies_restrictions(&**req))
                {
                    let mut msg = "Found incompatible settings restrictions:\n".white().to_string();

                    self.format_imports_list(
                        idx,
                        [*node, failed_node_idx].into(),
                        project,
                        &mut msg,
                    )
                    .unwrap();
                    return Err(msg);
                }
            }
        }

        let mut msg = "Found incompatible settings restrictions:\n".white().to_string();
        self.format_imports_list(idx, nodes.into_iter().collect(), project, &mut msg).unwrap();
        Err(msg)
    }

    fn input_nodes_by_language(&self) -> HashMap<D::Language, Vec<usize>> {
        let mut nodes = HashMap::new();

        for idx in 0..self.edges.num_input_files {
            nodes.entry(self.nodes[idx].data.language()).or_insert_with(Vec::new).push(idx);
        }

        nodes
    }

    /// Returns a map of versions together with the input nodes that are compatible with that
    /// version.
    ///
    /// This will essentially do a DFS on all input sources and their transitive imports, checking
    /// that all of them can be compiled with the version stated in the input file.
    ///
    /// Returns an error message with __all__ input files that don't have compatible imports.
    ///
    /// This also attempts to prefer local installations over remotely available versions.
    /// If `offline` is set to `true`, then only already installed versions are considered.
    fn get_input_node_versions<
        C: Compiler<Language = L>,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        project: &Project<C, T>,
    ) -> Result<HashMap<L, HashMap<Version, Vec<usize>>>> {
        trace!("resolving input node versions");

        let mut resulted_nodes = HashMap::new();

        for (language, nodes) in self.input_nodes_by_language() {
            // this is likely called by an application and will eventually be printed, so we don't
            // exit on the first error; instead we gather all the errors and return a bundled
            // error message
            let mut errors = Vec::new();

            // the sorted list of all versions
            let all_versions = if project.offline {
                project
                    .compiler
                    .available_versions(&language)
                    .into_iter()
                    .filter(|v| v.is_installed())
                    .collect()
            } else {
                project.compiler.available_versions(&language)
            };

            if all_versions.is_empty() && !nodes.is_empty() {
                return Err(SolcError::msg(format!(
                    "Found {language} sources, but no compiler versions are available for it"
                )));
            }

            // stores all versions and their nodes that can be compiled
            let mut versioned_nodes = HashMap::new();

            // stores all files and the versions they're compatible with
            let mut all_candidates = Vec::with_capacity(self.edges.num_input_files);
            // walking through the node's dep tree and filtering the versions along the way
            for idx in nodes {
                let mut candidates = all_versions.iter().collect::<Vec<_>>();
                // remove all incompatible versions from the candidates list by checking the node
                // and all its imports
                if let Err(err) = self.retain_compatible_versions(idx, &mut candidates, project) {
                    errors.push(err);
                } else {
                    // found viable candidates, pick the most recent version that's already
                    // installed
                    let candidate =
                        if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                            candidates[pos]
                        } else {
                            candidates.last().expect("not empty; qed.")
                        }
                        .clone();

                    // also store all possible candidates to optimize the set
                    all_candidates.push((idx, candidates.into_iter().collect::<HashSet<_>>()));

                    versioned_nodes
                        .entry(candidate)
                        .or_insert_with(|| Vec::with_capacity(1))
                        .push(idx);
                }
            }

            // detected multiple versions but there might still exist a single version that
            // satisfies all sources
            if versioned_nodes.len() > 1 {
                versioned_nodes = Self::resolve_multiple_versions(all_candidates);
            }

            if versioned_nodes.len() == 1 {
                trace!(
                    "found exact solc version for all sources  \"{}\"",
                    versioned_nodes.keys().next().unwrap()
                );
            }

            if errors.is_empty() {
                trace!("resolved {} versions {:?}", versioned_nodes.len(), versioned_nodes.keys());
                resulted_nodes.insert(
                    language,
                    versioned_nodes
                        .into_iter()
                        .map(|(v, nodes)| (Version::from(v), nodes))
                        .collect(),
                );
            } else {
                error!("failed to resolve versions");
                return Err(SolcError::msg(errors.join("\n")));
            }
        }

        Ok(resulted_nodes)
    }

    #[allow(clippy::complexity)]
    fn resolve_settings<
        C: Compiler<Language = L>,
        T: ArtifactOutput<CompilerContract = C::CompilerContract>,
    >(
        &self,
        project: &Project<C, T>,
        input_nodes_versions: HashMap<L, HashMap<Version, Vec<usize>>>,
    ) -> Result<HashMap<L, HashMap<Version, HashMap<usize, Vec<usize>>>>> {
        let mut resulted_sources = HashMap::new();
        let mut errors = Vec::new();
        for (language, versions) in input_nodes_versions {
            let mut versioned_sources = HashMap::new();
            for (version, nodes) in versions {
                let mut profile_to_nodes = HashMap::new();
                for idx in nodes {
                    let mut profile_candidates =
                        project.settings_profiles().enumerate().collect::<Vec<_>>();
                    if let Err(err) =
                        self.retain_compatible_profiles(idx, project, &mut profile_candidates)
                    {
                        errors.push(err);
                    } else {
                        let (profile_idx, _) = profile_candidates.first().expect("exists");
                        profile_to_nodes.entry(*profile_idx).or_insert_with(Vec::new).push(idx);
                    }
                }
                versioned_sources.insert(version, profile_to_nodes);
            }
            resulted_sources.insert(language, versioned_sources);
        }

        if errors.is_empty() {
            Ok(resulted_sources)
        } else {
            error!("failed to resolve settings");
            Err(SolcError::msg(errors.join("\n")))
        }
    }

    /// Tries to find the "best" assignment of versions to nodes, see [Solc version
    /// auto-detection](#solc-version-auto-detection).
    ///
    /// This is a bit inefficient but is fine: the max. number of versions is ~80 and there's
    /// a high chance that the number of source files is <50, even for larger projects.
    fn resolve_multiple_versions(
        all_candidates: Vec<(usize, HashSet<&CompilerVersion>)>,
    ) -> HashMap<CompilerVersion, Vec<usize>> {
        // returns the intersection as a sorted list of versions
        fn intersection<'a>(
            mut sets: Vec<&HashSet<&'a CompilerVersion>>,
        ) -> Vec<&'a CompilerVersion> {
            if sets.is_empty() {
                return Vec::new();
            }

            let mut result = sets.pop().cloned().expect("not empty; qed.");
            if !sets.is_empty() {
                result.retain(|item| sets.iter().all(|set| set.contains(item)));
            }

            let mut v = result.into_iter().collect::<Vec<_>>();
            v.sort_unstable();
            v
        }

        /// Removes and returns the highest version that is installed.
        /// If the candidates set only contains uninstalled versions then this returns the highest
        /// uninstalled version.
        fn remove_candidate(candidates: &mut Vec<&CompilerVersion>) -> CompilerVersion {
            debug_assert!(!candidates.is_empty());

            if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
                candidates.remove(pos)
            } else {
                candidates.pop().expect("not empty; qed.")
            }
            .clone()
        }

        let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect();

        // find all versions that satisfy all nodes
        let mut intersection = intersection(all_sets);
        if !intersection.is_empty() {
            let exact_version = remove_candidate(&mut intersection);
            let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect();
            trace!("resolved solc version compatible with all sources  \"{}\"", exact_version);
            return HashMap::from([(exact_version, all_nodes)]);
        }

        // no version satisfies all nodes
        let mut versioned_nodes: HashMap<_, _> = HashMap::new();

        // try to minimize the set of versions, this is guaranteed to lead to `versioned_nodes.len()
        // > 1` as no solc version exists that can satisfy all sources
        for (node, versions) in all_candidates {
            // need to sort them again
            let mut versions = versions.into_iter().collect::<Vec<_>>();
            versions.sort_unstable();

            let candidate = if let Some(idx) =
                versions.iter().rposition(|v| versioned_nodes.contains_key(*v))
            {
                // use a version that's already in the set
                versions.remove(idx).clone()
            } else {
                // use the highest version otherwise
                remove_candidate(&mut versions)
            };

            versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node);
        }

        trace!(
            "no solc version can satisfy all source files, resolved multiple versions  \"{:?}\"",
            versioned_nodes.keys()
        );

        versioned_nodes
    }
}

/// An iterator over a node and all of its dependencies; the start node itself is yielded first.
#[derive(Debug)]
pub struct NodesIter<'a, D> {
    /// stack of nodes
    stack: VecDeque<usize>,
    visited: HashSet<usize>,
    graph: &'a GraphEdges<D>,
}

impl<'a, D> NodesIter<'a, D> {
    fn new(start: usize, graph: &'a GraphEdges<D>) -> Self {
        Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph }
    }
}

impl<D> Iterator for NodesIter<'_, D> {
    type Item = usize;
    fn next(&mut self) -> Option<Self::Item> {
        let node = self.stack.pop_front()?;

        if self.visited.insert(node) {
            // push the node's direct dependencies to the stack if we haven't visited it already
            self.stack.extend(self.graph.imported_nodes(node).iter().copied());
        }
        Some(node)
    }
}

#[derive(Debug)]
pub struct Node<D> {
    /// path of the solidity file
    path: PathBuf,
    /// content of the solidity file
    source: Source,
    /// parsed data
    pub data: D,
}

impl<D: ParsedSource> Node<D> {
    /// Reads the content of the file and returns a [Node] containing relevant information
    pub fn read(file: &Path) -> Result<Self> {
        let source = Source::read(file).map_err(|err| {
            let exists = err.path().exists();
            if !exists && err.path().is_symlink() {
                SolcError::ResolveBadSymlink(err)
            } else {
                // This is an additional check useful on OSes that have case-sensitive paths, see also <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>
                if !exists {
                    // check if there exists a file with different case
                    if let Some(existing_file) = find_case_sensitive_existing_file(file) {
                        SolcError::ResolveCaseSensitiveFileName { error: err, existing_file }
                    } else {
                        SolcError::Resolve(err)
                    }
                } else {
                    SolcError::Resolve(err)
                }
            }
        })?;
        let data = D::parse(source.as_ref(), file)?;
        Ok(Self { path: file.to_path_buf(), source, data })
    }

    /// Returns the path of the file.
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Returns the contents of the file.
    pub fn content(&self) -> &str {
        &self.source.content
    }

    pub fn unpack(&self) -> (&PathBuf, &Source) {
        (&self.path, &self.source)
    }
}

/// Helper type for formatting a node
pub(crate) struct DisplayNode<'a, D> {
    node: &'a Node<D>,
    root: &'a PathBuf,
}

impl<D: ParsedSource> fmt::Display for DisplayNode<'_, D> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let path = utils::source_name(&self.node.path, self.root);
        write!(f, "{}", path.display())?;
        if let Some(v) = self.node.data.version_req() {
            write!(f, " {v}")?;
        }
        Ok(())
    }
}

/// Errors thrown when checking the solc version of a file
#[derive(Debug, thiserror::Error)]
#[allow(dead_code)]
enum SourceVersionError {
    #[error("Failed to parse solidity version {0}: {1}")]
    InvalidVersion(String, SolcError),
    #[error("No solc version exists that matches the version requirement: {0}")]
    NoMatchingVersion(VersionReq),
    #[error("No solc version installed that matches the version requirement: {0}")]
    NoMatchingVersionOffline(VersionReq),
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_resolve_hardhat_dependency_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(&root).unwrap();

        let graph = Graph::<SolData>::resolve(&paths).unwrap();

        assert_eq!(graph.edges.num_input_files, 1);
        assert_eq!(graph.files().len(), 2);

        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Greeter.sol"), 0),
                (paths.root.join("node_modules/hardhat/console.sol"), 1),
            ])
        );
    }

    #[test]
    fn can_resolve_dapp_dependency_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();

        let graph = Graph::<SolData>::resolve(&paths).unwrap();

        assert_eq!(graph.edges.num_input_files, 2);
        assert_eq!(graph.files().len(), 3);
        assert_eq!(
            graph.files().clone(),
            HashMap::from([
                (paths.sources.join("Dapp.sol"), 0),
                (paths.sources.join("Dapp.t.sol"), 1),
                (paths.root.join("lib/ds-test/src/test.sol"), 2),
            ])
        );

        let dapp_test = graph.node(1);
        assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
        assert_eq!(
            dapp_test.data.imports.iter().map(|i| i.data().path()).collect::<Vec<&PathBuf>>(),
            vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")]
        );
        assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
    }

    #[test]
    #[cfg(not(target_os = "windows"))]
    fn can_print_dapp_sample_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/dapp-sample");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
        let graph = Graph::<SolData>::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();

        assert_eq!(
            "
src/Dapp.sol >=0.6.6
src/Dapp.t.sol >=0.6.6
├── lib/ds-test/src/test.sol >=0.4.23
└── src/Dapp.sol >=0.6.6
"
            .trim_start()
            .as_bytes()
            .to_vec(),
            out
        );
    }

    #[test]
    #[cfg(not(target_os = "windows"))]
    fn can_print_hardhat_sample_graph() {
        let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/hardhat-sample");
        let paths = ProjectPathsConfig::hardhat(&root).unwrap();
        let graph = Graph::<SolData>::resolve(&paths).unwrap();
        let mut out = Vec::<u8>::new();
        tree::print(&graph, &Default::default(), &mut out).unwrap();
        assert_eq!(
            "contracts/Greeter.sol >=0.6.0
└── node_modules/hardhat/console.sol >=0.4.22, <0.9.0
",
            String::from_utf8(out).unwrap()
        );
    }

    #[test]
    #[cfg(feature = "svm-solc")]
    fn test_print_unresolved() {
        use crate::{solc::SolcCompiler, ProjectBuilder};

        let root =
            Path::new(env!("CARGO_MANIFEST_DIR")).join("../../test-data/incompatible-pragmas");
        let paths = ProjectPathsConfig::dapptools(&root).unwrap();
        let graph = Graph::<SolData>::resolve(&paths).unwrap();
        let Err(SolcError::Message(err)) = graph.get_input_node_versions(
            &ProjectBuilder::<SolcCompiler>::default()
                .paths(paths)
                .build(SolcCompiler::AutoDetect)
                .unwrap(),
        ) else {
            panic!("expected error");
        };

        snapbox::assert_data_eq!(
            err,
            snapbox::str![[r#"
Found incompatible versions:
src/A.sol =0.8.25 imports:
    src/B.sol
    src/C.sol =0.7.0
"#]]
        );
    }

    #[cfg(target_os = "linux")]
    #[test]
    fn can_read_different_case() {
        use crate::resolver::parse::SolData;
        use std::fs::{self, create_dir_all};
        use utils::tempdir;

        let tmp_dir = tempdir("out").unwrap();
        let path = tmp_dir.path().join("forge-std");
        create_dir_all(&path).unwrap();
        let existing = path.join("Test.sol");
        let non_existing = path.join("test.sol");
        fs::write(
            existing,
            "
pragma solidity ^0.8.10;
contract A {}
        ",
        )
        .unwrap();

        assert!(!non_existing.exists());

        let found = crate::resolver::Node::<SolData>::read(&non_existing).unwrap_err();
        assert!(matches!(found, SolcError::ResolveCaseSensitiveFileName { .. }));
    }
}