Skip to main content

solidity_language_server/
solc.rs

1//! Direct `solc --standard-json` runner for fast AST generation.
2//!
3//! The output is normalized into the same shape that `forge build --json --ast`
4//! produces, so all downstream consumers (goto, hover, completions, etc.) work
5//! unchanged.
6
7use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Cached list of installed solc versions. Populated on first access,
/// invalidated after a successful `svm::install`.
///
/// Wrapped in a `Mutex` (not just `OnceLock`) so the cached list can be
/// replaced in place by `invalidate_installed_versions()` after an install.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22    let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23    mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27    if let Some(mutex) = INSTALLED_VERSIONS.get() {
28        *mutex.lock().unwrap() = scan_installed_versions();
29    }
30}
31
/// Convert a `semver::Version` (from svm-rs) to our lightweight `SemVer`.
///
/// NOTE(review): the `as u32` casts silently truncate the crate's `u64`
/// components. Real solc releases never exceed `u32`, but confirm nothing
/// else feeds versions through here.
fn semver_to_local(v: &semver::Version) -> SemVer {
    SemVer {
        major: v.major as u32,
        minor: v.minor as u32,
        patch: v.patch as u32,
    }
}
40
/// Resolve the path to the solc binary.
///
/// Resolution order:
/// 1. Parse `pragma solidity` from the source file.
///    - **Exact pragma** (`=0.7.6`): always use the file's version — foundry.toml
///      cannot override an exact pragma without breaking compilation.
///    - **Wildcard pragma** (`^0.8.0`, `>=0.8.0`, `>=0.6.2 <0.9.0`): if
///      `foundry.toml` specifies a solc version that satisfies the constraint,
///      use it. Otherwise pick the latest matching installed version.
/// 2. If no pragma, use the `foundry.toml` solc version if set.
/// 3. If no match is installed, auto-install via `svm install`.
/// 4. Fall back to whatever `solc` is on `$PATH`.
///
/// `client`, when present, receives progress via LSP messages: routine
/// resolution goes to the log (`log_message`), while installs — which the
/// user should see — use `show_message`.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    // 1. Try pragma constraint (may be tightened from the full import graph)
    if let Some(constraint) = constraint {
        // For exact pragmas, always honour the file — foundry.toml can't override
        // without causing a compilation failure.
        // For wildcard pragmas, prefer the foundry.toml version if it satisfies
        // the constraint. This mirrors `forge build` behaviour where the project
        // config picks the version but the pragma must still be satisfied.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: foundry.toml {config_ver} satisfies pragma {constraint:?} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // Prefer an already-installed version satisfying the constraint.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: pragma {constraint:?} → {version} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // No matching version installed — try auto-install via svm
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // Refresh the cached version list after install
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                             Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
            // Fall through: even when the install failed (or the binary
            // didn't appear afterwards), steps 2–3 below may still succeed.
        }
    }

    // 2. No pragma — use foundry.toml version if available
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // 3. Fall back to system solc
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    // Bare "solc" defers lookup to $PATH when the process is spawned.
    PathBuf::from("solc")
}
166
167/// Determine which version to install for a pragma constraint.
168///
169/// - Exact: install that version
170/// - Caret `^0.8.20`: install `0.8.20` (minimum satisfying)
171/// - Gte `>=0.8.0`: install `0.8.0` (minimum satisfying)
172/// - Range `>=0.6.2 <0.9.0`: install `0.6.2` (minimum satisfying)
173fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
174    match constraint {
175        PragmaConstraint::Exact(v) => Some(v.to_string()),
176        PragmaConstraint::Caret(v) => Some(v.to_string()),
177        PragmaConstraint::Gte(v) => Some(v.to_string()),
178        PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
179    }
180}
181
182/// Install a solc version using svm-rs library.
183///
184/// Returns `true` if the install succeeded.
185async fn svm_install(version: &str) -> bool {
186    let ver = match semver::Version::parse(version) {
187        Ok(v) => v,
188        Err(_) => return false,
189    };
190    svm::install(&ver).await.is_ok()
191}
192
193/// Look up a solc binary by version string using `svm::version_binary()`.
194fn find_solc_binary(version: &str) -> Option<PathBuf> {
195    let path = svm::version_binary(version);
196    if path.is_file() {
197        return Some(path);
198    }
199    None
200}
201
202// ── Pragma parsing ────────────────────────────────────────────────────────
203
/// A parsed semver version (major.minor.patch).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}

impl SemVer {
    /// Parse a `major.minor.patch` string. Returns `None` unless the input
    /// is exactly three numeric, dot-separated components.
    fn parse(s: &str) -> Option<SemVer> {
        let mut components = s.split('.');
        let version = SemVer {
            major: components.next()?.parse().ok()?,
            minor: components.next()?.parse().ok()?,
            patch: components.next()?.parse().ok()?,
        };
        // Reject trailing components such as "1.2.3.4".
        match components.next() {
            Some(_) => None,
            None => Some(version),
        }
    }
}

impl std::fmt::Display for SemVer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
231
/// A version constraint from `pragma solidity`.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `0.8.26` — exact match
    Exact(SemVer),
    /// `^0.8.0` — Solidity caret: `>=0.8.0 <0.9.0`, i.e. lower bound
    /// inclusive, next minor version exclusive (see `constraint_to_range`
    /// and `version_satisfies`).
    Caret(SemVer),
    /// `>=0.8.0` — at least this version, no upper bound
    Gte(SemVer),
    /// `>=0.6.2 <0.9.0` — half-open range: lower inclusive, upper exclusive
    Range(SemVer, SemVer),
}
245
246/// Resolve a Solidity import path to an absolute filesystem path.
247///
248/// Handles relative imports (`./`, `../`) and remapped imports.
249fn resolve_import_to_abs(
250    project_root: &Path,
251    importer_abs: &Path,
252    import_path: &str,
253    remappings: &[String],
254) -> Option<PathBuf> {
255    if import_path.starts_with("./") || import_path.starts_with("../") {
256        let base = importer_abs.parent()?;
257        return Some(lexical_normalize(&base.join(import_path)));
258    }
259
260    for remap in remappings {
261        let mut it = remap.splitn(2, '=');
262        let prefix = it.next().unwrap_or_default();
263        let target = it.next().unwrap_or_default();
264        if prefix.is_empty() || target.is_empty() {
265            continue;
266        }
267        if import_path.starts_with(prefix) {
268            let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
269            return Some(lexical_normalize(
270                &project_root.join(format!("{target}{suffix}")),
271            ));
272        }
273    }
274
275    Some(lexical_normalize(&project_root.join(import_path)))
276}
277
/// Normalize a path by resolving `.` and `..` components lexically
/// (without hitting the filesystem). A `..` at the top simply drops out.
fn lexical_normalize(path: &Path) -> PathBuf {
    use std::path::Component;
    path.components().fold(PathBuf::new(), |mut acc, component| {
        match component {
            Component::CurDir => {}                // "." contributes nothing
            Component::ParentDir => {
                acc.pop();                         // ".." removes the last segment (no-op when empty)
            }
            other => acc.push(other.as_os_str()),  // keep roots, prefixes, and normal segments
        }
        acc
    })
}
293
294/// Collect pragma constraints from a file and all its transitive imports.
295///
296/// Walks the import graph using simple string scanning (no tree-sitter),
297/// resolving import paths via remappings.  Returns all pragmas found so
298/// that the caller can pick a solc version satisfying every file.
299fn collect_import_pragmas(
300    file_path: &Path,
301    project_root: &Path,
302    remappings: &[String],
303) -> Vec<PragmaConstraint> {
304    let mut pragmas = Vec::new();
305    let mut visited = HashSet::new();
306    collect_import_pragmas_recursive(
307        file_path,
308        project_root,
309        remappings,
310        &mut pragmas,
311        &mut visited,
312    );
313    pragmas
314}
315
316fn collect_import_pragmas_recursive(
317    file_path: &Path,
318    project_root: &Path,
319    remappings: &[String],
320    pragmas: &mut Vec<PragmaConstraint>,
321    visited: &mut HashSet<PathBuf>,
322) {
323    if !visited.insert(file_path.to_path_buf()) {
324        return;
325    }
326    let source = match std::fs::read_to_string(file_path) {
327        Ok(s) => s,
328        Err(_) => return,
329    };
330    if let Some(pragma) = parse_pragma(&source) {
331        pragmas.push(pragma);
332    }
333    for imp in links::ts_find_imports(source.as_bytes()) {
334        if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
335            collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
336        }
337    }
338}
339
340/// Tighten a set of pragma constraints into a single constraint that
341/// satisfies all of them.
342///
343/// Rules:
344/// - An exact pragma always wins (if any file requires `0.8.23`, we must
345///   use exactly `0.8.23`).
346/// - Multiple exact pragmas that disagree → returns the first one (solc
347///   will error anyway, but we still try).
348/// - For wildcard pragmas, compute the intersection range and return it.
349fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
350    if pragmas.is_empty() {
351        return None;
352    }
353
354    // If any pragma is Exact, that version must be used.
355    for p in pragmas {
356        if matches!(p, PragmaConstraint::Exact(_)) {
357            return Some(p.clone());
358        }
359    }
360
361    // Normalize every constraint to a (lower, upper) range, then intersect.
362    let mut lower = SemVer {
363        major: 0,
364        minor: 0,
365        patch: 0,
366    };
367    let mut upper: Option<SemVer> = None;
368
369    for p in pragmas {
370        let (lo, hi) = constraint_to_range(p);
371        if lo > lower {
372            lower = lo;
373        }
374        if let Some(hi) = hi {
375            upper = Some(match upper {
376                Some(cur) if hi < cur => hi,
377                Some(cur) => cur,
378                None => hi,
379            });
380        }
381    }
382
383    match upper {
384        Some(hi) if lower >= hi => None, // empty intersection
385        Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
386        None => Some(PragmaConstraint::Gte(lower)),
387    }
388}
389
390/// Convert a pragma constraint to an inclusive lower bound and optional
391/// exclusive upper bound.
392fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
393    match constraint {
394        PragmaConstraint::Exact(v) => (
395            v.clone(),
396            Some(SemVer {
397                major: v.major,
398                minor: v.minor,
399                patch: v.patch + 1,
400            }),
401        ),
402        PragmaConstraint::Caret(v) => (
403            v.clone(),
404            Some(SemVer {
405                major: v.major,
406                minor: v.minor + 1,
407                patch: 0,
408            }),
409        ),
410        PragmaConstraint::Gte(v) => (v.clone(), None),
411        PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
412    }
413}
414
/// Parse `pragma solidity <constraint>;` from Solidity source.
///
/// Handles:
/// - `pragma solidity 0.8.26;` → Exact
/// - `pragma solidity ^0.8.0;` → Caret
/// - `pragma solidity >=0.8.0;` → Gte
/// - `pragma solidity >=0.6.2 <0.9.0;` → Range
///
/// Unsupported forms (`~0.8.0`, `>0.8.0`, `||` alternatives) fall through
/// every branch and return `None`. Only the first matching pragma line is
/// considered, and only within the first 20 lines of the file.
pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
    // Find the pragma line — only scan the first ~20 lines for performance
    let pragma_line = source
        .lines()
        .take(20)
        .find(|line| line.trim_start().starts_with("pragma solidity"))?;

    // Extract the constraint string between "pragma solidity" and ";"
    // (a missing trailing ';' is tolerated via unwrap_or).
    let after_keyword = pragma_line
        .trim_start()
        .strip_prefix("pragma solidity")?
        .trim();
    let constraint_str = after_keyword
        .strip_suffix(';')
        .unwrap_or(after_keyword)
        .trim();

    if constraint_str.is_empty() {
        return None;
    }

    // Range: >=X.Y.Z <A.B.C
    if let Some(rest) = constraint_str.strip_prefix(">=") {
        let rest = rest.trim();
        // Split at the first whitespace or '<' — handles both
        // ">=0.6.2 <0.9.0" and the space-less ">=0.6.2<0.9.0".
        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
            let lower_str = rest[..space_idx].trim();
            let upper_part = rest[space_idx..].trim();
            if let Some(upper_str) = upper_part.strip_prefix('<') {
                let upper_str = upper_str.trim();
                if let (Some(lower), Some(upper)) =
                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
                {
                    return Some(PragmaConstraint::Range(lower, upper));
                }
            }
        }
        // Just >=X.Y.Z (no upper bound present, or the range failed to parse)
        if let Some(ver) = SemVer::parse(rest) {
            return Some(PragmaConstraint::Gte(ver));
        }
    }

    // Caret: ^X.Y.Z
    if let Some(rest) = constraint_str.strip_prefix('^')
        && let Some(ver) = SemVer::parse(rest.trim())
    {
        return Some(PragmaConstraint::Caret(ver));
    }

    // Exact: X.Y.Z
    if let Some(ver) = SemVer::parse(constraint_str) {
        return Some(PragmaConstraint::Exact(ver));
    }

    None
}
478
/// List installed solc versions (cached — use `get_installed_versions()` internally).
///
/// Public wrapper over the process-wide cache; does not rescan the
/// filesystem unless the cache has never been populated.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
483
484/// Scan the filesystem for installed solc versions using `svm::installed_versions()`.
485///
486/// Returns sorted, deduplicated versions (ascending).
487fn scan_installed_versions() -> Vec<SemVer> {
488    svm::installed_versions()
489        .unwrap_or_default()
490        .iter()
491        .map(semver_to_local)
492        .collect()
493}
494
495/// Find the best matching installed version for a pragma constraint.
496///
497/// For all constraint types, picks the **latest** installed version that
498/// satisfies the constraint.
499pub fn find_matching_version(
500    constraint: &PragmaConstraint,
501    installed: &[SemVer],
502) -> Option<SemVer> {
503    let candidates: Vec<&SemVer> = installed
504        .iter()
505        .filter(|v| version_satisfies(v, constraint))
506        .collect();
507
508    // Pick the latest (last, since installed is sorted ascending)
509    candidates.last().cloned().cloned()
510}
511
512/// Check if a version satisfies a pragma constraint.
513pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
514    match constraint {
515        PragmaConstraint::Exact(v) => version == v,
516        PragmaConstraint::Caret(v) => {
517            // Solidity caret: ^0.8.0 means >=0.8.0 <0.9.0
518            // i.e. same major, next minor is the ceiling
519            version.major == v.major && version >= v && version.minor < v.minor + 1
520        }
521        PragmaConstraint::Gte(v) => version >= v,
522        PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
523    }
524}
525
526/// Fetch remappings by running `forge remappings` in the project root.
527///
528/// Falls back to config remappings, then to an empty list.
529pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
530    // Try `forge remappings` first — it merges all sources (foundry.toml,
531    // remappings.txt, auto-detected libs).
532    let output = Command::new("forge")
533        .arg("remappings")
534        .current_dir(&config.root)
535        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
536        .output()
537        .await;
538
539    if let Ok(output) = output
540        && output.status.success()
541    {
542        let stdout = String::from_utf8_lossy(&output.stdout);
543        let remappings: Vec<String> = stdout
544            .lines()
545            .filter(|l| !l.trim().is_empty())
546            .map(|l| l.to_string())
547            .collect();
548        if !remappings.is_empty() {
549            return remappings;
550        }
551    }
552
553    // Fall back to remappings from foundry.toml
554    if !config.remappings.is_empty() {
555        return config.remappings.clone();
556    }
557
558    // Fall back to remappings.txt at project root
559    let remappings_txt = config.root.join("remappings.txt");
560    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
561        return content
562            .lines()
563            .filter(|l| !l.trim().is_empty())
564            .map(|l| l.to_string())
565            .collect();
566    }
567
568    Vec::new()
569}
570
571/// Build the `--standard-json` input for solc.
572///
573/// Reads compiler settings from the `FoundryConfig` (parsed from `foundry.toml`)
574/// and maps them to the solc standard JSON `settings` object:
575///
576/// - `via_ir` → `settings.viaIR`
577/// - `evm_version` → `settings.evmVersion`
578///
579/// Note: `optimizer` is intentionally excluded — it adds ~3s and doesn't
580/// affect AST/ABI/doc quality.
581///
582/// `evm.gasEstimates` is conditionally included: when `via_ir` is **off**,
583/// gas estimates cost only ~0.7s (legacy pipeline) and enable gas inlay
584/// hints. When `via_ir` is **on**, requesting gas estimates forces solc
585/// through the full Yul IR codegen pipeline, inflating cold start from
586/// ~1.8s to ~14s — so they are excluded.
587pub fn build_standard_json_input(
588    file_path: &str,
589    remappings: &[String],
590    config: &FoundryConfig,
591) -> Value {
592    // Base contract-level outputs: ABI, docs, method selectors.
593    // Gas estimates are only included when viaIR is off (see doc comment).
594    let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
595    if !config.via_ir {
596        contract_outputs.push("evm.gasEstimates");
597    }
598
599    let mut settings = json!({
600        "remappings": remappings,
601        "outputSelection": {
602            "*": {
603                "*": contract_outputs,
604                "": ["ast"]
605            }
606        }
607    });
608
609    if config.via_ir {
610        settings["viaIR"] = json!(true);
611    }
612
613    // EVM version
614    if let Some(ref evm_version) = config.evm_version {
615        settings["evmVersion"] = json!(evm_version);
616    }
617
618    json!({
619        "language": "Solidity",
620        "sources": {
621            file_path: {
622                "urls": [file_path]
623            }
624        },
625        "settings": settings
626    })
627}
628
/// Run `solc --standard-json` and return the parsed output.
///
/// Serializes `input` to solc's stdin, waits for the process to finish,
/// and parses its stdout as JSON. Compilation errors are carried *inside*
/// the returned JSON (`errors` array), not via the exit status.
///
/// # Errors
/// Returns `RunnerError` if the process can't be spawned, stdin/stdout I/O
/// fails, solc emits nothing on stdout, or stdout isn't valid JSON.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    let input_str = serde_json::to_string(input)?;

    // `current_dir` matters: sources are passed to solc as project-relative
    // paths (see `solc_ast`), so imports only resolve from the project root.
    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // Write the standard-json input to solc's stdin.
    // NOTE(review): stdin is written to completion before stdout is drained
    // by wait_with_output(); this assumes solc consumes all of stdin before
    // producing output — confirm for very large inputs to rule out a pipe
    // deadlock.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
        // Drop stdin to close it, signaling EOF to solc.
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    // solc writes JSON to stdout even on errors (errors are in the JSON)
    let stdout = String::from_utf8_lossy(&output.stdout);
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
672
/// Normalize raw solc `--standard-json` output into the canonical shape.
///
/// Solc's native shape is already close to canonical:
/// - `sources[path] = { id, ast }` — kept as-is
/// - `contracts[path][name] = { abi, evm, ... }` — kept as-is
/// - `errors` — kept as-is (defaults to `[]` if absent)
///
/// When `project_root` is provided, relative source paths are resolved to
/// absolute paths so that downstream code (goto, hover, links) can map AST
/// paths back to `file://` URIs. This is necessary because `solc_ast()`
/// passes a relative path to solc (to fix import resolution), and solc then
/// returns relative paths in the AST `absolutePath` and source keys.
///
/// Constructs `source_id_to_path` from source IDs for cross-file resolution.
///
/// Takes ownership and uses `Value::take()` to move AST nodes in-place,
/// avoiding expensive clones of multi-MB AST data.
///
/// Also resolves `absolutePath` on nested `ImportDirective` nodes so that
/// goto-definition on import strings works regardless of CWD.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    /// Walk an AST node tree and resolve `absolutePath` on `ImportDirective` nodes.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                // `as_object_mut().unwrap()` is safe: `get` just succeeded,
                // so this node is a JSON object.
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        // Recurse into "nodes" array (top-level AST children)
        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Move errors out (defaults to [] if absent)
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Helper: resolve a path to absolute using the project root.
    // If the path is already absolute or no project root is given, return as-is.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    // Sources: rekey with absolute paths and update AST absolutePath fields.
    // Also build source_id_to_path for cross-file resolution.
    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Collect keys first to avoid borrow issues
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            // `remove` moves the entry out so it can be re-inserted under
            // its resolved key without cloning.
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                // Update the AST absolutePath field to match, and resolve
                // absolutePath on nested ImportDirective nodes so that
                // goto-definition works regardless of CWD.
                if let Some(ast) = source_data.get_mut("ast") {
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // `id.to_string()` renders the JSON number (e.g. `5`) as the
                // string key used by the canonical `source_id_to_path` map.
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contracts: rekey with absolute paths
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    // Construct source_id_to_path for cross-file resolution
    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
797
798/// Normalize forge `build --json --ast` output into the canonical shape.
799///
800/// Forge wraps data in arrays with metadata:
801/// - `sources[path] = [{ source_file: { id, ast }, build_id, profile, version }]`
802/// - `contracts[path][name] = [{ contract: { abi, evm, ... }, build_id, profile, version }]`
803/// - `build_infos = [{ source_id_to_path: { ... } }]`
804///
805/// This unwraps to the canonical flat shape:
806/// - `sources[path] = { id, ast }`
807/// - `contracts[path][name] = { abi, evm, ... }`
808/// - `source_id_to_path = { ... }`
809pub fn normalize_forge_output(mut forge_output: Value) -> Value {
810    let mut result = Map::new();
811
812    // Move errors out
813    let errors = forge_output
814        .get_mut("errors")
815        .map(Value::take)
816        .unwrap_or_else(|| json!([]));
817    result.insert("errors".to_string(), errors);
818
819    // Unwrap sources: [{ source_file: { id, ast } }] → { id, ast }
820    let mut normalized_sources = Map::new();
821    if let Some(sources) = forge_output
822        .get_mut("sources")
823        .and_then(|s| s.as_object_mut())
824    {
825        for (path, entries) in sources.iter_mut() {
826            if let Some(arr) = entries.as_array_mut()
827                && let Some(first) = arr.first_mut()
828                && let Some(sf) = first.get_mut("source_file")
829            {
830                normalized_sources.insert(path.clone(), sf.take());
831            }
832        }
833    }
834    result.insert("sources".to_string(), Value::Object(normalized_sources));
835
836    // Unwrap contracts: [{ contract: { ... } }] → { ... }
837    let mut normalized_contracts = Map::new();
838    if let Some(contracts) = forge_output
839        .get_mut("contracts")
840        .and_then(|c| c.as_object_mut())
841    {
842        for (path, names) in contracts.iter_mut() {
843            let mut path_contracts = Map::new();
844            if let Some(names_obj) = names.as_object_mut() {
845                for (name, entries) in names_obj.iter_mut() {
846                    if let Some(arr) = entries.as_array_mut()
847                        && let Some(first) = arr.first_mut()
848                        && let Some(contract) = first.get_mut("contract")
849                    {
850                        path_contracts.insert(name.clone(), contract.take());
851                    }
852                }
853            }
854            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
855        }
856    }
857    result.insert("contracts".to_string(), Value::Object(normalized_contracts));
858
859    // Extract source_id_to_path from build_infos
860    let source_id_to_path = forge_output
861        .get_mut("build_infos")
862        .and_then(|bi| bi.as_array_mut())
863        .and_then(|arr| arr.first_mut())
864        .and_then(|info| info.get_mut("source_id_to_path"))
865        .map(Value::take)
866        .unwrap_or_else(|| json!({}));
867    result.insert("source_id_to_path".to_string(), source_id_to_path);
868
869    Value::Object(result)
870}
871
872/// Run solc for a file and return normalized output.
873///
874/// This is the main entry point used by the LSP. Reads the file source
875/// to detect the pragma version and resolve the correct solc binary.
876pub async fn solc_ast(
877    file_path: &str,
878    config: &FoundryConfig,
879    client: Option<&tower_lsp::Client>,
880) -> Result<Value, RunnerError> {
881    let remappings = resolve_remappings(config).await;
882
883    // Collect pragma constraints from the file and all its transitive imports
884    // so we pick a solc version that satisfies the entire dependency graph.
885    let file_abs = Path::new(file_path);
886    let pragmas = collect_import_pragmas(file_abs, &config.root, &remappings);
887    let constraint = tightest_constraint(&pragmas);
888    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
889
890    // Solc's import resolver fails when sources use absolute paths — it resolves
891    // 0 transitive imports, causing "No matching declaration found" errors for
892    // inherited members. Convert to a path relative to the project root so solc
893    // can properly resolve `src/`, `lib/`, and remapped imports.
894    let rel_path = Path::new(file_path)
895        .strip_prefix(&config.root)
896        .map(|p| p.to_string_lossy().into_owned())
897        .unwrap_or_else(|_| file_path.to_string());
898
899    let input = build_standard_json_input(&rel_path, &remappings, config);
900    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;
901
902    Ok(normalize_solc_output(raw_output, Some(&config.root)))
903}
904
905/// Run solc for build diagnostics (same output, just used for error extraction).
906pub async fn solc_build(
907    file_path: &str,
908    config: &FoundryConfig,
909    client: Option<&tower_lsp::Client>,
910) -> Result<Value, RunnerError> {
911    solc_ast(file_path, config, client).await
912}
913
914// ── Project-wide indexing ──────────────────────────────────────────────────
915
916/// Discover all Solidity source files under the project root.
917///
918/// Walks the entire project directory, including `test/`, `script/`, and
919/// any other user-authored directories. Only skips:
920/// - Directories listed in `config.libs` (default: `["lib"]`)
921/// - Directories in `DISCOVER_SKIP_DIRS` (build artifacts)
922/// - Hidden directories (starting with `.`)
923///
924/// Includes `.t.sol` (test) and `.s.sol` (script) files so that
925/// find-references and rename work across the full project.
926pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
927    discover_source_files_inner(config, false)
928}
929
930/// Discover source files including library directories.
931///
932/// When `fullProjectScan` is enabled, this includes files from the configured
933/// `libs` directories (e.g. `dependencies/`, `node_modules/`).  Files with
934/// incompatible pragma versions are handled by the error-driven retry loop
935/// in [`solc_project_index_from_files`].
936pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
937    discover_source_files_inner(config, true)
938}
939
940fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
941    let root = &config.root;
942    if !root.is_dir() {
943        return Vec::new();
944    }
945    let skip_libs = if include_libs { &[][..] } else { &config.libs };
946    let mut files = Vec::new();
947    discover_recursive(root, skip_libs, &mut files);
948    files.sort();
949    files
950}
951
952/// Discover the true compilation closure by tracing imports from the
953/// project's own source files (`src/`, `test/`, `script/`, and any other
954/// non-lib top-level directories).
955///
956/// Starting from every `.sol` file returned by [`discover_source_files`]
957/// (project files only, no lib dirs), this BFS-walks the import graph using
958/// the provided remappings to resolve each `import` statement to an absolute
959/// path.  It adds every reachable file — including lib files that are actually
960/// imported — to the result set.
961///
962/// Files whose imports cannot be resolved (missing external deps that aren't
963/// in this project) are silently skipped at that edge; the importer is still
964/// included.
965///
966/// This produces a much smaller, self-consistent set than scanning all files
967/// in lib directories, and avoids pulling in lib files that have broken
968/// transitive deps (e.g. chainlink automation files that need `@eth-optimism`
969/// which is not vendored here).
970pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
971    // Seed: all project source files (no lib dirs).
972    let seeds = discover_source_files(config);
973    let mut visited: HashSet<PathBuf> = HashSet::new();
974    let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
975
976    while let Some(file) = queue.pop_front() {
977        if !visited.insert(file.clone()) {
978            continue;
979        }
980        let source = match std::fs::read_to_string(&file) {
981            Ok(s) => s,
982            Err(_) => continue,
983        };
984        for imp in links::ts_find_imports(source.as_bytes()) {
985            if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
986                if abs.exists() && !visited.contains(&abs) {
987                    queue.push_back(abs);
988                }
989            }
990        }
991    }
992
993    let mut result: Vec<PathBuf> = visited.into_iter().collect();
994    result.sort();
995    result
996}
997
/// Directories that are always skipped during source file discovery,
/// regardless of the `include_libs` setting (build artifacts only).
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];

/// Depth-first directory walk collecting `.sol` files into `files`.
///
/// Skips hidden directories (`.git`, `.github`, …), build-artifact
/// directories ([`DISCOVER_SKIP_DIRS`]), and any directory whose name is
/// listed in `skip_libs`. Unreadable directories are silently ignored.
fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = path.file_name().and_then(|n| n.to_str());
        if path.is_dir() {
            // Directories with non-UTF8 names are traversed (name == None).
            let skip = name.is_some_and(|n| {
                n.starts_with('.')
                    || DISCOVER_SKIP_DIRS.contains(&n)
                    || skip_libs.iter().any(|lib| lib == n)
            });
            if !skip {
                discover_recursive(&path, skip_libs, files);
            }
        } else if name.is_some_and(|n| n.ends_with(".sol")) {
            files.push(path);
        }
    }
}
1032
1033/// Build a `--standard-json` input that compiles all given source files at once.
1034///
1035/// Each file is added as a source entry with a `urls` field (relative to project root).
1036/// This produces a single AST covering the entire project in one solc invocation.
1037///
1038/// See [`build_standard_json_input`] for rationale on excluded settings.
1039pub fn build_batch_standard_json_input(
1040    source_files: &[PathBuf],
1041    remappings: &[String],
1042    config: &FoundryConfig,
1043) -> Value {
1044    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
1045}
1046
1047/// Build a batch standard-json input for solc.
1048///
1049/// When `content_cache` is provided, files whose URI string appears as a key
1050/// are included with `"content"` (in-memory source).  Files not in the cache
1051/// fall back to `"urls"` (solc reads from disk).
1052///
1053/// This allows the re-index after a rename to feed solc the updated import
1054/// paths from our text_cache without requiring the editor to have flushed
1055/// them to disk yet.
1056pub fn build_batch_standard_json_input_with_cache(
1057    source_files: &[PathBuf],
1058    remappings: &[String],
1059    config: &FoundryConfig,
1060    content_cache: Option<&HashMap<String, (i32, String)>>,
1061) -> Value {
1062    let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
1063    if !config.via_ir {
1064        contract_outputs.push("evm.gasEstimates");
1065    }
1066
1067    let mut settings = json!({
1068        "remappings": remappings,
1069        "outputSelection": {
1070            "*": {
1071                "*": contract_outputs,
1072                "": ["ast"]
1073            }
1074        }
1075    });
1076
1077    if config.via_ir {
1078        settings["viaIR"] = json!(true);
1079    }
1080    if let Some(ref evm_version) = config.evm_version {
1081        settings["evmVersion"] = json!(evm_version);
1082    }
1083
1084    let mut sources = serde_json::Map::new();
1085    for file in source_files {
1086        let rel_path = file
1087            .strip_prefix(&config.root)
1088            .map(|p| p.to_string_lossy().into_owned())
1089            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1090
1091        // Try to use cached content so solc doesn't need to read from disk.
1092        let cached_content = content_cache.and_then(|cache| {
1093            let uri = Url::from_file_path(file).ok()?;
1094            cache.get(&uri.to_string()).map(|(_, c)| c.as_str())
1095        });
1096
1097        if let Some(content) = cached_content {
1098            sources.insert(rel_path, json!({ "content": content }));
1099        } else {
1100            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1101        }
1102    }
1103
1104    json!({
1105        "language": "Solidity",
1106        "sources": sources,
1107        "settings": settings
1108    })
1109}
1110
1111/// Build a parse-only standard-json input (``stopAfter: "parsing"``).
1112///
1113/// Unlike the full batch input this mode stops before import resolution and
1114/// type-checking.  That means:
1115///
1116/// * No version 5333 errors cascade from imported incompatible files — the
1117///   compatible files are NOT fetched from disk as imports.
1118/// * The resulting ASTs contain all declaration nodes and local
1119///   ``referencedDeclaration`` IDs but **not** cross-file resolved IDs.
1120/// * Only ``ast`` output is requested; contract outputs (abi, gas …) are
1121///   omitted because they require type-checking.
1122///
1123/// This is used for the compatible-file batch in the mixed-version project
1124/// index so we can get parse-time ASTs for all project/lib files that satisfy
1125/// the project pragma, without being blocked by imports into incompatible lib
1126/// files.
1127pub fn build_parse_only_json_input(
1128    source_files: &[PathBuf],
1129    remappings: &[String],
1130    config: &FoundryConfig,
1131) -> Value {
1132    let settings = json!({
1133        "stopAfter": "parsing",
1134        "remappings": remappings,
1135        "outputSelection": {
1136            "*": {
1137                "": ["ast"]
1138            }
1139        }
1140    });
1141
1142    let mut sources = serde_json::Map::new();
1143    for file in source_files {
1144        let rel_path = file
1145            .strip_prefix(&config.root)
1146            .map(|p| p.to_string_lossy().into_owned())
1147            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1148        sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1149    }
1150
1151    json!({
1152        "language": "Solidity",
1153        "sources": sources,
1154        "settings": settings
1155    })
1156}
1157
1158/// Run a project-wide solc compilation and return normalized output.
1159///
1160/// Discovers all source files, compiles them in a single `solc --standard-json`
1161/// invocation, and returns the normalized AST data.
1162///
1163/// When `text_cache` is provided, files whose URI string appears as a key
1164/// are fed to solc via `"content"` (in-memory) rather than `"urls"` (disk).
1165/// This ensures the re-index after a rename uses the updated import paths
1166/// from our cache, even if the editor hasn't flushed them to disk yet.
1167pub async fn solc_project_index(
1168    config: &FoundryConfig,
1169    client: Option<&tower_lsp::Client>,
1170    text_cache: Option<&HashMap<String, (i32, String)>>,
1171) -> Result<Value, RunnerError> {
1172    // Resolve remappings first — needed for import tracing.
1173    let remappings = resolve_remappings(config).await;
1174
1175    // Trace imports from project source files to find the true compilation
1176    // closure.  This avoids pulling in lib files that are never imported by
1177    // the project (e.g. chainlink automation files that need @eth-optimism,
1178    // which isn't vendored here).
1179    let source_files = discover_compilation_closure(config, &remappings);
1180    if source_files.is_empty() {
1181        return Err(RunnerError::CommandError(std::io::Error::other(
1182            "no source files found for project index",
1183        )));
1184    }
1185
1186    solc_project_index_from_files(config, client, text_cache, &source_files).await
1187}
1188
1189/// Run a scoped project-index compile over a selected file list.
1190///
1191/// This is intended for aggressive incremental reindex strategies where only
1192/// a dependency-closure subset should be recompiled.
1193pub async fn solc_project_index_scoped(
1194    config: &FoundryConfig,
1195    client: Option<&tower_lsp::Client>,
1196    text_cache: Option<&HashMap<String, (i32, String)>>,
1197    source_files: &[PathBuf],
1198) -> Result<Value, RunnerError> {
1199    if source_files.is_empty() {
1200        return Err(RunnerError::CommandError(std::io::Error::other(
1201            "no source files provided for scoped project index",
1202        )));
1203    }
1204
1205    solc_project_index_from_files(config, client, text_cache, source_files).await
1206}
1207
/// Extract source file paths from solc error code 5333 ("Source file
/// requires different compiler version") errors. Returns the relative
/// paths exactly as they appear in `sourceLocation.file`.
#[cfg(test)]
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    let errors = solc_output
        .get("errors")
        .and_then(|e| e.as_array())
        .map(|arr| arr.as_slice())
        .unwrap_or(&[]);
    errors
        .iter()
        .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(|f| f.as_str())
        })
        .map(str::to_string)
        .collect()
}
1229
/// Build a reverse-import closure: given a set of files to exclude, find
/// all files that transitively import any of them. Those importers must
/// also be excluded, because solc would still resolve their imports from
/// disk and fail on the excluded files.
///
/// Returns the full exclusion set (seed files + their transitive importers).
#[cfg(test)]
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // importers_of[f] = set of files with a direct `import` of f
    // (the forward import graph, inverted as it is built).
    let mut importers_of: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
    for importer in source_files {
        let Ok(bytes) = std::fs::read(importer) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(target) =
                resolve_import_to_abs(project_root, importer, &imp.path, remappings)
            {
                importers_of
                    .entry(target)
                    .or_default()
                    .insert(importer.clone());
            }
        }
    }

    // Walk reverse edges from the seed set; every file reached joins the
    // exclusion closure.
    let mut closure: HashSet<PathBuf> = exclude_abs.clone();
    let mut stack: Vec<PathBuf> = exclude_abs.iter().cloned().collect();
    while let Some(node) = stack.pop() {
        if let Some(parents) = importers_of.get(&node) {
            for parent in parents {
                if closure.insert(parent.clone()) {
                    stack.push(parent.clone());
                }
            }
        }
    }

    closure
}
1278
1279/// Merge two normalized solc outputs at the `Value` level.
1280///
1281/// Combines `sources`, `contracts`, `source_id_to_path`, and `errors` from
1282/// `other` into `base`.  Source IDs in `other` are remapped to avoid
1283/// collisions with `base`.
1284fn merge_normalized_outputs(base: &mut Value, other: Value) {
1285    // Merge sources (keyed by absolute path — no collisions across partitions).
1286    if let (Some(base_sources), Some(other_sources)) = (
1287        base.get_mut("sources").and_then(|s| s.as_object_mut()),
1288        other.get("sources").and_then(|s| s.as_object()),
1289    ) {
1290        // Find the max source ID in base so we can remap other's IDs.
1291        let max_base_id = base_sources
1292            .values()
1293            .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1294            .max()
1295            .map(|m| m + 1)
1296            .unwrap_or(0);
1297
1298        // Collect other's id -> path mappings for source_id_to_path.
1299        let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1300
1301        for (path, mut source_data) in other_sources.clone() {
1302            // Remap the source ID to avoid collisions.
1303            if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1304                let new_id = id + max_base_id;
1305                source_data
1306                    .as_object_mut()
1307                    .unwrap()
1308                    .insert("id".to_string(), json!(new_id));
1309                remapped_id_to_path.push((new_id.to_string(), path.clone()));
1310            }
1311            base_sources.insert(path, source_data);
1312        }
1313
1314        // Merge source_id_to_path.
1315        if let Some(base_id_map) = base
1316            .get_mut("source_id_to_path")
1317            .and_then(|m| m.as_object_mut())
1318        {
1319            for (id, path) in remapped_id_to_path {
1320                base_id_map.insert(id, json!(path));
1321            }
1322        }
1323    }
1324
1325    // Merge contracts.
1326    if let (Some(base_contracts), Some(other_contracts)) = (
1327        base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1328        other.get("contracts").and_then(|c| c.as_object()),
1329    ) {
1330        for (path, contract_data) in other_contracts {
1331            base_contracts.insert(path.clone(), contract_data.clone());
1332        }
1333    }
1334
1335    // Don't merge errors — the retry errors from incompatible files are noise.
1336    // The base already has the clean errors from the successful compilation.
1337}
1338
/// Core project-index pipeline shared by [`solc_project_index`] and
/// [`solc_project_index_scoped`].
///
/// Steps:
/// 1. Resolve remappings and the project solc version (foundry.toml pin if
///    set, otherwise the first parseable pragma among `source_files`).
/// 2. Partition files into pragma-compatible vs incompatible with that
///    version — solc emits ZERO ASTs when any batched file raises a 5333
///    version error, so incompatible files must stay out of the batch.
/// 3. Batch-compile the compatible set (optionally feeding in-memory
///    sources from `text_cache`), then compile each incompatible file
///    individually with a solc matching its own pragma and merge the
///    normalized outputs into the batch result.
///
/// Returns an error only when `source_files` is empty or the main batch
/// compile itself fails; individual-file failures are logged (when a
/// `client` is available) and counted as skipped. Errors from the retries
/// are intentionally not merged (see [`merge_normalized_outputs`]).
async fn solc_project_index_from_files(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<String, (i32, String)>>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: discovered {} source files in {}",
                source_files.len(),
                config.root.display()
            ),
        )
        .await;
    }

    let remappings = resolve_remappings(config).await;

    // Resolve the project's solc version from foundry.toml.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));

    // When no version is pinned in foundry.toml, derive a constraint from the
    // source files' pragmas so that svm can auto-install a matching binary.
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // -- Pre-scan pragmas to separate compatible vs incompatible files. --
    //
    // Solc emits ZERO ASTs when any file in the batch has a version error
    // (5333).  We must exclude incompatible files before compiling so the
    // batch produces full AST output for all compatible files.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            // Unreadable files are also treated as compatible here; the
            // batch compile will surface any real error for them.
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                // Files without a pragma are assumed compatible.
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        // No project version configured — compile everything in one batch.
        (source_files.to_vec(), Vec::new())
    };

    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // -- Full batch compile of compatible files. --
    //
    // The source file list comes from discover_compilation_closure which only
    // includes files reachable via imports from src/test/script — so all files
    // in the batch are version-compatible and their transitive imports resolve.
    // A full (non-parse-only) compile is required so that cross-file
    // referencedDeclaration IDs are populated for goto-references to work.
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input = build_batch_standard_json_input_with_cache(
            &compatible_files,
            &remappings,
            config,
            text_cache,
        );
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    let batch_source_count = result
        .get("sources")
        .and_then(|s| s.as_object())
        .map_or(0, |obj| obj.len());

    if incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: compiled {} files with no version mismatches",
                    source_files.len(),
                ),
            )
            .await;
        }
        return Ok(result);
    }

    if let Some(c) = client {
        // Log first few errors from the batch to understand why sources=0.
        let batch_errors: Vec<String> = result
            .get("errors")
            .and_then(|e| e.as_array())
            .map(|arr| {
                arr.iter()
                    .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
                    .take(3)
                    .filter_map(|e| {
                        let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
                        let file = e
                            .get("sourceLocation")
                            .and_then(|sl| sl.get("file"))
                            .and_then(|f| f.as_str())
                            .unwrap_or("?");
                        Some(format!("{file}: {msg}"))
                    })
                    .collect()
            })
            .unwrap_or_default();

        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
                batch_source_count,
                incompatible_files.len(),
                if batch_errors.is_empty() {
                    String::new()
                } else {
                    format!(" [first errors: {}]", batch_errors.join("; "))
                },
            ),
        )
        .await;
    }

    // -- Individually compile incompatible files with their matching solc. --
    let mut compiled = 0usize;
    let mut skipped = 0usize;
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));

        // A file without a parseable pragma was only classified incompatible
        // above if it HAD one, so this is defensive; skip rather than guess.
        let Some(file_constraint) = pragma else {
            skipped += 1;
            continue;
        };

        let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
        let input = build_batch_standard_json_input_with_cache(
            &[file.clone()],
            &remappings,
            config,
            text_cache,
        );
        match run_solc(&file_binary, &input, &config.root).await {
            Ok(raw) => {
                let normalized = normalize_solc_output(raw, Some(&config.root));
                merge_normalized_outputs(&mut result, normalized);
                compiled += 1;
            }
            Err(e) => {
                // Best-effort: a failing retry never fails the whole index.
                if let Some(c) = client {
                    c.log_message(
                        tower_lsp::lsp_types::MessageType::WARNING,
                        format!(
                            "project index: incompatible file {} failed: {e}",
                            file.display(),
                        ),
                    )
                    .await;
                }
                skipped += 1;
            }
        }
    }

    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
            ),
        )
        .await;
    }

    Ok(result)
}
1558
1559#[cfg(test)]
1560mod tests {
1561    use super::*;
1562
1563    #[test]
1564    fn test_normalize_solc_sources() {
1565        let solc_output = json!({
1566            "sources": {
1567                "src/Foo.sol": {
1568                    "id": 0,
1569                    "ast": {
1570                        "nodeType": "SourceUnit",
1571                        "absolutePath": "src/Foo.sol",
1572                        "id": 100
1573                    }
1574                },
1575                "src/Bar.sol": {
1576                    "id": 1,
1577                    "ast": {
1578                        "nodeType": "SourceUnit",
1579                        "absolutePath": "src/Bar.sol",
1580                        "id": 200
1581                    }
1582                }
1583            },
1584            "contracts": {},
1585            "errors": []
1586        });
1587
1588        let normalized = normalize_solc_output(solc_output, None);
1589
1590        // Sources kept in solc-native shape: path -> { id, ast }
1591        let sources = normalized.get("sources").unwrap().as_object().unwrap();
1592        assert_eq!(sources.len(), 2);
1593
1594        let foo = sources.get("src/Foo.sol").unwrap();
1595        assert_eq!(foo.get("id").unwrap(), 0);
1596        assert_eq!(
1597            foo.get("ast")
1598                .unwrap()
1599                .get("nodeType")
1600                .unwrap()
1601                .as_str()
1602                .unwrap(),
1603            "SourceUnit"
1604        );
1605
1606        // Check source_id_to_path constructed
1607        let id_to_path = normalized
1608            .get("source_id_to_path")
1609            .unwrap()
1610            .as_object()
1611            .unwrap();
1612        assert_eq!(id_to_path.len(), 2);
1613    }
1614
1615    #[test]
1616    fn test_normalize_solc_contracts() {
1617        let solc_output = json!({
1618            "sources": {},
1619            "contracts": {
1620                "src/Foo.sol": {
1621                    "Foo": {
1622                        "abi": [{"type": "function", "name": "bar"}],
1623                        "evm": {
1624                            "methodIdentifiers": {
1625                                "bar(uint256)": "abcd1234"
1626                            },
1627                            "gasEstimates": {
1628                                "external": {"bar(uint256)": "200"}
1629                            }
1630                        }
1631                    }
1632                }
1633            },
1634            "errors": []
1635        });
1636
1637        let normalized = normalize_solc_output(solc_output, None);
1638
1639        // Contracts kept in solc-native shape: path -> name -> { abi, evm, ... }
1640        let contracts = normalized.get("contracts").unwrap().as_object().unwrap();
1641        let foo_contracts = contracts.get("src/Foo.sol").unwrap().as_object().unwrap();
1642        let foo = foo_contracts.get("Foo").unwrap();
1643
1644        let method_ids = foo
1645            .get("evm")
1646            .unwrap()
1647            .get("methodIdentifiers")
1648            .unwrap()
1649            .as_object()
1650            .unwrap();
1651        assert_eq!(
1652            method_ids.get("bar(uint256)").unwrap().as_str().unwrap(),
1653            "abcd1234"
1654        );
1655    }
1656
1657    #[test]
1658    fn test_normalize_solc_errors_passthrough() {
1659        let solc_output = json!({
1660            "sources": {},
1661            "contracts": {},
1662            "errors": [{
1663                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
1664                "type": "Warning",
1665                "component": "general",
1666                "severity": "warning",
1667                "errorCode": "2394",
1668                "message": "test warning",
1669                "formattedMessage": "Warning: test warning"
1670            }]
1671        });
1672
1673        let normalized = normalize_solc_output(solc_output, None);
1674
1675        let errors = normalized.get("errors").unwrap().as_array().unwrap();
1676        assert_eq!(errors.len(), 1);
1677        assert_eq!(
1678            errors[0].get("errorCode").unwrap().as_str().unwrap(),
1679            "2394"
1680        );
1681    }
1682
1683    #[test]
1684    fn test_normalize_empty_solc_output() {
1685        let solc_output = json!({
1686            "sources": {},
1687            "contracts": {}
1688        });
1689
1690        let normalized = normalize_solc_output(solc_output, None);
1691
1692        assert!(
1693            normalized
1694                .get("sources")
1695                .unwrap()
1696                .as_object()
1697                .unwrap()
1698                .is_empty()
1699        );
1700        assert!(
1701            normalized
1702                .get("contracts")
1703                .unwrap()
1704                .as_object()
1705                .unwrap()
1706                .is_empty()
1707        );
1708        assert_eq!(
1709            normalized.get("errors").unwrap().as_array().unwrap().len(),
1710            0
1711        );
1712        assert!(
1713            normalized
1714                .get("source_id_to_path")
1715                .unwrap()
1716                .as_object()
1717                .unwrap()
1718                .is_empty()
1719        );
1720    }
1721
1722    #[test]
1723    fn test_build_standard_json_input() {
1724        let config = FoundryConfig::default();
1725        let input = build_standard_json_input(
1726            "/path/to/Foo.sol",
1727            &[
1728                "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
1729                "forge-std/=lib/forge-std/src/".to_string(),
1730            ],
1731            &config,
1732        );
1733
1734        let sources = input.get("sources").unwrap().as_object().unwrap();
1735        assert!(sources.contains_key("/path/to/Foo.sol"));
1736
1737        let settings = input.get("settings").unwrap();
1738        let remappings = settings.get("remappings").unwrap().as_array().unwrap();
1739        assert_eq!(remappings.len(), 2);
1740
1741        let output_sel = settings.get("outputSelection").unwrap();
1742        assert!(output_sel.get("*").is_some());
1743
1744        // Default config: no optimizer, no viaIR, no evmVersion
1745        assert!(settings.get("optimizer").is_none());
1746        assert!(settings.get("viaIR").is_none());
1747        assert!(settings.get("evmVersion").is_none());
1748
1749        // Without viaIR, gasEstimates is included (~0.7s, enables gas hints)
1750        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
1751        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
1752        assert!(output_names.contains(&"evm.gasEstimates"));
1753        assert!(output_names.contains(&"abi"));
1754        assert!(output_names.contains(&"devdoc"));
1755        assert!(output_names.contains(&"userdoc"));
1756        assert!(output_names.contains(&"evm.methodIdentifiers"));
1757    }
1758
1759    #[test]
1760    fn test_build_standard_json_input_with_config() {
1761        let config = FoundryConfig {
1762            optimizer: true,
1763            optimizer_runs: 9999999,
1764            via_ir: true,
1765            evm_version: Some("osaka".to_string()),
1766            ..Default::default()
1767        };
1768        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config);
1769
1770        let settings = input.get("settings").unwrap();
1771
1772        // Optimizer is never passed — adds ~3s and doesn't affect AST/ABI/docs
1773        assert!(settings.get("optimizer").is_none());
1774
1775        // viaIR IS passed when config has it (some contracts require it to compile)
1776        assert!(settings.get("viaIR").unwrap().as_bool().unwrap());
1777
1778        // With viaIR, gasEstimates is excluded (would cause 14s cold start)
1779        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
1780        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
1781        assert!(!output_names.contains(&"evm.gasEstimates"));
1782
1783        // EVM version
1784        assert_eq!(
1785            settings.get("evmVersion").unwrap().as_str().unwrap(),
1786            "osaka"
1787        );
1788    }
1789
1790    #[tokio::test]
1791    async fn test_resolve_solc_binary_default() {
1792        let config = FoundryConfig::default();
1793        let binary = resolve_solc_binary(&config, None, None).await;
1794        assert_eq!(binary, PathBuf::from("solc"));
1795    }
1796
1797    #[test]
1798    fn test_parse_pragma_exact() {
1799        let source = "// SPDX\npragma solidity 0.8.26;\n";
1800        assert_eq!(
1801            parse_pragma(source),
1802            Some(PragmaConstraint::Exact(SemVer {
1803                major: 0,
1804                minor: 8,
1805                patch: 26
1806            }))
1807        );
1808    }
1809
1810    #[test]
1811    fn test_parse_pragma_caret() {
1812        let source = "pragma solidity ^0.8.0;\n";
1813        assert_eq!(
1814            parse_pragma(source),
1815            Some(PragmaConstraint::Caret(SemVer {
1816                major: 0,
1817                minor: 8,
1818                patch: 0
1819            }))
1820        );
1821    }
1822
1823    #[test]
1824    fn test_parse_pragma_gte() {
1825        let source = "pragma solidity >=0.8.0;\n";
1826        assert_eq!(
1827            parse_pragma(source),
1828            Some(PragmaConstraint::Gte(SemVer {
1829                major: 0,
1830                minor: 8,
1831                patch: 0
1832            }))
1833        );
1834    }
1835
1836    #[test]
1837    fn test_parse_pragma_range() {
1838        let source = "pragma solidity >=0.6.2 <0.9.0;\n";
1839        assert_eq!(
1840            parse_pragma(source),
1841            Some(PragmaConstraint::Range(
1842                SemVer {
1843                    major: 0,
1844                    minor: 6,
1845                    patch: 2
1846                },
1847                SemVer {
1848                    major: 0,
1849                    minor: 9,
1850                    patch: 0
1851                },
1852            ))
1853        );
1854    }
1855
1856    #[test]
1857    fn test_parse_pragma_none() {
1858        let source = "contract Foo {}\n";
1859        assert_eq!(parse_pragma(source), None);
1860    }
1861
1862    #[test]
1863    fn test_version_satisfies_exact() {
1864        let v = SemVer {
1865            major: 0,
1866            minor: 8,
1867            patch: 26,
1868        };
1869        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
1870        assert!(!version_satisfies(
1871            &SemVer {
1872                major: 0,
1873                minor: 8,
1874                patch: 25
1875            },
1876            &PragmaConstraint::Exact(v)
1877        ));
1878    }
1879
1880    #[test]
1881    fn test_version_satisfies_caret() {
1882        let constraint = PragmaConstraint::Caret(SemVer {
1883            major: 0,
1884            minor: 8,
1885            patch: 0,
1886        });
1887        assert!(version_satisfies(
1888            &SemVer {
1889                major: 0,
1890                minor: 8,
1891                patch: 0
1892            },
1893            &constraint
1894        ));
1895        assert!(version_satisfies(
1896            &SemVer {
1897                major: 0,
1898                minor: 8,
1899                patch: 26
1900            },
1901            &constraint
1902        ));
1903        // 0.9.0 is outside ^0.8.0
1904        assert!(!version_satisfies(
1905            &SemVer {
1906                major: 0,
1907                minor: 9,
1908                patch: 0
1909            },
1910            &constraint
1911        ));
1912        // 0.7.0 is below
1913        assert!(!version_satisfies(
1914            &SemVer {
1915                major: 0,
1916                minor: 7,
1917                patch: 0
1918            },
1919            &constraint
1920        ));
1921    }
1922
1923    #[test]
1924    fn test_version_satisfies_gte() {
1925        let constraint = PragmaConstraint::Gte(SemVer {
1926            major: 0,
1927            minor: 8,
1928            patch: 0,
1929        });
1930        assert!(version_satisfies(
1931            &SemVer {
1932                major: 0,
1933                minor: 8,
1934                patch: 0
1935            },
1936            &constraint
1937        ));
1938        assert!(version_satisfies(
1939            &SemVer {
1940                major: 0,
1941                minor: 9,
1942                patch: 0
1943            },
1944            &constraint
1945        ));
1946        assert!(!version_satisfies(
1947            &SemVer {
1948                major: 0,
1949                minor: 7,
1950                patch: 0
1951            },
1952            &constraint
1953        ));
1954    }
1955
1956    #[test]
1957    fn test_version_satisfies_range() {
1958        let constraint = PragmaConstraint::Range(
1959            SemVer {
1960                major: 0,
1961                minor: 6,
1962                patch: 2,
1963            },
1964            SemVer {
1965                major: 0,
1966                minor: 9,
1967                patch: 0,
1968            },
1969        );
1970        assert!(version_satisfies(
1971            &SemVer {
1972                major: 0,
1973                minor: 6,
1974                patch: 2
1975            },
1976            &constraint
1977        ));
1978        assert!(version_satisfies(
1979            &SemVer {
1980                major: 0,
1981                minor: 8,
1982                patch: 26
1983            },
1984            &constraint
1985        ));
1986        // 0.9.0 is the upper bound (exclusive)
1987        assert!(!version_satisfies(
1988            &SemVer {
1989                major: 0,
1990                minor: 9,
1991                patch: 0
1992            },
1993            &constraint
1994        ));
1995        assert!(!version_satisfies(
1996            &SemVer {
1997                major: 0,
1998                minor: 6,
1999                patch: 1
2000            },
2001            &constraint
2002        ));
2003    }
2004
2005    #[test]
2006    fn test_find_matching_version() {
2007        let installed = vec![
2008            SemVer {
2009                major: 0,
2010                minor: 8,
2011                patch: 0,
2012            },
2013            SemVer {
2014                major: 0,
2015                minor: 8,
2016                patch: 20,
2017            },
2018            SemVer {
2019                major: 0,
2020                minor: 8,
2021                patch: 26,
2022            },
2023            SemVer {
2024                major: 0,
2025                minor: 8,
2026                patch: 33,
2027            },
2028        ];
2029        // ^0.8.20 should pick latest: 0.8.33
2030        let constraint = PragmaConstraint::Caret(SemVer {
2031            major: 0,
2032            minor: 8,
2033            patch: 20,
2034        });
2035        let matched = find_matching_version(&constraint, &installed);
2036        assert_eq!(
2037            matched,
2038            Some(SemVer {
2039                major: 0,
2040                minor: 8,
2041                patch: 33
2042            })
2043        );
2044
2045        // exact 0.8.20
2046        let constraint = PragmaConstraint::Exact(SemVer {
2047            major: 0,
2048            minor: 8,
2049            patch: 20,
2050        });
2051        let matched = find_matching_version(&constraint, &installed);
2052        assert_eq!(
2053            matched,
2054            Some(SemVer {
2055                major: 0,
2056                minor: 8,
2057                patch: 20
2058            })
2059        );
2060
2061        // exact 0.8.15 — not installed
2062        let constraint = PragmaConstraint::Exact(SemVer {
2063            major: 0,
2064            minor: 8,
2065            patch: 15,
2066        });
2067        let matched = find_matching_version(&constraint, &installed);
2068        assert_eq!(matched, None);
2069    }
2070
2071    #[test]
2072    fn test_list_installed_versions() {
2073        // Just verify it doesn't panic — actual versions depend on system
2074        let versions = list_installed_versions();
2075        // Versions should be sorted
2076        for w in versions.windows(2) {
2077            assert!(w[0] <= w[1]);
2078        }
2079    }
2080
2081    // -------------------------------------------------------------------
2082    // Tests for mixed-version retry helpers
2083    // -------------------------------------------------------------------
2084
2085    #[test]
2086    fn test_extract_version_error_files_basic() {
2087        let output = json!({
2088            "errors": [
2089                {
2090                    "errorCode": "5333",
2091                    "severity": "error",
2092                    "message": "Source file requires different compiler version",
2093                    "sourceLocation": {
2094                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
2095                        "start": 32,
2096                        "end": 58
2097                    }
2098                },
2099                {
2100                    "errorCode": "5333",
2101                    "severity": "error",
2102                    "message": "Source file requires different compiler version",
2103                    "sourceLocation": {
2104                        "file": "lib/old-lib/src/Legacy.sol",
2105                        "start": 32,
2106                        "end": 58
2107                    }
2108                },
2109                {
2110                    "errorCode": "9574",
2111                    "severity": "error",
2112                    "message": "Some other error",
2113                    "sourceLocation": {
2114                        "file": "src/Main.sol",
2115                        "start": 100,
2116                        "end": 200
2117                    }
2118                }
2119            ]
2120        });
2121
2122        let files = extract_version_error_files(&output);
2123        assert_eq!(files.len(), 2);
2124        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
2125        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
2126        // Non-5333 error files should NOT be included.
2127        assert!(!files.contains("src/Main.sol"));
2128    }
2129
2130    #[test]
2131    fn test_extract_version_error_files_empty() {
2132        let output = json!({
2133            "errors": []
2134        });
2135        assert!(extract_version_error_files(&output).is_empty());
2136
2137        // No errors key at all.
2138        let output = json!({});
2139        assert!(extract_version_error_files(&output).is_empty());
2140    }
2141
2142    #[test]
2143    fn test_extract_version_error_files_no_source_location() {
2144        let output = json!({
2145            "errors": [
2146                {
2147                    "errorCode": "5333",
2148                    "severity": "error",
2149                    "message": "Source file requires different compiler version"
2150                    // No sourceLocation field.
2151                }
2152            ]
2153        });
2154        assert!(extract_version_error_files(&output).is_empty());
2155    }
2156
2157    #[test]
2158    fn test_extract_version_error_files_dedup() {
2159        let output = json!({
2160            "errors": [
2161                {
2162                    "errorCode": "5333",
2163                    "severity": "error",
2164                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
2165                },
2166                {
2167                    "errorCode": "5333",
2168                    "severity": "error",
2169                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
2170                }
2171            ]
2172        });
2173        let files = extract_version_error_files(&output);
2174        assert_eq!(files.len(), 1);
2175        assert!(files.contains("lib/same.sol"));
2176    }
2177
2178    #[test]
2179    fn test_reverse_import_closure_simple() {
2180        // Create a temp directory with three files:
2181        //   a.sol imports b.sol
2182        //   b.sol imports c.sol
2183        //   d.sol (standalone)
2184        //
2185        // If c.sol is excluded, the closure should include: c.sol, b.sol, a.sol
2186        // (b imports c, a imports b — both are transitive importers of c).
2187        let dir = tempfile::tempdir().unwrap();
2188        let root = dir.path();
2189
2190        std::fs::write(
2191            root.join("a.sol"),
2192            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
2193        )
2194        .unwrap();
2195        std::fs::write(
2196            root.join("b.sol"),
2197            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
2198        )
2199        .unwrap();
2200        std::fs::write(
2201            root.join("c.sol"),
2202            "// SPDX-License-Identifier: MIT\ncontract C {}",
2203        )
2204        .unwrap();
2205        std::fs::write(
2206            root.join("d.sol"),
2207            "// SPDX-License-Identifier: MIT\ncontract D {}",
2208        )
2209        .unwrap();
2210
2211        let files: Vec<PathBuf> = vec![
2212            root.join("a.sol"),
2213            root.join("b.sol"),
2214            root.join("c.sol"),
2215            root.join("d.sol"),
2216        ];
2217
2218        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();
2219        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2220
2221        assert!(
2222            closure.contains(&root.join("c.sol")),
2223            "seed file in closure"
2224        );
2225        assert!(closure.contains(&root.join("b.sol")), "direct importer");
2226        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
2227        assert!(
2228            !closure.contains(&root.join("d.sol")),
2229            "unrelated file not in closure"
2230        );
2231        assert_eq!(closure.len(), 3);
2232    }
2233
2234    #[test]
2235    fn test_reverse_import_closure_no_importers() {
2236        // Excluding a file that nothing imports — closure is just the seed.
2237        let dir = tempfile::tempdir().unwrap();
2238        let root = dir.path();
2239
2240        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
2241        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();
2242
2243        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
2244        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();
2245
2246        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2247        assert_eq!(closure.len(), 1);
2248        assert!(closure.contains(&root.join("a.sol")));
2249    }
2250
2251    #[test]
2252    fn test_reverse_import_closure_diamond() {
2253        // Diamond pattern:
2254        //   a.sol imports b.sol and c.sol
2255        //   b.sol imports d.sol
2256        //   c.sol imports d.sol
2257        //
2258        // Excluding d.sol → closure = {d, b, c, a}
2259        let dir = tempfile::tempdir().unwrap();
2260        let root = dir.path();
2261
2262        std::fs::write(
2263            root.join("a.sol"),
2264            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
2265        )
2266        .unwrap();
2267        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
2268        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
2269        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();
2270
2271        let files: Vec<PathBuf> = vec![
2272            root.join("a.sol"),
2273            root.join("b.sol"),
2274            root.join("c.sol"),
2275            root.join("d.sol"),
2276        ];
2277        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();
2278
2279        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2280        assert_eq!(closure.len(), 4);
2281    }
2282
2283    #[test]
2284    fn test_merge_normalized_outputs_basic() {
2285        let mut base = json!({
2286            "sources": {
2287                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
2288                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
2289            },
2290            "contracts": {
2291                "/abs/src/A.sol": { "A": { "abi": [] } }
2292            },
2293            "errors": [],
2294            "source_id_to_path": {
2295                "0": "/abs/src/A.sol",
2296                "1": "/abs/src/B.sol"
2297            }
2298        });
2299
2300        let other = json!({
2301            "sources": {
2302                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
2303            },
2304            "contracts": {
2305                "/abs/lib/C.sol": { "C": { "abi": [] } }
2306            },
2307            "errors": [],
2308            "source_id_to_path": {
2309                "0": "/abs/lib/C.sol"
2310            }
2311        });
2312
2313        merge_normalized_outputs(&mut base, other);
2314
2315        // Sources should now have 3 entries.
2316        let sources = base["sources"].as_object().unwrap();
2317        assert_eq!(sources.len(), 3);
2318        assert!(sources.contains_key("/abs/lib/C.sol"));
2319
2320        // The merged source's ID should be remapped (0 + max_base_id=2 → 2).
2321        let c_id = sources["/abs/lib/C.sol"]["id"].as_u64().unwrap();
2322        assert_eq!(
2323            c_id, 2,
2324            "remapped id should be max_base_id (2) + original (0)"
2325        );
2326
2327        // source_id_to_path should have 3 entries.
2328        let id_map = base["source_id_to_path"].as_object().unwrap();
2329        assert_eq!(id_map.len(), 3);
2330        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");
2331
2332        // Contracts should have 2 entries.
2333        let contracts = base["contracts"].as_object().unwrap();
2334        assert_eq!(contracts.len(), 2);
2335        assert!(contracts.contains_key("/abs/lib/C.sol"));
2336    }
2337
2338    #[test]
2339    fn test_merge_normalized_outputs_empty_other() {
2340        let mut base = json!({
2341            "sources": {
2342                "/abs/src/A.sol": { "id": 0, "ast": {} }
2343            },
2344            "contracts": {},
2345            "errors": [],
2346            "source_id_to_path": { "0": "/abs/src/A.sol" }
2347        });
2348
2349        let other = json!({
2350            "sources": {},
2351            "contracts": {},
2352            "errors": [],
2353            "source_id_to_path": {}
2354        });
2355
2356        merge_normalized_outputs(&mut base, other);
2357
2358        let sources = base["sources"].as_object().unwrap();
2359        assert_eq!(sources.len(), 1);
2360    }
2361
2362    #[test]
2363    fn test_merge_normalized_outputs_empty_base() {
2364        let mut base = json!({
2365            "sources": {},
2366            "contracts": {},
2367            "errors": [],
2368            "source_id_to_path": {}
2369        });
2370
2371        let other = json!({
2372            "sources": {
2373                "/abs/lib/X.sol": { "id": 0, "ast": {} }
2374            },
2375            "contracts": {
2376                "/abs/lib/X.sol": { "X": { "abi": [] } }
2377            },
2378            "errors": [],
2379            "source_id_to_path": { "0": "/abs/lib/X.sol" }
2380        });
2381
2382        merge_normalized_outputs(&mut base, other);
2383
2384        let sources = base["sources"].as_object().unwrap();
2385        assert_eq!(sources.len(), 1);
2386        // max_base_id is 0 (no entries), so remapped id = 0 + 0 = 0.
2387        let x_id = sources["/abs/lib/X.sol"]["id"].as_u64().unwrap();
2388        assert_eq!(x_id, 0);
2389    }
2390}