Skip to main content

solidity_language_server/
solc.rs

1//! Direct `solc --standard-json` runner for fast AST generation.
2//!
3//! The output is normalized into the same shape that `forge build --json --ast`
4//! produces, so all downstream consumers (goto, hover, completions, etc.) work
5//! unchanged.
6
7use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Cached list of installed solc versions. Populated lazily on first access
/// by `get_installed_versions()` (via `get_or_init`), and refreshed by
/// `invalidate_installed_versions()` after a successful `svm::install`.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22    let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23    mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27    if let Some(mutex) = INSTALLED_VERSIONS.get() {
28        *mutex.lock().unwrap() = scan_installed_versions();
29    }
30}
31
32/// Convert a `semver::Version` (from svm-rs) to our lightweight `SemVer`.
33fn semver_to_local(v: &semver::Version) -> SemVer {
34    SemVer {
35        major: v.major as u32,
36        minor: v.minor as u32,
37        patch: v.patch as u32,
38    }
39}
40
/// Resolve the path to the solc binary.
///
/// Resolution order:
/// 1. Parse `pragma solidity` from the source file.
///    - **Exact pragma** (`=0.7.6`): always use the file's version — foundry.toml
///      cannot override an exact pragma without breaking compilation.
///    - **Wildcard pragma** (`^0.8.0`, `>=0.8.0`, `>=0.6.2 <0.9.0`): if
///      `foundry.toml` specifies a solc version that satisfies the constraint,
///      use it. Otherwise pick the latest matching installed version.
/// 2. If no pragma, use the `foundry.toml` solc version if set.
/// 3. If no match is installed, auto-install via `svm install`.
/// 4. Fall back to whatever `solc` is on `$PATH`.
///
/// Progress is reported through `client` when provided: `log_message` for
/// routine decisions, `show_message` for installs and install failures.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    // 1. Try pragma constraint (may be tightened from the full import graph)
    if let Some(constraint) = constraint {
        // For exact pragmas, always honour the file — foundry.toml can't override
        // without causing a compilation failure.
        // For wildcard pragmas, prefer the foundry.toml version if it satisfies
        // the constraint. This mirrors `forge build` behaviour where the project
        // config picks the version but the pragma must still be satisfied.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {config_ver} (pragma {constraint})"),
                )
                .await;
            }
            return path;
        }

        // Otherwise: newest installed version that satisfies the constraint.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {version}"),
                )
                .await;
            }
            return path;
        }

        // No matching version installed — try auto-install via svm
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // Refresh the cached version list after install
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                // The freshly-installed binary should now exist on disk; if
                // it doesn't, fall through to the generic fallbacks below.
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                             Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // 2. No pragma — use foundry.toml version if available.
    // NOTE(review): this branch is also reached when a pragma DID exist but no
    // satisfying solc could be found or installed above; in that case the
    // foundry.toml version is used even though it may not satisfy the pragma
    // (and the "no pragma" wording in the log below is then inaccurate).
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // 3. Last resort: whatever `solc` is on $PATH.
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
160
161/// Determine which version to install for a pragma constraint.
162///
163/// - Exact: install that version
164/// - Caret `^0.8.20`: install `0.8.20` (minimum satisfying)
165/// - Gte `>=0.8.0`: install `0.8.0` (minimum satisfying)
166/// - Range `>=0.6.2 <0.9.0`: install `0.6.2` (minimum satisfying)
167fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
168    match constraint {
169        PragmaConstraint::Exact(v) => Some(v.to_string()),
170        PragmaConstraint::Caret(v) => Some(v.to_string()),
171        PragmaConstraint::Gte(v) => Some(v.to_string()),
172        PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
173    }
174}
175
176/// Install a solc version using svm-rs library.
177///
178/// Returns `true` if the install succeeded.
179async fn svm_install(version: &str) -> bool {
180    let ver = match semver::Version::parse(version) {
181        Ok(v) => v,
182        Err(_) => return false,
183    };
184    svm::install(&ver).await.is_ok()
185}
186
187/// Look up a solc binary by version string using `svm::version_binary()`.
188fn find_solc_binary(version: &str) -> Option<PathBuf> {
189    let path = svm::version_binary(version);
190    if path.is_file() {
191        return Some(path);
192    }
193    None
194}
195
196// ── Pragma parsing ────────────────────────────────────────────────────────
197
/// A parsed semver version (major.minor.patch).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}

impl SemVer {
    /// Parse a strict `X.Y.Z` string; any other shape yields `None`.
    fn parse(s: &str) -> Option<SemVer> {
        let mut fields = s.split('.');
        let major = fields.next()?.parse().ok()?;
        let minor = fields.next()?.parse().ok()?;
        let patch = fields.next()?.parse().ok()?;
        // Reject trailing components such as "1.2.3.4".
        if fields.next().is_some() {
            return None;
        }
        Some(SemVer { major, minor, patch })
    }
}

impl std::fmt::Display for SemVer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}

/// A version constraint from `pragma solidity`.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `0.8.26` — exact match
    Exact(SemVer),
    /// `^0.8.0` — in Solidity this means `>=0.8.0 <0.9.0`
    Caret(SemVer),
    /// `>=0.8.0` — at least this version
    Gte(SemVer),
    /// `>=0.6.2 <0.9.0` — half-open range (upper bound exclusive)
    Range(SemVer, SemVer),
}

impl std::fmt::Display for PragmaConstraint {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Exact(v) => write!(f, "={v}"),
            Self::Caret(v) => write!(f, "^{v}"),
            Self::Gte(v) => write!(f, ">={v}"),
            Self::Range(lo, hi) => write!(f, ">={lo} <{hi}"),
        }
    }
}
250
/// Resolve a Solidity import path to an absolute filesystem path.
///
/// Handles:
/// - relative imports (`./`, `../`) — resolved against the importing file's
///   directory,
/// - remapped imports — resolved using the **longest matching prefix**, the
///   rule solc itself applies, so overlapping remappings (e.g. `@oz/` and
///   `@oz/upgradeable/`) behave predictably regardless of list order,
/// - bare imports — resolved against the project root.
fn resolve_import_to_abs(
    project_root: &Path,
    importer_abs: &Path,
    import_path: &str,
    remappings: &[String],
) -> Option<PathBuf> {
    // Relative imports resolve against the importer's directory.
    if import_path.starts_with("./") || import_path.starts_with("../") {
        let base = importer_abs.parent()?;
        return Some(lexical_normalize(&base.join(import_path)));
    }

    // Remapped imports: pick the remapping with the longest matching prefix.
    // (Previously the first match won, which diverges from solc's semantics
    // when one remapping prefix is a prefix of another.)
    let mut best: Option<(&str, &str)> = None;
    for remap in remappings {
        let Some((prefix, target)) = remap.split_once('=') else {
            continue; // malformed entry without '='
        };
        if prefix.is_empty() || target.is_empty() || !import_path.starts_with(prefix) {
            continue;
        }
        if best.map_or(true, |(p, _)| prefix.len() > p.len()) {
            best = Some((prefix, target));
        }
    }
    if let Some((prefix, target)) = best {
        let suffix = &import_path[prefix.len()..];
        return Some(lexical_normalize(
            &project_root.join(format!("{target}{suffix}")),
        ));
    }

    // Bare imports fall back to the project root.
    Some(lexical_normalize(&project_root.join(import_path)))
}

/// Normalize a path by resolving `.` and `..` components lexically
/// (without hitting the filesystem).
fn lexical_normalize(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for comp in path.components() {
        match comp {
            std::path::Component::CurDir => {}
            std::path::Component::ParentDir => {
                // pop() is a no-op at the root, silently absorbing excess "..".
                out.pop();
            }
            _ => out.push(comp.as_os_str()),
        }
    }
    out
}
298
299/// Collect pragma constraints from a file and all its transitive imports.
300///
301/// Walks the import graph using simple string scanning (no tree-sitter),
302/// resolving import paths via remappings.  Returns all pragmas found so
303/// that the caller can pick a solc version satisfying every file.
304fn collect_import_pragmas(
305    file_path: &Path,
306    project_root: &Path,
307    remappings: &[String],
308) -> Vec<PragmaConstraint> {
309    let mut pragmas = Vec::new();
310    let mut visited = HashSet::new();
311    collect_import_pragmas_recursive(
312        file_path,
313        project_root,
314        remappings,
315        &mut pragmas,
316        &mut visited,
317    );
318    pragmas
319}
320
321fn collect_import_pragmas_recursive(
322    file_path: &Path,
323    project_root: &Path,
324    remappings: &[String],
325    pragmas: &mut Vec<PragmaConstraint>,
326    visited: &mut HashSet<PathBuf>,
327) {
328    if !visited.insert(file_path.to_path_buf()) {
329        return;
330    }
331    let source = match std::fs::read_to_string(file_path) {
332        Ok(s) => s,
333        Err(_) => return,
334    };
335    if let Some(pragma) = parse_pragma(&source) {
336        pragmas.push(pragma);
337    }
338    for imp in links::ts_find_imports(source.as_bytes()) {
339        if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
340            collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
341        }
342    }
343}
344
345/// Tighten a set of pragma constraints into a single constraint that
346/// satisfies all of them.
347///
348/// Rules:
349/// - An exact pragma always wins (if any file requires `0.8.23`, we must
350///   use exactly `0.8.23`).
351/// - Multiple exact pragmas that disagree → returns the first one (solc
352///   will error anyway, but we still try).
353/// - For wildcard pragmas, compute the intersection range and return it.
354fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
355    if pragmas.is_empty() {
356        return None;
357    }
358
359    // If any pragma is Exact, that version must be used.
360    for p in pragmas {
361        if matches!(p, PragmaConstraint::Exact(_)) {
362            return Some(p.clone());
363        }
364    }
365
366    // Normalize every constraint to a (lower, upper) range, then intersect.
367    let mut lower = SemVer {
368        major: 0,
369        minor: 0,
370        patch: 0,
371    };
372    let mut upper: Option<SemVer> = None;
373
374    for p in pragmas {
375        let (lo, hi) = constraint_to_range(p);
376        if lo > lower {
377            lower = lo;
378        }
379        if let Some(hi) = hi {
380            upper = Some(match upper {
381                Some(cur) if hi < cur => hi,
382                Some(cur) => cur,
383                None => hi,
384            });
385        }
386    }
387
388    match upper {
389        Some(hi) if lower >= hi => None, // empty intersection
390        Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
391        None => Some(PragmaConstraint::Gte(lower)),
392    }
393}
394
395/// Convert a pragma constraint to an inclusive lower bound and optional
396/// exclusive upper bound.
397fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
398    match constraint {
399        PragmaConstraint::Exact(v) => (
400            v.clone(),
401            Some(SemVer {
402                major: v.major,
403                minor: v.minor,
404                patch: v.patch + 1,
405            }),
406        ),
407        PragmaConstraint::Caret(v) => (
408            v.clone(),
409            Some(SemVer {
410                major: v.major,
411                minor: v.minor + 1,
412                patch: 0,
413            }),
414        ),
415        PragmaConstraint::Gte(v) => (v.clone(), None),
416        PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
417    }
418}
419
420/// Parse `pragma solidity <constraint>;` from Solidity source.
421///
422/// Handles:
423/// - `pragma solidity 0.8.26;` → Exact
424/// - `pragma solidity ^0.8.0;` → Caret
425/// - `pragma solidity >=0.8.0;` → Gte
426/// - `pragma solidity >=0.6.2 <0.9.0;` → Range
427pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
428    // Find the pragma line — only scan the first ~20 lines for performance
429    let pragma_line = source
430        .lines()
431        .take(20)
432        .find(|line| line.trim_start().starts_with("pragma solidity"))?;
433
434    // Extract the constraint string between "pragma solidity" and ";"
435    let after_keyword = pragma_line
436        .trim_start()
437        .strip_prefix("pragma solidity")?
438        .trim();
439    let constraint_str = after_keyword
440        .strip_suffix(';')
441        .unwrap_or(after_keyword)
442        .trim();
443
444    if constraint_str.is_empty() {
445        return None;
446    }
447
448    // Range: >=X.Y.Z <A.B.C
449    if let Some(rest) = constraint_str.strip_prefix(">=") {
450        let rest = rest.trim();
451        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
452            let lower_str = rest[..space_idx].trim();
453            let upper_part = rest[space_idx..].trim();
454            if let Some(upper_str) = upper_part.strip_prefix('<') {
455                let upper_str = upper_str.trim();
456                if let (Some(lower), Some(upper)) =
457                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
458                {
459                    return Some(PragmaConstraint::Range(lower, upper));
460                }
461            }
462        }
463        // Just >=X.Y.Z
464        if let Some(ver) = SemVer::parse(rest) {
465            return Some(PragmaConstraint::Gte(ver));
466        }
467    }
468
469    // Caret: ^X.Y.Z
470    if let Some(rest) = constraint_str.strip_prefix('^')
471        && let Some(ver) = SemVer::parse(rest.trim())
472    {
473        return Some(PragmaConstraint::Caret(ver));
474    }
475
476    // Exact: X.Y.Z
477    if let Some(ver) = SemVer::parse(constraint_str) {
478        return Some(PragmaConstraint::Exact(ver));
479    }
480
481    None
482}
483
/// List installed solc versions (cached — use `get_installed_versions()` internally).
///
/// Thin public wrapper over the process-wide cached list; returns a clone.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
488
489/// Scan the filesystem for installed solc versions using `svm::installed_versions()`.
490///
491/// Returns sorted, deduplicated versions (ascending).
492fn scan_installed_versions() -> Vec<SemVer> {
493    svm::installed_versions()
494        .unwrap_or_default()
495        .iter()
496        .map(semver_to_local)
497        .collect()
498}
499
500/// Find the best matching installed version for a pragma constraint.
501///
502/// For all constraint types, picks the **latest** installed version that
503/// satisfies the constraint.
504pub fn find_matching_version(
505    constraint: &PragmaConstraint,
506    installed: &[SemVer],
507) -> Option<SemVer> {
508    let candidates: Vec<&SemVer> = installed
509        .iter()
510        .filter(|v| version_satisfies(v, constraint))
511        .collect();
512
513    // Pick the latest (last, since installed is sorted ascending)
514    candidates.last().cloned().cloned()
515}
516
517/// Check if a version satisfies a pragma constraint.
518pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
519    match constraint {
520        PragmaConstraint::Exact(v) => version == v,
521        PragmaConstraint::Caret(v) => {
522            // Solidity caret: ^0.8.0 means >=0.8.0 <0.9.0
523            // i.e. same major, next minor is the ceiling
524            version.major == v.major && version >= v && version.minor < v.minor + 1
525        }
526        PragmaConstraint::Gte(v) => version >= v,
527        PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
528    }
529}
530
531/// Fetch remappings by running `forge remappings` in the project root.
532///
533/// Falls back to config remappings, then to an empty list.
534pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
535    // Try `forge remappings` first — it merges all sources (foundry.toml,
536    // remappings.txt, auto-detected libs).
537    let output = Command::new("forge")
538        .arg("remappings")
539        .current_dir(&config.root)
540        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
541        .output()
542        .await;
543
544    if let Ok(output) = output
545        && output.status.success()
546    {
547        let stdout = String::from_utf8_lossy(&output.stdout);
548        let remappings: Vec<String> = stdout
549            .lines()
550            .filter(|l| !l.trim().is_empty())
551            .map(|l| l.to_string())
552            .collect();
553        if !remappings.is_empty() {
554            return remappings;
555        }
556    }
557
558    // Fall back to remappings from foundry.toml
559    if !config.remappings.is_empty() {
560        return config.remappings.clone();
561    }
562
563    // Fall back to remappings.txt at project root
564    let remappings_txt = config.root.join("remappings.txt");
565    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
566        return content
567            .lines()
568            .filter(|l| !l.trim().is_empty())
569            .map(|l| l.to_string())
570            .collect();
571    }
572
573    Vec::new()
574}
575
576/// Build the `--standard-json` input for solc.
577///
578/// Reads compiler settings from the `FoundryConfig` (parsed from `foundry.toml`)
579/// and maps them to the solc standard JSON `settings` object:
580///
581/// - `via_ir` → `settings.viaIR`
582/// - `evm_version` → `settings.evmVersion`
583///
584/// Note: `optimizer` and `evm.gasEstimates` are intentionally excluded.
585/// The optimizer adds ~3s and doesn't affect AST/doc quality.
586/// Gas estimates force solc through full EVM codegen — benchmarking on
587/// a 510-file project showed 56s with vs 6s without (88% of cost).
588/// Build a standard-json input for a single-file solc compilation.
589///
590/// Only the entry file is listed in `sources`.  Solc discovers imported
591/// files on its own via the import callback and reads them from disk.
592///
593/// When `source_content` is provided, the text is inlined as `"content"`
594/// so solc compiles the editor's live buffer instead of the on-disk version.
595/// When `None`, solc reads the file from disk via `"urls"`.
596pub fn build_standard_json_input(
597    file_path: &str,
598    remappings: &[String],
599    config: &FoundryConfig,
600    source_content: Option<&str>,
601) -> Value {
602    let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
603
604    let mut settings = json!({
605        "remappings": remappings,
606        "outputSelection": {
607            "*": {
608                "*": contract_outputs,
609                "": ["ast"]
610            }
611        }
612    });
613
614    if config.via_ir {
615        settings["viaIR"] = json!(true);
616    }
617
618    // EVM version
619    if let Some(ref evm_version) = config.evm_version {
620        settings["evmVersion"] = json!(evm_version);
621    }
622
623    let source_value = match source_content {
624        Some(content) => json!({ "content": content }),
625        None => json!({ "urls": [file_path] }),
626    };
627
628    // Build sources manually to avoid json! macro key interpolation issues.
629    let mut sources = serde_json::Map::new();
630    sources.insert(file_path.to_string(), source_value);
631
632    json!({
633        "language": "Solidity",
634        "sources": sources,
635        "settings": settings
636    })
637}
638
/// Run `solc --standard-json` and return the parsed output.
///
/// The input is also persisted via `project_cache::save_last_solc_input`
/// (best-effort — the result is deliberately ignored) so the last compiler
/// input can be inspected for debugging.
///
/// # Errors
/// Returns `RunnerError` if spawning solc fails, writing to its stdin fails,
/// solc produces no stdout (stderr is folded into the error message), or
/// stdout is not valid JSON.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    // Best-effort debug snapshot of the exact input; errors are ignored.
    let _ = crate::project_cache::save_last_solc_input(project_root, input);
    let input_str = serde_json::to_string(input)?;

    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // Write the standard-json input to solc's stdin.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
        // Drop stdin to close it, signaling EOF to solc.
        // (The handle leaves scope here, before we wait on the child —
        // without this, solc would block forever waiting for more input.)
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    // solc writes JSON to stdout even on errors (errors are in the JSON)
    let stdout = String::from_utf8_lossy(&output.stdout);
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
683
/// Normalize raw solc `--standard-json` output into the canonical shape.
///
/// Solc's native shape is already close to canonical:
/// - `sources[path] = { id, ast }` — kept as-is
/// - `contracts[path][name] = { abi, evm, ... }` — kept as-is
/// - `errors` — kept as-is (defaults to `[]` if absent)
///
/// When `project_root` is provided, relative source paths are resolved to
/// absolute paths so that downstream code (goto, hover, links) can map AST
/// paths back to `file://` URIs. This is necessary because `solc_ast()`
/// passes a relative path to solc (to fix import resolution), and solc then
/// returns relative paths in the AST `absolutePath` and source keys.
///
/// Constructs `source_id_to_path` from source IDs for cross-file resolution.
///
/// Takes ownership and uses `Value::take()` to move AST nodes in-place,
/// avoiding expensive clones of multi-MB AST data.
///
/// Also resolves `absolutePath` on nested `ImportDirective` nodes so that
/// goto-definition on import strings works regardless of CWD.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    /// Walk an AST node tree and resolve `absolutePath` on `ImportDirective` nodes.
    ///
    /// Only descends into `"nodes"` arrays (top-level AST children); import
    /// directives are expected as direct children of the `SourceUnit`, so no
    /// deeper traversal is performed.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                // unwrap is safe: get("absolutePath") just succeeded, so the
                // node is an object.
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        // Recurse into "nodes" array (top-level AST children)
        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Move errors out (defaults to [] if absent)
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Helper: resolve a path to absolute using the project root.
    // If the path is already absolute or no project root is given, return as-is.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    // Sources: rekey with absolute paths and update AST absolutePath fields.
    // Also build source_id_to_path for cross-file resolution.
    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Collect keys first to avoid borrow issues
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                // Update the AST absolutePath field to match, and resolve
                // absolutePath on nested ImportDirective nodes so that
                // goto-definition works regardless of CWD.
                if let Some(ast) = source_data.get_mut("ast") {
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // `id` is a JSON number; Value::to_string() renders its
                // decimal text, which becomes the map key (e.g. "5").
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contracts: rekey with absolute paths
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    // Construct source_id_to_path for cross-file resolution
    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
808
809/// Normalize forge `build --json --ast` output into the canonical shape.
810///
811/// Forge wraps data in arrays with metadata:
812/// - `sources[path] = [{ source_file: { id, ast }, build_id, profile, version }]`
813/// - `contracts[path][name] = [{ contract: { abi, evm, ... }, build_id, profile, version }]`
814/// - `build_infos = [{ source_id_to_path: { ... } }]`
815///
816/// This unwraps to the canonical flat shape:
817/// - `sources[path] = { id, ast }`
818/// - `contracts[path][name] = { abi, evm, ... }`
819/// - `source_id_to_path = { ... }`
820pub fn normalize_forge_output(mut forge_output: Value) -> Value {
821    let mut result = Map::new();
822
823    // Move errors out
824    let errors = forge_output
825        .get_mut("errors")
826        .map(Value::take)
827        .unwrap_or_else(|| json!([]));
828    result.insert("errors".to_string(), errors);
829
830    // Unwrap sources: [{ source_file: { id, ast } }] → { id, ast }
831    let mut normalized_sources = Map::new();
832    if let Some(sources) = forge_output
833        .get_mut("sources")
834        .and_then(|s| s.as_object_mut())
835    {
836        for (path, entries) in sources.iter_mut() {
837            if let Some(arr) = entries.as_array_mut()
838                && let Some(first) = arr.first_mut()
839                && let Some(sf) = first.get_mut("source_file")
840            {
841                normalized_sources.insert(path.clone(), sf.take());
842            }
843        }
844    }
845    result.insert("sources".to_string(), Value::Object(normalized_sources));
846
847    // Unwrap contracts: [{ contract: { ... } }] → { ... }
848    let mut normalized_contracts = Map::new();
849    if let Some(contracts) = forge_output
850        .get_mut("contracts")
851        .and_then(|c| c.as_object_mut())
852    {
853        for (path, names) in contracts.iter_mut() {
854            let mut path_contracts = Map::new();
855            if let Some(names_obj) = names.as_object_mut() {
856                for (name, entries) in names_obj.iter_mut() {
857                    if let Some(arr) = entries.as_array_mut()
858                        && let Some(first) = arr.first_mut()
859                        && let Some(contract) = first.get_mut("contract")
860                    {
861                        path_contracts.insert(name.clone(), contract.take());
862                    }
863                }
864            }
865            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
866        }
867    }
868    result.insert("contracts".to_string(), Value::Object(normalized_contracts));
869
870    // Extract source_id_to_path from build_infos
871    let source_id_to_path = forge_output
872        .get_mut("build_infos")
873        .and_then(|bi| bi.as_array_mut())
874        .and_then(|arr| arr.first_mut())
875        .and_then(|info| info.get_mut("source_id_to_path"))
876        .map(Value::take)
877        .unwrap_or_else(|| json!({}));
878    result.insert("source_id_to_path".to_string(), source_id_to_path);
879
880    Value::Object(result)
881}
882
883/// Run solc for a file and return normalized output.
884///
885/// This is the main entry point used by the LSP. Reads the file source
886/// to detect the pragma version and resolve the correct solc binary.
887///
/// When `source_content` is provided, the text is inlined so solc compiles
/// the editor's live buffer instead of reading from disk.
891pub async fn solc_ast(
892    file_path: &str,
893    config: &FoundryConfig,
894    client: Option<&tower_lsp::Client>,
895    source_content: Option<&str>,
896) -> Result<Value, RunnerError> {
897    let remappings = resolve_remappings(config).await;
898
899    // Collect pragma constraints from the file and all its transitive imports
900    // so we pick a solc version that satisfies the entire dependency graph.
901    // This is a synchronous recursive FS crawl — run it on the blocking pool
902    // so we don't stall the tokio async runtime on large projects.
903    let file_abs = Path::new(file_path).to_path_buf();
904    let config_root = config.root.clone();
905    let remappings_clone = remappings.clone();
906    let pragmas = tokio::task::spawn_blocking(move || {
907        collect_import_pragmas(&file_abs, &config_root, &remappings_clone)
908    })
909    .await
910    .unwrap_or_default();
911    let constraint = tightest_constraint(&pragmas);
912    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
913
914    // Solc's import resolver fails when sources use absolute paths — it resolves
915    // 0 transitive imports, causing "No matching declaration found" errors for
916    // inherited members. Convert to a path relative to the project root so solc
917    // can properly resolve `src/`, `lib/`, and remapped imports.
918    let rel_path = Path::new(file_path)
919        .strip_prefix(&config.root)
920        .map(|p| p.to_string_lossy().into_owned())
921        .unwrap_or_else(|_| file_path.to_string());
922
923    let input = build_standard_json_input(&rel_path, &remappings, config, source_content);
924    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;
925
926    Ok(normalize_solc_output(raw_output, Some(&config.root)))
927}
928
929/// Run solc for build diagnostics (same output, just used for error extraction).
930pub async fn solc_build(
931    file_path: &str,
932    config: &FoundryConfig,
933    client: Option<&tower_lsp::Client>,
934) -> Result<Value, RunnerError> {
935    solc_ast(file_path, config, client, None).await
936}
937
938// ── Project-wide indexing ──────────────────────────────────────────────────
939
/// Discover the project's own source files by walking only the directories
/// configured in `foundry.toml`: `src`, `test`, and `script`.
///
/// This mirrors how Forge discovers compilable files — it never walks
/// directories outside these three (plus libs).  Stray directories like
/// `certora/` or `hardhat/` are ignored, preventing broken imports from
/// poisoning the solc batch.
///
/// Skips hidden directories (starting with `.`) and those listed in
/// `DISCOVER_SKIP_DIRS` (build artifacts).  Includes `.t.sol` (test) and
/// `.s.sol` (script) files so that find-references and rename work across
/// the full project.
957pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
958    discover_source_files_inner(config, false)
959}
960
961/// Discover only the `src` directory files (no test, no script).
962///
963/// Used as the seed set for phase-1 of two-phase project indexing, where
964/// we want to compile only the production source closure first for fast
965/// time-to-first-reference.
966pub fn discover_src_only_files(config: &FoundryConfig) -> Vec<PathBuf> {
967    let root = &config.root;
968    if !root.is_dir() {
969        return Vec::new();
970    }
971    let mut files = Vec::new();
972    let dir = root.join(&config.sources_dir);
973    if dir.is_dir() {
974        discover_recursive(&dir, &[], &mut files);
975    }
976    files.sort();
977    files
978}
979
980/// Discover the compilation closure seeded only from `src` files.
981///
982/// Like [`discover_compilation_closure`] but seeds only from
983/// [`discover_src_only_files`] instead of all project directories.
984/// This produces the minimal set of files needed to compile the
985/// production source code, excluding test and script files.
986pub fn discover_src_only_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
987    let seeds = discover_src_only_files(config);
988    let mut visited: HashSet<PathBuf> = HashSet::new();
989    let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
990
991    while let Some(file) = queue.pop_front() {
992        if !visited.insert(file.clone()) {
993            continue;
994        }
995        let source = match std::fs::read_to_string(&file) {
996            Ok(s) => s,
997            Err(_) => continue,
998        };
999        for imp in links::ts_find_imports(source.as_bytes()) {
1000            if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1001                if abs.exists() && !visited.contains(&abs) {
1002                    queue.push_back(abs);
1003                }
1004            }
1005        }
1006    }
1007
1008    let mut result: Vec<PathBuf> = visited.into_iter().collect();
1009    result.sort();
1010    result
1011}
1012
1013/// Discover source files including library directories.
1014///
1015/// When `fullProjectScan` is enabled, this includes files from the configured
1016/// `libs` directories (e.g. `dependencies/`, `node_modules/`).  Files with
1017/// incompatible pragma versions are handled by the error-driven retry loop
1018/// in [`solc_project_index_from_files`].
1019pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
1020    discover_source_files_inner(config, true)
1021}
1022
1023fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
1024    let root = &config.root;
1025    if !root.is_dir() {
1026        return Vec::new();
1027    }
1028
1029    let mut files = Vec::new();
1030    let no_skip: &[String] = &[];
1031
1032    // Walk only the configured source directories (src, test, script).
1033    // This matches Forge's behaviour: only files under these three directories
1034    // are considered project sources.  Directories like `certora/`, `hardhat/`,
1035    // etc. are never seeded.
1036    for dir_name in [&config.sources_dir, &config.test_dir, &config.script_dir] {
1037        let dir = root.join(dir_name);
1038        if dir.is_dir() {
1039            discover_recursive(&dir, no_skip, &mut files);
1040        }
1041    }
1042
1043    // When include_libs is requested, also walk lib directories.
1044    if include_libs {
1045        for lib_name in &config.libs {
1046            let lib_dir = root.join(lib_name);
1047            if lib_dir.is_dir() {
1048                discover_recursive(&lib_dir, no_skip, &mut files);
1049            }
1050        }
1051    }
1052
1053    files.sort();
1054    files
1055}
1056
1057/// Discover the true compilation closure by tracing imports from the
1058/// project's own source files (`src/`, `test/`, `script/`, and any other
1059/// non-lib top-level directories).
1060///
1061/// Starting from every `.sol` file returned by [`discover_source_files`]
1062/// (project files only, no lib dirs), this BFS-walks the import graph using
1063/// the provided remappings to resolve each `import` statement to an absolute
1064/// path.  It adds every reachable file — including lib files that are actually
1065/// imported — to the result set.
1066///
1067/// Files whose imports cannot be resolved (missing external deps that aren't
1068/// in this project) are silently skipped at that edge; the importer is still
1069/// included.
1070///
1071/// This produces a much smaller, self-consistent set than scanning all files
1072/// in lib directories, and avoids pulling in lib files that have broken
1073/// transitive deps (e.g. chainlink automation files that need `@eth-optimism`
1074/// which is not vendored here).
1075pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
1076    // Seed: all project source files (no lib dirs).
1077    let seeds = discover_source_files(config);
1078    let mut visited: HashSet<PathBuf> = HashSet::new();
1079    let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
1080
1081    while let Some(file) = queue.pop_front() {
1082        if !visited.insert(file.clone()) {
1083            continue;
1084        }
1085        let source = match std::fs::read_to_string(&file) {
1086            Ok(s) => s,
1087            Err(_) => continue,
1088        };
1089        for imp in links::ts_find_imports(source.as_bytes()) {
1090            if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1091                if abs.exists() && !visited.contains(&abs) {
1092                    queue.push_back(abs);
1093                }
1094            }
1095        }
1096    }
1097
1098    let mut result: Vec<PathBuf> = visited.into_iter().collect();
1099    result.sort();
1100    result
1101}
1102
/// Directory names that are always skipped during source file discovery,
/// regardless of the `include_libs` setting.  These hold build artifacts
/// and generated output, never user-authored sources.
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];
1106
1107fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
1108    let entries = match std::fs::read_dir(dir) {
1109        Ok(e) => e,
1110        Err(_) => return,
1111    };
1112    for entry in entries.flatten() {
1113        let path = entry.path();
1114        if path.is_dir() {
1115            if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
1116                // Skip hidden directories (e.g., .git, .github)
1117                if name.starts_with('.') {
1118                    continue;
1119                }
1120                // Skip build artifact directories
1121                if DISCOVER_SKIP_DIRS.contains(&name) {
1122                    continue;
1123                }
1124                // Skip user-configured library directories (unless include_libs)
1125                if skip_libs.iter().any(|lib| lib == name) {
1126                    continue;
1127                }
1128            }
1129            discover_recursive(&path, skip_libs, files);
1130        } else if let Some(name) = path.file_name().and_then(|n| n.to_str())
1131            && name.ends_with(".sol")
1132        {
1133            files.push(path);
1134        }
1135    }
1136}
1137
1138/// Build a `--standard-json` input that compiles all given source files at once.
1139///
1140/// Each file is added as a source entry with a `urls` field (relative to project root).
1141/// This produces a single AST covering the entire project in one solc invocation.
1142///
1143/// See [`build_standard_json_input`] for rationale on excluded settings.
1144pub fn build_batch_standard_json_input(
1145    source_files: &[PathBuf],
1146    remappings: &[String],
1147    config: &FoundryConfig,
1148) -> Value {
1149    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
1150}
1151
1152/// Build a batch standard-json input for solc.
1153///
1154/// When `content_cache` is provided, files whose URI string appears as a key
1155/// are included with `"content"` (in-memory source).  Files not in the cache
1156/// fall back to `"urls"` (solc reads from disk).
1157///
1158/// This allows the re-index after a rename to feed solc the updated import
1159/// paths from our text_cache without requiring the editor to have flushed
1160/// them to disk yet.
1161pub fn build_batch_standard_json_input_with_cache(
1162    source_files: &[PathBuf],
1163    remappings: &[String],
1164    config: &FoundryConfig,
1165    content_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1166) -> Value {
1167    let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
1168
1169    let mut settings = json!({
1170        "remappings": remappings,
1171        "outputSelection": {
1172            "*": {
1173                "*": contract_outputs,
1174                "": ["ast"]
1175            }
1176        }
1177    });
1178
1179    if config.via_ir {
1180        settings["viaIR"] = json!(true);
1181    }
1182    if let Some(ref evm_version) = config.evm_version {
1183        settings["evmVersion"] = json!(evm_version);
1184    }
1185
1186    let mut sources = serde_json::Map::new();
1187    for file in source_files {
1188        let rel_path = file
1189            .strip_prefix(&config.root)
1190            .map(|p| p.to_string_lossy().into_owned())
1191            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1192
1193        // Try to use cached content so solc doesn't need to read from disk.
1194        let cached_content = content_cache.and_then(|cache| {
1195            let uri = Url::from_file_path(file).ok()?;
1196            cache.get(uri.as_str()).map(|(_, c)| c.as_str())
1197        });
1198
1199        if let Some(content) = cached_content {
1200            sources.insert(rel_path, json!({ "content": content }));
1201        } else {
1202            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1203        }
1204    }
1205
1206    json!({
1207        "language": "Solidity",
1208        "sources": sources,
1209        "settings": settings
1210    })
1211}
1212
1213/// Build an AST-only batch standard-json input for sub-cache builds.
1214///
1215/// Unlike the full batch input, this omits all codegen-affecting settings
1216/// (`viaIR`, `evmVersion`, optimizer) and only requests the AST — no
1217/// `devdoc`, `userdoc`, or `evm.methodIdentifiers`.  This is significantly
1218/// faster because solc skips type-checking contract outputs and codegen.
1219///
1220/// Sub-caches only need the AST for cross-file reference lookup (node IDs,
1221/// `referencedDeclaration`, source locations).
1222pub fn build_batch_standard_json_input_ast_only(
1223    source_files: &[PathBuf],
1224    remappings: &[String],
1225    root: &Path,
1226) -> Value {
1227    let settings = json!({
1228        "remappings": remappings,
1229        "outputSelection": {
1230            "*": {
1231                "": ["ast"]
1232            }
1233        }
1234    });
1235
1236    let mut sources = serde_json::Map::new();
1237    for file in source_files {
1238        let rel_path = file
1239            .strip_prefix(root)
1240            .map(|p| p.to_string_lossy().into_owned())
1241            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1242        sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1243    }
1244
1245    json!({
1246        "language": "Solidity",
1247        "sources": sources,
1248        "settings": settings
1249    })
1250}
1251
1252/// Build a parse-only standard-json input (``stopAfter: "parsing"``).
1253///
1254/// Unlike the full batch input this mode stops before import resolution and
1255/// type-checking.  That means:
1256///
1257/// * No version 5333 errors cascade from imported incompatible files — the
1258///   compatible files are NOT fetched from disk as imports.
1259/// * The resulting ASTs contain all declaration nodes and local
1260///   ``referencedDeclaration`` IDs but **not** cross-file resolved IDs.
1261/// * Only ``ast`` output is requested; contract outputs (abi, gas …) are
1262///   omitted because they require type-checking.
1263///
1264/// This is used for the compatible-file batch in the mixed-version project
1265/// index so we can get parse-time ASTs for all project/lib files that satisfy
1266/// the project pragma, without being blocked by imports into incompatible lib
1267/// files.
1268pub fn build_parse_only_json_input(
1269    source_files: &[PathBuf],
1270    remappings: &[String],
1271    config: &FoundryConfig,
1272) -> Value {
1273    let settings = json!({
1274        "stopAfter": "parsing",
1275        "remappings": remappings,
1276        "outputSelection": {
1277            "*": {
1278                "": ["ast"]
1279            }
1280        }
1281    });
1282
1283    let mut sources = serde_json::Map::new();
1284    for file in source_files {
1285        let rel_path = file
1286            .strip_prefix(&config.root)
1287            .map(|p| p.to_string_lossy().into_owned())
1288            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1289        sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1290    }
1291
1292    json!({
1293        "language": "Solidity",
1294        "sources": sources,
1295        "settings": settings
1296    })
1297}
1298
1299/// Run a project-wide solc compilation and return normalized output.
1300///
1301/// Discovers all source files, compiles them in a single `solc --standard-json`
1302/// invocation, and returns the normalized AST data.
1303///
1304/// When `text_cache` is provided, files whose URI string appears as a key
1305/// are fed to solc via `"content"` (in-memory) rather than `"urls"` (disk).
1306/// This ensures the re-index after a rename uses the updated import paths
1307/// from our cache, even if the editor hasn't flushed them to disk yet.
1308pub async fn solc_project_index(
1309    config: &FoundryConfig,
1310    client: Option<&tower_lsp::Client>,
1311    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1312) -> Result<Value, RunnerError> {
1313    // Resolve remappings first — needed for import tracing.
1314    let remappings = resolve_remappings(config).await;
1315
1316    // Trace imports from project source files to find the true compilation
1317    // closure.  This avoids pulling in lib files that are never imported by
1318    // the project (e.g. chainlink automation files that need @eth-optimism,
1319    // which isn't vendored here).
1320    let source_files = discover_compilation_closure(config, &remappings);
1321    if source_files.is_empty() {
1322        return Err(RunnerError::CommandError(std::io::Error::other(
1323            "no source files found for project index",
1324        )));
1325    }
1326
1327    solc_project_index_from_files(config, client, text_cache, &source_files).await
1328}
1329
1330/// AST-only project index for sub-cache builds.
1331///
1332/// Identical to [`solc_project_index`] but requests only AST output —
1333/// no `devdoc`, `userdoc`, or `evm.methodIdentifiers`.  Also omits
1334/// `viaIR`, `evmVersion`, and optimizer settings since they only affect
1335/// codegen (which is skipped when no contract outputs are requested).
1336///
1337/// This is significantly faster because solc skips all codegen work.
1338/// "Stack too deep" errors cannot occur in AST-only mode.
1339pub async fn solc_project_index_ast_only(
1340    config: &FoundryConfig,
1341    client: Option<&tower_lsp::Client>,
1342) -> Result<Value, RunnerError> {
1343    let remappings = resolve_remappings(config).await;
1344    let source_files = discover_compilation_closure(config, &remappings);
1345    if source_files.is_empty() {
1346        return Err(RunnerError::CommandError(std::io::Error::other(
1347            "no source files found for project index",
1348        )));
1349    }
1350    solc_project_index_from_files_ast_only(config, client, &source_files).await
1351}
1352
1353/// AST-only compile over a list of source files.
1354///
1355/// Like [`solc_project_index_from_files`] but uses
1356/// [`build_batch_standard_json_input_ast_only`] — no codegen settings,
1357/// no contract outputs.
1358async fn solc_project_index_from_files_ast_only(
1359    config: &FoundryConfig,
1360    client: Option<&tower_lsp::Client>,
1361    source_files: &[PathBuf],
1362) -> Result<Value, RunnerError> {
1363    if source_files.is_empty() {
1364        return Err(RunnerError::CommandError(std::io::Error::other(
1365            "no source files found for AST-only project index",
1366        )));
1367    }
1368
1369    let remappings = resolve_remappings(config).await;
1370
1371    let project_version: Option<SemVer> =
1372        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
1373    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
1374        Some(PragmaConstraint::Exact(v.clone()))
1375    } else {
1376        source_files.iter().find_map(|f| {
1377            std::fs::read_to_string(f)
1378                .ok()
1379                .and_then(|src| parse_pragma(&src))
1380        })
1381    };
1382    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
1383
1384    // Pre-scan pragmas to separate compatible vs incompatible files.
1385    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
1386        let mut compat = Vec::with_capacity(source_files.len());
1387        let mut incompat = Vec::new();
1388        for file in source_files {
1389            let is_compatible = std::fs::read_to_string(file)
1390                .ok()
1391                .and_then(|src| parse_pragma(&src))
1392                .map(|pragma| version_satisfies(ver, &pragma))
1393                .unwrap_or(true);
1394            if is_compatible {
1395                compat.push(file.clone());
1396            } else {
1397                incompat.push(file.clone());
1398            }
1399        }
1400        (compat, incompat)
1401    } else {
1402        (source_files.to_vec(), Vec::new())
1403    };
1404
1405    if !incompatible_files.is_empty() {
1406        if let Some(c) = client {
1407            c.log_message(
1408                tower_lsp::lsp_types::MessageType::INFO,
1409                format!(
1410                    "project index: {} compatible, {} incompatible with solc {}",
1411                    compatible_files.len(),
1412                    incompatible_files.len(),
1413                    project_version
1414                        .as_ref()
1415                        .map(|v| v.to_string())
1416                        .unwrap_or_default(),
1417                ),
1418            )
1419            .await;
1420        }
1421    }
1422
1423    let mut result = if compatible_files.is_empty() {
1424        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
1425    } else {
1426        let input =
1427            build_batch_standard_json_input_ast_only(&compatible_files, &remappings, &config.root);
1428        let raw = run_solc(&solc_binary, &input, &config.root).await?;
1429        normalize_solc_output(raw, Some(&config.root))
1430    };
1431
1432    if incompatible_files.is_empty() {
1433        return Ok(result);
1434    }
1435
1436    // Compile incompatible files individually with their own solc versions.
1437    for file in &incompatible_files {
1438        let pragma = std::fs::read_to_string(file)
1439            .ok()
1440            .and_then(|src| parse_pragma(&src));
1441        let file_binary = resolve_solc_binary(config, pragma.as_ref(), client).await;
1442        let input =
1443            build_batch_standard_json_input_ast_only(&[file.clone()], &remappings, &config.root);
1444        if let Ok(raw) = run_solc(&file_binary, &input, &config.root).await {
1445            let normalized = normalize_solc_output(raw, Some(&config.root));
1446            merge_normalized_outputs(&mut result, normalized);
1447        }
1448    }
1449
1450    if let Some(c) = client {
1451        let total = result
1452            .get("sources")
1453            .and_then(|s| s.as_object())
1454            .map_or(0, |obj| obj.len());
1455        c.log_message(
1456            tower_lsp::lsp_types::MessageType::INFO,
1457            format!(
1458                "project index: compiled {} files ({} needed different solc version)",
1459                total,
1460                incompatible_files.len(),
1461            ),
1462        )
1463        .await;
1464    }
1465
1466    Ok(result)
1467}
1468
1469/// Run a scoped project-index compile over a selected file list.
1470///
1471/// This is intended for aggressive incremental reindex strategies where only
1472/// a dependency-closure subset should be recompiled.
1473pub async fn solc_project_index_scoped(
1474    config: &FoundryConfig,
1475    client: Option<&tower_lsp::Client>,
1476    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1477    source_files: &[PathBuf],
1478) -> Result<Value, RunnerError> {
1479    if source_files.is_empty() {
1480        return Err(RunnerError::CommandError(std::io::Error::other(
1481            "no source files provided for scoped project index",
1482        )));
1483    }
1484
1485    solc_project_index_from_files(config, client, text_cache, source_files).await
1486}
1487
/// Extract source file paths from solc error code 5333 ("Source file requires
/// different compiler version") errors.  Returns the relative paths exactly
/// as they appear in `sourceLocation.file`.
#[cfg(test)]
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    solc_output
        .get("errors")
        .and_then(Value::as_array)
        .into_iter()
        .flatten()
        // Only version-mismatch errors carry code 5333.
        .filter(|err| err.get("errorCode").and_then(Value::as_str) == Some("5333"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(Value::as_str)
        })
        .map(str::to_string)
        .collect()
}
1509
/// Extract source file paths from solc error code 6275 ("Source not found")
/// errors.  Returns the relative paths of source files whose imports failed.
#[cfg(test)]
#[allow(dead_code)]
fn extract_import_error_files(solc_output: &Value) -> HashSet<String> {
    solc_output
        .get("errors")
        .and_then(Value::as_array)
        .into_iter()
        .flatten()
        // "Source not found" errors carry code 6275.
        .filter(|err| err.get("errorCode").and_then(Value::as_str) == Some("6275"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(Value::as_str)
        })
        .map(str::to_string)
        .collect()
}
1531
/// Build a reverse-import closure: given a set of files to exclude, find
/// every file that transitively imports any of them.  Those importers must
/// be excluded too, because solc would still resolve their imports from
/// disk and fail.
///
/// Returns the full exclusion set (seed files plus transitive importers).
#[cfg(test)]
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // Inverted import edges: imported file -> set of files importing it.
    let mut importers_of: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
    for importer in source_files {
        let Ok(bytes) = std::fs::read(importer) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(target) =
                resolve_import_to_abs(project_root, importer, &imp.path, remappings)
            {
                importers_of
                    .entry(target)
                    .or_default()
                    .insert(importer.clone());
            }
        }
    }

    // Flood-fill from the seed exclusions along the reverse edges.
    let mut excluded: HashSet<PathBuf> = exclude_abs.clone();
    let mut frontier: Vec<PathBuf> = exclude_abs.iter().cloned().collect();
    while let Some(node) = frontier.pop() {
        for importer in importers_of.get(&node).into_iter().flatten() {
            if excluded.insert(importer.clone()) {
                frontier.push(importer.clone());
            }
        }
    }

    excluded
}
1580
1581/// Merge two normalized solc outputs at the `Value` level.
1582///
1583/// Combines `sources`, `contracts`, `source_id_to_path`, and `errors` from
1584/// `other` into `base`.  Source IDs in `other` are remapped to avoid
1585/// collisions with `base`.
1586fn merge_normalized_outputs(base: &mut Value, other: Value) {
1587    // Merge sources (keyed by absolute path — no collisions across partitions).
1588    if let (Some(base_sources), Some(other_sources)) = (
1589        base.get_mut("sources").and_then(|s| s.as_object_mut()),
1590        other.get("sources").and_then(|s| s.as_object()),
1591    ) {
1592        // Find the max source ID in base so we can remap other's IDs.
1593        let max_base_id = base_sources
1594            .values()
1595            .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1596            .max()
1597            .map(|m| m + 1)
1598            .unwrap_or(0);
1599
1600        // Collect other's id -> path mappings for source_id_to_path.
1601        let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1602
1603        for (path, mut source_data) in other_sources.clone() {
1604            // Remap the source ID to avoid collisions.
1605            if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1606                let new_id = id + max_base_id;
1607                source_data
1608                    .as_object_mut()
1609                    .unwrap()
1610                    .insert("id".to_string(), json!(new_id));
1611                remapped_id_to_path.push((new_id.to_string(), path.clone()));
1612            }
1613            base_sources.insert(path, source_data);
1614        }
1615
1616        // Merge source_id_to_path.
1617        if let Some(base_id_map) = base
1618            .get_mut("source_id_to_path")
1619            .and_then(|m| m.as_object_mut())
1620        {
1621            for (id, path) in remapped_id_to_path {
1622                base_id_map.insert(id, json!(path));
1623            }
1624        }
1625    }
1626
1627    // Merge contracts.
1628    if let (Some(base_contracts), Some(other_contracts)) = (
1629        base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1630        other.get("contracts").and_then(|c| c.as_object()),
1631    ) {
1632        for (path, contract_data) in other_contracts {
1633            base_contracts.insert(path.clone(), contract_data.clone());
1634        }
1635    }
1636
1637    // Don't merge errors — the retry errors from incompatible files are noise.
1638    // The base already has the clean errors from the successful compilation.
1639}
1640
1641async fn solc_project_index_from_files(
1642    config: &FoundryConfig,
1643    client: Option<&tower_lsp::Client>,
1644    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1645    source_files: &[PathBuf],
1646) -> Result<Value, RunnerError> {
1647    if source_files.is_empty() {
1648        return Err(RunnerError::CommandError(std::io::Error::other(
1649            "no source files found for project index",
1650        )));
1651    }
1652
1653    let remappings = resolve_remappings(config).await;
1654
1655    // Resolve the project's solc version from foundry.toml.
1656    let project_version: Option<SemVer> =
1657        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
1658
1659    // When no version is pinned in foundry.toml, derive a constraint from the
1660    // source files' pragmas so that svm can auto-install a matching binary.
1661    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
1662        Some(PragmaConstraint::Exact(v.clone()))
1663    } else {
1664        source_files.iter().find_map(|f| {
1665            std::fs::read_to_string(f)
1666                .ok()
1667                .and_then(|src| parse_pragma(&src))
1668        })
1669    };
1670    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
1671
1672    // -- Pre-scan pragmas to separate compatible vs incompatible files. --
1673    //
1674    // Solc emits ZERO ASTs when any file in the batch has a version error
1675    // (5333).  We must exclude incompatible files before compiling so the
1676    // batch produces full AST output for all compatible files.
1677    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
1678        let mut compat = Vec::with_capacity(source_files.len());
1679        let mut incompat = Vec::new();
1680        for file in source_files {
1681            let is_compatible = std::fs::read_to_string(file)
1682                .ok()
1683                .and_then(|src| parse_pragma(&src))
1684                .map(|pragma| version_satisfies(ver, &pragma))
1685                // Files without a pragma are assumed compatible.
1686                .unwrap_or(true);
1687            if is_compatible {
1688                compat.push(file.clone());
1689            } else {
1690                incompat.push(file.clone());
1691            }
1692        }
1693        (compat, incompat)
1694    } else {
1695        // No project version configured — compile everything in one batch.
1696        (source_files.to_vec(), Vec::new())
1697    };
1698
1699    if !incompatible_files.is_empty() {
1700        if let Some(c) = client {
1701            c.log_message(
1702                tower_lsp::lsp_types::MessageType::INFO,
1703                format!(
1704                    "project index: {} compatible, {} incompatible with solc {}",
1705                    compatible_files.len(),
1706                    incompatible_files.len(),
1707                    project_version
1708                        .as_ref()
1709                        .map(|v| v.to_string())
1710                        .unwrap_or_default(),
1711                ),
1712            )
1713            .await;
1714        }
1715    }
1716
1717    // -- Full batch compile of compatible files. --
1718    //
1719    // The source file list comes from discover_compilation_closure which only
1720    // includes files reachable via imports from src/test/script — so all files
1721    // in the batch are version-compatible and their transitive imports resolve.
1722    // A full (non-parse-only) compile is required so that cross-file
1723    // referencedDeclaration IDs are populated for goto-references to work.
1724    let mut result = if compatible_files.is_empty() {
1725        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
1726    } else {
1727        let input = build_batch_standard_json_input_with_cache(
1728            &compatible_files,
1729            &remappings,
1730            config,
1731            text_cache,
1732        );
1733        let raw = run_solc(&solc_binary, &input, &config.root).await?;
1734        normalize_solc_output(raw, Some(&config.root))
1735    };
1736
1737    let batch_source_count = result
1738        .get("sources")
1739        .and_then(|s| s.as_object())
1740        .map_or(0, |obj| obj.len());
1741
1742    if incompatible_files.is_empty() {
1743        return Ok(result);
1744    }
1745
1746    if let Some(c) = client {
1747        // Log first few errors from the batch to understand why sources=0.
1748        let batch_errors: Vec<String> = result
1749            .get("errors")
1750            .and_then(|e| e.as_array())
1751            .map(|arr| {
1752                arr.iter()
1753                    .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
1754                    .take(3)
1755                    .filter_map(|e| {
1756                        let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
1757                        let file = e
1758                            .get("sourceLocation")
1759                            .and_then(|sl| sl.get("file"))
1760                            .and_then(|f| f.as_str())
1761                            .unwrap_or("?");
1762                        Some(format!("{file}: {msg}"))
1763                    })
1764                    .collect()
1765            })
1766            .unwrap_or_default();
1767
1768        c.log_message(
1769            tower_lsp::lsp_types::MessageType::INFO,
1770            format!(
1771                "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
1772                batch_source_count,
1773                incompatible_files.len(),
1774                if batch_errors.is_empty() {
1775                    String::new()
1776                } else {
1777                    format!(" [first errors: {}]", batch_errors.join("; "))
1778                },
1779            ),
1780        )
1781        .await;
1782    }
1783
1784    // -- Individually compile incompatible files with their matching solc. --
1785    let mut compiled = 0usize;
1786    let mut skipped = 0usize;
1787    for file in &incompatible_files {
1788        let pragma = std::fs::read_to_string(file)
1789            .ok()
1790            .and_then(|src| parse_pragma(&src));
1791
1792        let Some(file_constraint) = pragma else {
1793            skipped += 1;
1794            continue;
1795        };
1796
1797        let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
1798        let input = build_batch_standard_json_input_with_cache(
1799            &[file.clone()],
1800            &remappings,
1801            config,
1802            text_cache,
1803        );
1804        match run_solc(&file_binary, &input, &config.root).await {
1805            Ok(raw) => {
1806                let normalized = normalize_solc_output(raw, Some(&config.root));
1807                merge_normalized_outputs(&mut result, normalized);
1808                compiled += 1;
1809            }
1810            Err(e) => {
1811                if let Some(c) = client {
1812                    c.log_message(
1813                        tower_lsp::lsp_types::MessageType::WARNING,
1814                        format!(
1815                            "project index: incompatible file {} failed: {e}",
1816                            file.display(),
1817                        ),
1818                    )
1819                    .await;
1820                }
1821                skipped += 1;
1822            }
1823        }
1824    }
1825
1826    if let Some(c) = client {
1827        c.log_message(
1828            tower_lsp::lsp_types::MessageType::INFO,
1829            format!(
1830                "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
1831            ),
1832        )
1833        .await;
1834    }
1835
1836    Ok(result)
1837}
1838
1839#[cfg(test)]
1840mod tests {
1841    use super::*;
1842
1843    #[test]
1844    fn test_normalize_solc_sources() {
1845        let solc_output = json!({
1846            "sources": {
1847                "src/Foo.sol": {
1848                    "id": 0,
1849                    "ast": {
1850                        "nodeType": "SourceUnit",
1851                        "absolutePath": "src/Foo.sol",
1852                        "id": 100
1853                    }
1854                },
1855                "src/Bar.sol": {
1856                    "id": 1,
1857                    "ast": {
1858                        "nodeType": "SourceUnit",
1859                        "absolutePath": "src/Bar.sol",
1860                        "id": 200
1861                    }
1862                }
1863            },
1864            "contracts": {},
1865            "errors": []
1866        });
1867
1868        let normalized = normalize_solc_output(solc_output, None);
1869
1870        // Sources kept in solc-native shape: path -> { id, ast }
1871        let sources = normalized.get("sources").unwrap().as_object().unwrap();
1872        assert_eq!(sources.len(), 2);
1873
1874        let foo = sources.get("src/Foo.sol").unwrap();
1875        assert_eq!(foo.get("id").unwrap(), 0);
1876        assert_eq!(
1877            foo.get("ast")
1878                .unwrap()
1879                .get("nodeType")
1880                .unwrap()
1881                .as_str()
1882                .unwrap(),
1883            "SourceUnit"
1884        );
1885
1886        // Check source_id_to_path constructed
1887        let id_to_path = normalized
1888            .get("source_id_to_path")
1889            .unwrap()
1890            .as_object()
1891            .unwrap();
1892        assert_eq!(id_to_path.len(), 2);
1893    }
1894
1895    #[test]
1896    fn test_normalize_solc_contracts() {
1897        let solc_output = json!({
1898            "sources": {},
1899            "contracts": {
1900                "src/Foo.sol": {
1901                    "Foo": {
1902                        "abi": [{"type": "function", "name": "bar"}],
1903                        "evm": {
1904                            "methodIdentifiers": {
1905                                "bar(uint256)": "abcd1234"
1906                            }
1907                        }
1908                    }
1909                }
1910            },
1911            "errors": []
1912        });
1913
1914        let normalized = normalize_solc_output(solc_output, None);
1915
1916        // Contracts kept in solc-native shape: path -> name -> { abi, evm, ... }
1917        let contracts = normalized.get("contracts").unwrap().as_object().unwrap();
1918        let foo_contracts = contracts.get("src/Foo.sol").unwrap().as_object().unwrap();
1919        let foo = foo_contracts.get("Foo").unwrap();
1920
1921        let method_ids = foo
1922            .get("evm")
1923            .unwrap()
1924            .get("methodIdentifiers")
1925            .unwrap()
1926            .as_object()
1927            .unwrap();
1928        assert_eq!(
1929            method_ids.get("bar(uint256)").unwrap().as_str().unwrap(),
1930            "abcd1234"
1931        );
1932    }
1933
1934    #[test]
1935    fn test_normalize_solc_errors_passthrough() {
1936        let solc_output = json!({
1937            "sources": {},
1938            "contracts": {},
1939            "errors": [{
1940                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
1941                "type": "Warning",
1942                "component": "general",
1943                "severity": "warning",
1944                "errorCode": "2394",
1945                "message": "test warning",
1946                "formattedMessage": "Warning: test warning"
1947            }]
1948        });
1949
1950        let normalized = normalize_solc_output(solc_output, None);
1951
1952        let errors = normalized.get("errors").unwrap().as_array().unwrap();
1953        assert_eq!(errors.len(), 1);
1954        assert_eq!(
1955            errors[0].get("errorCode").unwrap().as_str().unwrap(),
1956            "2394"
1957        );
1958    }
1959
1960    #[test]
1961    fn test_normalize_empty_solc_output() {
1962        let solc_output = json!({
1963            "sources": {},
1964            "contracts": {}
1965        });
1966
1967        let normalized = normalize_solc_output(solc_output, None);
1968
1969        assert!(
1970            normalized
1971                .get("sources")
1972                .unwrap()
1973                .as_object()
1974                .unwrap()
1975                .is_empty()
1976        );
1977        assert!(
1978            normalized
1979                .get("contracts")
1980                .unwrap()
1981                .as_object()
1982                .unwrap()
1983                .is_empty()
1984        );
1985        assert_eq!(
1986            normalized.get("errors").unwrap().as_array().unwrap().len(),
1987            0
1988        );
1989        assert!(
1990            normalized
1991                .get("source_id_to_path")
1992                .unwrap()
1993                .as_object()
1994                .unwrap()
1995                .is_empty()
1996        );
1997    }
1998
    #[test]
    fn test_build_standard_json_input() {
        // Default config: the input must carry the file as a source, pass
        // remappings through verbatim, and request only the cheap outputs.
        let config = FoundryConfig::default();
        let input = build_standard_json_input(
            "/path/to/Foo.sol",
            &[
                "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
                "forge-std/=lib/forge-std/src/".to_string(),
            ],
            &config,
            None,
        );

        // The target file is keyed by its path under "sources".
        let sources = input.get("sources").unwrap().as_object().unwrap();
        assert!(sources.contains_key("/path/to/Foo.sol"));

        // Both remappings survive into settings untouched.
        let settings = input.get("settings").unwrap();
        let remappings = settings.get("remappings").unwrap().as_array().unwrap();
        assert_eq!(remappings.len(), 2);

        // Output selection uses the "*" wildcard for all files.
        let output_sel = settings.get("outputSelection").unwrap();
        assert!(output_sel.get("*").is_some());

        // Default config: no optimizer, no viaIR, no evmVersion
        assert!(settings.get("optimizer").is_none());
        assert!(settings.get("viaIR").is_none());
        assert!(settings.get("evmVersion").is_none());

        // gasEstimates is never requested — forces full EVM codegen (88% of compile time)
        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));
        assert!(!output_names.contains(&"abi")); // ABI is intentionally omitted — no consumer
        assert!(output_names.contains(&"devdoc"));
        assert!(output_names.contains(&"userdoc"));
        assert!(output_names.contains(&"evm.methodIdentifiers"));
    }
2036
    #[test]
    fn test_build_standard_json_input_with_config() {
        // Fully-loaded config: only viaIR and evmVersion survive into the
        // solc settings; optimizer and gasEstimates are deliberately dropped.
        let config = FoundryConfig {
            optimizer: true,
            optimizer_runs: 9999999,
            via_ir: true,
            evm_version: Some("osaka".to_string()),
            ..Default::default()
        };
        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config, None);

        let settings = input.get("settings").unwrap();

        // Optimizer is never passed — adds ~3s and doesn't affect AST/ABI/docs
        assert!(settings.get("optimizer").is_none());

        // viaIR IS passed when config has it (some contracts require it to compile)
        assert!(settings.get("viaIR").unwrap().as_bool().unwrap());

        // gasEstimates is never requested regardless of viaIR
        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));

        // EVM version
        assert_eq!(
            settings.get("evmVersion").unwrap().as_str().unwrap(),
            "osaka"
        );
    }
2067
2068    #[tokio::test]
2069    async fn test_resolve_solc_binary_default() {
2070        let config = FoundryConfig::default();
2071        let binary = resolve_solc_binary(&config, None, None).await;
2072        assert_eq!(binary, PathBuf::from("solc"));
2073    }
2074
2075    #[test]
2076    fn test_parse_pragma_exact() {
2077        let source = "// SPDX\npragma solidity 0.8.26;\n";
2078        assert_eq!(
2079            parse_pragma(source),
2080            Some(PragmaConstraint::Exact(SemVer {
2081                major: 0,
2082                minor: 8,
2083                patch: 26
2084            }))
2085        );
2086    }
2087
2088    #[test]
2089    fn test_parse_pragma_caret() {
2090        let source = "pragma solidity ^0.8.0;\n";
2091        assert_eq!(
2092            parse_pragma(source),
2093            Some(PragmaConstraint::Caret(SemVer {
2094                major: 0,
2095                minor: 8,
2096                patch: 0
2097            }))
2098        );
2099    }
2100
2101    #[test]
2102    fn test_parse_pragma_gte() {
2103        let source = "pragma solidity >=0.8.0;\n";
2104        assert_eq!(
2105            parse_pragma(source),
2106            Some(PragmaConstraint::Gte(SemVer {
2107                major: 0,
2108                minor: 8,
2109                patch: 0
2110            }))
2111        );
2112    }
2113
2114    #[test]
2115    fn test_parse_pragma_range() {
2116        let source = "pragma solidity >=0.6.2 <0.9.0;\n";
2117        assert_eq!(
2118            parse_pragma(source),
2119            Some(PragmaConstraint::Range(
2120                SemVer {
2121                    major: 0,
2122                    minor: 6,
2123                    patch: 2
2124                },
2125                SemVer {
2126                    major: 0,
2127                    minor: 9,
2128                    patch: 0
2129                },
2130            ))
2131        );
2132    }
2133
2134    #[test]
2135    fn test_parse_pragma_none() {
2136        let source = "contract Foo {}\n";
2137        assert_eq!(parse_pragma(source), None);
2138    }
2139
2140    #[test]
2141    fn test_version_satisfies_exact() {
2142        let v = SemVer {
2143            major: 0,
2144            minor: 8,
2145            patch: 26,
2146        };
2147        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
2148        assert!(!version_satisfies(
2149            &SemVer {
2150                major: 0,
2151                minor: 8,
2152                patch: 25
2153            },
2154            &PragmaConstraint::Exact(v)
2155        ));
2156    }
2157
2158    #[test]
2159    fn test_version_satisfies_caret() {
2160        let constraint = PragmaConstraint::Caret(SemVer {
2161            major: 0,
2162            minor: 8,
2163            patch: 0,
2164        });
2165        assert!(version_satisfies(
2166            &SemVer {
2167                major: 0,
2168                minor: 8,
2169                patch: 0
2170            },
2171            &constraint
2172        ));
2173        assert!(version_satisfies(
2174            &SemVer {
2175                major: 0,
2176                minor: 8,
2177                patch: 26
2178            },
2179            &constraint
2180        ));
2181        // 0.9.0 is outside ^0.8.0
2182        assert!(!version_satisfies(
2183            &SemVer {
2184                major: 0,
2185                minor: 9,
2186                patch: 0
2187            },
2188            &constraint
2189        ));
2190        // 0.7.0 is below
2191        assert!(!version_satisfies(
2192            &SemVer {
2193                major: 0,
2194                minor: 7,
2195                patch: 0
2196            },
2197            &constraint
2198        ));
2199    }
2200
2201    #[test]
2202    fn test_version_satisfies_gte() {
2203        let constraint = PragmaConstraint::Gte(SemVer {
2204            major: 0,
2205            minor: 8,
2206            patch: 0,
2207        });
2208        assert!(version_satisfies(
2209            &SemVer {
2210                major: 0,
2211                minor: 8,
2212                patch: 0
2213            },
2214            &constraint
2215        ));
2216        assert!(version_satisfies(
2217            &SemVer {
2218                major: 0,
2219                minor: 9,
2220                patch: 0
2221            },
2222            &constraint
2223        ));
2224        assert!(!version_satisfies(
2225            &SemVer {
2226                major: 0,
2227                minor: 7,
2228                patch: 0
2229            },
2230            &constraint
2231        ));
2232    }
2233
2234    #[test]
2235    fn test_version_satisfies_range() {
2236        let constraint = PragmaConstraint::Range(
2237            SemVer {
2238                major: 0,
2239                minor: 6,
2240                patch: 2,
2241            },
2242            SemVer {
2243                major: 0,
2244                minor: 9,
2245                patch: 0,
2246            },
2247        );
2248        assert!(version_satisfies(
2249            &SemVer {
2250                major: 0,
2251                minor: 6,
2252                patch: 2
2253            },
2254            &constraint
2255        ));
2256        assert!(version_satisfies(
2257            &SemVer {
2258                major: 0,
2259                minor: 8,
2260                patch: 26
2261            },
2262            &constraint
2263        ));
2264        // 0.9.0 is the upper bound (exclusive)
2265        assert!(!version_satisfies(
2266            &SemVer {
2267                major: 0,
2268                minor: 9,
2269                patch: 0
2270            },
2271            &constraint
2272        ));
2273        assert!(!version_satisfies(
2274            &SemVer {
2275                major: 0,
2276                minor: 6,
2277                patch: 1
2278            },
2279            &constraint
2280        ));
2281    }
2282
2283    #[test]
2284    fn test_find_matching_version() {
2285        let installed = vec![
2286            SemVer {
2287                major: 0,
2288                minor: 8,
2289                patch: 0,
2290            },
2291            SemVer {
2292                major: 0,
2293                minor: 8,
2294                patch: 20,
2295            },
2296            SemVer {
2297                major: 0,
2298                minor: 8,
2299                patch: 26,
2300            },
2301            SemVer {
2302                major: 0,
2303                minor: 8,
2304                patch: 33,
2305            },
2306        ];
2307        // ^0.8.20 should pick latest: 0.8.33
2308        let constraint = PragmaConstraint::Caret(SemVer {
2309            major: 0,
2310            minor: 8,
2311            patch: 20,
2312        });
2313        let matched = find_matching_version(&constraint, &installed);
2314        assert_eq!(
2315            matched,
2316            Some(SemVer {
2317                major: 0,
2318                minor: 8,
2319                patch: 33
2320            })
2321        );
2322
2323        // exact 0.8.20
2324        let constraint = PragmaConstraint::Exact(SemVer {
2325            major: 0,
2326            minor: 8,
2327            patch: 20,
2328        });
2329        let matched = find_matching_version(&constraint, &installed);
2330        assert_eq!(
2331            matched,
2332            Some(SemVer {
2333                major: 0,
2334                minor: 8,
2335                patch: 20
2336            })
2337        );
2338
2339        // exact 0.8.15 — not installed
2340        let constraint = PragmaConstraint::Exact(SemVer {
2341            major: 0,
2342            minor: 8,
2343            patch: 15,
2344        });
2345        let matched = find_matching_version(&constraint, &installed);
2346        assert_eq!(matched, None);
2347    }
2348
2349    #[test]
2350    fn test_list_installed_versions() {
2351        // Just verify it doesn't panic — actual versions depend on system
2352        let versions = list_installed_versions();
2353        // Versions should be sorted
2354        for w in versions.windows(2) {
2355            assert!(w[0] <= w[1]);
2356        }
2357    }
2358
2359    // -------------------------------------------------------------------
2360    // Tests for mixed-version retry helpers
2361    // -------------------------------------------------------------------
2362
    #[test]
    fn test_extract_version_error_files_basic() {
        // Only files attached to error code 5333 ("requires different
        // compiler version") are collected; other error codes are ignored.
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/old-lib/src/Legacy.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    // Different error code on purpose — must be filtered out.
                    "errorCode": "9574",
                    "severity": "error",
                    "message": "Some other error",
                    "sourceLocation": {
                        "file": "src/Main.sol",
                        "start": 100,
                        "end": 200
                    }
                }
            ]
        });

        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 2);
        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
        // Non-5333 error files should NOT be included.
        assert!(!files.contains("src/Main.sol"));
    }
2407
2408    #[test]
2409    fn test_extract_version_error_files_empty() {
2410        let output = json!({
2411            "errors": []
2412        });
2413        assert!(extract_version_error_files(&output).is_empty());
2414
2415        // No errors key at all.
2416        let output = json!({});
2417        assert!(extract_version_error_files(&output).is_empty());
2418    }
2419
2420    #[test]
2421    fn test_extract_version_error_files_no_source_location() {
2422        let output = json!({
2423            "errors": [
2424                {
2425                    "errorCode": "5333",
2426                    "severity": "error",
2427                    "message": "Source file requires different compiler version"
2428                    // No sourceLocation field.
2429                }
2430            ]
2431        });
2432        assert!(extract_version_error_files(&output).is_empty());
2433    }
2434
2435    #[test]
2436    fn test_extract_version_error_files_dedup() {
2437        let output = json!({
2438            "errors": [
2439                {
2440                    "errorCode": "5333",
2441                    "severity": "error",
2442                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
2443                },
2444                {
2445                    "errorCode": "5333",
2446                    "severity": "error",
2447                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
2448                }
2449            ]
2450        });
2451        let files = extract_version_error_files(&output);
2452        assert_eq!(files.len(), 1);
2453        assert!(files.contains("lib/same.sol"));
2454    }
2455
    #[test]
    fn test_reverse_import_closure_simple() {
        // Create a temp directory with three files:
        //   a.sol imports b.sol
        //   b.sol imports c.sol
        //   d.sol (standalone)
        //
        // If c.sol is excluded, the closure should include: c.sol, b.sol, a.sol
        // (b imports c, a imports b — both are transitive importers of c).
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(
            root.join("b.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
        )
        .unwrap();
        std::fs::write(
            root.join("c.sol"),
            "// SPDX-License-Identifier: MIT\ncontract C {}",
        )
        .unwrap();
        std::fs::write(
            root.join("d.sol"),
            "// SPDX-License-Identifier: MIT\ncontract D {}",
        )
        .unwrap();

        let files: Vec<PathBuf> = vec![
            root.join("a.sol"),
            root.join("b.sol"),
            root.join("c.sol"),
            root.join("d.sol"),
        ];

        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();
        // `&[]`: relative imports need no remappings.
        let closure = reverse_import_closure(&files, &exclude, root, &[]);

        assert!(
            closure.contains(&root.join("c.sol")),
            "seed file in closure"
        );
        assert!(closure.contains(&root.join("b.sol")), "direct importer");
        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
        assert!(
            !closure.contains(&root.join("d.sol")),
            "unrelated file not in closure"
        );
        assert_eq!(closure.len(), 3);
    }
2511
2512    #[test]
2513    fn test_reverse_import_closure_no_importers() {
2514        // Excluding a file that nothing imports — closure is just the seed.
2515        let dir = tempfile::tempdir().unwrap();
2516        let root = dir.path();
2517
2518        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
2519        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();
2520
2521        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
2522        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();
2523
2524        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2525        assert_eq!(closure.len(), 1);
2526        assert!(closure.contains(&root.join("a.sol")));
2527    }
2528
2529    #[test]
2530    fn test_reverse_import_closure_diamond() {
2531        // Diamond pattern:
2532        //   a.sol imports b.sol and c.sol
2533        //   b.sol imports d.sol
2534        //   c.sol imports d.sol
2535        //
2536        // Excluding d.sol → closure = {d, b, c, a}
2537        let dir = tempfile::tempdir().unwrap();
2538        let root = dir.path();
2539
2540        std::fs::write(
2541            root.join("a.sol"),
2542            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
2543        )
2544        .unwrap();
2545        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
2546        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
2547        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();
2548
2549        let files: Vec<PathBuf> = vec![
2550            root.join("a.sol"),
2551            root.join("b.sol"),
2552            root.join("c.sol"),
2553            root.join("d.sol"),
2554        ];
2555        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();
2556
2557        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2558        assert_eq!(closure.len(), 4);
2559    }
2560
2561    #[test]
2562    fn test_merge_normalized_outputs_basic() {
2563        let mut base = json!({
2564            "sources": {
2565                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
2566                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
2567            },
2568            "contracts": {
2569                "/abs/src/A.sol": { "A": { "abi": [] } }
2570            },
2571            "errors": [],
2572            "source_id_to_path": {
2573                "0": "/abs/src/A.sol",
2574                "1": "/abs/src/B.sol"
2575            }
2576        });
2577
2578        let other = json!({
2579            "sources": {
2580                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
2581            },
2582            "contracts": {
2583                "/abs/lib/C.sol": { "C": { "abi": [] } }
2584            },
2585            "errors": [],
2586            "source_id_to_path": {
2587                "0": "/abs/lib/C.sol"
2588            }
2589        });
2590
2591        merge_normalized_outputs(&mut base, other);
2592
2593        // Sources should now have 3 entries.
2594        let sources = base["sources"].as_object().unwrap();
2595        assert_eq!(sources.len(), 3);
2596        assert!(sources.contains_key("/abs/lib/C.sol"));
2597
2598        // The merged source's ID should be remapped (0 + max_base_id=2 → 2).
2599        let c_id = sources["/abs/lib/C.sol"]["id"].as_u64().unwrap();
2600        assert_eq!(
2601            c_id, 2,
2602            "remapped id should be max_base_id (2) + original (0)"
2603        );
2604
2605        // source_id_to_path should have 3 entries.
2606        let id_map = base["source_id_to_path"].as_object().unwrap();
2607        assert_eq!(id_map.len(), 3);
2608        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");
2609
2610        // Contracts should have 2 entries.
2611        let contracts = base["contracts"].as_object().unwrap();
2612        assert_eq!(contracts.len(), 2);
2613        assert!(contracts.contains_key("/abs/lib/C.sol"));
2614    }
2615
2616    #[test]
2617    fn test_merge_normalized_outputs_empty_other() {
2618        let mut base = json!({
2619            "sources": {
2620                "/abs/src/A.sol": { "id": 0, "ast": {} }
2621            },
2622            "contracts": {},
2623            "errors": [],
2624            "source_id_to_path": { "0": "/abs/src/A.sol" }
2625        });
2626
2627        let other = json!({
2628            "sources": {},
2629            "contracts": {},
2630            "errors": [],
2631            "source_id_to_path": {}
2632        });
2633
2634        merge_normalized_outputs(&mut base, other);
2635
2636        let sources = base["sources"].as_object().unwrap();
2637        assert_eq!(sources.len(), 1);
2638    }
2639
2640    #[test]
2641    fn test_merge_normalized_outputs_empty_base() {
2642        let mut base = json!({
2643            "sources": {},
2644            "contracts": {},
2645            "errors": [],
2646            "source_id_to_path": {}
2647        });
2648
2649        let other = json!({
2650            "sources": {
2651                "/abs/lib/X.sol": { "id": 0, "ast": {} }
2652            },
2653            "contracts": {
2654                "/abs/lib/X.sol": { "X": { "abi": [] } }
2655            },
2656            "errors": [],
2657            "source_id_to_path": { "0": "/abs/lib/X.sol" }
2658        });
2659
2660        merge_normalized_outputs(&mut base, other);
2661
2662        let sources = base["sources"].as_object().unwrap();
2663        assert_eq!(sources.len(), 1);
2664        // max_base_id is 0 (no entries), so remapped id = 0 + 0 = 0.
2665        let x_id = sources["/abs/lib/X.sol"]["id"].as_u64().unwrap();
2666        assert_eq!(x_id, 0);
2667    }
2668}