Skip to main content

solidity_language_server/
solc.rs

1//! Direct `solc --standard-json` runner for fast AST generation.
2//!
3//! The output is normalized into the same shape that `forge build --json --ast`
4//! produces, so all downstream consumers (goto, hover, completions, etc.) work
5//! unchanged.
6
7use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Cached list of installed solc versions. Populated on first access,
/// invalidated after a successful `svm::install`.
///
/// Wrapped in a `Mutex` (not just `OnceLock`) so that
/// `invalidate_installed_versions()` can replace the list in place after
/// a new compiler version is installed.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22    let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23    mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27    if let Some(mutex) = INSTALLED_VERSIONS.get() {
28        *mutex.lock().unwrap() = scan_installed_versions();
29    }
30}
31
32/// Convert a `semver::Version` (from svm-rs) to our lightweight `SemVer`.
33fn semver_to_local(v: &semver::Version) -> SemVer {
34    SemVer {
35        major: v.major as u32,
36        minor: v.minor as u32,
37        patch: v.patch as u32,
38    }
39}
40
/// Resolve the path to the solc binary.
///
/// Resolution order:
/// 1. Parse `pragma solidity` from the source file.
///    - **Exact pragma** (`=0.7.6`): always use the file's version — foundry.toml
///      cannot override an exact pragma without breaking compilation.
///    - **Wildcard pragma** (`^0.8.0`, `>=0.8.0`, `>=0.6.2 <0.9.0`): if
///      `foundry.toml` specifies a solc version that satisfies the constraint,
///      use it. Otherwise pick the latest matching installed version.
/// 2. If no pragma, use the `foundry.toml` solc version if set.
/// 3. If no match is installed, auto-install via `svm install`.
/// 4. Fall back to whatever `solc` is on `$PATH`.
///
/// Never fails: the `$PATH` fallback (`"solc"`) is always returned last.
/// `client`, when present, receives log/status messages for each decision.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    // 1. Try pragma constraint (may be tightened from the full import graph)
    if let Some(constraint) = constraint {
        // For exact pragmas, always honour the file — foundry.toml can't override
        // without causing a compilation failure.
        // For wildcard pragmas, prefer the foundry.toml version if it satisfies
        // the constraint. This mirrors `forge build` behaviour where the project
        // config picks the version but the pragma must still be satisfied.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {config_ver} (pragma {constraint})"),
                )
                .await;
            }
            return path;
        }

        // Exact pragmas (and wildcard pragmas the config can't satisfy) land
        // here: pick the latest installed version matching the constraint.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {version}"),
                )
                .await;
            }
            return path;
        }

        // No matching version installed — try auto-install via svm
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // Refresh the cached version list after install
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                // If svm reported success but the binary still isn't where
                // expected, fall through to the config/system fallbacks below.
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                             Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // 2. No pragma — use foundry.toml version if available
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // 3. Fall back to system solc
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
160
161/// Determine which version to install for a pragma constraint.
162///
163/// - Exact: install that version
164/// - Caret `^0.8.20`: install `0.8.20` (minimum satisfying)
165/// - Gte `>=0.8.0`: install `0.8.0` (minimum satisfying)
166/// - Range `>=0.6.2 <0.9.0`: install `0.6.2` (minimum satisfying)
167fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
168    match constraint {
169        PragmaConstraint::Exact(v) => Some(v.to_string()),
170        PragmaConstraint::Caret(v) => Some(v.to_string()),
171        PragmaConstraint::Gte(v) => Some(v.to_string()),
172        PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
173    }
174}
175
176/// Install a solc version using svm-rs library.
177///
178/// Returns `true` if the install succeeded.
179async fn svm_install(version: &str) -> bool {
180    let ver = match semver::Version::parse(version) {
181        Ok(v) => v,
182        Err(_) => return false,
183    };
184    svm::install(&ver).await.is_ok()
185}
186
187/// Look up a solc binary by version string using `svm::version_binary()`.
188fn find_solc_binary(version: &str) -> Option<PathBuf> {
189    let path = svm::version_binary(version);
190    if path.is_file() {
191        return Some(path);
192    }
193    None
194}
195
196// ── Pragma parsing ────────────────────────────────────────────────────────
197
/// A parsed semver version (major.minor.patch).
///
/// The derived `Ord` compares fields in declaration order
/// (major, then minor, then patch), which matches semver precedence for
/// plain versions — pre-release/build metadata is not modeled.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}
205
206impl SemVer {
207    fn parse(s: &str) -> Option<SemVer> {
208        let parts: Vec<&str> = s.split('.').collect();
209        if parts.len() != 3 {
210            return None;
211        }
212        Some(SemVer {
213            major: parts[0].parse().ok()?,
214            minor: parts[1].parse().ok()?,
215            patch: parts[2].parse().ok()?,
216        })
217    }
218}
219
220impl std::fmt::Display for SemVer {
221    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
222        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
223    }
224}
225
/// A version constraint from `pragma solidity`.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `0.8.26` — exact match
    Exact(SemVer),
    /// `^0.8.0` — in Solidity this means `>=0.8.0 <0.9.0`:
    /// same major.minor, with the next minor as the exclusive ceiling.
    Caret(SemVer),
    /// `>=0.8.0` — at least this version, no upper bound
    Gte(SemVer),
    /// `>=0.6.2 <0.9.0` — inclusive lower bound, exclusive upper bound
    Range(SemVer, SemVer),
}
239
240impl std::fmt::Display for PragmaConstraint {
241    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
242        match self {
243            PragmaConstraint::Exact(v) => write!(f, "={v}"),
244            PragmaConstraint::Caret(v) => write!(f, "^{v}"),
245            PragmaConstraint::Gte(v) => write!(f, ">={v}"),
246            PragmaConstraint::Range(lo, hi) => write!(f, ">={lo} <{hi}"),
247        }
248    }
249}
250
251/// Resolve a Solidity import path to an absolute filesystem path.
252///
253/// Handles relative imports (`./`, `../`) and remapped imports.
254fn resolve_import_to_abs(
255    project_root: &Path,
256    importer_abs: &Path,
257    import_path: &str,
258    remappings: &[String],
259) -> Option<PathBuf> {
260    if import_path.starts_with("./") || import_path.starts_with("../") {
261        let base = importer_abs.parent()?;
262        return Some(lexical_normalize(&base.join(import_path)));
263    }
264
265    for remap in remappings {
266        let mut it = remap.splitn(2, '=');
267        let prefix = it.next().unwrap_or_default();
268        let target = it.next().unwrap_or_default();
269        if prefix.is_empty() || target.is_empty() {
270            continue;
271        }
272        if import_path.starts_with(prefix) {
273            let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
274            return Some(lexical_normalize(
275                &project_root.join(format!("{target}{suffix}")),
276            ));
277        }
278    }
279
280    Some(lexical_normalize(&project_root.join(import_path)))
281}
282
/// Normalize a path by resolving `.` and `..` components lexically
/// (without hitting the filesystem).
///
/// `..` pops the previous normal component; at a root it is a no-op
/// (matching `PathBuf::pop` on `/`). Leading `..` components of a
/// *relative* path are preserved (`../x` stays `../x`) rather than being
/// silently dropped, so callers joining relative bases aren't redirected
/// into the wrong directory.
fn lexical_normalize(path: &Path) -> PathBuf {
    use std::path::Component;

    let mut out = PathBuf::new();
    for comp in path.components() {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => match out.components().next_back() {
                // Pop a real directory component.
                Some(Component::Normal(_)) => {
                    out.pop();
                }
                // `..` at the filesystem root cannot go higher — drop it.
                Some(Component::RootDir) | Some(Component::Prefix(_)) => {}
                // Empty buffer or already-accumulated `..`: keep it.
                _ => out.push(".."),
            },
            other => out.push(other.as_os_str()),
        }
    }
    out
}
298
299/// Collect pragma constraints from a file and all its transitive imports.
300///
301/// Walks the import graph using simple string scanning (no tree-sitter),
302/// resolving import paths via remappings.  Returns all pragmas found so
303/// that the caller can pick a solc version satisfying every file.
304fn collect_import_pragmas(
305    file_path: &Path,
306    project_root: &Path,
307    remappings: &[String],
308) -> Vec<PragmaConstraint> {
309    let mut pragmas = Vec::new();
310    let mut visited = HashSet::new();
311    collect_import_pragmas_recursive(
312        file_path,
313        project_root,
314        remappings,
315        &mut pragmas,
316        &mut visited,
317    );
318    pragmas
319}
320
321fn collect_import_pragmas_recursive(
322    file_path: &Path,
323    project_root: &Path,
324    remappings: &[String],
325    pragmas: &mut Vec<PragmaConstraint>,
326    visited: &mut HashSet<PathBuf>,
327) {
328    if !visited.insert(file_path.to_path_buf()) {
329        return;
330    }
331    let source = match std::fs::read_to_string(file_path) {
332        Ok(s) => s,
333        Err(_) => return,
334    };
335    if let Some(pragma) = parse_pragma(&source) {
336        pragmas.push(pragma);
337    }
338    for imp in links::ts_find_imports(source.as_bytes()) {
339        if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
340            collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
341        }
342    }
343}
344
345/// Tighten a set of pragma constraints into a single constraint that
346/// satisfies all of them.
347///
348/// Rules:
349/// - An exact pragma always wins (if any file requires `0.8.23`, we must
350///   use exactly `0.8.23`).
351/// - Multiple exact pragmas that disagree → returns the first one (solc
352///   will error anyway, but we still try).
353/// - For wildcard pragmas, compute the intersection range and return it.
354fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
355    if pragmas.is_empty() {
356        return None;
357    }
358
359    // If any pragma is Exact, that version must be used.
360    for p in pragmas {
361        if matches!(p, PragmaConstraint::Exact(_)) {
362            return Some(p.clone());
363        }
364    }
365
366    // Normalize every constraint to a (lower, upper) range, then intersect.
367    let mut lower = SemVer {
368        major: 0,
369        minor: 0,
370        patch: 0,
371    };
372    let mut upper: Option<SemVer> = None;
373
374    for p in pragmas {
375        let (lo, hi) = constraint_to_range(p);
376        if lo > lower {
377            lower = lo;
378        }
379        if let Some(hi) = hi {
380            upper = Some(match upper {
381                Some(cur) if hi < cur => hi,
382                Some(cur) => cur,
383                None => hi,
384            });
385        }
386    }
387
388    match upper {
389        Some(hi) if lower >= hi => None, // empty intersection
390        Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
391        None => Some(PragmaConstraint::Gte(lower)),
392    }
393}
394
395/// Convert a pragma constraint to an inclusive lower bound and optional
396/// exclusive upper bound.
397fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
398    match constraint {
399        PragmaConstraint::Exact(v) => (
400            v.clone(),
401            Some(SemVer {
402                major: v.major,
403                minor: v.minor,
404                patch: v.patch + 1,
405            }),
406        ),
407        PragmaConstraint::Caret(v) => (
408            v.clone(),
409            Some(SemVer {
410                major: v.major,
411                minor: v.minor + 1,
412                patch: 0,
413            }),
414        ),
415        PragmaConstraint::Gte(v) => (v.clone(), None),
416        PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
417    }
418}
419
420/// Parse `pragma solidity <constraint>;` from Solidity source.
421///
422/// Handles:
423/// - `pragma solidity 0.8.26;` → Exact
424/// - `pragma solidity ^0.8.0;` → Caret
425/// - `pragma solidity >=0.8.0;` → Gte
426/// - `pragma solidity >=0.6.2 <0.9.0;` → Range
427pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
428    // Find the pragma line — only scan the first ~20 lines for performance
429    let pragma_line = source
430        .lines()
431        .take(20)
432        .find(|line| line.trim_start().starts_with("pragma solidity"))?;
433
434    // Extract the constraint string between "pragma solidity" and ";"
435    let after_keyword = pragma_line
436        .trim_start()
437        .strip_prefix("pragma solidity")?
438        .trim();
439    let constraint_str = after_keyword
440        .strip_suffix(';')
441        .unwrap_or(after_keyword)
442        .trim();
443
444    if constraint_str.is_empty() {
445        return None;
446    }
447
448    // Range: >=X.Y.Z <A.B.C
449    if let Some(rest) = constraint_str.strip_prefix(">=") {
450        let rest = rest.trim();
451        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
452            let lower_str = rest[..space_idx].trim();
453            let upper_part = rest[space_idx..].trim();
454            if let Some(upper_str) = upper_part.strip_prefix('<') {
455                let upper_str = upper_str.trim();
456                if let (Some(lower), Some(upper)) =
457                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
458                {
459                    return Some(PragmaConstraint::Range(lower, upper));
460                }
461            }
462        }
463        // Just >=X.Y.Z
464        if let Some(ver) = SemVer::parse(rest) {
465            return Some(PragmaConstraint::Gte(ver));
466        }
467    }
468
469    // Caret: ^X.Y.Z
470    if let Some(rest) = constraint_str.strip_prefix('^')
471        && let Some(ver) = SemVer::parse(rest.trim())
472    {
473        return Some(PragmaConstraint::Caret(ver));
474    }
475
476    // Exact: X.Y.Z
477    if let Some(ver) = SemVer::parse(constraint_str) {
478        return Some(PragmaConstraint::Exact(ver));
479    }
480
481    None
482}
483
/// List installed solc versions (cached — use `get_installed_versions()` internally).
///
/// Public wrapper over the process-wide cache; returns a snapshot clone,
/// so callers can hold it without blocking the cache lock.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
488
489/// Scan the filesystem for installed solc versions using `svm::installed_versions()`.
490///
491/// Returns sorted, deduplicated versions (ascending).
492fn scan_installed_versions() -> Vec<SemVer> {
493    svm::installed_versions()
494        .unwrap_or_default()
495        .iter()
496        .map(semver_to_local)
497        .collect()
498}
499
500/// Find the best matching installed version for a pragma constraint.
501///
502/// For all constraint types, picks the **latest** installed version that
503/// satisfies the constraint.
504pub fn find_matching_version(
505    constraint: &PragmaConstraint,
506    installed: &[SemVer],
507) -> Option<SemVer> {
508    let candidates: Vec<&SemVer> = installed
509        .iter()
510        .filter(|v| version_satisfies(v, constraint))
511        .collect();
512
513    // Pick the latest (last, since installed is sorted ascending)
514    candidates.last().cloned().cloned()
515}
516
517/// Check if a version satisfies a pragma constraint.
518pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
519    match constraint {
520        PragmaConstraint::Exact(v) => version == v,
521        PragmaConstraint::Caret(v) => {
522            // Solidity caret: ^0.8.0 means >=0.8.0 <0.9.0
523            // i.e. same major, next minor is the ceiling
524            version.major == v.major && version >= v && version.minor < v.minor + 1
525        }
526        PragmaConstraint::Gte(v) => version >= v,
527        PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
528    }
529}
530
531/// Fetch remappings by running `forge remappings` in the project root.
532///
533/// Falls back to config remappings, then to an empty list.
534pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
535    // Try `forge remappings` first — it merges all sources (foundry.toml,
536    // remappings.txt, auto-detected libs).
537    let output = Command::new("forge")
538        .arg("remappings")
539        .current_dir(&config.root)
540        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
541        .output()
542        .await;
543
544    if let Ok(output) = output
545        && output.status.success()
546    {
547        let stdout = String::from_utf8_lossy(&output.stdout);
548        let remappings: Vec<String> = stdout
549            .lines()
550            .filter(|l| !l.trim().is_empty())
551            .map(|l| l.to_string())
552            .collect();
553        if !remappings.is_empty() {
554            return remappings;
555        }
556    }
557
558    // Fall back to remappings from foundry.toml
559    if !config.remappings.is_empty() {
560        return config.remappings.clone();
561    }
562
563    // Fall back to remappings.txt at project root
564    let remappings_txt = config.root.join("remappings.txt");
565    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
566        return content
567            .lines()
568            .filter(|l| !l.trim().is_empty())
569            .map(|l| l.to_string())
570            .collect();
571    }
572
573    Vec::new()
574}
575
576/// Build the `--standard-json` input for solc.
577///
578/// Reads compiler settings from the `FoundryConfig` (parsed from `foundry.toml`)
579/// and maps them to the solc standard JSON `settings` object:
580///
581/// - `via_ir` → `settings.viaIR`
582/// - `evm_version` → `settings.evmVersion`
583///
584/// Note: `optimizer` and `evm.gasEstimates` are intentionally excluded.
585/// The optimizer adds ~3s and doesn't affect AST/doc quality.
586/// Gas estimates force solc through full EVM codegen — benchmarking on
587/// a 510-file project showed 56s with vs 6s without (88% of cost).
588pub fn build_standard_json_input(
589    file_path: &str,
590    remappings: &[String],
591    config: &FoundryConfig,
592) -> Value {
593    let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
594
595    let mut settings = json!({
596        "remappings": remappings,
597        "outputSelection": {
598            "*": {
599                "*": contract_outputs,
600                "": ["ast"]
601            }
602        }
603    });
604
605    if config.via_ir {
606        settings["viaIR"] = json!(true);
607    }
608
609    // EVM version
610    if let Some(ref evm_version) = config.evm_version {
611        settings["evmVersion"] = json!(evm_version);
612    }
613
614    json!({
615        "language": "Solidity",
616        "sources": {
617            file_path: {
618                "urls": [file_path]
619            }
620        },
621        "settings": settings
622    })
623}
624
/// Run `solc --standard-json` and return the parsed output.
///
/// Streams `input` to solc's stdin, waits for the process to exit, and
/// parses stdout as JSON. Note that *compiler* errors are not an `Err`
/// here — solc reports them inside the returned JSON's `errors` array.
/// `Err` means solc could not be spawned, produced no stdout at all, or
/// emitted unparseable JSON.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    // Best-effort debug aid: persist the exact input for later inspection.
    // A save failure is deliberately ignored.
    let _ = crate::project_cache::save_last_solc_input(project_root, input);
    let input_str = serde_json::to_string(input)?;

    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // Write the standard-json input to solc's stdin.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
        // Drop stdin to close it, signaling EOF to solc.
        // (The handle goes out of scope here — solc would otherwise block
        // waiting for more input.)
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    // solc writes JSON to stdout even on errors (errors are in the JSON)
    let stdout = String::from_utf8_lossy(&output.stdout);
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
669
/// Normalize raw solc `--standard-json` output into the canonical shape.
///
/// Solc's native shape is already close to canonical:
/// - `sources[path] = { id, ast }` — kept as-is
/// - `contracts[path][name] = { abi, evm, ... }` — kept as-is
/// - `errors` — kept as-is (defaults to `[]` if absent)
///
/// When `project_root` is provided, relative source paths are resolved to
/// absolute paths so that downstream code (goto, hover, links) can map AST
/// paths back to `file://` URIs. This is necessary because `solc_ast()`
/// passes a relative path to solc (to fix import resolution), and solc then
/// returns relative paths in the AST `absolutePath` and source keys.
///
/// Constructs `source_id_to_path` from source IDs for cross-file resolution.
///
/// Takes ownership and uses `Value::take()` to move AST nodes in-place,
/// avoiding expensive clones of multi-MB AST data.
///
/// Also resolves `absolutePath` on nested `ImportDirective` nodes so that
/// goto-definition on import strings works regardless of CWD.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    /// Walk an AST node tree and resolve `absolutePath` on `ImportDirective` nodes.
    ///
    /// Only descends into the `nodes` array — imports are top-level AST
    /// children, so deeper traversal isn't needed here.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                // `unwrap()` is safe: `get` just succeeded, so `node` is an object.
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        // Recurse into "nodes" array (top-level AST children)
        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Move errors out (defaults to [] if absent)
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Helper: resolve a path to absolute using the project root.
    // If the path is already absolute or no project root is given, return as-is.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    // Sources: rekey with absolute paths and update AST absolutePath fields.
    // Also build source_id_to_path for cross-file resolution.
    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Collect keys first to avoid borrow issues
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            // `remove` moves the value out — no clone of multi-MB ASTs.
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                // Update the AST absolutePath field to match, and resolve
                // absolutePath on nested ImportDirective nodes so that
                // goto-definition works regardless of CWD.
                if let Some(ast) = source_data.get_mut("ast") {
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // `id` is a JSON number; `Value::to_string()` renders it
                // (e.g. `3` → "3") to use as the map key.
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contracts: rekey with absolute paths
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    // Construct source_id_to_path for cross-file resolution
    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
794
/// Normalize forge `build --json --ast` output into the canonical shape.
///
/// Forge wraps data in arrays with metadata:
/// - `sources[path] = [{ source_file: { id, ast }, build_id, profile, version }]`
/// - `contracts[path][name] = [{ contract: { abi, evm, ... }, build_id, profile, version }]`
/// - `build_infos = [{ source_id_to_path: { ... } }]`
///
/// This unwraps to the canonical flat shape:
/// - `sources[path] = { id, ast }`
/// - `contracts[path][name] = { abi, evm, ... }`
/// - `source_id_to_path = { ... }`
///
/// Only the first entry of each wrapper array is used — when multiple
/// build profiles produce entries, later ones are ignored. Uses
/// `Value::take()` throughout to move data out without cloning.
pub fn normalize_forge_output(mut forge_output: Value) -> Value {
    let mut result = Map::new();

    // Move errors out
    let errors = forge_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Unwrap sources: [{ source_file: { id, ast } }] → { id, ast }
    let mut normalized_sources = Map::new();
    if let Some(sources) = forge_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        for (path, entries) in sources.iter_mut() {
            // Entries missing the expected wrapper shape are skipped silently.
            if let Some(arr) = entries.as_array_mut()
                && let Some(first) = arr.first_mut()
                && let Some(sf) = first.get_mut("source_file")
            {
                normalized_sources.insert(path.clone(), sf.take());
            }
        }
    }
    result.insert("sources".to_string(), Value::Object(normalized_sources));

    // Unwrap contracts: [{ contract: { ... } }] → { ... }
    let mut normalized_contracts = Map::new();
    if let Some(contracts) = forge_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        for (path, names) in contracts.iter_mut() {
            let mut path_contracts = Map::new();
            if let Some(names_obj) = names.as_object_mut() {
                for (name, entries) in names_obj.iter_mut() {
                    if let Some(arr) = entries.as_array_mut()
                        && let Some(first) = arr.first_mut()
                        && let Some(contract) = first.get_mut("contract")
                    {
                        path_contracts.insert(name.clone(), contract.take());
                    }
                }
            }
            // A path with no well-formed entries still gets an (empty) object.
            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
        }
    }
    result.insert("contracts".to_string(), Value::Object(normalized_contracts));

    // Extract source_id_to_path from build_infos (first build info only;
    // defaults to {} when absent).
    let source_id_to_path = forge_output
        .get_mut("build_infos")
        .and_then(|bi| bi.as_array_mut())
        .and_then(|arr| arr.first_mut())
        .and_then(|info| info.get_mut("source_id_to_path"))
        .map(Value::take)
        .unwrap_or_else(|| json!({}));
    result.insert("source_id_to_path".to_string(), source_id_to_path);

    Value::Object(result)
}
868
869/// Run solc for a file and return normalized output.
870///
871/// This is the main entry point used by the LSP. Reads the file source
872/// to detect the pragma version and resolve the correct solc binary.
873pub async fn solc_ast(
874    file_path: &str,
875    config: &FoundryConfig,
876    client: Option<&tower_lsp::Client>,
877) -> Result<Value, RunnerError> {
878    let remappings = resolve_remappings(config).await;
879
880    // Collect pragma constraints from the file and all its transitive imports
881    // so we pick a solc version that satisfies the entire dependency graph.
882    // This is a synchronous recursive FS crawl — run it on the blocking pool
883    // so we don't stall the tokio async runtime on large projects.
884    let file_abs = Path::new(file_path).to_path_buf();
885    let config_root = config.root.clone();
886    let remappings_clone = remappings.clone();
887    let pragmas = tokio::task::spawn_blocking(move || {
888        collect_import_pragmas(&file_abs, &config_root, &remappings_clone)
889    })
890    .await
891    .unwrap_or_default();
892    let constraint = tightest_constraint(&pragmas);
893    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
894
895    // Solc's import resolver fails when sources use absolute paths — it resolves
896    // 0 transitive imports, causing "No matching declaration found" errors for
897    // inherited members. Convert to a path relative to the project root so solc
898    // can properly resolve `src/`, `lib/`, and remapped imports.
899    let rel_path = Path::new(file_path)
900        .strip_prefix(&config.root)
901        .map(|p| p.to_string_lossy().into_owned())
902        .unwrap_or_else(|_| file_path.to_string());
903
904    let input = build_standard_json_input(&rel_path, &remappings, config);
905    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;
906
907    Ok(normalize_solc_output(raw_output, Some(&config.root)))
908}
909
/// Run solc for build diagnostics (same output, just used for error extraction).
///
/// Delegates to [`solc_ast`]: a single solc invocation already produces both
/// the AST and the compiler diagnostics, so there is no separate build mode.
pub async fn solc_build(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    solc_ast(file_path, config, client).await
}
918
919// ── Project-wide indexing ──────────────────────────────────────────────────
920
921/// Discover all Solidity source files under the project root.
922///
923/// Walks the entire project directory, including `test/`, `script/`, and
924/// any other user-authored directories. Only skips:
925/// - Directories listed in `config.libs` (default: `["lib"]`)
926/// - Directories in `DISCOVER_SKIP_DIRS` (build artifacts)
927/// - Hidden directories (starting with `.`)
928///
929/// Includes `.t.sol` (test) and `.s.sol` (script) files so that
930/// find-references and rename work across the full project.
931/// Discover the project's own source files by walking only the directories
932/// configured in `foundry.toml`: `src`, `test`, and `script`.
933///
934/// This mirrors how Forge discovers compilable files — it never walks
935/// directories outside these three (plus libs).  Stray directories like
936/// `certora/` or `hardhat/` are ignored, preventing broken imports from
937/// poisoning the solc batch.
938pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
939    discover_source_files_inner(config, false)
940}
941
942/// Discover only the `src` directory files (no test, no script).
943///
944/// Used as the seed set for phase-1 of two-phase project indexing, where
945/// we want to compile only the production source closure first for fast
946/// time-to-first-reference.
947pub fn discover_src_only_files(config: &FoundryConfig) -> Vec<PathBuf> {
948    let root = &config.root;
949    if !root.is_dir() {
950        return Vec::new();
951    }
952    let mut files = Vec::new();
953    let dir = root.join(&config.sources_dir);
954    if dir.is_dir() {
955        discover_recursive(&dir, &[], &mut files);
956    }
957    files.sort();
958    files
959}
960
961/// Discover the compilation closure seeded only from `src` files.
962///
963/// Like [`discover_compilation_closure`] but seeds only from
964/// [`discover_src_only_files`] instead of all project directories.
965/// This produces the minimal set of files needed to compile the
966/// production source code, excluding test and script files.
967pub fn discover_src_only_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
968    let seeds = discover_src_only_files(config);
969    let mut visited: HashSet<PathBuf> = HashSet::new();
970    let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
971
972    while let Some(file) = queue.pop_front() {
973        if !visited.insert(file.clone()) {
974            continue;
975        }
976        let source = match std::fs::read_to_string(&file) {
977            Ok(s) => s,
978            Err(_) => continue,
979        };
980        for imp in links::ts_find_imports(source.as_bytes()) {
981            if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
982                if abs.exists() && !visited.contains(&abs) {
983                    queue.push_back(abs);
984                }
985            }
986        }
987    }
988
989    let mut result: Vec<PathBuf> = visited.into_iter().collect();
990    result.sort();
991    result
992}
993
/// Discover source files including library directories.
///
/// When `fullProjectScan` is enabled, this includes files from the configured
/// `libs` directories (e.g. `dependencies/`, `node_modules/`).  Files with
/// incompatible pragma versions are handled by the error-driven retry loop
/// in [`solc_project_index_from_files`].
///
/// Same walk as [`discover_source_files`], just with `include_libs = true`.
pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, true)
}
1003
1004fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
1005    let root = &config.root;
1006    if !root.is_dir() {
1007        return Vec::new();
1008    }
1009
1010    let mut files = Vec::new();
1011    let no_skip: &[String] = &[];
1012
1013    // Walk only the configured source directories (src, test, script).
1014    // This matches Forge's behaviour: only files under these three directories
1015    // are considered project sources.  Directories like `certora/`, `hardhat/`,
1016    // etc. are never seeded.
1017    for dir_name in [&config.sources_dir, &config.test_dir, &config.script_dir] {
1018        let dir = root.join(dir_name);
1019        if dir.is_dir() {
1020            discover_recursive(&dir, no_skip, &mut files);
1021        }
1022    }
1023
1024    // When include_libs is requested, also walk lib directories.
1025    if include_libs {
1026        for lib_name in &config.libs {
1027            let lib_dir = root.join(lib_name);
1028            if lib_dir.is_dir() {
1029                discover_recursive(&lib_dir, no_skip, &mut files);
1030            }
1031        }
1032    }
1033
1034    files.sort();
1035    files
1036}
1037
1038/// Discover the true compilation closure by tracing imports from the
1039/// project's own source files (`src/`, `test/`, `script/`, and any other
1040/// non-lib top-level directories).
1041///
1042/// Starting from every `.sol` file returned by [`discover_source_files`]
1043/// (project files only, no lib dirs), this BFS-walks the import graph using
1044/// the provided remappings to resolve each `import` statement to an absolute
1045/// path.  It adds every reachable file — including lib files that are actually
1046/// imported — to the result set.
1047///
1048/// Files whose imports cannot be resolved (missing external deps that aren't
1049/// in this project) are silently skipped at that edge; the importer is still
1050/// included.
1051///
1052/// This produces a much smaller, self-consistent set than scanning all files
1053/// in lib directories, and avoids pulling in lib files that have broken
1054/// transitive deps (e.g. chainlink automation files that need `@eth-optimism`
1055/// which is not vendored here).
1056pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
1057    // Seed: all project source files (no lib dirs).
1058    let seeds = discover_source_files(config);
1059    let mut visited: HashSet<PathBuf> = HashSet::new();
1060    let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
1061
1062    while let Some(file) = queue.pop_front() {
1063        if !visited.insert(file.clone()) {
1064            continue;
1065        }
1066        let source = match std::fs::read_to_string(&file) {
1067            Ok(s) => s,
1068            Err(_) => continue,
1069        };
1070        for imp in links::ts_find_imports(source.as_bytes()) {
1071            if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1072                if abs.exists() && !visited.contains(&abs) {
1073                    queue.push_back(abs);
1074                }
1075            }
1076        }
1077    }
1078
1079    let mut result: Vec<PathBuf> = visited.into_iter().collect();
1080    result.sort();
1081    result
1082}
1083
/// Directories that are always skipped during source file discovery,
/// regardless of the `include_libs` setting.
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];

/// Recursively collect `.sol` files under `dir` into `files`.
///
/// Skips hidden directories (`.git`, `.github`, …), build-artifact
/// directories ([`DISCOVER_SKIP_DIRS`]), and any directory named in
/// `skip_libs`.  Unreadable directories are silently ignored.
fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = path.file_name().and_then(|n| n.to_str());
        if path.is_dir() {
            // Only directories with valid-UTF-8 names can be filtered;
            // non-UTF-8 names are always descended into (matches the
            // historical behaviour of this walker).
            let skip = name.is_some_and(|n| {
                n.starts_with('.')
                    || DISCOVER_SKIP_DIRS.contains(&n)
                    || skip_libs.iter().any(|lib| lib == n)
            });
            if !skip {
                discover_recursive(&path, skip_libs, files);
            }
        } else if name.is_some_and(|n| n.ends_with(".sol")) {
            files.push(path);
        }
    }
}
1118
/// Build a `--standard-json` input that compiles all given source files at once.
///
/// Each file is added as a source entry with a `urls` field (relative to project root).
/// This produces a single AST covering the entire project in one solc invocation.
///
/// Convenience wrapper over [`build_batch_standard_json_input_with_cache`]
/// with no in-memory content cache (solc reads every source from disk).
///
/// See [`build_standard_json_input`] for rationale on excluded settings.
pub fn build_batch_standard_json_input(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
) -> Value {
    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
}
1132
1133/// Build a batch standard-json input for solc.
1134///
1135/// When `content_cache` is provided, files whose URI string appears as a key
1136/// are included with `"content"` (in-memory source).  Files not in the cache
1137/// fall back to `"urls"` (solc reads from disk).
1138///
1139/// This allows the re-index after a rename to feed solc the updated import
1140/// paths from our text_cache without requiring the editor to have flushed
1141/// them to disk yet.
1142pub fn build_batch_standard_json_input_with_cache(
1143    source_files: &[PathBuf],
1144    remappings: &[String],
1145    config: &FoundryConfig,
1146    content_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1147) -> Value {
1148    let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
1149
1150    let mut settings = json!({
1151        "remappings": remappings,
1152        "outputSelection": {
1153            "*": {
1154                "*": contract_outputs,
1155                "": ["ast"]
1156            }
1157        }
1158    });
1159
1160    if config.via_ir {
1161        settings["viaIR"] = json!(true);
1162    }
1163    if let Some(ref evm_version) = config.evm_version {
1164        settings["evmVersion"] = json!(evm_version);
1165    }
1166
1167    let mut sources = serde_json::Map::new();
1168    for file in source_files {
1169        let rel_path = file
1170            .strip_prefix(&config.root)
1171            .map(|p| p.to_string_lossy().into_owned())
1172            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1173
1174        // Try to use cached content so solc doesn't need to read from disk.
1175        let cached_content = content_cache.and_then(|cache| {
1176            let uri = Url::from_file_path(file).ok()?;
1177            cache.get(uri.as_str()).map(|(_, c)| c.as_str())
1178        });
1179
1180        if let Some(content) = cached_content {
1181            sources.insert(rel_path, json!({ "content": content }));
1182        } else {
1183            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1184        }
1185    }
1186
1187    json!({
1188        "language": "Solidity",
1189        "sources": sources,
1190        "settings": settings
1191    })
1192}
1193
1194/// Build an AST-only batch standard-json input for sub-cache builds.
1195///
1196/// Unlike the full batch input, this omits all codegen-affecting settings
1197/// (`viaIR`, `evmVersion`, optimizer) and only requests the AST — no
1198/// `devdoc`, `userdoc`, or `evm.methodIdentifiers`.  This is significantly
1199/// faster because solc skips type-checking contract outputs and codegen.
1200///
1201/// Sub-caches only need the AST for cross-file reference lookup (node IDs,
1202/// `referencedDeclaration`, source locations).
1203pub fn build_batch_standard_json_input_ast_only(
1204    source_files: &[PathBuf],
1205    remappings: &[String],
1206    root: &Path,
1207) -> Value {
1208    let settings = json!({
1209        "remappings": remappings,
1210        "outputSelection": {
1211            "*": {
1212                "": ["ast"]
1213            }
1214        }
1215    });
1216
1217    let mut sources = serde_json::Map::new();
1218    for file in source_files {
1219        let rel_path = file
1220            .strip_prefix(root)
1221            .map(|p| p.to_string_lossy().into_owned())
1222            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1223        sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1224    }
1225
1226    json!({
1227        "language": "Solidity",
1228        "sources": sources,
1229        "settings": settings
1230    })
1231}
1232
1233/// Build a parse-only standard-json input (``stopAfter: "parsing"``).
1234///
1235/// Unlike the full batch input this mode stops before import resolution and
1236/// type-checking.  That means:
1237///
1238/// * No version 5333 errors cascade from imported incompatible files — the
1239///   compatible files are NOT fetched from disk as imports.
1240/// * The resulting ASTs contain all declaration nodes and local
1241///   ``referencedDeclaration`` IDs but **not** cross-file resolved IDs.
1242/// * Only ``ast`` output is requested; contract outputs (abi, gas …) are
1243///   omitted because they require type-checking.
1244///
1245/// This is used for the compatible-file batch in the mixed-version project
1246/// index so we can get parse-time ASTs for all project/lib files that satisfy
1247/// the project pragma, without being blocked by imports into incompatible lib
1248/// files.
1249pub fn build_parse_only_json_input(
1250    source_files: &[PathBuf],
1251    remappings: &[String],
1252    config: &FoundryConfig,
1253) -> Value {
1254    let settings = json!({
1255        "stopAfter": "parsing",
1256        "remappings": remappings,
1257        "outputSelection": {
1258            "*": {
1259                "": ["ast"]
1260            }
1261        }
1262    });
1263
1264    let mut sources = serde_json::Map::new();
1265    for file in source_files {
1266        let rel_path = file
1267            .strip_prefix(&config.root)
1268            .map(|p| p.to_string_lossy().into_owned())
1269            .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1270        sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1271    }
1272
1273    json!({
1274        "language": "Solidity",
1275        "sources": sources,
1276        "settings": settings
1277    })
1278}
1279
1280/// Run a project-wide solc compilation and return normalized output.
1281///
1282/// Discovers all source files, compiles them in a single `solc --standard-json`
1283/// invocation, and returns the normalized AST data.
1284///
1285/// When `text_cache` is provided, files whose URI string appears as a key
1286/// are fed to solc via `"content"` (in-memory) rather than `"urls"` (disk).
1287/// This ensures the re-index after a rename uses the updated import paths
1288/// from our cache, even if the editor hasn't flushed them to disk yet.
1289pub async fn solc_project_index(
1290    config: &FoundryConfig,
1291    client: Option<&tower_lsp::Client>,
1292    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1293) -> Result<Value, RunnerError> {
1294    // Resolve remappings first — needed for import tracing.
1295    let remappings = resolve_remappings(config).await;
1296
1297    // Trace imports from project source files to find the true compilation
1298    // closure.  This avoids pulling in lib files that are never imported by
1299    // the project (e.g. chainlink automation files that need @eth-optimism,
1300    // which isn't vendored here).
1301    let source_files = discover_compilation_closure(config, &remappings);
1302    if source_files.is_empty() {
1303        return Err(RunnerError::CommandError(std::io::Error::other(
1304            "no source files found for project index",
1305        )));
1306    }
1307
1308    solc_project_index_from_files(config, client, text_cache, &source_files).await
1309}
1310
1311/// AST-only project index for sub-cache builds.
1312///
1313/// Identical to [`solc_project_index`] but requests only AST output —
1314/// no `devdoc`, `userdoc`, or `evm.methodIdentifiers`.  Also omits
1315/// `viaIR`, `evmVersion`, and optimizer settings since they only affect
1316/// codegen (which is skipped when no contract outputs are requested).
1317///
1318/// This is significantly faster because solc skips all codegen work.
1319/// "Stack too deep" errors cannot occur in AST-only mode.
1320pub async fn solc_project_index_ast_only(
1321    config: &FoundryConfig,
1322    client: Option<&tower_lsp::Client>,
1323) -> Result<Value, RunnerError> {
1324    let remappings = resolve_remappings(config).await;
1325    let source_files = discover_compilation_closure(config, &remappings);
1326    if source_files.is_empty() {
1327        return Err(RunnerError::CommandError(std::io::Error::other(
1328            "no source files found for project index",
1329        )));
1330    }
1331    solc_project_index_from_files_ast_only(config, client, &source_files).await
1332}
1333
/// AST-only compile over a list of source files.
///
/// Like [`solc_project_index_from_files`] but uses
/// [`build_batch_standard_json_input_ast_only`] — no codegen settings,
/// no contract outputs.
///
/// Files whose pragma rejects the project's pinned solc are compiled
/// individually with their own binaries and merged into the batch result
/// via [`merge_normalized_outputs`].
async fn solc_project_index_from_files_ast_only(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    // Fail fast on an empty batch rather than invoking solc with no sources.
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for AST-only project index",
        )));
    }

    let remappings = resolve_remappings(config).await;

    // Version pinned in foundry.toml, if any.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
    // With a pinned version use it verbatim; otherwise derive a constraint
    // from the first source file that declares a pragma, so a matching
    // binary can be resolved (and auto-installed) downstream.
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // Pre-scan pragmas to separate compatible vs incompatible files.
    // Incompatible files must be excluded up front: a single version
    // mismatch (error 5333) makes solc emit no ASTs for the whole batch.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            // Unreadable files and files without a pragma are assumed
            // compatible; the batch compile will surface any real error.
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        // No pinned version: everything goes into one batch.
        (source_files.to_vec(), Vec::new())
    };

    // Surface the partition sizes to the client log (best-effort).
    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // Batch-compile the compatible set; start from an empty normalized
    // shape when there is nothing to batch.
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input =
            build_batch_standard_json_input_ast_only(&compatible_files, &remappings, &config.root);
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    if incompatible_files.is_empty() {
        return Ok(result);
    }

    // Compile incompatible files individually with their own solc versions.
    // Best-effort: a file that still fails is simply left out of the merge.
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));
        let file_binary = resolve_solc_binary(config, pragma.as_ref(), client).await;
        let input =
            build_batch_standard_json_input_ast_only(&[file.clone()], &remappings, &config.root);
        if let Ok(raw) = run_solc(&file_binary, &input, &config.root).await {
            let normalized = normalize_solc_output(raw, Some(&config.root));
            merge_normalized_outputs(&mut result, normalized);
        }
    }

    // Final summary for the client log (best-effort).
    if let Some(c) = client {
        let total = result
            .get("sources")
            .and_then(|s| s.as_object())
            .map_or(0, |obj| obj.len());
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: compiled {} files ({} needed different solc version)",
                total,
                incompatible_files.len(),
            ),
        )
        .await;
    }

    Ok(result)
}
1449
1450/// Run a scoped project-index compile over a selected file list.
1451///
1452/// This is intended for aggressive incremental reindex strategies where only
1453/// a dependency-closure subset should be recompiled.
1454pub async fn solc_project_index_scoped(
1455    config: &FoundryConfig,
1456    client: Option<&tower_lsp::Client>,
1457    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1458    source_files: &[PathBuf],
1459) -> Result<Value, RunnerError> {
1460    if source_files.is_empty() {
1461        return Err(RunnerError::CommandError(std::io::Error::other(
1462            "no source files provided for scoped project index",
1463        )));
1464    }
1465
1466    solc_project_index_from_files(config, client, text_cache, source_files).await
1467}
1468
/// Extract source file paths from solc error code 5333 ("Source file requires
/// different compiler version") errors.  Returns the relative paths exactly
/// as they appear in `sourceLocation.file`.
#[cfg(test)]
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    let errors = solc_output
        .get("errors")
        .and_then(|e| e.as_array())
        .map(|a| a.as_slice())
        .unwrap_or_default();
    errors
        .iter()
        .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(|f| f.as_str())
        })
        .map(str::to_string)
        .collect()
}
1490
/// Extract source file paths from solc error code 6275 ("Source not found")
/// errors.  Returns the relative paths of source files whose imports failed.
#[cfg(test)]
#[allow(dead_code)]
fn extract_import_error_files(solc_output: &Value) -> HashSet<String> {
    let mut files = HashSet::new();
    let errors = solc_output.get("errors").and_then(|e| e.as_array());
    for err in errors.into_iter().flatten() {
        if err.get("errorCode").and_then(|c| c.as_str()) != Some("6275") {
            continue;
        }
        if let Some(file) = err
            .get("sourceLocation")
            .and_then(|sl| sl.get("file"))
            .and_then(|f| f.as_str())
        {
            files.insert(file.to_string());
        }
    }
    files
}
1512
/// Build a reverse-import closure: given a set of files to exclude, find all
/// files that transitively import any of them.  Those files must also be
/// excluded because solc will still resolve their imports from disk and fail.
///
/// Returns the full exclusion set (seed files + their transitive importers).
#[cfg(test)]
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // importers_of[p] = every file that directly imports p (reverse edges).
    let mut importers_of: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
    for importer in source_files {
        let Ok(bytes) = std::fs::read(importer) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(target) =
                resolve_import_to_abs(project_root, importer, &imp.path, remappings)
            {
                importers_of
                    .entry(target)
                    .or_default()
                    .insert(importer.clone());
            }
        }
    }

    // Flood-fill backwards from the excluded seeds; traversal order is
    // irrelevant since the result is a set.
    let mut excluded: HashSet<PathBuf> = exclude_abs.clone();
    let mut frontier: Vec<PathBuf> = exclude_abs.iter().cloned().collect();
    while let Some(node) = frontier.pop() {
        for importer in importers_of.get(&node).into_iter().flatten() {
            if excluded.insert(importer.clone()) {
                frontier.push(importer.clone());
            }
        }
    }
    excluded
}
1561
/// Merge two normalized solc outputs at the `Value` level.
///
/// Combines `sources`, `contracts`, and `source_id_to_path` from `other`
/// into `base`.  Source IDs in `other` are remapped (offset past base's
/// maximum) to avoid collisions with `base`.  `other`'s `errors` are
/// deliberately NOT merged — see the note at the bottom.
fn merge_normalized_outputs(base: &mut Value, other: Value) {
    // Merge sources (keyed by absolute path — no collisions across partitions).
    if let (Some(base_sources), Some(other_sources)) = (
        base.get_mut("sources").and_then(|s| s.as_object_mut()),
        other.get("sources").and_then(|s| s.as_object()),
    ) {
        // Find the max source ID in base so we can remap other's IDs.
        // NOTE: despite the name this is max + 1, i.e. the additive offset
        // applied to each of other's IDs; 0 when base has no sources yet.
        let max_base_id = base_sources
            .values()
            .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
            .max()
            .map(|m| m + 1)
            .unwrap_or(0);

        // Collect other's id -> path mappings for source_id_to_path.
        let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();

        for (path, mut source_data) in other_sources.clone() {
            // Remap the source ID to avoid collisions.
            if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
                let new_id = id + max_base_id;
                // `unwrap()` is safe: `get("id")` succeeded just above, so
                // `source_data` must be a JSON object.
                source_data
                    .as_object_mut()
                    .unwrap()
                    .insert("id".to_string(), json!(new_id));
                remapped_id_to_path.push((new_id.to_string(), path.clone()));
            }
            // Entries without an "id" are inserted unchanged and get no
            // source_id_to_path entry.
            base_sources.insert(path, source_data);
        }

        // Merge source_id_to_path.
        if let Some(base_id_map) = base
            .get_mut("source_id_to_path")
            .and_then(|m| m.as_object_mut())
        {
            for (id, path) in remapped_id_to_path {
                base_id_map.insert(id, json!(path));
            }
        }
    }

    // Merge contracts (keyed by path; `other` wins on a duplicate key).
    if let (Some(base_contracts), Some(other_contracts)) = (
        base.get_mut("contracts").and_then(|c| c.as_object_mut()),
        other.get("contracts").and_then(|c| c.as_object()),
    ) {
        for (path, contract_data) in other_contracts {
            base_contracts.insert(path.clone(), contract_data.clone());
        }
    }

    // Don't merge errors — the retry errors from incompatible files are noise.
    // The base already has the clean errors from the successful compilation.
}
1621
1622async fn solc_project_index_from_files(
1623    config: &FoundryConfig,
1624    client: Option<&tower_lsp::Client>,
1625    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1626    source_files: &[PathBuf],
1627) -> Result<Value, RunnerError> {
1628    if source_files.is_empty() {
1629        return Err(RunnerError::CommandError(std::io::Error::other(
1630            "no source files found for project index",
1631        )));
1632    }
1633
1634    let remappings = resolve_remappings(config).await;
1635
1636    // Resolve the project's solc version from foundry.toml.
1637    let project_version: Option<SemVer> =
1638        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
1639
1640    // When no version is pinned in foundry.toml, derive a constraint from the
1641    // source files' pragmas so that svm can auto-install a matching binary.
1642    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
1643        Some(PragmaConstraint::Exact(v.clone()))
1644    } else {
1645        source_files.iter().find_map(|f| {
1646            std::fs::read_to_string(f)
1647                .ok()
1648                .and_then(|src| parse_pragma(&src))
1649        })
1650    };
1651    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
1652
1653    // -- Pre-scan pragmas to separate compatible vs incompatible files. --
1654    //
1655    // Solc emits ZERO ASTs when any file in the batch has a version error
1656    // (5333).  We must exclude incompatible files before compiling so the
1657    // batch produces full AST output for all compatible files.
1658    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
1659        let mut compat = Vec::with_capacity(source_files.len());
1660        let mut incompat = Vec::new();
1661        for file in source_files {
1662            let is_compatible = std::fs::read_to_string(file)
1663                .ok()
1664                .and_then(|src| parse_pragma(&src))
1665                .map(|pragma| version_satisfies(ver, &pragma))
1666                // Files without a pragma are assumed compatible.
1667                .unwrap_or(true);
1668            if is_compatible {
1669                compat.push(file.clone());
1670            } else {
1671                incompat.push(file.clone());
1672            }
1673        }
1674        (compat, incompat)
1675    } else {
1676        // No project version configured — compile everything in one batch.
1677        (source_files.to_vec(), Vec::new())
1678    };
1679
1680    if !incompatible_files.is_empty() {
1681        if let Some(c) = client {
1682            c.log_message(
1683                tower_lsp::lsp_types::MessageType::INFO,
1684                format!(
1685                    "project index: {} compatible, {} incompatible with solc {}",
1686                    compatible_files.len(),
1687                    incompatible_files.len(),
1688                    project_version
1689                        .as_ref()
1690                        .map(|v| v.to_string())
1691                        .unwrap_or_default(),
1692                ),
1693            )
1694            .await;
1695        }
1696    }
1697
1698    // -- Full batch compile of compatible files. --
1699    //
1700    // The source file list comes from discover_compilation_closure which only
1701    // includes files reachable via imports from src/test/script — so all files
1702    // in the batch are version-compatible and their transitive imports resolve.
1703    // A full (non-parse-only) compile is required so that cross-file
1704    // referencedDeclaration IDs are populated for goto-references to work.
1705    let mut result = if compatible_files.is_empty() {
1706        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
1707    } else {
1708        let input = build_batch_standard_json_input_with_cache(
1709            &compatible_files,
1710            &remappings,
1711            config,
1712            text_cache,
1713        );
1714        let raw = run_solc(&solc_binary, &input, &config.root).await?;
1715        normalize_solc_output(raw, Some(&config.root))
1716    };
1717
1718    let batch_source_count = result
1719        .get("sources")
1720        .and_then(|s| s.as_object())
1721        .map_or(0, |obj| obj.len());
1722
1723    if incompatible_files.is_empty() {
1724        return Ok(result);
1725    }
1726
1727    if let Some(c) = client {
1728        // Log first few errors from the batch to understand why sources=0.
1729        let batch_errors: Vec<String> = result
1730            .get("errors")
1731            .and_then(|e| e.as_array())
1732            .map(|arr| {
1733                arr.iter()
1734                    .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
1735                    .take(3)
1736                    .filter_map(|e| {
1737                        let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
1738                        let file = e
1739                            .get("sourceLocation")
1740                            .and_then(|sl| sl.get("file"))
1741                            .and_then(|f| f.as_str())
1742                            .unwrap_or("?");
1743                        Some(format!("{file}: {msg}"))
1744                    })
1745                    .collect()
1746            })
1747            .unwrap_or_default();
1748
1749        c.log_message(
1750            tower_lsp::lsp_types::MessageType::INFO,
1751            format!(
1752                "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
1753                batch_source_count,
1754                incompatible_files.len(),
1755                if batch_errors.is_empty() {
1756                    String::new()
1757                } else {
1758                    format!(" [first errors: {}]", batch_errors.join("; "))
1759                },
1760            ),
1761        )
1762        .await;
1763    }
1764
1765    // -- Individually compile incompatible files with their matching solc. --
1766    let mut compiled = 0usize;
1767    let mut skipped = 0usize;
1768    for file in &incompatible_files {
1769        let pragma = std::fs::read_to_string(file)
1770            .ok()
1771            .and_then(|src| parse_pragma(&src));
1772
1773        let Some(file_constraint) = pragma else {
1774            skipped += 1;
1775            continue;
1776        };
1777
1778        let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
1779        let input = build_batch_standard_json_input_with_cache(
1780            &[file.clone()],
1781            &remappings,
1782            config,
1783            text_cache,
1784        );
1785        match run_solc(&file_binary, &input, &config.root).await {
1786            Ok(raw) => {
1787                let normalized = normalize_solc_output(raw, Some(&config.root));
1788                merge_normalized_outputs(&mut result, normalized);
1789                compiled += 1;
1790            }
1791            Err(e) => {
1792                if let Some(c) = client {
1793                    c.log_message(
1794                        tower_lsp::lsp_types::MessageType::WARNING,
1795                        format!(
1796                            "project index: incompatible file {} failed: {e}",
1797                            file.display(),
1798                        ),
1799                    )
1800                    .await;
1801                }
1802                skipped += 1;
1803            }
1804        }
1805    }
1806
1807    if let Some(c) = client {
1808        c.log_message(
1809            tower_lsp::lsp_types::MessageType::INFO,
1810            format!(
1811                "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
1812            ),
1813        )
1814        .await;
1815    }
1816
1817    Ok(result)
1818}
1819
1820#[cfg(test)]
1821mod tests {
1822    use super::*;
1823
1824    #[test]
1825    fn test_normalize_solc_sources() {
1826        let solc_output = json!({
1827            "sources": {
1828                "src/Foo.sol": {
1829                    "id": 0,
1830                    "ast": {
1831                        "nodeType": "SourceUnit",
1832                        "absolutePath": "src/Foo.sol",
1833                        "id": 100
1834                    }
1835                },
1836                "src/Bar.sol": {
1837                    "id": 1,
1838                    "ast": {
1839                        "nodeType": "SourceUnit",
1840                        "absolutePath": "src/Bar.sol",
1841                        "id": 200
1842                    }
1843                }
1844            },
1845            "contracts": {},
1846            "errors": []
1847        });
1848
1849        let normalized = normalize_solc_output(solc_output, None);
1850
1851        // Sources kept in solc-native shape: path -> { id, ast }
1852        let sources = normalized.get("sources").unwrap().as_object().unwrap();
1853        assert_eq!(sources.len(), 2);
1854
1855        let foo = sources.get("src/Foo.sol").unwrap();
1856        assert_eq!(foo.get("id").unwrap(), 0);
1857        assert_eq!(
1858            foo.get("ast")
1859                .unwrap()
1860                .get("nodeType")
1861                .unwrap()
1862                .as_str()
1863                .unwrap(),
1864            "SourceUnit"
1865        );
1866
1867        // Check source_id_to_path constructed
1868        let id_to_path = normalized
1869            .get("source_id_to_path")
1870            .unwrap()
1871            .as_object()
1872            .unwrap();
1873        assert_eq!(id_to_path.len(), 2);
1874    }
1875
1876    #[test]
1877    fn test_normalize_solc_contracts() {
1878        let solc_output = json!({
1879            "sources": {},
1880            "contracts": {
1881                "src/Foo.sol": {
1882                    "Foo": {
1883                        "abi": [{"type": "function", "name": "bar"}],
1884                        "evm": {
1885                            "methodIdentifiers": {
1886                                "bar(uint256)": "abcd1234"
1887                            }
1888                        }
1889                    }
1890                }
1891            },
1892            "errors": []
1893        });
1894
1895        let normalized = normalize_solc_output(solc_output, None);
1896
1897        // Contracts kept in solc-native shape: path -> name -> { abi, evm, ... }
1898        let contracts = normalized.get("contracts").unwrap().as_object().unwrap();
1899        let foo_contracts = contracts.get("src/Foo.sol").unwrap().as_object().unwrap();
1900        let foo = foo_contracts.get("Foo").unwrap();
1901
1902        let method_ids = foo
1903            .get("evm")
1904            .unwrap()
1905            .get("methodIdentifiers")
1906            .unwrap()
1907            .as_object()
1908            .unwrap();
1909        assert_eq!(
1910            method_ids.get("bar(uint256)").unwrap().as_str().unwrap(),
1911            "abcd1234"
1912        );
1913    }
1914
1915    #[test]
1916    fn test_normalize_solc_errors_passthrough() {
1917        let solc_output = json!({
1918            "sources": {},
1919            "contracts": {},
1920            "errors": [{
1921                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
1922                "type": "Warning",
1923                "component": "general",
1924                "severity": "warning",
1925                "errorCode": "2394",
1926                "message": "test warning",
1927                "formattedMessage": "Warning: test warning"
1928            }]
1929        });
1930
1931        let normalized = normalize_solc_output(solc_output, None);
1932
1933        let errors = normalized.get("errors").unwrap().as_array().unwrap();
1934        assert_eq!(errors.len(), 1);
1935        assert_eq!(
1936            errors[0].get("errorCode").unwrap().as_str().unwrap(),
1937            "2394"
1938        );
1939    }
1940
1941    #[test]
1942    fn test_normalize_empty_solc_output() {
1943        let solc_output = json!({
1944            "sources": {},
1945            "contracts": {}
1946        });
1947
1948        let normalized = normalize_solc_output(solc_output, None);
1949
1950        assert!(
1951            normalized
1952                .get("sources")
1953                .unwrap()
1954                .as_object()
1955                .unwrap()
1956                .is_empty()
1957        );
1958        assert!(
1959            normalized
1960                .get("contracts")
1961                .unwrap()
1962                .as_object()
1963                .unwrap()
1964                .is_empty()
1965        );
1966        assert_eq!(
1967            normalized.get("errors").unwrap().as_array().unwrap().len(),
1968            0
1969        );
1970        assert!(
1971            normalized
1972                .get("source_id_to_path")
1973                .unwrap()
1974                .as_object()
1975                .unwrap()
1976                .is_empty()
1977        );
1978    }
1979
1980    #[test]
1981    fn test_build_standard_json_input() {
1982        let config = FoundryConfig::default();
1983        let input = build_standard_json_input(
1984            "/path/to/Foo.sol",
1985            &[
1986                "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
1987                "forge-std/=lib/forge-std/src/".to_string(),
1988            ],
1989            &config,
1990        );
1991
1992        let sources = input.get("sources").unwrap().as_object().unwrap();
1993        assert!(sources.contains_key("/path/to/Foo.sol"));
1994
1995        let settings = input.get("settings").unwrap();
1996        let remappings = settings.get("remappings").unwrap().as_array().unwrap();
1997        assert_eq!(remappings.len(), 2);
1998
1999        let output_sel = settings.get("outputSelection").unwrap();
2000        assert!(output_sel.get("*").is_some());
2001
2002        // Default config: no optimizer, no viaIR, no evmVersion
2003        assert!(settings.get("optimizer").is_none());
2004        assert!(settings.get("viaIR").is_none());
2005        assert!(settings.get("evmVersion").is_none());
2006
2007        // gasEstimates is never requested — forces full EVM codegen (88% of compile time)
2008        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
2009        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
2010        assert!(!output_names.contains(&"evm.gasEstimates"));
2011        assert!(!output_names.contains(&"abi")); // ABI is intentionally omitted — no consumer
2012        assert!(output_names.contains(&"devdoc"));
2013        assert!(output_names.contains(&"userdoc"));
2014        assert!(output_names.contains(&"evm.methodIdentifiers"));
2015    }
2016
2017    #[test]
2018    fn test_build_standard_json_input_with_config() {
2019        let config = FoundryConfig {
2020            optimizer: true,
2021            optimizer_runs: 9999999,
2022            via_ir: true,
2023            evm_version: Some("osaka".to_string()),
2024            ..Default::default()
2025        };
2026        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config);
2027
2028        let settings = input.get("settings").unwrap();
2029
2030        // Optimizer is never passed — adds ~3s and doesn't affect AST/ABI/docs
2031        assert!(settings.get("optimizer").is_none());
2032
2033        // viaIR IS passed when config has it (some contracts require it to compile)
2034        assert!(settings.get("viaIR").unwrap().as_bool().unwrap());
2035
2036        // gasEstimates is never requested regardless of viaIR
2037        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
2038        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
2039        assert!(!output_names.contains(&"evm.gasEstimates"));
2040
2041        // EVM version
2042        assert_eq!(
2043            settings.get("evmVersion").unwrap().as_str().unwrap(),
2044            "osaka"
2045        );
2046    }
2047
2048    #[tokio::test]
2049    async fn test_resolve_solc_binary_default() {
2050        let config = FoundryConfig::default();
2051        let binary = resolve_solc_binary(&config, None, None).await;
2052        assert_eq!(binary, PathBuf::from("solc"));
2053    }
2054
2055    #[test]
2056    fn test_parse_pragma_exact() {
2057        let source = "// SPDX\npragma solidity 0.8.26;\n";
2058        assert_eq!(
2059            parse_pragma(source),
2060            Some(PragmaConstraint::Exact(SemVer {
2061                major: 0,
2062                minor: 8,
2063                patch: 26
2064            }))
2065        );
2066    }
2067
2068    #[test]
2069    fn test_parse_pragma_caret() {
2070        let source = "pragma solidity ^0.8.0;\n";
2071        assert_eq!(
2072            parse_pragma(source),
2073            Some(PragmaConstraint::Caret(SemVer {
2074                major: 0,
2075                minor: 8,
2076                patch: 0
2077            }))
2078        );
2079    }
2080
2081    #[test]
2082    fn test_parse_pragma_gte() {
2083        let source = "pragma solidity >=0.8.0;\n";
2084        assert_eq!(
2085            parse_pragma(source),
2086            Some(PragmaConstraint::Gte(SemVer {
2087                major: 0,
2088                minor: 8,
2089                patch: 0
2090            }))
2091        );
2092    }
2093
2094    #[test]
2095    fn test_parse_pragma_range() {
2096        let source = "pragma solidity >=0.6.2 <0.9.0;\n";
2097        assert_eq!(
2098            parse_pragma(source),
2099            Some(PragmaConstraint::Range(
2100                SemVer {
2101                    major: 0,
2102                    minor: 6,
2103                    patch: 2
2104                },
2105                SemVer {
2106                    major: 0,
2107                    minor: 9,
2108                    patch: 0
2109                },
2110            ))
2111        );
2112    }
2113
2114    #[test]
2115    fn test_parse_pragma_none() {
2116        let source = "contract Foo {}\n";
2117        assert_eq!(parse_pragma(source), None);
2118    }
2119
2120    #[test]
2121    fn test_version_satisfies_exact() {
2122        let v = SemVer {
2123            major: 0,
2124            minor: 8,
2125            patch: 26,
2126        };
2127        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
2128        assert!(!version_satisfies(
2129            &SemVer {
2130                major: 0,
2131                minor: 8,
2132                patch: 25
2133            },
2134            &PragmaConstraint::Exact(v)
2135        ));
2136    }
2137
2138    #[test]
2139    fn test_version_satisfies_caret() {
2140        let constraint = PragmaConstraint::Caret(SemVer {
2141            major: 0,
2142            minor: 8,
2143            patch: 0,
2144        });
2145        assert!(version_satisfies(
2146            &SemVer {
2147                major: 0,
2148                minor: 8,
2149                patch: 0
2150            },
2151            &constraint
2152        ));
2153        assert!(version_satisfies(
2154            &SemVer {
2155                major: 0,
2156                minor: 8,
2157                patch: 26
2158            },
2159            &constraint
2160        ));
2161        // 0.9.0 is outside ^0.8.0
2162        assert!(!version_satisfies(
2163            &SemVer {
2164                major: 0,
2165                minor: 9,
2166                patch: 0
2167            },
2168            &constraint
2169        ));
2170        // 0.7.0 is below
2171        assert!(!version_satisfies(
2172            &SemVer {
2173                major: 0,
2174                minor: 7,
2175                patch: 0
2176            },
2177            &constraint
2178        ));
2179    }
2180
2181    #[test]
2182    fn test_version_satisfies_gte() {
2183        let constraint = PragmaConstraint::Gte(SemVer {
2184            major: 0,
2185            minor: 8,
2186            patch: 0,
2187        });
2188        assert!(version_satisfies(
2189            &SemVer {
2190                major: 0,
2191                minor: 8,
2192                patch: 0
2193            },
2194            &constraint
2195        ));
2196        assert!(version_satisfies(
2197            &SemVer {
2198                major: 0,
2199                minor: 9,
2200                patch: 0
2201            },
2202            &constraint
2203        ));
2204        assert!(!version_satisfies(
2205            &SemVer {
2206                major: 0,
2207                minor: 7,
2208                patch: 0
2209            },
2210            &constraint
2211        ));
2212    }
2213
2214    #[test]
2215    fn test_version_satisfies_range() {
2216        let constraint = PragmaConstraint::Range(
2217            SemVer {
2218                major: 0,
2219                minor: 6,
2220                patch: 2,
2221            },
2222            SemVer {
2223                major: 0,
2224                minor: 9,
2225                patch: 0,
2226            },
2227        );
2228        assert!(version_satisfies(
2229            &SemVer {
2230                major: 0,
2231                minor: 6,
2232                patch: 2
2233            },
2234            &constraint
2235        ));
2236        assert!(version_satisfies(
2237            &SemVer {
2238                major: 0,
2239                minor: 8,
2240                patch: 26
2241            },
2242            &constraint
2243        ));
2244        // 0.9.0 is the upper bound (exclusive)
2245        assert!(!version_satisfies(
2246            &SemVer {
2247                major: 0,
2248                minor: 9,
2249                patch: 0
2250            },
2251            &constraint
2252        ));
2253        assert!(!version_satisfies(
2254            &SemVer {
2255                major: 0,
2256                minor: 6,
2257                patch: 1
2258            },
2259            &constraint
2260        ));
2261    }
2262
2263    #[test]
2264    fn test_find_matching_version() {
2265        let installed = vec![
2266            SemVer {
2267                major: 0,
2268                minor: 8,
2269                patch: 0,
2270            },
2271            SemVer {
2272                major: 0,
2273                minor: 8,
2274                patch: 20,
2275            },
2276            SemVer {
2277                major: 0,
2278                minor: 8,
2279                patch: 26,
2280            },
2281            SemVer {
2282                major: 0,
2283                minor: 8,
2284                patch: 33,
2285            },
2286        ];
2287        // ^0.8.20 should pick latest: 0.8.33
2288        let constraint = PragmaConstraint::Caret(SemVer {
2289            major: 0,
2290            minor: 8,
2291            patch: 20,
2292        });
2293        let matched = find_matching_version(&constraint, &installed);
2294        assert_eq!(
2295            matched,
2296            Some(SemVer {
2297                major: 0,
2298                minor: 8,
2299                patch: 33
2300            })
2301        );
2302
2303        // exact 0.8.20
2304        let constraint = PragmaConstraint::Exact(SemVer {
2305            major: 0,
2306            minor: 8,
2307            patch: 20,
2308        });
2309        let matched = find_matching_version(&constraint, &installed);
2310        assert_eq!(
2311            matched,
2312            Some(SemVer {
2313                major: 0,
2314                minor: 8,
2315                patch: 20
2316            })
2317        );
2318
2319        // exact 0.8.15 — not installed
2320        let constraint = PragmaConstraint::Exact(SemVer {
2321            major: 0,
2322            minor: 8,
2323            patch: 15,
2324        });
2325        let matched = find_matching_version(&constraint, &installed);
2326        assert_eq!(matched, None);
2327    }
2328
2329    #[test]
2330    fn test_list_installed_versions() {
2331        // Just verify it doesn't panic — actual versions depend on system
2332        let versions = list_installed_versions();
2333        // Versions should be sorted
2334        for w in versions.windows(2) {
2335            assert!(w[0] <= w[1]);
2336        }
2337    }
2338
2339    // -------------------------------------------------------------------
2340    // Tests for mixed-version retry helpers
2341    // -------------------------------------------------------------------
2342
2343    #[test]
2344    fn test_extract_version_error_files_basic() {
2345        let output = json!({
2346            "errors": [
2347                {
2348                    "errorCode": "5333",
2349                    "severity": "error",
2350                    "message": "Source file requires different compiler version",
2351                    "sourceLocation": {
2352                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
2353                        "start": 32,
2354                        "end": 58
2355                    }
2356                },
2357                {
2358                    "errorCode": "5333",
2359                    "severity": "error",
2360                    "message": "Source file requires different compiler version",
2361                    "sourceLocation": {
2362                        "file": "lib/old-lib/src/Legacy.sol",
2363                        "start": 32,
2364                        "end": 58
2365                    }
2366                },
2367                {
2368                    "errorCode": "9574",
2369                    "severity": "error",
2370                    "message": "Some other error",
2371                    "sourceLocation": {
2372                        "file": "src/Main.sol",
2373                        "start": 100,
2374                        "end": 200
2375                    }
2376                }
2377            ]
2378        });
2379
2380        let files = extract_version_error_files(&output);
2381        assert_eq!(files.len(), 2);
2382        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
2383        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
2384        // Non-5333 error files should NOT be included.
2385        assert!(!files.contains("src/Main.sol"));
2386    }
2387
2388    #[test]
2389    fn test_extract_version_error_files_empty() {
2390        let output = json!({
2391            "errors": []
2392        });
2393        assert!(extract_version_error_files(&output).is_empty());
2394
2395        // No errors key at all.
2396        let output = json!({});
2397        assert!(extract_version_error_files(&output).is_empty());
2398    }
2399
2400    #[test]
2401    fn test_extract_version_error_files_no_source_location() {
2402        let output = json!({
2403            "errors": [
2404                {
2405                    "errorCode": "5333",
2406                    "severity": "error",
2407                    "message": "Source file requires different compiler version"
2408                    // No sourceLocation field.
2409                }
2410            ]
2411        });
2412        assert!(extract_version_error_files(&output).is_empty());
2413    }
2414
2415    #[test]
2416    fn test_extract_version_error_files_dedup() {
2417        let output = json!({
2418            "errors": [
2419                {
2420                    "errorCode": "5333",
2421                    "severity": "error",
2422                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
2423                },
2424                {
2425                    "errorCode": "5333",
2426                    "severity": "error",
2427                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
2428                }
2429            ]
2430        });
2431        let files = extract_version_error_files(&output);
2432        assert_eq!(files.len(), 1);
2433        assert!(files.contains("lib/same.sol"));
2434    }
2435
2436    #[test]
2437    fn test_reverse_import_closure_simple() {
2438        // Create a temp directory with three files:
2439        //   a.sol imports b.sol
2440        //   b.sol imports c.sol
2441        //   d.sol (standalone)
2442        //
2443        // If c.sol is excluded, the closure should include: c.sol, b.sol, a.sol
2444        // (b imports c, a imports b — both are transitive importers of c).
2445        let dir = tempfile::tempdir().unwrap();
2446        let root = dir.path();
2447
2448        std::fs::write(
2449            root.join("a.sol"),
2450            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
2451        )
2452        .unwrap();
2453        std::fs::write(
2454            root.join("b.sol"),
2455            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
2456        )
2457        .unwrap();
2458        std::fs::write(
2459            root.join("c.sol"),
2460            "// SPDX-License-Identifier: MIT\ncontract C {}",
2461        )
2462        .unwrap();
2463        std::fs::write(
2464            root.join("d.sol"),
2465            "// SPDX-License-Identifier: MIT\ncontract D {}",
2466        )
2467        .unwrap();
2468
2469        let files: Vec<PathBuf> = vec![
2470            root.join("a.sol"),
2471            root.join("b.sol"),
2472            root.join("c.sol"),
2473            root.join("d.sol"),
2474        ];
2475
2476        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();
2477        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2478
2479        assert!(
2480            closure.contains(&root.join("c.sol")),
2481            "seed file in closure"
2482        );
2483        assert!(closure.contains(&root.join("b.sol")), "direct importer");
2484        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
2485        assert!(
2486            !closure.contains(&root.join("d.sol")),
2487            "unrelated file not in closure"
2488        );
2489        assert_eq!(closure.len(), 3);
2490    }
2491
2492    #[test]
2493    fn test_reverse_import_closure_no_importers() {
2494        // Excluding a file that nothing imports — closure is just the seed.
2495        let dir = tempfile::tempdir().unwrap();
2496        let root = dir.path();
2497
2498        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
2499        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();
2500
2501        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
2502        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();
2503
2504        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2505        assert_eq!(closure.len(), 1);
2506        assert!(closure.contains(&root.join("a.sol")));
2507    }
2508
2509    #[test]
2510    fn test_reverse_import_closure_diamond() {
2511        // Diamond pattern:
2512        //   a.sol imports b.sol and c.sol
2513        //   b.sol imports d.sol
2514        //   c.sol imports d.sol
2515        //
2516        // Excluding d.sol → closure = {d, b, c, a}
2517        let dir = tempfile::tempdir().unwrap();
2518        let root = dir.path();
2519
2520        std::fs::write(
2521            root.join("a.sol"),
2522            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
2523        )
2524        .unwrap();
2525        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
2526        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
2527        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();
2528
2529        let files: Vec<PathBuf> = vec![
2530            root.join("a.sol"),
2531            root.join("b.sol"),
2532            root.join("c.sol"),
2533            root.join("d.sol"),
2534        ];
2535        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();
2536
2537        let closure = reverse_import_closure(&files, &exclude, root, &[]);
2538        assert_eq!(closure.len(), 4);
2539    }
2540
2541    #[test]
2542    fn test_merge_normalized_outputs_basic() {
2543        let mut base = json!({
2544            "sources": {
2545                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
2546                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
2547            },
2548            "contracts": {
2549                "/abs/src/A.sol": { "A": { "abi": [] } }
2550            },
2551            "errors": [],
2552            "source_id_to_path": {
2553                "0": "/abs/src/A.sol",
2554                "1": "/abs/src/B.sol"
2555            }
2556        });
2557
2558        let other = json!({
2559            "sources": {
2560                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
2561            },
2562            "contracts": {
2563                "/abs/lib/C.sol": { "C": { "abi": [] } }
2564            },
2565            "errors": [],
2566            "source_id_to_path": {
2567                "0": "/abs/lib/C.sol"
2568            }
2569        });
2570
2571        merge_normalized_outputs(&mut base, other);
2572
2573        // Sources should now have 3 entries.
2574        let sources = base["sources"].as_object().unwrap();
2575        assert_eq!(sources.len(), 3);
2576        assert!(sources.contains_key("/abs/lib/C.sol"));
2577
2578        // The merged source's ID should be remapped (0 + max_base_id=2 → 2).
2579        let c_id = sources["/abs/lib/C.sol"]["id"].as_u64().unwrap();
2580        assert_eq!(
2581            c_id, 2,
2582            "remapped id should be max_base_id (2) + original (0)"
2583        );
2584
2585        // source_id_to_path should have 3 entries.
2586        let id_map = base["source_id_to_path"].as_object().unwrap();
2587        assert_eq!(id_map.len(), 3);
2588        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");
2589
2590        // Contracts should have 2 entries.
2591        let contracts = base["contracts"].as_object().unwrap();
2592        assert_eq!(contracts.len(), 2);
2593        assert!(contracts.contains_key("/abs/lib/C.sol"));
2594    }
2595
2596    #[test]
2597    fn test_merge_normalized_outputs_empty_other() {
2598        let mut base = json!({
2599            "sources": {
2600                "/abs/src/A.sol": { "id": 0, "ast": {} }
2601            },
2602            "contracts": {},
2603            "errors": [],
2604            "source_id_to_path": { "0": "/abs/src/A.sol" }
2605        });
2606
2607        let other = json!({
2608            "sources": {},
2609            "contracts": {},
2610            "errors": [],
2611            "source_id_to_path": {}
2612        });
2613
2614        merge_normalized_outputs(&mut base, other);
2615
2616        let sources = base["sources"].as_object().unwrap();
2617        assert_eq!(sources.len(), 1);
2618    }
2619
2620    #[test]
2621    fn test_merge_normalized_outputs_empty_base() {
2622        let mut base = json!({
2623            "sources": {},
2624            "contracts": {},
2625            "errors": [],
2626            "source_id_to_path": {}
2627        });
2628
2629        let other = json!({
2630            "sources": {
2631                "/abs/lib/X.sol": { "id": 0, "ast": {} }
2632            },
2633            "contracts": {
2634                "/abs/lib/X.sol": { "X": { "abi": [] } }
2635            },
2636            "errors": [],
2637            "source_id_to_path": { "0": "/abs/lib/X.sol" }
2638        });
2639
2640        merge_normalized_outputs(&mut base, other);
2641
2642        let sources = base["sources"].as_object().unwrap();
2643        assert_eq!(sources.len(), 1);
2644        // max_base_id is 0 (no entries), so remapped id = 0 + 0 = 0.
2645        let x_id = sources["/abs/lib/X.sol"]["id"].as_u64().unwrap();
2646        assert_eq!(x_id, 0);
2647    }
2648}