1use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Process-wide cache of solc versions installed via svm.
/// Populated lazily on first access; refreshed explicitly via
/// `invalidate_installed_versions` after a new install.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();

/// Returns a snapshot (clone) of the installed solc versions, scanning the
/// svm install directory only on the first call.
fn get_installed_versions() -> Vec<SemVer> {
    let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
    mutex.lock().unwrap().clone()
}
25
/// Rescans the svm install directory and replaces the cached version list.
/// A no-op when the cache was never initialized — the first reader will scan.
fn invalidate_installed_versions() {
    if let Some(mutex) = INSTALLED_VERSIONS.get() {
        *mutex.lock().unwrap() = scan_installed_versions();
    }
}
31
/// Converts a `semver::Version` (u64 components) into the crate-local `SemVer`.
/// The `as u32` casts truncate silently; solc version components are small in
/// practice, so overflow is not a realistic concern here.
fn semver_to_local(v: &semver::Version) -> SemVer {
    SemVer {
        major: v.major as u32,
        minor: v.minor as u32,
        patch: v.patch as u32,
    }
}
40
/// Picks the solc binary to use for a compilation.
///
/// Resolution order when a pragma `constraint` is present:
/// 1. The foundry.toml version, when it parses, satisfies the constraint and
///    is installed — skipped for `Exact` pragmas, which must win outright.
/// 2. Any already-installed version matching the constraint.
/// 3. An on-demand `svm` install of a version derived from the constraint.
///
/// With no constraint (or when all of the above fail): the foundry.toml
/// version if its binary exists, else the bare `"solc"` from PATH.
///
/// `client`, when present, receives LSP log/show messages about the decision.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    if let Some(constraint) = constraint {
        // Step 1: prefer the configured version when it satisfies a
        // non-exact pragma and its binary is already installed.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: foundry.toml {config_ver} satisfies pragma {constraint:?} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // Step 2: any installed version that satisfies the pragma.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: pragma {constraint:?} → {version} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // Step 3: nothing installed matches — try to install a suitable
        // version via svm, surfacing progress to the editor.
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // The install changed the on-disk set; drop the stale cache.
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                         Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // No pragma, or pragma-driven resolution failed: fall back to the
    // configured version when its binary exists.
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // Last resort: whatever `solc` is on PATH.
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
166
167fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
174 match constraint {
175 PragmaConstraint::Exact(v) => Some(v.to_string()),
176 PragmaConstraint::Caret(v) => Some(v.to_string()),
177 PragmaConstraint::Gte(v) => Some(v.to_string()),
178 PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
179 }
180}
181
182async fn svm_install(version: &str) -> bool {
186 let ver = match semver::Version::parse(version) {
187 Ok(v) => v,
188 Err(_) => return false,
189 };
190 svm::install(&ver).await.is_ok()
191}
192
193fn find_solc_binary(version: &str) -> Option<PathBuf> {
195 let path = svm::version_binary(version);
196 if path.is_file() {
197 return Some(path);
198 }
199 None
200}
201
/// Minimal semantic-version triple used for solc versions.
///
/// Field order (major, minor, patch) matters: the derived `PartialOrd`/`Ord`
/// compare fields lexicographically, which yields correct semver ordering.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}
211
212impl SemVer {
213 fn parse(s: &str) -> Option<SemVer> {
214 let parts: Vec<&str> = s.split('.').collect();
215 if parts.len() != 3 {
216 return None;
217 }
218 Some(SemVer {
219 major: parts[0].parse().ok()?,
220 minor: parts[1].parse().ok()?,
221 patch: parts[2].parse().ok()?,
222 })
223 }
224}
225
impl std::fmt::Display for SemVer {
    /// Renders the canonical `major.minor.patch` form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
231
/// A version constraint extracted from a `pragma solidity` directive.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `pragma solidity X.Y.Z;` — exactly this version.
    Exact(SemVer),
    /// `pragma solidity ^X.Y.Z;` — compatible releases.
    Caret(SemVer),
    /// `pragma solidity >=X.Y.Z;` — this version or newer, unbounded above.
    Gte(SemVer),
    /// `pragma solidity >=X.Y.Z <A.B.C;` — half-open range `[lower, upper)`.
    Range(SemVer, SemVer),
}
245
/// Resolves a Solidity import path to a normalized absolute path.
///
/// Resolution order:
/// 1. `./` / `../` imports are relative to the importing file's directory.
/// 2. Otherwise, remappings of the form `prefix=target` are consulted; per
///    solc/foundry remapping semantics, when several prefixes match the
///    import, the LONGEST matching prefix wins (previously the first listed
///    match was taken, which mis-resolved overlapping remappings such as
///    `@oz/` vs `@oz/contracts/`). The target is joined onto `project_root`.
/// 3. Unremapped bare imports resolve against `project_root`.
///
/// Returns `None` only when a relative import has no parent directory.
fn resolve_import_to_abs(
    project_root: &Path,
    importer_abs: &Path,
    import_path: &str,
    remappings: &[String],
) -> Option<PathBuf> {
    if import_path.starts_with("./") || import_path.starts_with("../") {
        let base = importer_abs.parent()?;
        return Some(lexical_normalize(&base.join(import_path)));
    }

    // Pick the longest matching remapping prefix, ignoring malformed
    // entries (no '=', or an empty prefix/target).
    let mut best: Option<(&str, &str)> = None;
    for remap in remappings {
        let Some((prefix, target)) = remap.split_once('=') else {
            continue;
        };
        if prefix.is_empty() || target.is_empty() || !import_path.starts_with(prefix) {
            continue;
        }
        if best.is_none_or(|(p, _)| prefix.len() > p.len()) {
            best = Some((prefix, target));
        }
    }
    if let Some((prefix, target)) = best {
        let suffix = &import_path[prefix.len()..];
        return Some(lexical_normalize(
            &project_root.join(format!("{target}{suffix}")),
        ));
    }

    Some(lexical_normalize(&project_root.join(import_path)))
}

/// Purely textual path normalization: drops `.` components and resolves `..`
/// by popping, without touching the filesystem (no symlink resolution).
fn lexical_normalize(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for comp in path.components() {
        match comp {
            std::path::Component::CurDir => {}
            std::path::Component::ParentDir => {
                // `pop` on a bare root is a no-op, so `..` cannot escape `/`.
                out.pop();
            }
            _ => out.push(comp.as_os_str()),
        }
    }
    out
}
293
294fn collect_import_pragmas(
300 file_path: &Path,
301 project_root: &Path,
302 remappings: &[String],
303) -> Vec<PragmaConstraint> {
304 let mut pragmas = Vec::new();
305 let mut visited = HashSet::new();
306 collect_import_pragmas_recursive(
307 file_path,
308 project_root,
309 remappings,
310 &mut pragmas,
311 &mut visited,
312 );
313 pragmas
314}
315
316fn collect_import_pragmas_recursive(
317 file_path: &Path,
318 project_root: &Path,
319 remappings: &[String],
320 pragmas: &mut Vec<PragmaConstraint>,
321 visited: &mut HashSet<PathBuf>,
322) {
323 if !visited.insert(file_path.to_path_buf()) {
324 return;
325 }
326 let source = match std::fs::read_to_string(file_path) {
327 Ok(s) => s,
328 Err(_) => return,
329 };
330 if let Some(pragma) = parse_pragma(&source) {
331 pragmas.push(pragma);
332 }
333 for imp in links::ts_find_imports(source.as_bytes()) {
334 if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
335 collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
336 }
337 }
338}
339
340fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
350 if pragmas.is_empty() {
351 return None;
352 }
353
354 for p in pragmas {
356 if matches!(p, PragmaConstraint::Exact(_)) {
357 return Some(p.clone());
358 }
359 }
360
361 let mut lower = SemVer {
363 major: 0,
364 minor: 0,
365 patch: 0,
366 };
367 let mut upper: Option<SemVer> = None;
368
369 for p in pragmas {
370 let (lo, hi) = constraint_to_range(p);
371 if lo > lower {
372 lower = lo;
373 }
374 if let Some(hi) = hi {
375 upper = Some(match upper {
376 Some(cur) if hi < cur => hi,
377 Some(cur) => cur,
378 None => hi,
379 });
380 }
381 }
382
383 match upper {
384 Some(hi) if lower >= hi => None, Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
386 None => Some(PragmaConstraint::Gte(lower)),
387 }
388}
389
390fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
393 match constraint {
394 PragmaConstraint::Exact(v) => (
395 v.clone(),
396 Some(SemVer {
397 major: v.major,
398 minor: v.minor,
399 patch: v.patch + 1,
400 }),
401 ),
402 PragmaConstraint::Caret(v) => (
403 v.clone(),
404 Some(SemVer {
405 major: v.major,
406 minor: v.minor + 1,
407 patch: 0,
408 }),
409 ),
410 PragmaConstraint::Gte(v) => (v.clone(), None),
411 PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
412 }
413}
414
/// Extracts the first `pragma solidity …` constraint from Solidity source.
///
/// Only the first 20 lines are scanned (the pragma conventionally sits at
/// the top of the file). Supported forms, tried in order:
/// `>=X.Y.Z <A.B.C` (range), `>=X.Y.Z`, `^X.Y.Z`, and a bare `X.Y.Z`
/// (treated as an exact pin). Anything else — `~`, `=`, `>` without `=`,
/// `||` combinations — yields `None`.
///
/// NOTE(review): a matching line inside a leading comment block would also
/// be picked up — confirm that is acceptable for the files seen in practice.
pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
    let pragma_line = source
        .lines()
        .take(20)
        .find(|line| line.trim_start().starts_with("pragma solidity"))?;

    // Strip the keyword and the trailing semicolon (if present).
    let after_keyword = pragma_line
        .trim_start()
        .strip_prefix("pragma solidity")?
        .trim();
    let constraint_str = after_keyword
        .strip_suffix(';')
        .unwrap_or(after_keyword)
        .trim();

    if constraint_str.is_empty() {
        return None;
    }

    if let Some(rest) = constraint_str.strip_prefix(">=") {
        let rest = rest.trim();
        // A second bound (`<X.Y.Z`) after whitespace makes this a range.
        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
            let lower_str = rest[..space_idx].trim();
            let upper_part = rest[space_idx..].trim();
            if let Some(upper_str) = upper_part.strip_prefix('<') {
                let upper_str = upper_str.trim();
                if let (Some(lower), Some(upper)) =
                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
                {
                    return Some(PragmaConstraint::Range(lower, upper));
                }
            }
        }
        // No parseable upper bound: plain >= constraint.
        if let Some(ver) = SemVer::parse(rest) {
            return Some(PragmaConstraint::Gte(ver));
        }
    }

    if let Some(rest) = constraint_str.strip_prefix('^')
        && let Some(ver) = SemVer::parse(rest.trim())
    {
        return Some(PragmaConstraint::Caret(ver));
    }

    // A bare version string is an exact pin.
    if let Some(ver) = SemVer::parse(constraint_str) {
        return Some(PragmaConstraint::Exact(ver));
    }

    None
}
478
/// Public accessor for the cached list of svm-installed solc versions.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
483
/// Queries svm for the solc versions currently installed on disk,
/// converting them to the crate-local `SemVer`. Errors map to an empty list.
fn scan_installed_versions() -> Vec<SemVer> {
    svm::installed_versions()
        .unwrap_or_default()
        .iter()
        .map(semver_to_local)
        .collect()
}
494
495pub fn find_matching_version(
500 constraint: &PragmaConstraint,
501 installed: &[SemVer],
502) -> Option<SemVer> {
503 let candidates: Vec<&SemVer> = installed
504 .iter()
505 .filter(|v| version_satisfies(v, constraint))
506 .collect();
507
508 candidates.last().cloned().cloned()
510}
511
512pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
514 match constraint {
515 PragmaConstraint::Exact(v) => version == v,
516 PragmaConstraint::Caret(v) => {
517 version.major == v.major && version >= v && version.minor < v.minor + 1
520 }
521 PragmaConstraint::Gte(v) => version >= v,
522 PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
523 }
524}
525
526pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
530 let output = Command::new("forge")
533 .arg("remappings")
534 .current_dir(&config.root)
535 .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
536 .output()
537 .await;
538
539 if let Ok(output) = output
540 && output.status.success()
541 {
542 let stdout = String::from_utf8_lossy(&output.stdout);
543 let remappings: Vec<String> = stdout
544 .lines()
545 .filter(|l| !l.trim().is_empty())
546 .map(|l| l.to_string())
547 .collect();
548 if !remappings.is_empty() {
549 return remappings;
550 }
551 }
552
553 if !config.remappings.is_empty() {
555 return config.remappings.clone();
556 }
557
558 let remappings_txt = config.root.join("remappings.txt");
560 if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
561 return content
562 .lines()
563 .filter(|l| !l.trim().is_empty())
564 .map(|l| l.to_string())
565 .collect();
566 }
567
568 Vec::new()
569}
570
571pub fn build_standard_json_input(
588 file_path: &str,
589 remappings: &[String],
590 config: &FoundryConfig,
591) -> Value {
592 let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
595 if !config.via_ir {
596 contract_outputs.push("evm.gasEstimates");
597 }
598
599 let mut settings = json!({
600 "remappings": remappings,
601 "outputSelection": {
602 "*": {
603 "*": contract_outputs,
604 "": ["ast"]
605 }
606 }
607 });
608
609 if config.via_ir {
610 settings["viaIR"] = json!(true);
611 }
612
613 if let Some(ref evm_version) = config.evm_version {
615 settings["evmVersion"] = json!(evm_version);
616 }
617
618 json!({
619 "language": "Solidity",
620 "sources": {
621 file_path: {
622 "urls": [file_path]
623 }
624 },
625 "settings": settings
626 })
627}
628
/// Runs `solc --standard-json`, feeding `input` on stdin and parsing the
/// JSON reply from stdout.
///
/// # Errors
/// Fails when the process cannot be spawned, stdin writing fails, solc
/// produces no stdout (stderr is folded into the error message), or stdout
/// is not valid JSON.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    let input_str = serde_json::to_string(input)?;

    // Run from the project root so relative source "urls" in the input resolve.
    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // Write the whole input, then let the handle drop at the end of this
    // scope so solc sees EOF on stdin and starts compiling.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    // Empty stdout means solc died before emitting JSON; surface stderr.
    let stdout = String::from_utf8_lossy(&output.stdout);
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
672
/// Reshapes raw `solc --standard-json` output into the normalized form used
/// downstream:
/// - `errors`: always present (empty array when solc reported none);
/// - `sources` / `contracts`: keyed by absolute paths — when `project_root`
///   is given, relative keys (and AST-embedded paths) are resolved against it;
/// - `source_id_to_path`: numeric AST source id → absolute path.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    // Rewrites `absolutePath` on every ImportDirective in an AST subtree so
    // consumers always see resolved paths.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        // Recurse into child AST nodes.
        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // `Value::take` moves the array out of the original, avoiding a clone.
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Resolves a solc path to an absolute one when a project root is known;
    // already-absolute paths pass through unchanged.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Keys are snapshotted first because entries are removed while iterating.
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                // The AST also embeds paths: fix the root node's absolutePath
                // and every import directive beneath it.
                if let Some(ast) = source_data.get_mut("ast") {
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contract entries only need their path keys resolved.
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
797
/// Reshapes forge-produced build JSON into the same normalized shape as
/// [`normalize_solc_output`]: flattens the per-path/per-name entry ARRAYS
/// down to the first entry's `source_file` / `contract` object, and lifts
/// `source_id_to_path` out of the first `build_infos` element.
pub fn normalize_forge_output(mut forge_output: Value) -> Value {
    let mut result = Map::new();

    let errors = forge_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Forge emits `sources[path] = [ { source_file: … }, … ]`; keep only
    // the first entry's source_file per path. Later entries are dropped —
    // NOTE(review): confirm a path never carries meaningful extra entries.
    let mut normalized_sources = Map::new();
    if let Some(sources) = forge_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        for (path, entries) in sources.iter_mut() {
            if let Some(arr) = entries.as_array_mut()
                && let Some(first) = arr.first_mut()
                && let Some(sf) = first.get_mut("source_file")
            {
                normalized_sources.insert(path.clone(), sf.take());
            }
        }
    }
    result.insert("sources".to_string(), Value::Object(normalized_sources));

    // Same flattening for contracts: path → name → first entry's `contract`.
    let mut normalized_contracts = Map::new();
    if let Some(contracts) = forge_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        for (path, names) in contracts.iter_mut() {
            let mut path_contracts = Map::new();
            if let Some(names_obj) = names.as_object_mut() {
                for (name, entries) in names_obj.iter_mut() {
                    if let Some(arr) = entries.as_array_mut()
                        && let Some(first) = arr.first_mut()
                        && let Some(contract) = first.get_mut("contract")
                    {
                        path_contracts.insert(name.clone(), contract.take());
                    }
                }
            }
            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
        }
    }
    result.insert("contracts".to_string(), Value::Object(normalized_contracts));

    // The id→path map lives in the first build_infos entry, when present.
    let source_id_to_path = forge_output
        .get_mut("build_infos")
        .and_then(|bi| bi.as_array_mut())
        .and_then(|arr| arr.first_mut())
        .and_then(|info| info.get_mut("source_id_to_path"))
        .map(Value::take)
        .unwrap_or_else(|| json!({}));
    result.insert("source_id_to_path".to_string(), source_id_to_path);

    Value::Object(result)
}
871
872pub async fn solc_ast(
877 file_path: &str,
878 config: &FoundryConfig,
879 client: Option<&tower_lsp::Client>,
880) -> Result<Value, RunnerError> {
881 let remappings = resolve_remappings(config).await;
882
883 let file_abs = Path::new(file_path);
886 let pragmas = collect_import_pragmas(file_abs, &config.root, &remappings);
887 let constraint = tightest_constraint(&pragmas);
888 let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
889
890 let rel_path = Path::new(file_path)
895 .strip_prefix(&config.root)
896 .map(|p| p.to_string_lossy().into_owned())
897 .unwrap_or_else(|_| file_path.to_string());
898
899 let input = build_standard_json_input(&rel_path, &remappings, config);
900 let raw_output = run_solc(&solc_binary, &input, &config.root).await?;
901
902 Ok(normalize_solc_output(raw_output, Some(&config.root)))
903}
904
/// Full build entry point — currently identical to the AST request.
/// Presumably kept as a separate function so the two can diverge without
/// touching callers; confirm before collapsing them.
pub async fn solc_build(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    solc_ast(file_path, config, client).await
}
913
/// Lists all `.sol` files under the project root, sorted, skipping the
/// configured library directories (plus hidden and build-output dirs).
pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, false)
}

/// Like [`discover_source_files`] but also descends into `config.libs`.
pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, true)
}
939
940fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
941 let root = &config.root;
942 if !root.is_dir() {
943 return Vec::new();
944 }
945 let skip_libs = if include_libs { &[][..] } else { &config.libs };
946 let mut files = Vec::new();
947 discover_recursive(root, skip_libs, &mut files);
948 files.sort();
949 files
950}
951
952pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
971 let seeds = discover_source_files(config);
973 let mut visited: HashSet<PathBuf> = HashSet::new();
974 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
975
976 while let Some(file) = queue.pop_front() {
977 if !visited.insert(file.clone()) {
978 continue;
979 }
980 let source = match std::fs::read_to_string(&file) {
981 Ok(s) => s,
982 Err(_) => continue,
983 };
984 for imp in links::ts_find_imports(source.as_bytes()) {
985 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
986 if abs.exists() && !visited.contains(&abs) {
987 queue.push_back(abs);
988 }
989 }
990 }
991 }
992
993 let mut result: Vec<PathBuf> = visited.into_iter().collect();
994 result.sort();
995 result
996}
997
/// Build-output directories never worth scanning for sources.
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];

/// Recursively gathers `.sol` files under `dir`, skipping hidden dirs,
/// build-output dirs, and any directory named in `skip_libs`.
/// Unreadable directories are silently ignored (best-effort walk).
fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = path.file_name().and_then(|n| n.to_str());
        if path.is_dir() {
            if let Some(name) = name {
                let skip = name.starts_with('.')
                    || DISCOVER_SKIP_DIRS.contains(&name)
                    || skip_libs.iter().any(|lib| lib == name);
                if skip {
                    continue;
                }
            }
            discover_recursive(&path, skip_libs, files);
        } else if matches!(name, Some(n) if n.ends_with(".sol")) {
            files.push(path);
        }
    }
}
1032
/// Batch standard-JSON input with every source read from disk
/// (no in-memory editor-content overlay).
pub fn build_batch_standard_json_input(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
) -> Value {
    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
}
1046
/// Builds a batch standard-JSON input for `source_files`, preferring
/// in-memory content from `content_cache` over on-disk files.
/// The cache is keyed by file URL; the tuple is (presumably the LSP document
/// version, text) — only the text is used here.
pub fn build_batch_standard_json_input_with_cache(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
    content_cache: Option<&HashMap<String, (i32, String)>>,
) -> Value {
    // Same output selection as the single-file builder: gas estimates are
    // skipped under via-IR.
    let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
    if !config.via_ir {
        contract_outputs.push("evm.gasEstimates");
    }

    let mut settings = json!({
        "remappings": remappings,
        "outputSelection": {
            "*": {
                "*": contract_outputs,
                "": ["ast"]
            }
        }
    });

    if config.via_ir {
        settings["viaIR"] = json!(true);
    }
    if let Some(ref evm_version) = config.evm_version {
        settings["evmVersion"] = json!(evm_version);
    }

    let mut sources = serde_json::Map::new();
    for file in source_files {
        // solc runs from the project root, so keys are root-relative when
        // possible; files outside the root keep their absolute path.
        let rel_path = file
            .strip_prefix(&config.root)
            .map(|p| p.to_string_lossy().into_owned())
            .unwrap_or_else(|_| file.to_string_lossy().into_owned());

        // Unsaved editor content takes priority over the on-disk file.
        let cached_content = content_cache.and_then(|cache| {
            let uri = Url::from_file_path(file).ok()?;
            cache.get(&uri.to_string()).map(|(_, c)| c.as_str())
        });

        if let Some(content) = cached_content {
            sources.insert(rel_path, json!({ "content": content }));
        } else {
            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
        }
    }

    json!({
        "language": "Solidity",
        "sources": sources,
        "settings": settings
    })
}
1110
1111pub fn build_parse_only_json_input(
1128 source_files: &[PathBuf],
1129 remappings: &[String],
1130 config: &FoundryConfig,
1131) -> Value {
1132 let settings = json!({
1133 "stopAfter": "parsing",
1134 "remappings": remappings,
1135 "outputSelection": {
1136 "*": {
1137 "": ["ast"]
1138 }
1139 }
1140 });
1141
1142 let mut sources = serde_json::Map::new();
1143 for file in source_files {
1144 let rel_path = file
1145 .strip_prefix(&config.root)
1146 .map(|p| p.to_string_lossy().into_owned())
1147 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1148 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1149 }
1150
1151 json!({
1152 "language": "Solidity",
1153 "sources": sources,
1154 "settings": settings
1155 })
1156}
1157
/// Builds a whole-project index: discovers the compilation closure and
/// compiles it via `solc_project_index_from_files`.
///
/// NOTE(review): `resolve_remappings` is awaited here (needed for closure
/// discovery) and again inside `solc_project_index_from_files` — one forge
/// subprocess more than necessary; consider threading the result through.
pub async fn solc_project_index(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<String, (i32, String)>>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    // The closure includes imported library files, not just project sources.
    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, &source_files).await
}
1188
1189pub async fn solc_project_index_scoped(
1194 config: &FoundryConfig,
1195 client: Option<&tower_lsp::Client>,
1196 text_cache: Option<&HashMap<String, (i32, String)>>,
1197 source_files: &[PathBuf],
1198) -> Result<Value, RunnerError> {
1199 if source_files.is_empty() {
1200 return Err(RunnerError::CommandError(std::io::Error::other(
1201 "no source files provided for scoped project index",
1202 )));
1203 }
1204
1205 solc_project_index_from_files(config, client, text_cache, source_files).await
1206}
1207
#[cfg(test)]
/// Collects the `sourceLocation.file` of every error whose solc `errorCode`
/// is "5333" (per this helper's name, presumably the compiler-version
/// mismatch error — confirm against solc's error-code list).
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    solc_output
        .get("errors")
        .and_then(|e| e.as_array())
        .map(|errors| {
            errors
                .iter()
                .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
                .filter_map(|err| {
                    err.get("sourceLocation")
                        .and_then(|sl| sl.get("file"))
                        .and_then(|f| f.as_str())
                        .map(str::to_string)
                })
                .collect()
        })
        .unwrap_or_default()
}
1229
#[cfg(test)]
/// Computes the set of files that (transitively) import anything in
/// `exclude_abs`, including the excluded files themselves.
///
/// Bug fix: the lookup in the BFS loop read `reverse_edges.get(¤t)` —
/// mojibake for `&current` (an HTML-entity-mangled `&curren;t`) that does
/// not compile. Restored to `&current`.
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // Reverse dependency edges: imported file -> set of files importing it.
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    for file in source_files {
        let Ok(bytes) = std::fs::read(file) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(imported_abs) =
                resolve_import_to_abs(project_root, file, &imp.path, remappings)
            {
                reverse_edges
                    .entry(imported_abs)
                    .or_default()
                    .insert(file.clone());
            }
        }
    }

    // BFS from the excluded set along reverse edges: anything that
    // transitively imports an excluded file joins the closure.
    let mut closure: HashSet<PathBuf> = exclude_abs.clone();
    let mut queue: std::collections::VecDeque<PathBuf> = exclude_abs.iter().cloned().collect();

    while let Some(current) = queue.pop_front() {
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                if closure.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    closure
}
1278
1279fn merge_normalized_outputs(base: &mut Value, other: Value) {
1285 if let (Some(base_sources), Some(other_sources)) = (
1287 base.get_mut("sources").and_then(|s| s.as_object_mut()),
1288 other.get("sources").and_then(|s| s.as_object()),
1289 ) {
1290 let max_base_id = base_sources
1292 .values()
1293 .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1294 .max()
1295 .map(|m| m + 1)
1296 .unwrap_or(0);
1297
1298 let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1300
1301 for (path, mut source_data) in other_sources.clone() {
1302 if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1304 let new_id = id + max_base_id;
1305 source_data
1306 .as_object_mut()
1307 .unwrap()
1308 .insert("id".to_string(), json!(new_id));
1309 remapped_id_to_path.push((new_id.to_string(), path.clone()));
1310 }
1311 base_sources.insert(path, source_data);
1312 }
1313
1314 if let Some(base_id_map) = base
1316 .get_mut("source_id_to_path")
1317 .and_then(|m| m.as_object_mut())
1318 {
1319 for (id, path) in remapped_id_to_path {
1320 base_id_map.insert(id, json!(path));
1321 }
1322 }
1323 }
1324
1325 if let (Some(base_contracts), Some(other_contracts)) = (
1327 base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1328 other.get("contracts").and_then(|c| c.as_object()),
1329 ) {
1330 for (path, contract_data) in other_contracts {
1331 base_contracts.insert(path.clone(), contract_data.clone());
1332 }
1333 }
1334
1335 }
1338
/// Compiles `source_files` into one normalized project index.
///
/// Files whose pragma is satisfied by the project's pinned solc version (or
/// all files, when no version is pinned) are batch-compiled in a single solc
/// run; the remainder are compiled one-by-one with a solc resolved from each
/// file's own pragma, and their outputs merged into the batch result.
async fn solc_project_index_from_files(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<String, (i32, String)>>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: discovered {} source files in {}",
                source_files.len(),
                config.root.display()
            ),
        )
        .await;
    }

    let remappings = resolve_remappings(config).await;

    // Version pinned in foundry.toml, when present and parseable.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));

    // Constraint that selects the batch compiler: the pinned version wins;
    // otherwise the first pragma found in any source file.
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // Partition by pragma compatibility with the pinned version; unreadable
    // files and files without a pragma are assumed compatible.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        // No pinned version: everything goes into the single batch.
        (source_files.to_vec(), Vec::new())
    };

    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // Batch-compile the compatible set, or start from an empty result shape.
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input = build_batch_standard_json_input_with_cache(
            &compatible_files,
            &remappings,
            config,
            text_cache,
        );
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    let batch_source_count = result
        .get("sources")
        .and_then(|s| s.as_object())
        .map_or(0, |obj| obj.len());

    // Fast path: everything compiled in the single batch.
    if incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: compiled {} files with no version mismatches",
                    source_files.len(),
                ),
            )
            .await;
        }
        return Ok(result);
    }

    if let Some(c) = client {
        // Sample up to three hard errors from the batch for the log line.
        let batch_errors: Vec<String> = result
            .get("errors")
            .and_then(|e| e.as_array())
            .map(|arr| {
                arr.iter()
                    .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
                    .take(3)
                    .filter_map(|e| {
                        let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
                        let file = e
                            .get("sourceLocation")
                            .and_then(|sl| sl.get("file"))
                            .and_then(|f| f.as_str())
                            .unwrap_or("?");
                        Some(format!("{file}: {msg}"))
                    })
                    .collect()
            })
            .unwrap_or_default();

        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
                batch_source_count,
                incompatible_files.len(),
                if batch_errors.is_empty() {
                    String::new()
                } else {
                    format!(" [first errors: {}]", batch_errors.join("; "))
                },
            ),
        )
        .await;
    }

    // Per-file pass: resolve a compiler from each incompatible file's own
    // pragma and merge its output; files with no pragma are skipped.
    let mut compiled = 0usize;
    let mut skipped = 0usize;
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));

        let Some(file_constraint) = pragma else {
            skipped += 1;
            continue;
        };

        let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
        let input = build_batch_standard_json_input_with_cache(
            &[file.clone()],
            &remappings,
            config,
            text_cache,
        );
        match run_solc(&file_binary, &input, &config.root).await {
            Ok(raw) => {
                let normalized = normalize_solc_output(raw, Some(&config.root));
                merge_normalized_outputs(&mut result, normalized);
                compiled += 1;
            }
            Err(e) => {
                // Per-file failures are logged but do not abort the index.
                if let Some(c) = client {
                    c.log_message(
                        tower_lsp::lsp_types::MessageType::WARNING,
                        format!(
                            "project index: incompatible file {} failed: {e}",
                            file.display(),
                        ),
                    )
                    .await;
                }
                skipped += 1;
            }
        }
    }

    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
            ),
        )
        .await;
    }

    Ok(result)
}
1558
#[cfg(test)]
mod tests {
    use super::*;

    /// Shorthand constructor for the [`SemVer`] literals used throughout these tests.
    fn sv(major: u32, minor: u32, patch: u32) -> SemVer {
        SemVer {
            major,
            minor,
            patch,
        }
    }

    #[test]
    fn test_normalize_solc_sources() {
        // Two sources with distinct ids and ASTs should survive normalization.
        let solc_output = json!({
            "sources": {
                "src/Foo.sol": {
                    "id": 0,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Foo.sol",
                        "id": 100
                    }
                },
                "src/Bar.sol": {
                    "id": 1,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Bar.sol",
                        "id": 200
                    }
                }
            },
            "contracts": {},
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        let sources = normalized["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 2);

        // Per-source id and AST node type are preserved.
        let foo = &sources["src/Foo.sol"];
        assert_eq!(foo["id"], 0);
        assert_eq!(foo["ast"]["nodeType"].as_str().unwrap(), "SourceUnit");

        // The reverse id → path map covers both sources.
        let id_to_path = normalized["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_to_path.len(), 2);
    }

    #[test]
    fn test_normalize_solc_contracts() {
        let solc_output = json!({
            "sources": {},
            "contracts": {
                "src/Foo.sol": {
                    "Foo": {
                        "abi": [{"type": "function", "name": "bar"}],
                        "evm": {
                            "methodIdentifiers": {
                                "bar(uint256)": "abcd1234"
                            },
                            "gasEstimates": {
                                "external": {"bar(uint256)": "200"}
                            }
                        }
                    }
                }
            },
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        // The contract entry and its method-identifier table survive untouched;
        // chained indexing panics (failing the test) if any level is missing.
        let selector = normalized["contracts"]["src/Foo.sol"]["Foo"]["evm"]
            ["methodIdentifiers"]["bar(uint256)"]
            .as_str()
            .unwrap();
        assert_eq!(selector, "abcd1234");
    }

    #[test]
    fn test_normalize_solc_errors_passthrough() {
        let solc_output = json!({
            "sources": {},
            "contracts": {},
            "errors": [{
                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
                "type": "Warning",
                "component": "general",
                "severity": "warning",
                "errorCode": "2394",
                "message": "test warning",
                "formattedMessage": "Warning: test warning"
            }]
        });

        let normalized = normalize_solc_output(solc_output, None);

        // The error list is passed through unchanged.
        let errors = normalized["errors"].as_array().unwrap();
        assert_eq!(errors.len(), 1);
        assert_eq!(errors[0]["errorCode"], "2394");
    }

    #[test]
    fn test_normalize_empty_solc_output() {
        // A missing "errors" key in the input must still yield all four
        // normalized keys, each empty.
        let normalized = normalize_solc_output(json!({"sources": {}, "contracts": {}}), None);

        for key in ["sources", "contracts", "source_id_to_path"] {
            assert!(normalized[key].as_object().unwrap().is_empty());
        }
        assert!(normalized["errors"].as_array().unwrap().is_empty());
    }

    #[test]
    fn test_build_standard_json_input() {
        let remaps = [
            "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
            "forge-std/=lib/forge-std/src/".to_string(),
        ];
        let input =
            build_standard_json_input("/path/to/Foo.sol", &remaps, &FoundryConfig::default());

        // The target file is registered as a source.
        assert!(
            input["sources"]
                .as_object()
                .unwrap()
                .contains_key("/path/to/Foo.sol")
        );

        let settings = &input["settings"];
        assert_eq!(settings["remappings"].as_array().unwrap().len(), 2);
        assert!(settings["outputSelection"].get("*").is_some());

        // A default config emits no optimizer / viaIR / evmVersion settings.
        for key in ["optimizer", "viaIR", "evmVersion"] {
            assert!(settings.get(key).is_none());
        }

        // Every expected output artifact is requested for all contracts.
        let output_names: Vec<&str> = settings["outputSelection"]["*"]["*"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap())
            .collect();
        for name in [
            "evm.gasEstimates",
            "abi",
            "devdoc",
            "userdoc",
            "evm.methodIdentifiers",
        ] {
            assert!(output_names.contains(&name));
        }
    }

    #[test]
    fn test_build_standard_json_input_with_config() {
        let config = FoundryConfig {
            optimizer: true,
            optimizer_runs: 9999999,
            via_ir: true,
            evm_version: Some("osaka".to_string()),
            ..Default::default()
        };
        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config);
        let settings = &input["settings"];

        // Optimizer settings are intentionally not forwarded even when enabled.
        assert!(settings.get("optimizer").is_none());

        // viaIR and evmVersion are forwarded from the config.
        assert!(settings["viaIR"].as_bool().unwrap());
        assert_eq!(settings["evmVersion"].as_str().unwrap(), "osaka");

        // evm.gasEstimates must not be requested for this config.
        let output_names: Vec<&str> = settings["outputSelection"]["*"]["*"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap())
            .collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));
    }

    #[tokio::test]
    async fn test_resolve_solc_binary_default() {
        // With a default config and no pragma constraint, resolution falls back
        // to the plain "solc" binary from PATH.
        let binary = resolve_solc_binary(&FoundryConfig::default(), None, None).await;
        assert_eq!(binary, PathBuf::from("solc"));
    }

    #[test]
    fn test_parse_pragma_exact() {
        let src = "// SPDX\npragma solidity 0.8.26;\n";
        assert_eq!(
            parse_pragma(src),
            Some(PragmaConstraint::Exact(sv(0, 8, 26)))
        );
    }

    #[test]
    fn test_parse_pragma_caret() {
        let src = "pragma solidity ^0.8.0;\n";
        assert_eq!(parse_pragma(src), Some(PragmaConstraint::Caret(sv(0, 8, 0))));
    }

    #[test]
    fn test_parse_pragma_gte() {
        let src = "pragma solidity >=0.8.0;\n";
        assert_eq!(parse_pragma(src), Some(PragmaConstraint::Gte(sv(0, 8, 0))));
    }

    #[test]
    fn test_parse_pragma_range() {
        let src = "pragma solidity >=0.6.2 <0.9.0;\n";
        assert_eq!(
            parse_pragma(src),
            Some(PragmaConstraint::Range(sv(0, 6, 2), sv(0, 9, 0)))
        );
    }

    #[test]
    fn test_parse_pragma_none() {
        // A file without a pragma line yields no constraint.
        let src = "contract Foo {}\n";
        assert_eq!(parse_pragma(src), None);
    }

    #[test]
    fn test_version_satisfies_exact() {
        let constraint = PragmaConstraint::Exact(sv(0, 8, 26));
        assert!(version_satisfies(&sv(0, 8, 26), &constraint));
        assert!(!version_satisfies(&sv(0, 8, 25), &constraint));
    }

    #[test]
    fn test_version_satisfies_caret() {
        let constraint = PragmaConstraint::Caret(sv(0, 8, 0));
        // Same minor with patch at or above the base: accepted.
        assert!(version_satisfies(&sv(0, 8, 0), &constraint));
        assert!(version_satisfies(&sv(0, 8, 26), &constraint));
        // A different minor in either direction: rejected.
        assert!(!version_satisfies(&sv(0, 9, 0), &constraint));
        assert!(!version_satisfies(&sv(0, 7, 0), &constraint));
    }

    #[test]
    fn test_version_satisfies_gte() {
        let constraint = PragmaConstraint::Gte(sv(0, 8, 0));
        // At or above the bound: accepted.
        assert!(version_satisfies(&sv(0, 8, 0), &constraint));
        assert!(version_satisfies(&sv(0, 9, 0), &constraint));
        // Below the bound: rejected.
        assert!(!version_satisfies(&sv(0, 7, 0), &constraint));
    }

    #[test]
    fn test_version_satisfies_range() {
        let constraint = PragmaConstraint::Range(sv(0, 6, 2), sv(0, 9, 0));
        // Inclusive lower bound, values inside the range: accepted.
        assert!(version_satisfies(&sv(0, 6, 2), &constraint));
        assert!(version_satisfies(&sv(0, 8, 26), &constraint));
        // Exclusive upper bound and below-range values: rejected.
        assert!(!version_satisfies(&sv(0, 9, 0), &constraint));
        assert!(!version_satisfies(&sv(0, 6, 1), &constraint));
    }

    #[test]
    fn test_find_matching_version() {
        let installed = vec![sv(0, 8, 0), sv(0, 8, 20), sv(0, 8, 26), sv(0, 8, 33)];

        // Caret picks the highest installed version inside the range.
        let caret = PragmaConstraint::Caret(sv(0, 8, 20));
        assert_eq!(find_matching_version(&caret, &installed), Some(sv(0, 8, 33)));

        // Exact matches only the identical installed version.
        let exact = PragmaConstraint::Exact(sv(0, 8, 20));
        assert_eq!(find_matching_version(&exact, &installed), Some(sv(0, 8, 20)));

        // An exact version that is not installed yields no match.
        let missing = PragmaConstraint::Exact(sv(0, 8, 15));
        assert_eq!(find_matching_version(&missing, &installed), None);
    }

    #[test]
    fn test_list_installed_versions() {
        // Whatever is installed locally, the list must come back sorted ascending.
        let versions = list_installed_versions();
        assert!(versions.windows(2).all(|w| w[0] <= w[1]));
    }

    #[test]
    fn test_extract_version_error_files_basic() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/old-lib/src/Legacy.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "9574",
                    "severity": "error",
                    "message": "Some other error",
                    "sourceLocation": {
                        "file": "src/Main.sol",
                        "start": 100,
                        "end": 200
                    }
                }
            ]
        });

        // Only the 5333 (compiler-version) errors contribute files.
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 2);
        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
        assert!(!files.contains("src/Main.sol"));
    }

    #[test]
    fn test_extract_version_error_files_empty() {
        // An empty error list and a missing "errors" key both yield no files.
        assert!(extract_version_error_files(&json!({"errors": []})).is_empty());
        assert!(extract_version_error_files(&json!({})).is_empty());
    }

    #[test]
    fn test_extract_version_error_files_no_source_location() {
        // A 5333 error without a sourceLocation cannot name a file.
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version"
                }
            ]
        });
        assert!(extract_version_error_files(&output).is_empty());
    }

    #[test]
    fn test_extract_version_error_files_dedup() {
        // Two 5333 errors pointing at the same file collapse to one entry.
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
                }
            ]
        });
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 1);
        assert!(files.contains("lib/same.sol"));
    }

    #[test]
    fn test_reverse_import_closure_simple() {
        // Chain: a.sol imports b.sol imports c.sol; d.sol stands alone.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        let fixtures = [
            (
                "a.sol",
                "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
            ),
            (
                "b.sol",
                "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
            ),
            ("c.sol", "// SPDX-License-Identifier: MIT\ncontract C {}"),
            ("d.sol", "// SPDX-License-Identifier: MIT\ncontract D {}"),
        ];
        for (name, body) in fixtures {
            std::fs::write(root.join(name), body).unwrap();
        }

        let files: Vec<PathBuf> = ["a.sol", "b.sol", "c.sol", "d.sol"]
            .iter()
            .map(|n| root.join(n))
            .collect();
        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);

        assert!(
            closure.contains(&root.join("c.sol")),
            "seed file in closure"
        );
        assert!(closure.contains(&root.join("b.sol")), "direct importer");
        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
        assert!(
            !closure.contains(&root.join("d.sol")),
            "unrelated file not in closure"
        );
        assert_eq!(closure.len(), 3);
    }

    #[test]
    fn test_reverse_import_closure_no_importers() {
        // With no importers, the closure is just the excluded seed itself.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();

        let files = vec![root.join("a.sol"), root.join("b.sol")];
        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 1);
        assert!(closure.contains(&root.join("a.sol")));
    }

    #[test]
    fn test_reverse_import_closure_diamond() {
        // Diamond: a imports b and c; both import d. Excluding d pulls in all four.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();

        let files: Vec<PathBuf> = ["a.sol", "b.sol", "c.sol", "d.sol"]
            .iter()
            .map(|n| root.join(n))
            .collect();
        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 4);
    }

    #[test]
    fn test_merge_normalized_outputs_basic() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/src/A.sol": { "A": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/src/A.sol",
                "1": "/abs/src/B.sol"
            }
        });

        let other = json!({
            "sources": {
                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/lib/C.sol": { "C": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/lib/C.sol"
            }
        });

        merge_normalized_outputs(&mut base, other);

        // All three sources are present after the merge.
        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 3);
        assert!(sources.contains_key("/abs/lib/C.sol"));

        // The incoming source id is shifted past the base ids.
        assert_eq!(
            sources["/abs/lib/C.sol"]["id"].as_u64().unwrap(),
            2,
            "remapped id should be max_base_id (2) + original (0)"
        );

        // The id → path map picks up the remapped entry.
        let id_map = base["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_map.len(), 3);
        assert_eq!(id_map["2"], "/abs/lib/C.sol");

        // Contracts from both outputs are kept.
        let contracts = base["contracts"].as_object().unwrap();
        assert_eq!(contracts.len(), 2);
        assert!(contracts.contains_key("/abs/lib/C.sol"));
    }

    #[test]
    fn test_merge_normalized_outputs_empty_other() {
        // Merging an empty output leaves the base untouched.
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": {} }
            },
            "contracts": {},
            "errors": [],
            "source_id_to_path": { "0": "/abs/src/A.sol" }
        });

        let other = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        merge_normalized_outputs(&mut base, other);

        assert_eq!(base["sources"].as_object().unwrap().len(), 1);
    }

    #[test]
    fn test_merge_normalized_outputs_empty_base() {
        // Merging into an empty base adopts the other output without id shifts.
        let mut base = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        let other = json!({
            "sources": {
                "/abs/lib/X.sol": { "id": 0, "ast": {} }
            },
            "contracts": {
                "/abs/lib/X.sol": { "X": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": { "0": "/abs/lib/X.sol" }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
        assert_eq!(sources["/abs/lib/X.sol"]["id"].as_u64().unwrap(), 0);
    }
}