1use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Lazily-populated cache of locally installed solc versions; refreshed via
/// `invalidate_installed_versions` after a new install.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22 let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23 mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27 if let Some(mutex) = INSTALLED_VERSIONS.get() {
28 *mutex.lock().unwrap() = scan_installed_versions();
29 }
30}
31
/// Converts a `semver::Version` into the module's lightweight `SemVer`.
///
/// NOTE(review): the `as u32` casts silently truncate u64 components;
/// presumably fine for real solc versions, but worth confirming.
fn semver_to_local(v: &semver::Version) -> SemVer {
    SemVer {
        major: v.major as u32,
        minor: v.minor as u32,
        patch: v.patch as u32,
    }
}
40
/// Resolves which solc binary to run, trying in order:
/// 1. the `foundry.toml` pinned version, when the pragma constraint is
///    non-exact, the pin satisfies it, and its binary is installed;
/// 2. the best already-installed version matching the constraint;
/// 3. auto-installing a matching version via svm;
/// 4. the `foundry.toml` pinned version regardless of pragma;
/// 5. falling back to `solc` on the PATH.
///
/// Decisions and progress are reported through `client` when present.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    if let Some(constraint) = constraint {
        // Step 1: honor the project-pinned version if it is compatible with
        // a non-exact pragma and the binary exists locally.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {config_ver} (pragma {constraint})"),
                )
                .await;
            }
            return path;
        }

        // Step 2: any already-installed version satisfying the pragma.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {version}"),
                )
                .await;
            }
            return path;
        }

        // Step 3: attempt an automatic install via svm.
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // Refresh the cache so later lookups see the new install.
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                        Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // Step 4: no pragma (or nothing matched): fall back to the pinned
    // version when its binary is installed.
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // Step 5: last resort — whatever `solc` the PATH provides.
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
160
161fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
168 match constraint {
169 PragmaConstraint::Exact(v) => Some(v.to_string()),
170 PragmaConstraint::Caret(v) => Some(v.to_string()),
171 PragmaConstraint::Gte(v) => Some(v.to_string()),
172 PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
173 }
174}
175
176async fn svm_install(version: &str) -> bool {
180 let ver = match semver::Version::parse(version) {
181 Ok(v) => v,
182 Err(_) => return false,
183 };
184 svm::install(&ver).await.is_ok()
185}
186
187fn find_solc_binary(version: &str) -> Option<PathBuf> {
189 let path = svm::version_binary(version);
190 if path.is_file() {
191 return Some(path);
192 }
193 None
194}
195
/// Minimal `major.minor.patch` version used for pragma matching.
/// Derived `Ord` compares fields in declaration order, i.e. lexicographic
/// (major, minor, patch).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}

impl SemVer {
    /// Parses a strict `MAJOR.MINOR.PATCH` string; any other shape (missing
    /// or extra components, non-numeric parts) yields `None`.
    fn parse(s: &str) -> Option<SemVer> {
        let mut components = s.split('.');
        let major = components.next()?.parse().ok()?;
        let minor = components.next()?.parse().ok()?;
        let patch = components.next()?.parse().ok()?;
        if components.next().is_some() {
            return None;
        }
        Some(SemVer {
            major,
            minor,
            patch,
        })
    }
}

impl std::fmt::Display for SemVer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
225
/// Version constraint extracted from a `pragma solidity` directive.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `pragma solidity 0.8.19` — exactly this version.
    Exact(SemVer),
    /// `pragma solidity ^0.8.19` — same major.minor, patch at or above the
    /// pin (Solidity-style caret; see `version_satisfies`).
    Caret(SemVer),
    /// `pragma solidity >=0.8.0` — this version or newer.
    Gte(SemVer),
    /// `pragma solidity >=0.8.0 <0.9.0` — half-open range [lower, upper).
    Range(SemVer, SemVer),
}
239
240impl std::fmt::Display for PragmaConstraint {
241 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
242 match self {
243 PragmaConstraint::Exact(v) => write!(f, "={v}"),
244 PragmaConstraint::Caret(v) => write!(f, "^{v}"),
245 PragmaConstraint::Gte(v) => write!(f, ">={v}"),
246 PragmaConstraint::Range(lo, hi) => write!(f, ">={lo} <{hi}"),
247 }
248 }
249}
250
251fn resolve_import_to_abs(
255 project_root: &Path,
256 importer_abs: &Path,
257 import_path: &str,
258 remappings: &[String],
259) -> Option<PathBuf> {
260 if import_path.starts_with("./") || import_path.starts_with("../") {
261 let base = importer_abs.parent()?;
262 return Some(lexical_normalize(&base.join(import_path)));
263 }
264
265 for remap in remappings {
266 let mut it = remap.splitn(2, '=');
267 let prefix = it.next().unwrap_or_default();
268 let target = it.next().unwrap_or_default();
269 if prefix.is_empty() || target.is_empty() {
270 continue;
271 }
272 if import_path.starts_with(prefix) {
273 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
274 return Some(lexical_normalize(
275 &project_root.join(format!("{target}{suffix}")),
276 ));
277 }
278 }
279
280 Some(lexical_normalize(&project_root.join(import_path)))
281}
282
/// Purely textual path normalization: drops `.` components and folds `..`
/// onto the previous component. Never touches the filesystem (symlinks are
/// not resolved), and `..` at the start simply pops nothing.
fn lexical_normalize(path: &Path) -> PathBuf {
    use std::path::Component;
    path.components().fold(PathBuf::new(), |mut acc, comp| {
        match comp {
            Component::CurDir => {}
            // `PathBuf::pop` at the root/empty is a no-op, which is what we
            // want for a lexical normalization.
            Component::ParentDir => {
                acc.pop();
            }
            other => acc.push(other.as_os_str()),
        }
        acc
    })
}
298
299fn collect_import_pragmas(
305 file_path: &Path,
306 project_root: &Path,
307 remappings: &[String],
308) -> Vec<PragmaConstraint> {
309 let mut pragmas = Vec::new();
310 let mut visited = HashSet::new();
311 collect_import_pragmas_recursive(
312 file_path,
313 project_root,
314 remappings,
315 &mut pragmas,
316 &mut visited,
317 );
318 pragmas
319}
320
321fn collect_import_pragmas_recursive(
322 file_path: &Path,
323 project_root: &Path,
324 remappings: &[String],
325 pragmas: &mut Vec<PragmaConstraint>,
326 visited: &mut HashSet<PathBuf>,
327) {
328 if !visited.insert(file_path.to_path_buf()) {
329 return;
330 }
331 let source = match std::fs::read_to_string(file_path) {
332 Ok(s) => s,
333 Err(_) => return,
334 };
335 if let Some(pragma) = parse_pragma(&source) {
336 pragmas.push(pragma);
337 }
338 for imp in links::ts_find_imports(source.as_bytes()) {
339 if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
340 collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
341 }
342 }
343}
344
345fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
355 if pragmas.is_empty() {
356 return None;
357 }
358
359 for p in pragmas {
361 if matches!(p, PragmaConstraint::Exact(_)) {
362 return Some(p.clone());
363 }
364 }
365
366 let mut lower = SemVer {
368 major: 0,
369 minor: 0,
370 patch: 0,
371 };
372 let mut upper: Option<SemVer> = None;
373
374 for p in pragmas {
375 let (lo, hi) = constraint_to_range(p);
376 if lo > lower {
377 lower = lo;
378 }
379 if let Some(hi) = hi {
380 upper = Some(match upper {
381 Some(cur) if hi < cur => hi,
382 Some(cur) => cur,
383 None => hi,
384 });
385 }
386 }
387
388 match upper {
389 Some(hi) if lower >= hi => None, Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
391 None => Some(PragmaConstraint::Gte(lower)),
392 }
393}
394
395fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
398 match constraint {
399 PragmaConstraint::Exact(v) => (
400 v.clone(),
401 Some(SemVer {
402 major: v.major,
403 minor: v.minor,
404 patch: v.patch + 1,
405 }),
406 ),
407 PragmaConstraint::Caret(v) => (
408 v.clone(),
409 Some(SemVer {
410 major: v.major,
411 minor: v.minor + 1,
412 patch: 0,
413 }),
414 ),
415 PragmaConstraint::Gte(v) => (v.clone(), None),
416 PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
417 }
418}
419
/// Extracts the version constraint from the first `pragma solidity` line
/// found in the first 20 lines of `source`.
///
/// Supported forms: `>=X.Y.Z <A.B.C` (range), `>=X.Y.Z`, `^X.Y.Z`, and a
/// bare `X.Y.Z` (exact). Anything else — including two-component versions
/// like `^0.8` or `||` alternatives — returns `None`.
pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
    // Pragmas conventionally sit near the top of the file; cap the scan.
    let pragma_line = source
        .lines()
        .take(20)
        .find(|line| line.trim_start().starts_with("pragma solidity"))?;

    let after_keyword = pragma_line
        .trim_start()
        .strip_prefix("pragma solidity")?
        .trim();
    // The trailing semicolon is optional here (tolerates partial lines).
    let constraint_str = after_keyword
        .strip_suffix(';')
        .unwrap_or(after_keyword)
        .trim();

    if constraint_str.is_empty() {
        return None;
    }

    if let Some(rest) = constraint_str.strip_prefix(">=") {
        let rest = rest.trim();
        // A following `<X.Y.Z` (with or without whitespace before the `<`)
        // turns this into a bounded range.
        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
            let lower_str = rest[..space_idx].trim();
            let upper_part = rest[space_idx..].trim();
            if let Some(upper_str) = upper_part.strip_prefix('<') {
                let upper_str = upper_str.trim();
                if let (Some(lower), Some(upper)) =
                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
                {
                    return Some(PragmaConstraint::Range(lower, upper));
                }
            }
        }
        // No (valid) upper bound: plain >= constraint. Note this only
        // succeeds when `rest` is exactly one version, so a malformed range
        // falls through to `None` below.
        if let Some(ver) = SemVer::parse(rest) {
            return Some(PragmaConstraint::Gte(ver));
        }
    }

    if let Some(rest) = constraint_str.strip_prefix('^')
        && let Some(ver) = SemVer::parse(rest.trim())
    {
        return Some(PragmaConstraint::Caret(ver));
    }

    // Bare version with no operator is treated as an exact pin.
    if let Some(ver) = SemVer::parse(constraint_str) {
        return Some(PragmaConstraint::Exact(ver));
    }

    None
}
483
/// Public accessor for the cached list of locally installed solc versions.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
488
489fn scan_installed_versions() -> Vec<SemVer> {
493 svm::installed_versions()
494 .unwrap_or_default()
495 .iter()
496 .map(semver_to_local)
497 .collect()
498}
499
500pub fn find_matching_version(
505 constraint: &PragmaConstraint,
506 installed: &[SemVer],
507) -> Option<SemVer> {
508 let candidates: Vec<&SemVer> = installed
509 .iter()
510 .filter(|v| version_satisfies(v, constraint))
511 .collect();
512
513 candidates.last().cloned().cloned()
515}
516
517pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
519 match constraint {
520 PragmaConstraint::Exact(v) => version == v,
521 PragmaConstraint::Caret(v) => {
522 version.major == v.major && version >= v && version.minor < v.minor + 1
525 }
526 PragmaConstraint::Gte(v) => version >= v,
527 PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
528 }
529}
530
/// Determines import remappings for the project, in priority order:
/// non-empty `forge remappings` output, then the remappings from
/// `foundry.toml`, then a `remappings.txt` at the project root, else none.
pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
    let output = Command::new("forge")
        .arg("remappings")
        .current_dir(&config.root)
        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
        .output()
        .await;

    if let Ok(output) = output
        && output.status.success()
    {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let remappings: Vec<String> = stdout
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
        if !remappings.is_empty() {
            return remappings;
        }
    }

    // forge missing/failed or printed nothing: use config values.
    if !config.remappings.is_empty() {
        return config.remappings.clone();
    }

    // Last resort: read remappings.txt directly.
    // NOTE(review): this is a blocking read inside an async fn; the file is
    // tiny so it is presumably acceptable — confirm if it ever grows.
    let remappings_txt = config.root.join("remappings.txt");
    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
        return content
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
    }

    Vec::new()
}
575
576pub fn build_standard_json_input(
597 file_path: &str,
598 remappings: &[String],
599 config: &FoundryConfig,
600 source_content: Option<&str>,
601) -> Value {
602 let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
603
604 let mut settings = json!({
605 "remappings": remappings,
606 "outputSelection": {
607 "*": {
608 "*": contract_outputs,
609 "": ["ast"]
610 }
611 }
612 });
613
614 if config.via_ir {
615 settings["viaIR"] = json!(true);
616 }
617
618 if let Some(ref evm_version) = config.evm_version {
620 settings["evmVersion"] = json!(evm_version);
621 }
622
623 let source_value = match source_content {
624 Some(content) => json!({ "content": content }),
625 None => json!({ "urls": [file_path] }),
626 };
627
628 let mut sources = serde_json::Map::new();
630 sources.insert(file_path.to_string(), source_value);
631
632 json!({
633 "language": "Solidity",
634 "sources": sources,
635 "settings": settings
636 })
637}
638
/// Runs `solc --standard-json` from `project_root`, feeding `input` on
/// stdin and parsing the JSON from stdout.
///
/// The input is also persisted to the project cache for debugging. Empty
/// stdout is reported as an error carrying solc's stderr.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    // Best-effort debug artifact; a cache failure is deliberately ignored.
    let _ = crate::project_cache::save_last_solc_input(project_root, input);
    let input_str = serde_json::to_string(input)?;

    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // stdin is taken out and dropped at the end of this block, closing the
    // pipe so solc sees EOF and starts compiling.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    let stdout = String::from_utf8_lossy(&output.stdout);
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
683
/// Reshapes raw `solc --standard-json` output into the internal normalized
/// form: `errors`, `sources` keyed by absolute path, `contracts` keyed by
/// absolute path, and a `source_id_to_path` map.
///
/// When `project_root` is given, relative paths reported by solc are
/// absolutized against it, including every `absolutePath` inside the ASTs.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    // Rewrites `absolutePath` on every ImportDirective node, recursing
    // through child `nodes` arrays.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Errors pass through unchanged, defaulting to an empty array.
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Absolutizes a solc-reported path against the project root when the
    // path is relative; otherwise returns it unchanged.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Re-key each source entry by absolute path, moving (not cloning)
        // the data out of the original map.
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                if let Some(ast) = source_data.get_mut("ast") {
                    // Fix the AST root's own absolutePath, then its imports.
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // Record numeric source id → absolute path for later lookup.
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contracts are re-keyed by absolute path the same way.
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
808
/// Reshapes forge build output into the same normalized form as
/// [`normalize_solc_output`]: flattens per-path entry arrays down to single
/// objects and lifts `source_id_to_path` out of the first build-info record.
pub fn normalize_forge_output(mut forge_output: Value) -> Value {
    let mut result = Map::new();

    let errors = forge_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // sources: path -> [ { source_file } ]  becomes  path -> source_file
    // (only the first entry is kept).
    let mut normalized_sources = Map::new();
    if let Some(sources) = forge_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        for (path, entries) in sources.iter_mut() {
            if let Some(arr) = entries.as_array_mut()
                && let Some(first) = arr.first_mut()
                && let Some(sf) = first.get_mut("source_file")
            {
                normalized_sources.insert(path.clone(), sf.take());
            }
        }
    }
    result.insert("sources".to_string(), Value::Object(normalized_sources));

    // contracts: path -> name -> [ { contract } ]  becomes
    // path -> name -> contract (first entry only).
    let mut normalized_contracts = Map::new();
    if let Some(contracts) = forge_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        for (path, names) in contracts.iter_mut() {
            let mut path_contracts = Map::new();
            if let Some(names_obj) = names.as_object_mut() {
                for (name, entries) in names_obj.iter_mut() {
                    if let Some(arr) = entries.as_array_mut()
                        && let Some(first) = arr.first_mut()
                        && let Some(contract) = first.get_mut("contract")
                    {
                        path_contracts.insert(name.clone(), contract.take());
                    }
                }
            }
            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
        }
    }
    result.insert("contracts".to_string(), Value::Object(normalized_contracts));

    // Only the first build-info record is consulted for the id → path map.
    let source_id_to_path = forge_output
        .get_mut("build_infos")
        .and_then(|bi| bi.as_array_mut())
        .and_then(|arr| arr.first_mut())
        .and_then(|info| info.get_mut("source_id_to_path"))
        .map(Value::take)
        .unwrap_or_else(|| json!({}));
    result.insert("source_id_to_path".to_string(), source_id_to_path);

    Value::Object(result)
}
882
/// Compiles `file_path` with a pragma-appropriate solc and returns the
/// normalized standard-json output.
///
/// `source_content`, when given, is compiled instead of the on-disk file so
/// unsaved editor buffers are handled correctly.
pub async fn solc_ast(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    source_content: Option<&str>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    // Pragma collection walks the import graph with blocking file I/O, so
    // run it off the async executor.
    let file_abs = Path::new(file_path).to_path_buf();
    let config_root = config.root.clone();
    let remappings_clone = remappings.clone();
    let pragmas = tokio::task::spawn_blocking(move || {
        collect_import_pragmas(&file_abs, &config_root, &remappings_clone)
    })
    .await
    .unwrap_or_default();
    let constraint = tightest_constraint(&pragmas);
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // solc is invoked from the project root, so hand it a root-relative
    // path when the file lives under the root.
    let rel_path = Path::new(file_path)
        .strip_prefix(&config.root)
        .map(|p| p.to_string_lossy().into_owned())
        .unwrap_or_else(|_| file_path.to_string());

    let input = build_standard_json_input(&rel_path, &remappings, config, source_content);
    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;

    Ok(normalize_solc_output(raw_output, Some(&config.root)))
}
928
/// Compiles `file_path` from its on-disk content; shorthand for
/// [`solc_ast`] with no unsaved-buffer override.
pub async fn solc_build(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    solc_ast(file_path, config, client, None).await
}
937
/// Lists all `.sol` files under the project's sources, test and script
/// directories (library directories excluded).
pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, false)
}
960
961pub fn discover_src_only_files(config: &FoundryConfig) -> Vec<PathBuf> {
967 let root = &config.root;
968 if !root.is_dir() {
969 return Vec::new();
970 }
971 let mut files = Vec::new();
972 let dir = root.join(&config.sources_dir);
973 if dir.is_dir() {
974 discover_recursive(&dir, &[], &mut files);
975 }
976 files.sort();
977 files
978}
979
980pub fn discover_src_only_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
987 let seeds = discover_src_only_files(config);
988 let mut visited: HashSet<PathBuf> = HashSet::new();
989 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
990
991 while let Some(file) = queue.pop_front() {
992 if !visited.insert(file.clone()) {
993 continue;
994 }
995 let source = match std::fs::read_to_string(&file) {
996 Ok(s) => s,
997 Err(_) => continue,
998 };
999 for imp in links::ts_find_imports(source.as_bytes()) {
1000 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1001 if abs.exists() && !visited.contains(&abs) {
1002 queue.push_back(abs);
1003 }
1004 }
1005 }
1006 }
1007
1008 let mut result: Vec<PathBuf> = visited.into_iter().collect();
1009 result.sort();
1010 result
1011}
1012
/// Like [`discover_source_files`] but also scans the configured library
/// directories.
pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, true)
}
1022
1023fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
1024 let root = &config.root;
1025 if !root.is_dir() {
1026 return Vec::new();
1027 }
1028
1029 let mut files = Vec::new();
1030 let no_skip: &[String] = &[];
1031
1032 for dir_name in [&config.sources_dir, &config.test_dir, &config.script_dir] {
1037 let dir = root.join(dir_name);
1038 if dir.is_dir() {
1039 discover_recursive(&dir, no_skip, &mut files);
1040 }
1041 }
1042
1043 if include_libs {
1045 for lib_name in &config.libs {
1046 let lib_dir = root.join(lib_name);
1047 if lib_dir.is_dir() {
1048 discover_recursive(&lib_dir, no_skip, &mut files);
1049 }
1050 }
1051 }
1052
1053 files.sort();
1054 files
1055}
1056
1057pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
1076 let seeds = discover_source_files(config);
1078 let mut visited: HashSet<PathBuf> = HashSet::new();
1079 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
1080
1081 while let Some(file) = queue.pop_front() {
1082 if !visited.insert(file.clone()) {
1083 continue;
1084 }
1085 let source = match std::fs::read_to_string(&file) {
1086 Ok(s) => s,
1087 Err(_) => continue,
1088 };
1089 for imp in links::ts_find_imports(source.as_bytes()) {
1090 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1091 if abs.exists() && !visited.contains(&abs) {
1092 queue.push_back(abs);
1093 }
1094 }
1095 }
1096 }
1097
1098 let mut result: Vec<PathBuf> = visited.into_iter().collect();
1099 result.sort();
1100 result
1101}
1102
1103const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];
1106
1107fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
1108 let entries = match std::fs::read_dir(dir) {
1109 Ok(e) => e,
1110 Err(_) => return,
1111 };
1112 for entry in entries.flatten() {
1113 let path = entry.path();
1114 if path.is_dir() {
1115 if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
1116 if name.starts_with('.') {
1118 continue;
1119 }
1120 if DISCOVER_SKIP_DIRS.contains(&name) {
1122 continue;
1123 }
1124 if skip_libs.iter().any(|lib| lib == name) {
1126 continue;
1127 }
1128 }
1129 discover_recursive(&path, skip_libs, files);
1130 } else if let Some(name) = path.file_name().and_then(|n| n.to_str())
1131 && name.ends_with(".sol")
1132 {
1133 files.push(path);
1134 }
1135 }
1136}
1137
/// Builds a standard-json input covering `source_files`, always reading
/// content from disk (no editor-buffer cache).
pub fn build_batch_standard_json_input(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
) -> Value {
    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
}
1151
1152pub fn build_batch_standard_json_input_with_cache(
1162 source_files: &[PathBuf],
1163 remappings: &[String],
1164 config: &FoundryConfig,
1165 content_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1166) -> Value {
1167 let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
1168
1169 let mut settings = json!({
1170 "remappings": remappings,
1171 "outputSelection": {
1172 "*": {
1173 "*": contract_outputs,
1174 "": ["ast"]
1175 }
1176 }
1177 });
1178
1179 if config.via_ir {
1180 settings["viaIR"] = json!(true);
1181 }
1182 if let Some(ref evm_version) = config.evm_version {
1183 settings["evmVersion"] = json!(evm_version);
1184 }
1185
1186 let mut sources = serde_json::Map::new();
1187 for file in source_files {
1188 let rel_path = file
1189 .strip_prefix(&config.root)
1190 .map(|p| p.to_string_lossy().into_owned())
1191 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1192
1193 let cached_content = content_cache.and_then(|cache| {
1195 let uri = Url::from_file_path(file).ok()?;
1196 cache.get(uri.as_str()).map(|(_, c)| c.as_str())
1197 });
1198
1199 if let Some(content) = cached_content {
1200 sources.insert(rel_path, json!({ "content": content }));
1201 } else {
1202 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1203 }
1204 }
1205
1206 json!({
1207 "language": "Solidity",
1208 "sources": sources,
1209 "settings": settings
1210 })
1211}
1212
1213pub fn build_batch_standard_json_input_ast_only(
1223 source_files: &[PathBuf],
1224 remappings: &[String],
1225 root: &Path,
1226) -> Value {
1227 let settings = json!({
1228 "remappings": remappings,
1229 "outputSelection": {
1230 "*": {
1231 "": ["ast"]
1232 }
1233 }
1234 });
1235
1236 let mut sources = serde_json::Map::new();
1237 for file in source_files {
1238 let rel_path = file
1239 .strip_prefix(root)
1240 .map(|p| p.to_string_lossy().into_owned())
1241 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1242 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1243 }
1244
1245 json!({
1246 "language": "Solidity",
1247 "sources": sources,
1248 "settings": settings
1249 })
1250}
1251
1252pub fn build_parse_only_json_input(
1269 source_files: &[PathBuf],
1270 remappings: &[String],
1271 config: &FoundryConfig,
1272) -> Value {
1273 let settings = json!({
1274 "stopAfter": "parsing",
1275 "remappings": remappings,
1276 "outputSelection": {
1277 "*": {
1278 "": ["ast"]
1279 }
1280 }
1281 });
1282
1283 let mut sources = serde_json::Map::new();
1284 for file in source_files {
1285 let rel_path = file
1286 .strip_prefix(&config.root)
1287 .map(|p| p.to_string_lossy().into_owned())
1288 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1289 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1290 }
1291
1292 json!({
1293 "language": "Solidity",
1294 "sources": sources,
1295 "settings": settings
1296 })
1297}
1298
/// Indexes the whole project: discovers the compilation closure and
/// compiles it with the full (docs + selectors + AST) output selection.
///
/// Errors when no source files are found.
pub async fn solc_project_index(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, &source_files).await
}
1329
/// AST-only variant of [`solc_project_index`]: requests only ASTs from
/// solc, tolerating mixed pragma versions across the project.
pub async fn solc_project_index_ast_only(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;
    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }
    solc_project_index_from_files_ast_only(config, client, &source_files).await
}
1352
/// AST-only project index that tolerates mixed pragma requirements.
///
/// Files whose pragma is satisfied by the project's pinned solc version are
/// compiled in one batch; every remaining file is compiled individually
/// with a solc resolved from its own pragma, and the normalized outputs are
/// merged into a single result.
async fn solc_project_index_from_files_ast_only(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for AST-only project index",
        )));
    }

    let remappings = resolve_remappings(config).await;

    // The pinned project version (if parseable) dictates the batch
    // compiler; otherwise use the first pragma found in any source file.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // Partition files by whether the pinned version satisfies their pragma;
    // files without a readable pragma are assumed compatible.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        (source_files.to_vec(), Vec::new())
    };

    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // Batch-compile the compatible set (skipped when empty — the batch
    // compile is the only fallible step that aborts the index).
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input =
            build_batch_standard_json_input_ast_only(&compatible_files, &remappings, &config.root);
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    if incompatible_files.is_empty() {
        return Ok(result);
    }

    // Compile each incompatible file on its own, with a solc matched to its
    // pragma; individual failures are ignored (best-effort index).
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));
        let file_binary = resolve_solc_binary(config, pragma.as_ref(), client).await;
        let input =
            build_batch_standard_json_input_ast_only(&[file.clone()], &remappings, &config.root);
        if let Ok(raw) = run_solc(&file_binary, &input, &config.root).await {
            let normalized = normalize_solc_output(raw, Some(&config.root));
            merge_normalized_outputs(&mut result, normalized);
        }
    }

    if let Some(c) = client {
        let total = result
            .get("sources")
            .and_then(|s| s.as_object())
            .map_or(0, |obj| obj.len());
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: compiled {} files ({} needed different solc version)",
                total,
                incompatible_files.len(),
            ),
        )
        .await;
    }

    Ok(result)
}
1468
/// Indexes only the provided `source_files` (e.g. files affected by an
/// edit) rather than the whole discovered closure.
pub async fn solc_project_index_scoped(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files provided for scoped project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, source_files).await
}
1487
#[cfg(test)]
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    // Collect the `sourceLocation.file` of every solc error with code 5333
    // ("source file requires different compiler version"). Missing "errors"
    // arrays and errors without a source location yield an empty/partial set.
    solc_output
        .get("errors")
        .and_then(|e| e.as_array())
        .into_iter()
        .flatten()
        .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(|f| f.as_str())
        })
        .map(str::to_string)
        .collect()
}
1509
#[cfg(test)]
#[allow(dead_code)]
fn extract_import_error_files(solc_output: &Value) -> HashSet<String> {
    // Gather the files named in solc 6275 errors (import resolution
    // failures). Tolerates a missing "errors" array and errors without a
    // source location.
    let mut files = HashSet::new();
    let Some(errors) = solc_output.get("errors").and_then(|e| e.as_array()) else {
        return files;
    };
    for err in errors {
        if err.get("errorCode").and_then(|c| c.as_str()) != Some("6275") {
            continue;
        }
        if let Some(file) = err
            .get("sourceLocation")
            .and_then(|sl| sl.get("file"))
            .and_then(|f| f.as_str())
        {
            files.insert(file.to_string());
        }
    }
    files
}
1531
/// Compute the set of files that (transitively) import any file in
/// `exclude_abs`, including the excluded seeds themselves.
///
/// Builds a reverse import graph (imported file -> importers) from
/// `source_files`, then BFS-expands from the seeds along reverse edges.
/// Unreadable files and unresolvable imports are silently skipped.
///
/// Fix: `reverse_edges.get(¤t)` was encoding-corrupted — the `&curren`
/// prefix of `&current` had been decoded as the HTML entity `&curren;` (¤),
/// which does not compile. Restored to `&current`.
#[cfg(test)]
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // imported file -> set of files that import it
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    for file in source_files {
        let Ok(bytes) = std::fs::read(file) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(imported_abs) =
                resolve_import_to_abs(project_root, file, &imp.path, remappings)
            {
                reverse_edges
                    .entry(imported_abs)
                    .or_default()
                    .insert(file.clone());
            }
        }
    }

    // BFS from the excluded seeds: every importer of a closure member joins
    // the closure, so transitive importers are reached as well.
    let mut closure: HashSet<PathBuf> = exclude_abs.clone();
    let mut queue: std::collections::VecDeque<PathBuf> = exclude_abs.iter().cloned().collect();

    while let Some(current) = queue.pop_front() {
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                // insert() returns true only on first visit — that gates
                // re-enqueueing and guarantees termination on cycles.
                if closure.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    closure
}
1580
1581fn merge_normalized_outputs(base: &mut Value, other: Value) {
1587 if let (Some(base_sources), Some(other_sources)) = (
1589 base.get_mut("sources").and_then(|s| s.as_object_mut()),
1590 other.get("sources").and_then(|s| s.as_object()),
1591 ) {
1592 let max_base_id = base_sources
1594 .values()
1595 .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1596 .max()
1597 .map(|m| m + 1)
1598 .unwrap_or(0);
1599
1600 let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1602
1603 for (path, mut source_data) in other_sources.clone() {
1604 if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1606 let new_id = id + max_base_id;
1607 source_data
1608 .as_object_mut()
1609 .unwrap()
1610 .insert("id".to_string(), json!(new_id));
1611 remapped_id_to_path.push((new_id.to_string(), path.clone()));
1612 }
1613 base_sources.insert(path, source_data);
1614 }
1615
1616 if let Some(base_id_map) = base
1618 .get_mut("source_id_to_path")
1619 .and_then(|m| m.as_object_mut())
1620 {
1621 for (id, path) in remapped_id_to_path {
1622 base_id_map.insert(id, json!(path));
1623 }
1624 }
1625 }
1626
1627 if let (Some(base_contracts), Some(other_contracts)) = (
1629 base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1630 other.get("contracts").and_then(|c| c.as_object()),
1631 ) {
1632 for (path, contract_data) in other_contracts {
1633 base_contracts.insert(path.clone(), contract_data.clone());
1634 }
1635 }
1636
1637 }
1640
1641async fn solc_project_index_from_files(
1642 config: &FoundryConfig,
1643 client: Option<&tower_lsp::Client>,
1644 text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
1645 source_files: &[PathBuf],
1646) -> Result<Value, RunnerError> {
1647 if source_files.is_empty() {
1648 return Err(RunnerError::CommandError(std::io::Error::other(
1649 "no source files found for project index",
1650 )));
1651 }
1652
1653 let remappings = resolve_remappings(config).await;
1654
1655 let project_version: Option<SemVer> =
1657 config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
1658
1659 let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
1662 Some(PragmaConstraint::Exact(v.clone()))
1663 } else {
1664 source_files.iter().find_map(|f| {
1665 std::fs::read_to_string(f)
1666 .ok()
1667 .and_then(|src| parse_pragma(&src))
1668 })
1669 };
1670 let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
1671
1672 let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
1678 let mut compat = Vec::with_capacity(source_files.len());
1679 let mut incompat = Vec::new();
1680 for file in source_files {
1681 let is_compatible = std::fs::read_to_string(file)
1682 .ok()
1683 .and_then(|src| parse_pragma(&src))
1684 .map(|pragma| version_satisfies(ver, &pragma))
1685 .unwrap_or(true);
1687 if is_compatible {
1688 compat.push(file.clone());
1689 } else {
1690 incompat.push(file.clone());
1691 }
1692 }
1693 (compat, incompat)
1694 } else {
1695 (source_files.to_vec(), Vec::new())
1697 };
1698
1699 if !incompatible_files.is_empty() {
1700 if let Some(c) = client {
1701 c.log_message(
1702 tower_lsp::lsp_types::MessageType::INFO,
1703 format!(
1704 "project index: {} compatible, {} incompatible with solc {}",
1705 compatible_files.len(),
1706 incompatible_files.len(),
1707 project_version
1708 .as_ref()
1709 .map(|v| v.to_string())
1710 .unwrap_or_default(),
1711 ),
1712 )
1713 .await;
1714 }
1715 }
1716
1717 let mut result = if compatible_files.is_empty() {
1725 json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
1726 } else {
1727 let input = build_batch_standard_json_input_with_cache(
1728 &compatible_files,
1729 &remappings,
1730 config,
1731 text_cache,
1732 );
1733 let raw = run_solc(&solc_binary, &input, &config.root).await?;
1734 normalize_solc_output(raw, Some(&config.root))
1735 };
1736
1737 let batch_source_count = result
1738 .get("sources")
1739 .and_then(|s| s.as_object())
1740 .map_or(0, |obj| obj.len());
1741
1742 if incompatible_files.is_empty() {
1743 return Ok(result);
1744 }
1745
1746 if let Some(c) = client {
1747 let batch_errors: Vec<String> = result
1749 .get("errors")
1750 .and_then(|e| e.as_array())
1751 .map(|arr| {
1752 arr.iter()
1753 .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
1754 .take(3)
1755 .filter_map(|e| {
1756 let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
1757 let file = e
1758 .get("sourceLocation")
1759 .and_then(|sl| sl.get("file"))
1760 .and_then(|f| f.as_str())
1761 .unwrap_or("?");
1762 Some(format!("{file}: {msg}"))
1763 })
1764 .collect()
1765 })
1766 .unwrap_or_default();
1767
1768 c.log_message(
1769 tower_lsp::lsp_types::MessageType::INFO,
1770 format!(
1771 "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
1772 batch_source_count,
1773 incompatible_files.len(),
1774 if batch_errors.is_empty() {
1775 String::new()
1776 } else {
1777 format!(" [first errors: {}]", batch_errors.join("; "))
1778 },
1779 ),
1780 )
1781 .await;
1782 }
1783
1784 let mut compiled = 0usize;
1786 let mut skipped = 0usize;
1787 for file in &incompatible_files {
1788 let pragma = std::fs::read_to_string(file)
1789 .ok()
1790 .and_then(|src| parse_pragma(&src));
1791
1792 let Some(file_constraint) = pragma else {
1793 skipped += 1;
1794 continue;
1795 };
1796
1797 let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
1798 let input = build_batch_standard_json_input_with_cache(
1799 &[file.clone()],
1800 &remappings,
1801 config,
1802 text_cache,
1803 );
1804 match run_solc(&file_binary, &input, &config.root).await {
1805 Ok(raw) => {
1806 let normalized = normalize_solc_output(raw, Some(&config.root));
1807 merge_normalized_outputs(&mut result, normalized);
1808 compiled += 1;
1809 }
1810 Err(e) => {
1811 if let Some(c) = client {
1812 c.log_message(
1813 tower_lsp::lsp_types::MessageType::WARNING,
1814 format!(
1815 "project index: incompatible file {} failed: {e}",
1816 file.display(),
1817 ),
1818 )
1819 .await;
1820 }
1821 skipped += 1;
1822 }
1823 }
1824 }
1825
1826 if let Some(c) = client {
1827 c.log_message(
1828 tower_lsp::lsp_types::MessageType::INFO,
1829 format!(
1830 "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
1831 ),
1832 )
1833 .await;
1834 }
1835
1836 Ok(result)
1837}
1838
#[cfg(test)]
mod tests {
    use super::*;

    // Sources pass through normalization intact and a reverse id -> path map
    // ("source_id_to_path") is derived.
    #[test]
    fn test_normalize_solc_sources() {
        let solc_output = json!({
            "sources": {
                "src/Foo.sol": {
                    "id": 0,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Foo.sol",
                        "id": 100
                    }
                },
                "src/Bar.sol": {
                    "id": 1,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Bar.sol",
                        "id": 200
                    }
                }
            },
            "contracts": {},
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        let sources = normalized.get("sources").unwrap().as_object().unwrap();
        assert_eq!(sources.len(), 2);

        let foo = sources.get("src/Foo.sol").unwrap();
        assert_eq!(foo.get("id").unwrap(), 0);
        assert_eq!(
            foo.get("ast")
                .unwrap()
                .get("nodeType")
                .unwrap()
                .as_str()
                .unwrap(),
            "SourceUnit"
        );

        let id_to_path = normalized
            .get("source_id_to_path")
            .unwrap()
            .as_object()
            .unwrap();
        assert_eq!(id_to_path.len(), 2);
    }

    // Contract metadata (abi, evm.methodIdentifiers) survives normalization.
    #[test]
    fn test_normalize_solc_contracts() {
        let solc_output = json!({
            "sources": {},
            "contracts": {
                "src/Foo.sol": {
                    "Foo": {
                        "abi": [{"type": "function", "name": "bar"}],
                        "evm": {
                            "methodIdentifiers": {
                                "bar(uint256)": "abcd1234"
                            }
                        }
                    }
                }
            },
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        let contracts = normalized.get("contracts").unwrap().as_object().unwrap();
        let foo_contracts = contracts.get("src/Foo.sol").unwrap().as_object().unwrap();
        let foo = foo_contracts.get("Foo").unwrap();

        let method_ids = foo
            .get("evm")
            .unwrap()
            .get("methodIdentifiers")
            .unwrap()
            .as_object()
            .unwrap();
        assert_eq!(
            method_ids.get("bar(uint256)").unwrap().as_str().unwrap(),
            "abcd1234"
        );
    }

    // Diagnostics are passed through unmodified.
    #[test]
    fn test_normalize_solc_errors_passthrough() {
        let solc_output = json!({
            "sources": {},
            "contracts": {},
            "errors": [{
                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
                "type": "Warning",
                "component": "general",
                "severity": "warning",
                "errorCode": "2394",
                "message": "test warning",
                "formattedMessage": "Warning: test warning"
            }]
        });

        let normalized = normalize_solc_output(solc_output, None);

        let errors = normalized.get("errors").unwrap().as_array().unwrap();
        assert_eq!(errors.len(), 1);
        assert_eq!(
            errors[0].get("errorCode").unwrap().as_str().unwrap(),
            "2394"
        );
    }

    // A missing "errors" key normalizes to empty maps plus an empty errors
    // array and an empty source_id_to_path map.
    #[test]
    fn test_normalize_empty_solc_output() {
        let solc_output = json!({
            "sources": {},
            "contracts": {}
        });

        let normalized = normalize_solc_output(solc_output, None);

        assert!(
            normalized
                .get("sources")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
        assert!(
            normalized
                .get("contracts")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
        assert_eq!(
            normalized.get("errors").unwrap().as_array().unwrap().len(),
            0
        );
        assert!(
            normalized
                .get("source_id_to_path")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
    }

    // Default config: source + remappings land in the input; optimizer/viaIR/
    // evmVersion are absent; output selection includes docs and method ids
    // but omits abi and gas estimates.
    #[test]
    fn test_build_standard_json_input() {
        let config = FoundryConfig::default();
        let input = build_standard_json_input(
            "/path/to/Foo.sol",
            &[
                "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
                "forge-std/=lib/forge-std/src/".to_string(),
            ],
            &config,
            None,
        );

        let sources = input.get("sources").unwrap().as_object().unwrap();
        assert!(sources.contains_key("/path/to/Foo.sol"));

        let settings = input.get("settings").unwrap();
        let remappings = settings.get("remappings").unwrap().as_array().unwrap();
        assert_eq!(remappings.len(), 2);

        let output_sel = settings.get("outputSelection").unwrap();
        assert!(output_sel.get("*").is_some());

        assert!(settings.get("optimizer").is_none());
        assert!(settings.get("viaIR").is_none());
        assert!(settings.get("evmVersion").is_none());

        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));
        assert!(!output_names.contains(&"abi")); assert!(output_names.contains(&"devdoc"));
        assert!(output_names.contains(&"userdoc"));
        assert!(output_names.contains(&"evm.methodIdentifiers"));
    }

    // With optimizer/via_ir/evm_version set: optimizer is still omitted from
    // the input (NOTE(review): presumably intentional for analysis-oriented
    // compiles — confirm), while viaIR and evmVersion pass through.
    #[test]
    fn test_build_standard_json_input_with_config() {
        let config = FoundryConfig {
            optimizer: true,
            optimizer_runs: 9999999,
            via_ir: true,
            evm_version: Some("osaka".to_string()),
            ..Default::default()
        };
        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config, None);

        let settings = input.get("settings").unwrap();

        assert!(settings.get("optimizer").is_none());

        assert!(settings.get("viaIR").unwrap().as_bool().unwrap());

        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));

        assert_eq!(
            settings.get("evmVersion").unwrap().as_str().unwrap(),
            "osaka"
        );
    }

    // With no config version and no pragma constraint, resolution falls back
    // to the plain "solc" binary on PATH.
    #[tokio::test]
    async fn test_resolve_solc_binary_default() {
        let config = FoundryConfig::default();
        let binary = resolve_solc_binary(&config, None, None).await;
        assert_eq!(binary, PathBuf::from("solc"));
    }

    #[test]
    fn test_parse_pragma_exact() {
        let source = "// SPDX\npragma solidity 0.8.26;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Exact(SemVer {
                major: 0,
                minor: 8,
                patch: 26
            }))
        );
    }

    #[test]
    fn test_parse_pragma_caret() {
        let source = "pragma solidity ^0.8.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Caret(SemVer {
                major: 0,
                minor: 8,
                patch: 0
            }))
        );
    }

    #[test]
    fn test_parse_pragma_gte() {
        let source = "pragma solidity >=0.8.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Gte(SemVer {
                major: 0,
                minor: 8,
                patch: 0
            }))
        );
    }

    // A ">=low <high" pragma parses to a half-open Range constraint.
    #[test]
    fn test_parse_pragma_range() {
        let source = "pragma solidity >=0.6.2 <0.9.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Range(
                SemVer {
                    major: 0,
                    minor: 6,
                    patch: 2
                },
                SemVer {
                    major: 0,
                    minor: 9,
                    patch: 0
                },
            ))
        );
    }

    // No pragma directive at all -> None.
    #[test]
    fn test_parse_pragma_none() {
        let source = "contract Foo {}\n";
        assert_eq!(parse_pragma(source), None);
    }

    #[test]
    fn test_version_satisfies_exact() {
        let v = SemVer {
            major: 0,
            minor: 8,
            patch: 26,
        };
        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 25
            },
            &PragmaConstraint::Exact(v)
        ));
    }

    // Caret: same major.minor (for 0.x), any patch >= base; next minor and
    // previous minor are rejected.
    #[test]
    fn test_version_satisfies_caret() {
        let constraint = PragmaConstraint::Caret(SemVer {
            major: 0,
            minor: 8,
            patch: 0,
        });
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 0
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 26
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 7,
                patch: 0
            },
            &constraint
        ));
    }

    #[test]
    fn test_version_satisfies_gte() {
        let constraint = PragmaConstraint::Gte(SemVer {
            major: 0,
            minor: 8,
            patch: 0,
        });
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 0
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 7,
                patch: 0
            },
            &constraint
        ));
    }

    // Range is inclusive at the lower bound, exclusive at the upper bound.
    #[test]
    fn test_version_satisfies_range() {
        let constraint = PragmaConstraint::Range(
            SemVer {
                major: 0,
                minor: 6,
                patch: 2,
            },
            SemVer {
                major: 0,
                minor: 9,
                patch: 0,
            },
        );
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 6,
                patch: 2
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 26
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 6,
                patch: 1
            },
            &constraint
        ));
    }

    // Caret constraints pick the highest satisfying installed version; Exact
    // constraints match only the precise version or nothing.
    #[test]
    fn test_find_matching_version() {
        let installed = vec![
            SemVer {
                major: 0,
                minor: 8,
                patch: 0,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 20,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 26,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 33,
            },
        ];
        let constraint = PragmaConstraint::Caret(SemVer {
            major: 0,
            minor: 8,
            patch: 20,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(
            matched,
            Some(SemVer {
                major: 0,
                minor: 8,
                patch: 33
            })
        );

        let constraint = PragmaConstraint::Exact(SemVer {
            major: 0,
            minor: 8,
            patch: 20,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(
            matched,
            Some(SemVer {
                major: 0,
                minor: 8,
                patch: 20
            })
        );

        let constraint = PragmaConstraint::Exact(SemVer {
            major: 0,
            minor: 8,
            patch: 15,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(matched, None);
    }

    // Whatever versions exist on this machine, the listing must come back
    // sorted ascending (host-dependent contents, so only ordering is checked).
    #[test]
    fn test_list_installed_versions() {
        let versions = list_installed_versions();
        for w in versions.windows(2) {
            assert!(w[0] <= w[1]);
        }
    }

    // Only code-5333 errors ("requires different compiler version")
    // contribute files; other error codes are ignored.
    #[test]
    fn test_extract_version_error_files_basic() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/old-lib/src/Legacy.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "9574",
                    "severity": "error",
                    "message": "Some other error",
                    "sourceLocation": {
                        "file": "src/Main.sol",
                        "start": 100,
                        "end": 200
                    }
                }
            ]
        });

        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 2);
        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
        assert!(!files.contains("src/Main.sol"));
    }

    // Both an empty errors array and a missing "errors" key yield an empty set.
    #[test]
    fn test_extract_version_error_files_empty() {
        let output = json!({
            "errors": []
        });
        assert!(extract_version_error_files(&output).is_empty());

        let output = json!({});
        assert!(extract_version_error_files(&output).is_empty());
    }

    // A 5333 error without a sourceLocation contributes nothing.
    #[test]
    fn test_extract_version_error_files_no_source_location() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version"
                }
            ]
        });
        assert!(extract_version_error_files(&output).is_empty());
    }

    // Multiple 5333 errors in the same file collapse into one set entry.
    #[test]
    fn test_extract_version_error_files_dedup() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
                }
            ]
        });
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 1);
        assert!(files.contains("lib/same.sol"));
    }

    // Chain a -> b -> c with unrelated d: excluding c pulls in b (direct
    // importer) and a (transitive), but not d.
    #[test]
    fn test_reverse_import_closure_simple() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(
            root.join("b.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
        )
        .unwrap();
        std::fs::write(
            root.join("c.sol"),
            "// SPDX-License-Identifier: MIT\ncontract C {}",
        )
        .unwrap();
        std::fs::write(
            root.join("d.sol"),
            "// SPDX-License-Identifier: MIT\ncontract D {}",
        )
        .unwrap();

        let files: Vec<PathBuf> = vec![
            root.join("a.sol"),
            root.join("b.sol"),
            root.join("c.sol"),
            root.join("d.sol"),
        ];

        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();
        let closure = reverse_import_closure(&files, &exclude, root, &[]);

        assert!(
            closure.contains(&root.join("c.sol")),
            "seed file in closure"
        );
        assert!(closure.contains(&root.join("b.sol")), "direct importer");
        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
        assert!(
            !closure.contains(&root.join("d.sol")),
            "unrelated file not in closure"
        );
        assert_eq!(closure.len(), 3);
    }

    // With no importers, the closure is exactly the seed set.
    #[test]
    fn test_reverse_import_closure_no_importers() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();

        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 1);
        assert!(closure.contains(&root.join("a.sol")));
    }

    // Diamond a -> {b, c} -> d: excluding d reaches everything exactly once.
    #[test]
    fn test_reverse_import_closure_diamond() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();

        let files: Vec<PathBuf> = vec![
            root.join("a.sol"),
            root.join("b.sol"),
            root.join("c.sol"),
            root.join("d.sol"),
        ];
        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 4);
    }

    // Merged source id is remapped past base's max id (max 1 -> offset 2),
    // and source_id_to_path plus contracts pick up the new entries.
    #[test]
    fn test_merge_normalized_outputs_basic() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/src/A.sol": { "A": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/src/A.sol",
                "1": "/abs/src/B.sol"
            }
        });

        let other = json!({
            "sources": {
                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/lib/C.sol": { "C": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/lib/C.sol"
            }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 3);
        assert!(sources.contains_key("/abs/lib/C.sol"));

        let c_id = sources["/abs/lib/C.sol"]["id"].as_u64().unwrap();
        assert_eq!(
            c_id, 2,
            "remapped id should be max_base_id (2) + original (0)"
        );

        let id_map = base["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_map.len(), 3);
        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");

        let contracts = base["contracts"].as_object().unwrap();
        assert_eq!(contracts.len(), 2);
        assert!(contracts.contains_key("/abs/lib/C.sol"));
    }

    // Merging an empty output is a no-op on base.
    #[test]
    fn test_merge_normalized_outputs_empty_other() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": {} }
            },
            "contracts": {},
            "errors": [],
            "source_id_to_path": { "0": "/abs/src/A.sol" }
        });

        let other = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
    }

    // Empty base: ids from `other` are kept as-is (offset 0).
    #[test]
    fn test_merge_normalized_outputs_empty_base() {
        let mut base = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        let other = json!({
            "sources": {
                "/abs/lib/X.sol": { "id": 0, "ast": {} }
            },
            "contracts": {
                "/abs/lib/X.sol": { "X": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": { "0": "/abs/lib/X.sol" }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
        let x_id = sources["/abs/lib/X.sol"]["id"].as_u64().unwrap();
        assert_eq!(x_id, 0);
    }
}