1use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
/// Process-wide cache of locally installed solc versions. Populated lazily
/// on first access (see `get_installed_versions`) and refreshed after an
/// install via `invalidate_installed_versions`.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22 let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23 mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27 if let Some(mutex) = INSTALLED_VERSIONS.get() {
28 *mutex.lock().unwrap() = scan_installed_versions();
29 }
30}
31
/// Convert a `semver::Version` into this module's lightweight `SemVer`.
///
/// NOTE(review): the `as u32` casts silently truncate the crate's `u64`
/// components; harmless for real solc releases, but worth confirming no
/// caller feeds arbitrary versions through here.
fn semver_to_local(v: &semver::Version) -> SemVer {
    SemVer {
        major: v.major as u32,
        minor: v.minor as u32,
        patch: v.patch as u32,
    }
}
40
/// Choose the solc binary to run for a compilation.
///
/// Resolution order:
/// 1. With a non-exact pragma `constraint`, prefer the `foundry.toml`
///    version when it satisfies the pragma and its binary is installed.
/// 2. Otherwise, an already-installed version matching the pragma.
/// 3. Otherwise, install a matching version through svm and use it.
/// 4. With no pragma (or nothing matched/installed), fall back to the
///    `foundry.toml` version's binary if present on disk.
/// 5. As a last resort, return plain `solc` (resolved via `$PATH`).
///
/// `client`, when present, receives LSP log/show messages describing the
/// decision and any installation progress.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    if let Some(constraint) = constraint {
        // An exact pragma must win over the configured version, so this
        // config shortcut is skipped for `Exact` constraints.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {config_ver} (pragma {constraint})"),
                )
                .await;
            }
            return path;
        }

        // Any locally installed version satisfying the pragma.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("using solc {version}"),
                )
                .await;
            }
            return path;
        }

        // Nothing installed matches: try installing a suitable version.
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // Refresh the cached list so later lookups see the new install.
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                        Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // No pragma, or every pragma-driven path above failed: use the
    // foundry.toml version if its binary exists locally.
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // Final fallback: whatever `solc` resolves to on $PATH.
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
160
161fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
168 match constraint {
169 PragmaConstraint::Exact(v) => Some(v.to_string()),
170 PragmaConstraint::Caret(v) => Some(v.to_string()),
171 PragmaConstraint::Gte(v) => Some(v.to_string()),
172 PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
173 }
174}
175
176async fn svm_install(version: &str) -> bool {
180 let ver = match semver::Version::parse(version) {
181 Ok(v) => v,
182 Err(_) => return false,
183 };
184 svm::install(&ver).await.is_ok()
185}
186
187fn find_solc_binary(version: &str) -> Option<PathBuf> {
189 let path = svm::version_binary(version);
190 if path.is_file() {
191 return Some(path);
192 }
193 None
194}
195
/// A minimal `major.minor.patch` version triple used for pragma matching.
/// Ordering is lexicographic over (major, minor, patch) via the derives.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}

impl SemVer {
    /// Parse a strict `X.Y.Z` string (exactly three numeric components);
    /// anything else yields `None`.
    fn parse(s: &str) -> Option<SemVer> {
        let mut components = s.split('.');
        let major = components.next()?.parse().ok()?;
        let minor = components.next()?.parse().ok()?;
        let patch = components.next()?.parse().ok()?;
        // Reject trailing components such as "1.2.3.4".
        if components.next().is_some() {
            return None;
        }
        Some(SemVer { major, minor, patch })
    }
}

impl std::fmt::Display for SemVer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
    }
}
225
/// A version constraint extracted from a `pragma solidity` directive.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `pragma solidity 0.8.20;` — exactly this version.
    Exact(SemVer),
    /// `pragma solidity ^0.8.4;` — at least the base version, below the
    /// next minor release (see `version_satisfies` / `constraint_to_range`).
    Caret(SemVer),
    /// `pragma solidity >=0.8.0;` — this version or anything newer.
    Gte(SemVer),
    /// `pragma solidity >=LO <HI;` — half-open range `[LO, HI)`.
    Range(SemVer, SemVer),
}
239
240impl std::fmt::Display for PragmaConstraint {
241 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
242 match self {
243 PragmaConstraint::Exact(v) => write!(f, "={v}"),
244 PragmaConstraint::Caret(v) => write!(f, "^{v}"),
245 PragmaConstraint::Gte(v) => write!(f, ">={v}"),
246 PragmaConstraint::Range(lo, hi) => write!(f, ">={lo} <{hi}"),
247 }
248 }
249}
250
/// Resolve a Solidity import string to an absolute, lexically normalized
/// path.
///
/// Resolution order:
/// 1. `./` / `../` imports are resolved against the importing file's
///    directory.
/// 2. Otherwise the first remapping (`prefix=target`) whose prefix matches
///    wins; the target (plus remaining suffix) is joined onto the project
///    root.
/// 3. Anything else is treated as project-root relative.
fn resolve_import_to_abs(
    project_root: &Path,
    importer_abs: &Path,
    import_path: &str,
    remappings: &[String],
) -> Option<PathBuf> {
    // Relative imports: resolve against the importer's directory.
    if import_path.starts_with("./") || import_path.starts_with("../") {
        let dir = importer_abs.parent()?;
        return Some(lexical_normalize(&dir.join(import_path)));
    }

    // Remapped imports: first remapping whose prefix matches wins.
    for remap in remappings {
        if let Some((prefix, target)) = remap.split_once('=') {
            if prefix.is_empty() || target.is_empty() {
                continue;
            }
            if let Some(suffix) = import_path.strip_prefix(prefix) {
                let joined = project_root.join(format!("{target}{suffix}"));
                return Some(lexical_normalize(&joined));
            }
        }
    }

    // Fallback: interpret the import as relative to the project root.
    Some(lexical_normalize(&project_root.join(import_path)))
}

/// Normalize a path purely lexically: drop `.` components and fold each
/// `..` into the preceding component, without touching the filesystem
/// (so symlinks are NOT resolved).
fn lexical_normalize(path: &Path) -> PathBuf {
    use std::path::Component;
    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => {}
            // Lexical `..`: pop the last pushed component (no-op at root).
            Component::ParentDir => {
                normalized.pop();
            }
            other => normalized.push(other.as_os_str()),
        }
    }
    normalized
}
298
299fn collect_import_pragmas(
305 file_path: &Path,
306 project_root: &Path,
307 remappings: &[String],
308) -> Vec<PragmaConstraint> {
309 let mut pragmas = Vec::new();
310 let mut visited = HashSet::new();
311 collect_import_pragmas_recursive(
312 file_path,
313 project_root,
314 remappings,
315 &mut pragmas,
316 &mut visited,
317 );
318 pragmas
319}
320
321fn collect_import_pragmas_recursive(
322 file_path: &Path,
323 project_root: &Path,
324 remappings: &[String],
325 pragmas: &mut Vec<PragmaConstraint>,
326 visited: &mut HashSet<PathBuf>,
327) {
328 if !visited.insert(file_path.to_path_buf()) {
329 return;
330 }
331 let source = match std::fs::read_to_string(file_path) {
332 Ok(s) => s,
333 Err(_) => return,
334 };
335 if let Some(pragma) = parse_pragma(&source) {
336 pragmas.push(pragma);
337 }
338 for imp in links::ts_find_imports(source.as_bytes()) {
339 if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
340 collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
341 }
342 }
343}
344
345fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
355 if pragmas.is_empty() {
356 return None;
357 }
358
359 for p in pragmas {
361 if matches!(p, PragmaConstraint::Exact(_)) {
362 return Some(p.clone());
363 }
364 }
365
366 let mut lower = SemVer {
368 major: 0,
369 minor: 0,
370 patch: 0,
371 };
372 let mut upper: Option<SemVer> = None;
373
374 for p in pragmas {
375 let (lo, hi) = constraint_to_range(p);
376 if lo > lower {
377 lower = lo;
378 }
379 if let Some(hi) = hi {
380 upper = Some(match upper {
381 Some(cur) if hi < cur => hi,
382 Some(cur) => cur,
383 None => hi,
384 });
385 }
386 }
387
388 match upper {
389 Some(hi) if lower >= hi => None, Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
391 None => Some(PragmaConstraint::Gte(lower)),
392 }
393}
394
/// Convert a constraint into a half-open `[lower, upper)` version range;
/// `upper == None` means unbounded above.
fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
    match constraint {
        // Exact v is the degenerate range [v, v.patch+1).
        PragmaConstraint::Exact(v) => (
            v.clone(),
            Some(SemVer {
                major: v.major,
                minor: v.minor,
                patch: v.patch + 1,
            }),
        ),
        // Caret: everything below the next minor release.
        PragmaConstraint::Caret(v) => (
            v.clone(),
            Some(SemVer {
                major: v.major,
                minor: v.minor + 1,
                patch: 0,
            }),
        ),
        PragmaConstraint::Gte(v) => (v.clone(), None),
        PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
    }
}
419
/// Extract the version constraint from the first `pragma solidity` line.
///
/// Only the first 20 lines are scanned (the pragma conventionally sits
/// near the top, after the license header). Supported forms:
/// `>=X.Y.Z <A.B.C`, `>=X.Y.Z`, `^X.Y.Z`, and a bare `X.Y.Z` (exact).
/// Returns `None` when no pragma is found or its syntax is unsupported
/// (e.g. `~`, `>`, `<=` operators).
pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
    let pragma_line = source
        .lines()
        .take(20)
        .find(|line| line.trim_start().starts_with("pragma solidity"))?;

    let after_keyword = pragma_line
        .trim_start()
        .strip_prefix("pragma solidity")?
        .trim();
    // Drop the trailing semicolon if present; tolerate its absence.
    let constraint_str = after_keyword
        .strip_suffix(';')
        .unwrap_or(after_keyword)
        .trim();

    if constraint_str.is_empty() {
        return None;
    }

    // ">=lower <upper" range, or plain ">=version".
    if let Some(rest) = constraint_str.strip_prefix(">=") {
        let rest = rest.trim();
        // The upper bound may be separated by whitespace or start directly
        // with '<' (e.g. ">=0.8.0<0.9.0").
        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
            let lower_str = rest[..space_idx].trim();
            let upper_part = rest[space_idx..].trim();
            if let Some(upper_str) = upper_part.strip_prefix('<') {
                let upper_str = upper_str.trim();
                if let (Some(lower), Some(upper)) =
                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
                {
                    return Some(PragmaConstraint::Range(lower, upper));
                }
            }
        }
        // No (valid) upper bound: treat the whole remainder as ">=version".
        if let Some(ver) = SemVer::parse(rest) {
            return Some(PragmaConstraint::Gte(ver));
        }
    }

    if let Some(rest) = constraint_str.strip_prefix('^')
        && let Some(ver) = SemVer::parse(rest.trim())
    {
        return Some(PragmaConstraint::Caret(ver));
    }

    // A bare version pins the compiler exactly.
    if let Some(ver) = SemVer::parse(constraint_str) {
        return Some(PragmaConstraint::Exact(ver));
    }

    None
}
483
/// Public accessor for the (cached) list of locally installed solc
/// versions.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
488
489fn scan_installed_versions() -> Vec<SemVer> {
493 svm::installed_versions()
494 .unwrap_or_default()
495 .iter()
496 .map(semver_to_local)
497 .collect()
498}
499
500pub fn find_matching_version(
505 constraint: &PragmaConstraint,
506 installed: &[SemVer],
507) -> Option<SemVer> {
508 let candidates: Vec<&SemVer> = installed
509 .iter()
510 .filter(|v| version_satisfies(v, constraint))
511 .collect();
512
513 candidates.last().cloned().cloned()
515}
516
517pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
519 match constraint {
520 PragmaConstraint::Exact(v) => version == v,
521 PragmaConstraint::Caret(v) => {
522 version.major == v.major && version >= v && version.minor < v.minor + 1
525 }
526 PragmaConstraint::Gte(v) => version >= v,
527 PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
528 }
529}
530
/// Determine the project's import remappings, in order of preference:
/// 1. the output of `forge remappings` run at the project root,
/// 2. the `remappings` list from `foundry.toml`,
/// 3. a `remappings.txt` file at the project root,
/// 4. none.
pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
    let output = Command::new("forge")
        .arg("remappings")
        .current_dir(&config.root)
        // Keep forge's nightly banner out of the parsed output.
        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
        .output()
        .await;

    if let Ok(output) = output
        && output.status.success()
    {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let remappings: Vec<String> = stdout
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
        // An empty (but successful) forge run falls through to the config.
        if !remappings.is_empty() {
            return remappings;
        }
    }

    if !config.remappings.is_empty() {
        return config.remappings.clone();
    }

    let remappings_txt = config.root.join("remappings.txt");
    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
        return content
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
    }

    Vec::new()
}
575
576pub fn build_standard_json_input(
589 file_path: &str,
590 remappings: &[String],
591 config: &FoundryConfig,
592) -> Value {
593 let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];
594
595 let mut settings = json!({
596 "remappings": remappings,
597 "outputSelection": {
598 "*": {
599 "*": contract_outputs,
600 "": ["ast"]
601 }
602 }
603 });
604
605 if config.via_ir {
606 settings["viaIR"] = json!(true);
607 }
608
609 if let Some(ref evm_version) = config.evm_version {
611 settings["evmVersion"] = json!(evm_version);
612 }
613
614 json!({
615 "language": "Solidity",
616 "sources": {
617 file_path: {
618 "urls": [file_path]
619 }
620 },
621 "settings": settings
622 })
623}
624
/// Run `solc --standard-json` with `input` piped to stdin and return the
/// parsed JSON output.
///
/// The input is also persisted via `project_cache` (best effort) so the
/// exact request can be inspected after the fact. Errors if spawning,
/// piping, or JSON parsing fails, or if solc produced no stdout at all —
/// in that case its stderr is surfaced in the error message.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    // Best-effort: remember the exact input for postmortem debugging.
    let _ = crate::project_cache::save_last_solc_input(project_root, input);
    let input_str = serde_json::to_string(input)?;

    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    // Write the request, then drop stdin so solc sees EOF and starts.
    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
    }

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    let stdout = String::from_utf8_lossy(&output.stdout);
    // Empty stdout means solc failed before emitting JSON (e.g. a bad
    // binary); report stderr instead of a confusing JSON parse error.
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
669
/// Reshape raw solc standard-JSON output into the normalized form used by
/// the rest of the server:
/// - `errors` is always present (empty array when solc emitted none);
/// - `sources` / `contracts` are re-keyed by absolute paths (relative keys
///   are resolved against `project_root` when one is given);
/// - AST `absolutePath` fields (the file root node and every
///   `ImportDirective`) are rewritten to absolute paths;
/// - a `source_id_to_path` map from solc source ids to absolute paths is
///   added.
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    // Recursively rewrite `absolutePath` on every ImportDirective node.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        // Recurse into child AST nodes.
        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Always expose an `errors` array, even when solc omitted it.
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Resolve a (possibly relative) solc path against the project root;
    // already-absolute paths pass through untouched.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Keys are collected up front because entries are removed while
        // iterating.
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                if let Some(ast) = source_data.get_mut("ast") {
                    // Root node's own absolutePath first, then imports.
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // Record the solc source id → absolute path mapping.
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Re-key contracts by absolute path as well.
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
794
/// Reshape forge's compile output into the same normalized form produced
/// by `normalize_solc_output`: `errors` always present, `sources` and
/// `contracts` flattened from per-path entry arrays to single objects, and
/// `source_id_to_path` lifted out of the first build info.
///
/// NOTE(review): only the FIRST entry of each per-path array is kept —
/// presumably one entry per compiler version; confirm this is the desired
/// pick for multi-version builds.
pub fn normalize_forge_output(mut forge_output: Value) -> Value {
    let mut result = Map::new();

    // Always expose an `errors` array, even when absent in the input.
    let errors = forge_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // sources: { path: [ { source_file: … } ] } → { path: source_file }.
    let mut normalized_sources = Map::new();
    if let Some(sources) = forge_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        for (path, entries) in sources.iter_mut() {
            if let Some(arr) = entries.as_array_mut()
                && let Some(first) = arr.first_mut()
                && let Some(sf) = first.get_mut("source_file")
            {
                normalized_sources.insert(path.clone(), sf.take());
            }
        }
    }
    result.insert("sources".to_string(), Value::Object(normalized_sources));

    // contracts: { path: { name: [ { contract: … } ] } }
    //          → { path: { name: contract } }.
    let mut normalized_contracts = Map::new();
    if let Some(contracts) = forge_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        for (path, names) in contracts.iter_mut() {
            let mut path_contracts = Map::new();
            if let Some(names_obj) = names.as_object_mut() {
                for (name, entries) in names_obj.iter_mut() {
                    if let Some(arr) = entries.as_array_mut()
                        && let Some(first) = arr.first_mut()
                        && let Some(contract) = first.get_mut("contract")
                    {
                        path_contracts.insert(name.clone(), contract.take());
                    }
                }
            }
            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
        }
    }
    result.insert("contracts".to_string(), Value::Object(normalized_contracts));

    // source_id_to_path comes from the first build info, defaulting to {}.
    let source_id_to_path = forge_output
        .get_mut("build_infos")
        .and_then(|bi| bi.as_array_mut())
        .and_then(|arr| arr.first_mut())
        .and_then(|info| info.get_mut("source_id_to_path"))
        .map(Value::take)
        .unwrap_or_else(|| json!({}));
    result.insert("source_id_to_path".to_string(), source_id_to_path);

    Value::Object(result)
}
868
/// Compile a single file with the best-matching solc and return the
/// normalized output.
///
/// Walks the file's import graph (on a blocking thread, since it does
/// synchronous file I/O) to find the tightest pragma constraint, resolves
/// a solc binary for it, compiles, and normalizes paths in the result.
pub async fn solc_ast(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    // Pragma collection reads files synchronously — keep it off the
    // async executor.
    let file_abs = Path::new(file_path).to_path_buf();
    let config_root = config.root.clone();
    let remappings_clone = remappings.clone();
    let pragmas = tokio::task::spawn_blocking(move || {
        collect_import_pragmas(&file_abs, &config_root, &remappings_clone)
    })
    .await
    .unwrap_or_default();
    let constraint = tightest_constraint(&pragmas);
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // solc runs with cwd = project root, so the source key is made
    // root-relative when possible.
    let rel_path = Path::new(file_path)
        .strip_prefix(&config.root)
        .map(|p| p.to_string_lossy().into_owned())
        .unwrap_or_else(|_| file_path.to_string());

    let input = build_standard_json_input(&rel_path, &remappings, config);
    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;

    Ok(normalize_solc_output(raw_output, Some(&config.root)))
}
909
/// Build a single file with solc. Currently a straight alias for
/// `solc_ast` (same outputs are requested either way).
pub async fn solc_build(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    solc_ast(file_path, config, client).await
}
918
/// Discover all `.sol` files under the project's src/test/script
/// directories (libraries excluded).
pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, false)
}
941
942pub fn discover_src_only_files(config: &FoundryConfig) -> Vec<PathBuf> {
948 let root = &config.root;
949 if !root.is_dir() {
950 return Vec::new();
951 }
952 let mut files = Vec::new();
953 let dir = root.join(&config.sources_dir);
954 if dir.is_dir() {
955 discover_recursive(&dir, &[], &mut files);
956 }
957 files.sort();
958 files
959}
960
961pub fn discover_src_only_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
968 let seeds = discover_src_only_files(config);
969 let mut visited: HashSet<PathBuf> = HashSet::new();
970 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
971
972 while let Some(file) = queue.pop_front() {
973 if !visited.insert(file.clone()) {
974 continue;
975 }
976 let source = match std::fs::read_to_string(&file) {
977 Ok(s) => s,
978 Err(_) => continue,
979 };
980 for imp in links::ts_find_imports(source.as_bytes()) {
981 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
982 if abs.exists() && !visited.contains(&abs) {
983 queue.push_back(abs);
984 }
985 }
986 }
987 }
988
989 let mut result: Vec<PathBuf> = visited.into_iter().collect();
990 result.sort();
991 result
992}
993
/// Discover all `.sol` files including those under the configured library
/// directories.
pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, true)
}
1003
1004fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
1005 let root = &config.root;
1006 if !root.is_dir() {
1007 return Vec::new();
1008 }
1009
1010 let mut files = Vec::new();
1011 let no_skip: &[String] = &[];
1012
1013 for dir_name in [&config.sources_dir, &config.test_dir, &config.script_dir] {
1018 let dir = root.join(dir_name);
1019 if dir.is_dir() {
1020 discover_recursive(&dir, no_skip, &mut files);
1021 }
1022 }
1023
1024 if include_libs {
1026 for lib_name in &config.libs {
1027 let lib_dir = root.join(lib_name);
1028 if lib_dir.is_dir() {
1029 discover_recursive(&lib_dir, no_skip, &mut files);
1030 }
1031 }
1032 }
1033
1034 files.sort();
1035 files
1036}
1037
1038pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
1057 let seeds = discover_source_files(config);
1059 let mut visited: HashSet<PathBuf> = HashSet::new();
1060 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
1061
1062 while let Some(file) = queue.pop_front() {
1063 if !visited.insert(file.clone()) {
1064 continue;
1065 }
1066 let source = match std::fs::read_to_string(&file) {
1067 Ok(s) => s,
1068 Err(_) => continue,
1069 };
1070 for imp in links::ts_find_imports(source.as_bytes()) {
1071 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
1072 if abs.exists() && !visited.contains(&abs) {
1073 queue.push_back(abs);
1074 }
1075 }
1076 }
1077 }
1078
1079 let mut result: Vec<PathBuf> = visited.into_iter().collect();
1080 result.sort();
1081 result
1082}
1083
/// Directory names always skipped during discovery (build artifacts and
/// broadcast logs).
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];

/// Recursively collect `.sol` files under `dir`, skipping hidden
/// directories, the well-known artifact directories above, and any
/// directory whose name appears in `skip_libs`.
fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
    let Ok(entries) = std::fs::read_dir(dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let name = path.file_name().and_then(|n| n.to_str());
        if path.is_dir() {
            // Non-UTF-8 directory names are never skipped (cannot match).
            if let Some(name) = name {
                let skip = name.starts_with('.')
                    || DISCOVER_SKIP_DIRS.contains(&name)
                    || skip_libs.iter().any(|lib| lib == name);
                if skip {
                    continue;
                }
            }
            discover_recursive(&path, skip_libs, files);
        } else if name.is_some_and(|n| n.ends_with(".sol")) {
            files.push(path);
        }
    }
}
1118
/// Build a multi-file standard-JSON input with all sources referenced by
/// URL (read from disk). Convenience wrapper around the cache-aware
/// variant with no content cache.
pub fn build_batch_standard_json_input(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
) -> Value {
    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
}
1132
/// Build a multi-file standard-JSON input, preferring in-editor buffer
/// contents over on-disk files.
///
/// `content_cache` maps document URIs to `(version, text)`; when a source
/// file has a cache entry its text is inlined as `content`, otherwise the
/// file is referenced by `urls` so solc reads it from disk. Source keys
/// are project-root-relative when possible.
pub fn build_batch_standard_json_input_with_cache(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
    content_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
) -> Value {
    // Only what the LSP needs: docs + selectors per contract, AST per file.
    let contract_outputs = vec!["devdoc", "userdoc", "evm.methodIdentifiers"];

    let mut settings = json!({
        "remappings": remappings,
        "outputSelection": {
            "*": {
                "*": contract_outputs,
                "": ["ast"]
            }
        }
    });

    // Optional compiler knobs forwarded from foundry.toml.
    if config.via_ir {
        settings["viaIR"] = json!(true);
    }
    if let Some(ref evm_version) = config.evm_version {
        settings["evmVersion"] = json!(evm_version);
    }

    let mut sources = serde_json::Map::new();
    for file in source_files {
        let rel_path = file
            .strip_prefix(&config.root)
            .map(|p| p.to_string_lossy().into_owned())
            .unwrap_or_else(|_| file.to_string_lossy().into_owned());

        // Look the file up by its file:// URI; drop the version number.
        let cached_content = content_cache.and_then(|cache| {
            let uri = Url::from_file_path(file).ok()?;
            cache.get(uri.as_str()).map(|(_, c)| c.as_str())
        });

        if let Some(content) = cached_content {
            sources.insert(rel_path, json!({ "content": content }));
        } else {
            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
        }
    }

    json!({
        "language": "Solidity",
        "sources": sources,
        "settings": settings
    })
}
1193
1194pub fn build_batch_standard_json_input_ast_only(
1204 source_files: &[PathBuf],
1205 remappings: &[String],
1206 root: &Path,
1207) -> Value {
1208 let settings = json!({
1209 "remappings": remappings,
1210 "outputSelection": {
1211 "*": {
1212 "": ["ast"]
1213 }
1214 }
1215 });
1216
1217 let mut sources = serde_json::Map::new();
1218 for file in source_files {
1219 let rel_path = file
1220 .strip_prefix(root)
1221 .map(|p| p.to_string_lossy().into_owned())
1222 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1223 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1224 }
1225
1226 json!({
1227 "language": "Solidity",
1228 "sources": sources,
1229 "settings": settings
1230 })
1231}
1232
1233pub fn build_parse_only_json_input(
1250 source_files: &[PathBuf],
1251 remappings: &[String],
1252 config: &FoundryConfig,
1253) -> Value {
1254 let settings = json!({
1255 "stopAfter": "parsing",
1256 "remappings": remappings,
1257 "outputSelection": {
1258 "*": {
1259 "": ["ast"]
1260 }
1261 }
1262 });
1263
1264 let mut sources = serde_json::Map::new();
1265 for file in source_files {
1266 let rel_path = file
1267 .strip_prefix(&config.root)
1268 .map(|p| p.to_string_lossy().into_owned())
1269 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1270 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1271 }
1272
1273 json!({
1274 "language": "Solidity",
1275 "sources": sources,
1276 "settings": settings
1277 })
1278}
1279
/// Build the full project index: compile the whole compilation closure
/// (all project sources plus transitive imports), using editor buffer
/// contents from `text_cache` where available.
///
/// Errors when the project has no discoverable source files.
pub async fn solc_project_index(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, &source_files).await
}
1310
/// Build an AST-only project index over the whole compilation closure
/// (no contract-level outputs; faster than the full index).
///
/// Errors when the project has no discoverable source files.
pub async fn solc_project_index_ast_only(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;
    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }
    solc_project_index_from_files_ast_only(config, client, &source_files).await
}
1333
/// Build an AST-only index for a given file set, handling mixed solc
/// versions.
///
/// Files whose pragma is compatible with the project's configured version
/// are compiled in one batch; each incompatible file is then compiled
/// individually with its own resolved solc and the normalized outputs are
/// merged into the batch result.
async fn solc_project_index_from_files_ast_only(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for AST-only project index",
        )));
    }

    let remappings = resolve_remappings(config).await;

    // Prefer the configured project version; otherwise take the first
    // parseable pragma found among the inputs as the constraint.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // Partition by pragma compatibility with the project version. Files
    // without a readable pragma are assumed compatible. With no project
    // version everything goes into one batch.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        (source_files.to_vec(), Vec::new())
    };

    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // Batch-compile the compatible files (or start from an empty result).
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input =
            build_batch_standard_json_input_ast_only(&compatible_files, &remappings, &config.root);
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    if incompatible_files.is_empty() {
        return Ok(result);
    }

    // Compile each incompatible file on its own with a per-file solc,
    // merging successes and silently skipping failures (best effort).
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));
        let file_binary = resolve_solc_binary(config, pragma.as_ref(), client).await;
        let input =
            build_batch_standard_json_input_ast_only(&[file.clone()], &remappings, &config.root);
        if let Ok(raw) = run_solc(&file_binary, &input, &config.root).await {
            let normalized = normalize_solc_output(raw, Some(&config.root));
            merge_normalized_outputs(&mut result, normalized);
        }
    }

    if let Some(c) = client {
        let total = result
            .get("sources")
            .and_then(|s| s.as_object())
            .map_or(0, |obj| obj.len());
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: compiled {} files ({} needed different solc version)",
                total,
                incompatible_files.len(),
            ),
        )
        .await;
    }

    Ok(result)
}
1449
/// Build a project index restricted to an explicit, caller-provided file
/// set (e.g. the dirty files plus their dependents) instead of the whole
/// compilation closure.
pub async fn solc_project_index_scoped(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files provided for scoped project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, source_files).await
}
1468
/// Collects the set of file paths from `solc_output["errors"]` whose
/// `errorCode` is `"5333"` (solc's "source file requires different compiler
/// version" diagnostic). Duplicate paths collapse into one entry.
#[cfg(test)]
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    solc_output
        .get("errors")
        .and_then(|e| e.as_array())
        .map(|errors| {
            errors
                .iter()
                .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
                .filter_map(|err| {
                    // Errors without a sourceLocation carry no file to report.
                    err.get("sourceLocation")
                        .and_then(|sl| sl.get("file"))
                        .and_then(|f| f.as_str())
                        .map(str::to_string)
                })
                .collect()
        })
        .unwrap_or_default()
}
1490
/// Collects the set of file paths from `solc_output["errors"]` whose
/// `errorCode` is `"6275"` (the code this module treats as import-resolution
/// errors — see the sibling `extract_version_error_files` for the 5333 case).
#[cfg(test)]
#[allow(dead_code)]
fn extract_import_error_files(solc_output: &Value) -> HashSet<String> {
    let mut files = HashSet::new();
    let Some(errors) = solc_output.get("errors").and_then(|e| e.as_array()) else {
        return files;
    };
    for err in errors {
        if err.get("errorCode").and_then(|c| c.as_str()) != Some("6275") {
            continue;
        }
        // Only errors that point at a concrete file are collected.
        if let Some(file) = err
            .get("sourceLocation")
            .and_then(|sl| sl.get("file"))
            .and_then(|f| f.as_str())
        {
            files.insert(file.to_string());
        }
    }
    files
}
1512
/// Computes the transitive set of files that (directly or indirectly) import
/// any file in `exclude_abs`, including the seed files themselves.
///
/// Builds a reverse import graph (imported file -> files importing it) from
/// the on-disk contents of `source_files`, then BFS-expands from the seed
/// set along reverse edges. Unreadable files are skipped silently.
#[cfg(test)]
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // imported absolute path -> set of files that import it
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    for file in source_files {
        let Ok(bytes) = std::fs::read(file) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(imported_abs) =
                resolve_import_to_abs(project_root, file, &imp.path, remappings)
            {
                reverse_edges
                    .entry(imported_abs)
                    .or_default()
                    .insert(file.clone());
            }
        }
    }

    // BFS from the seed set; `closure` doubles as the visited set.
    let mut closure: HashSet<PathBuf> = exclude_abs.clone();
    let mut queue: std::collections::VecDeque<PathBuf> = exclude_abs.iter().cloned().collect();

    while let Some(current) = queue.pop_front() {
        // Bug fix: this lookup was mojibake (`¤t`, an HTML-entity-mangled
        // `&current`) and did not compile.
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                if closure.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    closure
}
1561
1562fn merge_normalized_outputs(base: &mut Value, other: Value) {
1568 if let (Some(base_sources), Some(other_sources)) = (
1570 base.get_mut("sources").and_then(|s| s.as_object_mut()),
1571 other.get("sources").and_then(|s| s.as_object()),
1572 ) {
1573 let max_base_id = base_sources
1575 .values()
1576 .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1577 .max()
1578 .map(|m| m + 1)
1579 .unwrap_or(0);
1580
1581 let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1583
1584 for (path, mut source_data) in other_sources.clone() {
1585 if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1587 let new_id = id + max_base_id;
1588 source_data
1589 .as_object_mut()
1590 .unwrap()
1591 .insert("id".to_string(), json!(new_id));
1592 remapped_id_to_path.push((new_id.to_string(), path.clone()));
1593 }
1594 base_sources.insert(path, source_data);
1595 }
1596
1597 if let Some(base_id_map) = base
1599 .get_mut("source_id_to_path")
1600 .and_then(|m| m.as_object_mut())
1601 {
1602 for (id, path) in remapped_id_to_path {
1603 base_id_map.insert(id, json!(path));
1604 }
1605 }
1606 }
1607
1608 if let (Some(base_contracts), Some(other_contracts)) = (
1610 base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1611 other.get("contracts").and_then(|c| c.as_object()),
1612 ) {
1613 for (path, contract_data) in other_contracts {
1614 base_contracts.insert(path.clone(), contract_data.clone());
1615 }
1616 }
1617
1618 }
1621
/// Compiles `source_files` with solc and returns one merged, normalized
/// standard-JSON output (`sources`, `contracts`, `errors`,
/// `source_id_to_path` keys).
///
/// Strategy: pick one "project" binary, batch-compile every file whose
/// pragma is compatible with it, then compile each incompatible file
/// individually with its own resolved binary and merge those results in.
/// Per-file failures in the fallback pass are logged and skipped — only an
/// empty input set or a failed batch run is fatal.
async fn solc_project_index_from_files(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<crate::types::DocumentUri, (i32, String)>>,
    source_files: &[PathBuf],
) -> Result<Value, RunnerError> {
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    let remappings = resolve_remappings(config).await;

    // Pinned solc version from foundry config, if present and parseable.
    let project_version: Option<SemVer> =
        config.solc_version.as_ref().and_then(|v| SemVer::parse(v));

    // Constraint used to resolve the batch binary: a pinned project version
    // wins; otherwise fall back to the first parseable pragma found among
    // the source files (None if no file yields one).
    let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
        Some(PragmaConstraint::Exact(v.clone()))
    } else {
        source_files.iter().find_map(|f| {
            std::fs::read_to_string(f)
                .ok()
                .and_then(|src| parse_pragma(&src))
        })
    };
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // Partition sources by pragma compatibility with the pinned version.
    // Files that cannot be read or have no pragma are assumed compatible;
    // without a pinned version, everything goes into the single batch.
    let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
        let mut compat = Vec::with_capacity(source_files.len());
        let mut incompat = Vec::new();
        for file in source_files {
            let is_compatible = std::fs::read_to_string(file)
                .ok()
                .and_then(|src| parse_pragma(&src))
                .map(|pragma| version_satisfies(ver, &pragma))
                .unwrap_or(true);
            if is_compatible {
                compat.push(file.clone());
            } else {
                incompat.push(file.clone());
            }
        }
        (compat, incompat)
    } else {
        (source_files.to_vec(), Vec::new())
    };

    if !incompatible_files.is_empty() {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "project index: {} compatible, {} incompatible with solc {}",
                    compatible_files.len(),
                    incompatible_files.len(),
                    project_version
                        .as_ref()
                        .map(|v| v.to_string())
                        .unwrap_or_default(),
                ),
            )
            .await;
        }
    }

    // Batch-compile the compatible set; an empty set yields an empty (but
    // well-formed) normalized skeleton so the merge below still works.
    let mut result = if compatible_files.is_empty() {
        json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
    } else {
        let input = build_batch_standard_json_input_with_cache(
            &compatible_files,
            &remappings,
            config,
            text_cache,
        );
        let raw = run_solc(&solc_binary, &input, &config.root).await?;
        normalize_solc_output(raw, Some(&config.root))
    };

    let batch_source_count = result
        .get("sources")
        .and_then(|s| s.as_object())
        .map_or(0, |obj| obj.len());

    // Fast path: nothing left to compile individually.
    if incompatible_files.is_empty() {
        return Ok(result);
    }

    if let Some(c) = client {
        // Summarize up to three error-severity diagnostics from the batch to
        // make version-mismatch fallout visible in the log.
        let batch_errors: Vec<String> = result
            .get("errors")
            .and_then(|e| e.as_array())
            .map(|arr| {
                arr.iter()
                    .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
                    .take(3)
                    .filter_map(|e| {
                        let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
                        let file = e
                            .get("sourceLocation")
                            .and_then(|sl| sl.get("file"))
                            .and_then(|f| f.as_str())
                            .unwrap_or("?");
                        Some(format!("{file}: {msg}"))
                    })
                    .collect()
            })
            .unwrap_or_default();

        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
                batch_source_count,
                incompatible_files.len(),
                if batch_errors.is_empty() {
                    String::new()
                } else {
                    format!(" [first errors: {}]", batch_errors.join("; "))
                },
            ),
        )
        .await;
    }

    // Fallback pass: one solc invocation per incompatible file, each with a
    // binary resolved from that file's own pragma. Failures are non-fatal.
    let mut compiled = 0usize;
    let mut skipped = 0usize;
    for file in &incompatible_files {
        let pragma = std::fs::read_to_string(file)
            .ok()
            .and_then(|src| parse_pragma(&src));

        // A file without a readable pragma ended up here because it was
        // incompatible with the pinned version; without a constraint we
        // cannot pick a better binary, so skip it.
        let Some(file_constraint) = pragma else {
            skipped += 1;
            continue;
        };

        let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
        let input = build_batch_standard_json_input_with_cache(
            &[file.clone()],
            &remappings,
            config,
            text_cache,
        );
        match run_solc(&file_binary, &input, &config.root).await {
            Ok(raw) => {
                let normalized = normalize_solc_output(raw, Some(&config.root));
                merge_normalized_outputs(&mut result, normalized);
                compiled += 1;
            }
            Err(e) => {
                if let Some(c) = client {
                    c.log_message(
                        tower_lsp::lsp_types::MessageType::WARNING,
                        format!(
                            "project index: incompatible file {} failed: {e}",
                            file.display(),
                        ),
                    )
                    .await;
                }
                skipped += 1;
            }
        }
    }

    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            format!(
                "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
            ),
        )
        .await;
    }

    Ok(result)
}
1819
// Unit tests for solc output normalization, standard-JSON input building,
// pragma parsing / version matching, error extraction, import-closure
// computation, and output merging.
#[cfg(test)]
mod tests {
    use super::*;

    // normalize_solc_output keeps per-file source entries (id + ast) and
    // builds the reverse source_id_to_path map.
    #[test]
    fn test_normalize_solc_sources() {
        let solc_output = json!({
            "sources": {
                "src/Foo.sol": {
                    "id": 0,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Foo.sol",
                        "id": 100
                    }
                },
                "src/Bar.sol": {
                    "id": 1,
                    "ast": {
                        "nodeType": "SourceUnit",
                        "absolutePath": "src/Bar.sol",
                        "id": 200
                    }
                }
            },
            "contracts": {},
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        let sources = normalized.get("sources").unwrap().as_object().unwrap();
        assert_eq!(sources.len(), 2);

        let foo = sources.get("src/Foo.sol").unwrap();
        assert_eq!(foo.get("id").unwrap(), 0);
        assert_eq!(
            foo.get("ast")
                .unwrap()
                .get("nodeType")
                .unwrap()
                .as_str()
                .unwrap(),
            "SourceUnit"
        );

        let id_to_path = normalized
            .get("source_id_to_path")
            .unwrap()
            .as_object()
            .unwrap();
        assert_eq!(id_to_path.len(), 2);
    }

    // Contract entries (abi, evm.methodIdentifiers) survive normalization.
    #[test]
    fn test_normalize_solc_contracts() {
        let solc_output = json!({
            "sources": {},
            "contracts": {
                "src/Foo.sol": {
                    "Foo": {
                        "abi": [{"type": "function", "name": "bar"}],
                        "evm": {
                            "methodIdentifiers": {
                                "bar(uint256)": "abcd1234"
                            }
                        }
                    }
                }
            },
            "errors": []
        });

        let normalized = normalize_solc_output(solc_output, None);

        let contracts = normalized.get("contracts").unwrap().as_object().unwrap();
        let foo_contracts = contracts.get("src/Foo.sol").unwrap().as_object().unwrap();
        let foo = foo_contracts.get("Foo").unwrap();

        let method_ids = foo
            .get("evm")
            .unwrap()
            .get("methodIdentifiers")
            .unwrap()
            .as_object()
            .unwrap();
        assert_eq!(
            method_ids.get("bar(uint256)").unwrap().as_str().unwrap(),
            "abcd1234"
        );
    }

    // Diagnostics pass through normalization untouched (errorCode intact).
    #[test]
    fn test_normalize_solc_errors_passthrough() {
        let solc_output = json!({
            "sources": {},
            "contracts": {},
            "errors": [{
                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
                "type": "Warning",
                "component": "general",
                "severity": "warning",
                "errorCode": "2394",
                "message": "test warning",
                "formattedMessage": "Warning: test warning"
            }]
        });

        let normalized = normalize_solc_output(solc_output, None);

        let errors = normalized.get("errors").unwrap().as_array().unwrap();
        assert_eq!(errors.len(), 1);
        assert_eq!(
            errors[0].get("errorCode").unwrap().as_str().unwrap(),
            "2394"
        );
    }

    // Output missing the "errors" key normalizes to empty maps and an empty
    // errors array — every expected key is present even for empty input.
    #[test]
    fn test_normalize_empty_solc_output() {
        let solc_output = json!({
            "sources": {},
            "contracts": {}
        });

        let normalized = normalize_solc_output(solc_output, None);

        assert!(
            normalized
                .get("sources")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
        assert!(
            normalized
                .get("contracts")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
        assert_eq!(
            normalized.get("errors").unwrap().as_array().unwrap().len(),
            0
        );
        assert!(
            normalized
                .get("source_id_to_path")
                .unwrap()
                .as_object()
                .unwrap()
                .is_empty()
        );
    }

    // Default-config standard-JSON input: sources keyed by path, remappings
    // forwarded, no optimizer/viaIR/evmVersion settings emitted, and the
    // output selection excludes abi/gas estimates while keeping docs and
    // method identifiers.
    #[test]
    fn test_build_standard_json_input() {
        let config = FoundryConfig::default();
        let input = build_standard_json_input(
            "/path/to/Foo.sol",
            &[
                "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
                "forge-std/=lib/forge-std/src/".to_string(),
            ],
            &config,
        );

        let sources = input.get("sources").unwrap().as_object().unwrap();
        assert!(sources.contains_key("/path/to/Foo.sol"));

        let settings = input.get("settings").unwrap();
        let remappings = settings.get("remappings").unwrap().as_array().unwrap();
        assert_eq!(remappings.len(), 2);

        let output_sel = settings.get("outputSelection").unwrap();
        assert!(output_sel.get("*").is_some());

        assert!(settings.get("optimizer").is_none());
        assert!(settings.get("viaIR").is_none());
        assert!(settings.get("evmVersion").is_none());

        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));
        assert!(!output_names.contains(&"abi")); assert!(output_names.contains(&"devdoc"));
        assert!(output_names.contains(&"userdoc"));
        assert!(output_names.contains(&"evm.methodIdentifiers"));
    }

    // With a non-default config: optimizer settings are still omitted,
    // while viaIR and evmVersion are forwarded into the input.
    #[test]
    fn test_build_standard_json_input_with_config() {
        let config = FoundryConfig {
            optimizer: true,
            optimizer_runs: 9999999,
            via_ir: true,
            evm_version: Some("osaka".to_string()),
            ..Default::default()
        };
        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config);

        let settings = input.get("settings").unwrap();

        assert!(settings.get("optimizer").is_none());

        assert!(settings.get("viaIR").unwrap().as_bool().unwrap());

        let outputs = settings["outputSelection"]["*"]["*"].as_array().unwrap();
        let output_names: Vec<&str> = outputs.iter().map(|v| v.as_str().unwrap()).collect();
        assert!(!output_names.contains(&"evm.gasEstimates"));

        assert_eq!(
            settings.get("evmVersion").unwrap().as_str().unwrap(),
            "osaka"
        );
    }

    // With no config version and no pragma constraint, resolution falls
    // back to plain "solc" on PATH.
    #[tokio::test]
    async fn test_resolve_solc_binary_default() {
        let config = FoundryConfig::default();
        let binary = resolve_solc_binary(&config, None, None).await;
        assert_eq!(binary, PathBuf::from("solc"));
    }

    // "pragma solidity 0.8.26" parses as an Exact constraint.
    #[test]
    fn test_parse_pragma_exact() {
        let source = "// SPDX\npragma solidity 0.8.26;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Exact(SemVer {
                major: 0,
                minor: 8,
                patch: 26
            }))
        );
    }

    // "^0.8.0" parses as a Caret constraint.
    #[test]
    fn test_parse_pragma_caret() {
        let source = "pragma solidity ^0.8.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Caret(SemVer {
                major: 0,
                minor: 8,
                patch: 0
            }))
        );
    }

    // ">=0.8.0" parses as a Gte constraint.
    #[test]
    fn test_parse_pragma_gte() {
        let source = "pragma solidity >=0.8.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Gte(SemVer {
                major: 0,
                minor: 8,
                patch: 0
            }))
        );
    }

    // ">=A <B" parses as a Range constraint (lower bound, upper bound).
    #[test]
    fn test_parse_pragma_range() {
        let source = "pragma solidity >=0.6.2 <0.9.0;\n";
        assert_eq!(
            parse_pragma(source),
            Some(PragmaConstraint::Range(
                SemVer {
                    major: 0,
                    minor: 6,
                    patch: 2
                },
                SemVer {
                    major: 0,
                    minor: 9,
                    patch: 0
                },
            ))
        );
    }

    // Source without a pragma yields None.
    #[test]
    fn test_parse_pragma_none() {
        let source = "contract Foo {}\n";
        assert_eq!(parse_pragma(source), None);
    }

    // Exact constraint matches only the identical version.
    #[test]
    fn test_version_satisfies_exact() {
        let v = SemVer {
            major: 0,
            minor: 8,
            patch: 26,
        };
        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 25
            },
            &PragmaConstraint::Exact(v)
        ));
    }

    // Caret constraint: same major.minor with any >= patch; different
    // minor (in the 0.x series) fails in both directions.
    #[test]
    fn test_version_satisfies_caret() {
        let constraint = PragmaConstraint::Caret(SemVer {
            major: 0,
            minor: 8,
            patch: 0,
        });
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 0
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 26
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 7,
                patch: 0
            },
            &constraint
        ));
    }

    // Gte constraint: equal or newer passes, older fails.
    #[test]
    fn test_version_satisfies_gte() {
        let constraint = PragmaConstraint::Gte(SemVer {
            major: 0,
            minor: 8,
            patch: 0,
        });
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 0
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 7,
                patch: 0
            },
            &constraint
        ));
    }

    // Range constraint: inclusive lower bound, exclusive upper bound.
    #[test]
    fn test_version_satisfies_range() {
        let constraint = PragmaConstraint::Range(
            SemVer {
                major: 0,
                minor: 6,
                patch: 2,
            },
            SemVer {
                major: 0,
                minor: 9,
                patch: 0,
            },
        );
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 6,
                patch: 2
            },
            &constraint
        ));
        assert!(version_satisfies(
            &SemVer {
                major: 0,
                minor: 8,
                patch: 26
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 9,
                patch: 0
            },
            &constraint
        ));
        assert!(!version_satisfies(
            &SemVer {
                major: 0,
                minor: 6,
                patch: 1
            },
            &constraint
        ));
    }

    // Caret picks the newest matching installed version; Exact requires a
    // precise install and yields None when absent.
    #[test]
    fn test_find_matching_version() {
        let installed = vec![
            SemVer {
                major: 0,
                minor: 8,
                patch: 0,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 20,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 26,
            },
            SemVer {
                major: 0,
                minor: 8,
                patch: 33,
            },
        ];
        let constraint = PragmaConstraint::Caret(SemVer {
            major: 0,
            minor: 8,
            patch: 20,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(
            matched,
            Some(SemVer {
                major: 0,
                minor: 8,
                patch: 33
            })
        );

        let constraint = PragmaConstraint::Exact(SemVer {
            major: 0,
            minor: 8,
            patch: 20,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(
            matched,
            Some(SemVer {
                major: 0,
                minor: 8,
                patch: 20
            })
        );

        let constraint = PragmaConstraint::Exact(SemVer {
            major: 0,
            minor: 8,
            patch: 15,
        });
        let matched = find_matching_version(&constraint, &installed);
        assert_eq!(matched, None);
    }

    // Whatever is installed on this machine, the list comes back sorted
    // ascending (vacuously true when fewer than two versions exist).
    #[test]
    fn test_list_installed_versions() {
        let versions = list_installed_versions();
        for w in versions.windows(2) {
            assert!(w[0] <= w[1]);
        }
    }

    // Only errorCode "5333" entries contribute files; other codes are
    // ignored.
    #[test]
    fn test_extract_version_error_files_basic() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/old-lib/src/Legacy.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "9574",
                    "severity": "error",
                    "message": "Some other error",
                    "sourceLocation": {
                        "file": "src/Main.sol",
                        "start": 100,
                        "end": 200
                    }
                }
            ]
        });

        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 2);
        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
        assert!(!files.contains("src/Main.sol"));
    }

    // Empty errors array and missing errors key both yield an empty set.
    #[test]
    fn test_extract_version_error_files_empty() {
        let output = json!({
            "errors": []
        });
        assert!(extract_version_error_files(&output).is_empty());

        let output = json!({});
        assert!(extract_version_error_files(&output).is_empty());
    }

    // A 5333 error without a sourceLocation contributes nothing.
    #[test]
    fn test_extract_version_error_files_no_source_location() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version"
                }
            ]
        });
        assert!(extract_version_error_files(&output).is_empty());
    }

    // The same file reported twice collapses into one set entry.
    #[test]
    fn test_extract_version_error_files_dedup() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
                }
            ]
        });
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 1);
        assert!(files.contains("lib/same.sol"));
    }

    // Chain a -> b -> c: excluding c pulls in b (direct importer) and a
    // (transitive importer) but not the unrelated d.
    #[test]
    fn test_reverse_import_closure_simple() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(
            root.join("b.sol"),
            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
        )
        .unwrap();
        std::fs::write(
            root.join("c.sol"),
            "// SPDX-License-Identifier: MIT\ncontract C {}",
        )
        .unwrap();
        std::fs::write(
            root.join("d.sol"),
            "// SPDX-License-Identifier: MIT\ncontract D {}",
        )
        .unwrap();

        let files: Vec<PathBuf> = vec![
            root.join("a.sol"),
            root.join("b.sol"),
            root.join("c.sol"),
            root.join("d.sol"),
        ];

        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();
        let closure = reverse_import_closure(&files, &exclude, root, &[]);

        assert!(
            closure.contains(&root.join("c.sol")),
            "seed file in closure"
        );
        assert!(closure.contains(&root.join("b.sol")), "direct importer");
        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
        assert!(
            !closure.contains(&root.join("d.sol")),
            "unrelated file not in closure"
        );
        assert_eq!(closure.len(), 3);
    }

    // A seed with no importers stays alone in the closure.
    #[test]
    fn test_reverse_import_closure_no_importers() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(root.join("a.sol"), "contract A {}").unwrap();
        std::fs::write(root.join("b.sol"), "contract B {}").unwrap();

        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 1);
        assert!(closure.contains(&root.join("a.sol")));
    }

    // Diamond a -> {b, c} -> d: excluding d pulls in all four files, each
    // counted once despite the two paths from a to d.
    #[test]
    fn test_reverse_import_closure_diamond() {
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        std::fs::write(
            root.join("a.sol"),
            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
        )
        .unwrap();
        std::fs::write(root.join("b.sol"), "import \"./d.sol\";\ncontract B {}").unwrap();
        std::fs::write(root.join("c.sol"), "import \"./d.sol\";\ncontract C {}").unwrap();
        std::fs::write(root.join("d.sol"), "contract D {}").unwrap();

        let files: Vec<PathBuf> = vec![
            root.join("a.sol"),
            root.join("b.sol"),
            root.join("c.sol"),
            root.join("d.sol"),
        ];
        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 4);
    }

    // Merging shifts incoming source ids past the base maximum (0 -> 2 here)
    // and extends sources, source_id_to_path, and contracts accordingly.
    #[test]
    fn test_merge_normalized_outputs_basic() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/src/A.sol": { "A": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/src/A.sol",
                "1": "/abs/src/B.sol"
            }
        });

        let other = json!({
            "sources": {
                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/lib/C.sol": { "C": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/lib/C.sol"
            }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 3);
        assert!(sources.contains_key("/abs/lib/C.sol"));

        let c_id = sources["/abs/lib/C.sol"]["id"].as_u64().unwrap();
        assert_eq!(
            c_id, 2,
            "remapped id should be max_base_id (2) + original (0)"
        );

        let id_map = base["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_map.len(), 3);
        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");

        let contracts = base["contracts"].as_object().unwrap();
        assert_eq!(contracts.len(), 2);
        assert!(contracts.contains_key("/abs/lib/C.sol"));
    }

    // Merging an empty output is a no-op on the base.
    #[test]
    fn test_merge_normalized_outputs_empty_other() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": {} }
            },
            "contracts": {},
            "errors": [],
            "source_id_to_path": { "0": "/abs/src/A.sol" }
        });

        let other = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
    }

    // Merging into an empty base keeps incoming ids unshifted (offset 0).
    #[test]
    fn test_merge_normalized_outputs_empty_base() {
        let mut base = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        let other = json!({
            "sources": {
                "/abs/lib/X.sol": { "id": 0, "ast": {} }
            },
            "contracts": {
                "/abs/lib/X.sol": { "X": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": { "0": "/abs/lib/X.sol" }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
        let x_id = sources["/abs/lib/X.sol"]["id"].as_u64().unwrap();
        assert_eq!(x_id, 0);
    }
}