1use crate::config::FoundryConfig;
8use crate::links;
9use crate::runner::RunnerError;
10use serde_json::{Map, Value, json};
11use std::collections::{HashMap, HashSet};
12use std::path::{Path, PathBuf};
13use std::sync::{Mutex, OnceLock};
14use tokio::process::Command;
15use tower_lsp::lsp_types::Url;
16
// Process-wide cache of solc versions found on disk. Populated lazily on first
// access and refreshed via `invalidate_installed_versions` after an install.
static INSTALLED_VERSIONS: OnceLock<Mutex<Vec<SemVer>>> = OnceLock::new();
20
21fn get_installed_versions() -> Vec<SemVer> {
22 let mutex = INSTALLED_VERSIONS.get_or_init(|| Mutex::new(scan_installed_versions()));
23 mutex.lock().unwrap().clone()
24}
25
26fn invalidate_installed_versions() {
27 if let Some(mutex) = INSTALLED_VERSIONS.get() {
28 *mutex.lock().unwrap() = scan_installed_versions();
29 }
30}
31
/// Convert a `semver::Version` (u64 fields) into this module's compact
/// `SemVer` (u32 fields).
fn semver_to_local(v: &semver::Version) -> SemVer {
    SemVer {
        // Narrowing `as u32` casts: solc release numbers are tiny, so
        // truncation cannot occur for any real version.
        major: v.major as u32,
        minor: v.minor as u32,
        patch: v.patch as u32,
    }
}
40
/// Pick the solc binary to use for a compile.
///
/// Resolution order:
/// 1. With a non-exact pragma constraint: the foundry.toml `solc_version`,
///    when it satisfies the constraint and its binary is installed.
/// 2. The newest already-installed version matching the constraint.
/// 3. Install a matching version through svm and use it.
/// 4. With no constraint (or all of the above failing): the foundry.toml
///    version, if installed.
/// 5. Last resort: bare `"solc"`, resolved from PATH at spawn time.
///
/// `client`, when present, receives progress/log messages; resolution itself
/// never fails — the PATH fallback is always returned.
pub async fn resolve_solc_binary(
    config: &FoundryConfig,
    constraint: Option<&PragmaConstraint>,
    client: Option<&tower_lsp::Client>,
) -> PathBuf {
    if let Some(constraint) = constraint {
        // Step 1: honor the project-configured version when compatible.
        // Exact pragmas skip this — they demand one specific version.
        if !matches!(constraint, PragmaConstraint::Exact(_))
            && let Some(ref config_ver) = config.solc_version
            && let Some(parsed) = SemVer::parse(config_ver)
            && version_satisfies(&parsed, constraint)
            && let Some(path) = find_solc_binary(config_ver)
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: foundry.toml {config_ver} satisfies pragma {constraint:?} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // Step 2: best already-installed match for the pragma.
        let installed = get_installed_versions();
        if let Some(version) = find_matching_version(constraint, &installed)
            && let Some(path) = find_solc_binary(&version.to_string())
        {
            if let Some(c) = client {
                c.log_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!(
                        "solc: pragma {constraint:?} → {version} → {}",
                        path.display()
                    ),
                )
                .await;
            }
            return path;
        }

        // Step 3: nothing installed matches — install the constraint's lower
        // bound via svm.
        let install_version = version_to_install(constraint);
        if let Some(ref ver_str) = install_version {
            if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::INFO,
                    format!("Installing solc {ver_str}..."),
                )
                .await;
            }

            if svm_install(ver_str).await {
                // A new binary is on disk: refresh the cached version list.
                invalidate_installed_versions();

                if let Some(c) = client {
                    c.show_message(
                        tower_lsp::lsp_types::MessageType::INFO,
                        format!("Installed solc {ver_str}"),
                    )
                    .await;
                }
                if let Some(path) = find_solc_binary(ver_str) {
                    return path;
                }
            } else if let Some(c) = client {
                c.show_message(
                    tower_lsp::lsp_types::MessageType::WARNING,
                    format!(
                        "Failed to install solc {ver_str}. \
                        Install it manually: svm install {ver_str}"
                    ),
                )
                .await;
            }
        }
    }

    // Step 4: no constraint (or everything above failed) — use the configured
    // version if its binary exists.
    if let Some(ref version) = config.solc_version
        && let Some(path) = find_solc_binary(version)
    {
        if let Some(c) = client {
            c.log_message(
                tower_lsp::lsp_types::MessageType::INFO,
                format!(
                    "solc: no pragma, using foundry.toml version {version} → {}",
                    path.display()
                ),
            )
            .await;
        }
        return path;
    }

    // Step 5: rely on a system-wide `solc` being on PATH.
    if let Some(c) = client {
        c.log_message(
            tower_lsp::lsp_types::MessageType::INFO,
            "solc: no pragma match, falling back to system solc",
        )
        .await;
    }
    PathBuf::from("solc")
}
166
167fn version_to_install(constraint: &PragmaConstraint) -> Option<String> {
174 match constraint {
175 PragmaConstraint::Exact(v) => Some(v.to_string()),
176 PragmaConstraint::Caret(v) => Some(v.to_string()),
177 PragmaConstraint::Gte(v) => Some(v.to_string()),
178 PragmaConstraint::Range(lower, _) => Some(lower.to_string()),
179 }
180}
181
182async fn svm_install(version: &str) -> bool {
186 let ver = match semver::Version::parse(version) {
187 Ok(v) => v,
188 Err(_) => return false,
189 };
190 svm::install(&ver).await.is_ok()
191}
192
193fn find_solc_binary(version: &str) -> Option<PathBuf> {
195 let path = svm::version_binary(version);
196 if path.is_file() {
197 return Some(path);
198 }
199 None
200}
201
/// Minimal semantic-version triple used for solc version arithmetic.
///
/// Field declaration order (major, minor, patch) makes the derived `Ord`
/// match the usual semver precedence.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct SemVer {
    pub major: u32,
    pub minor: u32,
    pub patch: u32,
}
211
212impl SemVer {
213 fn parse(s: &str) -> Option<SemVer> {
214 let parts: Vec<&str> = s.split('.').collect();
215 if parts.len() != 3 {
216 return None;
217 }
218 Some(SemVer {
219 major: parts[0].parse().ok()?,
220 minor: parts[1].parse().ok()?,
221 patch: parts[2].parse().ok()?,
222 })
223 }
224}
225
226impl std::fmt::Display for SemVer {
227 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
228 write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
229 }
230}
231
/// A version constraint extracted from a `pragma solidity` directive.
#[derive(Debug, Clone, PartialEq)]
pub enum PragmaConstraint {
    /// `pragma solidity 0.8.19;` — exactly this version.
    Exact(SemVer),
    /// `pragma solidity ^0.8.19;` — this version up to (excluding) the next
    /// minor (see `constraint_to_range` / `version_satisfies`).
    Caret(SemVer),
    /// `pragma solidity >=0.8.0;` — this version or newer.
    Gte(SemVer),
    /// `pragma solidity >=0.8.0 <0.9.0;` — half-open `[lower, upper)` range.
    Range(SemVer, SemVer),
}
245
246fn resolve_import_to_abs(
250 project_root: &Path,
251 importer_abs: &Path,
252 import_path: &str,
253 remappings: &[String],
254) -> Option<PathBuf> {
255 if import_path.starts_with("./") || import_path.starts_with("../") {
256 let base = importer_abs.parent()?;
257 return Some(lexical_normalize(&base.join(import_path)));
258 }
259
260 for remap in remappings {
261 let mut it = remap.splitn(2, '=');
262 let prefix = it.next().unwrap_or_default();
263 let target = it.next().unwrap_or_default();
264 if prefix.is_empty() || target.is_empty() {
265 continue;
266 }
267 if import_path.starts_with(prefix) {
268 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
269 return Some(lexical_normalize(
270 &project_root.join(format!("{target}{suffix}")),
271 ));
272 }
273 }
274
275 Some(lexical_normalize(&project_root.join(import_path)))
276}
277
/// Normalize a path purely lexically: drop `.` components and resolve `..`
/// against the accumulated prefix, without touching the filesystem.
fn lexical_normalize(path: &Path) -> PathBuf {
    use std::path::Component;

    let mut normalized = PathBuf::new();
    for component in path.components() {
        match component {
            Component::CurDir => continue,
            // A `..` that would climb above the accumulated prefix is
            // silently dropped (`PathBuf::pop` on an empty path is a no-op).
            Component::ParentDir => {
                normalized.pop();
            }
            other => normalized.push(other.as_os_str()),
        }
    }
    normalized
}
293
294fn collect_import_pragmas(
300 file_path: &Path,
301 project_root: &Path,
302 remappings: &[String],
303) -> Vec<PragmaConstraint> {
304 let mut pragmas = Vec::new();
305 let mut visited = HashSet::new();
306 collect_import_pragmas_recursive(
307 file_path,
308 project_root,
309 remappings,
310 &mut pragmas,
311 &mut visited,
312 );
313 pragmas
314}
315
316fn collect_import_pragmas_recursive(
317 file_path: &Path,
318 project_root: &Path,
319 remappings: &[String],
320 pragmas: &mut Vec<PragmaConstraint>,
321 visited: &mut HashSet<PathBuf>,
322) {
323 if !visited.insert(file_path.to_path_buf()) {
324 return;
325 }
326 let source = match std::fs::read_to_string(file_path) {
327 Ok(s) => s,
328 Err(_) => return,
329 };
330 if let Some(pragma) = parse_pragma(&source) {
331 pragmas.push(pragma);
332 }
333 for imp in links::ts_find_imports(source.as_bytes()) {
334 if let Some(abs) = resolve_import_to_abs(project_root, file_path, &imp.path, remappings) {
335 collect_import_pragmas_recursive(&abs, project_root, remappings, pragmas, visited);
336 }
337 }
338}
339
340fn tightest_constraint(pragmas: &[PragmaConstraint]) -> Option<PragmaConstraint> {
350 if pragmas.is_empty() {
351 return None;
352 }
353
354 for p in pragmas {
356 if matches!(p, PragmaConstraint::Exact(_)) {
357 return Some(p.clone());
358 }
359 }
360
361 let mut lower = SemVer {
363 major: 0,
364 minor: 0,
365 patch: 0,
366 };
367 let mut upper: Option<SemVer> = None;
368
369 for p in pragmas {
370 let (lo, hi) = constraint_to_range(p);
371 if lo > lower {
372 lower = lo;
373 }
374 if let Some(hi) = hi {
375 upper = Some(match upper {
376 Some(cur) if hi < cur => hi,
377 Some(cur) => cur,
378 None => hi,
379 });
380 }
381 }
382
383 match upper {
384 Some(hi) if lower >= hi => None, Some(hi) => Some(PragmaConstraint::Range(lower, hi)),
386 None => Some(PragmaConstraint::Gte(lower)),
387 }
388}
389
390fn constraint_to_range(constraint: &PragmaConstraint) -> (SemVer, Option<SemVer>) {
393 match constraint {
394 PragmaConstraint::Exact(v) => (
395 v.clone(),
396 Some(SemVer {
397 major: v.major,
398 minor: v.minor,
399 patch: v.patch + 1,
400 }),
401 ),
402 PragmaConstraint::Caret(v) => (
403 v.clone(),
404 Some(SemVer {
405 major: v.major,
406 minor: v.minor + 1,
407 patch: 0,
408 }),
409 ),
410 PragmaConstraint::Gte(v) => (v.clone(), None),
411 PragmaConstraint::Range(lo, hi) => (lo.clone(), Some(hi.clone())),
412 }
413}
414
/// Extract the version constraint from a source file's `pragma solidity` line.
///
/// Only the first 20 lines are scanned (the pragma conventionally sits at the
/// top of the file). Recognized shapes, tried in order:
/// `>=X.Y.Z <A.B.C` → `Range`, `>=X.Y.Z` → `Gte`, `^X.Y.Z` → `Caret`,
/// bare `X.Y.Z` → `Exact`. Anything else yields `None`.
///
/// NOTE(review): two-component pragmas such as `^0.8` are rejected because
/// `SemVer::parse` requires exactly three components — confirm no supported
/// project relies on them.
pub fn parse_pragma(source: &str) -> Option<PragmaConstraint> {
    let pragma_line = source
        .lines()
        .take(20)
        .find(|line| line.trim_start().starts_with("pragma solidity"))?;

    // Strip the keyword and trailing semicolon, leaving just the constraint.
    let after_keyword = pragma_line
        .trim_start()
        .strip_prefix("pragma solidity")?
        .trim();
    let constraint_str = after_keyword
        .strip_suffix(';')
        .unwrap_or(after_keyword)
        .trim();

    if constraint_str.is_empty() {
        return None;
    }

    // ">=lower <upper" or ">=lower". The range form is tried first; if either
    // bound fails to parse, the whole remainder is retried as a single
    // version for the Gte form (which only succeeds when there is no space).
    if let Some(rest) = constraint_str.strip_prefix(">=") {
        let rest = rest.trim();
        if let Some(space_idx) = rest.find(|c: char| c.is_whitespace() || c == '<') {
            let lower_str = rest[..space_idx].trim();
            let upper_part = rest[space_idx..].trim();
            if let Some(upper_str) = upper_part.strip_prefix('<') {
                let upper_str = upper_str.trim();
                if let (Some(lower), Some(upper)) =
                    (SemVer::parse(lower_str), SemVer::parse(upper_str))
                {
                    return Some(PragmaConstraint::Range(lower, upper));
                }
            }
        }
        if let Some(ver) = SemVer::parse(rest) {
            return Some(PragmaConstraint::Gte(ver));
        }
    }

    // "^X.Y.Z"
    if let Some(rest) = constraint_str.strip_prefix('^')
        && let Some(ver) = SemVer::parse(rest.trim())
    {
        return Some(PragmaConstraint::Caret(ver));
    }

    // Bare "X.Y.Z"
    if let Some(ver) = SemVer::parse(constraint_str) {
        return Some(PragmaConstraint::Exact(ver));
    }

    None
}
478
/// Public accessor for the cached list of svm-installed solc versions.
pub fn list_installed_versions() -> Vec<SemVer> {
    get_installed_versions()
}
483
484fn scan_installed_versions() -> Vec<SemVer> {
488 svm::installed_versions()
489 .unwrap_or_default()
490 .iter()
491 .map(semver_to_local)
492 .collect()
493}
494
495pub fn find_matching_version(
500 constraint: &PragmaConstraint,
501 installed: &[SemVer],
502) -> Option<SemVer> {
503 let candidates: Vec<&SemVer> = installed
504 .iter()
505 .filter(|v| version_satisfies(v, constraint))
506 .collect();
507
508 candidates.last().cloned().cloned()
510}
511
512pub fn version_satisfies(version: &SemVer, constraint: &PragmaConstraint) -> bool {
514 match constraint {
515 PragmaConstraint::Exact(v) => version == v,
516 PragmaConstraint::Caret(v) => {
517 version.major == v.major && version >= v && version.minor < v.minor + 1
520 }
521 PragmaConstraint::Gte(v) => version >= v,
522 PragmaConstraint::Range(lower, upper) => version >= lower && version < upper,
523 }
524}
525
/// Determine the project's import remappings, in priority order:
/// `forge remappings` output → `config.remappings` → `remappings.txt` → none.
pub async fn resolve_remappings(config: &FoundryConfig) -> Vec<String> {
    // Preferred source: let forge compute the remapping set itself.
    let output = Command::new("forge")
        .arg("remappings")
        .current_dir(&config.root)
        .env("FOUNDRY_DISABLE_NIGHTLY_WARNING", "1")
        .output()
        .await;

    if let Ok(output) = output
        && output.status.success()
    {
        let stdout = String::from_utf8_lossy(&output.stdout);
        let remappings: Vec<String> = stdout
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
        // Empty forge output falls through to the config-based fallbacks.
        if !remappings.is_empty() {
            return remappings;
        }
    }

    // Fallback 1: remappings from the parsed foundry config.
    if !config.remappings.is_empty() {
        return config.remappings.clone();
    }

    // Fallback 2: a hand-maintained remappings.txt at the project root.
    let remappings_txt = config.root.join("remappings.txt");
    if let Ok(content) = std::fs::read_to_string(&remappings_txt) {
        return content
            .lines()
            .filter(|l| !l.trim().is_empty())
            .map(|l| l.to_string())
            .collect();
    }

    Vec::new()
}
570
571pub fn build_standard_json_input(
588 file_path: &str,
589 remappings: &[String],
590 config: &FoundryConfig,
591) -> Value {
592 let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
595 if !config.via_ir {
596 contract_outputs.push("evm.gasEstimates");
597 }
598
599 let mut settings = json!({
600 "remappings": remappings,
601 "outputSelection": {
602 "*": {
603 "*": contract_outputs,
604 "": ["ast"]
605 }
606 }
607 });
608
609 if config.via_ir {
610 settings["viaIR"] = json!(true);
611 }
612
613 if let Some(ref evm_version) = config.evm_version {
615 settings["evmVersion"] = json!(evm_version);
616 }
617
618 json!({
619 "language": "Solidity",
620 "sources": {
621 file_path: {
622 "urls": [file_path]
623 }
624 },
625 "settings": settings
626 })
627}
628
/// Run `solc --standard-json`, feeding `input` on stdin and parsing the JSON
/// reply from stdout.
///
/// # Errors
/// Fails on input serialization, process spawn, stdin write, empty solc
/// output (stderr is folded into the message), or unparseable JSON.
pub async fn run_solc(
    solc_binary: &Path,
    input: &Value,
    project_root: &Path,
) -> Result<Value, RunnerError> {
    let input_str = serde_json::to_string(input)?;

    // cwd = project root so relative "urls" entries in the input resolve.
    let mut child = Command::new(solc_binary)
        .arg("--standard-json")
        .current_dir(project_root)
        .stdin(std::process::Stdio::piped())
        .stdout(std::process::Stdio::piped())
        .stderr(std::process::Stdio::piped())
        .spawn()?;

    if let Some(mut stdin) = child.stdin.take() {
        use tokio::io::AsyncWriteExt;
        stdin
            .write_all(input_str.as_bytes())
            .await
            .map_err(RunnerError::CommandError)?;
    }
    // stdin handle dropped above, closing the pipe so solc sees EOF.

    let output = child
        .wait_with_output()
        .await
        .map_err(RunnerError::CommandError)?;

    let stdout = String::from_utf8_lossy(&output.stdout);
    // Empty stdout means solc died before emitting JSON; surface its stderr.
    if stdout.trim().is_empty() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(RunnerError::CommandError(std::io::Error::other(format!(
            "solc produced no output, stderr: {stderr}"
        ))));
    }

    let parsed: Value = serde_json::from_str(&stdout)?;
    Ok(parsed)
}
672
/// Reshape raw solc standard-JSON output into the normalized schema used by
/// the rest of the server: `{ errors, sources, contracts, source_id_to_path }`,
/// with every source/contract key and AST `absolutePath` made absolute
/// against `project_root` (when given).
pub fn normalize_solc_output(mut solc_output: Value, project_root: Option<&Path>) -> Value {
    // Recursively rewrite `absolutePath` on ImportDirective nodes so import
    // targets inside ASTs are absolute too.
    // NOTE(review): only `nodes` arrays are walked — assumes ImportDirectives
    // are reachable via `nodes`; confirm against solc's AST shape.
    fn resolve_import_absolute_paths(node: &mut Value, resolve: &dyn Fn(&str) -> String) {
        let is_import = node.get("nodeType").and_then(|v| v.as_str()) == Some("ImportDirective");

        if is_import {
            if let Some(abs_path) = node.get("absolutePath").and_then(|v| v.as_str()) {
                let resolved = resolve(abs_path);
                // `as_object_mut().unwrap()` is safe: `get` succeeded above,
                // so the node is an object.
                node.as_object_mut()
                    .unwrap()
                    .insert("absolutePath".to_string(), json!(resolved));
            }
        }

        if let Some(nodes) = node.get_mut("nodes").and_then(|v| v.as_array_mut()) {
            for child in nodes {
                resolve_import_absolute_paths(child, resolve);
            }
        }
    }
    let mut result = Map::new();

    // Errors pass through untouched (empty array when solc reported none).
    let errors = solc_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // Make a path absolute against the project root; already-absolute paths
    // (or a missing root) are returned unchanged.
    let resolve = |p: &str| -> String {
        if let Some(root) = project_root {
            let path = Path::new(p);
            if path.is_relative() {
                return root.join(path).to_string_lossy().into_owned();
            }
        }
        p.to_string()
    };

    let mut source_id_to_path = Map::new();
    let mut resolved_sources = Map::new();

    if let Some(sources) = solc_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        // Keys are snapshotted first because entries are removed while
        // iterating.
        let keys: Vec<String> = sources.keys().cloned().collect();
        for key in keys {
            if let Some(mut source_data) = sources.remove(&key) {
                let abs_key = resolve(&key);

                if let Some(ast) = source_data.get_mut("ast") {
                    // Fix the AST's own absolutePath, then its imports.
                    if let Some(abs_path) = ast.get("absolutePath").and_then(|v| v.as_str()) {
                        let resolved = resolve(abs_path);
                        ast.as_object_mut()
                            .unwrap()
                            .insert("absolutePath".to_string(), json!(resolved));
                    }
                    resolve_import_absolute_paths(ast, &resolve);
                }

                // Record numeric source id → absolute path for decoding
                // src-location references elsewhere.
                if let Some(id) = source_data.get("id") {
                    source_id_to_path.insert(id.to_string(), json!(&abs_key));
                }

                resolved_sources.insert(abs_key, source_data);
            }
        }
    }

    result.insert("sources".to_string(), Value::Object(resolved_sources));

    // Contract map keys get the same absolute-path treatment.
    let mut resolved_contracts = Map::new();
    if let Some(contracts) = solc_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        let keys: Vec<String> = contracts.keys().cloned().collect();
        for key in keys {
            if let Some(contract_data) = contracts.remove(&key) {
                resolved_contracts.insert(resolve(&key), contract_data);
            }
        }
    }
    result.insert("contracts".to_string(), Value::Object(resolved_contracts));

    result.insert(
        "source_id_to_path".to_string(),
        Value::Object(source_id_to_path),
    );

    Value::Object(result)
}
797
/// Reshape forge's compile output into the same normalized schema as
/// [`normalize_solc_output`]: `{ errors, sources, contracts, source_id_to_path }`.
pub fn normalize_forge_output(mut forge_output: Value) -> Value {
    let mut result = Map::new();

    // Errors pass through untouched (empty array when none were reported).
    let errors = forge_output
        .get_mut("errors")
        .map(Value::take)
        .unwrap_or_else(|| json!([]));
    result.insert("errors".to_string(), errors);

    // forge nests each source under an array of build entries; keep only the
    // first entry's "source_file" payload.
    let mut normalized_sources = Map::new();
    if let Some(sources) = forge_output
        .get_mut("sources")
        .and_then(|s| s.as_object_mut())
    {
        for (path, entries) in sources.iter_mut() {
            if let Some(arr) = entries.as_array_mut()
                && let Some(first) = arr.first_mut()
                && let Some(sf) = first.get_mut("source_file")
            {
                normalized_sources.insert(path.clone(), sf.take());
            }
        }
    }
    result.insert("sources".to_string(), Value::Object(normalized_sources));

    // Same flattening for contracts: path → name → first entry's "contract".
    let mut normalized_contracts = Map::new();
    if let Some(contracts) = forge_output
        .get_mut("contracts")
        .and_then(|c| c.as_object_mut())
    {
        for (path, names) in contracts.iter_mut() {
            let mut path_contracts = Map::new();
            if let Some(names_obj) = names.as_object_mut() {
                for (name, entries) in names_obj.iter_mut() {
                    if let Some(arr) = entries.as_array_mut()
                        && let Some(first) = arr.first_mut()
                        && let Some(contract) = first.get_mut("contract")
                    {
                        path_contracts.insert(name.clone(), contract.take());
                    }
                }
            }
            normalized_contracts.insert(path.clone(), Value::Object(path_contracts));
        }
    }
    result.insert("contracts".to_string(), Value::Object(normalized_contracts));

    // The id → path map lives in the first build_infos entry, when present.
    let source_id_to_path = forge_output
        .get_mut("build_infos")
        .and_then(|bi| bi.as_array_mut())
        .and_then(|arr| arr.first_mut())
        .and_then(|info| info.get_mut("source_id_to_path"))
        .map(Value::take)
        .unwrap_or_else(|| json!({}));
    result.insert("source_id_to_path".to_string(), source_id_to_path);

    Value::Object(result)
}
871
/// Compile a single file with a pragma-appropriate solc binary and return the
/// normalized standard-JSON output.
pub async fn solc_ast(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    // Pragma collection walks the import graph on disk — run it off the async
    // executor. A panicked/cancelled blocking task degrades to "no pragmas".
    let file_abs = Path::new(file_path).to_path_buf();
    let config_root = config.root.clone();
    let remappings_clone = remappings.clone();
    let pragmas = tokio::task::spawn_blocking(move || {
        collect_import_pragmas(&file_abs, &config_root, &remappings_clone)
    })
    .await
    .unwrap_or_default();
    let constraint = tightest_constraint(&pragmas);
    let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;

    // solc is run with cwd = project root, so the input references the file
    // by project-relative path; files outside the root keep their given path.
    let rel_path = Path::new(file_path)
        .strip_prefix(&config.root)
        .map(|p| p.to_string_lossy().into_owned())
        .unwrap_or_else(|_| file_path.to_string());

    let input = build_standard_json_input(&rel_path, &remappings, config);
    let raw_output = run_solc(&solc_binary, &input, &config.root).await?;

    Ok(normalize_solc_output(raw_output, Some(&config.root)))
}
912
/// Full build entry point — currently identical to [`solc_ast`], which
/// already requests ABI/doc/selector outputs alongside the AST.
pub async fn solc_build(
    file_path: &str,
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
) -> Result<Value, RunnerError> {
    solc_ast(file_path, config, client).await
}
921
/// Discover the project's `.sol` files, excluding configured lib directories.
pub fn discover_source_files(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, false)
}
937
/// Discover the project's `.sol` files, including lib directories.
pub fn discover_source_files_with_libs(config: &FoundryConfig) -> Vec<PathBuf> {
    discover_source_files_inner(config, true)
}
947
948fn discover_source_files_inner(config: &FoundryConfig, include_libs: bool) -> Vec<PathBuf> {
949 let root = &config.root;
950 if !root.is_dir() {
951 return Vec::new();
952 }
953 let skip_libs = if include_libs { &[][..] } else { &config.libs };
954 let mut files = Vec::new();
955 discover_recursive(root, skip_libs, &mut files);
956 files.sort();
957 files
958}
959
960pub fn discover_compilation_closure(config: &FoundryConfig, remappings: &[String]) -> Vec<PathBuf> {
979 let seeds = discover_source_files(config);
981 let mut visited: HashSet<PathBuf> = HashSet::new();
982 let mut queue: std::collections::VecDeque<PathBuf> = seeds.into_iter().collect();
983
984 while let Some(file) = queue.pop_front() {
985 if !visited.insert(file.clone()) {
986 continue;
987 }
988 let source = match std::fs::read_to_string(&file) {
989 Ok(s) => s,
990 Err(_) => continue,
991 };
992 for imp in links::ts_find_imports(source.as_bytes()) {
993 if let Some(abs) = resolve_import_to_abs(&config.root, &file, &imp.path, remappings) {
994 if abs.exists() && !visited.contains(&abs) {
995 queue.push_back(abs);
996 }
997 }
998 }
999 }
1000
1001 let mut result: Vec<PathBuf> = visited.into_iter().collect();
1002 result.sort();
1003 result
1004}
1005
/// Build-output directories that never contain hand-written sources; always
/// pruned during discovery.
const DISCOVER_SKIP_DIRS: &[&str] = &["out", "artifacts", "cache", "target", "broadcast"];
1009
1010fn discover_recursive(dir: &Path, skip_libs: &[String], files: &mut Vec<PathBuf>) {
1011 let entries = match std::fs::read_dir(dir) {
1012 Ok(e) => e,
1013 Err(_) => return,
1014 };
1015 for entry in entries.flatten() {
1016 let path = entry.path();
1017 if path.is_dir() {
1018 if let Some(name) = path.file_name().and_then(|n| n.to_str()) {
1019 if name.starts_with('.') {
1021 continue;
1022 }
1023 if DISCOVER_SKIP_DIRS.contains(&name) {
1025 continue;
1026 }
1027 if skip_libs.iter().any(|lib| lib == name) {
1029 continue;
1030 }
1031 }
1032 discover_recursive(&path, skip_libs, files);
1033 } else if let Some(name) = path.file_name().and_then(|n| n.to_str())
1034 && name.ends_with(".sol")
1035 {
1036 files.push(path);
1037 }
1038 }
1039}
1040
/// Convenience wrapper over [`build_batch_standard_json_input_with_cache`]
/// for callers with no editor-buffer content cache.
pub fn build_batch_standard_json_input(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
) -> Value {
    build_batch_standard_json_input_with_cache(source_files, remappings, config, None)
}
1054
/// Build a multi-file solc standard-JSON input.
///
/// When `content_cache` (keyed by file URI, value `(version, text)`) holds an
/// entry for a file, its in-editor text is embedded inline as "content";
/// otherwise solc reads the file from disk via a "urls" reference.
pub fn build_batch_standard_json_input_with_cache(
    source_files: &[PathBuf],
    remappings: &[String],
    config: &FoundryConfig,
    content_cache: Option<&HashMap<String, (i32, String)>>,
) -> Value {
    let mut contract_outputs = vec!["abi", "devdoc", "userdoc", "evm.methodIdentifiers"];
    // Gas estimates are only requested outside via-IR builds — presumably
    // unsupported or too costly there; TODO confirm. Mirrors
    // build_standard_json_input.
    if !config.via_ir {
        contract_outputs.push("evm.gasEstimates");
    }

    let mut settings = json!({
        "remappings": remappings,
        "outputSelection": {
            "*": {
                "*": contract_outputs,
                "": ["ast"]
            }
        }
    });

    if config.via_ir {
        settings["viaIR"] = json!(true);
    }
    if let Some(ref evm_version) = config.evm_version {
        settings["evmVersion"] = json!(evm_version);
    }

    let mut sources = serde_json::Map::new();
    for file in source_files {
        // solc runs with cwd = project root, so keys are project-relative;
        // files outside the root keep their absolute path.
        let rel_path = file
            .strip_prefix(&config.root)
            .map(|p| p.to_string_lossy().into_owned())
            .unwrap_or_else(|_| file.to_string_lossy().into_owned());

        // The cache is keyed by file:// URI string.
        let cached_content = content_cache.and_then(|cache| {
            let uri = Url::from_file_path(file).ok()?;
            cache.get(&uri.to_string()).map(|(_, c)| c.as_str())
        });

        if let Some(content) = cached_content {
            sources.insert(rel_path, json!({ "content": content }));
        } else {
            sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
        }
    }

    json!({
        "language": "Solidity",
        "sources": sources,
        "settings": settings
    })
}
1118
1119pub fn build_parse_only_json_input(
1136 source_files: &[PathBuf],
1137 remappings: &[String],
1138 config: &FoundryConfig,
1139) -> Value {
1140 let settings = json!({
1141 "stopAfter": "parsing",
1142 "remappings": remappings,
1143 "outputSelection": {
1144 "*": {
1145 "": ["ast"]
1146 }
1147 }
1148 });
1149
1150 let mut sources = serde_json::Map::new();
1151 for file in source_files {
1152 let rel_path = file
1153 .strip_prefix(&config.root)
1154 .map(|p| p.to_string_lossy().into_owned())
1155 .unwrap_or_else(|_| file.to_string_lossy().into_owned());
1156 sources.insert(rel_path.clone(), json!({ "urls": [rel_path] }));
1157 }
1158
1159 json!({
1160 "language": "Solidity",
1161 "sources": sources,
1162 "settings": settings
1163 })
1164}
1165
/// Build a whole-project index: discover the compilation closure, then
/// compile everything, preferring editor-buffer contents from `text_cache`.
///
/// # Errors
/// Fails when discovery finds no source files, or when compilation fails.
pub async fn solc_project_index(
    config: &FoundryConfig,
    client: Option<&tower_lsp::Client>,
    text_cache: Option<&HashMap<String, (i32, String)>>,
) -> Result<Value, RunnerError> {
    let remappings = resolve_remappings(config).await;

    let source_files = discover_compilation_closure(config, &remappings);
    if source_files.is_empty() {
        return Err(RunnerError::CommandError(std::io::Error::other(
            "no source files found for project index",
        )));
    }

    solc_project_index_from_files(config, client, text_cache, &source_files).await
}
1196
1197pub async fn solc_project_index_scoped(
1202 config: &FoundryConfig,
1203 client: Option<&tower_lsp::Client>,
1204 text_cache: Option<&HashMap<String, (i32, String)>>,
1205 source_files: &[PathBuf],
1206) -> Result<Value, RunnerError> {
1207 if source_files.is_empty() {
1208 return Err(RunnerError::CommandError(std::io::Error::other(
1209 "no source files provided for scoped project index",
1210 )));
1211 }
1212
1213 solc_project_index_from_files(config, client, text_cache, source_files).await
1214}
1215
#[cfg(test)]
/// Collect the source files blamed by solc errors with code "5333" — treated
/// here as the compiler-version-mismatch diagnostic.
fn extract_version_error_files(solc_output: &Value) -> HashSet<String> {
    let Some(errors) = solc_output.get("errors").and_then(|e| e.as_array()) else {
        return HashSet::new();
    };
    errors
        .iter()
        .filter(|err| err.get("errorCode").and_then(|c| c.as_str()) == Some("5333"))
        .filter_map(|err| {
            err.get("sourceLocation")
                .and_then(|sl| sl.get("file"))
                .and_then(|f| f.as_str())
                .map(str::to_string)
        })
        .collect()
}
1237
#[cfg(test)]
/// Compute the set of files that (transitively) import any file in
/// `exclude_abs`, including the excluded files themselves.
///
/// FIX: the source contained mojibake `¤t` (HTML-entity residue of
/// `&current`) in the reverse-edge lookup; restored to `&current`.
fn reverse_import_closure(
    source_files: &[PathBuf],
    exclude_abs: &HashSet<PathBuf>,
    project_root: &Path,
    remappings: &[String],
) -> HashSet<PathBuf> {
    // Reverse edge map: imported file → set of files that import it.
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    for file in source_files {
        let Ok(bytes) = std::fs::read(file) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            if let Some(imported_abs) =
                resolve_import_to_abs(project_root, file, &imp.path, remappings)
            {
                reverse_edges
                    .entry(imported_abs)
                    .or_default()
                    .insert(file.clone());
            }
        }
    }

    // BFS from the excluded set along reverse edges.
    let mut closure: HashSet<PathBuf> = exclude_abs.clone();
    let mut queue: std::collections::VecDeque<PathBuf> = exclude_abs.iter().cloned().collect();

    while let Some(current) = queue.pop_front() {
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                if closure.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    closure
}
1286
1287fn merge_normalized_outputs(base: &mut Value, other: Value) {
1293 if let (Some(base_sources), Some(other_sources)) = (
1295 base.get_mut("sources").and_then(|s| s.as_object_mut()),
1296 other.get("sources").and_then(|s| s.as_object()),
1297 ) {
1298 let max_base_id = base_sources
1300 .values()
1301 .filter_map(|v| v.get("id").and_then(|id| id.as_u64()))
1302 .max()
1303 .map(|m| m + 1)
1304 .unwrap_or(0);
1305
1306 let mut remapped_id_to_path: Vec<(String, String)> = Vec::new();
1308
1309 for (path, mut source_data) in other_sources.clone() {
1310 if let Some(id) = source_data.get("id").and_then(|id| id.as_u64()) {
1312 let new_id = id + max_base_id;
1313 source_data
1314 .as_object_mut()
1315 .unwrap()
1316 .insert("id".to_string(), json!(new_id));
1317 remapped_id_to_path.push((new_id.to_string(), path.clone()));
1318 }
1319 base_sources.insert(path, source_data);
1320 }
1321
1322 if let Some(base_id_map) = base
1324 .get_mut("source_id_to_path")
1325 .and_then(|m| m.as_object_mut())
1326 {
1327 for (id, path) in remapped_id_to_path {
1328 base_id_map.insert(id, json!(path));
1329 }
1330 }
1331 }
1332
1333 if let (Some(base_contracts), Some(other_contracts)) = (
1335 base.get_mut("contracts").and_then(|c| c.as_object_mut()),
1336 other.get("contracts").and_then(|c| c.as_object()),
1337 ) {
1338 for (path, contract_data) in other_contracts {
1339 base_contracts.insert(path.clone(), contract_data.clone());
1340 }
1341 }
1342
1343 }
1346
1347async fn solc_project_index_from_files(
1348 config: &FoundryConfig,
1349 client: Option<&tower_lsp::Client>,
1350 text_cache: Option<&HashMap<String, (i32, String)>>,
1351 source_files: &[PathBuf],
1352) -> Result<Value, RunnerError> {
1353 if source_files.is_empty() {
1354 return Err(RunnerError::CommandError(std::io::Error::other(
1355 "no source files found for project index",
1356 )));
1357 }
1358
1359 if let Some(c) = client {
1360 c.log_message(
1361 tower_lsp::lsp_types::MessageType::INFO,
1362 format!(
1363 "project index: discovered {} source files in {}",
1364 source_files.len(),
1365 config.root.display()
1366 ),
1367 )
1368 .await;
1369 }
1370
1371 let remappings = resolve_remappings(config).await;
1372
1373 let project_version: Option<SemVer> =
1375 config.solc_version.as_ref().and_then(|v| SemVer::parse(v));
1376
1377 let constraint: Option<PragmaConstraint> = if let Some(ref v) = project_version {
1380 Some(PragmaConstraint::Exact(v.clone()))
1381 } else {
1382 source_files.iter().find_map(|f| {
1383 std::fs::read_to_string(f)
1384 .ok()
1385 .and_then(|src| parse_pragma(&src))
1386 })
1387 };
1388 let solc_binary = resolve_solc_binary(config, constraint.as_ref(), client).await;
1389
1390 let (compatible_files, incompatible_files) = if let Some(ref ver) = project_version {
1396 let mut compat = Vec::with_capacity(source_files.len());
1397 let mut incompat = Vec::new();
1398 for file in source_files {
1399 let is_compatible = std::fs::read_to_string(file)
1400 .ok()
1401 .and_then(|src| parse_pragma(&src))
1402 .map(|pragma| version_satisfies(ver, &pragma))
1403 .unwrap_or(true);
1405 if is_compatible {
1406 compat.push(file.clone());
1407 } else {
1408 incompat.push(file.clone());
1409 }
1410 }
1411 (compat, incompat)
1412 } else {
1413 (source_files.to_vec(), Vec::new())
1415 };
1416
1417 if !incompatible_files.is_empty() {
1418 if let Some(c) = client {
1419 c.log_message(
1420 tower_lsp::lsp_types::MessageType::INFO,
1421 format!(
1422 "project index: {} compatible, {} incompatible with solc {}",
1423 compatible_files.len(),
1424 incompatible_files.len(),
1425 project_version
1426 .as_ref()
1427 .map(|v| v.to_string())
1428 .unwrap_or_default(),
1429 ),
1430 )
1431 .await;
1432 }
1433 }
1434
1435 let mut result = if compatible_files.is_empty() {
1443 json!({"sources": {}, "contracts": {}, "errors": [], "source_id_to_path": {}})
1444 } else {
1445 let input = build_batch_standard_json_input_with_cache(
1446 &compatible_files,
1447 &remappings,
1448 config,
1449 text_cache,
1450 );
1451 let raw = run_solc(&solc_binary, &input, &config.root).await?;
1452 normalize_solc_output(raw, Some(&config.root))
1453 };
1454
1455 let batch_source_count = result
1456 .get("sources")
1457 .and_then(|s| s.as_object())
1458 .map_or(0, |obj| obj.len());
1459
1460 if incompatible_files.is_empty() {
1461 if let Some(c) = client {
1462 c.log_message(
1463 tower_lsp::lsp_types::MessageType::INFO,
1464 format!(
1465 "project index: compiled {} files with no version mismatches",
1466 source_files.len(),
1467 ),
1468 )
1469 .await;
1470 }
1471 return Ok(result);
1472 }
1473
1474 if let Some(c) = client {
1475 let batch_errors: Vec<String> = result
1477 .get("errors")
1478 .and_then(|e| e.as_array())
1479 .map(|arr| {
1480 arr.iter()
1481 .filter(|e| e.get("severity").and_then(|s| s.as_str()) == Some("error"))
1482 .take(3)
1483 .filter_map(|e| {
1484 let msg = e.get("message").and_then(|m| m.as_str()).unwrap_or("?");
1485 let file = e
1486 .get("sourceLocation")
1487 .and_then(|sl| sl.get("file"))
1488 .and_then(|f| f.as_str())
1489 .unwrap_or("?");
1490 Some(format!("{file}: {msg}"))
1491 })
1492 .collect()
1493 })
1494 .unwrap_or_default();
1495
1496 c.log_message(
1497 tower_lsp::lsp_types::MessageType::INFO,
1498 format!(
1499 "project index: batch produced {} sources, now compiling {} incompatible files individually{}",
1500 batch_source_count,
1501 incompatible_files.len(),
1502 if batch_errors.is_empty() {
1503 String::new()
1504 } else {
1505 format!(" [first errors: {}]", batch_errors.join("; "))
1506 },
1507 ),
1508 )
1509 .await;
1510 }
1511
1512 let mut compiled = 0usize;
1514 let mut skipped = 0usize;
1515 for file in &incompatible_files {
1516 let pragma = std::fs::read_to_string(file)
1517 .ok()
1518 .and_then(|src| parse_pragma(&src));
1519
1520 let Some(file_constraint) = pragma else {
1521 skipped += 1;
1522 continue;
1523 };
1524
1525 let file_binary = resolve_solc_binary(config, Some(&file_constraint), client).await;
1526 let input = build_batch_standard_json_input_with_cache(
1527 &[file.clone()],
1528 &remappings,
1529 config,
1530 text_cache,
1531 );
1532 match run_solc(&file_binary, &input, &config.root).await {
1533 Ok(raw) => {
1534 let normalized = normalize_solc_output(raw, Some(&config.root));
1535 merge_normalized_outputs(&mut result, normalized);
1536 compiled += 1;
1537 }
1538 Err(e) => {
1539 if let Some(c) = client {
1540 c.log_message(
1541 tower_lsp::lsp_types::MessageType::WARNING,
1542 format!(
1543 "project index: incompatible file {} failed: {e}",
1544 file.display(),
1545 ),
1546 )
1547 .await;
1548 }
1549 skipped += 1;
1550 }
1551 }
1552 }
1553
1554 if let Some(c) = client {
1555 c.log_message(
1556 tower_lsp::lsp_types::MessageType::INFO,
1557 format!(
1558 "project index: incompatible files done — {compiled} compiled, {skipped} skipped",
1559 ),
1560 )
1561 .await;
1562 }
1563
1564 Ok(result)
1565}
1566
#[cfg(test)]
mod tests {
    use super::*;

    /// Compact constructor for the `SemVer` literals used throughout these tests.
    fn sv(major: u32, minor: u32, patch: u32) -> SemVer {
        SemVer {
            major,
            minor,
            patch,
        }
    }

    /// Writes `contents` into `root/name`, panicking on any I/O failure.
    fn write_sol(root: &Path, name: &str, contents: &str) {
        std::fs::write(root.join(name), contents).unwrap();
    }

    #[test]
    fn test_normalize_solc_sources() {
        // Two source units with distinct ids and ASTs.
        let raw = json!({
            "sources": {
                "src/Foo.sol": {
                    "id": 0,
                    "ast": { "nodeType": "SourceUnit", "absolutePath": "src/Foo.sol", "id": 100 }
                },
                "src/Bar.sol": {
                    "id": 1,
                    "ast": { "nodeType": "SourceUnit", "absolutePath": "src/Bar.sol", "id": 200 }
                }
            },
            "contracts": {},
            "errors": []
        });

        let out = normalize_solc_output(raw, None);

        let sources = out["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 2);

        let foo = &sources["src/Foo.sol"];
        assert_eq!(foo["id"], 0);
        assert_eq!(foo["ast"]["nodeType"].as_str().unwrap(), "SourceUnit");

        // Both source ids must appear in the reverse lookup table.
        let id_map = out["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_map.len(), 2);
    }

    #[test]
    fn test_normalize_solc_contracts() {
        let raw = json!({
            "sources": {},
            "contracts": {
                "src/Foo.sol": {
                    "Foo": {
                        "abi": [{"type": "function", "name": "bar"}],
                        "evm": {
                            "methodIdentifiers": { "bar(uint256)": "abcd1234" },
                            "gasEstimates": { "external": {"bar(uint256)": "200"} }
                        }
                    }
                }
            },
            "errors": []
        });

        let out = normalize_solc_output(raw, None);

        // Contract payload survives normalization with its selector intact.
        let foo = &out["contracts"]["src/Foo.sol"]["Foo"];
        assert_eq!(
            foo["evm"]["methodIdentifiers"]["bar(uint256)"]
                .as_str()
                .unwrap(),
            "abcd1234"
        );
    }

    #[test]
    fn test_normalize_solc_errors_passthrough() {
        let raw = json!({
            "sources": {},
            "contracts": {},
            "errors": [{
                "sourceLocation": {"file": "src/Foo.sol", "start": 0, "end": 10},
                "type": "Warning",
                "component": "general",
                "severity": "warning",
                "errorCode": "2394",
                "message": "test warning",
                "formattedMessage": "Warning: test warning"
            }]
        });

        let out = normalize_solc_output(raw, None);

        // The diagnostic is passed through unchanged.
        let errors = out["errors"].as_array().unwrap();
        assert_eq!(errors.len(), 1);
        assert_eq!(errors[0]["errorCode"].as_str().unwrap(), "2394");
    }

    #[test]
    fn test_normalize_empty_solc_output() {
        // Even with the "errors" key absent, all four sections must be present.
        let out = normalize_solc_output(json!({"sources": {}, "contracts": {}}), None);

        assert!(out["sources"].as_object().unwrap().is_empty());
        assert!(out["contracts"].as_object().unwrap().is_empty());
        assert!(out["errors"].as_array().unwrap().is_empty());
        assert!(out["source_id_to_path"].as_object().unwrap().is_empty());
    }

    #[test]
    fn test_build_standard_json_input() {
        let config = FoundryConfig::default();
        let remaps = [
            "ds-test/=lib/forge-std/lib/ds-test/src/".to_string(),
            "forge-std/=lib/forge-std/src/".to_string(),
        ];
        let input = build_standard_json_input("/path/to/Foo.sol", &remaps, &config);

        assert!(
            input["sources"]
                .as_object()
                .unwrap()
                .contains_key("/path/to/Foo.sol")
        );

        let settings = &input["settings"];
        assert_eq!(settings["remappings"].as_array().unwrap().len(), 2);
        assert!(settings["outputSelection"].get("*").is_some());

        // A default config emits no explicit compiler settings.
        assert!(settings.get("optimizer").is_none());
        assert!(settings.get("viaIR").is_none());
        assert!(settings.get("evmVersion").is_none());

        // All expected output selections are requested for every contract.
        let names: Vec<&str> = settings["outputSelection"]["*"]["*"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap())
            .collect();
        for expected in [
            "evm.gasEstimates",
            "abi",
            "devdoc",
            "userdoc",
            "evm.methodIdentifiers",
        ] {
            assert!(names.contains(&expected));
        }
    }

    #[test]
    fn test_build_standard_json_input_with_config() {
        let config = FoundryConfig {
            optimizer: true,
            optimizer_runs: 9999999,
            via_ir: true,
            evm_version: Some("osaka".to_string()),
            ..Default::default()
        };
        let input = build_standard_json_input("/path/to/Foo.sol", &[], &config);
        let settings = &input["settings"];

        // Optimizer settings are still omitted even when enabled in foundry.toml.
        assert!(settings.get("optimizer").is_none());

        assert!(settings["viaIR"].as_bool().unwrap());

        // With this config, gasEstimates must not be among the requested outputs.
        let names: Vec<&str> = settings["outputSelection"]["*"]["*"]
            .as_array()
            .unwrap()
            .iter()
            .map(|v| v.as_str().unwrap())
            .collect();
        assert!(!names.contains(&"evm.gasEstimates"));

        assert_eq!(settings["evmVersion"].as_str().unwrap(), "osaka");
    }

    #[tokio::test]
    async fn test_resolve_solc_binary_default() {
        // No pragma constraint and a default config resolve to the plain "solc" name.
        let config = FoundryConfig::default();
        let resolved = resolve_solc_binary(&config, None, None).await;
        assert_eq!(resolved, PathBuf::from("solc"));
    }

    #[test]
    fn test_parse_pragma_exact() {
        let src = "// SPDX\npragma solidity 0.8.26;\n";
        assert_eq!(
            parse_pragma(src),
            Some(PragmaConstraint::Exact(sv(0, 8, 26)))
        );
    }

    #[test]
    fn test_parse_pragma_caret() {
        assert_eq!(
            parse_pragma("pragma solidity ^0.8.0;\n"),
            Some(PragmaConstraint::Caret(sv(0, 8, 0)))
        );
    }

    #[test]
    fn test_parse_pragma_gte() {
        assert_eq!(
            parse_pragma("pragma solidity >=0.8.0;\n"),
            Some(PragmaConstraint::Gte(sv(0, 8, 0)))
        );
    }

    #[test]
    fn test_parse_pragma_range() {
        assert_eq!(
            parse_pragma("pragma solidity >=0.6.2 <0.9.0;\n"),
            Some(PragmaConstraint::Range(sv(0, 6, 2), sv(0, 9, 0)))
        );
    }

    #[test]
    fn test_parse_pragma_none() {
        // A file with no pragma directive yields no constraint.
        assert_eq!(parse_pragma("contract Foo {}\n"), None);
    }

    #[test]
    fn test_version_satisfies_exact() {
        let v = sv(0, 8, 26);
        assert!(version_satisfies(&v, &PragmaConstraint::Exact(v.clone())));
        assert!(!version_satisfies(&sv(0, 8, 25), &PragmaConstraint::Exact(v)));
    }

    #[test]
    fn test_version_satisfies_caret() {
        let constraint = PragmaConstraint::Caret(sv(0, 8, 0));
        // Same minor line: any patch level is accepted.
        assert!(version_satisfies(&sv(0, 8, 0), &constraint));
        assert!(version_satisfies(&sv(0, 8, 26), &constraint));
        // Different minor line: rejected in both directions.
        assert!(!version_satisfies(&sv(0, 9, 0), &constraint));
        assert!(!version_satisfies(&sv(0, 7, 0), &constraint));
    }

    #[test]
    fn test_version_satisfies_gte() {
        let constraint = PragmaConstraint::Gte(sv(0, 8, 0));
        assert!(version_satisfies(&sv(0, 8, 0), &constraint));
        assert!(version_satisfies(&sv(0, 9, 0), &constraint));
        assert!(!version_satisfies(&sv(0, 7, 0), &constraint));
    }

    #[test]
    fn test_version_satisfies_range() {
        // Lower bound inclusive, upper bound exclusive.
        let constraint = PragmaConstraint::Range(sv(0, 6, 2), sv(0, 9, 0));
        assert!(version_satisfies(&sv(0, 6, 2), &constraint));
        assert!(version_satisfies(&sv(0, 8, 26), &constraint));
        assert!(!version_satisfies(&sv(0, 9, 0), &constraint));
        assert!(!version_satisfies(&sv(0, 6, 1), &constraint));
    }

    #[test]
    fn test_find_matching_version() {
        let installed = vec![sv(0, 8, 0), sv(0, 8, 20), sv(0, 8, 26), sv(0, 8, 33)];

        // Caret picks the newest installed version in the 0.8 line.
        assert_eq!(
            find_matching_version(&PragmaConstraint::Caret(sv(0, 8, 20)), &installed),
            Some(sv(0, 8, 33))
        );

        // Exact matches only the identical installed version.
        assert_eq!(
            find_matching_version(&PragmaConstraint::Exact(sv(0, 8, 20)), &installed),
            Some(sv(0, 8, 20))
        );

        // Exact with nothing installed at that version yields no match.
        assert_eq!(
            find_matching_version(&PragmaConstraint::Exact(sv(0, 8, 15)), &installed),
            None
        );
    }

    #[test]
    fn test_list_installed_versions() {
        // Whatever happens to be installed locally, the list must be sorted ascending.
        let versions = list_installed_versions();
        assert!(versions.windows(2).all(|w| w[0] <= w[1]));
    }

    #[test]
    fn test_extract_version_error_files_basic() {
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/openzeppelin/contracts/token/ERC20/ERC20.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "message": "Source file requires different compiler version",
                    "sourceLocation": {
                        "file": "lib/old-lib/src/Legacy.sol",
                        "start": 32,
                        "end": 58
                    }
                },
                {
                    "errorCode": "9574",
                    "severity": "error",
                    "message": "Some other error",
                    "sourceLocation": { "file": "src/Main.sol", "start": 100, "end": 200 }
                }
            ]
        });

        // Only files from code-5333 (version mismatch) errors are collected.
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 2);
        assert!(files.contains("lib/openzeppelin/contracts/token/ERC20/ERC20.sol"));
        assert!(files.contains("lib/old-lib/src/Legacy.sol"));
        assert!(!files.contains("src/Main.sol"));
    }

    #[test]
    fn test_extract_version_error_files_empty() {
        // Both an empty error array and a missing "errors" key yield an empty set.
        assert!(extract_version_error_files(&json!({ "errors": [] })).is_empty());
        assert!(extract_version_error_files(&json!({})).is_empty());
    }

    #[test]
    fn test_extract_version_error_files_no_source_location() {
        // A 5333 error with no sourceLocation cannot contribute a file.
        let output = json!({
            "errors": [{
                "errorCode": "5333",
                "severity": "error",
                "message": "Source file requires different compiler version"
            }]
        });
        assert!(extract_version_error_files(&output).is_empty());
    }

    #[test]
    fn test_extract_version_error_files_dedup() {
        // Two 5333 errors in the same file must collapse to a single entry.
        let output = json!({
            "errors": [
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 0, "end": 10 }
                },
                {
                    "errorCode": "5333",
                    "severity": "error",
                    "sourceLocation": { "file": "lib/same.sol", "start": 50, "end": 70 }
                }
            ]
        });
        let files = extract_version_error_files(&output);
        assert_eq!(files.len(), 1);
        assert!(files.contains("lib/same.sol"));
    }

    #[test]
    fn test_reverse_import_closure_simple() {
        // Chain a -> b -> c, with d standing alone.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        write_sol(
            root,
            "a.sol",
            "// SPDX-License-Identifier: MIT\nimport \"./b.sol\";\ncontract A {}",
        );
        write_sol(
            root,
            "b.sol",
            "// SPDX-License-Identifier: MIT\nimport \"./c.sol\";\ncontract B {}",
        );
        write_sol(root, "c.sol", "// SPDX-License-Identifier: MIT\ncontract C {}");
        write_sol(root, "d.sol", "// SPDX-License-Identifier: MIT\ncontract D {}");

        let files: Vec<PathBuf> = ["a.sol", "b.sol", "c.sol", "d.sol"]
            .iter()
            .map(|n| root.join(n))
            .collect();
        let exclude: HashSet<PathBuf> = [root.join("c.sol")].into_iter().collect();

        let closure = reverse_import_closure(&files, &exclude, root, &[]);

        assert!(
            closure.contains(&root.join("c.sol")),
            "seed file in closure"
        );
        assert!(closure.contains(&root.join("b.sol")), "direct importer");
        assert!(closure.contains(&root.join("a.sol")), "transitive importer");
        assert!(
            !closure.contains(&root.join("d.sol")),
            "unrelated file not in closure"
        );
        assert_eq!(closure.len(), 3);
    }

    #[test]
    fn test_reverse_import_closure_no_importers() {
        // Two unrelated files; nothing imports the seed.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        write_sol(root, "a.sol", "contract A {}");
        write_sol(root, "b.sol", "contract B {}");

        let files: Vec<PathBuf> = vec![root.join("a.sol"), root.join("b.sol")];
        let exclude: HashSet<PathBuf> = [root.join("a.sol")].into_iter().collect();

        // The closure is exactly the seed itself.
        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 1);
        assert!(closure.contains(&root.join("a.sol")));
    }

    #[test]
    fn test_reverse_import_closure_diamond() {
        // Diamond: a imports b and c; b and c both import d.
        let dir = tempfile::tempdir().unwrap();
        let root = dir.path();

        write_sol(
            root,
            "a.sol",
            "import \"./b.sol\";\nimport \"./c.sol\";\ncontract A {}",
        );
        write_sol(root, "b.sol", "import \"./d.sol\";\ncontract B {}");
        write_sol(root, "c.sol", "import \"./d.sol\";\ncontract C {}");
        write_sol(root, "d.sol", "contract D {}");

        let files: Vec<PathBuf> = ["a.sol", "b.sol", "c.sol", "d.sol"]
            .iter()
            .map(|n| root.join(n))
            .collect();
        let exclude: HashSet<PathBuf> = [root.join("d.sol")].into_iter().collect();

        // Excluding the shared leaf pulls in the entire diamond.
        let closure = reverse_import_closure(&files, &exclude, root, &[]);
        assert_eq!(closure.len(), 4);
    }

    #[test]
    fn test_merge_normalized_outputs_basic() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } },
                "/abs/src/B.sol": { "id": 1, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/src/A.sol": { "A": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/src/A.sol",
                "1": "/abs/src/B.sol"
            }
        });
        let other = json!({
            "sources": {
                "/abs/lib/C.sol": { "id": 0, "ast": { "nodeType": "SourceUnit" } }
            },
            "contracts": {
                "/abs/lib/C.sol": { "C": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": {
                "0": "/abs/lib/C.sol"
            }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 3);
        assert!(sources.contains_key("/abs/lib/C.sol"));

        // The merged source id is shifted past the base ids.
        assert_eq!(
            sources["/abs/lib/C.sol"]["id"].as_u64().unwrap(),
            2,
            "remapped id should be max_base_id (2) + original (0)"
        );

        let id_map = base["source_id_to_path"].as_object().unwrap();
        assert_eq!(id_map.len(), 3);
        assert_eq!(id_map["2"].as_str().unwrap(), "/abs/lib/C.sol");

        let contracts = base["contracts"].as_object().unwrap();
        assert_eq!(contracts.len(), 2);
        assert!(contracts.contains_key("/abs/lib/C.sol"));
    }

    #[test]
    fn test_merge_normalized_outputs_empty_other() {
        let mut base = json!({
            "sources": {
                "/abs/src/A.sol": { "id": 0, "ast": {} }
            },
            "contracts": {},
            "errors": [],
            "source_id_to_path": { "0": "/abs/src/A.sol" }
        });
        let empty = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });

        // Merging an empty output leaves the base unchanged.
        merge_normalized_outputs(&mut base, empty);
        assert_eq!(base["sources"].as_object().unwrap().len(), 1);
    }

    #[test]
    fn test_merge_normalized_outputs_empty_base() {
        let mut base = json!({
            "sources": {},
            "contracts": {},
            "errors": [],
            "source_id_to_path": {}
        });
        let other = json!({
            "sources": {
                "/abs/lib/X.sol": { "id": 0, "ast": {} }
            },
            "contracts": {
                "/abs/lib/X.sol": { "X": { "abi": [] } }
            },
            "errors": [],
            "source_id_to_path": { "0": "/abs/lib/X.sol" }
        });

        merge_normalized_outputs(&mut base, other);

        let sources = base["sources"].as_object().unwrap();
        assert_eq!(sources.len(), 1);
        // With an empty base there are no ids to offset against, so 0 stays 0.
        assert_eq!(sources["/abs/lib/X.sol"]["id"].as_u64().unwrap(), 0);
    }
}