1use crate::completion;
2use crate::config::{self, FoundryConfig, LintConfig, Settings};
3use crate::file_operations;
4use crate::folding;
5use crate::goto;
6use crate::highlight;
7use crate::hover;
8use crate::inlay_hints;
9use crate::links;
10use crate::references;
11use crate::rename;
12use crate::runner::{ForgeRunner, Runner};
13use crate::selection;
14use crate::semantic_tokens;
15use crate::symbols;
16use crate::utils;
17use std::collections::{HashMap, HashSet};
18use std::path::{Component, Path, PathBuf};
19use std::sync::Arc;
20use std::sync::atomic::{AtomicU64, Ordering};
21use tokio::sync::RwLock;
22use tower_lsp::{Client, LanguageServer, lsp_types::*};
23
/// Per-document semantic token cache: URI -> (result id, last delivered full
/// token set). NOTE(review): the `String` in the tuple is presumably the
/// result id minted from `semantic_token_id` for `full/delta` requests —
/// confirm against the semantic tokens handlers.
type SemanticTokenCache = HashMap<String, (String, Vec<SemanticToken>)>;
26
/// Shared state behind the Solidity language server.
///
/// Fields that background tasks need are wrapped in `Arc` so spawned tasks
/// can hold their own references past the originating request.
pub struct ForgeLsp {
    /// Handle back to the editor: logging, diagnostics, requests.
    client: Client,
    /// Compiler backend (forge or solar) for builds, lints and ASTs.
    compiler: Arc<dyn Runner>,
    /// Last good AST build per document URI (the project index is stored
    /// under the root URI as its key).
    ast_cache: Arc<RwLock<HashMap<String, Arc<goto::CachedBuild>>>>,
    /// Newest known document text per URI, tagged with its LSP version.
    text_cache: Arc<RwLock<HashMap<String, (i32, String)>>>,
    /// Per-URI completion data, copied from the cached build on success.
    completion_cache: Arc<RwLock<HashMap<String, Arc<completion::CompletionCache>>>>,
    /// Lint section of foundry.toml (ignore patterns, lint-on-build).
    lint_config: Arc<RwLock<LintConfig>>,
    /// Project-level foundry.toml settings (remappings, solc version, ...).
    foundry_config: Arc<RwLock<FoundryConfig>>,
    /// Capabilities the client announced during `initialize`.
    client_capabilities: Arc<RwLock<Option<ClientCapabilities>>>,
    /// Client-provided server settings (initializationOptions).
    settings: Arc<RwLock<Settings>>,
    /// When true, run solc directly for AST + diagnostics (with a runner
    /// fallback) instead of the forge/solar runner alone.
    use_solc: bool,
    /// Cached semantic tokens per URI for delta responses.
    semantic_token_cache: Arc<RwLock<SemanticTokenCache>>,
    /// Monotonic counter; presumably mints semantic-token result ids.
    semantic_token_id: Arc<AtomicU64>,
    /// Workspace root reported by the client, if any.
    root_uri: Arc<RwLock<Option<Url>>>,
    /// Set once the one-time full-project index has been kicked off.
    project_indexed: Arc<std::sync::atomic::AtomicBool>,
    /// True while the persisted project cache lags in-memory state.
    project_cache_dirty: Arc<std::sync::atomic::AtomicBool>,
    /// Worker-slot claim for the cache sync task (see the
    /// `*_project_cache_sync_*` helpers below for the handoff protocol).
    project_cache_sync_running: Arc<std::sync::atomic::AtomicBool>,
    /// Raised to request another sync pass from the running worker.
    project_cache_sync_pending: Arc<std::sync::atomic::AtomicBool>,
    /// Worker-slot claim for the incremental cache upsert task.
    project_cache_upsert_running: Arc<std::sync::atomic::AtomicBool>,
    /// Raised to request another upsert pass from the running worker.
    project_cache_upsert_pending: Arc<std::sync::atomic::AtomicBool>,
    /// Files whose edits still need to be reflected in the project cache.
    project_cache_changed_files: Arc<RwLock<HashSet<String>>>,
    /// Files queued for an incremental cache upsert.
    project_cache_upsert_files: Arc<RwLock<HashSet<String>>>,
    /// Paths awaiting create-time scaffolding. NOTE(review): semantics
    /// inferred from the name; confirm against the file-operations handlers.
    pending_create_scaffold: Arc<RwLock<HashSet<String>>>,
}
74
75impl ForgeLsp {
76 pub fn new(client: Client, use_solar: bool, use_solc: bool) -> Self {
77 let compiler: Arc<dyn Runner> = if use_solar {
78 Arc::new(crate::solar_runner::SolarRunner)
79 } else {
80 Arc::new(ForgeRunner)
81 };
82 let ast_cache = Arc::new(RwLock::new(HashMap::new()));
83 let text_cache = Arc::new(RwLock::new(HashMap::new()));
84 let completion_cache = Arc::new(RwLock::new(HashMap::new()));
85 let lint_config = Arc::new(RwLock::new(LintConfig::default()));
86 let foundry_config = Arc::new(RwLock::new(FoundryConfig::default()));
87 let client_capabilities = Arc::new(RwLock::new(None));
88 let settings = Arc::new(RwLock::new(Settings::default()));
89 Self {
90 client,
91 compiler,
92 ast_cache,
93 text_cache,
94 completion_cache,
95 lint_config,
96 foundry_config,
97 client_capabilities,
98 settings,
99 use_solc,
100 semantic_token_cache: Arc::new(RwLock::new(HashMap::new())),
101 semantic_token_id: Arc::new(AtomicU64::new(0)),
102 root_uri: Arc::new(RwLock::new(None)),
103 project_indexed: Arc::new(std::sync::atomic::AtomicBool::new(false)),
104 project_cache_dirty: Arc::new(std::sync::atomic::AtomicBool::new(false)),
105 project_cache_sync_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
106 project_cache_sync_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
107 project_cache_upsert_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
108 project_cache_upsert_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
109 project_cache_changed_files: Arc::new(RwLock::new(HashSet::new())),
110 project_cache_upsert_files: Arc::new(RwLock::new(HashSet::new())),
111 pending_create_scaffold: Arc::new(RwLock::new(HashSet::new())),
112 }
113 }
114
    /// Resolve the foundry configuration that applies to `file_path` by
    /// delegating to `config::load_foundry_config`.
    ///
    /// NOTE(review): this is `async` yet performs no awaits; presumably kept
    /// async so call sites don't churn if loading ever becomes asynchronous.
    async fn foundry_config_for_file(&self, file_path: &std::path::Path) -> FoundryConfig {
        config::load_foundry_config(file_path)
    }
125
    /// Rebuild one document's diagnostics and refresh the server caches.
    ///
    /// Pipeline:
    /// 1. run lint (when foundry.toml and the client settings allow it)
    ///    together with a build + AST pass — via solc directly when
    ///    `use_solc`, with a forge-runner fallback on solc failure;
    /// 2. on an error-free build, replace the per-URI AST and completion
    ///    caches (a failed build keeps the stale-but-usable caches);
    /// 3. store the newest text in the text cache (version-guarded);
    /// 4. merge lint + build diagnostics and publish them;
    /// 5. on success, ask the client to refresh inlay hints and — at most
    ///    once per server lifetime — spawn the full project index task.
    async fn on_change(&self, params: TextDocumentItem) {
        let uri = params.uri.clone();
        let version = params.version;

        // Non-file URIs (e.g. untitled buffers) cannot be compiled.
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return;
            }
        };

        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path")
                    .await;
                return;
            }
        };

        // Snapshot lint settings under short-lived read locks.
        let (should_lint, lint_settings) = {
            let lint_cfg = self.lint_config.read().await;
            let settings = self.settings.read().await;
            let enabled = lint_cfg.should_lint(&file_path) && settings.lint.enabled;
            let ls = settings.lint.clone();
            (enabled, ls)
        };

        // Four paths: {solc, runner} x {lint on, lint off}. Each yields
        // (optional lint result, build diagnostics result, AST result).
        let (lint_result, build_result, ast_result) = if self.use_solc {
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            let solc_future = crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client));

            if should_lint {
                // Lint and solc run concurrently.
                let (lint, solc) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    solc_future
                );
                match solc {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        // Re-read the file so diagnostic positions map onto
                        // the same bytes solc compiled.
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (Some(lint), Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        // solc failed: fall back to the forge runner for both
                        // build diagnostics and the AST.
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (Some(lint), build, ast)
                    }
                }
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                match solc_future.await {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (None, Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (None, build, ast)
                    }
                }
            }
        } else {
            if should_lint {
                // Runner path: lint, build, and AST all run concurrently.
                let (lint, build, ast) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (Some(lint), build, ast)
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                let (build, ast) = tokio::join!(
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (None, build, ast)
            }
        };

        // "Succeeded" = the build ran and produced no ERROR-severity
        // diagnostics; warnings do not block cache updates.
        let build_succeeded = matches!(&build_result, Ok(diagnostics) if diagnostics.iter().all(|d| d.severity != Some(DiagnosticSeverity::ERROR)));

        if build_succeeded {
            if let Ok(ast_data) = ast_result {
                let cached_build = Arc::new(goto::CachedBuild::new(ast_data, version));
                let mut cache = self.ast_cache.write().await;
                cache.insert(uri.to_string(), cached_build.clone());
                drop(cache);

                {
                    // Completion data is derived from the same build.
                    let mut cc = self.completion_cache.write().await;
                    cc.insert(uri.to_string(), cached_build.completion_cache.clone());
                }
                self.client
                    .log_message(MessageType::INFO, "Build successful, AST cache updated")
                    .await;
            } else if let Err(e) = ast_result {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("Build succeeded but failed to get AST: {e}"),
                    )
                    .await;
            }
        } else {
            // Keep the last good AST so navigation keeps working while the
            // user types through a broken state.
            self.client
                .log_message(
                    MessageType::INFO,
                    "Build errors detected, keeping existing AST cache",
                )
                .await;
        }

        {
            // Only move the text cache forward; a stale event must not
            // overwrite newer document contents.
            let mut text_cache = self.text_cache.write().await;
            let uri_str = uri.to_string();
            let existing_version = text_cache.get(&uri_str).map(|(v, _)| *v).unwrap_or(-1);
            if version >= existing_version {
                text_cache.insert(uri_str, (version, params.text));
            }
        }

        let mut all_diagnostics = vec![];

        if let Some(lint_result) = lint_result {
            match lint_result {
                Ok(mut lints) => {
                    // Drop lint codes the user excluded in settings.
                    if !lint_settings.exclude.is_empty() {
                        lints.retain(|d| {
                            if let Some(NumberOrString::String(code)) = &d.code {
                                !lint_settings.exclude.iter().any(|ex| ex == code)
                            } else {
                                true
                            }
                        });
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("found {} lint diagnostics", lints.len()),
                        )
                        .await;
                    all_diagnostics.append(&mut lints);
                }
                Err(e) => {
                    self.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Forge lint diagnostics failed: {e}"),
                        )
                        .await;
                }
            }
        }

        match build_result {
            Ok(mut builds) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("found {} build diagnostics", builds.len()),
                    )
                    .await;
                all_diagnostics.append(&mut builds);
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Forge build diagnostics failed: {e}"),
                    )
                    .await;
            }
        }

        // Give empty diagnostic messages a placeholder.
        for diag in &mut all_diagnostics {
            if diag.message.is_empty() {
                diag.message = "Unknown issue".to_string();
            }
        }

        self.client
            .publish_diagnostics(uri, all_diagnostics, None)
            .await;

        if build_succeeded {
            // Hints may have changed; refresh off the hot path.
            let client = self.client.clone();
            tokio::spawn(async move {
                let _ = client.inlay_hint_refresh().await;
            });
        }

        // One-time background full-project index: solc mode only, opt-in via
        // settings, and only after the first successful build.
        if build_succeeded
            && self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && !self
                .project_indexed
                .load(std::sync::atomic::Ordering::Relaxed)
        {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            // Mark indexed up front so concurrent on_change calls don't
            // spawn a second indexing task.
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let root_uri = self.root_uri.read().await.clone();
            let cache_key = root_uri.as_ref().map(|u| u.to_string());
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();

            tokio::spawn(async move {
                // Without a workspace root there is no key to index under.
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Report progress under a server-created token; the create
                // request may fail on clients without progress support.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the persisted reference cache first; file hashing is
                // blocking work, so it runs on the blocking pool.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        // Even a partial cache is inserted so features work
                        // while the full reindex (below) runs.
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete hit means no reindex is needed.
                            if report.complete {
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                            WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            },
                                        )),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache miss/partial: run the full solc project index.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                        let source_count = cached_build.nodes.len();
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!("project index: cached {} source files", source_count),
                            )
                            .await;

                        // Persist the new cache in the background.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index: cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index: failed to persist cache: {e}"
                                            ),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index: cache save task failed: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("project index failed: {e}"))
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some("Indexing failed".to_string()),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
624
625 async fn get_or_fetch_build(
634 &self,
635 uri: &Url,
636 file_path: &std::path::Path,
637 insert_on_miss: bool,
638 ) -> Option<Arc<goto::CachedBuild>> {
639 let uri_str = uri.to_string();
640
641 {
644 let cache = self.ast_cache.read().await;
645 if let Some(cached) = cache.get(&uri_str) {
646 return Some(cached.clone());
647 }
648 }
649
650 if !insert_on_miss {
654 return None;
655 }
656
657 let path_str = file_path.to_str()?;
659 let ast_result = if self.use_solc {
660 let foundry_cfg = self.foundry_config_for_file(&file_path).await;
661 match crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client)).await {
662 Ok(data) => Ok(data),
663 Err(_) => self.compiler.ast(path_str).await,
664 }
665 } else {
666 self.compiler.ast(path_str).await
667 };
668 match ast_result {
669 Ok(data) => {
670 let build = Arc::new(goto::CachedBuild::new(data, 0));
673 let mut cache = self.ast_cache.write().await;
674 cache.insert(uri_str.clone(), build.clone());
675 Some(build)
676 }
677 Err(e) => {
678 self.client
679 .log_message(MessageType::ERROR, format!("failed to get AST: {e}"))
680 .await;
681 None
682 }
683 }
684 }
685
686 async fn get_source_bytes(&self, uri: &Url, file_path: &std::path::Path) -> Option<Vec<u8>> {
689 {
690 let text_cache = self.text_cache.read().await;
691 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
692 return Some(content.as_bytes().to_vec());
693 }
694 }
695 match std::fs::read(file_path) {
696 Ok(bytes) => Some(bytes),
697 Err(e) => {
698 if e.kind() == std::io::ErrorKind::NotFound {
699 self.client
702 .log_message(
703 MessageType::INFO,
704 format!("file not found yet (transient): {e}"),
705 )
706 .await;
707 } else {
708 self.client
709 .log_message(MessageType::ERROR, format!("failed to read file: {e}"))
710 .await;
711 }
712 None
713 }
714 }
715 }
716}
717
/// Whether the client has enabled rewriting `import` statements when a
/// file is deleted (`fileOperations.updateImportsOnDelete` in settings).
fn update_imports_on_delete_enabled(settings: &crate::config::Settings) -> bool {
    settings.file_operations.update_imports_on_delete
}
721
/// Request a cache-sync pass and try to claim the single worker slot.
///
/// The `pending` flag is raised unconditionally; the return value is `true`
/// only for the caller that flipped `running` from `false` to `true` and is
/// therefore responsible for spawning the worker. All other callers get
/// `false` and rely on the running worker observing `pending`.
fn start_or_mark_project_cache_sync_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    pending.store(true, Ordering::Release);
    let claim = running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
731
/// Atomically lower the sync "work pending" flag; `true` means a pass was
/// requested since the worker last checked.
fn take_project_cache_sync_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    pending.swap(false, Ordering::AcqRel)
}
735
/// Worker shutdown handshake for the sync task: release the worker slot,
/// then — if more work arrived in the meantime — try to reclaim it.
///
/// Returns `true` when this worker should loop again (it re-won the slot);
/// `false` when it should exit (no pending work, or a racing producer
/// claimed the slot and will spawn a fresh worker).
fn stop_project_cache_sync_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    running.store(false, Ordering::Release);
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
746
/// Atomically claim the dirty flag: returns `true` for exactly one caller
/// when the cache was marked dirty, clearing the flag in the same step.
fn try_claim_project_cache_dirty(dirty: &std::sync::atomic::AtomicBool) -> bool {
    matches!(
        dirty.compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire),
        Ok(_)
    )
}
752
/// Upsert-worker twin of `start_or_mark_project_cache_sync_pending`: always
/// raise `pending`, and return `true` only to the single caller that wins
/// the `running` claim and must spawn the upsert worker.
fn start_or_mark_project_cache_upsert_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    pending.store(true, Ordering::Release);
    matches!(
        running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire),
        Ok(_)
    )
}
762
/// Consume the upsert "work pending" flag, reporting whether another pass
/// was requested while the worker ran.
fn take_project_cache_upsert_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    pending.swap(false, Ordering::AcqRel)
}
766
/// Shutdown handshake for the upsert worker: release the slot, then reclaim
/// it only when more work arrived and no racing producer took it first.
/// `true` = keep looping in this worker; `false` = exit.
fn stop_project_cache_upsert_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Release first so a producer can hand the slot to a fresh worker.
    running.store(false, Ordering::Release);
    let more_work = pending.load(Ordering::Acquire);
    more_work
        && running
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_ok()
}
777
/// Normalize `path` purely lexically: drop `.` components and resolve `..`
/// against the prefix built so far, never touching the filesystem.
///
/// Like any lexical normalization this can disagree with the real filesystem
/// when symlinks are involved; `..` at the start of a relative path is
/// silently dropped (PathBuf::pop on an empty buffer is a no-op).
fn lexical_normalize(path: &Path) -> PathBuf {
    path.components().fold(PathBuf::new(), |mut acc, comp| {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => {
                acc.pop();
            }
            Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
                acc.push(comp.as_os_str());
            }
        }
        acc
    })
}
793
794fn resolve_import_spec_to_abs(
795 project_root: &Path,
796 importer_abs: &Path,
797 import_path: &str,
798 remappings: &[String],
799) -> Option<PathBuf> {
800 if import_path.starts_with("./") || import_path.starts_with("../") {
801 let base = importer_abs.parent()?;
802 return Some(lexical_normalize(&base.join(import_path)));
803 }
804
805 for remap in remappings {
806 let mut it = remap.splitn(2, '=');
807 let prefix = it.next().unwrap_or_default();
808 let target = it.next().unwrap_or_default();
809 if prefix.is_empty() || target.is_empty() {
810 continue;
811 }
812 if import_path.starts_with(prefix) {
813 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
814 return Some(lexical_normalize(&project_root.join(format!("{target}{suffix}"))));
815 }
816 }
817
818 Some(lexical_normalize(&project_root.join(import_path)))
819}
820
/// Compute the set of source files affected by edits to `changed_abs`.
///
/// Builds a reverse import graph (imported path -> set of importers) by
/// scanning the import statements of every discovered source file, then
/// walks it breadth-first from the changed files so transitive importers are
/// included. Paths outside `config.root` are ignored, and the result is
/// restricted to files that are both discovered sources and still on disk.
fn compute_reverse_import_closure(
    config: &FoundryConfig,
    changed_abs: &[PathBuf],
    remappings: &[String],
) -> HashSet<PathBuf> {
    let source_files = crate::solc::discover_source_files(config);
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    // Edge construction: resolve each import spec to an absolute path and
    // record the importer as a dependent of that path. Unreadable files
    // (e.g. deleted mid-scan) are skipped.
    for importer in &source_files {
        let Ok(bytes) = std::fs::read(importer) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            let Some(imported_abs) =
                resolve_import_spec_to_abs(&config.root, importer, &imp.path, remappings)
            else {
                continue;
            };
            if !imported_abs.starts_with(&config.root) {
                continue;
            }
            reverse_edges
                .entry(imported_abs)
                .or_default()
                .insert(importer.clone());
        }
    }

    // BFS over reverse edges starting from the changed files themselves.
    let mut affected: HashSet<PathBuf> = HashSet::new();
    let mut queue: std::collections::VecDeque<PathBuf> = std::collections::VecDeque::new();

    for path in changed_abs {
        if !path.starts_with(&config.root) {
            continue;
        }
        // Normalize so queue entries match the normalized edge keys.
        let normalized = lexical_normalize(path);
        if affected.insert(normalized.clone()) {
            queue.push_back(normalized);
        }
    }

    while let Some(current) = queue.pop_front() {
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                if affected.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    // Keep only real, discovered source files; deleted files drop out here.
    let source_set: HashSet<PathBuf> = source_files.into_iter().collect();
    affected
        .into_iter()
        .filter(|p| source_set.contains(p) && p.is_file())
        .collect()
}
879
/// Extract the trailing file-id segment of a `start:length:file` source
/// span; `None` when the segment after the last `:` is empty.
fn src_file_id(src: &str) -> Option<&str> {
    match src.rsplit(':').next() {
        Some(id) if !id.is_empty() => Some(id),
        _ => None,
    }
}
883
884fn remap_src_file_id(src: &str, id_remap: &HashMap<String, String>) -> String {
885 let Some(old_id) = src_file_id(src) else {
886 return src.to_string();
887 };
888 let Some(new_id) = id_remap.get(old_id) else {
889 return src.to_string();
890 };
891 if new_id == old_id {
892 return src.to_string();
893 }
894 let prefix_len = src.len().saturating_sub(old_id.len());
895 format!("{}{}", &src[..prefix_len], new_id)
896}
897
/// Rewrite every `start:length:file` location stored on `info` so its file
/// id goes through `id_remap` — used when splicing a scoped rebuild into the
/// project-wide cache, where file ids must be re-canonicalized.
fn remap_node_info_file_ids(info: &mut goto::NodeInfo, id_remap: &HashMap<String, String>) {
    info.src = remap_src_file_id(&info.src, id_remap);
    if let Some(loc) = info.name_location.as_mut() {
        *loc = remap_src_file_id(loc, id_remap);
    }
    for loc in &mut info.name_locations {
        *loc = remap_src_file_id(loc, id_remap);
    }
    if let Some(loc) = info.member_location.as_mut() {
        *loc = remap_src_file_id(loc, id_remap);
    }
}
910
/// Extract the source-path part of a `DocKey` when the key embeds one.
///
/// Contract/state-var/method keys split on the first `:` into `path:rest`;
/// free functions and events carry no path and yield `None`.
/// NOTE(review): key shape inferred from this split; confirm against the
/// hover module's key construction.
fn doc_key_path(key: &hover::DocKey) -> Option<&str> {
    match key {
        hover::DocKey::Contract(k) | hover::DocKey::StateVar(k) | hover::DocKey::Method(k) => {
            k.split_once(':').map(|(path, _)| path)
        }
        hover::DocKey::Func(_) | hover::DocKey::Event(_) => None,
    }
}
919
/// Splice a file-scoped rebuild (`scoped`) into the project-wide cache
/// (`existing`), returning how many source files were replaced.
///
/// The scoped build's compiler-assigned file ids are remapped onto the
/// existing build's canonical ids (minting fresh numeric ids for new files),
/// then every index in `existing` has its entries for the affected files
/// evicted before the scoped entries are merged in. Fails only when a decl
/// id from the scoped build collides with an id that `existing` attributes
/// to an UNaffected file — merging then would corrupt the decl index.
fn merge_scoped_cached_build(
    existing: &mut goto::CachedBuild,
    mut scoped: goto::CachedBuild,
) -> Result<usize, String> {
    // Affected files, by both the (relative) node-map key and absolute path.
    let affected_paths: HashSet<String> = scoped.nodes.keys().cloned().collect();
    if affected_paths.is_empty() {
        return Ok(0);
    }
    let affected_abs_paths: HashSet<String> = scoped.path_to_abs.values().cloned().collect();

    // Safety check: refuse to merge when a scoped decl id is already owned
    // by a file outside the affected set.
    for scoped_id in scoped.decl_index.keys() {
        if existing.decl_index.contains_key(scoped_id)
            && let Some(path) = existing.node_id_to_source_path.get(scoped_id)
            && !affected_abs_paths.contains(path)
        {
            return Err(format!(
                "decl id collision for id={} in unaffected path {}",
                scoped_id, path
            ));
        }
    }

    // Canonical file-id table: path -> the id `existing` already uses.
    let mut path_to_existing_id: HashMap<String, String> = HashMap::new();
    for (id, path) in &existing.id_to_path_map {
        path_to_existing_id
            .entry(path.clone())
            .or_insert_with(|| id.clone());
    }
    let mut used_ids: HashSet<String> = existing.id_to_path_map.keys().cloned().collect();
    // Next candidate when minting ids for files `existing` has never seen.
    let mut next_id = used_ids
        .iter()
        .filter_map(|k| k.parse::<u64>().ok())
        .max()
        .unwrap_or(0)
        .saturating_add(1);

    // scoped file id -> canonical id (existing id, or a freshly minted one).
    let mut id_remap: HashMap<String, String> = HashMap::new();
    for (scoped_id, path) in &scoped.id_to_path_map {
        let canonical = if let Some(id) = path_to_existing_id.get(path) {
            id.clone()
        } else {
            // Loop guards against non-numeric ids already occupying the
            // numeric candidates.
            let id = loop {
                let candidate = next_id.to_string();
                next_id = next_id.saturating_add(1);
                if used_ids.insert(candidate.clone()) {
                    break candidate;
                }
            };
            path_to_existing_id.insert(path.clone(), id.clone());
            id
        };
        id_remap.insert(scoped_id.clone(), canonical);
    }

    // Rewrite every src span inside the scoped build to canonical file ids.
    for file_nodes in scoped.nodes.values_mut() {
        for info in file_nodes.values_mut() {
            remap_node_info_file_ids(info, &id_remap);
        }
    }
    let scoped_external_refs: HashMap<String, crate::types::NodeId> = scoped
        .external_refs
        .into_iter()
        .map(|(src, decl_id)| (remap_src_file_id(&src, &id_remap), decl_id))
        .collect();

    // Evict everything `existing` knows about the affected files. The
    // pre-eviction id->path snapshot is needed to attribute external refs.
    let old_id_to_path = existing.id_to_path_map.clone();
    existing.external_refs.retain(|src, _| {
        src_file_id(src)
            .and_then(|fid| old_id_to_path.get(fid))
            .map(|path| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing.nodes.retain(|path, _| !affected_paths.contains(path));
    existing.path_to_abs.retain(|path, _| !affected_paths.contains(path));
    existing
        .id_to_path_map
        .retain(|_, path| !affected_paths.contains(path));

    // Order matters: node_id_to_source_path must be pruned first, so the
    // decl_index retain below sees only surviving attributions.
    existing
        .node_id_to_source_path
        .retain(|_, path| !affected_abs_paths.contains(path));
    existing
        .decl_index
        .retain(|id, _| match existing.node_id_to_source_path.get(id) {
            Some(path) => !affected_abs_paths.contains(path),
            None => true,
        });
    existing
        .hint_index
        .retain(|abs_path, _| !affected_abs_paths.contains(abs_path));
    existing.gas_index.retain(|k, _| {
        k.split_once(':')
            .map(|(path, _)| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing
        .doc_index
        .retain(|k, _| doc_key_path(k).map(|p| !affected_paths.contains(p)).unwrap_or(true));

    // Merge the (already remapped) scoped entries in.
    existing.nodes.extend(scoped.nodes);
    existing.path_to_abs.extend(scoped.path_to_abs);
    existing.external_refs.extend(scoped_external_refs);
    for (old_id, path) in scoped.id_to_path_map {
        let canonical = id_remap.get(&old_id).cloned().unwrap_or(old_id);
        existing.id_to_path_map.insert(canonical, path);
    }
    existing.decl_index.extend(scoped.decl_index);
    existing
        .node_id_to_source_path
        .extend(scoped.node_id_to_source_path);
    existing.gas_index.extend(scoped.gas_index);
    existing.hint_index.extend(scoped.hint_index);
    existing.doc_index.extend(scoped.doc_index);

    Ok(affected_paths.len())
}
1039
1040#[tower_lsp::async_trait]
1041impl LanguageServer for ForgeLsp {
1042 async fn initialize(
1043 &self,
1044 params: InitializeParams,
1045 ) -> tower_lsp::jsonrpc::Result<InitializeResult> {
1046 {
1048 let mut caps = self.client_capabilities.write().await;
1049 *caps = Some(params.capabilities.clone());
1050 }
1051
1052 if let Some(init_opts) = ¶ms.initialization_options {
1054 let s = config::parse_settings(init_opts);
1055 self.client
1056 .log_message(
1057 MessageType::INFO,
1058 format!(
1059 "settings: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}, projectIndex.incrementalEditReindexThreshold={}",
1060 s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex, s.project_index.incremental_edit_reindex_threshold,
1061 ),
1062 )
1063 .await;
1064 let mut settings = self.settings.write().await;
1065 *settings = s;
1066 }
1067
1068 if let Some(uri) = params.root_uri.as_ref() {
1070 let mut root = self.root_uri.write().await;
1071 *root = Some(uri.clone());
1072 }
1073
1074 if let Some(root_uri) = params
1076 .root_uri
1077 .as_ref()
1078 .and_then(|uri| uri.to_file_path().ok())
1079 {
1080 let lint_cfg = config::load_lint_config(&root_uri);
1081 self.client
1082 .log_message(
1083 MessageType::INFO,
1084 format!(
1085 "loaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
1086 lint_cfg.lint_on_build,
1087 lint_cfg.ignore_patterns.len()
1088 ),
1089 )
1090 .await;
1091 let mut config = self.lint_config.write().await;
1092 *config = lint_cfg;
1093
1094 let foundry_cfg = config::load_foundry_config(&root_uri);
1095 self.client
1096 .log_message(
1097 MessageType::INFO,
1098 format!(
1099 "loaded foundry.toml project config: solc_version={:?}, remappings={}",
1100 foundry_cfg.solc_version,
1101 foundry_cfg.remappings.len()
1102 ),
1103 )
1104 .await;
1105 if foundry_cfg.via_ir {
1106 self.client
1107 .log_message(
1108 MessageType::WARNING,
1109 "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
1110 )
1111 .await;
1112 }
1113 let mut fc = self.foundry_config.write().await;
1114 *fc = foundry_cfg;
1115 }
1116
1117 let client_encodings = params
1119 .capabilities
1120 .general
1121 .as_ref()
1122 .and_then(|g| g.position_encodings.as_deref());
1123 let encoding = utils::PositionEncoding::negotiate(client_encodings);
1124 utils::set_encoding(encoding);
1125
1126 Ok(InitializeResult {
1127 server_info: Some(ServerInfo {
1128 name: "Solidity Language Server".to_string(),
1129 version: Some(env!("LONG_VERSION").to_string()),
1130 }),
1131 capabilities: ServerCapabilities {
1132 position_encoding: Some(encoding.into()),
1133 completion_provider: Some(CompletionOptions {
1134 trigger_characters: Some(vec![".".to_string()]),
1135 resolve_provider: Some(false),
1136 ..Default::default()
1137 }),
1138 signature_help_provider: Some(SignatureHelpOptions {
1139 trigger_characters: Some(vec![
1140 "(".to_string(),
1141 ",".to_string(),
1142 "[".to_string(),
1143 ]),
1144 retrigger_characters: None,
1145 work_done_progress_options: WorkDoneProgressOptions {
1146 work_done_progress: None,
1147 },
1148 }),
1149 definition_provider: Some(OneOf::Left(true)),
1150 declaration_provider: Some(DeclarationCapability::Simple(true)),
1151 references_provider: Some(OneOf::Left(true)),
1152 rename_provider: Some(OneOf::Right(RenameOptions {
1153 prepare_provider: Some(true),
1154 work_done_progress_options: WorkDoneProgressOptions {
1155 work_done_progress: Some(true),
1156 },
1157 })),
1158 workspace_symbol_provider: Some(OneOf::Left(true)),
1159 document_symbol_provider: Some(OneOf::Left(true)),
1160 document_highlight_provider: Some(OneOf::Left(true)),
1161 hover_provider: Some(HoverProviderCapability::Simple(true)),
1162 document_link_provider: Some(DocumentLinkOptions {
1163 resolve_provider: Some(false),
1164 work_done_progress_options: WorkDoneProgressOptions {
1165 work_done_progress: None,
1166 },
1167 }),
1168 document_formatting_provider: Some(OneOf::Left(true)),
1169 code_lens_provider: None,
1170 folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
1171 selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
1172 inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
1173 InlayHintOptions {
1174 resolve_provider: Some(false),
1175 work_done_progress_options: WorkDoneProgressOptions {
1176 work_done_progress: None,
1177 },
1178 },
1179 ))),
1180 semantic_tokens_provider: Some(
1181 SemanticTokensServerCapabilities::SemanticTokensOptions(
1182 SemanticTokensOptions {
1183 legend: semantic_tokens::legend(),
1184 full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
1185 range: Some(true),
1186 work_done_progress_options: WorkDoneProgressOptions {
1187 work_done_progress: None,
1188 },
1189 },
1190 ),
1191 ),
1192 text_document_sync: Some(TextDocumentSyncCapability::Options(
1193 TextDocumentSyncOptions {
1194 will_save: Some(true),
1195 will_save_wait_until: None,
1196 open_close: Some(true),
1197 save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
1198 include_text: Some(true),
1199 })),
1200 change: Some(TextDocumentSyncKind::FULL),
1201 },
1202 )),
1203 workspace: Some(WorkspaceServerCapabilities {
1204 workspace_folders: None,
1205 file_operations: Some(WorkspaceFileOperationsServerCapabilities {
1206 will_rename: Some(FileOperationRegistrationOptions {
1207 filters: vec![
1208 FileOperationFilter {
1210 scheme: Some("file".to_string()),
1211 pattern: FileOperationPattern {
1212 glob: "**/*.sol".to_string(),
1213 matches: Some(FileOperationPatternKind::File),
1214 options: None,
1215 },
1216 },
1217 FileOperationFilter {
1219 scheme: Some("file".to_string()),
1220 pattern: FileOperationPattern {
1221 glob: "**".to_string(),
1222 matches: Some(FileOperationPatternKind::Folder),
1223 options: None,
1224 },
1225 },
1226 ],
1227 }),
1228 did_rename: Some(FileOperationRegistrationOptions {
1229 filters: vec![
1230 FileOperationFilter {
1231 scheme: Some("file".to_string()),
1232 pattern: FileOperationPattern {
1233 glob: "**/*.sol".to_string(),
1234 matches: Some(FileOperationPatternKind::File),
1235 options: None,
1236 },
1237 },
1238 FileOperationFilter {
1239 scheme: Some("file".to_string()),
1240 pattern: FileOperationPattern {
1241 glob: "**".to_string(),
1242 matches: Some(FileOperationPatternKind::Folder),
1243 options: None,
1244 },
1245 },
1246 ],
1247 }),
1248 will_delete: Some(FileOperationRegistrationOptions {
1249 filters: vec![
1250 FileOperationFilter {
1251 scheme: Some("file".to_string()),
1252 pattern: FileOperationPattern {
1253 glob: "**/*.sol".to_string(),
1254 matches: Some(FileOperationPatternKind::File),
1255 options: None,
1256 },
1257 },
1258 FileOperationFilter {
1259 scheme: Some("file".to_string()),
1260 pattern: FileOperationPattern {
1261 glob: "**".to_string(),
1262 matches: Some(FileOperationPatternKind::Folder),
1263 options: None,
1264 },
1265 },
1266 ],
1267 }),
1268 did_delete: Some(FileOperationRegistrationOptions {
1269 filters: vec![
1270 FileOperationFilter {
1271 scheme: Some("file".to_string()),
1272 pattern: FileOperationPattern {
1273 glob: "**/*.sol".to_string(),
1274 matches: Some(FileOperationPatternKind::File),
1275 options: None,
1276 },
1277 },
1278 FileOperationFilter {
1279 scheme: Some("file".to_string()),
1280 pattern: FileOperationPattern {
1281 glob: "**".to_string(),
1282 matches: Some(FileOperationPatternKind::Folder),
1283 options: None,
1284 },
1285 },
1286 ],
1287 }),
1288 will_create: Some(FileOperationRegistrationOptions {
1289 filters: vec![FileOperationFilter {
1290 scheme: Some("file".to_string()),
1291 pattern: FileOperationPattern {
1292 glob: "**/*.sol".to_string(),
1293 matches: Some(FileOperationPatternKind::File),
1294 options: None,
1295 },
1296 }],
1297 }),
1298 did_create: Some(FileOperationRegistrationOptions {
1299 filters: vec![FileOperationFilter {
1300 scheme: Some("file".to_string()),
1301 pattern: FileOperationPattern {
1302 glob: "**/*.sol".to_string(),
1303 matches: Some(FileOperationPatternKind::File),
1304 options: None,
1305 },
1306 }],
1307 }),
1308 ..Default::default()
1309 }),
1310 }),
1311 ..ServerCapabilities::default()
1312 },
1313 })
1314 }
1315
    /// Post-`initialize` hook. Three independent steps:
    /// 1. log readiness;
    /// 2. if the client supports dynamic registration, register watchers for
    ///    `foundry.toml` and `remappings.txt` so config changes are pushed to us;
    /// 3. with solc + `projectIndex.fullProjectScan` enabled, spawn a background
    ///    task that eagerly indexes the project: persisted reference cache
    ///    first, then a full solc index, reporting LSP work-done progress.
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "lsp server initialized.")
            .await;

        // Dynamic registration is only legal when the client advertised
        // `workspace.didChangeWatchedFiles.dynamicRegistration`.
        let supports_dynamic = self
            .client_capabilities
            .read()
            .await
            .as_ref()
            .and_then(|caps| caps.workspace.as_ref())
            .and_then(|ws| ws.did_change_watched_files.as_ref())
            .and_then(|dcwf| dcwf.dynamic_registration)
            .unwrap_or(false);

        if supports_dynamic {
            let registration = Registration {
                id: "foundry-toml-watcher".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(
                    serde_json::to_value(DidChangeWatchedFilesRegistrationOptions {
                        // Watch the two files that drive project configuration.
                        watchers: vec![
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/foundry.toml".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/remappings.txt".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                        ],
                    })
                    .unwrap(),
                ),
            };

            if let Err(e) = self.client.register_capability(vec![registration]).await {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("failed to register foundry.toml watcher: {e}"),
                    )
                    .await;
            } else {
                self.client
                    .log_message(MessageType::INFO, "registered foundry.toml file watcher")
                    .await;
            }
        }

        if self.use_solc && self.settings.read().await.project_index.full_project_scan {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            // Flag the eager index as started so other code paths don't repeat it.
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let root_uri = self.root_uri.read().await.clone();
            // The project-wide build is keyed in ast_cache by the root URI string.
            let cache_key = root_uri.as_ref().map(|u| u.to_string());
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();

            tokio::spawn(async move {
                // Without a workspace root there is nothing to key the cache on.
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping eager index",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Surface progress through the LSP work-done-progress protocol.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the persisted reference cache first; loading/hashing is
                // blocking work, so run it off the async executor.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index (eager): cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            if report.complete {
                                // Cache fully covers the project: close the
                                // progress report and skip the solc index.
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                            WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            },
                                        )),
                                    })
                                    .await;
                                return;
                            }
                        }

                        // Miss or partial hit: log why, then fall through to a
                        // full index below.
                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index (eager): cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index (eager): cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Full eager index through solc.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                        let source_count = cached_build.nodes.len();
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index (eager): cached {} source files",
                                    source_count
                                ),
                            )
                            .await;

                        // Persist the fresh build in the background so the
                        // next startup can hit the cache path above.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index (eager): cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index (eager): failed to persist cache: {e}"
                                            ),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index (eager): cache save task failed: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index (eager): failed: {e}"),
                            )
                            .await;

                        // Always close the progress indicator, even on failure.
                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token,
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!("Index failed: {e}")),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
1593
1594 async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
1595 self.client
1596 .log_message(MessageType::INFO, "lsp server shutting down.")
1597 .await;
1598 Ok(())
1599 }
1600
1601 async fn did_open(&self, params: DidOpenTextDocumentParams) {
1602 self.client
1603 .log_message(MessageType::INFO, "file opened")
1604 .await;
1605
1606 let mut td = params.text_document;
1607 let template_on_create = self
1608 .settings
1609 .read()
1610 .await
1611 .file_operations
1612 .template_on_create;
1613
1614 let should_attempt_scaffold = template_on_create
1617 && td.text.chars().all(|ch| ch.is_whitespace())
1618 && td.uri.scheme() == "file"
1619 && td
1620 .uri
1621 .to_file_path()
1622 .ok()
1623 .and_then(|p| p.extension().map(|e| e == "sol"))
1624 .unwrap_or(false);
1625
1626 if should_attempt_scaffold {
1627 let uri_str = td.uri.to_string();
1628 let create_flow_pending = {
1629 let pending = self.pending_create_scaffold.read().await;
1630 pending.contains(&uri_str)
1631 };
1632 if create_flow_pending {
1633 self.client
1634 .log_message(
1635 MessageType::INFO,
1636 format!(
1637 "didOpen: skip scaffold for {} (didCreateFiles scaffold pending)",
1638 uri_str
1639 ),
1640 )
1641 .await;
1642 } else {
1643 let cache_has_content = {
1644 let tc = self.text_cache.read().await;
1645 tc.get(&uri_str)
1646 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()))
1647 };
1648
1649 if !cache_has_content {
1650 let file_has_content = td.uri.to_file_path().ok().is_some_and(|p| {
1651 std::fs::read_to_string(&p)
1652 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()))
1653 });
1654
1655 if !file_has_content {
1656 let solc_version = self.foundry_config.read().await.solc_version.clone();
1657 if let Some(scaffold) =
1658 file_operations::generate_scaffold(&td.uri, solc_version.as_deref())
1659 {
1660 let end = utils::byte_offset_to_position(&td.text, td.text.len());
1661 let edit = WorkspaceEdit {
1662 changes: Some(HashMap::from([(
1663 td.uri.clone(),
1664 vec![TextEdit {
1665 range: Range {
1666 start: Position::default(),
1667 end,
1668 },
1669 new_text: scaffold.clone(),
1670 }],
1671 )])),
1672 document_changes: None,
1673 change_annotations: None,
1674 };
1675 if self
1676 .client
1677 .apply_edit(edit)
1678 .await
1679 .as_ref()
1680 .is_ok_and(|r| r.applied)
1681 {
1682 td.text = scaffold;
1683 self.client
1684 .log_message(
1685 MessageType::INFO,
1686 format!("didOpen: scaffolded empty file {}", uri_str),
1687 )
1688 .await;
1689 }
1690 }
1691 }
1692 }
1693 }
1694 }
1695
1696 self.on_change(td).await
1697 }
1698
1699 async fn did_change(&self, params: DidChangeTextDocumentParams) {
1700 self.client
1701 .log_message(MessageType::INFO, "file changed")
1702 .await;
1703
1704 if let Some(change) = params.content_changes.into_iter().next() {
1706 let has_substantive_content = change.text.chars().any(|ch| !ch.is_whitespace());
1707 let mut text_cache = self.text_cache.write().await;
1708 text_cache.insert(
1709 params.text_document.uri.to_string(),
1710 (params.text_document.version, change.text),
1711 );
1712 drop(text_cache);
1713
1714 if has_substantive_content {
1715 self.pending_create_scaffold
1716 .write()
1717 .await
1718 .remove(params.text_document.uri.as_str());
1719 }
1720 }
1721 }
1722
    /// Handles `textDocument/didSave`.
    ///
    /// Pipeline:
    /// 1. Recover the saved text: notification payload first (the server
    ///    registers `includeText: true`), then the text cache, then disk.
    /// 2. If a create-flow scaffold is still pending and the file was saved
    ///    blank, re-apply the scaffold ("recovery" path).
    /// 3. Record the saved path for the cache workers and re-run diagnostics
    ///    via `on_change`.
    /// 4. Optionally spawn two debounced background workers: a v2 reference
    ///    cache upsert (350ms debounce) and a project cache sync (700ms) that
    ///    tries an import-closure-scoped reindex before a full rebuild.
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        self.client
            .log_message(MessageType::INFO, "file saved")
            .await;

        // Step 1: recover the saved content, in decreasing order of freshness.
        let mut text_content = if let Some(text) = params.text {
            text
        } else {
            let cached = {
                let text_cache = self.text_cache.read().await;
                text_cache
                    .get(params.text_document.uri.as_str())
                    .map(|(_, content)| content.clone())
            };
            if let Some(content) = cached {
                content
            } else {
                match std::fs::read_to_string(params.text_document.uri.path()) {
                    Ok(content) => content,
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::ERROR,
                                format!("Failed to read file on save: {e}"),
                            )
                            .await;
                        return;
                    }
                }
            }
        };

        let uri_str = params.text_document.uri.to_string();
        let template_on_create = self
            .settings
            .read()
            .await
            .file_operations
            .template_on_create;
        // Step 2: a scaffold promised by didCreateFiles may not have landed;
        // if the file was saved still blank, re-apply it here.
        let needs_recover_scaffold = {
            let pending = self.pending_create_scaffold.read().await;
            template_on_create
                && pending.contains(&uri_str)
                && !text_content.chars().any(|ch| !ch.is_whitespace())
        };
        if needs_recover_scaffold {
            let solc_version = self.foundry_config.read().await.solc_version.clone();
            if let Some(scaffold) = file_operations::generate_scaffold(
                &params.text_document.uri,
                solc_version.as_deref(),
            ) {
                // Whole-document replacement edit.
                let end = utils::byte_offset_to_position(&text_content, text_content.len());
                let edit = WorkspaceEdit {
                    changes: Some(HashMap::from([(
                        params.text_document.uri.clone(),
                        vec![TextEdit {
                            range: Range {
                                start: Position::default(),
                                end,
                            },
                            new_text: scaffold.clone(),
                        }],
                    )])),
                    document_changes: None,
                    change_annotations: None,
                };
                // Only commit local state once the client accepted the edit.
                if self
                    .client
                    .apply_edit(edit)
                    .await
                    .as_ref()
                    .is_ok_and(|r| r.applied)
                {
                    text_content = scaffold.clone();
                    let version = self
                        .text_cache
                        .read()
                        .await
                        .get(params.text_document.uri.as_str())
                        .map(|(v, _)| *v)
                        .unwrap_or_default();
                    self.text_cache
                        .write()
                        .await
                        .insert(uri_str.clone(), (version, scaffold));
                    self.pending_create_scaffold.write().await.remove(&uri_str);
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("didSave: recovered scaffold for {}", uri_str),
                        )
                        .await;
                }
            }
        }

        let version = self
            .text_cache
            .read()
            .await
            .get(params.text_document.uri.as_str())
            .map(|(version, _)| *version)
            .unwrap_or_default();

        // Step 3: record the saved file for both background cache workers.
        let saved_uri = params.text_document.uri.clone();
        if let Ok(saved_file_path) = saved_uri.to_file_path() {
            let saved_abs = saved_file_path.to_string_lossy().to_string();
            self.project_cache_changed_files
                .write()
                .await
                .insert(saved_abs.clone());
            self.project_cache_upsert_files
                .write()
                .await
                .insert(saved_abs);
        }
        // Re-run diagnostics on the saved content.
        self.on_change(TextDocumentItem {
            uri: saved_uri.clone(),
            text: text_content,
            version,
            language_id: "".to_string(),
        })
        .await;

        let settings_snapshot = self.settings.read().await.clone();
        // Step 4a: debounced v2 reference-cache upsert — only for cache modes
        // that support incremental upserts.
        if self.use_solc
            && settings_snapshot.project_index.full_project_scan
            && matches!(
                settings_snapshot.project_index.cache_mode,
                crate::config::ProjectIndexCacheMode::V2 | crate::config::ProjectIndexCacheMode::Auto
            )
        {
            // Returns true only for the call that actually starts the worker;
            // concurrent saves just mark more work as pending.
            if start_or_mark_project_cache_upsert_pending(
                &self.project_cache_upsert_pending,
                &self.project_cache_upsert_running,
            ) {
                let upsert_files = self.project_cache_upsert_files.clone();
                let ast_cache = self.ast_cache.clone();
                let client = self.client.clone();
                let running_flag = self.project_cache_upsert_running.clone();
                let pending_flag = self.project_cache_upsert_pending.clone();

                tokio::spawn(async move {
                    loop {
                        // Debounce bursts of saves.
                        tokio::time::sleep(std::time::Duration::from_millis(350)).await;

                        if !take_project_cache_upsert_pending(&pending_flag) {
                            // No pending work: shut down unless work raced in
                            // between the check and releasing the running flag.
                            if stop_project_cache_upsert_worker_or_reclaim(
                                &pending_flag,
                                &running_flag,
                            ) {
                                continue;
                            }
                            break;
                        }

                        let changed_paths: Vec<String> = {
                            let mut paths = upsert_files.write().await;
                            paths.drain().collect()
                        };
                        if changed_paths.is_empty() {
                            continue;
                        }

                        // Pair each changed file with a cached build that
                        // actually contains it, plus that file's project config.
                        let mut work_items: Vec<(crate::config::FoundryConfig, crate::goto::CachedBuild)> =
                            Vec::new();
                        {
                            let cache = ast_cache.read().await;
                            for abs_str in changed_paths {
                                let path = PathBuf::from(&abs_str);
                                let Ok(uri) = Url::from_file_path(&path) else {
                                    continue;
                                };
                                let uri_key = uri.to_string();
                                let Some(build) = cache.get(&uri_key).cloned() else {
                                    continue;
                                };
                                if !build.nodes.contains_key(&abs_str) {
                                    continue;
                                }
                                let cfg = crate::config::load_foundry_config(&path);
                                work_items.push((cfg, (*build).clone()));
                            }
                        }

                        if work_items.is_empty() {
                            continue;
                        }

                        // Disk-heavy upserts run off the async executor.
                        let res = tokio::task::spawn_blocking(move || {
                            let mut total_files = 0usize;
                            let mut total_ms = 0u128;
                            let mut failures: Vec<String> = Vec::new();
                            for (cfg, build) in work_items {
                                match crate::project_cache::upsert_reference_cache_v2_with_report(
                                    &cfg, &build,
                                ) {
                                    Ok(report) => {
                                        total_files += report.file_count_hashed;
                                        total_ms += report.duration_ms;
                                    }
                                    Err(e) => failures.push(e),
                                }
                            }
                            (total_files, total_ms, failures)
                        })
                        .await;

                        match res {
                            Ok((total_files, total_ms, failures)) => {
                                if !failures.is_empty() {
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project cache v2 upsert: {} failure(s), first={}",
                                                failures.len(),
                                                failures[0]
                                            ),
                                        )
                                        .await;
                                } else {
                                    client
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project cache v2 upsert (debounced): touched_files={}, duration={}ms",
                                                total_files, total_ms
                                            ),
                                        )
                                        .await;
                                }
                            }
                            Err(e) => {
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!("project cache v2 upsert task failed: {e}"),
                                    )
                                    .await;
                            }
                        }
                    }
                });
            }
        }

        // Step 4b: debounced project cache sync — runs only while the cache is
        // flagged dirty.
        if self.use_solc
            && settings_snapshot.project_index.full_project_scan
            && self.project_cache_dirty.load(Ordering::Acquire)
        {
            if start_or_mark_project_cache_sync_pending(
                &self.project_cache_sync_pending,
                &self.project_cache_sync_running,
            ) {
                let foundry_config = self.foundry_config.read().await.clone();
                let root_key = self.root_uri.read().await.as_ref().map(|u| u.to_string());
                let ast_cache = self.ast_cache.clone();
                let text_cache = self.text_cache.clone();
                let client = self.client.clone();
                let dirty_flag = self.project_cache_dirty.clone();
                let running_flag = self.project_cache_sync_running.clone();
                let pending_flag = self.project_cache_sync_pending.clone();
                let changed_files = self.project_cache_changed_files.clone();
                let aggressive_scoped = settings_snapshot.project_index.incremental_edit_reindex;
                let aggressive_scoped_threshold = settings_snapshot
                    .project_index
                    .incremental_edit_reindex_threshold;

                tokio::spawn(async move {
                    loop {
                        // Longer debounce than the upsert worker: a full sync
                        // is much more expensive.
                        tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                        if !take_project_cache_sync_pending(&pending_flag) {
                            if stop_project_cache_sync_worker_or_reclaim(
                                &pending_flag,
                                &running_flag,
                            ) {
                                continue;
                            }
                            break;
                        }

                        // Claim the dirty flag; every failure path below
                        // restores it so a later save retries.
                        if !try_claim_project_cache_dirty(&dirty_flag) {
                            continue;
                        }

                        let Some(cache_key) = &root_key else {
                            dirty_flag.store(true, Ordering::Release);
                            continue;
                        };
                        if !foundry_config.root.is_dir() {
                            dirty_flag.store(true, Ordering::Release);
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!(
                                        "didSave cache sync: invalid project root {}, deferring",
                                        foundry_config.root.display()
                                    ),
                                )
                                .await;
                            continue;
                        }

                        let mut used_scoped_path = false;
                        let mut scoped_ok = false;

                        // Fast path: reindex only the reverse import closure of
                        // the changed files, if it is a small enough slice of
                        // the project.
                        if aggressive_scoped {
                            let changed_abs: Vec<PathBuf> = {
                                let mut changed = changed_files.write().await;
                                let drained = changed
                                    .drain()
                                    .map(PathBuf::from)
                                    .collect::<Vec<PathBuf>>();
                                drained
                            };
                            if !changed_abs.is_empty() {
                                used_scoped_path = true;
                                let remappings = crate::solc::resolve_remappings(&foundry_config).await;
                                let cfg_for_plan = foundry_config.clone();
                                let changed_for_plan = changed_abs.clone();
                                let remappings_for_plan = remappings.clone();
                                let plan_res = tokio::task::spawn_blocking(move || {
                                    compute_reverse_import_closure(
                                        &cfg_for_plan,
                                        &changed_for_plan,
                                        &remappings_for_plan,
                                    )
                                })
                                .await;

                                let affected_files = match plan_res {
                                    Ok(set) => set.into_iter().collect::<Vec<PathBuf>>(),
                                    Err(_) => Vec::new(),
                                };
                                let total_sources =
                                    crate::solc::discover_source_files(&foundry_config).len();

                                // The scoped path only pays off below this
                                // affected/total ratio.
                                let threshold = aggressive_scoped_threshold.clamp(0.0, 1.0);
                                let ratio = if total_sources > 0 {
                                    affected_files.len() as f64 / total_sources as f64
                                } else {
                                    1.0
                                };

                                if !affected_files.is_empty()
                                    && affected_files.len() < total_sources
                                    && ratio <= threshold
                                {
                                    client
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "didSave cache sync: aggressive scoped reindex (affected={}/{}, ratio={:.3}, threshold={:.3})",
                                                affected_files.len(),
                                                total_sources,
                                                ratio,
                                                threshold
                                            ),
                                        )
                                        .await;

                                    // Index against in-memory buffers so unsaved
                                    // edits in other files are still reflected.
                                    let text_cache_snapshot = text_cache.read().await.clone();
                                    match crate::solc::solc_project_index_scoped(
                                        &foundry_config,
                                        Some(&client),
                                        Some(&text_cache_snapshot),
                                        &affected_files,
                                    )
                                    .await
                                    {
                                        Ok(ast_data) => {
                                            let scoped_build =
                                                Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                                            let source_count = scoped_build.nodes.len();
                                            // Outcome of applying the scoped
                                            // build to the project-wide cache.
                                            enum ScopedApply {
                                                Merged { affected_count: usize },
                                                Stored,
                                                Failed(String),
                                            }
                                            let apply_outcome = {
                                                let mut cache = ast_cache.write().await;
                                                if let Some(existing) = cache.get(cache_key).cloned() {
                                                    // Merge into the existing build;
                                                    // a rejected merge falls back to
                                                    // the full reindex below.
                                                    let mut merged = (*existing).clone();
                                                    match merge_scoped_cached_build(
                                                        &mut merged,
                                                        (*scoped_build).clone(),
                                                    ) {
                                                        Ok(affected_count) => {
                                                            cache.insert(
                                                                cache_key.clone(),
                                                                Arc::new(merged),
                                                            );
                                                            ScopedApply::Merged { affected_count }
                                                        }
                                                        Err(e) => ScopedApply::Failed(e),
                                                    }
                                                } else {
                                                    cache.insert(cache_key.clone(), scoped_build);
                                                    ScopedApply::Stored
                                                }
                                            };

                                            match apply_outcome {
                                                ScopedApply::Merged { affected_count } => {
                                                    client
                                                        .log_message(
                                                            MessageType::INFO,
                                                            format!(
                                                                "didSave cache sync: scoped merge applied (scoped_sources={}, affected_paths={})",
                                                                source_count, affected_count
                                                            ),
                                                        )
                                                        .await;
                                                    scoped_ok = true;
                                                }
                                                ScopedApply::Stored => {
                                                    client
                                                        .log_message(
                                                            MessageType::INFO,
                                                            format!(
                                                                "didSave cache sync: scoped cache stored (scoped_sources={})",
                                                                source_count
                                                            ),
                                                        )
                                                        .await;
                                                    scoped_ok = true;
                                                }
                                                ScopedApply::Failed(e) => {
                                                    client
                                                        .log_message(
                                                            MessageType::WARNING,
                                                            format!(
                                                                "didSave cache sync: scoped merge rejected, falling back to full: {e}"
                                                            ),
                                                        )
                                                        .await;
                                                }
                                            }
                                        }
                                        Err(e) => {
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "didSave cache sync: scoped reindex failed, falling back to full: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                    }
                                } else if !affected_files.is_empty() {
                                    client
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "didSave cache sync: scoped reindex skipped by threshold/full-coverage (affected={}/{}, ratio={:.3}, threshold={:.3})",
                                                affected_files.len(),
                                                total_sources,
                                                ratio,
                                                threshold
                                            ),
                                        )
                                        .await;
                                }
                            }
                        }

                        if scoped_ok {
                            continue;
                        }

                        if used_scoped_path {
                            client
                                .log_message(
                                    MessageType::INFO,
                                    "didSave cache sync: falling back to full project reindex",
                                )
                                .await;
                        } else {
                            client
                                .log_message(
                                    MessageType::INFO,
                                    "didSave cache sync: rebuilding project index from disk",
                                )
                                .await;
                        }

                        // Slow path: full project reindex plus cache persist.
                        match crate::solc::solc_project_index(&foundry_config, Some(&client), None)
                            .await
                        {
                            Ok(ast_data) => {
                                let cached_build =
                                    Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                                let source_count = cached_build.nodes.len();
                                let build_for_save = (*cached_build).clone();
                                ast_cache.write().await.insert(cache_key.clone(), cached_build);

                                let cfg_for_save = foundry_config.clone();
                                let save_res = tokio::task::spawn_blocking(move || {
                                    crate::project_cache::save_reference_cache_with_report(
                                        &cfg_for_save,
                                        &build_for_save,
                                    )
                                })
                                .await;

                                match save_res {
                                    Ok(Ok(report)) => {
                                        // Full index covered everything; any
                                        // pending scoped-change bookkeeping is
                                        // now stale.
                                        changed_files.write().await.clear();
                                        client
                                            .log_message(
                                                MessageType::INFO,
                                                format!(
                                                    "didSave cache sync: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                    source_count, report.file_count_hashed, report.duration_ms
                                                ),
                                            )
                                            .await;
                                    }
                                    Ok(Err(e)) => {
                                        dirty_flag.store(true, Ordering::Release);
                                        client
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "didSave cache sync: persist failed, will retry: {e}"
                                                ),
                                            )
                                            .await;
                                    }
                                    Err(e) => {
                                        dirty_flag.store(true, Ordering::Release);
                                        client
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "didSave cache sync: save task failed, will retry: {e}"
                                                ),
                                            )
                                            .await;
                                    }
                                }
                            }
                            Err(e) => {
                                dirty_flag.store(true, Ordering::Release);
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!(
                                            "didSave cache sync: re-index failed, will retry: {e}"
                                        ),
                                    )
                                    .await;
                            }
                        }
                    }
                });
            }
        }
    }
2301
2302 async fn will_save(&self, params: WillSaveTextDocumentParams) {
2303 self.client
2304 .log_message(
2305 MessageType::INFO,
2306 format!(
2307 "file will save reason:{:?} {}",
2308 params.reason, params.text_document.uri
2309 ),
2310 )
2311 .await;
2312 }
2313
2314 async fn formatting(
2315 &self,
2316 params: DocumentFormattingParams,
2317 ) -> tower_lsp::jsonrpc::Result<Option<Vec<TextEdit>>> {
2318 self.client
2319 .log_message(MessageType::INFO, "formatting request")
2320 .await;
2321
2322 let uri = params.text_document.uri;
2323 let file_path = match uri.to_file_path() {
2324 Ok(path) => path,
2325 Err(_) => {
2326 self.client
2327 .log_message(MessageType::ERROR, "Invalid file URI for formatting")
2328 .await;
2329 return Ok(None);
2330 }
2331 };
2332 let path_str = match file_path.to_str() {
2333 Some(s) => s,
2334 None => {
2335 self.client
2336 .log_message(MessageType::ERROR, "Invalid file path for formatting")
2337 .await;
2338 return Ok(None);
2339 }
2340 };
2341
2342 let original_content = {
2344 let text_cache = self.text_cache.read().await;
2345 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
2346 content.clone()
2347 } else {
2348 match std::fs::read_to_string(&file_path) {
2350 Ok(content) => content,
2351 Err(_) => {
2352 self.client
2353 .log_message(MessageType::ERROR, "Failed to read file for formatting")
2354 .await;
2355 return Ok(None);
2356 }
2357 }
2358 }
2359 };
2360
2361 let formatted_content = match self.compiler.format(path_str).await {
2363 Ok(content) => content,
2364 Err(e) => {
2365 self.client
2366 .log_message(MessageType::WARNING, format!("Formatting failed: {e}"))
2367 .await;
2368 return Ok(None);
2369 }
2370 };
2371
2372 if original_content != formatted_content {
2374 let end = utils::byte_offset_to_position(&original_content, original_content.len());
2375
2376 {
2378 let mut text_cache = self.text_cache.write().await;
2379 let version = text_cache
2380 .get(&uri.to_string())
2381 .map(|(v, _)| *v)
2382 .unwrap_or(0);
2383 text_cache.insert(uri.to_string(), (version, formatted_content.clone()));
2384 }
2385
2386 let edit = TextEdit {
2387 range: Range {
2388 start: Position::default(),
2389 end,
2390 },
2391 new_text: formatted_content,
2392 };
2393 Ok(Some(vec![edit]))
2394 } else {
2395 Ok(None)
2396 }
2397 }
2398
2399 async fn did_close(&self, params: DidCloseTextDocumentParams) {
2400 let uri = params.text_document.uri.to_string();
2401 self.ast_cache.write().await.remove(&uri);
2402 self.text_cache.write().await.remove(&uri);
2403 self.completion_cache.write().await.remove(&uri);
2404 self.client
2405 .log_message(MessageType::INFO, "file closed, caches cleared.")
2406 .await;
2407 }
2408
2409 async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
2410 let s = config::parse_settings(¶ms.settings);
2411 self.client
2412 .log_message(
2413 MessageType::INFO,
2414 format!(
2415 "settings updated: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}, projectIndex.incrementalEditReindexThreshold={}",
2416 s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex, s.project_index.incremental_edit_reindex_threshold,
2417 ),
2418 )
2419 .await;
2420 let mut settings = self.settings.write().await;
2421 *settings = s;
2422
2423 let client = self.client.clone();
2425 tokio::spawn(async move {
2426 let _ = client.inlay_hint_refresh().await;
2427 });
2428 }
2429 async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
2430 self.client
2431 .log_message(MessageType::INFO, "workdspace folders changed.")
2432 .await;
2433 }
2434
2435 async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
2436 self.client
2437 .log_message(MessageType::INFO, "watched files have changed.")
2438 .await;
2439
2440 for change in ¶ms.changes {
2442 let path = match change.uri.to_file_path() {
2443 Ok(p) => p,
2444 Err(_) => continue,
2445 };
2446
2447 let filename = path.file_name().and_then(|n| n.to_str());
2448
2449 if filename == Some("foundry.toml") {
2450 let lint_cfg = config::load_lint_config_from_toml(&path);
2451 self.client
2452 .log_message(
2453 MessageType::INFO,
2454 format!(
2455 "reloaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
2456 lint_cfg.lint_on_build,
2457 lint_cfg.ignore_patterns.len()
2458 ),
2459 )
2460 .await;
2461 let mut lc = self.lint_config.write().await;
2462 *lc = lint_cfg;
2463
2464 let foundry_cfg = config::load_foundry_config_from_toml(&path);
2465 self.client
2466 .log_message(
2467 MessageType::INFO,
2468 format!(
2469 "reloaded foundry.toml project config: solc_version={:?}, remappings={}",
2470 foundry_cfg.solc_version,
2471 foundry_cfg.remappings.len()
2472 ),
2473 )
2474 .await;
2475 if foundry_cfg.via_ir {
2476 self.client
2477 .log_message(
2478 MessageType::WARNING,
2479 "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
2480 )
2481 .await;
2482 }
2483 let mut fc = self.foundry_config.write().await;
2484 *fc = foundry_cfg;
2485 break;
2486 }
2487
2488 if filename == Some("remappings.txt") {
2489 self.client
2490 .log_message(
2491 MessageType::INFO,
2492 "remappings.txt changed, config may need refresh",
2493 )
2494 .await;
2495 }
2498 }
2499 }
2500
    /// Handles `textDocument/completion`.
    ///
    /// Sources considered, in order: the per-document completion cache, then
    /// the workspace-root build's completion cache. When neither is populated,
    /// a background task is spawned to backfill the per-document cache from
    /// the AST cache, and this request proceeds with whatever is available.
    async fn completion(
        &self,
        params: CompletionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        // The trigger character (e.g. ".") shapes which candidates apply.
        let trigger_char = params
            .context
            .as_ref()
            .and_then(|ctx| ctx.trigger_character.as_deref());

        // Prefer the in-memory buffer (captures unsaved edits); fall back to disk.
        let source_text = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, text)) = text_cache.get(&uri.to_string()) {
                text.clone()
            } else {
                match uri.to_file_path() {
                    Ok(path) => std::fs::read_to_string(&path).unwrap_or_default(),
                    Err(_) => return Ok(None),
                }
            }
        };

        // Per-document completion cache, if one was already built.
        let local_cached: Option<Arc<completion::CompletionCache>> = {
            let comp_cache = self.completion_cache.read().await;
            comp_cache.get(&uri.to_string()).cloned()
        };

        // Workspace-root build's completion cache (keyed by the root URI).
        let root_cached: Option<Arc<completion::CompletionCache>> = {
            let root_key = self.root_uri.read().await.as_ref().map(|u| u.to_string());
            match root_key {
                Some(root_key) => {
                    let ast_cache = self.ast_cache.read().await;
                    ast_cache
                        .get(&root_key)
                        .map(|root_build| root_build.completion_cache.clone())
                }
                None => None,
            }
        };

        // Local cache wins; Arc clones here are cheap refcount bumps.
        let cached = local_cached.or(root_cached.clone());

        if cached.is_none() {
            // Backfill the per-document cache asynchronously so the NEXT
            // completion request is warm; this request is not blocked on it.
            let ast_cache = self.ast_cache.clone();
            let completion_cache = self.completion_cache.clone();
            let uri_string = uri.to_string();
            tokio::spawn(async move {
                let cached_build = {
                    let cache = ast_cache.read().await;
                    match cache.get(&uri_string) {
                        Some(v) => v.clone(),
                        None => return,
                    }
                };
                completion_cache
                    .write()
                    .await
                    .insert(uri_string, cached_build.completion_cache.clone());
            });
        }

        let cache_ref = cached.as_deref();

        // Map this document's path to the build's internal file id, if known.
        let file_id = {
            let uri_path = uri.to_file_path().ok();
            cache_ref.and_then(|c| {
                uri_path.as_ref().and_then(|p| {
                    let path_str = p.to_str()?;
                    c.path_to_file_id.get(path_str).copied()
                })
            })
        };

        let current_file_path = uri
            .to_file_path()
            .ok()
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        // Importable top-level symbols are only offered for plain identifier
        // completion — never after a "." member access.
        let tail_candidates = if trigger_char == Some(".") {
            vec![]
        } else {
            root_cached.as_deref().map_or_else(Vec::new, |c| {
                completion::top_level_importable_completion_candidates(
                    c,
                    current_file_path.as_deref(),
                    &source_text,
                )
            })
        };

        let result = completion::handle_completion_with_tail_candidates(
            cache_ref,
            &source_text,
            position,
            trigger_char,
            file_id,
            tail_candidates,
        );
        Ok(result)
    }
2609
    /// Handles `textDocument/definition`.
    ///
    /// Strategy depends on buffer freshness: when the in-memory text is newer
    /// than the last compiled build ("dirty"), a tree-sitter lookup is tried
    /// first (it sees the live text) with an AST by-name fallback; when clean,
    /// the precise AST lookup runs first with tree-sitter as fallback.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/definition request")
            .await;

        let uri = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file uri")
                    .await;
                return Ok(None);
            }
        };

        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        let source_text = String::from_utf8_lossy(&source_bytes).to_string();

        // Identifier under the cursor; used below to validate candidate targets.
        let cursor_name = goto::cursor_context(&source_text, position).map(|ctx| ctx.name);

        // "Dirty" means the buffer has edits newer than the cached build, so
        // AST byte offsets may no longer line up with the live text.
        let (is_dirty, cached_build) = {
            let text_version = self
                .text_cache
                .read()
                .await
                .get(&uri.to_string())
                .map(|(v, _)| *v)
                .unwrap_or(0);
            let cb = self.get_or_fetch_build(&uri, &file_path, false).await;
            let build_version = cb.as_ref().map(|b| b.build_version).unwrap_or(0);
            (text_version > build_version, cb)
        };

        // Sanity-check a tree-sitter hit: the target file should actually
        // define the cursor identifier. Unreadable targets pass by default.
        let validate_ts = |loc: &Location| -> bool {
            let Some(ref name) = cursor_name else {
                return true; };
            let target_src = if loc.uri == uri {
                Some(source_text.clone())
            } else {
                loc.uri
                    .to_file_path()
                    .ok()
                    .and_then(|p| std::fs::read_to_string(&p).ok())
            };
            match target_src {
                Some(src) => goto::validate_goto_target(&src, loc, name),
                None => true, }
        };

        if is_dirty {
            self.client
                .log_message(MessageType::INFO, "file is dirty, trying tree-sitter first")
                .await;

            // Tree-sitter works against the live buffer, so it is the more
            // reliable option while the build is stale.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(
                        MessageType::INFO,
                        "tree-sitter result failed validation, trying AST fallback",
                    )
                    .await;
            }

            // Fallback: look up the declaration by name in the (stale) build,
            // using the cursor byte offset only as a disambiguation hint.
            if let Some(ref cb) = cached_build
                && let Some(ref name) = cursor_name
            {
                let byte_hint = goto::pos_to_bytes(&source_bytes, position);
                if let Some(location) = goto::goto_declaration_by_name(cb, &uri, name, byte_hint) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (AST by name) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
            }
        } else {
            // Clean buffer: the AST lookup is precise, so try it first.
            if let Some(ref cb) = cached_build
                && let Some(location) =
                    goto::goto_declaration_cached(cb, &uri, position, &source_bytes)
            {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "found definition (AST) at {}:{}",
                            location.uri, location.range.start.line
                        ),
                    )
                    .await;
                return Ok(Some(GotoDefinitionResponse::from(location)));
            }

            // AST missed — fall back to tree-sitter.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter fallback) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(MessageType::INFO, "tree-sitter fallback failed validation")
                    .await;
            }
        }

        self.client
            .log_message(MessageType::INFO, "no definition found")
            .await;
        Ok(None)
    }
2791
2792 async fn goto_declaration(
2793 &self,
2794 params: request::GotoDeclarationParams,
2795 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoDeclarationResponse>> {
2796 self.client
2797 .log_message(MessageType::INFO, "got textDocument/declaration request")
2798 .await;
2799
2800 let uri = params.text_document_position_params.text_document.uri;
2801 let position = params.text_document_position_params.position;
2802
2803 let file_path = match uri.to_file_path() {
2804 Ok(path) => path,
2805 Err(_) => {
2806 self.client
2807 .log_message(MessageType::ERROR, "invalid file uri")
2808 .await;
2809 return Ok(None);
2810 }
2811 };
2812
2813 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
2814 Some(bytes) => bytes,
2815 None => return Ok(None),
2816 };
2817
2818 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
2819 let cached_build = match cached_build {
2820 Some(cb) => cb,
2821 None => return Ok(None),
2822 };
2823
2824 if let Some(location) =
2825 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
2826 {
2827 self.client
2828 .log_message(
2829 MessageType::INFO,
2830 format!(
2831 "found declaration at {}:{}",
2832 location.uri, location.range.start.line
2833 ),
2834 )
2835 .await;
2836 Ok(Some(request::GotoDeclarationResponse::from(location)))
2837 } else {
2838 self.client
2839 .log_message(MessageType::INFO, "no declaration found")
2840 .await;
2841 Ok(None)
2842 }
2843 }
2844
2845 async fn references(
2846 &self,
2847 params: ReferenceParams,
2848 ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
2849 self.client
2850 .log_message(MessageType::INFO, "Got a textDocument/references request")
2851 .await;
2852
2853 let uri = params.text_document_position.text_document.uri;
2854 let position = params.text_document_position.position;
2855 let file_path = match uri.to_file_path() {
2856 Ok(path) => path,
2857 Err(_) => {
2858 self.client
2859 .log_message(MessageType::ERROR, "Invalid file URI")
2860 .await;
2861 return Ok(None);
2862 }
2863 };
2864 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
2865 Some(bytes) => bytes,
2866 None => return Ok(None),
2867 };
2868 let cached_build = self.get_or_fetch_build(&uri, &file_path, true).await;
2869 let cached_build = match cached_build {
2870 Some(cb) => cb,
2871 None => return Ok(None),
2872 };
2873
2874 let mut locations = references::goto_references_cached(
2876 &cached_build,
2877 &uri,
2878 position,
2879 &source_bytes,
2880 None,
2881 params.context.include_declaration,
2882 );
2883
2884 if let Some((def_abs_path, def_byte_offset)) =
2886 references::resolve_target_location(&cached_build, &uri, position, &source_bytes)
2887 {
2888 let cache = self.ast_cache.read().await;
2889 for (cached_uri, other_build) in cache.iter() {
2890 if *cached_uri == uri.to_string() {
2891 continue;
2892 }
2893 let other_locations = references::goto_references_for_target(
2894 other_build,
2895 &def_abs_path,
2896 def_byte_offset,
2897 None,
2898 params.context.include_declaration,
2899 );
2900 locations.extend(other_locations);
2901 }
2902 }
2903
2904 let mut seen = std::collections::HashSet::new();
2906 locations.retain(|loc| {
2907 seen.insert((
2908 loc.uri.clone(),
2909 loc.range.start.line,
2910 loc.range.start.character,
2911 loc.range.end.line,
2912 loc.range.end.character,
2913 ))
2914 });
2915
2916 if locations.is_empty() {
2917 self.client
2918 .log_message(MessageType::INFO, "No references found")
2919 .await;
2920 Ok(None)
2921 } else {
2922 self.client
2923 .log_message(
2924 MessageType::INFO,
2925 format!("Found {} references", locations.len()),
2926 )
2927 .await;
2928 Ok(Some(locations))
2929 }
2930 }
2931
2932 async fn prepare_rename(
2933 &self,
2934 params: TextDocumentPositionParams,
2935 ) -> tower_lsp::jsonrpc::Result<Option<PrepareRenameResponse>> {
2936 self.client
2937 .log_message(MessageType::INFO, "got textDocument/prepareRename request")
2938 .await;
2939
2940 let uri = params.text_document.uri;
2941 let position = params.position;
2942
2943 let file_path = match uri.to_file_path() {
2944 Ok(path) => path,
2945 Err(_) => {
2946 self.client
2947 .log_message(MessageType::ERROR, "invalid file uri")
2948 .await;
2949 return Ok(None);
2950 }
2951 };
2952
2953 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
2954 Some(bytes) => bytes,
2955 None => return Ok(None),
2956 };
2957
2958 if let Some(range) = rename::get_identifier_range(&source_bytes, position) {
2959 self.client
2960 .log_message(
2961 MessageType::INFO,
2962 format!(
2963 "prepare rename range: {}:{}",
2964 range.start.line, range.start.character
2965 ),
2966 )
2967 .await;
2968 Ok(Some(PrepareRenameResponse::Range(range)))
2969 } else {
2970 self.client
2971 .log_message(MessageType::INFO, "no identifier found for prepare rename")
2972 .await;
2973 Ok(None)
2974 }
2975 }
2976
    /// Handles `textDocument/rename`.
    ///
    /// Validates the new name, then builds a `WorkspaceEdit` spanning the
    /// current document's build, every other cached build, and all open text
    /// buffers (so unsaved edits are respected).
    ///
    /// Errors with `invalid_params` when the new name is not a valid Solidity
    /// identifier; returns `Ok(None)` for all other unrenameable situations.
    async fn rename(
        &self,
        params: RenameParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/rename request")
            .await;

        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let new_name = params.new_name;
        let file_path = match uri.to_file_path() {
            Ok(p) => p,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "invalid file uri")
                    .await;
                return Ok(None);
            }
        };
        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        // The identifier currently under the cursor — rename target.
        let current_identifier = match rename::get_identifier_at_position(&source_bytes, position) {
            Some(id) => id,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "No identifier found at position")
                    .await;
                return Ok(None);
            }
        };

        if !utils::is_valid_solidity_identifier(&new_name) {
            return Err(tower_lsp::jsonrpc::Error::invalid_params(
                "new name is not a valid solidity identifier",
            ));
        }

        // A rename to the same name is a no-op.
        if new_name == current_identifier {
            self.client
                .log_message(
                    MessageType::INFO,
                    "new name is the same as current identifier",
                )
                .await;
            return Ok(None);
        }

        let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
        let cached_build = match cached_build {
            Some(cb) => cb,
            None => return Ok(None),
        };
        // Clone Arcs out of the cache so the read lock is released before the
        // (potentially slow) rename computation below.
        let other_builds: Vec<Arc<goto::CachedBuild>> = {
            let cache = self.ast_cache.read().await;
            cache
                .iter()
                .filter(|(key, _)| **key != uri.to_string())
                .map(|(_, v)| v.clone())
                .collect()
        };
        let other_refs: Vec<&goto::CachedBuild> = other_builds.iter().map(|v| v.as_ref()).collect();

        // Snapshot open buffers so edits land on unsaved text, not disk state.
        let text_buffers: HashMap<String, Vec<u8>> = {
            let text_cache = self.text_cache.read().await;
            text_cache
                .iter()
                .map(|(uri, (_, content))| (uri.clone(), content.as_bytes().to_vec()))
                .collect()
        };

        match rename::rename_symbol(
            &cached_build,
            &uri,
            position,
            &source_bytes,
            new_name,
            &other_refs,
            &text_buffers,
        ) {
            Some(workspace_edit) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "created rename edit with {} file(s), {} total change(s)",
                            workspace_edit
                                .changes
                                .as_ref()
                                .map(|c| c.len())
                                .unwrap_or(0),
                            workspace_edit
                                .changes
                                .as_ref()
                                .map(|c| c.values().map(|v| v.len()).sum::<usize>())
                                .unwrap_or(0)
                        ),
                    )
                    .await;

                Ok(Some(workspace_edit))
            }

            None => {
                self.client
                    .log_message(MessageType::INFO, "No locations found for renaming")
                    .await;
                Ok(None)
            }
        }
    }
3098
3099 async fn symbol(
3100 &self,
3101 params: WorkspaceSymbolParams,
3102 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SymbolInformation>>> {
3103 self.client
3104 .log_message(MessageType::INFO, "got workspace/symbol request")
3105 .await;
3106
3107 let files: Vec<(Url, String)> = {
3109 let cache = self.text_cache.read().await;
3110 cache
3111 .iter()
3112 .filter(|(uri_str, _)| uri_str.ends_with(".sol"))
3113 .filter_map(|(uri_str, (_, content))| {
3114 Url::parse(uri_str).ok().map(|uri| (uri, content.clone()))
3115 })
3116 .collect()
3117 };
3118
3119 let mut all_symbols = symbols::extract_workspace_symbols(&files);
3120 if !params.query.is_empty() {
3121 let query = params.query.to_lowercase();
3122 all_symbols.retain(|symbol| symbol.name.to_lowercase().contains(&query));
3123 }
3124 if all_symbols.is_empty() {
3125 self.client
3126 .log_message(MessageType::INFO, "No symbols found")
3127 .await;
3128 Ok(None)
3129 } else {
3130 self.client
3131 .log_message(
3132 MessageType::INFO,
3133 format!("found {} symbols", all_symbols.len()),
3134 )
3135 .await;
3136 Ok(Some(all_symbols))
3137 }
3138 }
3139
3140 async fn document_symbol(
3141 &self,
3142 params: DocumentSymbolParams,
3143 ) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
3144 self.client
3145 .log_message(MessageType::INFO, "got textDocument/documentSymbol request")
3146 .await;
3147 let uri = params.text_document.uri;
3148 let file_path = match uri.to_file_path() {
3149 Ok(path) => path,
3150 Err(_) => {
3151 self.client
3152 .log_message(MessageType::ERROR, "invalid file uri")
3153 .await;
3154 return Ok(None);
3155 }
3156 };
3157
3158 let source = {
3160 let cache = self.text_cache.read().await;
3161 cache
3162 .get(&uri.to_string())
3163 .map(|(_, content)| content.clone())
3164 };
3165 let source = match source {
3166 Some(s) => s,
3167 None => match std::fs::read_to_string(&file_path) {
3168 Ok(s) => s,
3169 Err(_) => return Ok(None),
3170 },
3171 };
3172
3173 let symbols = symbols::extract_document_symbols(&source);
3174 if symbols.is_empty() {
3175 self.client
3176 .log_message(MessageType::INFO, "no document symbols found")
3177 .await;
3178 Ok(None)
3179 } else {
3180 self.client
3181 .log_message(
3182 MessageType::INFO,
3183 format!("found {} document symbols", symbols.len()),
3184 )
3185 .await;
3186 Ok(Some(DocumentSymbolResponse::Nested(symbols)))
3187 }
3188 }
3189
3190 async fn document_highlight(
3191 &self,
3192 params: DocumentHighlightParams,
3193 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentHighlight>>> {
3194 self.client
3195 .log_message(
3196 MessageType::INFO,
3197 "got textDocument/documentHighlight request",
3198 )
3199 .await;
3200
3201 let uri = params.text_document_position_params.text_document.uri;
3202 let position = params.text_document_position_params.position;
3203
3204 let source = {
3205 let cache = self.text_cache.read().await;
3206 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3207 };
3208
3209 let source = match source {
3210 Some(s) => s,
3211 None => {
3212 let file_path = match uri.to_file_path() {
3213 Ok(p) => p,
3214 Err(_) => return Ok(None),
3215 };
3216 match std::fs::read_to_string(&file_path) {
3217 Ok(s) => s,
3218 Err(_) => return Ok(None),
3219 }
3220 }
3221 };
3222
3223 let highlights = highlight::document_highlights(&source, position);
3224
3225 if highlights.is_empty() {
3226 self.client
3227 .log_message(MessageType::INFO, "no document highlights found")
3228 .await;
3229 Ok(None)
3230 } else {
3231 self.client
3232 .log_message(
3233 MessageType::INFO,
3234 format!("found {} document highlights", highlights.len()),
3235 )
3236 .await;
3237 Ok(Some(highlights))
3238 }
3239 }
3240
3241 async fn hover(&self, params: HoverParams) -> tower_lsp::jsonrpc::Result<Option<Hover>> {
3242 self.client
3243 .log_message(MessageType::INFO, "got textDocument/hover request")
3244 .await;
3245
3246 let uri = params.text_document_position_params.text_document.uri;
3247 let position = params.text_document_position_params.position;
3248
3249 let file_path = match uri.to_file_path() {
3250 Ok(path) => path,
3251 Err(_) => {
3252 self.client
3253 .log_message(MessageType::ERROR, "invalid file uri")
3254 .await;
3255 return Ok(None);
3256 }
3257 };
3258
3259 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3260 Some(bytes) => bytes,
3261 None => return Ok(None),
3262 };
3263
3264 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3265 let cached_build = match cached_build {
3266 Some(cb) => cb,
3267 None => return Ok(None),
3268 };
3269
3270 let result = hover::hover_info(&cached_build, &uri, position, &source_bytes);
3271
3272 if result.is_some() {
3273 self.client
3274 .log_message(MessageType::INFO, "hover info found")
3275 .await;
3276 } else {
3277 self.client
3278 .log_message(MessageType::INFO, "no hover info found")
3279 .await;
3280 }
3281
3282 Ok(result)
3283 }
3284
3285 async fn signature_help(
3286 &self,
3287 params: SignatureHelpParams,
3288 ) -> tower_lsp::jsonrpc::Result<Option<SignatureHelp>> {
3289 self.client
3290 .log_message(MessageType::INFO, "got textDocument/signatureHelp request")
3291 .await;
3292
3293 let uri = params.text_document_position_params.text_document.uri;
3294 let position = params.text_document_position_params.position;
3295
3296 let file_path = match uri.to_file_path() {
3297 Ok(path) => path,
3298 Err(_) => {
3299 self.client
3300 .log_message(MessageType::ERROR, "invalid file uri")
3301 .await;
3302 return Ok(None);
3303 }
3304 };
3305
3306 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3307 Some(bytes) => bytes,
3308 None => return Ok(None),
3309 };
3310
3311 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3312 let cached_build = match cached_build {
3313 Some(cb) => cb,
3314 None => return Ok(None),
3315 };
3316
3317 let result = hover::signature_help(&cached_build, &source_bytes, position);
3318
3319 Ok(result)
3320 }
3321
3322 async fn document_link(
3323 &self,
3324 params: DocumentLinkParams,
3325 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
3326 self.client
3327 .log_message(MessageType::INFO, "got textDocument/documentLink request")
3328 .await;
3329
3330 let uri = params.text_document.uri;
3331 let file_path = match uri.to_file_path() {
3332 Ok(path) => path,
3333 Err(_) => {
3334 self.client
3335 .log_message(MessageType::ERROR, "invalid file uri")
3336 .await;
3337 return Ok(None);
3338 }
3339 };
3340
3341 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3342 Some(bytes) => bytes,
3343 None => return Ok(None),
3344 };
3345
3346 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3347 let cached_build = match cached_build {
3348 Some(cb) => cb,
3349 None => return Ok(None),
3350 };
3351
3352 let result = links::document_links(&cached_build, &uri, &source_bytes);
3353
3354 if result.is_empty() {
3355 self.client
3356 .log_message(MessageType::INFO, "no document links found")
3357 .await;
3358 Ok(None)
3359 } else {
3360 self.client
3361 .log_message(
3362 MessageType::INFO,
3363 format!("found {} document links", result.len()),
3364 )
3365 .await;
3366 Ok(Some(result))
3367 }
3368 }
3369
3370 async fn semantic_tokens_full(
3371 &self,
3372 params: SemanticTokensParams,
3373 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
3374 self.client
3375 .log_message(
3376 MessageType::INFO,
3377 "got textDocument/semanticTokens/full request",
3378 )
3379 .await;
3380
3381 let uri = params.text_document.uri;
3382 let source = {
3383 let cache = self.text_cache.read().await;
3384 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3385 };
3386
3387 let source = match source {
3388 Some(s) => s,
3389 None => {
3390 let file_path = match uri.to_file_path() {
3392 Ok(p) => p,
3393 Err(_) => return Ok(None),
3394 };
3395 match std::fs::read_to_string(&file_path) {
3396 Ok(s) => s,
3397 Err(_) => return Ok(None),
3398 }
3399 }
3400 };
3401
3402 let mut tokens = semantic_tokens::semantic_tokens_full(&source);
3403
3404 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
3406 let result_id = id.to_string();
3407 tokens.result_id = Some(result_id.clone());
3408
3409 {
3410 let mut cache = self.semantic_token_cache.write().await;
3411 cache.insert(uri.to_string(), (result_id, tokens.data.clone()));
3412 }
3413
3414 Ok(Some(SemanticTokensResult::Tokens(tokens)))
3415 }
3416
3417 async fn semantic_tokens_range(
3418 &self,
3419 params: SemanticTokensRangeParams,
3420 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensRangeResult>> {
3421 self.client
3422 .log_message(
3423 MessageType::INFO,
3424 "got textDocument/semanticTokens/range request",
3425 )
3426 .await;
3427
3428 let uri = params.text_document.uri;
3429 let range = params.range;
3430 let source = {
3431 let cache = self.text_cache.read().await;
3432 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3433 };
3434
3435 let source = match source {
3436 Some(s) => s,
3437 None => {
3438 let file_path = match uri.to_file_path() {
3439 Ok(p) => p,
3440 Err(_) => return Ok(None),
3441 };
3442 match std::fs::read_to_string(&file_path) {
3443 Ok(s) => s,
3444 Err(_) => return Ok(None),
3445 }
3446 }
3447 };
3448
3449 let tokens =
3450 semantic_tokens::semantic_tokens_range(&source, range.start.line, range.end.line);
3451
3452 Ok(Some(SemanticTokensRangeResult::Tokens(tokens)))
3453 }
3454
3455 async fn semantic_tokens_full_delta(
3456 &self,
3457 params: SemanticTokensDeltaParams,
3458 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
3459 self.client
3460 .log_message(
3461 MessageType::INFO,
3462 "got textDocument/semanticTokens/full/delta request",
3463 )
3464 .await;
3465
3466 let uri = params.text_document.uri;
3467 let previous_result_id = params.previous_result_id;
3468
3469 let source = {
3470 let cache = self.text_cache.read().await;
3471 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3472 };
3473
3474 let source = match source {
3475 Some(s) => s,
3476 None => {
3477 let file_path = match uri.to_file_path() {
3478 Ok(p) => p,
3479 Err(_) => return Ok(None),
3480 };
3481 match std::fs::read_to_string(&file_path) {
3482 Ok(s) => s,
3483 Err(_) => return Ok(None),
3484 }
3485 }
3486 };
3487
3488 let mut new_tokens = semantic_tokens::semantic_tokens_full(&source);
3489
3490 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
3492 let new_result_id = id.to_string();
3493 new_tokens.result_id = Some(new_result_id.clone());
3494
3495 let uri_str = uri.to_string();
3496
3497 let old_tokens = {
3499 let cache = self.semantic_token_cache.read().await;
3500 cache
3501 .get(&uri_str)
3502 .filter(|(rid, _)| *rid == previous_result_id)
3503 .map(|(_, tokens)| tokens.clone())
3504 };
3505
3506 {
3508 let mut cache = self.semantic_token_cache.write().await;
3509 cache.insert(uri_str, (new_result_id.clone(), new_tokens.data.clone()));
3510 }
3511
3512 match old_tokens {
3513 Some(old) => {
3514 let edits = semantic_tokens::compute_delta(&old, &new_tokens.data);
3516 Ok(Some(SemanticTokensFullDeltaResult::TokensDelta(
3517 SemanticTokensDelta {
3518 result_id: Some(new_result_id),
3519 edits,
3520 },
3521 )))
3522 }
3523 None => {
3524 Ok(Some(SemanticTokensFullDeltaResult::Tokens(new_tokens)))
3526 }
3527 }
3528 }
3529
3530 async fn folding_range(
3531 &self,
3532 params: FoldingRangeParams,
3533 ) -> tower_lsp::jsonrpc::Result<Option<Vec<FoldingRange>>> {
3534 self.client
3535 .log_message(MessageType::INFO, "got textDocument/foldingRange request")
3536 .await;
3537
3538 let uri = params.text_document.uri;
3539
3540 let source = {
3541 let cache = self.text_cache.read().await;
3542 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3543 };
3544
3545 let source = match source {
3546 Some(s) => s,
3547 None => {
3548 let file_path = match uri.to_file_path() {
3549 Ok(p) => p,
3550 Err(_) => return Ok(None),
3551 };
3552 match std::fs::read_to_string(&file_path) {
3553 Ok(s) => s,
3554 Err(_) => return Ok(None),
3555 }
3556 }
3557 };
3558
3559 let ranges = folding::folding_ranges(&source);
3560
3561 if ranges.is_empty() {
3562 self.client
3563 .log_message(MessageType::INFO, "no folding ranges found")
3564 .await;
3565 Ok(None)
3566 } else {
3567 self.client
3568 .log_message(
3569 MessageType::INFO,
3570 format!("found {} folding ranges", ranges.len()),
3571 )
3572 .await;
3573 Ok(Some(ranges))
3574 }
3575 }
3576
3577 async fn selection_range(
3578 &self,
3579 params: SelectionRangeParams,
3580 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SelectionRange>>> {
3581 self.client
3582 .log_message(MessageType::INFO, "got textDocument/selectionRange request")
3583 .await;
3584
3585 let uri = params.text_document.uri;
3586
3587 let source = {
3588 let cache = self.text_cache.read().await;
3589 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3590 };
3591
3592 let source = match source {
3593 Some(s) => s,
3594 None => {
3595 let file_path = match uri.to_file_path() {
3596 Ok(p) => p,
3597 Err(_) => return Ok(None),
3598 };
3599 match std::fs::read_to_string(&file_path) {
3600 Ok(s) => s,
3601 Err(_) => return Ok(None),
3602 }
3603 }
3604 };
3605
3606 let ranges = selection::selection_ranges(&source, ¶ms.positions);
3607
3608 if ranges.is_empty() {
3609 self.client
3610 .log_message(MessageType::INFO, "no selection ranges found")
3611 .await;
3612 Ok(None)
3613 } else {
3614 self.client
3615 .log_message(
3616 MessageType::INFO,
3617 format!("found {} selection ranges", ranges.len()),
3618 )
3619 .await;
3620 Ok(Some(ranges))
3621 }
3622 }
3623
3624 async fn inlay_hint(
3625 &self,
3626 params: InlayHintParams,
3627 ) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
3628 self.client
3629 .log_message(MessageType::INFO, "got textDocument/inlayHint request")
3630 .await;
3631
3632 let uri = params.text_document.uri;
3633 let range = params.range;
3634
3635 let file_path = match uri.to_file_path() {
3636 Ok(path) => path,
3637 Err(_) => {
3638 self.client
3639 .log_message(MessageType::ERROR, "invalid file uri")
3640 .await;
3641 return Ok(None);
3642 }
3643 };
3644
3645 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3646 Some(bytes) => bytes,
3647 None => return Ok(None),
3648 };
3649
3650 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3651 let cached_build = match cached_build {
3652 Some(cb) => cb,
3653 None => return Ok(None),
3654 };
3655
3656 let mut hints = inlay_hints::inlay_hints(&cached_build, &uri, range, &source_bytes);
3657
3658 let settings = self.settings.read().await;
3660 if !settings.inlay_hints.parameters {
3661 hints.retain(|h| h.kind != Some(InlayHintKind::PARAMETER));
3662 }
3663 if !settings.inlay_hints.gas_estimates {
3664 hints.retain(|h| h.kind != Some(InlayHintKind::TYPE));
3665 }
3666
3667 if hints.is_empty() {
3668 self.client
3669 .log_message(MessageType::INFO, "no inlay hints found")
3670 .await;
3671 Ok(None)
3672 } else {
3673 self.client
3674 .log_message(
3675 MessageType::INFO,
3676 format!("found {} inlay hints", hints.len()),
3677 )
3678 .await;
3679 Ok(Some(hints))
3680 }
3681 }
3682
    /// Handles `workspace/willRenameFiles`: computes import-path rewrites for
    /// the renamed files (folder renames are expanded to their contained
    /// source files) and returns them as a `WorkspaceEdit`.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no source files are
    /// discovered, or no import statements need updating.
    async fn will_rename_files(
        &self,
        params: RenameFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        if !self
            .settings
            .read()
            .await
            .file_operations
            .update_imports_on_rename
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willRenameFiles: updateImportsOnRename disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover project sources on a blocking thread (filesystem walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willRenameFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Convert the client's (old, new) URI pairs into filesystem paths;
        // pairs that are not file:// URIs are silently dropped.
        let raw_renames: Vec<(std::path::PathBuf, std::path::PathBuf)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                let old_path = old_uri.to_file_path().ok()?;
                let new_path = new_uri.to_file_path().ok()?;
                Some((old_path, new_path))
            })
            .collect();

        // A folder rename expands to one rename per source file under it.
        let renames = file_operations::expand_folder_renames(&raw_renames, &source_files);

        if renames.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} rename(s) after folder expansion",
                    renames.len()
                ),
            )
            .await;

        // Collect sources missing from the text cache so the rename pass can
        // see every file's current content, not just the open buffers.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Read the missing files off the async runtime's worker threads.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                // Version 0 marks a disk-loaded entry; or_insert never
                // clobbers an entry owned by an open editor buffer.
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute the import edits, reading all sources via the text cache.
        let text_cache = self.text_cache.clone();
        let result = {
            let tc = text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::rename_imports(
                &source_files,
                &renames,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal bookkeeping failures in the client log.
        let stats = &result.stats;
        if stats.read_failures > 0 || stats.pathdiff_failures > 0 || stats.duplicate_renames > 0 {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willRenameFiles stats: read_failures={}, pathdiff_failures={}, \
                        duplicate_renames={}, no_parent={}, no_op_skips={}, dedup_skips={}",
                        stats.read_failures,
                        stats.pathdiff_failures,
                        stats.duplicate_renames,
                        stats.no_parent,
                        stats.no_op_skips,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;

        if all_edits.is_empty() {
            self.client
                .log_message(MessageType::INFO, "willRenameFiles: no import edits needed")
                .await;
            return Ok(None);
        }

        // Patch the cached copies too, so later requests against in-memory
        // text already reflect the edits we are about to return.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willRenameFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
3877
    /// Handles `workspace/didRenameFiles`: migrates per-file caches to the
    /// new URIs, invalidates derived caches, and kicks off a background
    /// re-index of the whole project.
    async fn did_rename_files(&self, params: RenameFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Mark the persistent project cache dirty and record both old and new
        // paths so the incremental sync worker knows what changed.
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(old_uri) = Url::parse(&file.old_uri)
                    && let Ok(old_path) = old_uri.to_file_path()
                {
                    changed.insert(old_path.to_string_lossy().to_string());
                }
                if let Ok(new_uri) = Url::parse(&file.new_uri)
                    && let Ok(new_path) = new_uri.to_file_path()
                {
                    changed.insert(new_path.to_string_lossy().to_string());
                }
            }
        }

        // (old, new) URI pairs as sent by the client; folders not yet expanded.
        let raw_uri_pairs: Vec<(Url, Url)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                Some((old_uri, new_uri))
            })
            .collect();

        let file_renames = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            // Expand folder renames against the union of discovered sources
            // and currently cached paths, so files known only to the cache
            // are migrated too.
            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_renames_from_paths(&raw_uri_pairs, &all_paths)
        };

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didRenameFiles: migrating {} cache entry/entries",
                    file_renames.len()
                ),
            )
            .await;

        // Move text-cache entries to their new URIs; AST and completion
        // entries are simply dropped and rebuilt on demand.
        {
            let mut tc = self.text_cache.write().await;
            for (old_key, new_key) in &file_renames {
                if let Some(entry) = tc.remove(old_key) {
                    tc.insert(new_key.clone(), entry);
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for (old_key, _) in &file_renames {
                ac.remove(old_key);
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for (old_key, _) in &file_renames {
                cc.remove(old_key);
            }
        }

        // Invalidate the project-wide AST entry (keyed by the root URI)
        // before re-indexing.
        let root_key = self.root_uri.read().await.as_ref().map(|u| u.to_string());
        if let Some(ref key) = root_key {
            self.ast_cache.write().await.remove(key);
        }

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Re-index in the background so the notification handler returns fast.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    let source_count = cached_build.nodes.len();
                    ast_cache.write().await.insert(cache_key, cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didRenameFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didRenameFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
4019
    /// Handles `workspace/willDeleteFiles`: removes import statements that
    /// reference the files (or folders) about to be deleted and returns the
    /// removals as a `WorkspaceEdit`.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no sources are
    /// discovered, or no imports reference the deleted targets.
    async fn will_delete_files(
        &self,
        params: DeleteFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        if !update_imports_on_delete_enabled(&*self.settings.read().await) {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: updateImportsOnDelete disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover project sources on a blocking thread (filesystem walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willDeleteFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Deletion targets as filesystem paths; non-file URIs are dropped.
        let raw_deletes: Vec<std::path::PathBuf> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .filter_map(|u| u.to_file_path().ok())
            .collect();

        // A folder delete expands to one target per source file under it.
        let deletes = file_operations::expand_folder_deletes(&raw_deletes, &source_files);
        if deletes.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} delete target(s) after folder expansion",
                    deletes.len()
                ),
            )
            .await;

        // Collect sources missing from the text cache so the delete pass can
        // see every file's current content, not just the open buffers.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Read the missing files off the async runtime's worker threads.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                // Version 0 marks a disk-loaded entry; or_insert never
                // clobbers an entry owned by an open editor buffer.
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute import-removal edits, reading sources via the text cache.
        let result = {
            let tc = self.text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::delete_imports(
                &source_files,
                &deletes,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal bookkeeping failures in the client log.
        let stats = &result.stats;
        if stats.read_failures > 0
            || stats.statement_range_failures > 0
            || stats.duplicate_deletes > 0
        {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willDeleteFiles stats: read_failures={}, statement_range_failures={}, \
                        duplicate_deletes={}, no_parent={}, dedup_skips={}",
                        stats.read_failures,
                        stats.statement_range_failures,
                        stats.duplicate_deletes,
                        stats.no_parent,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;
        if all_edits.is_empty() {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: no import-removal edits needed",
                )
                .await;
            return Ok(None);
        }

        // Patch the cached copies too, so later requests against in-memory
        // text already reflect the edits we are about to return.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willDeleteFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
4195
    /// Handles `workspace/didDeleteFiles`: clears published diagnostics and
    /// every per-file cache entry for the deleted files (folder deletes are
    /// expanded), then re-indexes the project in the background.
    async fn did_delete_files(&self, params: DeleteFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Record the deletions for the incremental project-cache sync worker.
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(uri) = Url::parse(&file.uri)
                    && let Ok(path) = uri.to_file_path()
                {
                    changed.insert(path.to_string_lossy().to_string());
                }
            }
        }

        let raw_delete_uris: Vec<Url> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .collect();

        // Expand folder deletes against the union of discovered sources and
        // cached paths — a deleted folder's children are already gone from
        // disk, so the cache may be the only record of them.
        let deleted_paths = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_deletes_from_paths(&raw_delete_uris, &all_paths)
        };

        // Cache keys (URI strings) and URIs for the deleted files.
        let mut deleted_keys: HashSet<String> = HashSet::new();
        let mut deleted_uris: Vec<Url> = Vec::new();
        for path in deleted_paths {
            if let Ok(uri) = Url::from_file_path(&path) {
                deleted_keys.insert(uri.to_string());
                deleted_uris.push(uri);
            }
        }
        if deleted_keys.is_empty() {
            return;
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: deleting {} cache/diagnostic entry(ies)",
                    deleted_keys.len()
                ),
            )
            .await;

        // Clear any diagnostics still published for the deleted files.
        for uri in &deleted_uris {
            self.client
                .publish_diagnostics(uri.clone(), vec![], None)
                .await;
        }

        // Evict the deleted files from every per-file cache, counting the
        // removals for the summary log line below.
        let mut removed_text = 0usize;
        let mut removed_ast = 0usize;
        let mut removed_completion = 0usize;
        let mut removed_semantic = 0usize;
        let mut removed_pending_create = 0usize;
        {
            let mut tc = self.text_cache.write().await;
            for key in &deleted_keys {
                if tc.remove(key).is_some() {
                    removed_text += 1;
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for key in &deleted_keys {
                if ac.remove(key).is_some() {
                    removed_ast += 1;
                }
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for key in &deleted_keys {
                if cc.remove(key).is_some() {
                    removed_completion += 1;
                }
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for key in &deleted_keys {
                if sc.remove(key).is_some() {
                    removed_semantic += 1;
                }
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for key in &deleted_keys {
                if pending.remove(key) {
                    removed_pending_create += 1;
                }
            }
        }
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: removed caches text={} ast={} completion={} semantic={} pendingCreate={}",
                    removed_text,
                    removed_ast,
                    removed_completion,
                    removed_semantic,
                    removed_pending_create,
                ),
            )
            .await;

        // Invalidate the project-wide AST entry (keyed by the root URI)
        // before re-indexing.
        let root_key = self.root_uri.read().await.as_ref().map(|u| u.to_string());
        if let Some(ref key) = root_key {
            self.ast_cache.write().await.remove(key);
        }

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Re-index in the background so the notification handler returns fast.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    let source_count = cached_build.nodes.len();
                    ast_cache.write().await.insert(cache_key, cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didDeleteFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didDeleteFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
4373
4374 async fn will_create_files(
4375 &self,
4376 params: CreateFilesParams,
4377 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
4378 self.client
4379 .log_message(
4380 MessageType::INFO,
4381 format!("workspace/willCreateFiles: {} file(s)", params.files.len()),
4382 )
4383 .await;
4384 if !self
4385 .settings
4386 .read()
4387 .await
4388 .file_operations
4389 .template_on_create
4390 {
4391 self.client
4392 .log_message(
4393 MessageType::INFO,
4394 "willCreateFiles: templateOnCreate disabled",
4395 )
4396 .await;
4397 return Ok(None);
4398 }
4399 self.client
4400 .log_message(
4401 MessageType::INFO,
4402 "willCreateFiles: skipping pre-create edits; scaffolding via didCreateFiles",
4403 )
4404 .await;
4405 Ok(None)
4406 }
4407
4408 async fn did_create_files(&self, params: CreateFilesParams) {
4409 self.client
4410 .log_message(
4411 MessageType::INFO,
4412 format!("workspace/didCreateFiles: {} file(s)", params.files.len()),
4413 )
4414 .await;
4415 self.project_cache_dirty.store(true, Ordering::Release);
4416 {
4417 let mut changed = self.project_cache_changed_files.write().await;
4418 for file in ¶ms.files {
4419 if let Ok(uri) = Url::parse(&file.uri)
4420 && let Ok(path) = uri.to_file_path()
4421 {
4422 changed.insert(path.to_string_lossy().to_string());
4423 }
4424 }
4425 }
4426 if !self
4427 .settings
4428 .read()
4429 .await
4430 .file_operations
4431 .template_on_create
4432 {
4433 self.client
4434 .log_message(
4435 MessageType::INFO,
4436 "didCreateFiles: templateOnCreate disabled",
4437 )
4438 .await;
4439 return;
4440 }
4441
4442 let config = self.foundry_config.read().await;
4443 let solc_version = config.solc_version.clone();
4444 drop(config);
4445
4446 let mut apply_edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
4451 let mut staged_content: HashMap<String, String> = HashMap::new();
4452 let mut created_uris: Vec<String> = Vec::new();
4453 {
4454 let tc = self.text_cache.read().await;
4455 for file_create in ¶ms.files {
4456 let uri = match Url::parse(&file_create.uri) {
4457 Ok(u) => u,
4458 Err(_) => continue,
4459 };
4460 let uri_str = uri.to_string();
4461
4462 let open_has_content = tc
4463 .get(&uri_str)
4464 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()));
4465 let path = match uri.to_file_path() {
4466 Ok(p) => p,
4467 Err(_) => continue,
4468 };
4469 let disk_has_content = std::fs::read_to_string(&path)
4470 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()));
4471
4472 if open_has_content {
4475 self.client
4476 .log_message(
4477 MessageType::INFO,
4478 format!(
4479 "didCreateFiles: skip {} (open buffer already has content)",
4480 uri_str
4481 ),
4482 )
4483 .await;
4484 continue;
4485 }
4486
4487 if disk_has_content {
4489 self.client
4490 .log_message(
4491 MessageType::INFO,
4492 format!(
4493 "didCreateFiles: skip {} (disk file already has content)",
4494 uri_str
4495 ),
4496 )
4497 .await;
4498 continue;
4499 }
4500
4501 let content =
4502 match file_operations::generate_scaffold(&uri, solc_version.as_deref()) {
4503 Some(s) => s,
4504 None => continue,
4505 };
4506
4507 staged_content.insert(uri_str, content.clone());
4508 created_uris.push(uri.to_string());
4509
4510 apply_edits.entry(uri).or_default().push(TextEdit {
4511 range: Range {
4512 start: Position {
4513 line: 0,
4514 character: 0,
4515 },
4516 end: Position {
4517 line: 0,
4518 character: 0,
4519 },
4520 },
4521 new_text: content,
4522 });
4523 }
4524 }
4525
4526 if !apply_edits.is_empty() {
4527 {
4528 let mut pending = self.pending_create_scaffold.write().await;
4529 for uri in &created_uris {
4530 pending.insert(uri.clone());
4531 }
4532 }
4533
4534 let edit = WorkspaceEdit {
4535 changes: Some(apply_edits.clone()),
4536 document_changes: None,
4537 change_annotations: None,
4538 };
4539 self.client
4540 .log_message(
4541 MessageType::INFO,
4542 format!(
4543 "didCreateFiles: scaffolding {} empty file(s) via workspace/applyEdit",
4544 apply_edits.len()
4545 ),
4546 )
4547 .await;
4548 let apply_result = self.client.apply_edit(edit).await;
4549 let applied = apply_result.as_ref().is_ok_and(|r| r.applied);
4550
4551 if applied {
4552 let mut tc = self.text_cache.write().await;
4553 for (uri_str, content) in staged_content {
4554 tc.insert(uri_str, (0, content));
4555 }
4556 } else {
4557 if let Ok(resp) = &apply_result {
4558 self.client
4559 .log_message(
4560 MessageType::WARNING,
4561 format!(
4562 "didCreateFiles: applyEdit rejected (no disk fallback): {:?}",
4563 resp.failure_reason
4564 ),
4565 )
4566 .await;
4567 } else if let Err(e) = &apply_result {
4568 self.client
4569 .log_message(
4570 MessageType::WARNING,
4571 format!("didCreateFiles: applyEdit failed (no disk fallback): {e}"),
4572 )
4573 .await;
4574 }
4575 }
4576 }
4577
4578 for file_create in ¶ms.files {
4582 let Ok(uri) = Url::parse(&file_create.uri) else {
4583 continue;
4584 };
4585 let (version, content) = {
4586 let tc = self.text_cache.read().await;
4587 match tc.get(&uri.to_string()) {
4588 Some((v, c)) => (*v, c.clone()),
4589 None => continue,
4590 }
4591 };
4592 if !content.chars().any(|ch| !ch.is_whitespace()) {
4593 continue;
4594 }
4595 self.on_change(TextDocumentItem {
4596 uri,
4597 version,
4598 text: content,
4599 language_id: "solidity".to_string(),
4600 })
4601 .await;
4602 }
4603
4604 let root_key = self.root_uri.read().await.as_ref().map(|u| u.to_string());
4606 if let Some(ref key) = root_key {
4607 self.ast_cache.write().await.remove(key);
4608 }
4609
4610 let foundry_config = self.foundry_config.read().await.clone();
4611 let ast_cache = self.ast_cache.clone();
4612 let client = self.client.clone();
4613 let text_cache_snapshot = self.text_cache.read().await.clone();
4614
4615 tokio::spawn(async move {
4616 let Some(cache_key) = root_key else {
4617 return;
4618 };
4619 match crate::solc::solc_project_index(
4620 &foundry_config,
4621 Some(&client),
4622 Some(&text_cache_snapshot),
4623 )
4624 .await
4625 {
4626 Ok(ast_data) => {
4627 let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
4628 let source_count = cached_build.nodes.len();
4629 ast_cache.write().await.insert(cache_key, cached_build);
4630 client
4631 .log_message(
4632 MessageType::INFO,
4633 format!("didCreateFiles: re-indexed {} source files", source_count),
4634 )
4635 .await;
4636 }
4637 Err(e) => {
4638 client
4639 .log_message(
4640 MessageType::WARNING,
4641 format!("didCreateFiles: re-index failed: {e}"),
4642 )
4643 .await;
4644 }
4645 }
4646 });
4647 }
4648}
4649
#[cfg(test)]
mod tests {
    use super::{
        start_or_mark_project_cache_sync_pending, stop_project_cache_sync_worker_or_reclaim,
        take_project_cache_sync_pending, try_claim_project_cache_dirty,
        update_imports_on_delete_enabled,
    };
    use std::sync::atomic::{AtomicBool, Ordering};

    /// The delete-import feature is on by default.
    #[test]
    fn update_imports_on_delete_enabled_defaults_true() {
        let defaults = crate::config::Settings::default();
        assert!(update_imports_on_delete_enabled(&defaults));
    }

    /// Explicitly disabling the setting turns the feature off.
    #[test]
    fn update_imports_on_delete_enabled_respects_false() {
        let mut settings = crate::config::Settings::default();
        settings.file_operations.update_imports_on_delete = false;
        assert!(!update_imports_on_delete_enabled(&settings));
    }

    /// During a burst of sync requests only the first caller starts the
    /// worker; later callers merely leave the pending flag set.
    #[test]
    fn project_cache_sync_burst_only_first_starts_worker() {
        let pending_flag = AtomicBool::new(false);
        let worker_active = AtomicBool::new(false);

        // First request claims the worker slot and marks work pending.
        assert!(start_or_mark_project_cache_sync_pending(
            &pending_flag,
            &worker_active
        ));
        assert!(pending_flag.load(Ordering::Acquire));
        assert!(worker_active.load(Ordering::Acquire));

        // A second request during the burst must not start another worker.
        assert!(!start_or_mark_project_cache_sync_pending(
            &pending_flag,
            &worker_active
        ));
        assert!(pending_flag.load(Ordering::Acquire));
        assert!(worker_active.load(Ordering::Acquire));
    }

    /// Taking the pending flag consumes it: a second take sees nothing.
    #[test]
    fn project_cache_sync_take_pending_is_one_shot() {
        let pending_flag = AtomicBool::new(true);
        assert!(take_project_cache_sync_pending(&pending_flag));
        assert!(!pending_flag.load(Ordering::Acquire));
        assert!(!take_project_cache_sync_pending(&pending_flag));
    }

    /// The worker either stops cleanly (no pending work) or reclaims the
    /// running flag when new work raced in just before shutdown.
    #[test]
    fn project_cache_sync_worker_stop_or_reclaim_handles_race() {
        let pending_flag = AtomicBool::new(false);
        let worker_active = AtomicBool::new(true);

        // Nothing pending: the worker shuts down and clears the flag.
        assert!(!stop_project_cache_sync_worker_or_reclaim(
            &pending_flag,
            &worker_active
        ));
        assert!(!worker_active.load(Ordering::Acquire));

        // Work raced in: the worker keeps (reclaims) the running flag.
        pending_flag.store(true, Ordering::Release);
        worker_active.store(true, Ordering::Release);
        assert!(stop_project_cache_sync_worker_or_reclaim(
            &pending_flag,
            &worker_active
        ));
        assert!(worker_active.load(Ordering::Acquire));
    }

    /// Claiming the dirty flag consumes it until something sets it again.
    #[test]
    fn project_cache_dirty_claim_and_retry_cycle() {
        let dirty_flag = AtomicBool::new(true);

        assert!(try_claim_project_cache_dirty(&dirty_flag));
        assert!(!dirty_flag.load(Ordering::Acquire));

        assert!(!try_claim_project_cache_dirty(&dirty_flag));

        dirty_flag.store(true, Ordering::Release);
        assert!(try_claim_project_cache_dirty(&dirty_flag));
        assert!(!dirty_flag.load(Ordering::Acquire));
    }
}