1use crate::completion;
2use crate::config::{self, FoundryConfig, LintConfig, Settings};
3use crate::file_operations;
4use crate::folding;
5use crate::goto;
6use crate::highlight;
7use crate::hover;
8use crate::inlay_hints;
9use crate::links;
10use crate::references;
11use crate::rename;
12use crate::runner::{ForgeRunner, Runner};
13use crate::selection;
14use crate::semantic_tokens;
15use crate::symbols;
16use crate::types::DocumentUri;
17use crate::types::ErrorCode;
18use crate::utils;
19use std::collections::{HashMap, HashSet};
20use std::path::{Component, Path, PathBuf};
21use std::sync::Arc;
22use std::sync::atomic::{AtomicU64, Ordering};
23use tokio::sync::RwLock;
24use tower_lsp::{Client, LanguageServer, lsp_types::*};
25
/// Semantic-token cache keyed by document URI. The `String` is the source
/// text the tokens were computed from, kept so callers can detect staleness.
type SemanticTokenCache = HashMap<DocumentUri, (String, Vec<SemanticToken>)>;

/// This crate's version, baked in at compile time from Cargo.toml.
const CURRENT_VERSION: &str = env!("CARGO_PKG_VERSION");
33
34async fn check_for_updates(client: Client) {
36 let url = "https://api.github.com/repos/mmsaki/solidity-language-server/releases/latest";
37
38 let http = match reqwest::Client::builder()
39 .user_agent("solidity-language-server")
40 .timeout(std::time::Duration::from_secs(5))
41 .build()
42 {
43 Ok(c) => c,
44 Err(_) => return,
45 };
46
47 let resp = match http.get(url).send().await {
48 Ok(r) if r.status().is_success() => r,
49 _ => return,
50 };
51
52 let body: serde_json::Value = match resp.json().await {
53 Ok(v) => v,
54 Err(_) => return,
55 };
56
57 let tag = match body.get("tag_name").and_then(|v| v.as_str()) {
58 Some(t) => t.strip_prefix('v').unwrap_or(t),
59 None => return,
60 };
61
62 let latest = match semver::Version::parse(tag) {
63 Ok(v) => v,
64 Err(_) => return,
65 };
66
67 let current = match semver::Version::parse(CURRENT_VERSION) {
68 Ok(v) => v,
69 Err(_) => return,
70 };
71
72 if latest > current {
73 client
74 .show_message(
75 MessageType::INFO,
76 format!(
77 "Solidity Language Server v{latest} is available (current: v{current}). \
78 Update: cargo install solidity-language-server"
79 ),
80 )
81 .await;
82 }
83}
84
/// Shared state of the Solidity language server. The struct is `Clone`
/// because every mutable field sits behind an `Arc`, so clones share the
/// same underlying caches, flags, and configuration.
#[derive(Clone)]
pub struct ForgeLsp {
    /// Handle for talking back to the editor (diagnostics, logs, requests).
    client: Client,
    /// Diagnostics/AST backend; solar or forge, selected in `new`.
    compiler: Arc<dyn Runner>,
    /// Cached AST builds keyed by document URI (and by project key for the
    /// project-wide build — see `ensure_project_cached_build`).
    ast_cache: Arc<RwLock<HashMap<DocumentUri, Arc<goto::CachedBuild>>>>,
    /// Latest known text per document together with its LSP version number.
    text_cache: Arc<RwLock<HashMap<DocumentUri, (i32, String)>>>,
    /// Per-document completion caches derived from successful builds.
    completion_cache: Arc<RwLock<HashMap<DocumentUri, Arc<completion::CompletionCache>>>>,
    /// Lint configuration (e.g. which files should be linted).
    lint_config: Arc<RwLock<LintConfig>>,
    /// Active foundry project configuration.
    foundry_config: Arc<RwLock<FoundryConfig>>,
    /// Capabilities the client advertised during `initialize`, if any.
    client_capabilities: Arc<RwLock<Option<ClientCapabilities>>>,
    /// Server settings (lint options, project-index options, ...).
    settings: Arc<RwLock<Settings>>,
    /// Whether to compile with solc directly instead of forge.
    use_solc: bool,
    /// Semantic tokens cached per document (see `SemanticTokenCache`).
    semantic_token_cache: Arc<RwLock<SemanticTokenCache>>,
    /// Monotonic counter used to derive semantic-token result ids.
    semantic_token_id: Arc<AtomicU64>,
    /// Workspace root URI from `initialize`, preferred for cache keys.
    root_uri: Arc<RwLock<Option<Url>>>,
    /// Set once full-project indexing has run (or been scheduled).
    project_indexed: Arc<std::sync::atomic::AtomicBool>,
    /// True when the in-memory project cache has changes not yet on disk.
    project_cache_dirty: Arc<std::sync::atomic::AtomicBool>,
    /// Guard: only one project-cache sync job at a time.
    project_cache_sync_running: Arc<std::sync::atomic::AtomicBool>,
    /// Set when a sync was requested while one was already running.
    project_cache_sync_pending: Arc<std::sync::atomic::AtomicBool>,
    /// Forces the next sync to rebuild the cache from scratch.
    project_cache_force_full_rebuild: Arc<std::sync::atomic::AtomicBool>,
    /// Guard: only one incremental cache upsert job at a time.
    project_cache_upsert_running: Arc<std::sync::atomic::AtomicBool>,
    /// Set when an upsert was requested while one was already running.
    project_cache_upsert_pending: Arc<std::sync::atomic::AtomicBool>,
    /// Files changed since the last cache sync.
    project_cache_changed_files: Arc<RwLock<HashSet<String>>>,
    /// Files queued for the next incremental cache upsert.
    project_cache_upsert_files: Arc<RwLock<HashSet<String>>>,
    /// Newly created documents still awaiting scaffold content.
    pending_create_scaffold: Arc<RwLock<HashSet<DocumentUri>>>,
    /// True if settings came from `initializationOptions` (takes precedence
    /// over later configuration pulls — presumably; confirm against callers).
    settings_from_init: Arc<std::sync::atomic::AtomicBool>,
    /// One watch channel per document feeding its didSave worker task.
    did_save_workers: Arc<
        RwLock<HashMap<DocumentUri, tokio::sync::watch::Sender<Option<DidSaveTextDocumentParams>>>>,
    >,
    /// Static lookup from compiler error codes to code-action definitions.
    code_action_db: Arc<HashMap<ErrorCode, crate::code_actions::CodeActionDef>>,
    /// Cached builds for library sub-projects (populated in the background
    /// by `spawn_load_lib_sub_caches_task`).
    sub_caches: Arc<RwLock<Vec<Arc<goto::CachedBuild>>>>,
    /// Guard: only one sub-cache loading task at a time.
    sub_caches_loading: Arc<std::sync::atomic::AtomicBool>,
    /// Interner mapping source paths to compact ids, shared across builds.
    path_interner: Arc<RwLock<crate::types::PathInterner>>,
    /// URIs that last received cross-file diagnostics, so stale ones can be
    /// cleared on the next publish (see `on_change`).
    cross_file_diag_uris: Arc<RwLock<HashSet<Url>>>,
}
177
/// Spawns a background task that builds missing reference caches for library
/// sub-projects, then loads every available lib cache into `sub_caches`.
///
/// `loading_flag` acts as a re-entrancy guard: if a load is already in
/// flight this function returns without spawning. The flag is cleared on
/// every exit path of the spawned task. Progress is reported to the client
/// via `emit_sub_caches_loaded`.
fn spawn_load_lib_sub_caches_task(
    foundry_config: crate::config::FoundryConfig,
    sub_caches: Arc<RwLock<Vec<Arc<goto::CachedBuild>>>>,
    loading_flag: Arc<std::sync::atomic::AtomicBool>,
    path_interner: Arc<RwLock<crate::types::PathInterner>>,
    client: Client,
) {
    // Atomically claim the "loading" slot; bail if another task holds it.
    if loading_flag
        .compare_exchange(
            false,
            true,
            std::sync::atomic::Ordering::SeqCst,
            std::sync::atomic::Ordering::SeqCst,
        )
        .is_err()
    {
        return;
    }
    tokio::spawn(async move {
        // Discovery touches the filesystem, so run it off the async threads.
        let cfg = foundry_config.clone();
        let discovered = tokio::task::spawn_blocking(move || {
            crate::project_cache::discover_lib_sub_projects(&cfg)
        })
        .await
        .unwrap_or_else(|_| crate::project_cache::DiscoveredLibs {
            cached: Vec::new(),
            uncached: Vec::new(),
        });

        let sub_caches_start = std::time::Instant::now();
        // First build caches for libs that don't have one yet.
        spawn_and_collect_sub_cache_builds(&discovered.uncached, &client, &path_interner).await;

        // Re-discover so freshly built caches are included in the load pass.
        let cfg2 = foundry_config.clone();
        let all_cached =
            tokio::task::spawn_blocking(move || crate::project_cache::discover_lib_caches(&cfg2))
                .await
                .unwrap_or_default();

        if all_cached.is_empty() {
            // Still emit the "loaded" progress event so the client isn't left hanging.
            emit_sub_caches_loaded(&client, 0, 0, sub_caches_start.elapsed().as_secs_f64()).await;
            loading_flag.store(false, std::sync::atomic::Ordering::SeqCst);
            return;
        }

        // Deserialize each cache on a blocking thread; failures are skipped.
        let mut loaded = Vec::new();
        for sub_root in &all_cached {
            let root = sub_root.clone();
            let build =
                tokio::task::spawn_blocking(move || crate::project_cache::load_lib_cache(&root))
                    .await
                    .ok()
                    .flatten();
            if let Some(build) = build {
                {
                    // Register every source path of the build in the shared interner.
                    let mut interner = path_interner.write().await;
                    for (_solc_id, path) in &build.id_to_path_map {
                        interner.intern(path);
                    }
                }
                loaded.push(Arc::new(build));
            }
        }

        let count = loaded.len();
        let total: usize = loaded.iter().map(|b| b.nodes.len()).sum();
        let elapsed = sub_caches_start.elapsed().as_secs_f64();

        if !loaded.is_empty() {
            client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "sub-caches: loaded {} lib caches ({} total sources, {:.1}s total)",
                        count, total, elapsed,
                    ),
                )
                .await;
            // Replace the shared sub-cache list wholesale (no merging).
            *sub_caches.write().await = loaded;
        }

        emit_sub_caches_loaded(&client, count, total, elapsed).await;
        loading_flag.store(false, std::sync::atomic::Ordering::SeqCst);
    });
}
272
/// Builds reference caches for library sub-projects that do not have one
/// yet, running at most `available_parallelism` solc index jobs at once and
/// persisting each successful build to disk. Empty builds and failures are
/// logged and skipped; cache-save errors are intentionally ignored.
async fn spawn_and_collect_sub_cache_builds(
    roots: &[std::path::PathBuf],
    client: &Client,
    path_interner: &Arc<RwLock<crate::types::PathInterner>>,
) {
    if roots.is_empty() {
        return;
    }
    let max_parallel = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);
    // The semaphore bounds concurrent solc indexing runs.
    let semaphore = Arc::new(tokio::sync::Semaphore::new(max_parallel));
    client
        .log_message(
            MessageType::INFO,
            format!(
                "sub-cache: building {} libs (max {max_parallel} parallel)",
                roots.len()
            ),
        )
        .await;
    let mut join_set = tokio::task::JoinSet::new();
    for sub_root in roots {
        // Human-readable name for log lines; falls back to the full path.
        let sub_name = sub_root
            .file_name()
            .map(|n| n.to_string_lossy().into_owned())
            .unwrap_or_else(|| sub_root.display().to_string());
        let sub_config =
            crate::config::load_foundry_config_from_toml(&sub_root.join("foundry.toml"));
        let sem = semaphore.clone();
        join_set.spawn(async move {
            // Permit is held for the duration of the index run.
            let _permit = sem.acquire().await.expect("semaphore closed");
            let sub_start = std::time::Instant::now();
            let result = crate::solc::solc_project_index_ast_only(&sub_config, None).await;
            let elapsed = sub_start.elapsed().as_secs_f64();
            (sub_name, sub_config, result, elapsed)
        });
    }

    // Collect results as they complete; panicked/cancelled tasks are skipped.
    while let Some(join_result) = join_set.join_next().await {
        let Ok((sub_name, sub_config, result, elapsed)) = join_result else {
            continue;
        };
        match result {
            Ok(ast_data) => {
                // Intern paths while constructing the build, then release the
                // interner lock before the (potentially slow) save below.
                let mut interner = path_interner.write().await;
                let build = crate::goto::CachedBuild::new(ast_data, 0, Some(&mut interner));
                drop(interner);
                let source_count = build.nodes.len();
                if source_count == 0 {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("sub-cache: {sub_name} produced 0 sources"),
                        )
                        .await;
                    continue;
                }
                // Persist the cache; best-effort, result deliberately dropped.
                let cfg_for_save = sub_config.clone();
                let build_for_save = build.clone();
                let _ = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
            }
            Err(e) => {
                client
                    .log_message(
                        MessageType::WARNING,
                        format!("sub-cache: {sub_name} failed ({elapsed:.1}s): {e}"),
                    )
                    .await;
            }
        }
    }
}
359
360async fn emit_sub_caches_loaded(client: &Client, count: usize, total: usize, elapsed: f64) {
363 let token = NumberOrString::String("solidity/subCachesLoaded".to_string());
364 let _ = client
365 .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
366 token: token.clone(),
367 })
368 .await;
369 client
370 .send_notification::<notification::Progress>(ProgressParams {
371 token: token.clone(),
372 value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
373 title: "Sub-caches loaded".to_string(),
374 message: Some(format!(
375 "{count} lib caches ({total} sources) in {elapsed:.1}s",
376 )),
377 cancellable: Some(false),
378 percentage: None,
379 })),
380 })
381 .await;
382 client
383 .send_notification::<notification::Progress>(ProgressParams {
384 token,
385 value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
386 message: Some(format!("Loaded {count} lib caches ({total} sources)",)),
387 })),
388 })
389 .await;
390}
391
392impl ForgeLsp {
    /// Constructs the server state with empty caches and cleared flags.
    ///
    /// `use_solar` selects the solar diagnostics runner instead of forge;
    /// `use_solc` enables the direct-solc build/AST code paths elsewhere.
    pub fn new(client: Client, use_solar: bool, use_solc: bool) -> Self {
        // Pick the diagnostics backend once; it is shared behind a trait object.
        let compiler: Arc<dyn Runner> = if use_solar {
            Arc::new(crate::solar_runner::SolarRunner)
        } else {
            Arc::new(ForgeRunner)
        };
        let ast_cache = Arc::new(RwLock::new(HashMap::new()));
        let text_cache = Arc::new(RwLock::new(HashMap::new()));
        let completion_cache = Arc::new(RwLock::new(HashMap::new()));
        let lint_config = Arc::new(RwLock::new(LintConfig::default()));
        let foundry_config = Arc::new(RwLock::new(FoundryConfig::default()));
        let client_capabilities = Arc::new(RwLock::new(None));
        let settings = Arc::new(RwLock::new(Settings::default()));
        Self {
            client,
            compiler,
            ast_cache,
            text_cache,
            completion_cache,
            lint_config,
            foundry_config,
            client_capabilities,
            settings,
            use_solc,
            semantic_token_cache: Arc::new(RwLock::new(HashMap::new())),
            semantic_token_id: Arc::new(AtomicU64::new(0)),
            root_uri: Arc::new(RwLock::new(None)),
            project_indexed: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_dirty: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_sync_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_sync_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_force_full_rebuild: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_upsert_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_upsert_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            project_cache_changed_files: Arc::new(RwLock::new(HashSet::new())),
            project_cache_upsert_files: Arc::new(RwLock::new(HashSet::new())),
            pending_create_scaffold: Arc::new(RwLock::new(HashSet::new())),
            settings_from_init: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            did_save_workers: Arc::new(RwLock::new(HashMap::new())),
            // Error-code -> code-action table is loaded once and shared read-only.
            code_action_db: Arc::new(crate::code_actions::load()),
            sub_caches: Arc::new(RwLock::new(Vec::new())),
            sub_caches_loading: Arc::new(std::sync::atomic::AtomicBool::new(false)),
            path_interner: Arc::new(RwLock::new(crate::types::PathInterner::new())),
            cross_file_diag_uris: Arc::new(RwLock::new(HashSet::new())),
        }
    }
439
    /// Loads the foundry configuration that applies to `file_path` via
    /// `config::load_foundry_config`.
    ///
    /// NOTE(review): declared `async` but contains no awaits; the underlying
    /// load presumably does blocking file I/O on the async thread — consider
    /// `spawn_blocking` if this shows up in traces. TODO confirm.
    async fn foundry_config_for_file(&self, file_path: &std::path::Path) -> FoundryConfig {
        config::load_foundry_config(file_path)
    }
450
451 async fn project_cache_key(&self) -> Option<String> {
456 if let Some(uri) = self.root_uri.read().await.as_ref() {
457 return Some(uri.to_string());
458 }
459
460 let mut root = self.foundry_config.read().await.root.clone();
461 if !root.is_absolute()
462 && let Ok(cwd) = std::env::current_dir()
463 {
464 root = cwd.join(root);
465 }
466 if !root.is_dir() {
467 root = root.parent()?.to_path_buf();
468 }
469 Url::from_directory_path(root).ok().map(|u| u.to_string())
470 }
471
472 fn spawn_load_lib_sub_caches(&self) {
481 let foundry_config = self.foundry_config.clone();
482 let sub_caches = self.sub_caches.clone();
483 let loading_flag = self.sub_caches_loading.clone();
484 let path_interner = self.path_interner.clone();
485 let client = self.client.clone();
486 tokio::spawn(async move {
487 let cfg = foundry_config.read().await.clone();
488 spawn_load_lib_sub_caches_task(cfg, sub_caches, loading_flag, path_interner, client);
489 });
490 }
491
492 async fn invalidate_lib_sub_caches_if_affected(&self, changed_paths: &[std::path::PathBuf]) {
496 let config = self.foundry_config.read().await.clone();
497 let affected = changed_paths.iter().any(|p| {
498 config
499 .libs
500 .iter()
501 .any(|lib_name| p.starts_with(config.root.join(lib_name)))
502 });
503 if affected {
504 self.sub_caches.write().await.clear();
505 self.spawn_load_lib_sub_caches();
506 }
507 }
508
    /// Returns the project-wide cached build, warm-loading it from the
    /// on-disk reference cache when it is not yet in memory, and reconciling
    /// files that changed since the cache was written via a scoped reindex.
    ///
    /// Returns `None` when no project key can be derived, when solc or
    /// full-project scanning is disabled, when the project root is missing,
    /// or when no usable on-disk cache exists.
    async fn ensure_project_cached_build(&self) -> Option<Arc<goto::CachedBuild>> {
        let root_key = self.project_cache_key().await?;
        // Fast path: the project build is already cached in memory.
        if let Some(existing) = self.ast_cache.read().await.get(&root_key).cloned() {
            self.spawn_load_lib_sub_caches();
            return Some(existing);
        }

        let settings = self.settings.read().await.clone();
        if !self.use_solc || !settings.project_index.full_project_scan {
            return None;
        }

        let foundry_config = self.foundry_config.read().await.clone();
        if !foundry_config.root.is_dir() {
            return None;
        }

        // Deserialize the reference cache off the async threads.
        let cache_mode = settings.project_index.cache_mode.clone();
        let cfg_for_load = foundry_config.clone();
        let load_res = tokio::task::spawn_blocking(move || {
            crate::project_cache::load_reference_cache_with_report(&cfg_for_load, cache_mode, true)
        })
        .await;

        let Ok(report) = load_res else {
            return None;
        };
        let Some(build) = report.build else {
            return None;
        };

        // Install the loaded build and mark the project as indexed so the
        // background indexer in on_change does not start a duplicate run.
        let source_count = build.nodes.len();
        let complete = report.complete;
        let duration_ms = report.duration_ms;
        let reused = report.file_count_reused;
        let hashed = report.file_count_hashed;
        let arc = Arc::new(build);
        self.ast_cache
            .write()
            .await
            .insert(root_key.clone().into(), arc.clone());
        self.project_indexed
            .store(true, std::sync::atomic::Ordering::Relaxed);
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "references warm-load: project cache loaded (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                    source_count, reused, hashed, complete, duration_ms
                ),
            )
            .await;

        self.spawn_load_lib_sub_caches();

        // A complete cache needs no reconciliation.
        if complete {
            return Some(arc);
        }

        // Partial cache: find files that changed since the cache was written.
        let cfg_for_diff = foundry_config.clone();
        let changed = tokio::task::spawn_blocking(move || {
            crate::project_cache::changed_files_since_v2_cache(&cfg_for_diff, true)
        })
        .await
        .ok()
        .and_then(Result::ok)
        .unwrap_or_default();

        if changed.is_empty() {
            return Some(arc);
        }

        // Expand the changed set to everything that (transitively) imports it.
        let remappings = crate::solc::resolve_remappings(&foundry_config).await;
        let cfg_for_plan = foundry_config.clone();
        let changed_for_plan = changed.clone();
        let remappings_for_plan = remappings.clone();
        let affected_set = tokio::task::spawn_blocking(move || {
            compute_reverse_import_closure(&cfg_for_plan, &changed_for_plan, &remappings_for_plan)
        })
        .await
        .ok()
        .unwrap_or_default();
        let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
        if affected_files.is_empty() {
            // Closure computation failed or returned nothing: fall back to
            // reindexing just the changed files themselves.
            affected_files = changed;
        }

        // Reindex only the affected files, using in-editor text where newer.
        let text_cache_snapshot = self.text_cache.read().await.clone();
        match crate::solc::solc_project_index_scoped(
            &foundry_config,
            Some(&self.client),
            Some(&text_cache_snapshot),
            &affected_files,
        )
        .await
        {
            Ok(ast_data) => {
                let scoped_build = Arc::new(crate::goto::CachedBuild::new(
                    ast_data,
                    0,
                    Some(&mut *self.path_interner.write().await),
                ));
                let mut merge_error: Option<String> = None;
                // Merge the scoped build into the existing project build while
                // holding the cache write lock, so concurrent readers see either
                // the old or the fully merged build — never a partial state.
                let merged = {
                    let mut cache = self.ast_cache.write().await;
                    let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                        let mut merged = (*existing).clone();
                        match merge_scoped_cached_build(&mut merged, (*scoped_build).clone()) {
                            Ok(_) => Arc::new(merged),
                            Err(e) => {
                                // Merge failed: fall back to the scoped build alone.
                                merge_error = Some(e);
                                scoped_build.clone()
                            }
                        }
                    } else {
                        scoped_build.clone()
                    };
                    cache.insert(root_key.clone().into(), merged.clone());
                    merged
                };
                if let Some(e) = merge_error {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!(
                                "references warm-load reconcile: merge failed, using scoped build: {}",
                                e
                            ),
                        )
                        .await;
                }

                // Persist the reconciled build back to disk (best-effort).
                let cfg_for_save = foundry_config.clone();
                let build_for_save = (*merged).clone();
                let save_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
                if let Ok(Ok(report)) = save_res {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-load reconcile: saved cache (affected={}, hashed_files={}, duration={}ms)",
                                affected_files.len(),
                                report.file_count_hashed,
                                report.duration_ms
                            ),
                        )
                        .await;
                }
                Some(merged)
            }
            Err(e) => {
                // Scoped reindex failed: keep serving the (partial) loaded cache.
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!(
                            "references warm-load reconcile: scoped reindex failed: {}",
                            e
                        ),
                    )
                    .await;
                Some(arc)
            }
        }
    }
688
    /// Persists the in-memory project-wide build to the on-disk reference
    /// cache. `reason` appears only in log messages. No-ops when the
    /// solc/full-project-scan paths are disabled or no project build is
    /// currently cached.
    async fn flush_project_cache_to_disk(&self, reason: &str) {
        if !self.use_solc || !self.settings.read().await.project_index.full_project_scan {
            return;
        }
        let Some(root_key) = self.project_cache_key().await else {
            return;
        };
        // Take a snapshot of the project build under a short read lock.
        let build = {
            let cache = self.ast_cache.read().await;
            cache.get(&root_key).cloned()
        };
        let Some(build) = build else {
            return;
        };

        // Serialization is CPU/IO heavy; run it on a blocking thread.
        let foundry_config = self.foundry_config.read().await.clone();
        let build_for_save = (*build).clone();
        let res = tokio::task::spawn_blocking(move || {
            crate::project_cache::save_reference_cache_with_report(
                &foundry_config,
                &build_for_save,
                None,
            )
        })
        .await;

        // Log outcome: success, save error, or join (task) error.
        match res {
            Ok(Ok(report)) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "project cache flush ({}): saved hashed_files={}, duration={}ms",
                            reason, report.file_count_hashed, report.duration_ms
                        ),
                    )
                    .await;
            }
            Ok(Err(e)) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) failed: {}", reason, e),
                    )
                    .await;
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) task failed: {}", reason, e),
                    )
                    .await;
            }
        }
    }
748
    /// Core document-change pipeline: compiles/lints the changed document,
    /// publishes diagnostics (including cross-file compile errors), refreshes
    /// the AST/completion/text caches on success, and — on the first
    /// successful solc build — kicks off background full-project indexing.
    async fn on_change(&self, params: TextDocumentItem) {
        let uri = params.uri.clone();
        let version = params.version;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return;
            }
        };

        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path")
                    .await;
                return;
            }
        };

        // Clear existing diagnostics up front so stale squiggles disappear
        // while the (possibly slow) build runs.
        self.client
            .publish_diagnostics(uri.clone(), vec![], None)
            .await;

        // Decide whether to lint, snapshotting lint settings under the locks.
        let (should_lint, lint_settings) = {
            let lint_cfg = self.lint_config.read().await;
            let settings = self.settings.read().await;
            let enabled = lint_cfg.should_lint(&file_path) && settings.lint.enabled;
            let ls = settings.lint.clone();
            (enabled, ls)
        };

        // Produce (lint diagnostics, build diagnostics, AST). Four paths:
        // solc vs forge backend, crossed with lint-enabled vs lint-skipped.
        // The solc paths fall back to forge build if solc itself fails.
        let (lint_result, build_result, ast_result) = if self.use_solc {
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            let solc_future = crate::solc::solc_ast(
                path_str,
                &foundry_cfg,
                Some(&self.client),
                Some(&params.text),
            );

            if should_lint {
                // Run lint and solc concurrently.
                let (lint, solc) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    solc_future
                );
                match solc {
                    Ok(data) => {
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (Some(lint), Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        // solc failed: fall back to the forge-based pipeline.
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge build: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (Some(lint), build, ast)
                    }
                }
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                match solc_future.await {
                    Ok(data) => {
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (None, Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        // solc failed: fall back to the forge-based pipeline.
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge build: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (None, build, ast)
                    }
                }
            }
        } else {
            if should_lint {
                // Forge backend: lint, build diagnostics, and AST in parallel.
                let (lint, build, ast) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (Some(lint), build, ast)
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                let (build, ast) = tokio::join!(
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (None, build, ast)
            }
        };

        // Errors reported against this file's diagnostics.
        let has_file_local_errors = matches!(
            &build_result,
            Ok(diagnostics) if diagnostics.iter().any(|d| d.severity == Some(DiagnosticSeverity::ERROR))
        );
        // Errors anywhere in the solc output (may belong to other files).
        let has_solc_errors = ast_result.as_ref().is_ok_and(|data| {
            data.get("errors")
                .and_then(|v| v.as_array())
                .is_some_and(|errs| {
                    errs.iter().any(|e| {
                        e.get("severity")
                            .and_then(|s| s.as_str())
                            .is_some_and(|s| s == "error")
                    })
                })
        });
        let build_succeeded = !has_file_local_errors && !has_solc_errors;

        // Map solc errors that belong to OTHER files onto their own URIs.
        let cross_file_diags = if has_solc_errors {
            if let Ok(ref data) = ast_result {
                let cfg = self.foundry_config_for_file(&file_path).await;
                crate::build::cross_file_error_diagnostics(
                    data,
                    &file_path,
                    &cfg.root,
                    &cfg.ignored_error_codes,
                )
            } else {
                std::collections::HashMap::new()
            }
        } else {
            std::collections::HashMap::new()
        };

        if build_succeeded {
            if let Ok(ast_data) = ast_result {
                // An empty "sources" map means the build produced nothing
                // usable; keep whatever AST we already have cached.
                let sources_empty = ast_data
                    .get("sources")
                    .and_then(|v| v.as_object())
                    .map_or(true, |m| m.is_empty());

                if sources_empty {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "Build produced empty AST, keeping existing cache",
                        )
                        .await;
                } else {
                    // Refresh the AST cache for this document.
                    let cached_build = goto::CachedBuild::new(
                        ast_data,
                        version,
                        Some(&mut *self.path_interner.write().await),
                    );
                    let cached_build = Arc::new(cached_build);
                    let mut cache = self.ast_cache.write().await;
                    cache.insert(uri.to_string().into(), cached_build.clone());
                    drop(cache);

                    {
                        // Keep the completion cache in sync with the new build.
                        let mut cc = self.completion_cache.write().await;
                        cc.insert(
                            uri.to_string().into(),
                            cached_build.completion_cache.clone(),
                        );
                    }
                }
            } else if let Err(e) = ast_result {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("Build succeeded but failed to get AST: {e}"),
                    )
                    .await;
            }
        } else {
            // On any error, keep the last good AST so navigation keeps working.
            let reason = if has_solc_errors && !has_file_local_errors {
                "Cross-file compilation errors detected, keeping existing AST cache"
            } else {
                "Build errors detected, keeping existing AST cache"
            };
            self.client.log_message(MessageType::INFO, reason).await;
        }

        {
            // Record the document text, but never overwrite a newer version
            // (change events can arrive out of order with this handler).
            let mut text_cache = self.text_cache.write().await;
            let uri_str = uri.to_string();
            let existing_version = text_cache.get(&uri_str).map(|(v, _)| *v).unwrap_or(-1);
            if version >= existing_version {
                text_cache.insert(uri_str.into(), (version, params.text));
            }
        }

        let mut all_diagnostics = vec![];

        if let Some(lint_result) = lint_result {
            match lint_result {
                Ok(mut lints) => {
                    // Drop lint diagnostics whose code is on the exclude list.
                    if !lint_settings.exclude.is_empty() {
                        lints.retain(|d| {
                            if let Some(NumberOrString::String(code)) = &d.code {
                                !lint_settings.exclude.iter().any(|ex| ex == code)
                            } else {
                                true
                            }
                        });
                    }
                    if !lints.is_empty() {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                format!("found {} lint diagnostics", lints.len()),
                            )
                            .await;
                    }
                    all_diagnostics.append(&mut lints);
                }
                Err(e) => {
                    self.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Forge lint diagnostics failed: {e}"),
                        )
                        .await;
                }
            }
        }

        match build_result {
            Ok(mut builds) => {
                if !builds.is_empty() {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("found {} build diagnostics", builds.len()),
                        )
                        .await;
                }
                all_diagnostics.append(&mut builds);
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Forge build diagnostics failed: {e}"),
                    )
                    .await;
            }
        }

        // Some clients render empty-message diagnostics badly; give them text.
        for diag in &mut all_diagnostics {
            if diag.message.is_empty() {
                diag.message = "Unknown issue".to_string();
            }
        }

        self.client
            .publish_diagnostics(uri, all_diagnostics, None)
            .await;

        {
            // Publish cross-file diagnostics and clear diagnostics from files
            // that had them last round but no longer do.
            let mut prev_uris = self.cross_file_diag_uris.write().await;
            let mut new_uris = HashSet::new();

            for (abs_path, diags) in &cross_file_diags {
                if let Ok(file_uri) = Url::from_file_path(abs_path) {
                    self.client
                        .publish_diagnostics(file_uri.clone(), diags.clone(), None)
                        .await;
                    new_uris.insert(file_uri);
                }
            }

            for stale_uri in prev_uris.difference(&new_uris) {
                self.client
                    .publish_diagnostics(stale_uri.clone(), vec![], None)
                    .await;
            }

            *prev_uris = new_uris;
        }

        if build_succeeded {
            // Fire-and-forget: ask the client to re-request inlay hints.
            let client = self.client.clone();
            tokio::spawn(async move {
                let _ = client.inlay_hint_refresh().await;
            });
        }

        // First successful solc build with full-project scanning enabled:
        // kick off background project indexing exactly once (guarded by
        // the `project_indexed` flag, set eagerly before spawning).
        if build_succeeded
            && self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && !self
                .project_indexed
                .load(std::sync::atomic::Ordering::Relaxed)
        {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();
            let path_interner = self.path_interner.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Report indexing progress via the work-done progress protocol.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the on-disk reference cache first.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone().into(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete cache means no fresh index is needed.
                            if report.complete {
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache miss or partial: run a full solc index of the project.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(
                            ast_data,
                            0,
                            Some(&mut *path_interner.write().await),
                        );
                        // Carry over entries only present in the previous build
                        // (e.g. loaded from a partial cache above).
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        let cached_build = Arc::new(new_build);
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone().into(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!("project index: cached {} source files", source_count),
                            )
                            .await;

                        // Persist the new index to disk in the background.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index: cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: failed to persist cache: {e}"),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: cache save task failed: {e}"),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("project index failed: {e}"))
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some("Indexing failed".to_string()),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
1351
    /// Return the cached AST build for `uri`, optionally compiling on a cache miss.
    ///
    /// Fast path is a read-locked lookup in `ast_cache`. On a miss with
    /// `insert_on_miss == true`, the file is compiled — via solc when
    /// `use_solc` is set (falling back to the bundled compiler on solc
    /// failure), otherwise via the bundled compiler directly — and the
    /// resulting build is inserted into the cache.
    ///
    /// Returns `None` when: the cache misses and `insert_on_miss` is false,
    /// the path is not valid UTF-8, or compilation fails (the error is
    /// logged to the client).
    async fn get_or_fetch_build(
        &self,
        uri: &Url,
        file_path: &std::path::Path,
        insert_on_miss: bool,
    ) -> Option<Arc<goto::CachedBuild>> {
        let uri_str = uri.to_string();

        // Fast path: scoped so the read lock is released before any further await.
        {
            let cache = self.ast_cache.read().await;
            if let Some(cached) = cache.get(&uri_str) {
                return Some(cached.clone());
            }
        }

        if !insert_on_miss {
            return None;
        }

        // Compilers take string paths; bail on non-UTF-8 paths.
        let path_str = file_path.to_str()?;
        let ast_result = if self.use_solc {
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            // Prefer the in-memory (possibly unsaved) document text over disk.
            let cached_text = {
                let tc = self.text_cache.read().await;
                tc.get(&uri_str).map(|(_, c)| c.clone())
            };
            match crate::solc::solc_ast(
                path_str,
                &foundry_cfg,
                Some(&self.client),
                cached_text.as_deref(),
            )
            .await
            {
                Ok(data) => Ok(data),
                // solc failed: fall back to the bundled compiler.
                Err(_) => self.compiler.ast(path_str).await,
            }
        } else {
            self.compiler.ast(path_str).await
        };
        match ast_result {
            Ok(data) => {
                // NOTE(review): two tasks can both miss the read-locked lookup
                // and compile concurrently; the later insert overwrites the
                // earlier one, which appears benign but is duplicated work.
                let build = Arc::new(goto::CachedBuild::new(
                    data,
                    0,
                    Some(&mut *self.path_interner.write().await),
                ));
                let mut cache = self.ast_cache.write().await;
                cache.insert(uri_str.clone().into(), build.clone());
                Some(build)
            }
            Err(e) => {
                self.client
                    .log_message(MessageType::ERROR, format!("failed to get AST: {e}"))
                    .await;
                None
            }
        }
    }
1430
    /// Return the bytes of `uri`, preferring the in-memory text cache over disk.
    ///
    /// Falls back to reading `file_path` when the document is not in the
    /// text cache. A `NotFound` read error is logged at INFO level (the file
    /// may legitimately not exist yet, e.g. during a create/rename); any
    /// other read error is logged at ERROR level. Returns `None` on any
    /// read failure.
    async fn get_source_bytes(&self, uri: &Url, file_path: &std::path::Path) -> Option<Vec<u8>> {
        {
            let text_cache = self.text_cache.read().await;
            if let Some((_, content)) = text_cache.get(&uri.to_string()) {
                return Some(content.as_bytes().to_vec());
            }
        }
        // NOTE(review): blocking std::fs::read inside an async fn; consider
        // tokio::fs::read or spawn_blocking if large files become common here.
        match std::fs::read(file_path) {
            Ok(bytes) => Some(bytes),
            Err(e) => {
                if e.kind() == std::io::ErrorKind::NotFound {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("file not found yet (transient): {e}"),
                        )
                        .await;
                } else {
                    self.client
                        .log_message(MessageType::ERROR, format!("failed to read file: {e}"))
                        .await;
                }
                None
            }
        }
    }
1461}
1462
1463fn update_imports_on_delete_enabled(settings: &crate::config::Settings) -> bool {
1464 settings.file_operations.update_imports_on_delete
1465}
1466
/// Queue sync work and try to claim the single worker slot.
///
/// Always records that work is pending; returns `true` only for the caller
/// that flips `running` from `false` to `true` (i.e. the one that should
/// spawn the worker task).
fn start_or_mark_project_cache_sync_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Mark work as queued regardless of whether a worker is already alive.
    pending.store(true, Ordering::Release);
    // Claim the worker slot; exactly one caller wins the false -> true race.
    let claim = running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
1476
/// Atomically consume the sync pending flag, reporting whether work was queued.
fn take_project_cache_sync_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    let had_work = pending.swap(false, Ordering::AcqRel);
    had_work
}
1480
/// Release the sync worker slot; reclaim it if work raced in meanwhile.
///
/// Returns `true` when the caller re-acquired the slot (it should keep
/// running), `false` when it may exit.
fn stop_project_cache_sync_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Free the slot first so a racing producer can start a fresh worker.
    running.store(false, Ordering::Release);
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    // Work arrived during shutdown: try to take the slot back ourselves.
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1491
/// Atomically claim the dirty flag: succeeds only when it was set,
/// clearing it in the same operation so exactly one caller wins.
fn try_claim_project_cache_dirty(dirty: &std::sync::atomic::AtomicBool) -> bool {
    let claim = dirty.compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
1497
/// Queue upsert work and try to claim the single upsert worker slot.
///
/// Always records that work is pending; returns `true` only for the caller
/// that flips `running` from `false` to `true` and should spawn the worker.
fn start_or_mark_project_cache_upsert_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Record queued work regardless of worker state.
    pending.store(true, Ordering::Release);
    // Exactly one caller wins the false -> true race for the worker slot.
    let claim = running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
1507
/// Atomically consume the upsert pending flag, reporting whether work was queued.
fn take_project_cache_upsert_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    let had_work = pending.swap(false, Ordering::AcqRel);
    had_work
}
1511
/// Release the upsert worker slot; reclaim it if work raced in meanwhile.
///
/// Returns `true` when the caller re-acquired the slot and should continue,
/// `false` when it may exit.
fn stop_project_cache_upsert_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Free the slot first so a racing producer can start a fresh worker.
    running.store(false, Ordering::Release);
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    // Work arrived during shutdown: try to take the slot back ourselves.
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1522
/// Normalize a path purely lexically, without touching the filesystem.
///
/// `.` components are dropped and `..` pops the previous component (a `..`
/// with nothing to pop — e.g. at the start of a relative path — is simply
/// discarded). Root and prefix components are kept as-is.
fn lexical_normalize(path: &Path) -> PathBuf {
    path.components().fold(PathBuf::new(), |mut acc, comp| {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => {
                // pop() is a no-op on an empty or root-only buffer.
                acc.pop();
            }
            Component::RootDir | Component::Prefix(_) => acc.push(comp.as_os_str()),
            Component::Normal(seg) => acc.push(seg),
        }
        acc
    })
}
1538
1539fn resolve_import_spec_to_abs(
1540 project_root: &Path,
1541 importer_abs: &Path,
1542 import_path: &str,
1543 remappings: &[String],
1544) -> Option<PathBuf> {
1545 if import_path.starts_with("./") || import_path.starts_with("../") {
1546 let base = importer_abs.parent()?;
1547 return Some(lexical_normalize(&base.join(import_path)));
1548 }
1549
1550 for remap in remappings {
1551 let mut it = remap.splitn(2, '=');
1552 let prefix = it.next().unwrap_or_default();
1553 let target = it.next().unwrap_or_default();
1554 if prefix.is_empty() || target.is_empty() {
1555 continue;
1556 }
1557 if import_path.starts_with(prefix) {
1558 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
1559 return Some(lexical_normalize(
1560 &project_root.join(format!("{target}{suffix}")),
1561 ));
1562 }
1563 }
1564
1565 Some(lexical_normalize(&project_root.join(import_path)))
1566}
1567
/// Compute the set of source files transitively affected by edits to `changed_abs`.
///
/// Scans every discovered source file to build a reverse import graph
/// (imported file -> set of importers), then BFS-expands the changed set
/// through that graph. Only paths under `config.root` participate, and the
/// result is filtered down to known source files that still exist on disk.
/// Performs blocking filesystem reads; callers run it on a blocking thread.
fn compute_reverse_import_closure(
    config: &FoundryConfig,
    changed_abs: &[PathBuf],
    remappings: &[String],
) -> HashSet<PathBuf> {
    let source_files = crate::solc::discover_source_files(config);
    let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();

    for importer in &source_files {
        // Unreadable files simply contribute no edges.
        let Ok(bytes) = std::fs::read(importer) else {
            continue;
        };
        for imp in links::ts_find_imports(&bytes) {
            let Some(imported_abs) =
                resolve_import_spec_to_abs(&config.root, importer, &imp.path, remappings)
            else {
                continue;
            };
            // Ignore imports resolving outside the project (e.g. bad remaps).
            if !imported_abs.starts_with(&config.root) {
                continue;
            }
            reverse_edges
                .entry(imported_abs)
                .or_default()
                .insert(importer.clone());
        }
    }

    let mut affected: HashSet<PathBuf> = HashSet::new();
    let mut queue: std::collections::VecDeque<PathBuf> = std::collections::VecDeque::new();

    // Seed the closure with the (normalized) changed files inside the root.
    for path in changed_abs {
        if !path.starts_with(&config.root) {
            continue;
        }
        let normalized = lexical_normalize(path);
        if affected.insert(normalized.clone()) {
            queue.push_back(normalized);
        }
    }

    // BFS: anything importing an affected file becomes affected as well.
    while let Some(current) = queue.pop_front() {
        if let Some(importers) = reverse_edges.get(&current) {
            for importer in importers {
                if affected.insert(importer.clone()) {
                    queue.push_back(importer.clone());
                }
            }
        }
    }

    // Keep only real, known source files (drops deleted files and non-sources).
    let source_set: HashSet<PathBuf> = source_files.into_iter().collect();
    affected
        .into_iter()
        .filter(|p| source_set.contains(p) && p.is_file())
        .collect()
}
1626
/// Extract the trailing file id from a solc `src` string (`start:length:file`).
///
/// A string without any `:` is returned whole; an empty trailing segment
/// (or empty input) yields `None`.
fn src_file_id(src: &str) -> Option<&str> {
    let id = match src.rsplit_once(':') {
        Some((_, tail)) => tail,
        None => src,
    };
    (!id.is_empty()).then_some(id)
}
1630
1631fn doc_key_path(key: &hover::DocKey) -> Option<&str> {
1632 match key {
1633 hover::DocKey::Contract(k) | hover::DocKey::StateVar(k) | hover::DocKey::Method(k) => {
1634 k.split_once(':').map(|(path, _)| path)
1635 }
1636 hover::DocKey::Func(_) | hover::DocKey::Event(_) => None,
1637 }
1638}
1639
/// Merge a scoped (partial) re-index into an existing whole-project build.
///
/// `scoped` contains data only for the files that were re-indexed. Every
/// per-file index in `existing` is first purged of entries belonging to those
/// files, then extended with the scoped data. Returns the number of affected
/// source paths, or an error when a scoped declaration id collides with a
/// declaration living in an UNAFFECTED file — merging would then attach data
/// to the wrong file, so the caller should fall back to a full rebuild.
fn merge_scoped_cached_build(
    existing: &mut goto::CachedBuild,
    scoped: goto::CachedBuild,
) -> Result<usize, String> {
    // The affected set, keyed both as interned source-path strings and as
    // absolute paths: different indexes key their entries differently.
    let affected_paths: HashSet<String> = scoped.nodes.keys().map(|p| p.to_string()).collect();
    if affected_paths.is_empty() {
        return Ok(0);
    }
    let affected_abs_paths: HashSet<crate::types::AbsPath> =
        scoped.path_to_abs.values().cloned().collect();

    // Guard: a scoped decl id already owned by an unaffected file means the
    // compiler re-numbered node ids; bail out rather than corrupt the index.
    for scoped_id in scoped.decl_index.keys() {
        if existing.decl_index.contains_key(scoped_id)
            && let Some(path) = existing.node_id_to_source_path.get(scoped_id)
            && !affected_abs_paths.contains(path)
        {
            return Err(format!(
                "decl id collision for id={} in unaffected path {}",
                scoped_id, path
            ));
        }
    }

    // external_refs are keyed by `src` strings whose trailing file id must be
    // resolved against the PRE-purge id -> path map, so snapshot it first.
    let old_id_to_path = existing.id_to_path_map.clone();
    existing.external_refs.retain(|src, _| {
        src_file_id(src.as_str())
            .and_then(|fid| old_id_to_path.get(fid))
            .map(|path| !affected_paths.contains(path))
            // Unparseable or unknown srcs are conservatively kept.
            .unwrap_or(true)
    });
    // Purge string-keyed indexes of the affected files.
    existing
        .nodes
        .retain(|path, _| !affected_paths.contains(path.as_str()));
    existing
        .path_to_abs
        .retain(|path, _| !affected_paths.contains(path.as_str()));
    existing
        .id_to_path_map
        .retain(|_, path| !affected_paths.contains(path));

    // Purge abs-path-keyed indexes.
    existing
        .node_id_to_source_path
        .retain(|_, path| !affected_abs_paths.contains(path));
    // NOTE(review): node_id_to_source_path was purged just above, so decl ids
    // from affected files now resolve to None here and are RETAINED (only to
    // be overwritten below if the scoped build re-declares the same id).
    // Confirm whether this filter was meant to run before that purge.
    existing
        .decl_index
        .retain(|id, _| match existing.node_id_to_source_path.get(id) {
            Some(path) => !affected_abs_paths.contains(path),
            None => true,
        });
    existing
        .hint_index
        .retain(|abs_path, _| !affected_abs_paths.contains(abs_path));
    existing.doc_index.retain(|k, _| {
        doc_key_path(k)
            .map(|p| !affected_paths.contains(p))
            // Keys without a path component (Func/Event) are kept.
            .unwrap_or(true)
    });
    // Splice in the scoped data; extend overwrites any surviving duplicates.
    existing.nodes.extend(scoped.nodes);
    existing.path_to_abs.extend(scoped.path_to_abs);
    existing.external_refs.extend(scoped.external_refs);
    existing.id_to_path_map.extend(scoped.id_to_path_map);
    existing.decl_index.extend(scoped.decl_index);
    existing
        .node_id_to_source_path
        .extend(scoped.node_id_to_source_path);
    existing.hint_index.extend(scoped.hint_index);
    existing.doc_index.extend(scoped.doc_index);

    Ok(affected_paths.len())
}
1716
/// Handle `textDocument/didSave` for one document.
///
/// Resolves the saved text (client-provided, cached, or read from disk),
/// optionally re-applies the new-file scaffold for a still-blank freshly
/// created file, refreshes diagnostics via `on_change`, and spawns up to two
/// debounced background workers that keep the persisted project reference
/// cache in sync: a v2 per-file upsert worker and a full/scoped re-index
/// worker. Takes `this` by value so the whole handler can run on a spawned
/// task detached from the LSP dispatch loop.
async fn run_did_save(this: ForgeLsp, params: DidSaveTextDocumentParams) {
    this.client
        .log_message(MessageType::INFO, "file saved")
        .await;

    // Saved text: prefer what the client sent with the notification, then the
    // in-memory cache, then the file on disk. Bail out if none is available.
    let mut text_content = if let Some(text) = params.text {
        text
    } else {
        let cached = {
            let text_cache = this.text_cache.read().await;
            text_cache
                .get(params.text_document.uri.as_str())
                .map(|(_, content)| content.clone())
        };
        if let Some(content) = cached {
            content
        } else {
            match std::fs::read_to_string(params.text_document.uri.path()) {
                Ok(content) => content,
                Err(e) => {
                    this.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Failed to read file on save: {e}"),
                        )
                        .await;
                    return;
                }
            }
        }
    };

    // Scaffold recovery: a freshly created file still marked pending and saved
    // while blank gets the template applied via a workspace edit.
    let uri_str = params.text_document.uri.to_string();
    let template_on_create = this
        .settings
        .read()
        .await
        .file_operations
        .template_on_create;
    let needs_recover_scaffold = {
        let pending = this.pending_create_scaffold.read().await;
        // i.e. enabled, still pending, and the content is all whitespace.
        template_on_create
            && pending.contains(&uri_str)
            && !text_content.chars().any(|ch| !ch.is_whitespace())
    };
    if needs_recover_scaffold {
        let solc_version = this.foundry_config.read().await.solc_version.clone();
        if let Some(scaffold) =
            file_operations::generate_scaffold(&params.text_document.uri, solc_version.as_deref())
        {
            // Replace the whole (whitespace-only) document with the scaffold.
            let end = utils::byte_offset_to_position(&text_content, text_content.len());
            let edit = WorkspaceEdit {
                changes: Some(HashMap::from([(
                    params.text_document.uri.clone(),
                    vec![TextEdit {
                        range: Range {
                            start: Position::default(),
                            end,
                        },
                        new_text: scaffold.clone(),
                    }],
                )])),
                document_changes: None,
                change_annotations: None,
            };
            // Only commit local state once the client confirms it applied the edit.
            if this
                .client
                .apply_edit(edit)
                .await
                .as_ref()
                .is_ok_and(|r| r.applied)
            {
                text_content = scaffold.clone();
                let version = this
                    .text_cache
                    .read()
                    .await
                    .get(params.text_document.uri.as_str())
                    .map(|(v, _)| *v)
                    .unwrap_or_default();
                this.text_cache
                    .write()
                    .await
                    .insert(uri_str.clone().into(), (version, scaffold));
                this.pending_create_scaffold.write().await.remove(&uri_str);
                this.client
                    .log_message(
                        MessageType::INFO,
                        format!("didSave: recovered scaffold for {}", uri_str),
                    )
                    .await;
            }
        }
    }

    let version = this
        .text_cache
        .read()
        .await
        .get(params.text_document.uri.as_str())
        .map(|(version, _)| *version)
        .unwrap_or_default();

    // Record the saved path for both background workers before diagnostics.
    let saved_uri = params.text_document.uri.clone();
    if let Ok(saved_file_path) = saved_uri.to_file_path() {
        let saved_abs = saved_file_path.to_string_lossy().to_string();
        this.project_cache_changed_files
            .write()
            .await
            .insert(saved_abs.clone());
        this.project_cache_upsert_files
            .write()
            .await
            .insert(saved_abs);
    }
    this.on_change(TextDocumentItem {
        uri: saved_uri.clone(),
        text: text_content,
        version,
        language_id: "".to_string(),
    })
    .await;

    let settings_snapshot = this.settings.read().await.clone();

    // Debounced v2 cache upsert worker: only when solc indexing and full
    // project scans are enabled and the cache mode allows v2 writes. The
    // pending/running flag pair guarantees at most one live worker.
    if this.use_solc
        && settings_snapshot.project_index.full_project_scan
        && matches!(
            settings_snapshot.project_index.cache_mode,
            crate::config::ProjectIndexCacheMode::V2 | crate::config::ProjectIndexCacheMode::Auto
        )
    {
        if start_or_mark_project_cache_upsert_pending(
            &this.project_cache_upsert_pending,
            &this.project_cache_upsert_running,
        ) {
            let upsert_files = this.project_cache_upsert_files.clone();
            let ast_cache = this.ast_cache.clone();
            let client = this.client.clone();
            let running_flag = this.project_cache_upsert_running.clone();
            let pending_flag = this.project_cache_upsert_pending.clone();
            let foundry_config = this.foundry_config.read().await.clone();
            let root_key = this.project_cache_key().await;

            tokio::spawn(async move {
                loop {
                    // Debounce window: batch rapid consecutive saves.
                    tokio::time::sleep(std::time::Duration::from_millis(350)).await;

                    if !take_project_cache_upsert_pending(&pending_flag) {
                        // No queued work: stop, unless a save raced in and we
                        // reclaimed the worker slot.
                        if stop_project_cache_upsert_worker_or_reclaim(&pending_flag, &running_flag)
                        {
                            continue;
                        }
                        break;
                    }

                    // Drain the batched file set for this round.
                    let changed_paths: Vec<String> = {
                        let mut paths = upsert_files.write().await;
                        paths.drain().collect()
                    };
                    if changed_paths.is_empty() {
                        continue;
                    }

                    // An upsert needs the root project build as its data source.
                    let Some(ref rk) = root_key else {
                        continue;
                    };
                    let Some(root_build) = ast_cache.read().await.get(rk).cloned() else {
                        continue;
                    };

                    let cfg = foundry_config.clone();
                    let build = (*root_build).clone();
                    let changed = changed_paths.clone();

                    // Hashing/serialization is blocking work.
                    let res = tokio::task::spawn_blocking(move || {
                        crate::project_cache::upsert_reference_cache_v2_with_report(
                            &cfg, &build, &changed,
                        )
                    })
                    .await;

                    match res {
                        Ok(Ok(report)) => {
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project cache v2 upsert (debounced): touched_files={}, duration={}ms",
                                        report.file_count_hashed, report.duration_ms
                                    ),
                                )
                                .await;
                        }
                        Ok(Err(e)) => {
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("project cache v2 upsert: {e}"),
                                )
                                .await;
                        }
                        Err(e) => {
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("project cache v2 upsert task failed: {e}"),
                                )
                                .await;
                        }
                    }
                }
            });
        }
    }

    // Debounced project-index sync worker: keeps the in-memory root build and
    // the persisted cache consistent after edits. Prefers a scoped re-index of
    // the affected import closure when incremental mode is on; otherwise (or
    // when a full rebuild was explicitly requested) re-indexes everything.
    if this.use_solc
        && settings_snapshot.project_index.full_project_scan
        && this.project_cache_dirty.load(Ordering::Acquire)
    {
        if start_or_mark_project_cache_sync_pending(
            &this.project_cache_sync_pending,
            &this.project_cache_sync_running,
        ) {
            let foundry_config = this.foundry_config.read().await.clone();
            let root_key = this.project_cache_key().await;
            let ast_cache = this.ast_cache.clone();
            let text_cache = this.text_cache.clone();
            let client = this.client.clone();
            let dirty_flag = this.project_cache_dirty.clone();
            let running_flag = this.project_cache_sync_running.clone();
            let pending_flag = this.project_cache_sync_pending.clone();
            let changed_files = this.project_cache_changed_files.clone();
            let aggressive_scoped = settings_snapshot.project_index.incremental_edit_reindex;
            let force_full_rebuild_flag = this.project_cache_force_full_rebuild.clone();
            let path_interner = this.path_interner.clone();

            tokio::spawn(async move {
                loop {
                    // Longer debounce than the upsert worker: re-indexing is heavier.
                    tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                    if !take_project_cache_sync_pending(&pending_flag) {
                        if stop_project_cache_sync_worker_or_reclaim(&pending_flag, &running_flag) {
                            continue;
                        }
                        break;
                    }

                    if !try_claim_project_cache_dirty(&dirty_flag) {
                        continue;
                    }

                    // Preconditions; re-set the dirty flag so a later save retries.
                    let Some(cache_key) = &root_key else {
                        dirty_flag.store(true, Ordering::Release);
                        continue;
                    };
                    if !foundry_config.root.is_dir() {
                        dirty_flag.store(true, Ordering::Release);
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!(
                                    "didSave cache sync: invalid project root {}, deferring",
                                    foundry_config.root.display()
                                ),
                            )
                            .await;
                        continue;
                    }

                    let mut scoped_ok = false;

                    // A one-shot request (e.g. explicit reindex) forces the full path.
                    let force_full = force_full_rebuild_flag.swap(false, Ordering::AcqRel);

                    if aggressive_scoped && !force_full {
                        let changed_abs: Vec<PathBuf> = {
                            let mut changed = changed_files.write().await;
                            let drained =
                                changed.drain().map(PathBuf::from).collect::<Vec<PathBuf>>();
                            drained
                        };
                        if !changed_abs.is_empty() {
                            // Expand the changed set through the reverse import
                            // graph on a blocking thread (it scans the tree).
                            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
                            let cfg_for_plan = foundry_config.clone();
                            let changed_for_plan = changed_abs.clone();
                            let remappings_for_plan = remappings.clone();
                            let plan_res = tokio::task::spawn_blocking(move || {
                                compute_reverse_import_closure(
                                    &cfg_for_plan,
                                    &changed_for_plan,
                                    &remappings_for_plan,
                                )
                            })
                            .await;

                            let affected_files = match plan_res {
                                Ok(set) => set.into_iter().collect::<Vec<PathBuf>>(),
                                Err(_) => Vec::new(),
                            };
                            if !affected_files.is_empty() {
                                client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "didSave cache sync: aggressive scoped reindex (affected={})",
                                            affected_files.len(),
                                        ),
                                    )
                                    .await;

                                // Index only the affected files, using unsaved
                                // buffer contents where available.
                                let text_cache_snapshot = text_cache.read().await.clone();
                                match crate::solc::solc_project_index_scoped(
                                    &foundry_config,
                                    Some(&client),
                                    Some(&text_cache_snapshot),
                                    &affected_files,
                                )
                                .await
                                {
                                    Ok(ast_data) => {
                                        let scoped_build = Arc::new(crate::goto::CachedBuild::new(
                                            ast_data,
                                            0,
                                            Some(&mut *path_interner.write().await),
                                        ));
                                        let source_count = scoped_build.nodes.len();
                                        // Outcome of applying the scoped build to the cache.
                                        enum ScopedApply {
                                            Merged { affected_count: usize },
                                            Stored,
                                            Failed(String),
                                        }
                                        let apply_outcome = {
                                            let mut cache = ast_cache.write().await;
                                            if let Some(existing) = cache.get(cache_key).cloned() {
                                                let mut merged = (*existing).clone();
                                                match merge_scoped_cached_build(
                                                    &mut merged,
                                                    (*scoped_build).clone(),
                                                ) {
                                                    Ok(affected_count) => {
                                                        cache.insert(
                                                            cache_key.clone().into(),
                                                            Arc::new(merged),
                                                        );
                                                        ScopedApply::Merged { affected_count }
                                                    }
                                                    Err(e) => ScopedApply::Failed(e),
                                                }
                                            } else {
                                                // No existing root build: the scoped
                                                // build becomes the root as-is.
                                                cache
                                                    .insert(cache_key.clone().into(), scoped_build);
                                                ScopedApply::Stored
                                            }
                                        };

                                        match apply_outcome {
                                            ScopedApply::Merged { affected_count } => {
                                                client
                                                    .log_message(
                                                        MessageType::INFO,
                                                        format!(
                                                            "didSave cache sync: scoped merge applied (scoped_sources={}, affected_paths={})",
                                                            source_count, affected_count
                                                        ),
                                                    )
                                                    .await;
                                                scoped_ok = true;
                                            }
                                            ScopedApply::Stored => {
                                                client
                                                    .log_message(
                                                        MessageType::INFO,
                                                        format!(
                                                            "didSave cache sync: scoped cache stored (scoped_sources={})",
                                                            source_count
                                                        ),
                                                    )
                                                    .await;
                                                scoped_ok = true;
                                            }
                                            ScopedApply::Failed(e) => {
                                                client
                                                    .log_message(
                                                        MessageType::WARNING,
                                                        format!(
                                                            "didSave cache sync: scoped merge rejected, will retry scoped on next save: {e}"
                                                        ),
                                                    )
                                                    .await;
                                                dirty_flag.store(true, Ordering::Release);
                                            }
                                        }
                                    }
                                    Err(e) => {
                                        client
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "didSave cache sync: scoped reindex failed, will retry scoped on next save: {e}"
                                                ),
                                            )
                                            .await;
                                        dirty_flag.store(true, Ordering::Release);
                                    }
                                }
                            } else {
                                client
                                    .log_message(
                                        MessageType::INFO,
                                        "didSave cache sync: no affected files from scoped planner",
                                    )
                                    .await;
                            }
                        }
                    }

                    if scoped_ok {
                        continue;
                    }
                    // In incremental mode a failed scoped pass is retried on the
                    // next save instead of escalating to a full rebuild here.
                    if aggressive_scoped {
                        continue;
                    }

                    client
                        .log_message(
                            MessageType::INFO,
                            "didSave cache sync: rebuilding project index from disk",
                        )
                        .await;

                    match crate::solc::solc_project_index(&foundry_config, Some(&client), None)
                        .await
                    {
                        Ok(ast_data) => {
                            let mut new_build = crate::goto::CachedBuild::new(
                                ast_data,
                                0,
                                Some(&mut *path_interner.write().await),
                            );
                            // Preserve entries for files the fresh index missed.
                            if let Some(prev) = ast_cache.read().await.get(cache_key) {
                                new_build.merge_missing_from(prev);
                            }
                            let source_count = new_build.nodes.len();
                            let cached_build = Arc::new(new_build);
                            let build_for_save = (*cached_build).clone();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone().into(), cached_build);

                            // Persist on a blocking thread; on failure mark dirty
                            // again so a later save retries.
                            let cfg_for_save = foundry_config.clone();
                            let save_res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;

                            match save_res {
                                Ok(Ok(report)) => {
                                    changed_files.write().await.clear();
                                    client
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "didSave cache sync: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                source_count, report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "didSave cache sync: persist failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "didSave cache sync: save task failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }
                        }
                        Err(e) => {
                            dirty_flag.store(true, Ordering::Release);
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("didSave cache sync: re-index failed, will retry: {e}"),
                                )
                                .await;
                        }
                    }
                }
            });
        }
    }
}
2253
2254#[tower_lsp::async_trait]
2255impl LanguageServer for ForgeLsp {
    /// LSP `initialize`: record client state, load configuration, negotiate
    /// the position encoding, and advertise server capabilities.
    ///
    /// Settings from `initializationOptions` (when present) are applied and
    /// flagged via `settings_from_init`; lint and foundry configuration are
    /// loaded from the workspace root's foundry.toml when a root URI is given.
    async fn initialize(
        &self,
        params: InitializeParams,
    ) -> tower_lsp::jsonrpc::Result<InitializeResult> {
        // Remember the client's capabilities for later feature checks.
        {
            let mut caps = self.client_capabilities.write().await;
            *caps = Some(params.capabilities.clone());
        }

        // Apply settings passed via initializationOptions, if any.
        if let Some(init_opts) = &params.initialization_options {
            let s = config::parse_settings(init_opts);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "settings: inlayHints.parameters={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
                        s.inlay_hints.parameters, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
                    ),
                )
                .await;
            let mut settings = self.settings.write().await;
            *settings = s;
            // Mark that init options took precedence over later config pushes.
            self.settings_from_init
                .store(true, std::sync::atomic::Ordering::Relaxed);
        }

        if let Some(uri) = params.root_uri.as_ref() {
            let mut root = self.root_uri.write().await;
            *root = Some(uri.clone());
        }

        // Load lint and compiler configuration from the workspace root.
        if let Some(root_uri) = params
            .root_uri
            .as_ref()
            .and_then(|uri| uri.to_file_path().ok())
        {
            let lint_cfg = config::load_lint_config(&root_uri);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "loaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                        lint_cfg.lint_on_build,
                        lint_cfg.ignore_patterns.len()
                    ),
                )
                .await;
            let mut config = self.lint_config.write().await;
            *config = lint_cfg;

            let foundry_cfg = config::load_foundry_config(&root_uri);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "loaded foundry.toml: solc={}, remappings={}",
                        foundry_cfg.solc_version.as_deref().unwrap_or("auto"),
                        foundry_cfg.remappings.len()
                    ),
                )
                .await;
            let mut fc = self.foundry_config.write().await;
            *fc = foundry_cfg;
        }

        // Negotiate the position encoding with the client and store it globally.
        let client_encodings = params
            .capabilities
            .general
            .as_ref()
            .and_then(|g| g.position_encodings.as_deref());
        let encoding = utils::PositionEncoding::negotiate(client_encodings);
        utils::set_encoding(encoding);

        Ok(InitializeResult {
            server_info: Some(ServerInfo {
                name: "Solidity Language Server".to_string(),
                version: Some(env!("LONG_VERSION").to_string()),
            }),
            capabilities: ServerCapabilities {
                position_encoding: Some(encoding.into()),
                completion_provider: Some(CompletionOptions {
                    // '.' for members; quote/slash characters for import paths.
                    trigger_characters: Some(vec![
                        ".".to_string(),
                        "\"".to_string(),
                        "'".to_string(),
                        "/".to_string(),
                    ]),
                    resolve_provider: Some(false),
                    ..Default::default()
                }),
                signature_help_provider: Some(SignatureHelpOptions {
                    trigger_characters: Some(vec![
                        "(".to_string(),
                        ",".to_string(),
                        "[".to_string(),
                    ]),
                    retrigger_characters: None,
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                definition_provider: Some(OneOf::Left(true)),
                declaration_provider: Some(DeclarationCapability::Simple(true)),
                implementation_provider: Some(ImplementationProviderCapability::Simple(true)),
                references_provider: Some(OneOf::Left(true)),
                rename_provider: Some(OneOf::Right(RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: Some(true),
                    },
                })),
                workspace_symbol_provider: Some(OneOf::Left(true)),
                document_symbol_provider: Some(OneOf::Left(true)),
                document_highlight_provider: Some(OneOf::Left(true)),
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                document_link_provider: Some(DocumentLinkOptions {
                    resolve_provider: Some(false),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                document_formatting_provider: Some(OneOf::Left(true)),
                code_action_provider: Some(CodeActionProviderCapability::Options(
                    CodeActionOptions {
                        code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]),
                        resolve_provider: Some(false),
                        work_done_progress_options: WorkDoneProgressOptions {
                            work_done_progress: None,
                        },
                    },
                )),
                call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
                code_lens_provider: None,
                folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
                selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
                inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
                    InlayHintOptions {
                        resolve_provider: Some(false),
                        work_done_progress_options: WorkDoneProgressOptions {
                            work_done_progress: None,
                        },
                    },
                ))),
                semantic_tokens_provider: Some(
                    SemanticTokensServerCapabilities::SemanticTokensOptions(
                        SemanticTokensOptions {
                            legend: semantic_tokens::legend(),
                            full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            range: Some(true),
                            work_done_progress_options: WorkDoneProgressOptions {
                                work_done_progress: None,
                            },
                        },
                    ),
                ),
                text_document_sync: Some(TextDocumentSyncCapability::Options(
                    TextDocumentSyncOptions {
                        will_save: Some(true),
                        will_save_wait_until: None,
                        open_close: Some(true),
                        // Ask clients to include the full text on save.
                        save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
                            include_text: Some(true),
                        })),
                        change: Some(TextDocumentSyncKind::FULL),
                    },
                )),
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: None,
                    // Watch .sol files and folders for create/rename/delete so
                    // imports can be kept up to date.
                    file_operations: Some(WorkspaceFileOperationsServerCapabilities {
                        will_rename: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        did_rename: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        will_delete: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        did_delete: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        // Creation only matters for .sol files (scaffold template).
                        will_create: Some(FileOperationRegistrationOptions {
                            filters: vec![FileOperationFilter {
                                scheme: Some("file".to_string()),
                                pattern: FileOperationPattern {
                                    glob: "**/*.sol".to_string(),
                                    matches: Some(FileOperationPatternKind::File),
                                    options: None,
                                },
                            }],
                        }),
                        did_create: Some(FileOperationRegistrationOptions {
                            filters: vec![FileOperationFilter {
                                scheme: Some("file".to_string()),
                                pattern: FileOperationPattern {
                                    glob: "**/*.sol".to_string(),
                                    matches: Some(FileOperationPatternKind::File),
                                    options: None,
                                },
                            }],
                        }),
                        ..Default::default()
                    }),
                }),
                execute_command_provider: Some(ExecuteCommandOptions {
                    commands: vec![
                        "solidity.clearCache".to_string(),
                        "solidity.reindex".to_string(),
                    ],
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                ..ServerCapabilities::default()
            },
        })
    }
2548
    /// Post-handshake setup: registers file watchers, pulls settings via
    /// `workspace/configuration` (unless they arrived in `initialize`),
    /// kicks off the update check, and eagerly indexes the project in two
    /// phases (src-only first, then the full src+test+script closure).
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "lsp server initialized.")
            .await;

        // Dynamic watcher registration is optional; only register if the
        // client advertised support for it.
        let supports_dynamic = self
            .client_capabilities
            .read()
            .await
            .as_ref()
            .and_then(|caps| caps.workspace.as_ref())
            .and_then(|ws| ws.did_change_watched_files.as_ref())
            .and_then(|dcwf| dcwf.dynamic_registration)
            .unwrap_or(false);

        if supports_dynamic {
            // Watch foundry.toml and remappings.txt so config changes are
            // picked up in did_change_watched_files.
            let registration = Registration {
                id: "foundry-toml-watcher".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(
                    serde_json::to_value(DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/foundry.toml".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/remappings.txt".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                        ],
                    })
                    .unwrap(),
                ),
            };

            if let Err(e) = self.client.register_capability(vec![registration]).await {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("failed to register foundry.toml watcher: {e}"),
                    )
                    .await;
            } else {
                self.client
                    .log_message(MessageType::INFO, "registered foundry.toml file watcher")
                    .await;
            }
        }

        // If initializationOptions did not carry settings, try the
        // workspace/configuration request instead (when supported).
        if !self
            .settings_from_init
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            let supports_config = self
                .client_capabilities
                .read()
                .await
                .as_ref()
                .and_then(|caps| caps.workspace.as_ref())
                .and_then(|ws| ws.configuration)
                .unwrap_or(false);

            if supports_config {
                match self
                    .client
                    .configuration(vec![ConfigurationItem {
                        scope_uri: None,
                        section: Some("solidity-language-server".to_string()),
                    }])
                    .await
                {
                    Ok(values) => {
                        // Only the first returned item matters (one item requested).
                        if let Some(val) = values.into_iter().next() {
                            if !val.is_null() {
                                let s = config::parse_settings(&val);
                                self.client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "settings (workspace/configuration): lint.enabled={}, lint.exclude={:?}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}",
                                            s.lint.enabled, s.lint.exclude, s.project_index.full_project_scan, s.project_index.cache_mode,
                                        ),
                                    )
                                    .await;
                                let mut settings = self.settings.write().await;
                                *settings = s;
                            }
                        }
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("workspace/configuration request failed: {e}"),
                            )
                            .await;
                    }
                }
            }
        }

        // Fire-and-forget version check against the GitHub releases API.
        if self.settings.read().await.check_for_updates {
            let client = self.client.clone();
            tokio::spawn(check_for_updates(client));
        }

        // Eager project indexing (solc backend only, opt-in via settings).
        if self.use_solc && self.settings.read().await.project_index.full_project_scan {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            // Snapshot/clone everything the background task needs so it owns
            // its state independently of `self`.
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();
            let sub_caches_arc = self.sub_caches.clone();
            let sub_caches_loading_flag = self.sub_caches_loading.clone();
            let path_interner = self.path_interner.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping eager index",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Phase-1 progress reporting uses a dedicated token.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Pre-populate the path interner with every discoverable
                // source file (including libs). Discovery is blocking I/O,
                // so run it off the async runtime.
                {
                    let cfg_for_discover = foundry_config.clone();
                    let all_files = tokio::task::spawn_blocking(move || {
                        crate::solc::discover_source_files_with_libs(&cfg_for_discover)
                    })
                    .await
                    .unwrap_or_default();
                    let mut interner = path_interner.write().await;
                    for file in &all_files {
                        if let Some(path_str) = file.to_str() {
                            interner.intern(path_str);
                        }
                    }
                }

                // Try the on-disk reference cache before compiling anything.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone().into(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "loaded {source_count} sources from cache ({}ms)",
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete cache means no compilation is
                            // needed at all; finish the progress and bail.
                            if report.complete {
                                spawn_load_lib_sub_caches_task(
                                    foundry_config.clone(),
                                    sub_caches_arc.clone(),
                                    sub_caches_loading_flag.clone(),
                                    path_interner.clone(),
                                    client.clone(),
                                );
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                "no cached index found, building from source",
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("cache load failed: {e}"))
                            .await;
                    }
                }

                // Cache miss / partial cache: compute both compilation sets.
                // Phase 1 = src-only closure (fast), phase 2 = everything.
                let remappings = crate::solc::resolve_remappings(&foundry_config).await;
                let cfg_for_src = foundry_config.clone();
                let remappings_for_src = remappings.clone();
                let src_files = tokio::task::spawn_blocking(move || {
                    crate::solc::discover_src_only_closure(&cfg_for_src, &remappings_for_src)
                })
                .await
                .unwrap_or_default();

                let cfg_for_full = foundry_config.clone();
                let remappings_for_full = remappings.clone();
                let full_files = tokio::task::spawn_blocking(move || {
                    crate::solc::discover_compilation_closure(&cfg_for_full, &remappings_for_full)
                })
                .await
                .unwrap_or_default();

                let src_count = src_files.len();
                let full_count = full_files.len();

                let phase1_start = std::time::Instant::now();
                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Report(
                            WorkDoneProgressReport {
                                message: Some(format!("Compiling {} src files...", src_count,)),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Phase 1: compile only the src closure so navigation works
                // quickly; phase 2 fills in test/script files afterwards.
                let phase1_ok = match crate::solc::solc_project_index_scoped(
                    &foundry_config,
                    Some(&client),
                    None,
                    &src_files,
                )
                .await
                {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(
                            ast_data,
                            0,
                            Some(&mut *path_interner.write().await),
                        );
                        // Preserve entries from any partially-loaded cache
                        // that the fresh build did not cover.
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone().into(), Arc::new(new_build));
                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: phase 1 complete — {} source files indexed in {:.1}s",
                                    source_count,
                                    phase1_start.elapsed().as_secs_f64(),
                                ),
                            )
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files (full index in background)",
                                            source_count,
                                        )),
                                    },
                                )),
                            })
                            .await;
                        true
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: phase 1 failed: {e}"),
                            )
                            .await;
                        false
                    }
                };

                // Phase 2: full closure (src + test + script), its own
                // progress token, and a background cache persist on success.
                let phase2_foundry_config = foundry_config.clone();
                let phase2_client = client.clone();
                let phase2_cache_key = cache_key.clone();
                let phase2_ast_cache = ast_cache.clone();
                let phase2_path_interner = path_interner.clone();
                let phase2_sub_caches = sub_caches_arc.clone();
                let phase2_loading_flag = sub_caches_loading_flag.clone();
                let phase2 = async move {
                    let phase2_start = std::time::Instant::now();
                    let token2 = NumberOrString::String("solidity/projectIndexFull".to_string());
                    let _ = phase2_client
                        .send_request::<request::WorkDoneProgressCreate>(
                            WorkDoneProgressCreateParams {
                                token: token2.clone(),
                            },
                        )
                        .await;
                    phase2_client
                        .send_notification::<notification::Progress>(ProgressParams {
                            token: token2.clone(),
                            value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                                WorkDoneProgressBegin {
                                    title: "Full project index".to_string(),
                                    message: Some(format!(
                                        "Compiling {} files (src + test + script)...",
                                        full_count,
                                    )),
                                    cancellable: Some(false),
                                    percentage: None,
                                },
                            )),
                        })
                        .await;

                    match crate::solc::solc_project_index_scoped(
                        &phase2_foundry_config,
                        Some(&phase2_client),
                        None,
                        &full_files,
                    )
                    .await
                    {
                        Ok(ast_data) => {
                            let mut new_build = crate::goto::CachedBuild::new(
                                ast_data,
                                0,
                                Some(&mut *phase2_path_interner.write().await),
                            );
                            // Merge anything the phase-1 build had that the
                            // full build is missing, then publish.
                            if let Some(prev) = phase2_ast_cache.read().await.get(&phase2_cache_key)
                            {
                                new_build.merge_missing_from(prev);
                            }
                            let source_count = new_build.nodes.len();
                            let cached_build = Arc::new(new_build);
                            // Deep copy for the persist task; the Arc goes
                            // into the in-memory cache.
                            let build_for_save = (*cached_build).clone();
                            phase2_ast_cache
                                .write()
                                .await
                                .insert(phase2_cache_key.clone().into(), cached_build);
                            phase2_client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: phase 2 complete — {} source files indexed in {:.1}s",
                                        source_count,
                                        phase2_start.elapsed().as_secs_f64(),
                                    ),
                                )
                                .await;

                            spawn_load_lib_sub_caches_task(
                                phase2_foundry_config.clone(),
                                phase2_sub_caches,
                                phase2_loading_flag,
                                phase2_path_interner,
                                phase2_client.clone(),
                            );

                            // Persist the reference cache off the hot path;
                            // failures are logged but non-fatal.
                            let cfg_for_save = phase2_foundry_config.clone();
                            let client_for_save = phase2_client.clone();
                            tokio::spawn(async move {
                                let res = tokio::task::spawn_blocking(move || {
                                    crate::project_cache::save_reference_cache_with_report(
                                        &cfg_for_save,
                                        &build_for_save,
                                        None,
                                    )
                                })
                                .await;
                                match res {
                                    Ok(Ok(_report)) => {}
                                    Ok(Err(e)) => {
                                        client_for_save
                                            .log_message(
                                                MessageType::WARNING,
                                                format!("project index: cache save failed: {e}"),
                                            )
                                            .await;
                                    }
                                    Err(e) => {
                                        client_for_save
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "project index: cache save task failed: {e}"
                                                ),
                                            )
                                            .await;
                                    }
                                }
                            });

                            phase2_client
                                .send_notification::<notification::Progress>(ProgressParams {
                                    token: token2,
                                    value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                        WorkDoneProgressEnd {
                                            message: Some(format!(
                                                "Indexed {} source files in {:.1}s",
                                                source_count,
                                                phase2_start.elapsed().as_secs_f64(),
                                            )),
                                        },
                                    )),
                                })
                                .await;
                        }
                        Err(e) => {
                            phase2_client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("project index: phase 2 failed: {e}"),
                                )
                                .await;
                            phase2_client
                                .send_notification::<notification::Progress>(ProgressParams {
                                    token: token2,
                                    value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                        WorkDoneProgressEnd {
                                            message: Some(format!("Full index failed: {e}",)),
                                        },
                                    )),
                                })
                                .await;
                        }
                    }
                };

                if phase1_ok {
                    // Phase 1 already ended its progress token; run phase 2
                    // concurrently in the background.
                    tokio::spawn(phase2);
                } else {
                    // Phase 1 never ended its token, so run phase 2 inline
                    // and close the phase-1 token afterwards.
                    phase2.await;

                    client
                        .send_notification::<notification::Progress>(ProgressParams {
                            token,
                            value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                WorkDoneProgressEnd {
                                    message: Some("Index complete (phase 1 skipped)".to_string()),
                                },
                            )),
                        })
                        .await;
                }
            });
        }
    }
3099
    /// Handles `workspace/executeCommand` for the two commands advertised in
    /// `initialize`: `solidity.clearCache` and `solidity.reindex`.
    async fn execute_command(
        &self,
        params: ExecuteCommandParams,
    ) -> tower_lsp::jsonrpc::Result<Option<serde_json::Value>> {
        match params.command.as_str() {
            "solidity.clearCache" => {
                let root = self.foundry_config.read().await.root.clone();
                let cache_dir = crate::project_cache::cache_dir(&root);

                // Remove the on-disk cache directory first; keep the result
                // so the response can still reflect a disk failure.
                let disk_result = if cache_dir.exists() {
                    std::fs::remove_dir_all(&cache_dir).map_err(|e| format!("{e}"))
                } else {
                    Ok(())
                };

                // In-memory caches are cleared unconditionally, even if the
                // disk removal failed.
                self.ast_cache.write().await.clear();
                self.completion_cache.write().await.clear();
                self.sub_caches.write().await.clear();
                self.semantic_token_cache.write().await.clear();
                *self.path_interner.write().await = crate::types::PathInterner::new();

                match disk_result {
                    Ok(()) => {
                        self.client
                            .show_message(
                                MessageType::INFO,
                                format!(
                                    "Cache cleared: {} removed, all in-memory caches reset",
                                    cache_dir.display()
                                ),
                            )
                            .await;
                        Ok(Some(serde_json::json!({ "success": true })))
                    }
                    Err(e) => {
                        self.client
                            .show_message(
                                MessageType::ERROR,
                                format!("solidity.clearCache: failed to remove cache dir: {e}"),
                            )
                            .await;
                        Err(tower_lsp::jsonrpc::Error {
                            code: tower_lsp::jsonrpc::ErrorCode::InternalError,
                            message: std::borrow::Cow::Owned(e),
                            data: None,
                        })
                    }
                }
            }

            "solidity.reindex" => {
                // Evict the in-memory project build and mark the persisted
                // cache dirty so the sync worker rebuilds from scratch.
                if let Some(root_key) = self.project_cache_key().await {
                    self.ast_cache.write().await.remove(&root_key);
                }
                self.project_cache_dirty
                    .store(true, std::sync::atomic::Ordering::Relaxed);
                self.project_cache_force_full_rebuild
                    .store(true, std::sync::atomic::Ordering::Release);

                // Only spawn a worker if one is not already running; if one
                // is, this call just marks work as pending for it.
                if start_or_mark_project_cache_sync_pending(
                    &self.project_cache_sync_pending,
                    &self.project_cache_sync_running,
                ) {
                    let foundry_config = self.foundry_config.read().await.clone();
                    let root_key = self.project_cache_key().await;
                    let ast_cache = self.ast_cache.clone();
                    let client = self.client.clone();
                    let dirty_flag = self.project_cache_dirty.clone();
                    let running_flag = self.project_cache_sync_running.clone();
                    let pending_flag = self.project_cache_sync_pending.clone();
                    let changed_files = self.project_cache_changed_files.clone();
                    let force_full_rebuild_flag = self.project_cache_force_full_rebuild.clone();
                    let path_interner = self.path_interner.clone();

                    tokio::spawn(async move {
                        loop {
                            // Debounce: coalesce bursts of reindex requests.
                            tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                            // No pending work: try to shut the worker down,
                            // unless new work raced in (then keep looping).
                            if !take_project_cache_sync_pending(&pending_flag) {
                                if stop_project_cache_sync_worker_or_reclaim(
                                    &pending_flag,
                                    &running_flag,
                                ) {
                                    continue;
                                }
                                break;
                            }

                            // Someone else already claimed the dirty flag.
                            if !try_claim_project_cache_dirty(&dirty_flag) {
                                continue;
                            }

                            // On any failure below, re-set the dirty flag so
                            // a later iteration retries.
                            let Some(cache_key) = &root_key else {
                                dirty_flag.store(true, Ordering::Release);
                                continue;
                            };
                            if !foundry_config.root.is_dir() {
                                dirty_flag.store(true, Ordering::Release);
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!(
                                            "solidity.reindex cache sync: invalid project root {}, deferring",
                                            foundry_config.root.display()
                                        ),
                                    )
                                    .await;
                                continue;
                            }

                            client
                                .log_message(
                                    MessageType::INFO,
                                    "solidity.reindex: rebuilding project index from disk",
                                )
                                .await;

                            match crate::solc::solc_project_index(
                                &foundry_config,
                                Some(&client),
                                None,
                            )
                            .await
                            {
                                Ok(ast_data) => {
                                    let mut new_build = crate::goto::CachedBuild::new(
                                        ast_data,
                                        0,
                                        Some(&mut *path_interner.write().await),
                                    );
                                    // Keep any entries the fresh build missed.
                                    if let Some(prev) = ast_cache.read().await.get(cache_key) {
                                        new_build.merge_missing_from(prev);
                                    }
                                    let source_count = new_build.nodes.len();
                                    let cached_build = Arc::new(new_build);
                                    let build_for_save = (*cached_build).clone();
                                    ast_cache
                                        .write()
                                        .await
                                        .insert(cache_key.clone().into(), cached_build);

                                    // Persist via blocking I/O off-runtime.
                                    let cfg_for_save = foundry_config.clone();
                                    let save_res = tokio::task::spawn_blocking(move || {
                                        crate::project_cache::save_reference_cache_with_report(
                                            &cfg_for_save,
                                            &build_for_save,
                                            None,
                                        )
                                    })
                                    .await;

                                    match save_res {
                                        Ok(Ok(report)) => {
                                            // Success: the persisted cache now
                                            // reflects a full rebuild.
                                            changed_files.write().await.clear();
                                            force_full_rebuild_flag.store(false, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::INFO,
                                                    format!(
                                                        "solidity.reindex: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                        source_count, report.file_count_hashed, report.duration_ms
                                                    ),
                                                )
                                                .await;
                                        }
                                        Ok(Err(e)) => {
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: persist failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                        Err(e) => {
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: save task failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                    }
                                }
                                Err(e) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "solidity.reindex: re-index failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }

                            // Exit unless new work arrived while we ran.
                            if stop_project_cache_sync_worker_or_reclaim(
                                &pending_flag,
                                &running_flag,
                            ) {
                                continue;
                            }
                            break;
                        }
                    });
                }

                // Respond immediately; the rebuild continues in background.
                self.client
                    .log_message(
                        MessageType::INFO,
                        "solidity.reindex: in-memory cache evicted, background reindex triggered",
                    )
                    .await;
                Ok(Some(serde_json::json!({ "success": true })))
            }

            _ => Err(tower_lsp::jsonrpc::Error::method_not_found()),
        }
    }
3358
3359 async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
3360 self.flush_project_cache_to_disk("shutdown").await;
3361 self.client
3362 .log_message(MessageType::INFO, "lsp server shutting down.")
3363 .await;
3364 Ok(())
3365 }
3366
3367 async fn did_open(&self, params: DidOpenTextDocumentParams) {
3368 self.client
3369 .log_message(MessageType::INFO, "file opened")
3370 .await;
3371
3372 let mut td = params.text_document;
3373 let template_on_create = self
3374 .settings
3375 .read()
3376 .await
3377 .file_operations
3378 .template_on_create;
3379
3380 let should_attempt_scaffold = template_on_create
3383 && td.text.chars().all(|ch| ch.is_whitespace())
3384 && td.uri.scheme() == "file"
3385 && td
3386 .uri
3387 .to_file_path()
3388 .ok()
3389 .and_then(|p| p.extension().map(|e| e == "sol"))
3390 .unwrap_or(false);
3391
3392 if should_attempt_scaffold {
3393 let uri_str = td.uri.to_string();
3394 let create_flow_pending = {
3395 let pending = self.pending_create_scaffold.read().await;
3396 pending.contains(&uri_str)
3397 };
3398 if create_flow_pending {
3399 self.client
3400 .log_message(
3401 MessageType::INFO,
3402 format!(
3403 "didOpen: skip scaffold for {} (didCreateFiles scaffold pending)",
3404 uri_str
3405 ),
3406 )
3407 .await;
3408 } else {
3409 let cache_has_content = {
3410 let tc = self.text_cache.read().await;
3411 tc.get(&uri_str)
3412 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()))
3413 };
3414
3415 if !cache_has_content {
3416 let file_has_content = td.uri.to_file_path().ok().is_some_and(|p| {
3417 std::fs::read_to_string(&p)
3418 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()))
3419 });
3420
3421 if !file_has_content {
3422 let solc_version = self.foundry_config.read().await.solc_version.clone();
3423 if let Some(scaffold) =
3424 file_operations::generate_scaffold(&td.uri, solc_version.as_deref())
3425 {
3426 let end = utils::byte_offset_to_position(&td.text, td.text.len());
3427 let edit = WorkspaceEdit {
3428 changes: Some(HashMap::from([(
3429 td.uri.clone(),
3430 vec![TextEdit {
3431 range: Range {
3432 start: Position::default(),
3433 end,
3434 },
3435 new_text: scaffold.clone(),
3436 }],
3437 )])),
3438 document_changes: None,
3439 change_annotations: None,
3440 };
3441 if self
3442 .client
3443 .apply_edit(edit)
3444 .await
3445 .as_ref()
3446 .is_ok_and(|r| r.applied)
3447 {
3448 td.text = scaffold;
3449 self.client
3450 .log_message(
3451 MessageType::INFO,
3452 format!("didOpen: scaffolded empty file {}", uri_str),
3453 )
3454 .await;
3455 }
3456 }
3457 }
3458 }
3459 }
3460 }
3461
3462 self.on_change(td).await
3463 }
3464
3465 async fn did_change(&self, params: DidChangeTextDocumentParams) {
3466 self.client
3467 .log_message(MessageType::INFO, "file changed")
3468 .await;
3469
3470 if let Some(change) = params.content_changes.into_iter().next() {
3472 let has_substantive_content = change.text.chars().any(|ch| !ch.is_whitespace());
3473 let mut text_cache = self.text_cache.write().await;
3474 text_cache.insert(
3475 params.text_document.uri.to_string().into(),
3476 (params.text_document.version, change.text),
3477 );
3478 drop(text_cache);
3479
3480 if has_substantive_content {
3481 self.pending_create_scaffold
3482 .write()
3483 .await
3484 .remove(params.text_document.uri.as_str());
3485 }
3486 }
3487 }
3488
    /// Handles `textDocument/didSave` by routing each save through a
    /// per-document worker task. A `tokio::sync::watch` channel coalesces
    /// rapid saves: only the latest params are processed, never a backlog.
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        let uri_key = params.text_document.uri.to_string();

        {
            // Fast path: hand the params to an existing worker for this URI.
            let workers = self.did_save_workers.read().await;
            if let Some(tx) = workers.get(&uri_key) {
                // send() fails only when the worker's receiver is gone; in
                // that case fall through and spawn a replacement worker.
                if tx.send(Some(params.clone())).is_ok() {
                    return;
                }
            }
        }

        // No (live) worker yet: create the channel seeded with these params.
        let (tx, mut rx) = tokio::sync::watch::channel(Some(params));
        // Ensure the first changed().await below fires for the seed value.
        rx.mark_changed();
        // Inserting over a stale entry replaces the dead sender.
        self.did_save_workers
            .write()
            .await
            .insert(uri_key.into(), tx);

        let this = self.clone();
        tokio::spawn(async move {
            loop {
                // Exits when the sender is dropped (worker map entry removed
                // or replaced).
                if rx.changed().await.is_err() {
                    break;
                }
                let params = match rx.borrow_and_update().clone() {
                    Some(p) => p,
                    None => continue,
                };
                run_did_save(this.clone(), params).await;
            }
        });
    }
3538
3539 async fn will_save(&self, params: WillSaveTextDocumentParams) {
3540 self.client
3541 .log_message(
3542 MessageType::INFO,
3543 format!(
3544 "file will save reason:{:?} {}",
3545 params.reason, params.text_document.uri
3546 ),
3547 )
3548 .await;
3549 }
3550
3551 async fn formatting(
3552 &self,
3553 params: DocumentFormattingParams,
3554 ) -> tower_lsp::jsonrpc::Result<Option<Vec<TextEdit>>> {
3555 self.client
3556 .log_message(MessageType::INFO, "formatting request")
3557 .await;
3558
3559 let uri = params.text_document.uri;
3560 let file_path = match uri.to_file_path() {
3561 Ok(path) => path,
3562 Err(_) => {
3563 self.client
3564 .log_message(MessageType::ERROR, "Invalid file URI for formatting")
3565 .await;
3566 return Ok(None);
3567 }
3568 };
3569 let path_str = match file_path.to_str() {
3570 Some(s) => s,
3571 None => {
3572 self.client
3573 .log_message(MessageType::ERROR, "Invalid file path for formatting")
3574 .await;
3575 return Ok(None);
3576 }
3577 };
3578
3579 let original_content = {
3581 let text_cache = self.text_cache.read().await;
3582 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
3583 content.clone()
3584 } else {
3585 match std::fs::read_to_string(&file_path) {
3587 Ok(content) => content,
3588 Err(_) => {
3589 self.client
3590 .log_message(MessageType::ERROR, "Failed to read file for formatting")
3591 .await;
3592 return Ok(None);
3593 }
3594 }
3595 }
3596 };
3597
3598 let formatted_content = match self.compiler.format(path_str).await {
3600 Ok(content) => content,
3601 Err(e) => {
3602 self.client
3603 .log_message(MessageType::WARNING, format!("Formatting failed: {e}"))
3604 .await;
3605 return Ok(None);
3606 }
3607 };
3608
3609 if original_content != formatted_content {
3611 let end = utils::byte_offset_to_position(&original_content, original_content.len());
3612
3613 {
3615 let mut text_cache = self.text_cache.write().await;
3616 let version = text_cache
3617 .get(&uri.to_string())
3618 .map(|(v, _)| *v)
3619 .unwrap_or(0);
3620 text_cache.insert(uri.to_string().into(), (version, formatted_content.clone()));
3621 }
3622
3623 let edit = TextEdit {
3624 range: Range {
3625 start: Position::default(),
3626 end,
3627 },
3628 new_text: formatted_content,
3629 };
3630 Ok(Some(vec![edit]))
3631 } else {
3632 Ok(None)
3633 }
3634 }
3635
3636 async fn did_close(&self, params: DidCloseTextDocumentParams) {
3637 self.flush_project_cache_to_disk("didClose").await;
3638 let uri = params.text_document.uri.to_string();
3639 self.ast_cache.write().await.remove(&uri);
3640 self.text_cache.write().await.remove(&uri);
3641 self.completion_cache.write().await.remove(&uri);
3642 self.client
3643 .log_message(MessageType::INFO, "file closed, caches cleared.")
3644 .await;
3645 }
3646
3647 async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
3648 let s = config::parse_settings(¶ms.settings);
3649 self.client
3650 .log_message(
3651 MessageType::INFO,
3652 format!(
3653 "settings updated: inlayHints.parameters={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
3654 s.inlay_hints.parameters, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
3655 ),
3656 )
3657 .await;
3658 let mut settings = self.settings.write().await;
3659 *settings = s;
3660
3661 let client = self.client.clone();
3663 tokio::spawn(async move {
3664 let _ = client.inlay_hint_refresh().await;
3665 });
3666 }
3667 async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
3668 self.client
3669 .log_message(MessageType::INFO, "workdspace folders changed.")
3670 .await;
3671 }
3672
    /// Handles `workspace/didChangeWatchedFiles` for the watchers registered
    /// in `initialized`: reloads lint + foundry config when foundry.toml
    /// changes, and logs when remappings.txt changes.
    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
        self.client
            .log_message(MessageType::INFO, "watched files have changed.")
            .await;

        for change in &params.changes {
            let path = match change.uri.to_file_path() {
                Ok(p) => p,
                Err(_) => continue,
            };

            let filename = path.file_name().and_then(|n| n.to_str());

            if filename == Some("foundry.toml") {
                // Reload the lint section first, then the broader foundry
                // config, from the changed file.
                let lint_cfg = config::load_lint_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                            lint_cfg.lint_on_build,
                            lint_cfg.ignore_patterns.len()
                        ),
                    )
                    .await;
                let mut lc = self.lint_config.write().await;
                *lc = lint_cfg;

                let foundry_cfg = config::load_foundry_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml: solc={}, remappings={}",
                            foundry_cfg.solc_version.as_deref().unwrap_or("auto"),
                            foundry_cfg.remappings.len()
                        ),
                    )
                    .await;
                if foundry_cfg.via_ir {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
                        )
                        .await;
                }
                let mut fc = self.foundry_config.write().await;
                *fc = foundry_cfg;
                // NOTE(review): this `break` abandons the rest of the event
                // batch after the first foundry.toml hit, so a
                // remappings.txt change delivered in the same notification
                // is never logged — confirm this is intentional.
                break;
            }

            if filename == Some("remappings.txt") {
                // Log-only: the config itself is not reloaded here.
                self.client
                    .log_message(
                        MessageType::INFO,
                        "remappings.txt changed, config may need refresh",
                    )
                    .await;
            }
        }
    }
3738
    /// Handles `textDocument/completion`.
    ///
    /// Resolution order:
    /// 1. `memory-safe` flag completion when the cursor is inside an assembly
    ///    dialect string (`assembly ("...")`).
    /// 2. Import-path completion when the cursor is inside an import string.
    /// 3. General completion via `completion::handle_completion_with_tail_candidates`,
    ///    backed by the per-file completion cache or the project-root build cache.
    async fn completion(
        &self,
        params: CompletionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        // Character that triggered the request (e.g. `.`, `"`, `'`), if any.
        let trigger_char = params
            .context
            .as_ref()
            .and_then(|ctx| ctx.trigger_character.as_deref());

        // Prefer the in-memory (possibly unsaved) document text; fall back to disk.
        let source_text = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, text)) = text_cache.get(&uri.to_string()) {
                text.clone()
            } else {
                match uri.to_file_path() {
                    Ok(path) => std::fs::read_to_string(&path).unwrap_or_default(),
                    Err(_) => return Ok(None),
                }
            }
        };

        // Per-document completion cache, if one has been populated for this URI.
        let local_cached: Option<Arc<completion::CompletionCache>> = {
            let comp_cache = self.completion_cache.read().await;
            comp_cache.get(&uri.to_string()).cloned()
        };

        // Project-root completion cache taken from the root AST build, if present.
        let root_cached: Option<Arc<completion::CompletionCache>> = {
            let root_key = self.project_cache_key().await;
            match root_key {
                Some(root_key) => {
                    let ast_cache = self.ast_cache.read().await;
                    ast_cache
                        .get(&root_key)
                        .map(|root_build| root_build.completion_cache.clone())
                }
                None => None,
            }
        };

        // Local cache wins; the root cache is the fallback.
        let cached = local_cached.or(root_cached.clone());

        if cached.is_none() {
            // Warm the per-document completion cache in the background from the
            // per-URI AST build (if any), so the next request can hit it. This
            // task intentionally does not block the current request.
            let ast_cache = self.ast_cache.clone();
            let completion_cache = self.completion_cache.clone();
            let uri_string = uri.to_string();
            tokio::spawn(async move {
                let cached_build = {
                    let cache = ast_cache.read().await;
                    match cache.get(&uri_string) {
                        Some(v) => v.clone(),
                        None => return,
                    }
                };
                completion_cache
                    .write()
                    .await
                    .insert(uri_string.into(), cached_build.completion_cache.clone());
            });
        }

        let cache_ref = cached.as_deref();

        // Map this document's filesystem path to the cache's internal file id.
        let file_id = {
            let uri_path = uri.to_file_path().ok();
            cache_ref.and_then(|c| {
                uri_path.as_ref().and_then(|p| {
                    let path_str = p.to_str()?;
                    c.path_to_file_id.get(path_str).copied()
                })
            })
        };

        let current_file_path = uri
            .to_file_path()
            .ok()
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        // When the trigger was a quote character, the client reports the position
        // *before* the quote is inserted; probe one character to the right so the
        // string-context checks below see the cursor inside the string.
        let check_pos = if matches!(trigger_char, Some("\"") | Some("'")) {
            Position {
                line: position.line,
                character: position.character.saturating_add(1),
            }
        } else {
            position
        };

        // Case 1: cursor inside `assembly ("...")` flags — offer `memory-safe`.
        if let Some(asm_range) =
            links::ts_cursor_in_assembly_flags(source_text.as_bytes(), check_pos)
        {
            let text_edit = CompletionTextEdit::Edit(TextEdit {
                range: Range {
                    start: Position {
                        line: position.line,
                        character: asm_range.start.character,
                    },
                    end: Position {
                        line: position.line,
                        character: check_pos.character,
                    },
                },
                new_text: "memory-safe".to_string(),
            });
            let item = CompletionItem {
                label: "memory-safe".to_string(),
                kind: Some(CompletionItemKind::VALUE),
                detail: Some("Solidity assembly dialect".to_string()),
                filter_text: Some("memory-safe".to_string()),
                text_edit: Some(text_edit),
                ..Default::default()
            };
            return Ok(Some(CompletionResponse::List(CompletionList {
                is_incomplete: false,
                items: vec![item],
            })));
        }

        // Case 2: cursor inside an import string — offer resolvable .sol paths.
        if let Some(import_range) =
            links::ts_cursor_in_import_string(source_text.as_bytes(), check_pos)
        {
            if let Ok(current_file) = uri.to_file_path() {
                let foundry_cfg = self.foundry_config.read().await.clone();
                let project_root = foundry_cfg.root.clone();
                let remappings = crate::solc::resolve_remappings(&foundry_cfg).await;
                // (line, start col, cursor col) of the text typed so far inside
                // the string, used to filter/replace candidates.
                let typed_range = Some((
                    position.line,
                    import_range.start.character,
                    check_pos.character,
                ));
                let items = completion::all_sol_import_paths(
                    &current_file,
                    &project_root,
                    &remappings,
                    typed_range,
                );
                return Ok(Some(CompletionResponse::List(CompletionList {
                    is_incomplete: true,
                    items,
                })));
            }
            return Ok(None);
        }

        // A quote trigger outside of the string contexts above yields nothing.
        if matches!(trigger_char, Some("\"") | Some("'")) {
            return Ok(None);
        }

        // Top-level importable symbols are only appended for non-member (`.`)
        // completion, and only when a project-root cache exists.
        let tail_candidates = if trigger_char == Some(".") {
            vec![]
        } else {
            root_cached.as_deref().map_or_else(Vec::new, |c| {
                completion::top_level_importable_completion_candidates(
                    c,
                    current_file_path.as_deref(),
                    &source_text,
                )
            })
        };

        // Case 3: general completion.
        let result = completion::handle_completion_with_tail_candidates(
            cache_ref,
            &source_text,
            position,
            trigger_char,
            file_id,
            tail_candidates,
        );
        Ok(result)
    }
3941
    /// Handles `textDocument/definition`.
    ///
    /// Resolution strategy, in order:
    /// 1. Cursor on an import path string → open the resolved file.
    /// 2. Cursor on an import alias (`X as Y`) → jump to the alias declaration.
    /// 3. If the buffer is dirty (edited since the last build), try the fast
    ///    tree-sitter lookup first and fall back to an AST name lookup;
    ///    otherwise try the (position-accurate) AST lookup first and fall back
    ///    to tree-sitter. Tree-sitter hits are validated against the target
    ///    source before being returned.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/definition request")
            .await;

        let uri = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file uri")
                    .await;
                return Ok(None);
            }
        };

        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        let source_text = String::from_utf8_lossy(&source_bytes).to_string();

        // 1) Import-path jump: if the cursor is inside the quoted path of an
        // import statement, resolve it (remappings-aware) and open that file.
        {
            let imports = crate::links::ts_find_imports(&source_bytes);
            if let Some(imp) = imports.iter().find(|imp| {
                let r = &imp.inner_range;
                position >= r.start && position <= r.end
            }) {
                let foundry_cfg = self.foundry_config_for_file(&file_path).await;
                let remappings = crate::solc::resolve_remappings(&foundry_cfg).await;
                if let Some(abs) = resolve_import_spec_to_abs(
                    &foundry_cfg.root,
                    &file_path,
                    &imp.path,
                    &remappings,
                ) {
                    if abs.exists() {
                        if let Ok(target_uri) = Url::from_file_path(&abs) {
                            let location = Location {
                                uri: target_uri,
                                // Point at the start of the target file.
                                range: Range::default(),
                            };
                            self.client
                                .log_message(
                                    MessageType::INFO,
                                    format!("found definition (import path) at {}", location.uri),
                                )
                                .await;
                            return Ok(Some(GotoDefinitionResponse::from(location)));
                        }
                    }
                }
            }
        }

        // 2) Alias jump: an identifier that matches an import alias resolves to
        // the alias declaration in this same file.
        {
            let identifier = crate::rename::get_identifier_at_position(&source_bytes, position);
            if let Some(ref ident) = identifier {
                let alias_names = crate::rename::ts_find_alias_names(&source_bytes);
                if alias_names.contains(ident.as_str()) {
                    if let Some(decl_range) =
                        crate::rename::ts_find_alias_declaration(&source_bytes, ident)
                    {
                        let location = Location {
                            uri: uri.clone(),
                            range: decl_range,
                        };
                        self.client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "found definition (alias declaration) for '{}' at {}:{}",
                                    ident, location.uri, decl_range.start.line
                                ),
                            )
                            .await;
                        return Ok(Some(GotoDefinitionResponse::from(location)));
                    }
                }
            }
        }

        // Identifier under the cursor, used both for validation and for the
        // by-name AST fallback.
        let cursor_name = goto::cursor_context(&source_text, position).map(|ctx| ctx.name);

        // A buffer is "dirty" when its text version is newer than the version
        // the cached AST build was produced from.
        let (is_dirty, cached_build) = {
            let text_version = self
                .text_cache
                .read()
                .await
                .get(&uri.to_string())
                .map(|(v, _)| *v)
                .unwrap_or(0);
            let cb = self.get_or_fetch_build(&uri, &file_path, false).await;
            let build_version = cb.as_ref().map(|b| b.build_version).unwrap_or(0);
            (text_version > build_version, cb)
        };

        // Sanity-check a tree-sitter hit: the target source must actually
        // declare the cursor identifier at the reported location. When the
        // target can't be read we accept the hit rather than drop it.
        let validate_ts = |loc: &Location| -> bool {
            let Some(ref name) = cursor_name else {
                return true;
            };
            let target_src = if loc.uri == uri {
                Some(source_text.clone())
            } else {
                loc.uri
                    .to_file_path()
                    .ok()
                    .and_then(|p| std::fs::read_to_string(&p).ok())
            };
            match target_src {
                Some(src) => goto::validate_goto_target(&src, loc, name),
                None => true,
            }
        };

        if is_dirty {
            // Dirty buffer: the AST positions may be stale, so prefer the
            // tree-sitter lookup which works on the current text.
            self.client
                .log_message(MessageType::INFO, "file is dirty, trying tree-sitter first")
                .await;

            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(
                        MessageType::INFO,
                        "tree-sitter result failed validation, trying AST fallback",
                    )
                    .await;
            }

            // AST fallback on a dirty buffer: byte offsets may be shifted, so
            // look the declaration up by name with the cursor byte as a hint.
            if let Some(ref cb) = cached_build
                && let Some(ref name) = cursor_name
            {
                let byte_hint = goto::pos_to_bytes(&source_bytes, position);
                if let Some(location) = goto::goto_declaration_by_name(cb, &uri, name, byte_hint) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (AST by name) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
            }
        } else {
            // Clean buffer: the AST build matches the text, so the precise
            // position-based AST lookup goes first.
            if let Some(ref cb) = cached_build
                && let Some(location) =
                    goto::goto_declaration_cached(cb, &uri, position, &source_bytes)
            {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "found definition (AST) at {}:{}",
                            location.uri, location.range.start.line
                        ),
                    )
                    .await;
                return Ok(Some(GotoDefinitionResponse::from(location)));
            }

            // Tree-sitter fallback when the AST had no answer.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter fallback) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(MessageType::INFO, "tree-sitter fallback failed validation")
                    .await;
            }
        }

        self.client
            .log_message(MessageType::INFO, "no definition found")
            .await;
        Ok(None)
    }
4192
4193 async fn goto_declaration(
4194 &self,
4195 params: request::GotoDeclarationParams,
4196 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoDeclarationResponse>> {
4197 self.client
4198 .log_message(MessageType::INFO, "got textDocument/declaration request")
4199 .await;
4200
4201 let uri = params.text_document_position_params.text_document.uri;
4202 let position = params.text_document_position_params.position;
4203
4204 let file_path = match uri.to_file_path() {
4205 Ok(path) => path,
4206 Err(_) => {
4207 self.client
4208 .log_message(MessageType::ERROR, "invalid file uri")
4209 .await;
4210 return Ok(None);
4211 }
4212 };
4213
4214 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4215 Some(bytes) => bytes,
4216 None => return Ok(None),
4217 };
4218
4219 {
4221 let identifier = crate::rename::get_identifier_at_position(&source_bytes, position);
4222 if let Some(ref ident) = identifier {
4223 let alias_names = crate::rename::ts_find_alias_names(&source_bytes);
4224 if alias_names.contains(ident.as_str()) {
4225 if let Some(decl_range) =
4226 crate::rename::ts_find_alias_declaration(&source_bytes, ident)
4227 {
4228 let location = Location {
4229 uri: uri.clone(),
4230 range: decl_range,
4231 };
4232 self.client
4233 .log_message(
4234 MessageType::INFO,
4235 format!(
4236 "found declaration (alias) for '{}' at line {}",
4237 ident, decl_range.start.line
4238 ),
4239 )
4240 .await;
4241 return Ok(Some(request::GotoDeclarationResponse::from(location)));
4242 }
4243 }
4244 }
4245 }
4246
4247 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4248 let cached_build = match cached_build {
4249 Some(cb) => cb,
4250 None => return Ok(None),
4251 };
4252
4253 if let Some(location) =
4254 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
4255 {
4256 self.client
4257 .log_message(
4258 MessageType::INFO,
4259 format!(
4260 "found declaration at {}:{}",
4261 location.uri, location.range.start.line
4262 ),
4263 )
4264 .await;
4265 Ok(Some(request::GotoDeclarationResponse::from(location)))
4266 } else {
4267 self.client
4268 .log_message(MessageType::INFO, "no declaration found")
4269 .await;
4270 Ok(None)
4271 }
4272 }
4273
4274 async fn goto_implementation(
4275 &self,
4276 params: request::GotoImplementationParams,
4277 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoImplementationResponse>> {
4278 self.client
4279 .log_message(MessageType::INFO, "got textDocument/implementation request")
4280 .await;
4281
4282 let uri = params.text_document_position_params.text_document.uri;
4283 let position = params.text_document_position_params.position;
4284
4285 let file_path = match uri.to_file_path() {
4286 Ok(path) => path,
4287 Err(_) => {
4288 self.client
4289 .log_message(MessageType::ERROR, "invalid file uri")
4290 .await;
4291 return Ok(None);
4292 }
4293 };
4294
4295 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4296 Some(bytes) => bytes,
4297 None => return Ok(None),
4298 };
4299
4300 let is_alias = {
4305 let ident = crate::rename::get_identifier_at_position(&source_bytes, position);
4306 if let Some(ref name) = ident {
4307 let alias_names = crate::rename::ts_find_alias_names(&source_bytes);
4308 alias_names.contains(name.as_str())
4309 } else {
4310 false
4311 }
4312 };
4313
4314 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4315
4316 if is_alias {
4321 let build = match &cached_build {
4324 Some(cb) => Some(cb.clone()),
4325 None => self.get_or_fetch_build(&uri, &file_path, true).await,
4326 };
4327 if let Some(ref cb) = build {
4328 let byte_pos = goto::pos_to_bytes(&source_bytes, position);
4329
4330 let lookup_position = if let Some(foreign_byte) =
4336 crate::rename::ts_alias_foreign_byte_offset(&source_bytes, byte_pos)
4337 {
4338 goto::bytes_to_pos(&source_bytes, foreign_byte).unwrap_or(position)
4340 } else {
4341 position
4343 };
4344
4345 if let Some(location) =
4346 goto::goto_declaration_cached(cb, &uri, lookup_position, &source_bytes)
4347 {
4348 let ident = crate::rename::get_identifier_at_position(&source_bytes, position)
4349 .unwrap_or_default();
4350 self.client
4351 .log_message(
4352 MessageType::INFO,
4353 format!(
4354 "found implementation (alias → original) for '{}' at {}:{}",
4355 ident, location.uri, location.range.start.line
4356 ),
4357 )
4358 .await;
4359 return Ok(Some(request::GotoImplementationResponse::Scalar(location)));
4360 }
4361 }
4362 }
4363
4364 let cached_build = match cached_build {
4365 Some(cb) => cb,
4366 None => return Ok(None),
4367 };
4368
4369 let byte_position = goto::pos_to_bytes(&source_bytes, position);
4370 let abs_path = uri.as_ref().strip_prefix("file://").unwrap_or(uri.as_ref());
4371
4372 let (target_id, target_decl_abs, target_decl_offset) =
4376 match references::byte_to_id(&cached_build.nodes, abs_path, byte_position) {
4377 Some(id) => {
4378 let resolved = cached_build
4379 .nodes
4380 .get(abs_path)
4381 .and_then(|f| f.get(&id))
4382 .and_then(|info| info.referenced_declaration)
4383 .unwrap_or(id);
4384
4385 let (decl_abs, decl_offset) = references::resolve_target_location(
4387 &cached_build,
4388 &uri,
4389 position,
4390 &source_bytes,
4391 )
4392 .unwrap_or_else(|| (abs_path.to_string(), byte_position));
4393
4394 (resolved, decl_abs, decl_offset)
4395 }
4396 None => return Ok(None),
4397 };
4398
4399 let project_build = self.ensure_project_cached_build().await;
4401 let sub_caches = self.sub_caches.read().await;
4402
4403 let mut builds: Vec<&goto::CachedBuild> = vec![&cached_build];
4404 if let Some(ref pb) = project_build {
4405 builds.push(pb);
4406 }
4407 for sc in sub_caches.iter() {
4408 builds.push(sc);
4409 }
4410
4411 let mut locations: Vec<Location> = Vec::new();
4416 let mut seen_positions: Vec<(String, u32, u32)> = Vec::new(); for build in &builds {
4419 let local_target =
4421 references::byte_to_id(&build.nodes, &target_decl_abs, target_decl_offset).or_else(
4422 || {
4423 if build.nodes.values().any(|f| f.contains_key(&target_id)) {
4425 Some(target_id)
4426 } else {
4427 None
4428 }
4429 },
4430 );
4431
4432 let Some(local_id) = local_target else {
4433 continue;
4434 };
4435
4436 let Some(impls) = build.base_function_implementation.get(&local_id) else {
4438 continue;
4439 };
4440
4441 for &impl_id in impls {
4442 if let Some(loc) =
4443 references::id_to_location(&build.nodes, &build.id_to_path_map, impl_id)
4444 {
4445 let key = (
4447 loc.uri.to_string(),
4448 loc.range.start.line,
4449 loc.range.start.character,
4450 );
4451 if !seen_positions.contains(&key) {
4452 seen_positions.push(key);
4453 locations.push(loc);
4454 }
4455 }
4456 }
4457 }
4458
4459 if locations.is_empty() {
4462 if let Some(location) =
4463 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
4464 {
4465 self.client
4466 .log_message(
4467 MessageType::INFO,
4468 "no implementations found, falling back to definition",
4469 )
4470 .await;
4471 return Ok(Some(request::GotoImplementationResponse::Scalar(location)));
4472 }
4473
4474 self.client
4475 .log_message(MessageType::INFO, "no implementations found")
4476 .await;
4477 return Ok(None);
4478 }
4479
4480 self.client
4481 .log_message(
4482 MessageType::INFO,
4483 format!("found {} implementation(s)", locations.len()),
4484 )
4485 .await;
4486
4487 if locations.len() == 1 {
4488 Ok(Some(request::GotoImplementationResponse::Scalar(
4489 locations.into_iter().next().unwrap(),
4490 )))
4491 } else {
4492 Ok(Some(request::GotoImplementationResponse::Array(locations)))
4493 }
4494 }
4495
    /// Handles `textDocument/references`.
    ///
    /// Import aliases are handled purely with tree-sitter (same-file textual
    /// occurrences). Other symbols are resolved against the per-file AST
    /// build, then extended with matches from the project-wide build (after an
    /// optional scoped warm-refresh when the file is missing from it) and from
    /// any sub-project caches, and finally deduplicated.
    async fn references(
        &self,
        params: ReferenceParams,
    ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
        self.client
            .log_message(MessageType::INFO, "Got a textDocument/references request")
            .await;

        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return Ok(None);
            }
        };
        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        // Alias short-circuit: references of an import alias are collected by
        // identifier name within this file only, without touching the AST.
        {
            let cursor_byte = crate::goto::pos_to_bytes(&source_bytes, position);
            let identifier = crate::rename::get_identifier_at_position(&source_bytes, position);

            // The cursor is "on an alias" either when it sits on the alias's
            // local name in the import itself, or on any identifier whose name
            // matches a known alias.
            let is_alias = if let Some(alias_name) =
                crate::rename::ts_alias_local_name_at_cursor(&source_bytes, cursor_byte)
            {
                Some(alias_name)
            } else if let Some(ref ident) = identifier {
                let alias_names = crate::rename::ts_find_alias_names(&source_bytes);
                if alias_names.contains(ident.as_str()) {
                    Some(ident.clone())
                } else {
                    None
                }
            } else {
                None
            };

            if let Some(alias_name) = is_alias {
                let locations = crate::rename::ts_collect_identifier_locations(
                    &source_bytes,
                    &uri,
                    &alias_name,
                );
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "Found {} references for alias '{}'",
                            locations.len(),
                            alias_name
                        ),
                    )
                    .await;
                return Ok(Some(locations));
            }
        }

        // A build for this file is required (force=true builds it if missing).
        let file_build = self.get_or_fetch_build(&uri, &file_path, true).await;
        let file_build = match file_build {
            Some(cb) => cb,
            None => return Ok(None),
        };
        let mut project_build = self.ensure_project_cached_build().await;
        let current_abs = file_path.to_string_lossy().to_string();
        // Warm-refresh: when full-project scanning is enabled but the project
        // build doesn't know this file yet, reindex just the reverse-import
        // closure of this file and merge the result into the project cache.
        if self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && project_build
                .as_ref()
                .is_some_and(|b| !b.nodes.contains_key(current_abs.as_str()))
        {
            let foundry_config = self.foundry_config_for_file(&file_path).await;
            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
            let changed = vec![PathBuf::from(&current_abs)];
            let cfg_for_plan = foundry_config.clone();
            let remappings_for_plan = remappings.clone();
            // The import-closure walk is filesystem-heavy; keep it off the
            // async runtime worker threads.
            let affected_set = tokio::task::spawn_blocking(move || {
                compute_reverse_import_closure(&cfg_for_plan, &changed, &remappings_for_plan)
            })
            .await
            .ok()
            .unwrap_or_default();
            let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
            if affected_files.is_empty() {
                affected_files.push(PathBuf::from(&current_abs));
            }
            let text_cache_snapshot = self.text_cache.read().await.clone();
            match crate::solc::solc_project_index_scoped(
                &foundry_config,
                Some(&self.client),
                Some(&text_cache_snapshot),
                &affected_files,
            )
            .await
            {
                Ok(ast_data) => {
                    let scoped_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *self.path_interner.write().await),
                    ));
                    if let Some(root_key) = self.project_cache_key().await {
                        // Merge the scoped build into the existing root build;
                        // on merge failure the scoped build alone is used.
                        let merged = {
                            let mut cache = self.ast_cache.write().await;
                            let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                                let mut merged = (*existing).clone();
                                match merge_scoped_cached_build(
                                    &mut merged,
                                    (*scoped_build).clone(),
                                ) {
                                    Ok(_) => Arc::new(merged),
                                    Err(_) => scoped_build.clone(),
                                }
                            } else {
                                scoped_build.clone()
                            };
                            cache.insert(root_key.into(), merged.clone());
                            merged
                        };
                        project_build = Some(merged);
                    } else {
                        project_build = Some(scoped_build);
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-refresh: scoped reindex applied (affected={})",
                                affected_files.len()
                            ),
                        )
                        .await;
                }
                Err(e) => {
                    // Best-effort: a failed refresh still allows file-local results.
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!("references warm-refresh: scoped reindex failed: {e}"),
                        )
                        .await;
                }
            }
        }

        // File-local references first.
        let mut locations = references::goto_references_cached(
            &file_build,
            &uri,
            position,
            &source_bytes,
            None,
            params.context.include_declaration,
        );

        // Cross-file references, keyed by the declaration's (path, offset).
        if let Some((def_abs_path, def_byte_offset)) =
            references::resolve_target_location(&file_build, &uri, position, &source_bytes)
        {
            if let Some(project_build) = project_build {
                let other_locations = references::goto_references_for_target(
                    &project_build,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                    // Exclude the current file: its hits were already gathered.
                    Some(&current_abs),
                );
                locations.extend(other_locations);
            }

            let sub_caches = self.sub_caches.read().await;
            for sub_cache in sub_caches.iter() {
                let sub_locations = references::goto_references_for_target(
                    sub_cache,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                    None,
                );
                locations.extend(sub_locations);
            }
        }

        // Merging several sources can produce duplicates; collapse them.
        locations = references::dedup_locations(locations);

        self.client
            .log_message(
                MessageType::INFO,
                format!("Found {} references", locations.len()),
            )
            .await;
        Ok(Some(locations))
    }
4721
4722 async fn prepare_rename(
4723 &self,
4724 params: TextDocumentPositionParams,
4725 ) -> tower_lsp::jsonrpc::Result<Option<PrepareRenameResponse>> {
4726 self.client
4727 .log_message(MessageType::INFO, "got textDocument/prepareRename request")
4728 .await;
4729
4730 let uri = params.text_document.uri;
4731 let position = params.position;
4732
4733 let file_path = match uri.to_file_path() {
4734 Ok(path) => path,
4735 Err(_) => {
4736 self.client
4737 .log_message(MessageType::ERROR, "invalid file uri")
4738 .await;
4739 return Ok(None);
4740 }
4741 };
4742
4743 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4744 Some(bytes) => bytes,
4745 None => return Ok(None),
4746 };
4747
4748 if let Some(range) = rename::get_identifier_range(&source_bytes, position) {
4749 self.client
4750 .log_message(
4751 MessageType::INFO,
4752 format!(
4753 "prepare rename range: {}:{}",
4754 range.start.line, range.start.character
4755 ),
4756 )
4757 .await;
4758 Ok(Some(PrepareRenameResponse::Range(range)))
4759 } else {
4760 self.client
4761 .log_message(MessageType::INFO, "no identifier found for prepare rename")
4762 .await;
4763 Ok(None)
4764 }
4765 }
4766
4767 async fn rename(
4768 &self,
4769 params: RenameParams,
4770 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
4771 self.client
4772 .log_message(MessageType::INFO, "got textDocument/rename request")
4773 .await;
4774
4775 let uri = params.text_document_position.text_document.uri;
4776 let position = params.text_document_position.position;
4777 let new_name = params.new_name;
4778 let file_path = match uri.to_file_path() {
4779 Ok(p) => p,
4780 Err(_) => {
4781 self.client
4782 .log_message(MessageType::ERROR, "invalid file uri")
4783 .await;
4784 return Ok(None);
4785 }
4786 };
4787 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4788 Some(bytes) => bytes,
4789 None => return Ok(None),
4790 };
4791
4792 let current_identifier = match rename::get_identifier_at_position(&source_bytes, position) {
4793 Some(id) => id,
4794 None => {
4795 self.client
4796 .log_message(MessageType::ERROR, "No identifier found at position")
4797 .await;
4798 return Ok(None);
4799 }
4800 };
4801
4802 if !utils::is_valid_solidity_identifier(&new_name) {
4803 return Err(tower_lsp::jsonrpc::Error::invalid_params(
4804 "new name is not a valid solidity identifier",
4805 ));
4806 }
4807
4808 if new_name == current_identifier {
4809 self.client
4810 .log_message(
4811 MessageType::INFO,
4812 "new name is the same as current identifier",
4813 )
4814 .await;
4815 return Ok(None);
4816 }
4817
4818 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4819 let cached_build = match cached_build {
4820 Some(cb) => cb,
4821 None => return Ok(None),
4822 };
4823 let other_builds: Vec<Arc<goto::CachedBuild>> = {
4824 let cache = self.ast_cache.read().await;
4825 cache
4826 .iter()
4827 .filter(|(key, _)| key.as_str() != uri.to_string())
4828 .map(|(_, v)| v.clone())
4829 .collect()
4830 };
4831 let other_refs: Vec<&goto::CachedBuild> = other_builds.iter().map(|v| v.as_ref()).collect();
4832
4833 let text_buffers: HashMap<String, Vec<u8>> = {
4837 let text_cache = self.text_cache.read().await;
4838 text_cache
4839 .iter()
4840 .map(|(uri, (_, content))| (uri.to_string(), content.as_bytes().to_vec()))
4841 .collect()
4842 };
4843
4844 match rename::rename_symbol(
4845 &cached_build,
4846 &uri,
4847 position,
4848 &source_bytes,
4849 new_name,
4850 &other_refs,
4851 &text_buffers,
4852 ) {
4853 Some(workspace_edit) => {
4854 self.client
4855 .log_message(
4856 MessageType::INFO,
4857 format!(
4858 "created rename edit with {} file(s), {} total change(s)",
4859 workspace_edit
4860 .changes
4861 .as_ref()
4862 .map(|c| c.len())
4863 .unwrap_or(0),
4864 workspace_edit
4865 .changes
4866 .as_ref()
4867 .map(|c| c.values().map(|v| v.len()).sum::<usize>())
4868 .unwrap_or(0)
4869 ),
4870 )
4871 .await;
4872
4873 Ok(Some(workspace_edit))
4878 }
4879
4880 None => {
4881 self.client
4882 .log_message(MessageType::INFO, "No locations found for renaming")
4883 .await;
4884 Ok(None)
4885 }
4886 }
4887 }
4888
4889 async fn symbol(
4890 &self,
4891 params: WorkspaceSymbolParams,
4892 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SymbolInformation>>> {
4893 self.client
4894 .log_message(MessageType::INFO, "got workspace/symbol request")
4895 .await;
4896
4897 let files: Vec<(Url, String)> = {
4899 let cache = self.text_cache.read().await;
4900 cache
4901 .iter()
4902 .filter(|(uri_str, _)| uri_str.ends_with(".sol"))
4903 .filter_map(|(uri_str, (_, content))| {
4904 Url::parse(uri_str).ok().map(|uri| (uri, content.clone()))
4905 })
4906 .collect()
4907 };
4908
4909 let mut all_symbols = symbols::extract_workspace_symbols(&files);
4910 if !params.query.is_empty() {
4911 let query = params.query.to_lowercase();
4912 all_symbols.retain(|symbol| symbol.name.to_lowercase().contains(&query));
4913 }
4914 self.client
4915 .log_message(
4916 MessageType::INFO,
4917 format!("found {} symbols", all_symbols.len()),
4918 )
4919 .await;
4920 Ok(Some(all_symbols))
4921 }
4922
4923 async fn document_symbol(
4924 &self,
4925 params: DocumentSymbolParams,
4926 ) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
4927 self.client
4928 .log_message(MessageType::INFO, "got textDocument/documentSymbol request")
4929 .await;
4930 let uri = params.text_document.uri;
4931 let file_path = match uri.to_file_path() {
4932 Ok(path) => path,
4933 Err(_) => {
4934 self.client
4935 .log_message(MessageType::ERROR, "invalid file uri")
4936 .await;
4937 return Ok(None);
4938 }
4939 };
4940
4941 let source = {
4943 let cache = self.text_cache.read().await;
4944 cache
4945 .get(&uri.to_string())
4946 .map(|(_, content)| content.clone())
4947 };
4948 let source = match source {
4949 Some(s) => s,
4950 None => match std::fs::read_to_string(&file_path) {
4951 Ok(s) => s,
4952 Err(_) => return Ok(None),
4953 },
4954 };
4955
4956 let symbols = symbols::extract_document_symbols(&source);
4957 self.client
4958 .log_message(
4959 MessageType::INFO,
4960 format!("found {} document symbols", symbols.len()),
4961 )
4962 .await;
4963 Ok(Some(DocumentSymbolResponse::Nested(symbols)))
4964 }
4965
4966 async fn document_highlight(
4967 &self,
4968 params: DocumentHighlightParams,
4969 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentHighlight>>> {
4970 self.client
4971 .log_message(
4972 MessageType::INFO,
4973 "got textDocument/documentHighlight request",
4974 )
4975 .await;
4976
4977 let uri = params.text_document_position_params.text_document.uri;
4978 let position = params.text_document_position_params.position;
4979
4980 let source = {
4981 let cache = self.text_cache.read().await;
4982 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4983 };
4984
4985 let source = match source {
4986 Some(s) => s,
4987 None => {
4988 let file_path = match uri.to_file_path() {
4989 Ok(p) => p,
4990 Err(_) => return Ok(None),
4991 };
4992 match std::fs::read_to_string(&file_path) {
4993 Ok(s) => s,
4994 Err(_) => return Ok(None),
4995 }
4996 }
4997 };
4998
4999 let highlights = highlight::document_highlights(&source, position);
5000 self.client
5001 .log_message(
5002 MessageType::INFO,
5003 format!("found {} document highlights", highlights.len()),
5004 )
5005 .await;
5006 Ok(Some(highlights))
5007 }
5008
5009 async fn hover(&self, params: HoverParams) -> tower_lsp::jsonrpc::Result<Option<Hover>> {
5010 self.client
5011 .log_message(MessageType::INFO, "got textDocument/hover request")
5012 .await;
5013
5014 let uri = params.text_document_position_params.text_document.uri;
5015 let position = params.text_document_position_params.position;
5016
5017 let file_path = match uri.to_file_path() {
5018 Ok(path) => path,
5019 Err(_) => {
5020 self.client
5021 .log_message(MessageType::ERROR, "invalid file uri")
5022 .await;
5023 return Ok(None);
5024 }
5025 };
5026
5027 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
5028 Some(bytes) => bytes,
5029 None => return Ok(None),
5030 };
5031
5032 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
5033 let cached_build = match cached_build {
5034 Some(cb) => cb,
5035 None => return Ok(None),
5036 };
5037
5038 let result = hover::hover_info(&cached_build, &uri, position, &source_bytes);
5039
5040 if result.is_some() {
5041 self.client
5042 .log_message(MessageType::INFO, "hover info found")
5043 .await;
5044 } else {
5045 self.client
5046 .log_message(MessageType::INFO, "no hover info found")
5047 .await;
5048 }
5049
5050 Ok(result)
5051 }
5052
5053 async fn signature_help(
5054 &self,
5055 params: SignatureHelpParams,
5056 ) -> tower_lsp::jsonrpc::Result<Option<SignatureHelp>> {
5057 self.client
5058 .log_message(MessageType::INFO, "got textDocument/signatureHelp request")
5059 .await;
5060
5061 let uri = params.text_document_position_params.text_document.uri;
5062 let position = params.text_document_position_params.position;
5063
5064 let file_path = match uri.to_file_path() {
5065 Ok(path) => path,
5066 Err(_) => {
5067 self.client
5068 .log_message(MessageType::ERROR, "invalid file uri")
5069 .await;
5070 return Ok(None);
5071 }
5072 };
5073
5074 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
5075 Some(bytes) => bytes,
5076 None => return Ok(None),
5077 };
5078
5079 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
5080 let cached_build = match cached_build {
5081 Some(cb) => cb,
5082 None => return Ok(None),
5083 };
5084
5085 let result = hover::signature_help(&cached_build, &source_bytes, position);
5086
5087 Ok(result)
5088 }
5089
5090 async fn document_link(
5091 &self,
5092 params: DocumentLinkParams,
5093 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
5094 self.client
5095 .log_message(MessageType::INFO, "got textDocument/documentLink request")
5096 .await;
5097
5098 let uri = params.text_document.uri;
5099 let file_path = match uri.to_file_path() {
5100 Ok(path) => path,
5101 Err(_) => {
5102 self.client
5103 .log_message(MessageType::ERROR, "invalid file uri")
5104 .await;
5105 return Ok(None);
5106 }
5107 };
5108
5109 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
5110 Some(bytes) => bytes,
5111 None => return Ok(None),
5112 };
5113
5114 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
5115 let cached_build = match cached_build {
5116 Some(cb) => cb,
5117 None => return Ok(None),
5118 };
5119
5120 let result = links::document_links(&cached_build, &uri, &source_bytes);
5121 self.client
5122 .log_message(
5123 MessageType::INFO,
5124 format!("found {} document links", result.len()),
5125 )
5126 .await;
5127 Ok(Some(result))
5128 }
5129
5130 async fn semantic_tokens_full(
5131 &self,
5132 params: SemanticTokensParams,
5133 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
5134 self.client
5135 .log_message(
5136 MessageType::INFO,
5137 "got textDocument/semanticTokens/full request",
5138 )
5139 .await;
5140
5141 let uri = params.text_document.uri;
5142 let source = {
5143 let cache = self.text_cache.read().await;
5144 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
5145 };
5146
5147 let source = match source {
5148 Some(s) => s,
5149 None => {
5150 let file_path = match uri.to_file_path() {
5152 Ok(p) => p,
5153 Err(_) => return Ok(None),
5154 };
5155 match std::fs::read_to_string(&file_path) {
5156 Ok(s) => s,
5157 Err(_) => return Ok(None),
5158 }
5159 }
5160 };
5161
5162 let mut tokens = semantic_tokens::semantic_tokens_full(&source);
5163
5164 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
5166 let result_id = id.to_string();
5167 tokens.result_id = Some(result_id.clone());
5168
5169 {
5170 let mut cache = self.semantic_token_cache.write().await;
5171 cache.insert(uri.to_string().into(), (result_id, tokens.data.clone()));
5172 }
5173
5174 Ok(Some(SemanticTokensResult::Tokens(tokens)))
5175 }
5176
5177 async fn semantic_tokens_range(
5178 &self,
5179 params: SemanticTokensRangeParams,
5180 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensRangeResult>> {
5181 self.client
5182 .log_message(
5183 MessageType::INFO,
5184 "got textDocument/semanticTokens/range request",
5185 )
5186 .await;
5187
5188 let uri = params.text_document.uri;
5189 let range = params.range;
5190 let source = {
5191 let cache = self.text_cache.read().await;
5192 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
5193 };
5194
5195 let source = match source {
5196 Some(s) => s,
5197 None => {
5198 let file_path = match uri.to_file_path() {
5199 Ok(p) => p,
5200 Err(_) => return Ok(None),
5201 };
5202 match std::fs::read_to_string(&file_path) {
5203 Ok(s) => s,
5204 Err(_) => return Ok(None),
5205 }
5206 }
5207 };
5208
5209 let tokens =
5210 semantic_tokens::semantic_tokens_range(&source, range.start.line, range.end.line);
5211
5212 Ok(Some(SemanticTokensRangeResult::Tokens(tokens)))
5213 }
5214
    async fn semantic_tokens_full_delta(
        &self,
        params: SemanticTokensDeltaParams,
    ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
        // Handles textDocument/semanticTokens/full/delta: recomputes tokens
        // for the whole document, then returns either a minimal edit list
        // (when the client's previous_result_id matches our cached baseline)
        // or a full token set (when the baseline is unknown or stale).
        self.client
            .log_message(
                MessageType::INFO,
                "got textDocument/semanticTokens/full/delta request",
            )
            .await;

        let uri = params.text_document.uri;
        let previous_result_id = params.previous_result_id;

        // Prefer the in-memory document text; fall back to disk on a miss.
        let source = {
            let cache = self.text_cache.read().await;
            cache.get(&uri.to_string()).map(|(_, s)| s.clone())
        };

        let source = match source {
            Some(s) => s,
            None => {
                let file_path = match uri.to_file_path() {
                    Ok(p) => p,
                    Err(_) => return Ok(None),
                };
                match std::fs::read_to_string(&file_path) {
                    Ok(s) => s,
                    Err(_) => return Ok(None),
                }
            }
        };

        let mut new_tokens = semantic_tokens::semantic_tokens_full(&source);

        // Mint a fresh result id for the new token set.
        let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
        let new_result_id = id.to_string();
        new_tokens.result_id = Some(new_result_id.clone());

        let uri_str = uri.to_string();

        // Only use the cached tokens as a delta baseline when their result id
        // matches what the client says it last received.
        let old_tokens = {
            let cache = self.semantic_token_cache.read().await;
            cache
                .get(&uri_str)
                .filter(|(rid, _)| *rid == previous_result_id)
                .map(|(_, tokens)| tokens.clone())
        };

        // Replace the cached baseline before responding, so the next delta
        // request diffs against what we are about to return.
        {
            let mut cache = self.semantic_token_cache.write().await;
            cache.insert(
                uri_str.into(),
                (new_result_id.clone(), new_tokens.data.clone()),
            );
        }

        match old_tokens {
            Some(old) => {
                let edits = semantic_tokens::compute_delta(&old, &new_tokens.data);
                Ok(Some(SemanticTokensFullDeltaResult::TokensDelta(
                    SemanticTokensDelta {
                        result_id: Some(new_result_id),
                        edits,
                    },
                )))
            }
            None => {
                // No usable baseline: send the full token set instead.
                Ok(Some(SemanticTokensFullDeltaResult::Tokens(new_tokens)))
            }
        }
    }
5292
5293 async fn folding_range(
5294 &self,
5295 params: FoldingRangeParams,
5296 ) -> tower_lsp::jsonrpc::Result<Option<Vec<FoldingRange>>> {
5297 self.client
5298 .log_message(MessageType::INFO, "got textDocument/foldingRange request")
5299 .await;
5300
5301 let uri = params.text_document.uri;
5302
5303 let source = {
5304 let cache = self.text_cache.read().await;
5305 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
5306 };
5307
5308 let source = match source {
5309 Some(s) => s,
5310 None => {
5311 let file_path = match uri.to_file_path() {
5312 Ok(p) => p,
5313 Err(_) => return Ok(None),
5314 };
5315 match std::fs::read_to_string(&file_path) {
5316 Ok(s) => s,
5317 Err(_) => return Ok(None),
5318 }
5319 }
5320 };
5321
5322 let ranges = folding::folding_ranges(&source);
5323 self.client
5324 .log_message(
5325 MessageType::INFO,
5326 format!("found {} folding ranges", ranges.len()),
5327 )
5328 .await;
5329 Ok(Some(ranges))
5330 }
5331
5332 async fn selection_range(
5333 &self,
5334 params: SelectionRangeParams,
5335 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SelectionRange>>> {
5336 self.client
5337 .log_message(MessageType::INFO, "got textDocument/selectionRange request")
5338 .await;
5339
5340 let uri = params.text_document.uri;
5341
5342 let source = {
5343 let cache = self.text_cache.read().await;
5344 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
5345 };
5346
5347 let source = match source {
5348 Some(s) => s,
5349 None => {
5350 let file_path = match uri.to_file_path() {
5351 Ok(p) => p,
5352 Err(_) => return Ok(None),
5353 };
5354 match std::fs::read_to_string(&file_path) {
5355 Ok(s) => s,
5356 Err(_) => return Ok(None),
5357 }
5358 }
5359 };
5360
5361 let ranges = selection::selection_ranges(&source, ¶ms.positions);
5362 self.client
5363 .log_message(
5364 MessageType::INFO,
5365 format!("found {} selection ranges", ranges.len()),
5366 )
5367 .await;
5368 Ok(Some(ranges))
5369 }
5370
5371 async fn inlay_hint(
5372 &self,
5373 params: InlayHintParams,
5374 ) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
5375 self.client
5376 .log_message(MessageType::INFO, "got textDocument/inlayHint request")
5377 .await;
5378
5379 let uri = params.text_document.uri;
5380 let range = params.range;
5381
5382 let file_path = match uri.to_file_path() {
5383 Ok(path) => path,
5384 Err(_) => {
5385 self.client
5386 .log_message(MessageType::ERROR, "invalid file uri")
5387 .await;
5388 return Ok(None);
5389 }
5390 };
5391
5392 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
5393 Some(bytes) => bytes,
5394 None => return Ok(None),
5395 };
5396
5397 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
5398 let cached_build = match cached_build {
5399 Some(cb) => cb,
5400 None => return Ok(None),
5401 };
5402
5403 let mut hints = inlay_hints::inlay_hints(&cached_build, &uri, range, &source_bytes);
5404
5405 let settings = self.settings.read().await;
5407 if !settings.inlay_hints.parameters {
5408 hints.retain(|h| h.kind != Some(InlayHintKind::PARAMETER));
5409 }
5410 self.client
5411 .log_message(
5412 MessageType::INFO,
5413 format!("found {} inlay hints", hints.len()),
5414 )
5415 .await;
5416
5417 Ok(Some(hints))
5423 }
5424
    async fn code_action(
        &self,
        params: CodeActionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CodeActionResponse>> {
        // Handles textDocument/codeAction: builds quick-fix actions for the
        // diagnostics the client sent, driven by the code-action database
        // (`self.code_action_db`) keyed by solc error code.
        use crate::code_actions::FixKind;

        let uri = &params.text_document.uri;

        // Fetch the document text once up front; each fix computes its edit
        // against this snapshot.
        let source: Option<String> = if let Ok(path) = uri.to_file_path() {
            self.get_source_bytes(uri, &path)
                .await
                .map(|b| String::from_utf8_lossy(&b).into_owned())
        } else {
            None
        };

        let db = &self.code_action_db;
        let mut actions: Vec<CodeActionOrCommand> = Vec::new();

        for diag in &params.context.diagnostics {
            // Special-cased lint: "unused-import" is a string code, not a
            // numeric solc error, and gets its own delete-import quick fix.
            if let Some(NumberOrString::String(s)) = &diag.code {
                if s == "unused-import" {
                    if let Some(edit) = source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::DeleteNodeByKind {
                                node_kind: "import_directive",
                            },
                        )
                    }) {
                        let mut changes = HashMap::new();
                        changes.insert(uri.clone(), vec![edit]);
                        actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                            title: "Remove unused import".to_string(),
                            kind: Some(CodeActionKind::QUICKFIX),
                            diagnostics: Some(vec![diag.clone()]),
                            edit: Some(WorkspaceEdit {
                                changes: Some(changes),
                                ..Default::default()
                            }),
                            is_preferred: Some(true),
                            ..Default::default()
                        }));
                    }
                    continue;
                }
            }

            // Remaining diagnostics must carry a numeric (solc) error code;
            // anything else has no database entry and is skipped.
            let code: ErrorCode = match &diag.code {
                Some(NumberOrString::String(s)) => match s.parse() {
                    Ok(n) => ErrorCode(n),
                    Err(_) => continue,
                },
                _ => continue,
            };

            if let Some(def) = db.get(&code) {
                // Translate the database's declarative FixKind into a concrete
                // text edit via the tree-sitter-backed helpers in `goto`.
                let edit_opt: Option<TextEdit> = match &def.fix {
                    FixKind::Insert { text, anchor: _ } => {
                        goto::code_action_edit(
                            source.as_deref().unwrap_or(""),
                            diag.range,
                            goto::CodeActionKind::InsertAtFileStart { text },
                        )
                    }

                    FixKind::ReplaceToken {
                        replacement,
                        walk_to,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceToken {
                                replacement,
                                walk_to: walk_to.as_deref(),
                            },
                        )
                    }),

                    FixKind::DeleteToken => source.as_deref().and_then(|src| {
                        goto::code_action_edit(src, diag.range, goto::CodeActionKind::DeleteToken)
                    }),

                    FixKind::DeleteNode { node_kind } => {
                        // Only local-variable declarations are currently
                        // supported for whole-node deletion.
                        if node_kind == "variable_declaration_statement" {
                            source.as_deref().and_then(|src| {
                                goto::code_action_edit(
                                    src,
                                    diag.range,
                                    goto::CodeActionKind::DeleteLocalVar,
                                )
                            })
                        } else {
                            None
                        }
                    }

                    FixKind::DeleteChildNode {
                        walk_to,
                        child_kinds,
                    } => {
                        // Borrow the owned kind strings as &str for the edit API.
                        let ck: Vec<&str> = child_kinds.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::DeleteChildNode {
                                    walk_to,
                                    child_kinds: &ck,
                                },
                            )
                        })
                    }

                    FixKind::ReplaceChildNode {
                        walk_to,
                        child_kind,
                        replacement,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceChildNode {
                                walk_to,
                                child_kind,
                                replacement,
                            },
                        )
                    }),

                    FixKind::InsertBeforeNode {
                        walk_to,
                        before_child,
                        text,
                    } => {
                        let bc: Vec<&str> = before_child.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::InsertBeforeNode {
                                    walk_to,
                                    before_child: &bc,
                                    text,
                                },
                            )
                        })
                    }

                    // Custom fixes are handled (if at all) by the per-code
                    // match below, not by the generic edit machinery.
                    FixKind::Custom => None,
                };

                if let Some(edit) = edit_opt {
                    let mut changes = HashMap::new();
                    changes.insert(uri.clone(), vec![edit]);
                    actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                        title: def.title.clone(),
                        kind: Some(CodeActionKind::QUICKFIX),
                        diagnostics: Some(vec![diag.clone()]),
                        edit: Some(WorkspaceEdit {
                            changes: Some(changes),
                            ..Default::default()
                        }),
                        is_preferred: Some(true),
                        ..Default::default()
                    }));
                    continue; }

                // A database entry whose generic fix produced no edit is done,
                // unless it is FixKind::Custom (which falls through below).
                if !matches!(def.fix, FixKind::Custom) {
                    continue;
                }
            }

            // Placeholder for per-error-code custom fixes; currently empty.
            #[allow(clippy::match_single_binding)]
            match code {
                _ => {}
            }
        }

        Ok(Some(actions))
    }
5626
    async fn will_rename_files(
        &self,
        params: RenameFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        // Handles workspace/willRenameFiles: before the client performs the
        // rename, compute text edits that rewrite import paths in every
        // project source so they keep resolving after the move.
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Respect the user setting; produce no edits when rewriting is off.
        if !self
            .settings
            .read()
            .await
            .file_operations
            .update_imports_on_rename
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willRenameFiles: updateImportsOnRename disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project sources on the blocking pool (filesystem walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willRenameFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Convert client URI pairs into filesystem path pairs, silently
        // skipping anything that is not a valid file:// URI.
        let raw_renames: Vec<(std::path::PathBuf, std::path::PathBuf)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                let old_path = old_uri.to_file_path().ok()?;
                let new_path = new_uri.to_file_path().ok()?;
                Some((old_path, new_path))
            })
            .collect();

        // A folder rename implies a rename of every source file beneath it.
        let renames = file_operations::expand_folder_renames(&raw_renames, &source_files);

        if renames.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} rename(s) after folder expansion",
                    renames.len()
                ),
            )
            .await;

        // Find sources missing from the text cache; their contents must be
        // loaded from disk before edits can be computed.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Disk reads are blocking; run them off the async runtime.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            // Version 0 marks disk-loaded entries; or_insert preserves any
            // newer editor-supplied content that arrived meanwhile.
            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                tc.entry(uri_str.into()).or_insert((0, content));
            }
        }

        // Compute the import-path rewrite against the now-hydrated cache.
        let text_cache = self.text_cache.clone();
        let result = {
            let tc = text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::rename_imports(
                &source_files,
                &renames,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal bookkeeping issues as a single warning line.
        let stats = &result.stats;
        if stats.read_failures > 0 || stats.pathdiff_failures > 0 || stats.duplicate_renames > 0 {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willRenameFiles stats: read_failures={}, pathdiff_failures={}, \
                         duplicate_renames={}, no_parent={}, no_op_skips={}, dedup_skips={}",
                        stats.read_failures,
                        stats.pathdiff_failures,
                        stats.duplicate_renames,
                        stats.no_parent,
                        stats.no_op_skips,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;

        if all_edits.is_empty() {
            self.client
                .log_message(MessageType::INFO, "willRenameFiles: no import edits needed")
                .await;
            return Ok(None);
        }

        // Patch our own cache immediately so requests that race the client's
        // application of the WorkspaceEdit already see the rewritten imports.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willRenameFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        // The client applies these edits before performing the rename.
        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
5821
    async fn did_rename_files(&self, params: RenameFilesParams) {
        // Handles workspace/didRenameFiles: after a rename has happened,
        // migrate per-file caches to the new URIs, invalidate derived caches,
        // and re-index the project in the background.
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Mark the project index stale and record every touched path.
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(old_uri) = Url::parse(&file.old_uri)
                    && let Ok(old_path) = old_uri.to_file_path()
                {
                    changed.insert(old_path.to_string_lossy().to_string());
                }
                if let Ok(new_uri) = Url::parse(&file.new_uri)
                    && let Ok(new_path) = new_uri.to_file_path()
                {
                    changed.insert(new_path.to_string_lossy().to_string());
                }
            }
        }

        let raw_uri_pairs: Vec<(Url, Url)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                Some((old_uri, new_uri))
            })
            .collect();

        // Expand folder renames into per-file renames, using the union of
        // cached keys and freshly discovered sources as the known-file set.
        let file_renames = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_renames_from_paths(&raw_uri_pairs, &all_paths)
        };

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didRenameFiles: migrating {} cache entry/entries",
                    file_renames.len()
                ),
            )
            .await;

        // Move text-cache entries to their new keys; the derived caches below
        // are only evicted and will repopulate lazily.
        {
            let mut tc = self.text_cache.write().await;
            for (old_key, new_key) in &file_renames {
                if let Some(entry) = tc.remove(old_key) {
                    tc.insert(new_key.clone().into(), entry);
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for (old_key, _) in &file_renames {
                ac.remove(old_key);
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for (old_key, _) in &file_renames {
                cc.remove(old_key);
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for (old_key, _) in &file_renames {
                sc.remove(old_key);
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for (old_key, _) in &file_renames {
                pending.remove(old_key);
            }
        }

        // If any renamed path (old or new) belongs to a library, drop the
        // corresponding library sub-caches too.
        {
            let affected_paths: Vec<std::path::PathBuf> = file_renames
                .iter()
                .flat_map(|(old_key, new_key)| {
                    let mut paths = Vec::new();
                    if let Ok(u) = Url::parse(old_key) {
                        if let Ok(p) = u.to_file_path() {
                            paths.push(p);
                        }
                    }
                    if let Ok(u) = Url::parse(new_key) {
                        if let Ok(p) = u.to_file_path() {
                            paths.push(p);
                        }
                    }
                    paths
                })
                .collect();
            self.invalidate_lib_sub_caches_if_affected(&affected_paths)
                .await;
        }

        // Snapshot everything the background re-index task needs before
        // spawning it, so the task owns its data.
        let root_key = self.project_cache_key().await;

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let path_interner = self.path_interner.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *path_interner.write().await),
                    ));
                    let source_count = cached_build.nodes.len();
                    ast_cache
                        .write()
                        .await
                        .insert(cache_key.into(), cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didRenameFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didRenameFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
6004
    async fn will_delete_files(
        &self,
        params: DeleteFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        // Handles workspace/willDeleteFiles: before the client deletes files,
        // compute edits that remove import statements referring to them from
        // every remaining project source.
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Respect the user setting; produce no edits when the feature is off.
        if !update_imports_on_delete_enabled(&*self.settings.read().await) {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: updateImportsOnDelete disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project sources on the blocking pool (filesystem walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willDeleteFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Convert client URIs into filesystem paths, skipping invalid ones.
        let raw_deletes: Vec<std::path::PathBuf> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .filter_map(|u| u.to_file_path().ok())
            .collect();

        // A folder delete implies deletion of every source file beneath it.
        let deletes = file_operations::expand_folder_deletes(&raw_deletes, &source_files);
        if deletes.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} delete target(s) after folder expansion",
                    deletes.len()
                ),
            )
            .await;

        // Find sources missing from the text cache; load them from disk so
        // the edit computation sees every file.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Disk reads are blocking; run them off the async runtime.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            // Version 0 marks disk-loaded entries; or_insert keeps any newer
            // editor-supplied content.
            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                tc.entry(uri_str.into()).or_insert((0, content));
            }
        }

        // Compute the import-removal edits against the hydrated cache.
        let result = {
            let tc = self.text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::delete_imports(
                &source_files,
                &deletes,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal bookkeeping issues as a single warning line.
        let stats = &result.stats;
        if stats.read_failures > 0
            || stats.statement_range_failures > 0
            || stats.duplicate_deletes > 0
        {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willDeleteFiles stats: read_failures={}, statement_range_failures={}, \
                         duplicate_deletes={}, no_parent={}, dedup_skips={}",
                        stats.read_failures,
                        stats.statement_range_failures,
                        stats.duplicate_deletes,
                        stats.no_parent,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;
        if all_edits.is_empty() {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: no import-removal edits needed",
                )
                .await;
            return Ok(None);
        }

        // Patch our own cache immediately so requests that race the client's
        // application of the WorkspaceEdit already see the removed imports.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willDeleteFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        // The client applies these edits before deleting the files.
        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
6180
    async fn did_delete_files(&self, params: DeleteFilesParams) {
        // Handles workspace/didDeleteFiles: after files are gone, clear their
        // diagnostics, evict every per-file cache entry, and re-index the
        // project in the background.
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Mark the project index stale and record every deleted path.
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(uri) = Url::parse(&file.uri)
                    && let Ok(path) = uri.to_file_path()
                {
                    changed.insert(path.to_string_lossy().to_string());
                }
            }
        }

        let raw_delete_uris: Vec<Url> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .collect();

        // Expand folder deletes into per-file deletes, using the union of
        // cached keys and freshly discovered sources as the known-file set.
        let deleted_paths = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_deletes_from_paths(&raw_delete_uris, &all_paths)
        };

        // Build both the cache-key set and the URI list for the deleted files.
        let mut deleted_keys: HashSet<String> = HashSet::new();
        let mut deleted_uris: Vec<Url> = Vec::new();
        for path in deleted_paths {
            if let Ok(uri) = Url::from_file_path(&path) {
                deleted_keys.insert(uri.to_string());
                deleted_uris.push(uri);
            }
        }
        if deleted_keys.is_empty() {
            return;
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: deleting {} cache/diagnostic entry(ies)",
                    deleted_keys.len()
                ),
            )
            .await;

        // Publish empty diagnostics so the editor drops stale squiggles for
        // files that no longer exist.
        for uri in &deleted_uris {
            self.client
                .publish_diagnostics(uri.clone(), vec![], None)
                .await;
        }

        // Evict the deleted files from every per-file cache, counting the
        // removals for the summary log line below.
        let mut removed_text = 0usize;
        let mut removed_ast = 0usize;
        let mut removed_completion = 0usize;
        let mut removed_semantic = 0usize;
        let mut removed_pending_create = 0usize;
        {
            let mut tc = self.text_cache.write().await;
            for key in &deleted_keys {
                if tc.remove(key).is_some() {
                    removed_text += 1;
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for key in &deleted_keys {
                if ac.remove(key).is_some() {
                    removed_ast += 1;
                }
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for key in &deleted_keys {
                if cc.remove(key).is_some() {
                    removed_completion += 1;
                }
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for key in &deleted_keys {
                if sc.remove(key).is_some() {
                    removed_semantic += 1;
                }
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for key in &deleted_keys {
                if pending.remove(key) {
                    removed_pending_create += 1;
                }
            }
        }
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: removed caches text={} ast={} completion={} semantic={} pendingCreate={}",
                    removed_text,
                    removed_ast,
                    removed_completion,
                    removed_semantic,
                    removed_pending_create,
                ),
            )
            .await;

        // If any deleted path belongs to a library, drop the corresponding
        // library sub-caches too.
        {
            let affected_paths: Vec<std::path::PathBuf> = deleted_keys
                .iter()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            self.invalidate_lib_sub_caches_if_affected(&affected_paths)
                .await;
        }

        // Snapshot everything the background re-index task needs before
        // spawning it, so the task owns its data.
        let root_key = self.project_cache_key().await;

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let path_interner = self.path_interner.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *path_interner.write().await),
                    ));
                    let source_count = cached_build.nodes.len();
                    ast_cache
                        .write()
                        .await
                        .insert(cache_key.into(), cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didDeleteFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didDeleteFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
6377
6378 async fn will_create_files(
6379 &self,
6380 params: CreateFilesParams,
6381 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
6382 self.client
6383 .log_message(
6384 MessageType::INFO,
6385 format!("workspace/willCreateFiles: {} file(s)", params.files.len()),
6386 )
6387 .await;
6388 if !self
6389 .settings
6390 .read()
6391 .await
6392 .file_operations
6393 .template_on_create
6394 {
6395 self.client
6396 .log_message(
6397 MessageType::INFO,
6398 "willCreateFiles: templateOnCreate disabled",
6399 )
6400 .await;
6401 return Ok(None);
6402 }
6403 self.client
6404 .log_message(
6405 MessageType::INFO,
6406 "willCreateFiles: skipping pre-create edits; scaffolding via didCreateFiles",
6407 )
6408 .await;
6409 Ok(None)
6410 }
6411
6412 async fn did_create_files(&self, params: CreateFilesParams) {
6413 self.client
6414 .log_message(
6415 MessageType::INFO,
6416 format!("workspace/didCreateFiles: {} file(s)", params.files.len()),
6417 )
6418 .await;
6419 self.project_cache_dirty.store(true, Ordering::Release);
6420 {
6421 let mut changed = self.project_cache_changed_files.write().await;
6422 for file in ¶ms.files {
6423 if let Ok(uri) = Url::parse(&file.uri)
6424 && let Ok(path) = uri.to_file_path()
6425 {
6426 changed.insert(path.to_string_lossy().to_string());
6427 }
6428 }
6429 }
6430 if !self
6431 .settings
6432 .read()
6433 .await
6434 .file_operations
6435 .template_on_create
6436 {
6437 self.client
6438 .log_message(
6439 MessageType::INFO,
6440 "didCreateFiles: templateOnCreate disabled",
6441 )
6442 .await;
6443 return;
6444 }
6445
6446 let config = self.foundry_config.read().await;
6447 let solc_version = config.solc_version.clone();
6448 drop(config);
6449
6450 let mut apply_edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
6455 let mut staged_content: HashMap<String, String> = HashMap::new();
6456 let mut created_uris: Vec<String> = Vec::new();
6457 {
6458 let tc = self.text_cache.read().await;
6459 for file_create in ¶ms.files {
6460 let uri = match Url::parse(&file_create.uri) {
6461 Ok(u) => u,
6462 Err(_) => continue,
6463 };
6464 let uri_str = uri.to_string();
6465
6466 let open_has_content = tc
6467 .get(&uri_str)
6468 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()));
6469 let path = match uri.to_file_path() {
6470 Ok(p) => p,
6471 Err(_) => continue,
6472 };
6473 let disk_has_content = std::fs::read_to_string(&path)
6474 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()));
6475
6476 if open_has_content {
6479 self.client
6480 .log_message(
6481 MessageType::INFO,
6482 format!(
6483 "didCreateFiles: skip {} (open buffer already has content)",
6484 uri_str
6485 ),
6486 )
6487 .await;
6488 continue;
6489 }
6490
6491 if disk_has_content {
6493 self.client
6494 .log_message(
6495 MessageType::INFO,
6496 format!(
6497 "didCreateFiles: skip {} (disk file already has content)",
6498 uri_str
6499 ),
6500 )
6501 .await;
6502 continue;
6503 }
6504
6505 let content =
6506 match file_operations::generate_scaffold(&uri, solc_version.as_deref()) {
6507 Some(s) => s,
6508 None => continue,
6509 };
6510
6511 staged_content.insert(uri_str, content.clone());
6512 created_uris.push(uri.to_string());
6513
6514 apply_edits.entry(uri).or_default().push(TextEdit {
6515 range: Range {
6516 start: Position {
6517 line: 0,
6518 character: 0,
6519 },
6520 end: Position {
6521 line: 0,
6522 character: 0,
6523 },
6524 },
6525 new_text: content,
6526 });
6527 }
6528 }
6529
6530 if !apply_edits.is_empty() {
6531 {
6532 let mut pending = self.pending_create_scaffold.write().await;
6533 for uri in &created_uris {
6534 pending.insert(uri.clone().into());
6535 }
6536 }
6537
6538 let edit = WorkspaceEdit {
6539 changes: Some(apply_edits.clone()),
6540 document_changes: None,
6541 change_annotations: None,
6542 };
6543 self.client
6544 .log_message(
6545 MessageType::INFO,
6546 format!(
6547 "didCreateFiles: scaffolding {} empty file(s) via workspace/applyEdit",
6548 apply_edits.len()
6549 ),
6550 )
6551 .await;
6552 let apply_result = self.client.apply_edit(edit).await;
6553 let applied = apply_result.as_ref().is_ok_and(|r| r.applied);
6554
6555 if applied {
6556 let mut tc = self.text_cache.write().await;
6557 for (uri_str, content) in staged_content {
6558 tc.insert(uri_str.into(), (0, content));
6559 }
6560 } else {
6561 if let Ok(resp) = &apply_result {
6562 self.client
6563 .log_message(
6564 MessageType::WARNING,
6565 format!(
6566 "didCreateFiles: applyEdit rejected (no disk fallback): {:?}",
6567 resp.failure_reason
6568 ),
6569 )
6570 .await;
6571 } else if let Err(e) = &apply_result {
6572 self.client
6573 .log_message(
6574 MessageType::WARNING,
6575 format!("didCreateFiles: applyEdit failed (no disk fallback): {e}"),
6576 )
6577 .await;
6578 }
6579 }
6580 }
6581
6582 for file_create in ¶ms.files {
6586 let Ok(uri) = Url::parse(&file_create.uri) else {
6587 continue;
6588 };
6589 let (version, content) = {
6590 let tc = self.text_cache.read().await;
6591 match tc.get(&uri.to_string()) {
6592 Some((v, c)) => (*v, c.clone()),
6593 None => continue,
6594 }
6595 };
6596 if !content.chars().any(|ch| !ch.is_whitespace()) {
6597 continue;
6598 }
6599 self.on_change(TextDocumentItem {
6600 uri,
6601 version,
6602 text: content,
6603 language_id: "solidity".to_string(),
6604 })
6605 .await;
6606 }
6607
6608 {
6610 let affected_paths: Vec<std::path::PathBuf> = params
6611 .files
6612 .iter()
6613 .filter_map(|f| Url::parse(&f.uri).ok())
6614 .filter_map(|u| u.to_file_path().ok())
6615 .collect();
6616 self.invalidate_lib_sub_caches_if_affected(&affected_paths)
6617 .await;
6618 }
6619
6620 let root_key = self.project_cache_key().await;
6624
6625 let foundry_config = self.foundry_config.read().await.clone();
6626 let ast_cache = self.ast_cache.clone();
6627 let client = self.client.clone();
6628 let path_interner = self.path_interner.clone();
6629 let text_cache_snapshot = self.text_cache.read().await.clone();
6630
6631 tokio::spawn(async move {
6632 let Some(cache_key) = root_key else {
6633 return;
6634 };
6635 match crate::solc::solc_project_index(
6636 &foundry_config,
6637 Some(&client),
6638 Some(&text_cache_snapshot),
6639 )
6640 .await
6641 {
6642 Ok(ast_data) => {
6643 let cached_build = Arc::new(crate::goto::CachedBuild::new(
6644 ast_data,
6645 0,
6646 Some(&mut *path_interner.write().await),
6647 ));
6648 let source_count = cached_build.nodes.len();
6649 ast_cache
6650 .write()
6651 .await
6652 .insert(cache_key.into(), cached_build);
6653 client
6654 .log_message(
6655 MessageType::INFO,
6656 format!("didCreateFiles: re-indexed {} source files", source_count),
6657 )
6658 .await;
6659 }
6660 Err(e) => {
6661 client
6662 .log_message(
6663 MessageType::WARNING,
6664 format!("didCreateFiles: re-index failed: {e}"),
6665 )
6666 .await;
6667 }
6668 }
6669 });
6670 }
6671
6672 async fn prepare_call_hierarchy(
6675 &self,
6676 params: CallHierarchyPrepareParams,
6677 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyItem>>> {
6678 self.client
6679 .log_message(
6680 MessageType::INFO,
6681 "got textDocument/prepareCallHierarchy request",
6682 )
6683 .await;
6684
6685 let uri = params.text_document_position_params.text_document.uri;
6686 let position = params.text_document_position_params.position;
6687
6688 let file_path = match uri.to_file_path() {
6689 Ok(path) => path,
6690 Err(_) => {
6691 self.client
6692 .log_message(MessageType::ERROR, "invalid file uri")
6693 .await;
6694 return Ok(None);
6695 }
6696 };
6697
6698 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
6699 Some(bytes) => bytes,
6700 None => return Ok(None),
6701 };
6702
6703 let cached_build = match self.get_or_fetch_build(&uri, &file_path, true).await {
6704 Some(cb) => cb,
6705 None => return Ok(None),
6706 };
6707
6708 let path_str = match file_path.to_str() {
6709 Some(s) => s,
6710 None => return Ok(None),
6711 };
6712 let abs_path = match cached_build.path_to_abs.get(path_str) {
6713 Some(ap) => ap.clone(),
6714 None => {
6715 crate::types::AbsPath::new(path_str)
6717 }
6718 };
6719
6720 let byte_position = goto::pos_to_bytes(&source_bytes, position);
6721
6722 let callable_id = match crate::call_hierarchy::resolve_callable_at_position(
6724 &cached_build,
6725 abs_path.as_str(),
6726 byte_position,
6727 ) {
6728 Some(id) => id,
6729 None => {
6730 self.client
6731 .log_message(MessageType::INFO, "no callable found at cursor position")
6732 .await;
6733 return Ok(None);
6734 }
6735 };
6736
6737 let item = if let Some(decl) = cached_build.decl_index.get(&callable_id) {
6740 crate::call_hierarchy::decl_to_hierarchy_item(
6741 decl,
6742 callable_id,
6743 &cached_build.node_id_to_source_path,
6744 &cached_build.id_to_path_map,
6745 &cached_build.nodes,
6746 )
6747 } else if let Some(info) =
6748 crate::call_hierarchy::find_node_info(&cached_build.nodes, callable_id)
6749 {
6750 crate::call_hierarchy::node_info_to_hierarchy_item(
6751 callable_id,
6752 info,
6753 &cached_build.id_to_path_map,
6754 )
6755 } else {
6756 None
6757 };
6758
6759 match item {
6760 Some(it) => {
6761 self.client
6762 .log_message(
6763 MessageType::INFO,
6764 format!("prepared call hierarchy for: {}", it.name),
6765 )
6766 .await;
6767 Ok(Some(vec![it]))
6768 }
6769 None => {
6770 self.client
6771 .log_message(
6772 MessageType::INFO,
6773 "could not build CallHierarchyItem for callable",
6774 )
6775 .await;
6776 Ok(None)
6777 }
6778 }
6779 }
6780
6781 async fn incoming_calls(
6782 &self,
6783 params: CallHierarchyIncomingCallsParams,
6784 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyIncomingCall>>> {
6785 self.client
6786 .log_message(MessageType::INFO, "got callHierarchy/incomingCalls request")
6787 .await;
6788
6789 let item = ¶ms.item;
6790
6791 let node_id = match item
6793 .data
6794 .as_ref()
6795 .and_then(|d| d.get("nodeId"))
6796 .and_then(|v| v.as_i64())
6797 {
6798 Some(id) => crate::types::NodeId(id),
6799 None => {
6800 self.client
6801 .log_message(
6802 MessageType::ERROR,
6803 "missing nodeId in CallHierarchyItem data",
6804 )
6805 .await;
6806 return Ok(None);
6807 }
6808 };
6809
6810 let file_path = match item.uri.to_file_path() {
6812 Ok(p) => p,
6813 Err(_) => return Ok(None),
6814 };
6815 let file_build = match self.get_or_fetch_build(&item.uri, &file_path, true).await {
6816 Some(cb) => cb,
6817 None => return Ok(None),
6818 };
6819
6820 let project_build = self.ensure_project_cached_build().await;
6822
6823 let mut builds: Vec<&goto::CachedBuild> = vec![&file_build];
6825 if let Some(ref pb) = project_build {
6826 builds.push(pb);
6827 }
6828 let sub_caches = self.sub_caches.read().await;
6829 for sc in sub_caches.iter() {
6830 builds.push(sc);
6831 }
6832
6833 let target_name = &item.name;
6840 let target_sel = &item.selection_range;
6841 let target_abs = item
6842 .uri
6843 .as_ref()
6844 .strip_prefix("file://")
6845 .unwrap_or(item.uri.as_ref());
6846
6847 let target_name_offset = {
6851 let source_bytes = std::fs::read(target_abs).unwrap_or_default();
6852 goto::pos_to_bytes(&source_bytes, target_sel.start)
6853 };
6854
6855 let mut resolved_incoming: Vec<(CallHierarchyItem, (u32, u32), Range)> = Vec::new();
6860
6861 for build in &builds {
6862 let mut build_target_ids = crate::call_hierarchy::resolve_target_in_build(
6866 build,
6867 node_id,
6868 target_abs,
6869 target_name,
6870 target_name_offset,
6871 );
6872
6873 let snapshot: Vec<crate::types::NodeId> = build_target_ids.clone();
6875 for id in &snapshot {
6876 if let Some(related) = build.base_function_implementation.get(id) {
6877 for &related_id in related {
6878 if !build_target_ids.contains(&related_id) {
6879 build_target_ids.push(related_id);
6880 }
6881 }
6882 }
6883 }
6884
6885 if build_target_ids.is_empty() {
6886 continue;
6887 }
6888
6889 let calls = crate::call_hierarchy::incoming_calls(&build.nodes, &build_target_ids);
6890 for (caller_id, call_src) in calls {
6891 let call_range = match crate::call_hierarchy::call_src_to_range(
6892 &call_src,
6893 &build.id_to_path_map,
6894 ) {
6895 Some(r) => r,
6896 None => continue,
6897 };
6898 let caller_item = if let Some(decl) = build.decl_index.get(&caller_id) {
6901 crate::call_hierarchy::decl_to_hierarchy_item(
6902 decl,
6903 caller_id,
6904 &build.node_id_to_source_path,
6905 &build.id_to_path_map,
6906 &build.nodes,
6907 )
6908 } else if let Some(info) =
6909 crate::call_hierarchy::find_node_info(&build.nodes, caller_id)
6910 {
6911 crate::call_hierarchy::node_info_to_hierarchy_item(
6912 caller_id,
6913 info,
6914 &build.id_to_path_map,
6915 )
6916 } else {
6917 None
6918 };
6919 let Some(caller_item) = caller_item else {
6920 continue;
6921 };
6922 let pos = (
6923 caller_item.selection_range.start.line,
6924 caller_item.selection_range.start.character,
6925 );
6926 resolved_incoming.push((caller_item, pos, call_range));
6927 }
6928 }
6929
6930 if resolved_incoming.is_empty() {
6931 self.client
6932 .log_message(MessageType::INFO, "no incoming calls found")
6933 .await;
6934 return Ok(Some(vec![]));
6935 }
6936
6937 let mut grouped: HashMap<(u32, u32), (CallHierarchyItem, Vec<Range>)> = HashMap::new();
6940 for (caller_item, pos, call_range) in resolved_incoming {
6941 let entry = grouped
6942 .entry(pos)
6943 .or_insert_with(|| (caller_item, Vec::new()));
6944 if !entry.1.contains(&call_range) {
6945 entry.1.push(call_range);
6946 }
6947 }
6948
6949 let results: Vec<CallHierarchyIncomingCall> = grouped
6950 .into_values()
6951 .map(|(from, from_ranges)| CallHierarchyIncomingCall { from, from_ranges })
6952 .collect();
6953
6954 self.client
6955 .log_message(
6956 MessageType::INFO,
6957 format!("found {} incoming callers", results.len()),
6958 )
6959 .await;
6960 Ok(Some(results))
6961 }
6962
6963 async fn outgoing_calls(
6964 &self,
6965 params: CallHierarchyOutgoingCallsParams,
6966 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
6967 self.client
6968 .log_message(MessageType::INFO, "got callHierarchy/outgoingCalls request")
6969 .await;
6970
6971 let item = ¶ms.item;
6972
6973 let node_id = match item
6975 .data
6976 .as_ref()
6977 .and_then(|d| d.get("nodeId"))
6978 .and_then(|v| v.as_i64())
6979 {
6980 Some(id) => crate::types::NodeId(id),
6981 None => {
6982 self.client
6983 .log_message(
6984 MessageType::ERROR,
6985 "missing nodeId in CallHierarchyItem data",
6986 )
6987 .await;
6988 return Ok(None);
6989 }
6990 };
6991
6992 let file_path = match item.uri.to_file_path() {
6994 Ok(p) => p,
6995 Err(_) => return Ok(None),
6996 };
6997 let file_build = match self.get_or_fetch_build(&item.uri, &file_path, true).await {
6998 Some(cb) => cb,
6999 None => return Ok(None),
7000 };
7001
7002 let project_build = self.ensure_project_cached_build().await;
7004
7005 let mut builds: Vec<&goto::CachedBuild> = vec![&file_build];
7007 if let Some(ref pb) = project_build {
7008 builds.push(pb);
7009 }
7010 let sub_caches = self.sub_caches.read().await;
7011 for sc in sub_caches.iter() {
7012 builds.push(sc);
7013 }
7014
7015 let target_name = &item.name;
7019 let target_sel = &item.selection_range;
7020 let target_abs = item
7021 .uri
7022 .as_ref()
7023 .strip_prefix("file://")
7024 .unwrap_or(item.uri.as_ref());
7025
7026 let target_name_offset = {
7028 let source_bytes = std::fs::read(target_abs).unwrap_or_default();
7029 goto::pos_to_bytes(&source_bytes, target_sel.start)
7030 };
7031
7032 let mut resolved_outgoing: Vec<(CallHierarchyItem, (u32, u32), Range)> = Vec::new();
7040
7041 for build in &builds {
7042 let build_caller_ids = crate::call_hierarchy::resolve_target_in_build(
7046 build,
7047 node_id,
7048 target_abs,
7049 target_name,
7050 target_name_offset,
7051 );
7052
7053 for &cid in &build_caller_ids {
7054 let calls = crate::call_hierarchy::outgoing_calls(&build.nodes, cid);
7055 for (callee_id, call_src) in calls {
7056 let call_range = match crate::call_hierarchy::call_src_to_range(
7057 &call_src,
7058 &build.id_to_path_map,
7059 ) {
7060 Some(r) => r,
7061 None => continue,
7062 };
7063 let callee_item = if let Some(decl) = build.decl_index.get(&callee_id) {
7066 crate::call_hierarchy::decl_to_hierarchy_item(
7067 decl,
7068 callee_id,
7069 &build.node_id_to_source_path,
7070 &build.id_to_path_map,
7071 &build.nodes,
7072 )
7073 } else if let Some(info) =
7074 crate::call_hierarchy::find_node_info(&build.nodes, callee_id)
7075 {
7076 crate::call_hierarchy::node_info_to_hierarchy_item(
7077 callee_id,
7078 info,
7079 &build.id_to_path_map,
7080 )
7081 } else {
7082 None
7083 };
7084 let Some(callee_item) = callee_item else {
7085 continue;
7086 };
7087 let pos = (
7088 callee_item.selection_range.start.line,
7089 callee_item.selection_range.start.character,
7090 );
7091 resolved_outgoing.push((callee_item, pos, call_range));
7092 }
7093 }
7094 }
7095
7096 if resolved_outgoing.is_empty() {
7097 return Ok(Some(vec![]));
7098 }
7099
7100 let mut grouped: HashMap<(u32, u32), (CallHierarchyItem, Vec<Range>)> = HashMap::new();
7103 for (callee_item, pos, call_range) in resolved_outgoing {
7104 let entry = grouped
7105 .entry(pos)
7106 .or_insert_with(|| (callee_item, Vec::new()));
7107 if !entry.1.contains(&call_range) {
7108 entry.1.push(call_range);
7109 }
7110 }
7111
7112 let mut results: Vec<CallHierarchyOutgoingCall> = grouped
7113 .into_values()
7114 .map(|(to, from_ranges)| CallHierarchyOutgoingCall { to, from_ranges })
7115 .collect();
7116
7117 results.sort_by(|a, b| {
7120 let a_first = a.from_ranges.first();
7121 let b_first = b.from_ranges.first();
7122 match (a_first, b_first) {
7123 (Some(a_r), Some(b_r)) => a_r
7124 .start
7125 .line
7126 .cmp(&b_r.start.line)
7127 .then_with(|| a_r.start.character.cmp(&b_r.start.character)),
7128 (Some(_), None) => std::cmp::Ordering::Less,
7129 (None, Some(_)) => std::cmp::Ordering::Greater,
7130 (None, None) => std::cmp::Ordering::Equal,
7131 }
7132 });
7133
7134 Ok(Some(results))
7135 }
7136}
7137
#[cfg(test)]
mod tests {
    use super::{
        start_or_mark_project_cache_sync_pending, stop_project_cache_sync_worker_or_reclaim,
        take_project_cache_sync_pending, try_claim_project_cache_dirty,
        update_imports_on_delete_enabled,
    };
    use std::sync::atomic::{AtomicBool, Ordering};

    #[test]
    fn update_imports_on_delete_enabled_defaults_true() {
        // Default settings keep import rewriting on delete enabled.
        let settings = crate::config::Settings::default();
        assert!(update_imports_on_delete_enabled(&settings));
    }

    #[test]
    fn update_imports_on_delete_enabled_respects_false() {
        // An explicit opt-out must be honored.
        let mut settings = crate::config::Settings::default();
        settings.file_operations.update_imports_on_delete = false;
        assert!(!update_imports_on_delete_enabled(&settings));
    }

    #[test]
    fn project_cache_sync_burst_only_first_starts_worker() {
        let pending = AtomicBool::new(false);
        let running = AtomicBool::new(false);

        // The first event of a burst claims the worker and marks work pending.
        assert!(start_or_mark_project_cache_sync_pending(&pending, &running));
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));

        // Later events in the burst only leave the pending flag set.
        assert!(!start_or_mark_project_cache_sync_pending(&pending, &running));
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_sync_take_pending_is_one_shot() {
        // Taking the flag consumes it; a second take sees nothing.
        let pending = AtomicBool::new(true);
        assert!(take_project_cache_sync_pending(&pending));
        assert!(!pending.load(Ordering::Acquire));
        assert!(!take_project_cache_sync_pending(&pending));
    }

    #[test]
    fn project_cache_sync_worker_stop_or_reclaim_handles_race() {
        let pending = AtomicBool::new(false);
        let running = AtomicBool::new(true);

        // No pending work: the worker stops and clears `running`.
        assert!(!stop_project_cache_sync_worker_or_reclaim(&pending, &running));
        assert!(!running.load(Ordering::Acquire));

        // Work arrived during shutdown: the worker reclaims itself instead.
        pending.store(true, Ordering::Release);
        running.store(true, Ordering::Release);
        assert!(stop_project_cache_sync_worker_or_reclaim(&pending, &running));
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_dirty_claim_and_retry_cycle() {
        let dirty = AtomicBool::new(true);

        // Claiming clears the dirty bit exactly once.
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));
        assert!(!try_claim_project_cache_dirty(&dirty));

        // A fresh mark makes it claimable again.
        dirty.store(true, Ordering::Release);
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));
    }
}