1use crate::completion;
2use crate::config::{self, FoundryConfig, LintConfig, Settings};
3use crate::file_operations;
4use crate::folding;
5use crate::goto;
6use crate::highlight;
7use crate::hover;
8use crate::inlay_hints;
9use crate::links;
10use crate::references;
11use crate::rename;
12use crate::runner::{ForgeRunner, Runner};
13use crate::selection;
14use crate::semantic_tokens;
15use crate::symbols;
16use crate::types::DocumentUri;
17use crate::types::ErrorCode;
18use crate::utils;
19use std::collections::{HashMap, HashSet};
20use std::path::{Component, Path, PathBuf};
21use std::sync::Arc;
22use std::sync::atomic::{AtomicU64, Ordering};
23use tokio::sync::RwLock;
24use tower_lsp::{Client, LanguageServer, lsp_types::*};
25
// Per-document semantic-token cache keyed by URI. The `String` is presumably
// the result id returned to the client for delta requests — TODO confirm.
type SemanticTokenCache = HashMap<DocumentUri, (String, Vec<SemanticToken>)>;
28
/// LSP backend state. Every field is wrapped in `Arc` (the struct derives
/// `Clone`), so clones share all caches, flags and the compiler runner.
#[derive(Clone)]
pub struct ForgeLsp {
    // Handle for talking back to the editor (diagnostics, logs, progress).
    client: Client,
    // Diagnostics/AST provider: forge or solar, chosen at construction.
    compiler: Arc<dyn Runner>,
    // URI -> last good AST build; also holds the whole-project build under
    // the project cache key (see `project_cache_key`).
    ast_cache: Arc<RwLock<HashMap<DocumentUri, Arc<goto::CachedBuild>>>>,
    // URI -> (document version, full text) of the in-editor buffer.
    text_cache: Arc<RwLock<HashMap<DocumentUri, (i32, String)>>>,
    // URI -> completion data extracted from the matching cached build.
    completion_cache: Arc<RwLock<HashMap<DocumentUri, Arc<completion::CompletionCache>>>>,
    // Lint enable/ignore rules (consulted in `on_change`).
    lint_config: Arc<RwLock<LintConfig>>,
    // Active foundry.toml-derived configuration for the workspace.
    foundry_config: Arc<RwLock<FoundryConfig>>,
    // Capabilities announced by the client at initialize, if any.
    client_capabilities: Arc<RwLock<Option<ClientCapabilities>>>,
    // User-facing server settings.
    settings: Arc<RwLock<Settings>>,
    // When true, compile via solc directly instead of the forge runner.
    use_solc: bool,
    // Semantic-token results cached per document.
    semantic_token_cache: Arc<RwLock<SemanticTokenCache>>,
    // Monotonic counter — presumably used to mint semantic-token result ids.
    semantic_token_id: Arc<AtomicU64>,
    // Workspace root sent by the client, when present.
    root_uri: Arc<RwLock<Option<Url>>>,
    // Set once the whole-project index has been kicked off/loaded.
    project_indexed: Arc<std::sync::atomic::AtomicBool>,
    // Flags coordinating the background project-cache sync/upsert workers.
    project_cache_dirty: Arc<std::sync::atomic::AtomicBool>,
    project_cache_sync_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_sync_pending: Arc<std::sync::atomic::AtomicBool>,
    project_cache_force_full_rebuild: Arc<std::sync::atomic::AtomicBool>,
    project_cache_upsert_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_upsert_pending: Arc<std::sync::atomic::AtomicBool>,
    // Files queued for the sync / upsert passes respectively.
    project_cache_changed_files: Arc<RwLock<HashSet<String>>>,
    project_cache_upsert_files: Arc<RwLock<HashSet<String>>>,
    // Newly created docs awaiting scaffold insertion — TODO confirm semantics.
    pending_create_scaffold: Arc<RwLock<HashSet<DocumentUri>>>,
    // True when settings came from initializationOptions rather than config.
    settings_from_init: Arc<std::sync::atomic::AtomicBool>,
    // Per-document watch channels feeding debounced did-save workers.
    did_save_workers: Arc<
        RwLock<HashMap<DocumentUri, tokio::sync::watch::Sender<Option<DidSaveTextDocumentParams>>>>,
    >,
    // Static table mapping compiler error codes to quick-fix definitions.
    code_action_db: Arc<HashMap<ErrorCode, crate::code_actions::CodeActionDef>>,
    // Builds loaded from lib/ sub-project caches (see sub-cache functions).
    sub_caches: Arc<RwLock<Vec<Arc<goto::CachedBuild>>>>,
    // Re-entrancy guard for the sub-cache loader task.
    sub_caches_loading: Arc<std::sync::atomic::AtomicBool>,
    // Shared interner giving every source path a stable id across builds.
    path_interner: Arc<RwLock<crate::types::PathInterner>>,
}
117
/// Background task: discover `lib/` sub-projects, build the ones that have
/// no on-disk cache, then load every cached sub-project into `sub_caches`.
///
/// `loading_flag` is a re-entrancy guard — if a load is already in flight
/// this call returns immediately without spawning anything. The flag is
/// cleared on every exit path of the spawned task.
fn spawn_load_lib_sub_caches_task(
    foundry_config: crate::config::FoundryConfig,
    sub_caches: Arc<RwLock<Vec<Arc<goto::CachedBuild>>>>,
    loading_flag: Arc<std::sync::atomic::AtomicBool>,
    path_interner: Arc<RwLock<crate::types::PathInterner>>,
    client: Client,
) {
    // Claim the guard atomically; losing the race means a load is running.
    if loading_flag
        .compare_exchange(
            false,
            true,
            std::sync::atomic::Ordering::SeqCst,
            std::sync::atomic::Ordering::SeqCst,
        )
        .is_err()
    {
        return;
    }
    tokio::spawn(async move {
        let cfg = foundry_config.clone();
        // Discovery walks the filesystem, so keep it off the async runtime.
        let discovered = tokio::task::spawn_blocking(move || {
            crate::project_cache::discover_lib_sub_projects(&cfg)
        })
        .await
        .unwrap_or_else(|_| crate::project_cache::DiscoveredLibs {
            cached: Vec::new(),
            uncached: Vec::new(),
        });

        let sub_caches_start = std::time::Instant::now();
        // Build (and persist) caches for sub-projects that have none yet.
        spawn_and_collect_sub_cache_builds(&discovered.uncached, &client, &path_interner).await;

        // Re-discover: the builds above may have produced new caches.
        let cfg2 = foundry_config.clone();
        let all_cached =
            tokio::task::spawn_blocking(move || crate::project_cache::discover_lib_caches(&cfg2))
                .await
                .unwrap_or_default();

        if all_cached.is_empty() {
            // Nothing to load — still emit progress so the editor isn't left waiting.
            emit_sub_caches_loaded(&client, 0, 0, sub_caches_start.elapsed().as_secs_f64()).await;
            loading_flag.store(false, std::sync::atomic::Ordering::SeqCst);
            return;
        }

        let mut loaded = Vec::new();
        for sub_root in &all_cached {
            let root = sub_root.clone();
            // Cache deserialization is heavy; run it on a blocking thread.
            // Both task failure and a missing cache collapse to None.
            let build =
                tokio::task::spawn_blocking(move || crate::project_cache::load_lib_cache(&root))
                    .await
                    .ok()
                    .flatten();
            if let Some(build) = build {
                {
                    // Intern every source path so id lookups share one space.
                    let mut interner = path_interner.write().await;
                    for (_solc_id, path) in &build.id_to_path_map {
                        interner.intern(path);
                    }
                }
                loaded.push(Arc::new(build));
            }
        }

        let count = loaded.len();
        let total: usize = loaded.iter().map(|b| b.nodes.len()).sum();
        let elapsed = sub_caches_start.elapsed().as_secs_f64();

        if !loaded.is_empty() {
            client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "sub-caches: loaded {} lib caches ({} total sources, {:.1}s total)",
                        count, total, elapsed,
                    ),
                )
                .await;
            // Publish atomically: replace the whole vector in one write.
            *sub_caches.write().await = loaded;
        }

        emit_sub_caches_loaded(&client, count, total, elapsed).await;
        // Release the guard so future invalidations can trigger a reload.
        loading_flag.store(false, std::sync::atomic::Ordering::SeqCst);
    });
}
212
/// Build reference caches for lib sub-projects that have none on disk.
///
/// Spawns one solc indexing task per sub-project root, bounded to the
/// machine's available parallelism by a semaphore, and persists each
/// successful build. Failures and empty builds are logged and skipped;
/// nothing is returned — the caller re-discovers caches from disk after.
async fn spawn_and_collect_sub_cache_builds(
    roots: &[std::path::PathBuf],
    client: &Client,
    path_interner: &Arc<RwLock<crate::types::PathInterner>>,
) {
    if roots.is_empty() {
        return;
    }
    let max_parallel = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);
    let semaphore = Arc::new(tokio::sync::Semaphore::new(max_parallel));
    client
        .log_message(
            MessageType::INFO,
            format!(
                "sub-cache: building {} libs (max {max_parallel} parallel)",
                roots.len()
            ),
        )
        .await;
    let mut join_set = tokio::task::JoinSet::new();
    for sub_root in roots {
        // Human-readable name for logs; falls back to the full path.
        let sub_name = sub_root
            .file_name()
            .map(|n| n.to_string_lossy().into_owned())
            .unwrap_or_else(|| sub_root.display().to_string());
        let sub_config =
            crate::config::load_foundry_config_from_toml(&sub_root.join("foundry.toml"));
        let sem = semaphore.clone();
        join_set.spawn(async move {
            // The permit caps the number of concurrent solc runs.
            let _permit = sem.acquire().await.expect("semaphore closed");
            let sub_start = std::time::Instant::now();
            let result = crate::solc::solc_project_index_ast_only(&sub_config, None).await;
            let elapsed = sub_start.elapsed().as_secs_f64();
            (sub_name, sub_config, result, elapsed)
        });
    }

    while let Some(join_result) = join_set.join_next().await {
        // A panicked/cancelled task is simply skipped.
        let Ok((sub_name, sub_config, result, elapsed)) = join_result else {
            continue;
        };
        match result {
            Ok(ast_data) => {
                let mut interner = path_interner.write().await;
                let build = crate::goto::CachedBuild::new(ast_data, 0, Some(&mut interner));
                // Release the interner lock before the (potentially long) save.
                drop(interner);
                let source_count = build.nodes.len();
                if source_count == 0 {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("sub-cache: {sub_name} produced 0 sources"),
                        )
                        .await;
                    continue;
                }
                // Persist off the runtime; the save result is deliberately
                // ignored — the follow-up discovery pass decides what loaded.
                let cfg_for_save = sub_config.clone();
                let build_for_save = build.clone();
                let _ = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
            }
            Err(e) => {
                client
                    .log_message(
                        MessageType::WARNING,
                        format!("sub-cache: {sub_name} failed ({elapsed:.1}s): {e}"),
                    )
                    .await;
            }
        }
    }
}
299
300async fn emit_sub_caches_loaded(client: &Client, count: usize, total: usize, elapsed: f64) {
303 let token = NumberOrString::String("solidity/subCachesLoaded".to_string());
304 let _ = client
305 .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
306 token: token.clone(),
307 })
308 .await;
309 client
310 .send_notification::<notification::Progress>(ProgressParams {
311 token: token.clone(),
312 value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
313 title: "Sub-caches loaded".to_string(),
314 message: Some(format!(
315 "{count} lib caches ({total} sources) in {elapsed:.1}s",
316 )),
317 cancellable: Some(false),
318 percentage: None,
319 })),
320 })
321 .await;
322 client
323 .send_notification::<notification::Progress>(ProgressParams {
324 token,
325 value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
326 message: Some(format!("Loaded {count} lib caches ({total} sources)",)),
327 })),
328 })
329 .await;
330}
331
332impl ForgeLsp {
333 pub fn new(client: Client, use_solar: bool, use_solc: bool) -> Self {
334 let compiler: Arc<dyn Runner> = if use_solar {
335 Arc::new(crate::solar_runner::SolarRunner)
336 } else {
337 Arc::new(ForgeRunner)
338 };
339 let ast_cache = Arc::new(RwLock::new(HashMap::new()));
340 let text_cache = Arc::new(RwLock::new(HashMap::new()));
341 let completion_cache = Arc::new(RwLock::new(HashMap::new()));
342 let lint_config = Arc::new(RwLock::new(LintConfig::default()));
343 let foundry_config = Arc::new(RwLock::new(FoundryConfig::default()));
344 let client_capabilities = Arc::new(RwLock::new(None));
345 let settings = Arc::new(RwLock::new(Settings::default()));
346 Self {
347 client,
348 compiler,
349 ast_cache,
350 text_cache,
351 completion_cache,
352 lint_config,
353 foundry_config,
354 client_capabilities,
355 settings,
356 use_solc,
357 semantic_token_cache: Arc::new(RwLock::new(HashMap::new())),
358 semantic_token_id: Arc::new(AtomicU64::new(0)),
359 root_uri: Arc::new(RwLock::new(None)),
360 project_indexed: Arc::new(std::sync::atomic::AtomicBool::new(false)),
361 project_cache_dirty: Arc::new(std::sync::atomic::AtomicBool::new(false)),
362 project_cache_sync_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
363 project_cache_sync_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
364 project_cache_force_full_rebuild: Arc::new(std::sync::atomic::AtomicBool::new(false)),
365 project_cache_upsert_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
366 project_cache_upsert_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
367 project_cache_changed_files: Arc::new(RwLock::new(HashSet::new())),
368 project_cache_upsert_files: Arc::new(RwLock::new(HashSet::new())),
369 pending_create_scaffold: Arc::new(RwLock::new(HashSet::new())),
370 settings_from_init: Arc::new(std::sync::atomic::AtomicBool::new(false)),
371 did_save_workers: Arc::new(RwLock::new(HashMap::new())),
372 code_action_db: Arc::new(crate::code_actions::load()),
373 sub_caches: Arc::new(RwLock::new(Vec::new())),
374 sub_caches_loading: Arc::new(std::sync::atomic::AtomicBool::new(false)),
375 path_interner: Arc::new(RwLock::new(crate::types::PathInterner::new())),
376 }
377 }
378
    /// Resolve the foundry configuration that applies to `file_path`
    /// (delegates to `config::load_foundry_config`).
    ///
    /// NOTE(review): async but contains no awaits — presumably kept async so
    /// call sites stay uniform; the lookup itself may do blocking file I/O.
    async fn foundry_config_for_file(&self, file_path: &std::path::Path) -> FoundryConfig {
        config::load_foundry_config(file_path)
    }
389
390 async fn project_cache_key(&self) -> Option<String> {
395 if let Some(uri) = self.root_uri.read().await.as_ref() {
396 return Some(uri.to_string());
397 }
398
399 let mut root = self.foundry_config.read().await.root.clone();
400 if !root.is_absolute()
401 && let Ok(cwd) = std::env::current_dir()
402 {
403 root = cwd.join(root);
404 }
405 if !root.is_dir() {
406 root = root.parent()?.to_path_buf();
407 }
408 Url::from_directory_path(root).ok().map(|u| u.to_string())
409 }
410
411 fn spawn_load_lib_sub_caches(&self) {
420 let foundry_config = self.foundry_config.clone();
421 let sub_caches = self.sub_caches.clone();
422 let loading_flag = self.sub_caches_loading.clone();
423 let path_interner = self.path_interner.clone();
424 let client = self.client.clone();
425 tokio::spawn(async move {
426 let cfg = foundry_config.read().await.clone();
427 spawn_load_lib_sub_caches_task(cfg, sub_caches, loading_flag, path_interner, client);
428 });
429 }
430
431 async fn invalidate_lib_sub_caches_if_affected(&self, changed_paths: &[std::path::PathBuf]) {
435 let config = self.foundry_config.read().await.clone();
436 let affected = changed_paths.iter().any(|p| {
437 config
438 .libs
439 .iter()
440 .any(|lib_name| p.starts_with(config.root.join(lib_name)))
441 });
442 if affected {
443 self.sub_caches.write().await.clear();
444 self.spawn_load_lib_sub_caches();
445 }
446 }
447
    /// Return the whole-project cached build, warming it from the on-disk
    /// reference cache if needed.
    ///
    /// Fast path: the build already sits in `ast_cache` under the project
    /// key. Slow path (solc mode + full project scan only): load the disk
    /// cache; if it is incomplete, reindex just the files affected by the
    /// changes since the cache was written and merge them in. Returns `None`
    /// when project caching is disabled or no cache could be produced.
    async fn ensure_project_cached_build(&self) -> Option<Arc<goto::CachedBuild>> {
        let root_key = self.project_cache_key().await?;
        if let Some(existing) = self.ast_cache.read().await.get(&root_key).cloned() {
            // Opportunistically (re)load lib sub-caches alongside the hit.
            self.spawn_load_lib_sub_caches();
            return Some(existing);
        }

        let settings = self.settings.read().await.clone();
        if !self.use_solc || !settings.project_index.full_project_scan {
            return None;
        }

        let foundry_config = self.foundry_config.read().await.clone();
        if !foundry_config.root.is_dir() {
            return None;
        }

        let cache_mode = settings.project_index.cache_mode.clone();
        let cfg_for_load = foundry_config.clone();
        // Cache load hashes files on disk — run it on a blocking thread.
        let load_res = tokio::task::spawn_blocking(move || {
            crate::project_cache::load_reference_cache_with_report(&cfg_for_load, cache_mode, true)
        })
        .await;

        let Ok(report) = load_res else {
            return None;
        };
        let Some(build) = report.build else {
            return None;
        };

        let source_count = build.nodes.len();
        let complete = report.complete;
        let duration_ms = report.duration_ms;
        let reused = report.file_count_reused;
        let hashed = report.file_count_hashed;
        let arc = Arc::new(build);
        self.ast_cache
            .write()
            .await
            .insert(root_key.clone().into(), arc.clone());
        // Mark indexed so on_change doesn't start a second full index.
        self.project_indexed
            .store(true, std::sync::atomic::Ordering::Relaxed);
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "references warm-load: project cache loaded (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                    source_count, reused, hashed, complete, duration_ms
                ),
            )
            .await;

        self.spawn_load_lib_sub_caches();

        if complete {
            return Some(arc);
        }

        // Cache is stale/partial: find what changed since it was written.
        let cfg_for_diff = foundry_config.clone();
        let changed = tokio::task::spawn_blocking(move || {
            crate::project_cache::changed_files_since_v2_cache(&cfg_for_diff, true)
        })
        .await
        .ok()
        .and_then(Result::ok)
        .unwrap_or_default();

        if changed.is_empty() {
            return Some(arc);
        }

        // Expand the changed set to everything that (transitively) imports it.
        let remappings = crate::solc::resolve_remappings(&foundry_config).await;
        let cfg_for_plan = foundry_config.clone();
        let changed_for_plan = changed.clone();
        let remappings_for_plan = remappings.clone();
        let affected_set = tokio::task::spawn_blocking(move || {
            compute_reverse_import_closure(&cfg_for_plan, &changed_for_plan, &remappings_for_plan)
        })
        .await
        .ok()
        .unwrap_or_default();
        let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
        if affected_files.is_empty() {
            // Closure computation failed/empty: fall back to the raw changes.
            affected_files = changed;
        }

        // Reindex only the affected files, using in-editor buffers when open.
        let text_cache_snapshot = self.text_cache.read().await.clone();
        match crate::solc::solc_project_index_scoped(
            &foundry_config,
            Some(&self.client),
            Some(&text_cache_snapshot),
            &affected_files,
        )
        .await
        {
            Ok(ast_data) => {
                let scoped_build = Arc::new(crate::goto::CachedBuild::new(
                    ast_data,
                    0,
                    Some(&mut *self.path_interner.write().await),
                ));
                let mut merge_error: Option<String> = None;
                // Merge the scoped build into whatever is cached now, under
                // the write lock so concurrent updates can't interleave.
                let merged = {
                    let mut cache = self.ast_cache.write().await;
                    let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                        let mut merged = (*existing).clone();
                        match merge_scoped_cached_build(&mut merged, (*scoped_build).clone()) {
                            Ok(_) => Arc::new(merged),
                            Err(e) => {
                                // Merge failure: fall back to the scoped build alone.
                                merge_error = Some(e);
                                scoped_build.clone()
                            }
                        }
                    } else {
                        scoped_build.clone()
                    };
                    cache.insert(root_key.clone().into(), merged.clone());
                    merged
                };
                if let Some(e) = merge_error {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!(
                                "references warm-load reconcile: merge failed, using scoped build: {}",
                                e
                            ),
                        )
                        .await;
                }

                // Persist the reconciled build so the next load is complete.
                let cfg_for_save = foundry_config.clone();
                let build_for_save = (*merged).clone();
                let save_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
                if let Ok(Ok(report)) = save_res {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-load reconcile: saved cache (affected={}, hashed_files={}, duration={}ms)",
                                affected_files.len(),
                                report.file_count_hashed,
                                report.duration_ms
                            ),
                        )
                        .await;
                }
                Some(merged)
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!(
                            "references warm-load reconcile: scoped reindex failed: {}",
                            e
                        ),
                    )
                    .await;
                // Reindex failed — serve the stale-but-usable cached build.
                Some(arc)
            }
        }
    }
627
    /// Persist the in-memory whole-project build to the on-disk reference
    /// cache. `reason` only labels the log lines. No-op unless solc mode and
    /// full project scanning are enabled and a project build is cached.
    async fn flush_project_cache_to_disk(&self, reason: &str) {
        if !self.use_solc || !self.settings.read().await.project_index.full_project_scan {
            return;
        }
        let Some(root_key) = self.project_cache_key().await else {
            return;
        };
        let build = {
            let cache = self.ast_cache.read().await;
            cache.get(&root_key).cloned()
        };
        let Some(build) = build else {
            return;
        };

        let foundry_config = self.foundry_config.read().await.clone();
        // Clone so the blocking save owns its data outside the lock.
        let build_for_save = (*build).clone();
        let res = tokio::task::spawn_blocking(move || {
            crate::project_cache::save_reference_cache_with_report(
                &foundry_config,
                &build_for_save,
                None,
            )
        })
        .await;

        // Outer Err: the blocking task failed; inner Err: the save failed.
        match res {
            Ok(Ok(report)) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "project cache flush ({}): saved hashed_files={}, duration={}ms",
                            reason, report.file_count_hashed, report.duration_ms
                        ),
                    )
                    .await;
            }
            Ok(Err(e)) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) failed: {}", reason, e),
                    )
                    .await;
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) task failed: {}", reason, e),
                    )
                    .await;
            }
        }
    }
687
    /// Core document-change pipeline: compile the changed file, update the
    /// AST/text/completion caches, publish diagnostics, and — on the first
    /// successful build in solc mode — kick off the background project index.
    ///
    /// Skips all work when the document text hashes identically to the text
    /// of the last cached build.
    async fn on_change(&self, params: TextDocumentItem) {
        let uri = params.uri.clone();
        let version = params.version;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return;
            }
        };

        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path")
                    .await;
                return;
            }
        };

        // Early-out: identical content to the last build means nothing to do.
        // (content_hash == 0 marks builds that never recorded a hash.)
        {
            use std::hash::{Hash, Hasher};
            let mut hasher = std::collections::hash_map::DefaultHasher::new();
            params.text.hash(&mut hasher);
            let incoming_hash = hasher.finish();

            let cache = self.ast_cache.read().await;
            if let Some(cached) = cache.get(&uri.to_string()) {
                if cached.content_hash != 0 && cached.content_hash == incoming_hash {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "on_change: content unchanged since last build, skipping rebuild",
                        )
                        .await;
                    return;
                }
            }
        }

        // Clear stale diagnostics while the rebuild runs.
        self.client
            .publish_diagnostics(uri.clone(), vec![], None)
            .await;

        let (should_lint, lint_settings) = {
            let lint_cfg = self.lint_config.read().await;
            let settings = self.settings.read().await;
            let enabled = lint_cfg.should_lint(&file_path) && settings.lint.enabled;
            let ls = settings.lint.clone();
            (enabled, ls)
        };

        // Gather (lint?, build diagnostics, AST). In solc mode, try solc
        // first and fall back to forge build on failure; lint runs
        // concurrently with the compile when enabled.
        let (lint_result, build_result, ast_result) = if self.use_solc {
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            let solc_future = crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client));

            if should_lint {
                let (lint, solc) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    solc_future
                );
                match solc {
                    Ok(data) => {
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (Some(lint), Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge build: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (Some(lint), build, ast)
                    }
                }
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                match solc_future.await {
                    Ok(data) => {
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (None, Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge build: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (None, build, ast)
                    }
                }
            }
        } else {
            if should_lint {
                let (lint, build, ast) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (Some(lint), build, ast)
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                let (build, ast) = tokio::join!(
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (None, build, ast)
            }
        };

        // "Succeeded" = diagnostics fetched and none of them is an ERROR.
        let build_succeeded = matches!(&build_result, Ok(diagnostics) if diagnostics.iter().all(|d| d.severity != Some(DiagnosticSeverity::ERROR)));

        let content_hash = {
            use std::hash::{Hash, Hasher};
            let mut hasher = std::collections::hash_map::DefaultHasher::new();
            params.text.hash(&mut hasher);
            hasher.finish()
        };

        if build_succeeded {
            if let Ok(ast_data) = ast_result {
                // Fresh good build: replace the AST and completion caches.
                let mut cached_build = goto::CachedBuild::new(
                    ast_data,
                    version,
                    Some(&mut *self.path_interner.write().await),
                );
                cached_build.content_hash = content_hash;
                let cached_build = Arc::new(cached_build);
                let mut cache = self.ast_cache.write().await;
                cache.insert(uri.to_string().into(), cached_build.clone());
                drop(cache);

                {
                    let mut cc = self.completion_cache.write().await;
                    cc.insert(
                        uri.to_string().into(),
                        cached_build.completion_cache.clone(),
                    );
                }
            } else if let Err(e) = ast_result {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("Build succeeded but failed to get AST: {e}"),
                    )
                    .await;
            }
        } else {
            // Build failed: keep the last good AST but stamp it with the new
            // content hash so the early-out above still fires for this text.
            {
                let mut cache = self.ast_cache.write().await;
                let uri_key = uri.to_string();
                if let Some(existing) = cache.get(&uri_key).cloned() {
                    let mut updated = (*existing).clone();
                    updated.content_hash = content_hash;
                    cache.insert(uri_key.into(), Arc::new(updated));
                }
            }
            self.client
                .log_message(
                    MessageType::INFO,
                    "Build errors detected, keeping existing AST cache",
                )
                .await;
        }

        // Store the document text, but never let an older version overwrite
        // a newer one (change events may arrive out of order).
        {
            let mut text_cache = self.text_cache.write().await;
            let uri_str = uri.to_string();
            let existing_version = text_cache.get(&uri_str).map(|(v, _)| *v).unwrap_or(-1);
            if version >= existing_version {
                text_cache.insert(uri_str.into(), (version, params.text));
            }
        }

        let mut all_diagnostics = vec![];

        if let Some(lint_result) = lint_result {
            match lint_result {
                Ok(mut lints) => {
                    // Drop lints whose string code the user excluded.
                    if !lint_settings.exclude.is_empty() {
                        lints.retain(|d| {
                            if let Some(NumberOrString::String(code)) = &d.code {
                                !lint_settings.exclude.iter().any(|ex| ex == code)
                            } else {
                                true
                            }
                        });
                    }
                    if !lints.is_empty() {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                format!("found {} lint diagnostics", lints.len()),
                            )
                            .await;
                    }
                    all_diagnostics.append(&mut lints);
                }
                Err(e) => {
                    self.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Forge lint diagnostics failed: {e}"),
                        )
                        .await;
                }
            }
        }

        match build_result {
            Ok(mut builds) => {
                if !builds.is_empty() {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("found {} build diagnostics", builds.len()),
                        )
                        .await;
                }
                all_diagnostics.append(&mut builds);
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Forge build diagnostics failed: {e}"),
                    )
                    .await;
            }
        }

        // Some clients render empty-message diagnostics badly; give them text.
        for diag in &mut all_diagnostics {
            if diag.message.is_empty() {
                diag.message = "Unknown issue".to_string();
            }
        }

        self.client
            .publish_diagnostics(uri, all_diagnostics, None)
            .await;

        if build_succeeded {
            // Detached: ask the editor to refresh inlay hints; result ignored.
            let client = self.client.clone();
            tokio::spawn(async move {
                let _ = client.inlay_hint_refresh().await;
            });
        }

        // First successful build in solc mode: start the one-shot background
        // project index (guarded by the `project_indexed` flag).
        if build_succeeded
            && self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && !self
                .project_indexed
                .load(std::sync::atomic::Ordering::Relaxed)
        {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            // Set the flag before spawning so concurrent on_change calls
            // don't start a second index.
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();
            let path_interner = self.path_interner.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Progress reporting for the (potentially long) index run.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the on-disk reference cache before a full reindex.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone().into(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            if report.complete {
                                // Fully up-to-date cache: done, skip reindex.
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache miss or partial: run the full project index.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(
                            ast_data,
                            0,
                            Some(&mut *path_interner.write().await),
                        );
                        // Keep anything the partial cache had that the fresh
                        // index didn't cover.
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        let cached_build = Arc::new(new_build);
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone().into(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!("project index: cached {} source files", source_count),
                            )
                            .await;

                        // Persist in the background; log all three outcomes.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index: cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: failed to persist cache: {e}"),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: cache save task failed: {e}"),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("project index failed: {e}"))
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some("Indexing failed".to_string()),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
1245
1246 async fn get_or_fetch_build(
1255 &self,
1256 uri: &Url,
1257 file_path: &std::path::Path,
1258 insert_on_miss: bool,
1259 ) -> Option<Arc<goto::CachedBuild>> {
1260 let uri_str = uri.to_string();
1261
1262 {
1265 let cache = self.ast_cache.read().await;
1266 if let Some(cached) = cache.get(&uri_str) {
1267 return Some(cached.clone());
1268 }
1269 }
1270
1271 if !insert_on_miss {
1275 return None;
1276 }
1277
1278 let path_str = file_path.to_str()?;
1280 let ast_result = if self.use_solc {
1281 let foundry_cfg = self.foundry_config_for_file(&file_path).await;
1282 match crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client)).await {
1283 Ok(data) => Ok(data),
1284 Err(_) => self.compiler.ast(path_str).await,
1285 }
1286 } else {
1287 self.compiler.ast(path_str).await
1288 };
1289 match ast_result {
1290 Ok(data) => {
1291 let build = Arc::new(goto::CachedBuild::new(
1294 data,
1295 0,
1296 Some(&mut *self.path_interner.write().await),
1297 ));
1298 let mut cache = self.ast_cache.write().await;
1299 cache.insert(uri_str.clone().into(), build.clone());
1300 Some(build)
1301 }
1302 Err(e) => {
1303 self.client
1304 .log_message(MessageType::ERROR, format!("failed to get AST: {e}"))
1305 .await;
1306 None
1307 }
1308 }
1309 }
1310
1311 async fn get_source_bytes(&self, uri: &Url, file_path: &std::path::Path) -> Option<Vec<u8>> {
1314 {
1315 let text_cache = self.text_cache.read().await;
1316 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
1317 return Some(content.as_bytes().to_vec());
1318 }
1319 }
1320 match std::fs::read(file_path) {
1321 Ok(bytes) => Some(bytes),
1322 Err(e) => {
1323 if e.kind() == std::io::ErrorKind::NotFound {
1324 self.client
1327 .log_message(
1328 MessageType::INFO,
1329 format!("file not found yet (transient): {e}"),
1330 )
1331 .await;
1332 } else {
1333 self.client
1334 .log_message(MessageType::ERROR, format!("failed to read file: {e}"))
1335 .await;
1336 }
1337 None
1338 }
1339 }
1340 }
1341}
1342
1343fn update_imports_on_delete_enabled(settings: &crate::config::Settings) -> bool {
1344 settings.file_operations.update_imports_on_delete
1345}
1346
/// Record that a cache-sync pass is wanted and report whether this caller
/// should spawn the debounce worker.
///
/// The pending flag is raised first so a concurrently exiting worker cannot
/// miss the request; the CAS on `running` then elects exactly one spawner.
fn start_or_mark_project_cache_sync_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    pending.store(true, Ordering::Release);
    let became_owner =
        running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    became_owner.is_ok()
}
1356
/// Atomically consume the sync "work pending" flag; true if work was queued.
fn take_project_cache_sync_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    pending.swap(false, std::sync::atomic::Ordering::AcqRel)
}
1360
/// Release sync-worker ownership, then re-acquire it iff a new request raced
/// in meanwhile. Returns true when the caller should keep looping as the
/// worker; false means it may exit (another thread can now take over).
fn stop_project_cache_sync_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Step down first so a concurrent requester is never blocked out...
    running.store(false, Ordering::Release);
    // ...then, if a request slipped in, try to win ownership back.
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1371
/// Attempt to take ownership of the dirty flag (true -> false). At most one
/// caller wins; losers see false and must not run the sync.
fn try_claim_project_cache_dirty(dirty: &std::sync::atomic::AtomicBool) -> bool {
    let claimed = dirty.compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire);
    claimed.is_ok()
}
1377
/// Flag that a cache-upsert pass is wanted; true when the caller must spawn
/// the debounced upsert worker (it won the `running` CAS).
///
/// Publishing the request before trying to become the worker guarantees an
/// exiting worker cannot miss it.
fn start_or_mark_project_cache_upsert_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    pending.store(true, std::sync::atomic::Ordering::Release);
    let won = running.compare_exchange(
        false,
        true,
        std::sync::atomic::Ordering::AcqRel,
        std::sync::atomic::Ordering::Acquire,
    );
    won.is_ok()
}
1387
/// Atomically consume the upsert "work pending" flag; true if work was queued.
fn take_project_cache_upsert_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    pending.swap(false, std::sync::atomic::Ordering::AcqRel)
}
1391
/// Release upsert-worker ownership, then re-acquire it iff a new request
/// raced in. Returns true when the caller should keep looping as the worker.
fn stop_project_cache_upsert_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Step down first, then check whether a fresh request needs this worker.
    running.store(false, Ordering::Release);
    let raced_in = pending.load(Ordering::Acquire);
    raced_in
        && running
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_ok()
}
1402
/// Purely lexical path normalization: drops `.` components and resolves `..`
/// against the path built so far, without touching the filesystem (symlinks
/// are NOT resolved). A leading `..` with nothing left to pop is silently
/// discarded.
fn lexical_normalize(path: &Path) -> PathBuf {
    path.components().fold(PathBuf::new(), |mut acc, comp| {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => {
                acc.pop();
            }
            // Root and prefix components are kept verbatim.
            Component::RootDir | Component::Prefix(_) => acc.push(comp.as_os_str()),
            Component::Normal(seg) => acc.push(seg),
        }
        acc
    })
}
1418
1419fn resolve_import_spec_to_abs(
1420 project_root: &Path,
1421 importer_abs: &Path,
1422 import_path: &str,
1423 remappings: &[String],
1424) -> Option<PathBuf> {
1425 if import_path.starts_with("./") || import_path.starts_with("../") {
1426 let base = importer_abs.parent()?;
1427 return Some(lexical_normalize(&base.join(import_path)));
1428 }
1429
1430 for remap in remappings {
1431 let mut it = remap.splitn(2, '=');
1432 let prefix = it.next().unwrap_or_default();
1433 let target = it.next().unwrap_or_default();
1434 if prefix.is_empty() || target.is_empty() {
1435 continue;
1436 }
1437 if import_path.starts_with(prefix) {
1438 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
1439 return Some(lexical_normalize(
1440 &project_root.join(format!("{target}{suffix}")),
1441 ));
1442 }
1443 }
1444
1445 Some(lexical_normalize(&project_root.join(import_path)))
1446}
1447
1448fn compute_reverse_import_closure(
1449 config: &FoundryConfig,
1450 changed_abs: &[PathBuf],
1451 remappings: &[String],
1452) -> HashSet<PathBuf> {
1453 let source_files = crate::solc::discover_source_files(config);
1454 let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
1455
1456 for importer in &source_files {
1457 let Ok(bytes) = std::fs::read(importer) else {
1458 continue;
1459 };
1460 for imp in links::ts_find_imports(&bytes) {
1461 let Some(imported_abs) =
1462 resolve_import_spec_to_abs(&config.root, importer, &imp.path, remappings)
1463 else {
1464 continue;
1465 };
1466 if !imported_abs.starts_with(&config.root) {
1467 continue;
1468 }
1469 reverse_edges
1470 .entry(imported_abs)
1471 .or_default()
1472 .insert(importer.clone());
1473 }
1474 }
1475
1476 let mut affected: HashSet<PathBuf> = HashSet::new();
1477 let mut queue: std::collections::VecDeque<PathBuf> = std::collections::VecDeque::new();
1478
1479 for path in changed_abs {
1480 if !path.starts_with(&config.root) {
1481 continue;
1482 }
1483 let normalized = lexical_normalize(path);
1484 if affected.insert(normalized.clone()) {
1485 queue.push_back(normalized);
1486 }
1487 }
1488
1489 while let Some(current) = queue.pop_front() {
1490 if let Some(importers) = reverse_edges.get(¤t) {
1491 for importer in importers {
1492 if affected.insert(importer.clone()) {
1493 queue.push_back(importer.clone());
1494 }
1495 }
1496 }
1497 }
1498
1499 let source_set: HashSet<PathBuf> = source_files.into_iter().collect();
1501 affected
1502 .into_iter()
1503 .filter(|p| source_set.contains(p) && p.is_file())
1504 .collect()
1505}
1506
/// Extract the trailing file id from a `start:length:fileid` source span;
/// `None` when the final segment is empty.
fn src_file_id(src: &str) -> Option<&str> {
    match src.rsplit(':').next() {
        Some(id) if !id.is_empty() => Some(id),
        _ => None,
    }
}
1510
1511fn doc_key_path(key: &hover::DocKey) -> Option<&str> {
1512 match key {
1513 hover::DocKey::Contract(k) | hover::DocKey::StateVar(k) | hover::DocKey::Method(k) => {
1514 k.split_once(':').map(|(path, _)| path)
1515 }
1516 hover::DocKey::Func(_) | hover::DocKey::Event(_) => None,
1517 }
1518}
1519
/// Splice a scoped (partial) rebuild into an existing full project build.
///
/// `scoped` covers only the re-indexed source paths. Every index in
/// `existing` is first purged of entries belonging to those paths, then the
/// scoped entries are appended. Returns the number of affected source paths,
/// or an error when a declaration id in `scoped` collides with an id that
/// `existing` attributes to an UNAFFECTED file (merging would corrupt that
/// file's entries). The purge/extend order below is significant.
fn merge_scoped_cached_build(
    existing: &mut goto::CachedBuild,
    scoped: goto::CachedBuild,
) -> Result<usize, String> {
    // Paths covered by the scoped build, in both string and absolute form.
    let affected_paths: HashSet<String> = scoped.nodes.keys().map(|p| p.to_string()).collect();
    if affected_paths.is_empty() {
        // Nothing to merge.
        return Ok(0);
    }
    let affected_abs_paths: HashSet<crate::types::AbsPath> =
        scoped.path_to_abs.values().cloned().collect();

    // Safety check: a scoped decl id must not already belong to a file
    // outside the affected set — ids stay unique across the whole build.
    for scoped_id in scoped.decl_index.keys() {
        if existing.decl_index.contains_key(scoped_id)
            && let Some(path) = existing.node_id_to_source_path.get(scoped_id)
            && !affected_abs_paths.contains(path)
        {
            return Err(format!(
                "decl id collision for id={} in unaffected path {}",
                scoped_id, path
            ));
        }
    }

    // Snapshot the pre-merge id->path map: `external_refs` keys embed the
    // OLD file ids, so pruning must consult the old mapping.
    let old_id_to_path = existing.id_to_path_map.clone();
    existing.external_refs.retain(|src, _| {
        src_file_id(src.as_str())
            .and_then(|fid| old_id_to_path.get(fid))
            .map(|path| !affected_paths.contains(path))
            .unwrap_or(true) // unknown file id: keep conservatively
    });
    // Drop every per-path entry that the scoped build will replace.
    existing
        .nodes
        .retain(|path, _| !affected_paths.contains(path.as_str()));
    existing
        .path_to_abs
        .retain(|path, _| !affected_paths.contains(path.as_str()));
    existing
        .id_to_path_map
        .retain(|_, path| !affected_paths.contains(path));

    // NOTE(review): node_id_to_source_path is pruned BEFORE the decl_index
    // retain below, so ids from affected files hit the `None => true` arm
    // and survive until overwritten by the scoped extend — confirm stale ids
    // for declarations deleted in the edit cannot linger.
    existing
        .node_id_to_source_path
        .retain(|_, path| !affected_abs_paths.contains(path));
    existing
        .decl_index
        .retain(|id, _| match existing.node_id_to_source_path.get(id) {
            Some(path) => !affected_abs_paths.contains(path),
            None => true,
        });
    existing
        .hint_index
        .retain(|abs_path, _| !affected_abs_paths.contains(abs_path));
    existing.doc_index.retain(|k, _| {
        doc_key_path(k)
            .map(|p| !affected_paths.contains(p))
            .unwrap_or(true) // path-less keys (Func/Event) are kept
    });
    // Append the scoped entries; same-key leftovers are overwritten.
    existing.nodes.extend(scoped.nodes);
    existing.path_to_abs.extend(scoped.path_to_abs);
    existing.external_refs.extend(scoped.external_refs);
    existing.id_to_path_map.extend(scoped.id_to_path_map);
    existing.decl_index.extend(scoped.decl_index);
    existing
        .node_id_to_source_path
        .extend(scoped.node_id_to_source_path);
    existing.hint_index.extend(scoped.hint_index);
    existing.doc_index.extend(scoped.doc_index);

    Ok(affected_paths.len())
}
1596
1597async fn run_did_save(this: ForgeLsp, params: DidSaveTextDocumentParams) {
1603 this.client
1604 .log_message(MessageType::INFO, "file saved")
1605 .await;
1606
1607 let mut text_content = if let Some(text) = params.text {
1608 text
1609 } else {
1610 let cached = {
1612 let text_cache = this.text_cache.read().await;
1613 text_cache
1614 .get(params.text_document.uri.as_str())
1615 .map(|(_, content)| content.clone())
1616 };
1617 if let Some(content) = cached {
1618 content
1619 } else {
1620 match std::fs::read_to_string(params.text_document.uri.path()) {
1621 Ok(content) => content,
1622 Err(e) => {
1623 this.client
1624 .log_message(
1625 MessageType::ERROR,
1626 format!("Failed to read file on save: {e}"),
1627 )
1628 .await;
1629 return;
1630 }
1631 }
1632 }
1633 };
1634
1635 let uri_str = params.text_document.uri.to_string();
1639 let template_on_create = this
1640 .settings
1641 .read()
1642 .await
1643 .file_operations
1644 .template_on_create;
1645 let needs_recover_scaffold = {
1646 let pending = this.pending_create_scaffold.read().await;
1647 template_on_create
1648 && pending.contains(&uri_str)
1649 && !text_content.chars().any(|ch| !ch.is_whitespace())
1650 };
1651 if needs_recover_scaffold {
1652 let solc_version = this.foundry_config.read().await.solc_version.clone();
1653 if let Some(scaffold) =
1654 file_operations::generate_scaffold(¶ms.text_document.uri, solc_version.as_deref())
1655 {
1656 let end = utils::byte_offset_to_position(&text_content, text_content.len());
1657 let edit = WorkspaceEdit {
1658 changes: Some(HashMap::from([(
1659 params.text_document.uri.clone(),
1660 vec![TextEdit {
1661 range: Range {
1662 start: Position::default(),
1663 end,
1664 },
1665 new_text: scaffold.clone(),
1666 }],
1667 )])),
1668 document_changes: None,
1669 change_annotations: None,
1670 };
1671 if this
1672 .client
1673 .apply_edit(edit)
1674 .await
1675 .as_ref()
1676 .is_ok_and(|r| r.applied)
1677 {
1678 text_content = scaffold.clone();
1679 let version = this
1680 .text_cache
1681 .read()
1682 .await
1683 .get(params.text_document.uri.as_str())
1684 .map(|(v, _)| *v)
1685 .unwrap_or_default();
1686 this.text_cache
1687 .write()
1688 .await
1689 .insert(uri_str.clone().into(), (version, scaffold));
1690 this.pending_create_scaffold.write().await.remove(&uri_str);
1691 this.client
1692 .log_message(
1693 MessageType::INFO,
1694 format!("didSave: recovered scaffold for {}", uri_str),
1695 )
1696 .await;
1697 }
1698 }
1699 }
1700
1701 let version = this
1702 .text_cache
1703 .read()
1704 .await
1705 .get(params.text_document.uri.as_str())
1706 .map(|(version, _)| *version)
1707 .unwrap_or_default();
1708
1709 let saved_uri = params.text_document.uri.clone();
1710 if let Ok(saved_file_path) = saved_uri.to_file_path() {
1711 let saved_abs = saved_file_path.to_string_lossy().to_string();
1712 this.project_cache_changed_files
1713 .write()
1714 .await
1715 .insert(saved_abs.clone());
1716 this.project_cache_upsert_files
1717 .write()
1718 .await
1719 .insert(saved_abs);
1720 }
1721 this.on_change(TextDocumentItem {
1722 uri: saved_uri.clone(),
1723 text: text_content,
1724 version,
1725 language_id: "".to_string(),
1726 })
1727 .await;
1728
1729 let settings_snapshot = this.settings.read().await.clone();
1730
1731 if this.use_solc
1737 && settings_snapshot.project_index.full_project_scan
1738 && matches!(
1739 settings_snapshot.project_index.cache_mode,
1740 crate::config::ProjectIndexCacheMode::V2 | crate::config::ProjectIndexCacheMode::Auto
1741 )
1742 {
1743 if start_or_mark_project_cache_upsert_pending(
1744 &this.project_cache_upsert_pending,
1745 &this.project_cache_upsert_running,
1746 ) {
1747 let upsert_files = this.project_cache_upsert_files.clone();
1748 let ast_cache = this.ast_cache.clone();
1749 let client = this.client.clone();
1750 let running_flag = this.project_cache_upsert_running.clone();
1751 let pending_flag = this.project_cache_upsert_pending.clone();
1752 let foundry_config = this.foundry_config.read().await.clone();
1753 let root_key = this.project_cache_key().await;
1754
1755 tokio::spawn(async move {
1756 loop {
1757 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
1758
1759 if !take_project_cache_upsert_pending(&pending_flag) {
1760 if stop_project_cache_upsert_worker_or_reclaim(&pending_flag, &running_flag)
1761 {
1762 continue;
1763 }
1764 break;
1765 }
1766
1767 let changed_paths: Vec<String> = {
1768 let mut paths = upsert_files.write().await;
1769 paths.drain().collect()
1770 };
1771 if changed_paths.is_empty() {
1772 continue;
1773 }
1774
1775 let Some(ref rk) = root_key else {
1780 continue;
1781 };
1782 let Some(root_build) = ast_cache.read().await.get(rk).cloned() else {
1783 continue;
1784 };
1785
1786 let cfg = foundry_config.clone();
1787 let build = (*root_build).clone();
1788 let changed = changed_paths.clone();
1789
1790 let res = tokio::task::spawn_blocking(move || {
1791 crate::project_cache::upsert_reference_cache_v2_with_report(
1792 &cfg, &build, &changed,
1793 )
1794 })
1795 .await;
1796
1797 match res {
1798 Ok(Ok(report)) => {
1799 client
1800 .log_message(
1801 MessageType::INFO,
1802 format!(
1803 "project cache v2 upsert (debounced): touched_files={}, duration={}ms",
1804 report.file_count_hashed, report.duration_ms
1805 ),
1806 )
1807 .await;
1808 }
1809 Ok(Err(e)) => {
1810 client
1811 .log_message(
1812 MessageType::WARNING,
1813 format!("project cache v2 upsert: {e}"),
1814 )
1815 .await;
1816 }
1817 Err(e) => {
1818 client
1819 .log_message(
1820 MessageType::WARNING,
1821 format!("project cache v2 upsert task failed: {e}"),
1822 )
1823 .await;
1824 }
1825 }
1826 }
1827 });
1828 }
1829 }
1830
1831 if this.use_solc
1834 && settings_snapshot.project_index.full_project_scan
1835 && this.project_cache_dirty.load(Ordering::Acquire)
1836 {
1837 if start_or_mark_project_cache_sync_pending(
1838 &this.project_cache_sync_pending,
1839 &this.project_cache_sync_running,
1840 ) {
1841 let foundry_config = this.foundry_config.read().await.clone();
1842 let root_key = this.project_cache_key().await;
1843 let ast_cache = this.ast_cache.clone();
1844 let text_cache = this.text_cache.clone();
1845 let client = this.client.clone();
1846 let dirty_flag = this.project_cache_dirty.clone();
1847 let running_flag = this.project_cache_sync_running.clone();
1848 let pending_flag = this.project_cache_sync_pending.clone();
1849 let changed_files = this.project_cache_changed_files.clone();
1850 let aggressive_scoped = settings_snapshot.project_index.incremental_edit_reindex;
1851 let force_full_rebuild_flag = this.project_cache_force_full_rebuild.clone();
1852 let path_interner = this.path_interner.clone();
1853
1854 tokio::spawn(async move {
1855 loop {
1856 tokio::time::sleep(std::time::Duration::from_millis(700)).await;
1858
1859 if !take_project_cache_sync_pending(&pending_flag) {
1860 if stop_project_cache_sync_worker_or_reclaim(&pending_flag, &running_flag) {
1861 continue;
1862 }
1863 break;
1864 }
1865
1866 if !try_claim_project_cache_dirty(&dirty_flag) {
1867 continue;
1868 }
1869
1870 let Some(cache_key) = &root_key else {
1871 dirty_flag.store(true, Ordering::Release);
1872 continue;
1873 };
1874 if !foundry_config.root.is_dir() {
1875 dirty_flag.store(true, Ordering::Release);
1876 client
1877 .log_message(
1878 MessageType::WARNING,
1879 format!(
1880 "didSave cache sync: invalid project root {}, deferring",
1881 foundry_config.root.display()
1882 ),
1883 )
1884 .await;
1885 continue;
1886 }
1887
1888 let mut scoped_ok = false;
1889
1890 let force_full = force_full_rebuild_flag.swap(false, Ordering::AcqRel);
1894
1895 if aggressive_scoped && !force_full {
1896 let changed_abs: Vec<PathBuf> = {
1897 let mut changed = changed_files.write().await;
1898 let drained =
1899 changed.drain().map(PathBuf::from).collect::<Vec<PathBuf>>();
1900 drained
1901 };
1902 if !changed_abs.is_empty() {
1903 let remappings = crate::solc::resolve_remappings(&foundry_config).await;
1904 let cfg_for_plan = foundry_config.clone();
1905 let changed_for_plan = changed_abs.clone();
1906 let remappings_for_plan = remappings.clone();
1907 let plan_res = tokio::task::spawn_blocking(move || {
1908 compute_reverse_import_closure(
1909 &cfg_for_plan,
1910 &changed_for_plan,
1911 &remappings_for_plan,
1912 )
1913 })
1914 .await;
1915
1916 let affected_files = match plan_res {
1917 Ok(set) => set.into_iter().collect::<Vec<PathBuf>>(),
1918 Err(_) => Vec::new(),
1919 };
1920 if !affected_files.is_empty() {
1921 client
1922 .log_message(
1923 MessageType::INFO,
1924 format!(
1925 "didSave cache sync: aggressive scoped reindex (affected={})",
1926 affected_files.len(),
1927 ),
1928 )
1929 .await;
1930
1931 let text_cache_snapshot = text_cache.read().await.clone();
1932 match crate::solc::solc_project_index_scoped(
1933 &foundry_config,
1934 Some(&client),
1935 Some(&text_cache_snapshot),
1936 &affected_files,
1937 )
1938 .await
1939 {
1940 Ok(ast_data) => {
1941 let scoped_build = Arc::new(crate::goto::CachedBuild::new(
1942 ast_data,
1943 0,
1944 Some(&mut *path_interner.write().await),
1945 ));
1946 let source_count = scoped_build.nodes.len();
1947 enum ScopedApply {
1948 Merged { affected_count: usize },
1949 Stored,
1950 Failed(String),
1951 }
1952 let apply_outcome = {
1953 let mut cache = ast_cache.write().await;
1954 if let Some(existing) = cache.get(cache_key).cloned() {
1955 let mut merged = (*existing).clone();
1956 match merge_scoped_cached_build(
1957 &mut merged,
1958 (*scoped_build).clone(),
1959 ) {
1960 Ok(affected_count) => {
1961 cache.insert(
1962 cache_key.clone().into(),
1963 Arc::new(merged),
1964 );
1965 ScopedApply::Merged { affected_count }
1966 }
1967 Err(e) => ScopedApply::Failed(e),
1968 }
1969 } else {
1970 cache
1971 .insert(cache_key.clone().into(), scoped_build);
1972 ScopedApply::Stored
1973 }
1974 };
1975
1976 match apply_outcome {
1977 ScopedApply::Merged { affected_count } => {
1978 client
1979 .log_message(
1980 MessageType::INFO,
1981 format!(
1982 "didSave cache sync: scoped merge applied (scoped_sources={}, affected_paths={})",
1983 source_count, affected_count
1984 ),
1985 )
1986 .await;
1987 scoped_ok = true;
1988 }
1989 ScopedApply::Stored => {
1990 client
1991 .log_message(
1992 MessageType::INFO,
1993 format!(
1994 "didSave cache sync: scoped cache stored (scoped_sources={})",
1995 source_count
1996 ),
1997 )
1998 .await;
1999 scoped_ok = true;
2000 }
2001 ScopedApply::Failed(e) => {
2002 client
2003 .log_message(
2004 MessageType::WARNING,
2005 format!(
2006 "didSave cache sync: scoped merge rejected, will retry scoped on next save: {e}"
2007 ),
2008 )
2009 .await;
2010 dirty_flag.store(true, Ordering::Release);
2011 }
2012 }
2013 }
2014 Err(e) => {
2015 client
2016 .log_message(
2017 MessageType::WARNING,
2018 format!(
2019 "didSave cache sync: scoped reindex failed, will retry scoped on next save: {e}"
2020 ),
2021 )
2022 .await;
2023 dirty_flag.store(true, Ordering::Release);
2024 }
2025 }
2026 } else {
2027 client
2028 .log_message(
2029 MessageType::INFO,
2030 "didSave cache sync: no affected files from scoped planner",
2031 )
2032 .await;
2033 }
2034 }
2035 }
2036
2037 if scoped_ok {
2038 continue;
2039 }
2040 if aggressive_scoped {
2041 continue;
2042 }
2043
2044 client
2045 .log_message(
2046 MessageType::INFO,
2047 "didSave cache sync: rebuilding project index from disk",
2048 )
2049 .await;
2050
2051 match crate::solc::solc_project_index(&foundry_config, Some(&client), None)
2052 .await
2053 {
2054 Ok(ast_data) => {
2055 let mut new_build = crate::goto::CachedBuild::new(
2056 ast_data,
2057 0,
2058 Some(&mut *path_interner.write().await),
2059 );
2060 if let Some(prev) = ast_cache.read().await.get(cache_key) {
2061 new_build.merge_missing_from(prev);
2062 }
2063 let source_count = new_build.nodes.len();
2064 let cached_build = Arc::new(new_build);
2065 let build_for_save = (*cached_build).clone();
2066 ast_cache
2067 .write()
2068 .await
2069 .insert(cache_key.clone().into(), cached_build);
2070
2071 let cfg_for_save = foundry_config.clone();
2072 let save_res = tokio::task::spawn_blocking(move || {
2073 crate::project_cache::save_reference_cache_with_report(
2074 &cfg_for_save,
2075 &build_for_save,
2076 None,
2077 )
2078 })
2079 .await;
2080
2081 match save_res {
2082 Ok(Ok(report)) => {
2083 changed_files.write().await.clear();
2084 client
2085 .log_message(
2086 MessageType::INFO,
2087 format!(
2088 "didSave cache sync: persisted cache (sources={}, hashed_files={}, duration={}ms)",
2089 source_count, report.file_count_hashed, report.duration_ms
2090 ),
2091 )
2092 .await;
2093 }
2094 Ok(Err(e)) => {
2095 dirty_flag.store(true, Ordering::Release);
2096 client
2097 .log_message(
2098 MessageType::WARNING,
2099 format!(
2100 "didSave cache sync: persist failed, will retry: {e}"
2101 ),
2102 )
2103 .await;
2104 }
2105 Err(e) => {
2106 dirty_flag.store(true, Ordering::Release);
2107 client
2108 .log_message(
2109 MessageType::WARNING,
2110 format!(
2111 "didSave cache sync: save task failed, will retry: {e}"
2112 ),
2113 )
2114 .await;
2115 }
2116 }
2117 }
2118 Err(e) => {
2119 dirty_flag.store(true, Ordering::Release);
2120 client
2121 .log_message(
2122 MessageType::WARNING,
2123 format!("didSave cache sync: re-index failed, will retry: {e}"),
2124 )
2125 .await;
2126 }
2127 }
2128 }
2129 });
2130 }
2131 }
2132}
2133
2134#[tower_lsp::async_trait]
2135impl LanguageServer for ForgeLsp {
2136 async fn initialize(
2137 &self,
2138 params: InitializeParams,
2139 ) -> tower_lsp::jsonrpc::Result<InitializeResult> {
2140 {
2142 let mut caps = self.client_capabilities.write().await;
2143 *caps = Some(params.capabilities.clone());
2144 }
2145
2146 if let Some(init_opts) = ¶ms.initialization_options {
2148 let s = config::parse_settings(init_opts);
2149 self.client
2150 .log_message(
2151 MessageType::INFO,
2152 format!(
2153 "settings: inlayHints.parameters={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
2154 s.inlay_hints.parameters, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
2155 ),
2156 )
2157 .await;
2158 let mut settings = self.settings.write().await;
2159 *settings = s;
2160 self.settings_from_init
2161 .store(true, std::sync::atomic::Ordering::Relaxed);
2162 }
2163
2164 if let Some(uri) = params.root_uri.as_ref() {
2166 let mut root = self.root_uri.write().await;
2167 *root = Some(uri.clone());
2168 }
2169
2170 if let Some(root_uri) = params
2172 .root_uri
2173 .as_ref()
2174 .and_then(|uri| uri.to_file_path().ok())
2175 {
2176 let lint_cfg = config::load_lint_config(&root_uri);
2177 self.client
2178 .log_message(
2179 MessageType::INFO,
2180 format!(
2181 "loaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
2182 lint_cfg.lint_on_build,
2183 lint_cfg.ignore_patterns.len()
2184 ),
2185 )
2186 .await;
2187 let mut config = self.lint_config.write().await;
2188 *config = lint_cfg;
2189
2190 let foundry_cfg = config::load_foundry_config(&root_uri);
2191 self.client
2192 .log_message(
2193 MessageType::INFO,
2194 format!(
2195 "loaded foundry.toml: solc={}, remappings={}",
2196 foundry_cfg.solc_version.as_deref().unwrap_or("auto"),
2197 foundry_cfg.remappings.len()
2198 ),
2199 )
2200 .await;
2201 let mut fc = self.foundry_config.write().await;
2202 *fc = foundry_cfg;
2203 }
2204
2205 let client_encodings = params
2207 .capabilities
2208 .general
2209 .as_ref()
2210 .and_then(|g| g.position_encodings.as_deref());
2211 let encoding = utils::PositionEncoding::negotiate(client_encodings);
2212 utils::set_encoding(encoding);
2213
2214 Ok(InitializeResult {
2215 server_info: Some(ServerInfo {
2216 name: "Solidity Language Server".to_string(),
2217 version: Some(env!("LONG_VERSION").to_string()),
2218 }),
2219 capabilities: ServerCapabilities {
2220 position_encoding: Some(encoding.into()),
2221 completion_provider: Some(CompletionOptions {
2222 trigger_characters: Some(vec![
2223 ".".to_string(),
2224 "\"".to_string(),
2225 "'".to_string(),
2226 "/".to_string(),
2227 ]),
2228 resolve_provider: Some(false),
2229 ..Default::default()
2230 }),
2231 signature_help_provider: Some(SignatureHelpOptions {
2232 trigger_characters: Some(vec![
2233 "(".to_string(),
2234 ",".to_string(),
2235 "[".to_string(),
2236 ]),
2237 retrigger_characters: None,
2238 work_done_progress_options: WorkDoneProgressOptions {
2239 work_done_progress: None,
2240 },
2241 }),
2242 definition_provider: Some(OneOf::Left(true)),
2243 declaration_provider: Some(DeclarationCapability::Simple(true)),
2244 implementation_provider: Some(ImplementationProviderCapability::Simple(true)),
2245 references_provider: Some(OneOf::Left(true)),
2246 rename_provider: Some(OneOf::Right(RenameOptions {
2247 prepare_provider: Some(true),
2248 work_done_progress_options: WorkDoneProgressOptions {
2249 work_done_progress: Some(true),
2250 },
2251 })),
2252 workspace_symbol_provider: Some(OneOf::Left(true)),
2253 document_symbol_provider: Some(OneOf::Left(true)),
2254 document_highlight_provider: Some(OneOf::Left(true)),
2255 hover_provider: Some(HoverProviderCapability::Simple(true)),
2256 document_link_provider: Some(DocumentLinkOptions {
2257 resolve_provider: Some(false),
2258 work_done_progress_options: WorkDoneProgressOptions {
2259 work_done_progress: None,
2260 },
2261 }),
2262 document_formatting_provider: Some(OneOf::Left(true)),
2263 code_action_provider: Some(CodeActionProviderCapability::Options(
2264 CodeActionOptions {
2265 code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]),
2266 resolve_provider: Some(false),
2267 work_done_progress_options: WorkDoneProgressOptions {
2268 work_done_progress: None,
2269 },
2270 },
2271 )),
2272 call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
2273 code_lens_provider: None,
2274 folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
2275 selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
2276 inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
2277 InlayHintOptions {
2278 resolve_provider: Some(false),
2279 work_done_progress_options: WorkDoneProgressOptions {
2280 work_done_progress: None,
2281 },
2282 },
2283 ))),
2284 semantic_tokens_provider: Some(
2285 SemanticTokensServerCapabilities::SemanticTokensOptions(
2286 SemanticTokensOptions {
2287 legend: semantic_tokens::legend(),
2288 full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
2289 range: Some(true),
2290 work_done_progress_options: WorkDoneProgressOptions {
2291 work_done_progress: None,
2292 },
2293 },
2294 ),
2295 ),
2296 text_document_sync: Some(TextDocumentSyncCapability::Options(
2297 TextDocumentSyncOptions {
2298 will_save: Some(true),
2299 will_save_wait_until: None,
2300 open_close: Some(true),
2301 save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
2302 include_text: Some(true),
2303 })),
2304 change: Some(TextDocumentSyncKind::FULL),
2305 },
2306 )),
2307 workspace: Some(WorkspaceServerCapabilities {
2308 workspace_folders: None,
2309 file_operations: Some(WorkspaceFileOperationsServerCapabilities {
2310 will_rename: Some(FileOperationRegistrationOptions {
2311 filters: vec![
2312 FileOperationFilter {
2314 scheme: Some("file".to_string()),
2315 pattern: FileOperationPattern {
2316 glob: "**/*.sol".to_string(),
2317 matches: Some(FileOperationPatternKind::File),
2318 options: None,
2319 },
2320 },
2321 FileOperationFilter {
2323 scheme: Some("file".to_string()),
2324 pattern: FileOperationPattern {
2325 glob: "**".to_string(),
2326 matches: Some(FileOperationPatternKind::Folder),
2327 options: None,
2328 },
2329 },
2330 ],
2331 }),
2332 did_rename: Some(FileOperationRegistrationOptions {
2333 filters: vec![
2334 FileOperationFilter {
2335 scheme: Some("file".to_string()),
2336 pattern: FileOperationPattern {
2337 glob: "**/*.sol".to_string(),
2338 matches: Some(FileOperationPatternKind::File),
2339 options: None,
2340 },
2341 },
2342 FileOperationFilter {
2343 scheme: Some("file".to_string()),
2344 pattern: FileOperationPattern {
2345 glob: "**".to_string(),
2346 matches: Some(FileOperationPatternKind::Folder),
2347 options: None,
2348 },
2349 },
2350 ],
2351 }),
2352 will_delete: Some(FileOperationRegistrationOptions {
2353 filters: vec![
2354 FileOperationFilter {
2355 scheme: Some("file".to_string()),
2356 pattern: FileOperationPattern {
2357 glob: "**/*.sol".to_string(),
2358 matches: Some(FileOperationPatternKind::File),
2359 options: None,
2360 },
2361 },
2362 FileOperationFilter {
2363 scheme: Some("file".to_string()),
2364 pattern: FileOperationPattern {
2365 glob: "**".to_string(),
2366 matches: Some(FileOperationPatternKind::Folder),
2367 options: None,
2368 },
2369 },
2370 ],
2371 }),
2372 did_delete: Some(FileOperationRegistrationOptions {
2373 filters: vec![
2374 FileOperationFilter {
2375 scheme: Some("file".to_string()),
2376 pattern: FileOperationPattern {
2377 glob: "**/*.sol".to_string(),
2378 matches: Some(FileOperationPatternKind::File),
2379 options: None,
2380 },
2381 },
2382 FileOperationFilter {
2383 scheme: Some("file".to_string()),
2384 pattern: FileOperationPattern {
2385 glob: "**".to_string(),
2386 matches: Some(FileOperationPatternKind::Folder),
2387 options: None,
2388 },
2389 },
2390 ],
2391 }),
2392 will_create: Some(FileOperationRegistrationOptions {
2393 filters: vec![FileOperationFilter {
2394 scheme: Some("file".to_string()),
2395 pattern: FileOperationPattern {
2396 glob: "**/*.sol".to_string(),
2397 matches: Some(FileOperationPatternKind::File),
2398 options: None,
2399 },
2400 }],
2401 }),
2402 did_create: Some(FileOperationRegistrationOptions {
2403 filters: vec![FileOperationFilter {
2404 scheme: Some("file".to_string()),
2405 pattern: FileOperationPattern {
2406 glob: "**/*.sol".to_string(),
2407 matches: Some(FileOperationPatternKind::File),
2408 options: None,
2409 },
2410 }],
2411 }),
2412 ..Default::default()
2413 }),
2414 }),
2415 execute_command_provider: Some(ExecuteCommandOptions {
2416 commands: vec![
2417 "solidity.clearCache".to_string(),
2418 "solidity.reindex".to_string(),
2419 ],
2420 work_done_progress_options: WorkDoneProgressOptions {
2421 work_done_progress: None,
2422 },
2423 }),
2424 ..ServerCapabilities::default()
2425 },
2426 })
2427 }
2428
    /// Handles the LSP `initialized` notification.
    ///
    /// Performs three pieces of one-time startup work:
    /// 1. dynamically registers a file watcher for `foundry.toml` /
    ///    `remappings.txt` when the client supports dynamic registration;
    /// 2. pulls settings via `workspace/configuration` when they were not
    ///    already supplied in the initialize request;
    /// 3. when solc-based indexing and full project scan are enabled, spawns
    ///    a background task that builds the project index in two phases
    ///    (phase 1: `src` only, fast; phase 2: src + test + script, full),
    ///    reporting progress to the client and persisting the result.
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "lsp server initialized.")
            .await;

        // Dynamic watcher registration is only legal if the client advertised
        // `workspace.didChangeWatchedFiles.dynamicRegistration`.
        let supports_dynamic = self
            .client_capabilities
            .read()
            .await
            .as_ref()
            .and_then(|caps| caps.workspace.as_ref())
            .and_then(|ws| ws.did_change_watched_files.as_ref())
            .and_then(|dcwf| dcwf.dynamic_registration)
            .unwrap_or(false);

        if supports_dynamic {
            // Watch config files anywhere in the workspace so config reloads
            // (handled in `did_change_watched_files`) fire on edit.
            let registration = Registration {
                id: "foundry-toml-watcher".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(
                    serde_json::to_value(DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/foundry.toml".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/remappings.txt".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                        ],
                    })
                    // Serializing a static struct; failure would be a bug.
                    .unwrap(),
                ),
            };

            // Registration failure is non-fatal: log and continue without
            // config hot-reload.
            if let Err(e) = self.client.register_capability(vec![registration]).await {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("failed to register foundry.toml watcher: {e}"),
                    )
                    .await;
            } else {
                self.client
                    .log_message(MessageType::INFO, "registered foundry.toml file watcher")
                    .await;
            }
        }

        // Settings already delivered via initializationOptions take priority;
        // only fall back to workspace/configuration when that flag is unset.
        if !self
            .settings_from_init
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            let supports_config = self
                .client_capabilities
                .read()
                .await
                .as_ref()
                .and_then(|caps| caps.workspace.as_ref())
                .and_then(|ws| ws.configuration)
                .unwrap_or(false);

            if supports_config {
                match self
                    .client
                    .configuration(vec![ConfigurationItem {
                        scope_uri: None,
                        section: Some("solidity-language-server".to_string()),
                    }])
                    .await
                {
                    Ok(values) => {
                        // One item requested, so only the first value matters;
                        // a null value means the client has no such section.
                        if let Some(val) = values.into_iter().next() {
                            if !val.is_null() {
                                let s = config::parse_settings(&val);
                                self.client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "settings (workspace/configuration): lint.enabled={}, lint.exclude={:?}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}",
                                            s.lint.enabled, s.lint.exclude, s.project_index.full_project_scan, s.project_index.cache_mode,
                                        ),
                                    )
                                    .await;
                                let mut settings = self.settings.write().await;
                                *settings = s;
                            }
                        }
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("workspace/configuration request failed: {e}"),
                            )
                            .await;
                    }
                }
            }
        }

        // Eager project indexing: gated on solc mode + the fullProjectScan
        // setting (possibly just updated above).
        if self.use_solc && self.settings.read().await.project_index.full_project_scan {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            // Snapshot everything the background task needs; the task must
            // not borrow `self`.
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();
            let sub_caches_arc = self.sub_caches.clone();
            let sub_caches_loading_flag = self.sub_caches_loading.clone();
            let path_interner = self.path_interner.clone();

            tokio::spawn(async move {
                // Without a cache key there is nowhere to store the index.
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping eager index",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Create a server-initiated progress token for phase 1; the
                // request result is ignored because progress is best-effort.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Pre-intern every discoverable path (including libs) so later
                // builds share interned ids. Discovery is blocking file-system
                // work, hence spawn_blocking.
                {
                    let cfg_for_discover = foundry_config.clone();
                    let all_files = tokio::task::spawn_blocking(move || {
                        crate::solc::discover_source_files_with_libs(&cfg_for_discover)
                    })
                    .await
                    .unwrap_or_default();
                    let mut interner = path_interner.write().await;
                    for file in &all_files {
                        if let Some(path_str) = file.to_str() {
                            interner.intern(path_str);
                        }
                    }
                }

                // Try the on-disk reference cache first; a complete cache lets
                // us skip compilation entirely.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone().into(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "loaded {source_count} sources from cache ({}ms)",
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete cache ends the whole pipeline early;
                            // a partial one still falls through to rebuild.
                            if report.complete {
                                spawn_load_lib_sub_caches_task(
                                    foundry_config.clone(),
                                    sub_caches_arc.clone(),
                                    sub_caches_loading_flag.clone(),
                                    path_interner.clone(),
                                    client.clone(),
                                );
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                "no cached index found, building from source",
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("cache load failed: {e}"))
                            .await;
                    }
                }

                // Compute the two compilation scopes: src-only (phase 1) and
                // the full closure including tests/scripts (phase 2).
                let remappings = crate::solc::resolve_remappings(&foundry_config).await;
                let cfg_for_src = foundry_config.clone();
                let remappings_for_src = remappings.clone();
                let src_files = tokio::task::spawn_blocking(move || {
                    crate::solc::discover_src_only_closure(&cfg_for_src, &remappings_for_src)
                })
                .await
                .unwrap_or_default();

                let cfg_for_full = foundry_config.clone();
                let remappings_for_full = remappings.clone();
                let full_files = tokio::task::spawn_blocking(move || {
                    crate::solc::discover_compilation_closure(&cfg_for_full, &remappings_for_full)
                })
                .await
                .unwrap_or_default();

                let src_count = src_files.len();
                let full_count = full_files.len();

                let phase1_start = std::time::Instant::now();
                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Report(
                            WorkDoneProgressReport {
                                message: Some(format!("Compiling {} src files...", src_count,)),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Phase 1: index just the src closure so core features come
                // up quickly; success is remembered to decide how phase 2 runs.
                let phase1_ok = match crate::solc::solc_project_index_scoped(
                    &foundry_config,
                    Some(&client),
                    None,
                    &src_files,
                )
                .await
                {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(
                            ast_data,
                            0,
                            Some(&mut *path_interner.write().await),
                        );
                        // Keep entries from any partially loaded cache that
                        // this scoped build did not cover.
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone().into(), Arc::new(new_build));
                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: phase 1 complete — {} source files indexed in {:.1}s",
                                    source_count,
                                    phase1_start.elapsed().as_secs_f64(),
                                ),
                            )
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files (full index in background)",
                                            source_count,
                                        )),
                                    },
                                )),
                            })
                            .await;
                        true
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: phase 1 failed: {e}"),
                            )
                            .await;
                        false
                    }
                };

                // Phase 2 is built as a future first; whether it is spawned or
                // awaited inline depends on phase 1's outcome (see below).
                let phase2_foundry_config = foundry_config.clone();
                let phase2_client = client.clone();
                let phase2_cache_key = cache_key.clone();
                let phase2_ast_cache = ast_cache.clone();
                let phase2_path_interner = path_interner.clone();
                let phase2_sub_caches = sub_caches_arc.clone();
                let phase2_loading_flag = sub_caches_loading_flag.clone();
                let phase2 = async move {
                    let phase2_start = std::time::Instant::now();
                    // Separate progress token so phase 2 progress does not
                    // collide with the already-ended phase 1 token.
                    let token2 = NumberOrString::String("solidity/projectIndexFull".to_string());
                    let _ = phase2_client
                        .send_request::<request::WorkDoneProgressCreate>(
                            WorkDoneProgressCreateParams {
                                token: token2.clone(),
                            },
                        )
                        .await;
                    phase2_client
                        .send_notification::<notification::Progress>(ProgressParams {
                            token: token2.clone(),
                            value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                                WorkDoneProgressBegin {
                                    title: "Full project index".to_string(),
                                    message: Some(format!(
                                        "Compiling {} files (src + test + script)...",
                                        full_count,
                                    )),
                                    cancellable: Some(false),
                                    percentage: None,
                                },
                            )),
                        })
                        .await;

                    match crate::solc::solc_project_index_scoped(
                        &phase2_foundry_config,
                        Some(&phase2_client),
                        None,
                        &full_files,
                    )
                    .await
                    {
                        Ok(ast_data) => {
                            let mut new_build = crate::goto::CachedBuild::new(
                                ast_data,
                                0,
                                Some(&mut *phase2_path_interner.write().await),
                            );
                            // Merge in anything phase 1 indexed that the full
                            // closure somehow missed.
                            if let Some(prev) = phase2_ast_cache.read().await.get(&phase2_cache_key)
                            {
                                new_build.merge_missing_from(prev);
                            }
                            let source_count = new_build.nodes.len();
                            let cached_build = Arc::new(new_build);
                            // Clone now so the save task owns its own copy
                            // independent of the live cache entry.
                            let build_for_save = (*cached_build).clone();
                            phase2_ast_cache
                                .write()
                                .await
                                .insert(phase2_cache_key.clone().into(), cached_build);
                            phase2_client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: phase 2 complete — {} source files indexed in {:.1}s",
                                        source_count,
                                        phase2_start.elapsed().as_secs_f64(),
                                    ),
                                )
                                .await;

                            // Library sub-caches load in yet another task.
                            spawn_load_lib_sub_caches_task(
                                phase2_foundry_config.clone(),
                                phase2_sub_caches,
                                phase2_loading_flag,
                                phase2_path_interner,
                                phase2_client.clone(),
                            );

                            // Persist the freshly built index off the hot path;
                            // save failures are logged but never fatal.
                            let cfg_for_save = phase2_foundry_config.clone();
                            let client_for_save = phase2_client.clone();
                            tokio::spawn(async move {
                                let res = tokio::task::spawn_blocking(move || {
                                    crate::project_cache::save_reference_cache_with_report(
                                        &cfg_for_save,
                                        &build_for_save,
                                        None,
                                    )
                                })
                                .await;
                                match res {
                                    Ok(Ok(_report)) => {}
                                    Ok(Err(e)) => {
                                        client_for_save
                                            .log_message(
                                                MessageType::WARNING,
                                                format!("project index: cache save failed: {e}"),
                                            )
                                            .await;
                                    }
                                    Err(e) => {
                                        client_for_save
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "project index: cache save task failed: {e}"
                                                ),
                                            )
                                            .await;
                                    }
                                }
                            });

                            phase2_client
                                .send_notification::<notification::Progress>(ProgressParams {
                                    token: token2,
                                    value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                        WorkDoneProgressEnd {
                                            message: Some(format!(
                                                "Indexed {} source files in {:.1}s",
                                                source_count,
                                                phase2_start.elapsed().as_secs_f64(),
                                            )),
                                        },
                                    )),
                                })
                                .await;
                        }
                        Err(e) => {
                            phase2_client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("project index: phase 2 failed: {e}"),
                                )
                                .await;
                            phase2_client
                                .send_notification::<notification::Progress>(ProgressParams {
                                    token: token2,
                                    value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                        WorkDoneProgressEnd {
                                            message: Some(format!("Full index failed: {e}",)),
                                        },
                                    )),
                                })
                                .await;
                        }
                    }
                };

                if phase1_ok {
                    // Phase 1 produced a usable index; run phase 2 detached.
                    tokio::spawn(phase2);
                } else {
                    // Phase 1 failed, so run phase 2 inline and then close the
                    // still-open phase 1 progress token.
                    phase2.await;

                    client
                        .send_notification::<notification::Progress>(ProgressParams {
                            token,
                            value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                WorkDoneProgressEnd {
                                    message: Some("Index complete (phase 1 skipped)".to_string()),
                                },
                            )),
                        })
                        .await;
                }
            });
        }
    }
2973
    /// Handles `workspace/executeCommand` for the two custom commands this
    /// server advertises:
    ///
    /// * `solidity.clearCache` — deletes the on-disk reference cache and
    ///   evicts the in-memory root index; returns `{"success": true}` or an
    ///   InternalError if the directory removal fails.
    /// * `solidity.reindex` — evicts the in-memory index, marks the cache
    ///   dirty, and (if no sync worker is active) spawns a debounced
    ///   background worker that rebuilds and persists the index.
    ///
    /// Any other command yields `method_not_found`.
    async fn execute_command(
        &self,
        params: ExecuteCommandParams,
    ) -> tower_lsp::jsonrpc::Result<Option<serde_json::Value>> {
        match params.command.as_str() {
            "solidity.clearCache" => {
                let root = self.foundry_config.read().await.root.clone();
                let cache_dir = crate::project_cache::cache_dir(&root);

                // A missing cache dir counts as success (nothing to delete).
                let disk_result = if cache_dir.exists() {
                    std::fs::remove_dir_all(&cache_dir).map_err(|e| format!("{e}"))
                } else {
                    Ok(())
                };

                // Evict the in-memory entry regardless of the disk outcome so
                // stale data never survives a clear request.
                if let Some(root_key) = self.project_cache_key().await {
                    self.ast_cache.write().await.remove(&root_key);
                }

                match disk_result {
                    Ok(()) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "solidity.clearCache: removed {} and cleared in-memory cache",
                                    cache_dir.display()
                                ),
                            )
                            .await;
                        Ok(Some(serde_json::json!({ "success": true })))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::ERROR,
                                format!("solidity.clearCache: failed to remove cache dir: {e}"),
                            )
                            .await;
                        Err(tower_lsp::jsonrpc::Error {
                            code: tower_lsp::jsonrpc::ErrorCode::InternalError,
                            message: std::borrow::Cow::Owned(e),
                            data: None,
                        })
                    }
                }
            }

            "solidity.reindex" => {
                // Drop the current in-memory index and flag a full rebuild so
                // the sync worker does not do an incremental pass.
                if let Some(root_key) = self.project_cache_key().await {
                    self.ast_cache.write().await.remove(&root_key);
                }
                self.project_cache_dirty
                    .store(true, std::sync::atomic::Ordering::Relaxed);
                self.project_cache_force_full_rebuild
                    .store(true, std::sync::atomic::Ordering::Release);

                // Only one sync worker may run at a time; this helper either
                // claims the running slot (returns true -> spawn a worker) or
                // just marks work pending for the existing worker.
                if start_or_mark_project_cache_sync_pending(
                    &self.project_cache_sync_pending,
                    &self.project_cache_sync_running,
                ) {
                    // Snapshot/clone everything the detached worker needs.
                    let foundry_config = self.foundry_config.read().await.clone();
                    let root_key = self.project_cache_key().await;
                    let ast_cache = self.ast_cache.clone();
                    let client = self.client.clone();
                    let dirty_flag = self.project_cache_dirty.clone();
                    let running_flag = self.project_cache_sync_running.clone();
                    let pending_flag = self.project_cache_sync_pending.clone();
                    let changed_files = self.project_cache_changed_files.clone();
                    let force_full_rebuild_flag = self.project_cache_force_full_rebuild.clone();
                    let path_interner = self.path_interner.clone();

                    tokio::spawn(async move {
                        loop {
                            // Debounce: coalesce rapid reindex requests.
                            tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                            // No pending work: try to shut down, but re-enter
                            // the loop if new work raced in (reclaim).
                            if !take_project_cache_sync_pending(&pending_flag) {
                                if stop_project_cache_sync_worker_or_reclaim(
                                    &pending_flag,
                                    &running_flag,
                                ) {
                                    continue;
                                }
                                break;
                            }

                            // Someone else already consumed the dirty bit.
                            if !try_claim_project_cache_dirty(&dirty_flag) {
                                continue;
                            }

                            // Preconditions failed: restore the dirty bit so a
                            // later pass retries.
                            let Some(cache_key) = &root_key else {
                                dirty_flag.store(true, Ordering::Release);
                                continue;
                            };
                            if !foundry_config.root.is_dir() {
                                dirty_flag.store(true, Ordering::Release);
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!(
                                            "solidity.reindex cache sync: invalid project root {}, deferring",
                                            foundry_config.root.display()
                                        ),
                                    )
                                    .await;
                                continue;
                            }

                            client
                                .log_message(
                                    MessageType::INFO,
                                    "solidity.reindex: rebuilding project index from disk",
                                )
                                .await;

                            match crate::solc::solc_project_index(
                                &foundry_config,
                                Some(&client),
                                None,
                            )
                            .await
                            {
                                Ok(ast_data) => {
                                    let mut new_build = crate::goto::CachedBuild::new(
                                        ast_data,
                                        0,
                                        Some(&mut *path_interner.write().await),
                                    );
                                    // Retain any entries the fresh build lacks.
                                    if let Some(prev) = ast_cache.read().await.get(cache_key) {
                                        new_build.merge_missing_from(prev);
                                    }
                                    let source_count = new_build.nodes.len();
                                    let cached_build = Arc::new(new_build);
                                    // Independent copy for the blocking save.
                                    let build_for_save = (*cached_build).clone();
                                    ast_cache
                                        .write()
                                        .await
                                        .insert(cache_key.clone().into(), cached_build);

                                    let cfg_for_save = foundry_config.clone();
                                    let save_res = tokio::task::spawn_blocking(move || {
                                        crate::project_cache::save_reference_cache_with_report(
                                            &cfg_for_save,
                                            &build_for_save,
                                            None,
                                        )
                                    })
                                    .await;

                                    match save_res {
                                        Ok(Ok(report)) => {
                                            // Persisted cleanly: the pending
                                            // change list and force-rebuild
                                            // flag are now satisfied.
                                            changed_files.write().await.clear();
                                            force_full_rebuild_flag.store(false, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::INFO,
                                                    format!(
                                                        "solidity.reindex: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                        source_count, report.file_count_hashed, report.duration_ms
                                                    ),
                                                )
                                                .await;
                                        }
                                        // Either failure path re-sets the
                                        // dirty bit so a later pass retries.
                                        Ok(Err(e)) => {
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: persist failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                        Err(e) => {
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: save task failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                    }
                                }
                                Err(e) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "solidity.reindex: re-index failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }

                            // Exit unless new work arrived while we rebuilt.
                            if stop_project_cache_sync_worker_or_reclaim(
                                &pending_flag,
                                &running_flag,
                            ) {
                                continue;
                            }
                            break;
                        }
                    });
                }

                // Respond immediately; the rebuild continues in background.
                self.client
                    .log_message(
                        MessageType::INFO,
                        "solidity.reindex: in-memory cache evicted, background reindex triggered",
                    )
                    .await;
                Ok(Some(serde_json::json!({ "success": true })))
            }

            _ => Err(tower_lsp::jsonrpc::Error::method_not_found()),
        }
    }
3228
3229 async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
3230 self.flush_project_cache_to_disk("shutdown").await;
3231 self.client
3232 .log_message(MessageType::INFO, "lsp server shutting down.")
3233 .await;
3234 Ok(())
3235 }
3236
    /// Handles `textDocument/didOpen`.
    ///
    /// If the template-on-create setting is enabled and the opened document is
    /// an effectively empty on-disk `.sol` file, offers the client a scaffold
    /// (SPDX header / pragma — see `file_operations::generate_scaffold`) via
    /// `workspace/applyEdit` before running the normal on-change pipeline.
    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        self.client
            .log_message(MessageType::INFO, "file opened")
            .await;

        // `td` is mutable: on a successful scaffold its text is replaced so
        // the downstream `on_change` sees the scaffolded content.
        let mut td = params.text_document;
        let template_on_create = self
            .settings
            .read()
            .await
            .file_operations
            .template_on_create;

        // Only scaffold whitespace-only, file-scheme `.sol` documents.
        let should_attempt_scaffold = template_on_create
            && td.text.chars().all(|ch| ch.is_whitespace())
            && td.uri.scheme() == "file"
            && td
                .uri
                .to_file_path()
                .ok()
                .and_then(|p| p.extension().map(|e| e == "sol"))
                .unwrap_or(false);

        if should_attempt_scaffold {
            let uri_str = td.uri.to_string();
            // If the didCreateFiles flow already queued a scaffold for this
            // URI, do not scaffold a second time here.
            let create_flow_pending = {
                let pending = self.pending_create_scaffold.read().await;
                pending.contains(&uri_str)
            };
            if create_flow_pending {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "didOpen: skip scaffold for {} (didCreateFiles scaffold pending)",
                            uri_str
                        ),
                    )
                    .await;
            } else {
                // Guard against racing edits: skip if the text cache already
                // holds real (non-whitespace) content for this document.
                let cache_has_content = {
                    let tc = self.text_cache.read().await;
                    tc.get(&uri_str)
                        .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()))
                };

                if !cache_has_content {
                    // Also check the file on disk, which may differ from the
                    // (empty) buffer the editor just opened.
                    let file_has_content = td.uri.to_file_path().ok().is_some_and(|p| {
                        std::fs::read_to_string(&p)
                            .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()))
                    });

                    if !file_has_content {
                        let solc_version = self.foundry_config.read().await.solc_version.clone();
                        if let Some(scaffold) =
                            file_operations::generate_scaffold(&td.uri, solc_version.as_deref())
                        {
                            // Replace the whole (whitespace-only) document.
                            let end = utils::byte_offset_to_position(&td.text, td.text.len());
                            let edit = WorkspaceEdit {
                                changes: Some(HashMap::from([(
                                    td.uri.clone(),
                                    vec![TextEdit {
                                        range: Range {
                                            start: Position::default(),
                                            end,
                                        },
                                        new_text: scaffold.clone(),
                                    }],
                                )])),
                                document_changes: None,
                                change_annotations: None,
                            };
                            // Only adopt the scaffold text locally if the
                            // client actually applied the edit.
                            if self
                                .client
                                .apply_edit(edit)
                                .await
                                .as_ref()
                                .is_ok_and(|r| r.applied)
                            {
                                td.text = scaffold;
                                self.client
                                    .log_message(
                                        MessageType::INFO,
                                        format!("didOpen: scaffolded empty file {}", uri_str),
                                    )
                                    .await;
                            }
                        }
                    }
                }
            }
        }

        // Normal open path: run diagnostics/caching on the (possibly
        // scaffolded) document.
        self.on_change(td).await
    }
3334
3335 async fn did_change(&self, params: DidChangeTextDocumentParams) {
3336 self.client
3337 .log_message(MessageType::INFO, "file changed")
3338 .await;
3339
3340 if let Some(change) = params.content_changes.into_iter().next() {
3342 let has_substantive_content = change.text.chars().any(|ch| !ch.is_whitespace());
3343 let mut text_cache = self.text_cache.write().await;
3344 text_cache.insert(
3345 params.text_document.uri.to_string().into(),
3346 (params.text_document.version, change.text),
3347 );
3348 drop(text_cache);
3349
3350 if has_substantive_content {
3351 self.pending_create_scaffold
3352 .write()
3353 .await
3354 .remove(params.text_document.uri.as_str());
3355 }
3356 }
3357 }
3358
3359 async fn did_save(&self, params: DidSaveTextDocumentParams) {
3360 let uri_key = params.text_document.uri.to_string();
3365
3366 {
3368 let workers = self.did_save_workers.read().await;
3369 if let Some(tx) = workers.get(&uri_key) {
3370 if tx.send(Some(params.clone())).is_ok() {
3373 return;
3374 }
3375 }
3376 }
3377
3378 let (tx, mut rx) = tokio::sync::watch::channel(Some(params));
3381 self.did_save_workers
3382 .write()
3383 .await
3384 .insert(uri_key.into(), tx);
3385
3386 let this = self.clone();
3387 tokio::spawn(async move {
3388 loop {
3389 if rx.changed().await.is_err() {
3391 break;
3394 }
3395 let params = match rx.borrow_and_update().clone() {
3396 Some(p) => p,
3397 None => continue,
3398 };
3399 run_did_save(this.clone(), params).await;
3400 }
3401 });
3402 }
3403
3404 async fn will_save(&self, params: WillSaveTextDocumentParams) {
3405 self.client
3406 .log_message(
3407 MessageType::INFO,
3408 format!(
3409 "file will save reason:{:?} {}",
3410 params.reason, params.text_document.uri
3411 ),
3412 )
3413 .await;
3414 }
3415
3416 async fn formatting(
3417 &self,
3418 params: DocumentFormattingParams,
3419 ) -> tower_lsp::jsonrpc::Result<Option<Vec<TextEdit>>> {
3420 self.client
3421 .log_message(MessageType::INFO, "formatting request")
3422 .await;
3423
3424 let uri = params.text_document.uri;
3425 let file_path = match uri.to_file_path() {
3426 Ok(path) => path,
3427 Err(_) => {
3428 self.client
3429 .log_message(MessageType::ERROR, "Invalid file URI for formatting")
3430 .await;
3431 return Ok(None);
3432 }
3433 };
3434 let path_str = match file_path.to_str() {
3435 Some(s) => s,
3436 None => {
3437 self.client
3438 .log_message(MessageType::ERROR, "Invalid file path for formatting")
3439 .await;
3440 return Ok(None);
3441 }
3442 };
3443
3444 let original_content = {
3446 let text_cache = self.text_cache.read().await;
3447 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
3448 content.clone()
3449 } else {
3450 match std::fs::read_to_string(&file_path) {
3452 Ok(content) => content,
3453 Err(_) => {
3454 self.client
3455 .log_message(MessageType::ERROR, "Failed to read file for formatting")
3456 .await;
3457 return Ok(None);
3458 }
3459 }
3460 }
3461 };
3462
3463 let formatted_content = match self.compiler.format(path_str).await {
3465 Ok(content) => content,
3466 Err(e) => {
3467 self.client
3468 .log_message(MessageType::WARNING, format!("Formatting failed: {e}"))
3469 .await;
3470 return Ok(None);
3471 }
3472 };
3473
3474 if original_content != formatted_content {
3476 let end = utils::byte_offset_to_position(&original_content, original_content.len());
3477
3478 {
3480 let mut text_cache = self.text_cache.write().await;
3481 let version = text_cache
3482 .get(&uri.to_string())
3483 .map(|(v, _)| *v)
3484 .unwrap_or(0);
3485 text_cache.insert(uri.to_string().into(), (version, formatted_content.clone()));
3486 }
3487
3488 let edit = TextEdit {
3489 range: Range {
3490 start: Position::default(),
3491 end,
3492 },
3493 new_text: formatted_content,
3494 };
3495 Ok(Some(vec![edit]))
3496 } else {
3497 Ok(None)
3498 }
3499 }
3500
3501 async fn did_close(&self, params: DidCloseTextDocumentParams) {
3502 self.flush_project_cache_to_disk("didClose").await;
3503 let uri = params.text_document.uri.to_string();
3504 self.ast_cache.write().await.remove(&uri);
3505 self.text_cache.write().await.remove(&uri);
3506 self.completion_cache.write().await.remove(&uri);
3507 self.client
3508 .log_message(MessageType::INFO, "file closed, caches cleared.")
3509 .await;
3510 }
3511
3512 async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
3513 let s = config::parse_settings(¶ms.settings);
3514 self.client
3515 .log_message(
3516 MessageType::INFO,
3517 format!(
3518 "settings updated: inlayHints.parameters={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
3519 s.inlay_hints.parameters, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
3520 ),
3521 )
3522 .await;
3523 let mut settings = self.settings.write().await;
3524 *settings = s;
3525
3526 let client = self.client.clone();
3528 tokio::spawn(async move {
3529 let _ = client.inlay_hint_refresh().await;
3530 });
3531 }
3532 async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
3533 self.client
3534 .log_message(MessageType::INFO, "workdspace folders changed.")
3535 .await;
3536 }
3537
    /// Handles `workspace/didChangeWatchedFiles` for the watchers registered
    /// in `initialized` (`foundry.toml` and `remappings.txt`).
    ///
    /// A `foundry.toml` change reloads both the lint config and the foundry
    /// config; a `remappings.txt` change is only logged.
    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
        self.client
            .log_message(MessageType::INFO, "watched files have changed.")
            .await;

        for change in &params.changes {
            // Non-file URIs cannot be reloaded from disk; skip them.
            let path = match change.uri.to_file_path() {
                Ok(p) => p,
                Err(_) => continue,
            };

            let filename = path.file_name().and_then(|n| n.to_str());

            if filename == Some("foundry.toml") {
                // Reload lint settings from the changed toml.
                let lint_cfg = config::load_lint_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                            lint_cfg.lint_on_build,
                            lint_cfg.ignore_patterns.len()
                        ),
                    )
                    .await;
                let mut lc = self.lint_config.write().await;
                *lc = lint_cfg;

                // Reload compiler/remapping settings from the same file.
                let foundry_cfg = config::load_foundry_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml: solc={}, remappings={}",
                            foundry_cfg.solc_version.as_deref().unwrap_or("auto"),
                            foundry_cfg.remappings.len()
                        ),
                    )
                    .await;
                if foundry_cfg.via_ir {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
                        )
                        .await;
                }
                let mut fc = self.foundry_config.write().await;
                *fc = foundry_cfg;
                // Stop after the first foundry.toml: one reload covers the
                // batch. NOTE(review): this also skips any remappings.txt
                // entries later in the same batch — presumably acceptable
                // since that branch only logs, but worth confirming.
                break;
            }

            if filename == Some("remappings.txt") {
                // NOTE(review): no reload happens here — only a log line;
                // confirm whether remappings are re-resolved elsewhere.
                self.client
                    .log_message(
                        MessageType::INFO,
                        "remappings.txt changed, config may need refresh",
                    )
                    .await;
            }
        }
    }
3603
    /// Handles `textDocument/completion`.
    ///
    /// Resolution order:
    /// 1. cursor inside an assembly-flags string -> offer `memory-safe`;
    /// 2. cursor inside an import string -> offer project import paths;
    /// 3. quote trigger outside any string context -> no completions;
    /// 4. otherwise, general completion driven by the per-document cache
    ///    (falling back to the project-root cache), with top-level importable
    ///    symbols appended except on `.` (member-access) triggers.
    async fn completion(
        &self,
        params: CompletionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        let trigger_char = params
            .context
            .as_ref()
            .and_then(|ctx| ctx.trigger_character.as_deref());

        // Document text: prefer the live editor buffer, fall back to disk.
        let source_text = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, text)) = text_cache.get(&uri.to_string()) {
                text.clone()
            } else {
                match uri.to_file_path() {
                    Ok(path) => std::fs::read_to_string(&path).unwrap_or_default(),
                    Err(_) => return Ok(None),
                }
            }
        };

        // Per-document completion cache, if one has been populated.
        let local_cached: Option<Arc<completion::CompletionCache>> = {
            let comp_cache = self.completion_cache.read().await;
            comp_cache.get(&uri.to_string()).cloned()
        };

        // Project-wide completion cache attached to the root index build.
        let root_cached: Option<Arc<completion::CompletionCache>> = {
            let root_key = self.project_cache_key().await;
            match root_key {
                Some(root_key) => {
                    let ast_cache = self.ast_cache.read().await;
                    ast_cache
                        .get(&root_key)
                        .map(|root_build| root_build.completion_cache.clone())
                }
                None => None,
            }
        };

        // Document-local data wins over the project-root fallback.
        let cached = local_cached.or(root_cached.clone());

        // Cache miss: warm the per-document completion cache from the AST
        // cache in the background so the next request is faster. This request
        // still proceeds with whatever was available.
        if cached.is_none() {
            let ast_cache = self.ast_cache.clone();
            let completion_cache = self.completion_cache.clone();
            let uri_string = uri.to_string();
            tokio::spawn(async move {
                let cached_build = {
                    let cache = ast_cache.read().await;
                    match cache.get(&uri_string) {
                        Some(v) => v.clone(),
                        None => return,
                    }
                };
                completion_cache
                    .write()
                    .await
                    .insert(uri_string.into(), cached_build.completion_cache.clone());
            });
        }

        let cache_ref = cached.as_deref();

        // Map this document's path to its file id inside the cache, if known.
        let file_id = {
            let uri_path = uri.to_file_path().ok();
            cache_ref.and_then(|c| {
                uri_path.as_ref().and_then(|p| {
                    let path_str = p.to_str()?;
                    c.path_to_file_id.get(path_str).copied()
                })
            })
        };

        let current_file_path = uri
            .to_file_path()
            .ok()
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        // On a quote trigger the cursor position reported by the client is
        // before the just-typed quote; probe one column to the right so the
        // string-context checks below see the inside of the string.
        let check_pos = if matches!(trigger_char, Some("\"") | Some("'")) {
            Position {
                line: position.line,
                character: position.character.saturating_add(1),
            }
        } else {
            position
        };

        // Special case 1: inside `assembly ("...")` flags — the only valid
        // value is the `memory-safe` dialect flag.
        if let Some(asm_range) =
            links::ts_cursor_in_assembly_flags(source_text.as_bytes(), check_pos)
        {
            // Replace from the start of the flag string to the cursor.
            let text_edit = CompletionTextEdit::Edit(TextEdit {
                range: Range {
                    start: Position {
                        line: position.line,
                        character: asm_range.start.character,
                    },
                    end: Position {
                        line: position.line,
                        character: check_pos.character,
                    },
                },
                new_text: "memory-safe".to_string(),
            });
            let item = CompletionItem {
                label: "memory-safe".to_string(),
                kind: Some(CompletionItemKind::VALUE),
                detail: Some("Solidity assembly dialect".to_string()),
                filter_text: Some("memory-safe".to_string()),
                text_edit: Some(text_edit),
                ..Default::default()
            };
            return Ok(Some(CompletionResponse::List(CompletionList {
                is_incomplete: false,
                items: vec![item],
            })));
        }

        // Special case 2: inside an import path string — offer every .sol
        // import path reachable from this file (marked incomplete so the
        // client re-queries as the user types).
        if let Some(import_range) =
            links::ts_cursor_in_import_string(source_text.as_bytes(), check_pos)
        {
            if let Ok(current_file) = uri.to_file_path() {
                let foundry_cfg = self.foundry_config.read().await.clone();
                let project_root = foundry_cfg.root.clone();
                let remappings = crate::solc::resolve_remappings(&foundry_cfg).await;
                // (line, start col, cursor col) of what the user typed so far.
                let typed_range = Some((
                    position.line,
                    import_range.start.character,
                    check_pos.character,
                ));
                let items = completion::all_sol_import_paths(
                    &current_file,
                    &project_root,
                    &remappings,
                    typed_range,
                );
                return Ok(Some(CompletionResponse::List(CompletionList {
                    is_incomplete: true,
                    items,
                })));
            }
            return Ok(None);
        }

        // A quote trigger outside any recognized string context produces no
        // suggestions.
        if matches!(trigger_char, Some("\"") | Some("'")) {
            return Ok(None);
        }

        // Top-level importable symbols are irrelevant after `.` (member
        // access); otherwise derive them from the project-root cache.
        let tail_candidates = if trigger_char == Some(".") {
            vec![]
        } else {
            root_cached.as_deref().map_or_else(Vec::new, |c| {
                completion::top_level_importable_completion_candidates(
                    c,
                    current_file_path.as_deref(),
                    &source_text,
                )
            })
        };

        let result = completion::handle_completion_with_tail_candidates(
            cache_ref,
            &source_text,
            position,
            trigger_char,
            file_id,
            tail_candidates,
        );
        Ok(result)
    }
3806
    /// Handles `textDocument/definition`.
    ///
    /// Strategy depends on whether the buffer is "dirty" (edited since the
    /// last cached compiler build): when dirty, the tree-sitter lookup runs
    /// first (it sees the live text) with an AST name-based fallback; when
    /// clean, the exact AST lookup runs first with tree-sitter as fallback.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/definition request")
            .await;

        let uri = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file uri")
                    .await;
                return Ok(None);
            }
        };

        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        let source_text = String::from_utf8_lossy(&source_bytes).to_string();

        // Identifier under the cursor; used below to validate tree-sitter hits
        // and as the search key for the name-based AST fallback.
        let cursor_name = goto::cursor_context(&source_text, position).map(|ctx| ctx.name);

        // Dirty = the text cache holds a newer version than the cached build,
        // meaning AST byte offsets may no longer line up with the text.
        let (is_dirty, cached_build) = {
            let text_version = self
                .text_cache
                .read()
                .await
                .get(&uri.to_string())
                .map(|(v, _)| *v)
                .unwrap_or(0);
            let cb = self.get_or_fetch_build(&uri, &file_path, false).await;
            let build_version = cb.as_ref().map(|b| b.build_version).unwrap_or(0);
            (text_version > build_version, cb)
        };

        // Checks that a tree-sitter result really points at a declaration of
        // the name under the cursor. Errs on the side of accepting the result
        // when the cursor name or the target file's text is unavailable.
        let validate_ts = |loc: &Location| -> bool {
            let Some(ref name) = cursor_name else {
                return true;
            };
            let target_src = if loc.uri == uri {
                Some(source_text.clone())
            } else {
                loc.uri
                    .to_file_path()
                    .ok()
                    .and_then(|p| std::fs::read_to_string(&p).ok())
            };
            match target_src {
                Some(src) => goto::validate_goto_target(&src, loc, name),
                None => true,
            }
        };

        if is_dirty {
            self.client
                .log_message(MessageType::INFO, "file is dirty, trying tree-sitter first")
                .await;

            // Both caches are read under short-lived guards inside this block
            // so the locks are released before any await on the client below.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(
                        MessageType::INFO,
                        "tree-sitter result failed validation, trying AST fallback",
                    )
                    .await;
            }

            // AST fallback for dirty buffers: byte offsets are stale, so look
            // the declaration up by name, with the cursor's byte offset used
            // only as a disambiguation hint.
            if let Some(ref cb) = cached_build
                && let Some(ref name) = cursor_name
            {
                let byte_hint = goto::pos_to_bytes(&source_bytes, position);
                if let Some(location) = goto::goto_declaration_by_name(cb, &uri, name, byte_hint) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (AST by name) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
            }
        } else {
            // Clean buffer: cached AST offsets are trustworthy, use them first.
            if let Some(ref cb) = cached_build
                && let Some(location) =
                    goto::goto_declaration_cached(cb, &uri, position, &source_bytes)
            {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "found definition (AST) at {}:{}",
                            location.uri, location.range.start.line
                        ),
                    )
                    .await;
                return Ok(Some(GotoDefinitionResponse::from(location)));
            }

            // Tree-sitter fallback when the AST lookup found nothing.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter fallback) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(MessageType::INFO, "tree-sitter fallback failed validation")
                    .await;
            }
        }

        self.client
            .log_message(MessageType::INFO, "no definition found")
            .await;
        Ok(None)
    }
3988
3989 async fn goto_declaration(
3990 &self,
3991 params: request::GotoDeclarationParams,
3992 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoDeclarationResponse>> {
3993 self.client
3994 .log_message(MessageType::INFO, "got textDocument/declaration request")
3995 .await;
3996
3997 let uri = params.text_document_position_params.text_document.uri;
3998 let position = params.text_document_position_params.position;
3999
4000 let file_path = match uri.to_file_path() {
4001 Ok(path) => path,
4002 Err(_) => {
4003 self.client
4004 .log_message(MessageType::ERROR, "invalid file uri")
4005 .await;
4006 return Ok(None);
4007 }
4008 };
4009
4010 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4011 Some(bytes) => bytes,
4012 None => return Ok(None),
4013 };
4014
4015 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4016 let cached_build = match cached_build {
4017 Some(cb) => cb,
4018 None => return Ok(None),
4019 };
4020
4021 if let Some(location) =
4022 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
4023 {
4024 self.client
4025 .log_message(
4026 MessageType::INFO,
4027 format!(
4028 "found declaration at {}:{}",
4029 location.uri, location.range.start.line
4030 ),
4031 )
4032 .await;
4033 Ok(Some(request::GotoDeclarationResponse::from(location)))
4034 } else {
4035 self.client
4036 .log_message(MessageType::INFO, "no declaration found")
4037 .await;
4038 Ok(None)
4039 }
4040 }
4041
4042 async fn goto_implementation(
4043 &self,
4044 params: request::GotoImplementationParams,
4045 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoImplementationResponse>> {
4046 self.client
4047 .log_message(MessageType::INFO, "got textDocument/implementation request")
4048 .await;
4049
4050 let uri = params.text_document_position_params.text_document.uri;
4051 let position = params.text_document_position_params.position;
4052
4053 let file_path = match uri.to_file_path() {
4054 Ok(path) => path,
4055 Err(_) => {
4056 self.client
4057 .log_message(MessageType::ERROR, "invalid file uri")
4058 .await;
4059 return Ok(None);
4060 }
4061 };
4062
4063 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4064 Some(bytes) => bytes,
4065 None => return Ok(None),
4066 };
4067
4068 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4069 let cached_build = match cached_build {
4070 Some(cb) => cb,
4071 None => return Ok(None),
4072 };
4073
4074 let byte_position = goto::pos_to_bytes(&source_bytes, position);
4075 let abs_path = uri.as_ref().strip_prefix("file://").unwrap_or(uri.as_ref());
4076
4077 let (target_id, target_decl_abs, target_decl_offset) =
4081 match references::byte_to_id(&cached_build.nodes, abs_path, byte_position) {
4082 Some(id) => {
4083 let resolved = cached_build
4084 .nodes
4085 .get(abs_path)
4086 .and_then(|f| f.get(&id))
4087 .and_then(|info| info.referenced_declaration)
4088 .unwrap_or(id);
4089
4090 let (decl_abs, decl_offset) = references::resolve_target_location(
4092 &cached_build,
4093 &uri,
4094 position,
4095 &source_bytes,
4096 )
4097 .unwrap_or_else(|| (abs_path.to_string(), byte_position));
4098
4099 (resolved, decl_abs, decl_offset)
4100 }
4101 None => return Ok(None),
4102 };
4103
4104 let project_build = self.ensure_project_cached_build().await;
4106 let sub_caches = self.sub_caches.read().await;
4107
4108 let mut builds: Vec<&goto::CachedBuild> = vec![&cached_build];
4109 if let Some(ref pb) = project_build {
4110 builds.push(pb);
4111 }
4112 for sc in sub_caches.iter() {
4113 builds.push(sc);
4114 }
4115
4116 let mut locations: Vec<Location> = Vec::new();
4121 let mut seen_positions: Vec<(String, u32, u32)> = Vec::new(); for build in &builds {
4124 let local_target =
4126 references::byte_to_id(&build.nodes, &target_decl_abs, target_decl_offset).or_else(
4127 || {
4128 if build.nodes.values().any(|f| f.contains_key(&target_id)) {
4130 Some(target_id)
4131 } else {
4132 None
4133 }
4134 },
4135 );
4136
4137 let Some(local_id) = local_target else {
4138 continue;
4139 };
4140
4141 let Some(impls) = build.base_function_implementation.get(&local_id) else {
4143 continue;
4144 };
4145
4146 for &impl_id in impls {
4147 if let Some(loc) =
4148 references::id_to_location(&build.nodes, &build.id_to_path_map, impl_id)
4149 {
4150 let key = (
4152 loc.uri.to_string(),
4153 loc.range.start.line,
4154 loc.range.start.character,
4155 );
4156 if !seen_positions.contains(&key) {
4157 seen_positions.push(key);
4158 locations.push(loc);
4159 }
4160 }
4161 }
4162 }
4163
4164 if locations.is_empty() {
4165 self.client
4166 .log_message(MessageType::INFO, "no implementations found")
4167 .await;
4168 return Ok(None);
4169 }
4170
4171 self.client
4172 .log_message(
4173 MessageType::INFO,
4174 format!("found {} implementation(s)", locations.len()),
4175 )
4176 .await;
4177
4178 if locations.len() == 1 {
4179 Ok(Some(request::GotoImplementationResponse::Scalar(
4180 locations.into_iter().next().unwrap(),
4181 )))
4182 } else {
4183 Ok(Some(request::GotoImplementationResponse::Array(locations)))
4184 }
4185 }
4186
    /// Handles `textDocument/references`.
    ///
    /// Collects references from the current file's build, the project-wide
    /// build, and any sub-project caches. When solc-based indexing with a
    /// full project scan is enabled and the project build does not yet know
    /// about the current file, a scoped "warm refresh" reindexes the file's
    /// reverse-import closure and merges the result into the project cache
    /// before searching.
    async fn references(
        &self,
        params: ReferenceParams,
    ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
        self.client
            .log_message(MessageType::INFO, "Got a textDocument/references request")
            .await;

        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return Ok(None);
            }
        };
        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };
        let file_build = self.get_or_fetch_build(&uri, &file_path, true).await;
        let file_build = match file_build {
            Some(cb) => cb,
            None => return Ok(None),
        };
        let mut project_build = self.ensure_project_cached_build().await;
        let current_abs = file_path.to_string_lossy().to_string();
        // Warm-refresh: only when solc indexing + full project scan are on and
        // the project build has no entry for this file.
        if self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && project_build
                .as_ref()
                .is_some_and(|b| !b.nodes.contains_key(current_abs.as_str()))
        {
            let foundry_config = self.foundry_config_for_file(&file_path).await;
            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
            let changed = vec![PathBuf::from(&current_abs)];
            let cfg_for_plan = foundry_config.clone();
            let remappings_for_plan = remappings.clone();
            // The import-closure walk is blocking work, so it is moved off the
            // async executor; a join failure degrades to an empty set.
            let affected_set = tokio::task::spawn_blocking(move || {
                compute_reverse_import_closure(&cfg_for_plan, &changed, &remappings_for_plan)
            })
            .await
            .ok()
            .unwrap_or_default();
            let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
            // At minimum, reindex the current file itself.
            if affected_files.is_empty() {
                affected_files.push(PathBuf::from(&current_abs));
            }
            // Snapshot open-buffer text so the reindex sees unsaved edits.
            let text_cache_snapshot = self.text_cache.read().await.clone();
            match crate::solc::solc_project_index_scoped(
                &foundry_config,
                Some(&self.client),
                Some(&text_cache_snapshot),
                &affected_files,
            )
            .await
            {
                Ok(ast_data) => {
                    let scoped_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *self.path_interner.write().await),
                    ));
                    if let Some(root_key) = self.project_cache_key().await {
                        // Merge the scoped build into the existing project
                        // cache entry under the write lock; on merge failure,
                        // fall back to the scoped build alone.
                        let merged = {
                            let mut cache = self.ast_cache.write().await;
                            let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                                let mut merged = (*existing).clone();
                                match merge_scoped_cached_build(
                                    &mut merged,
                                    (*scoped_build).clone(),
                                ) {
                                    Ok(_) => Arc::new(merged),
                                    Err(_) => scoped_build.clone(),
                                }
                            } else {
                                scoped_build.clone()
                            };
                            cache.insert(root_key.into(), merged.clone());
                            merged
                        };
                        project_build = Some(merged);
                    } else {
                        project_build = Some(scoped_build);
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-refresh: scoped reindex applied (affected={})",
                                affected_files.len()
                            ),
                        )
                        .await;
                }
                Err(e) => {
                    // Non-fatal: search continues with the stale project build.
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!("references warm-refresh: scoped reindex failed: {e}"),
                        )
                        .await;
                }
            }
        }

        // References within the current file's own build.
        let mut locations = references::goto_references_cached(
            &file_build,
            &uri,
            position,
            &source_bytes,
            None,
            params.context.include_declaration,
        );

        // Cross-file references require resolving the target declaration to a
        // (path, byte-offset) pair first.
        if let Some((def_abs_path, def_byte_offset)) =
            references::resolve_target_location(&file_build, &uri, position, &source_bytes)
        {
            if let Some(project_build) = project_build {
                // The current file is excluded here; its references were
                // already collected from `file_build` above.
                let other_locations = references::goto_references_for_target(
                    &project_build,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                    Some(&current_abs),
                );
                locations.extend(other_locations);
            }

            let sub_caches = self.sub_caches.read().await;
            for sub_cache in sub_caches.iter() {
                let sub_locations = references::goto_references_for_target(
                    sub_cache,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                    None,
                );
                locations.extend(sub_locations);
            }
        }

        // Multiple builds can report the same location; dedup before replying.
        locations = references::dedup_locations(locations);

        if locations.is_empty() {
            self.client
                .log_message(MessageType::INFO, "No references found")
                .await;
            Ok(None)
        } else {
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("Found {} references", locations.len()),
                )
                .await;
            Ok(Some(locations))
        }
    }
4365
4366 async fn prepare_rename(
4367 &self,
4368 params: TextDocumentPositionParams,
4369 ) -> tower_lsp::jsonrpc::Result<Option<PrepareRenameResponse>> {
4370 self.client
4371 .log_message(MessageType::INFO, "got textDocument/prepareRename request")
4372 .await;
4373
4374 let uri = params.text_document.uri;
4375 let position = params.position;
4376
4377 let file_path = match uri.to_file_path() {
4378 Ok(path) => path,
4379 Err(_) => {
4380 self.client
4381 .log_message(MessageType::ERROR, "invalid file uri")
4382 .await;
4383 return Ok(None);
4384 }
4385 };
4386
4387 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4388 Some(bytes) => bytes,
4389 None => return Ok(None),
4390 };
4391
4392 if let Some(range) = rename::get_identifier_range(&source_bytes, position) {
4393 self.client
4394 .log_message(
4395 MessageType::INFO,
4396 format!(
4397 "prepare rename range: {}:{}",
4398 range.start.line, range.start.character
4399 ),
4400 )
4401 .await;
4402 Ok(Some(PrepareRenameResponse::Range(range)))
4403 } else {
4404 self.client
4405 .log_message(MessageType::INFO, "no identifier found for prepare rename")
4406 .await;
4407 Ok(None)
4408 }
4409 }
4410
4411 async fn rename(
4412 &self,
4413 params: RenameParams,
4414 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
4415 self.client
4416 .log_message(MessageType::INFO, "got textDocument/rename request")
4417 .await;
4418
4419 let uri = params.text_document_position.text_document.uri;
4420 let position = params.text_document_position.position;
4421 let new_name = params.new_name;
4422 let file_path = match uri.to_file_path() {
4423 Ok(p) => p,
4424 Err(_) => {
4425 self.client
4426 .log_message(MessageType::ERROR, "invalid file uri")
4427 .await;
4428 return Ok(None);
4429 }
4430 };
4431 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4432 Some(bytes) => bytes,
4433 None => return Ok(None),
4434 };
4435
4436 let current_identifier = match rename::get_identifier_at_position(&source_bytes, position) {
4437 Some(id) => id,
4438 None => {
4439 self.client
4440 .log_message(MessageType::ERROR, "No identifier found at position")
4441 .await;
4442 return Ok(None);
4443 }
4444 };
4445
4446 if !utils::is_valid_solidity_identifier(&new_name) {
4447 return Err(tower_lsp::jsonrpc::Error::invalid_params(
4448 "new name is not a valid solidity identifier",
4449 ));
4450 }
4451
4452 if new_name == current_identifier {
4453 self.client
4454 .log_message(
4455 MessageType::INFO,
4456 "new name is the same as current identifier",
4457 )
4458 .await;
4459 return Ok(None);
4460 }
4461
4462 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4463 let cached_build = match cached_build {
4464 Some(cb) => cb,
4465 None => return Ok(None),
4466 };
4467 let other_builds: Vec<Arc<goto::CachedBuild>> = {
4468 let cache = self.ast_cache.read().await;
4469 cache
4470 .iter()
4471 .filter(|(key, _)| key.as_str() != uri.to_string())
4472 .map(|(_, v)| v.clone())
4473 .collect()
4474 };
4475 let other_refs: Vec<&goto::CachedBuild> = other_builds.iter().map(|v| v.as_ref()).collect();
4476
4477 let text_buffers: HashMap<String, Vec<u8>> = {
4481 let text_cache = self.text_cache.read().await;
4482 text_cache
4483 .iter()
4484 .map(|(uri, (_, content))| (uri.to_string(), content.as_bytes().to_vec()))
4485 .collect()
4486 };
4487
4488 match rename::rename_symbol(
4489 &cached_build,
4490 &uri,
4491 position,
4492 &source_bytes,
4493 new_name,
4494 &other_refs,
4495 &text_buffers,
4496 ) {
4497 Some(workspace_edit) => {
4498 self.client
4499 .log_message(
4500 MessageType::INFO,
4501 format!(
4502 "created rename edit with {} file(s), {} total change(s)",
4503 workspace_edit
4504 .changes
4505 .as_ref()
4506 .map(|c| c.len())
4507 .unwrap_or(0),
4508 workspace_edit
4509 .changes
4510 .as_ref()
4511 .map(|c| c.values().map(|v| v.len()).sum::<usize>())
4512 .unwrap_or(0)
4513 ),
4514 )
4515 .await;
4516
4517 Ok(Some(workspace_edit))
4522 }
4523
4524 None => {
4525 self.client
4526 .log_message(MessageType::INFO, "No locations found for renaming")
4527 .await;
4528 Ok(None)
4529 }
4530 }
4531 }
4532
4533 async fn symbol(
4534 &self,
4535 params: WorkspaceSymbolParams,
4536 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SymbolInformation>>> {
4537 self.client
4538 .log_message(MessageType::INFO, "got workspace/symbol request")
4539 .await;
4540
4541 let files: Vec<(Url, String)> = {
4543 let cache = self.text_cache.read().await;
4544 cache
4545 .iter()
4546 .filter(|(uri_str, _)| uri_str.ends_with(".sol"))
4547 .filter_map(|(uri_str, (_, content))| {
4548 Url::parse(uri_str).ok().map(|uri| (uri, content.clone()))
4549 })
4550 .collect()
4551 };
4552
4553 let mut all_symbols = symbols::extract_workspace_symbols(&files);
4554 if !params.query.is_empty() {
4555 let query = params.query.to_lowercase();
4556 all_symbols.retain(|symbol| symbol.name.to_lowercase().contains(&query));
4557 }
4558 if all_symbols.is_empty() {
4559 self.client
4560 .log_message(MessageType::INFO, "No symbols found")
4561 .await;
4562 Ok(None)
4563 } else {
4564 self.client
4565 .log_message(
4566 MessageType::INFO,
4567 format!("found {} symbols", all_symbols.len()),
4568 )
4569 .await;
4570 Ok(Some(all_symbols))
4571 }
4572 }
4573
4574 async fn document_symbol(
4575 &self,
4576 params: DocumentSymbolParams,
4577 ) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
4578 self.client
4579 .log_message(MessageType::INFO, "got textDocument/documentSymbol request")
4580 .await;
4581 let uri = params.text_document.uri;
4582 let file_path = match uri.to_file_path() {
4583 Ok(path) => path,
4584 Err(_) => {
4585 self.client
4586 .log_message(MessageType::ERROR, "invalid file uri")
4587 .await;
4588 return Ok(None);
4589 }
4590 };
4591
4592 let source = {
4594 let cache = self.text_cache.read().await;
4595 cache
4596 .get(&uri.to_string())
4597 .map(|(_, content)| content.clone())
4598 };
4599 let source = match source {
4600 Some(s) => s,
4601 None => match std::fs::read_to_string(&file_path) {
4602 Ok(s) => s,
4603 Err(_) => return Ok(None),
4604 },
4605 };
4606
4607 let symbols = symbols::extract_document_symbols(&source);
4608 if symbols.is_empty() {
4609 self.client
4610 .log_message(MessageType::INFO, "no document symbols found")
4611 .await;
4612 Ok(None)
4613 } else {
4614 self.client
4615 .log_message(
4616 MessageType::INFO,
4617 format!("found {} document symbols", symbols.len()),
4618 )
4619 .await;
4620 Ok(Some(DocumentSymbolResponse::Nested(symbols)))
4621 }
4622 }
4623
4624 async fn document_highlight(
4625 &self,
4626 params: DocumentHighlightParams,
4627 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentHighlight>>> {
4628 self.client
4629 .log_message(
4630 MessageType::INFO,
4631 "got textDocument/documentHighlight request",
4632 )
4633 .await;
4634
4635 let uri = params.text_document_position_params.text_document.uri;
4636 let position = params.text_document_position_params.position;
4637
4638 let source = {
4639 let cache = self.text_cache.read().await;
4640 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4641 };
4642
4643 let source = match source {
4644 Some(s) => s,
4645 None => {
4646 let file_path = match uri.to_file_path() {
4647 Ok(p) => p,
4648 Err(_) => return Ok(None),
4649 };
4650 match std::fs::read_to_string(&file_path) {
4651 Ok(s) => s,
4652 Err(_) => return Ok(None),
4653 }
4654 }
4655 };
4656
4657 let highlights = highlight::document_highlights(&source, position);
4658
4659 if highlights.is_empty() {
4660 self.client
4661 .log_message(MessageType::INFO, "no document highlights found")
4662 .await;
4663 Ok(None)
4664 } else {
4665 self.client
4666 .log_message(
4667 MessageType::INFO,
4668 format!("found {} document highlights", highlights.len()),
4669 )
4670 .await;
4671 Ok(Some(highlights))
4672 }
4673 }
4674
4675 async fn hover(&self, params: HoverParams) -> tower_lsp::jsonrpc::Result<Option<Hover>> {
4676 self.client
4677 .log_message(MessageType::INFO, "got textDocument/hover request")
4678 .await;
4679
4680 let uri = params.text_document_position_params.text_document.uri;
4681 let position = params.text_document_position_params.position;
4682
4683 let file_path = match uri.to_file_path() {
4684 Ok(path) => path,
4685 Err(_) => {
4686 self.client
4687 .log_message(MessageType::ERROR, "invalid file uri")
4688 .await;
4689 return Ok(None);
4690 }
4691 };
4692
4693 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4694 Some(bytes) => bytes,
4695 None => return Ok(None),
4696 };
4697
4698 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4699 let cached_build = match cached_build {
4700 Some(cb) => cb,
4701 None => return Ok(None),
4702 };
4703
4704 let result = hover::hover_info(&cached_build, &uri, position, &source_bytes);
4705
4706 if result.is_some() {
4707 self.client
4708 .log_message(MessageType::INFO, "hover info found")
4709 .await;
4710 } else {
4711 self.client
4712 .log_message(MessageType::INFO, "no hover info found")
4713 .await;
4714 }
4715
4716 Ok(result)
4717 }
4718
4719 async fn signature_help(
4720 &self,
4721 params: SignatureHelpParams,
4722 ) -> tower_lsp::jsonrpc::Result<Option<SignatureHelp>> {
4723 self.client
4724 .log_message(MessageType::INFO, "got textDocument/signatureHelp request")
4725 .await;
4726
4727 let uri = params.text_document_position_params.text_document.uri;
4728 let position = params.text_document_position_params.position;
4729
4730 let file_path = match uri.to_file_path() {
4731 Ok(path) => path,
4732 Err(_) => {
4733 self.client
4734 .log_message(MessageType::ERROR, "invalid file uri")
4735 .await;
4736 return Ok(None);
4737 }
4738 };
4739
4740 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4741 Some(bytes) => bytes,
4742 None => return Ok(None),
4743 };
4744
4745 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4746 let cached_build = match cached_build {
4747 Some(cb) => cb,
4748 None => return Ok(None),
4749 };
4750
4751 let result = hover::signature_help(&cached_build, &source_bytes, position);
4752
4753 Ok(result)
4754 }
4755
4756 async fn document_link(
4757 &self,
4758 params: DocumentLinkParams,
4759 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
4760 self.client
4761 .log_message(MessageType::INFO, "got textDocument/documentLink request")
4762 .await;
4763
4764 let uri = params.text_document.uri;
4765 let file_path = match uri.to_file_path() {
4766 Ok(path) => path,
4767 Err(_) => {
4768 self.client
4769 .log_message(MessageType::ERROR, "invalid file uri")
4770 .await;
4771 return Ok(None);
4772 }
4773 };
4774
4775 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4776 Some(bytes) => bytes,
4777 None => return Ok(None),
4778 };
4779
4780 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4781 let cached_build = match cached_build {
4782 Some(cb) => cb,
4783 None => return Ok(None),
4784 };
4785
4786 let result = links::document_links(&cached_build, &uri, &source_bytes);
4787
4788 if result.is_empty() {
4789 self.client
4790 .log_message(MessageType::INFO, "no document links found")
4791 .await;
4792 Ok(None)
4793 } else {
4794 self.client
4795 .log_message(
4796 MessageType::INFO,
4797 format!("found {} document links", result.len()),
4798 )
4799 .await;
4800 Ok(Some(result))
4801 }
4802 }
4803
4804 async fn semantic_tokens_full(
4805 &self,
4806 params: SemanticTokensParams,
4807 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
4808 self.client
4809 .log_message(
4810 MessageType::INFO,
4811 "got textDocument/semanticTokens/full request",
4812 )
4813 .await;
4814
4815 let uri = params.text_document.uri;
4816 let source = {
4817 let cache = self.text_cache.read().await;
4818 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4819 };
4820
4821 let source = match source {
4822 Some(s) => s,
4823 None => {
4824 let file_path = match uri.to_file_path() {
4826 Ok(p) => p,
4827 Err(_) => return Ok(None),
4828 };
4829 match std::fs::read_to_string(&file_path) {
4830 Ok(s) => s,
4831 Err(_) => return Ok(None),
4832 }
4833 }
4834 };
4835
4836 let mut tokens = semantic_tokens::semantic_tokens_full(&source);
4837
4838 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
4840 let result_id = id.to_string();
4841 tokens.result_id = Some(result_id.clone());
4842
4843 {
4844 let mut cache = self.semantic_token_cache.write().await;
4845 cache.insert(uri.to_string().into(), (result_id, tokens.data.clone()));
4846 }
4847
4848 Ok(Some(SemanticTokensResult::Tokens(tokens)))
4849 }
4850
4851 async fn semantic_tokens_range(
4852 &self,
4853 params: SemanticTokensRangeParams,
4854 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensRangeResult>> {
4855 self.client
4856 .log_message(
4857 MessageType::INFO,
4858 "got textDocument/semanticTokens/range request",
4859 )
4860 .await;
4861
4862 let uri = params.text_document.uri;
4863 let range = params.range;
4864 let source = {
4865 let cache = self.text_cache.read().await;
4866 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4867 };
4868
4869 let source = match source {
4870 Some(s) => s,
4871 None => {
4872 let file_path = match uri.to_file_path() {
4873 Ok(p) => p,
4874 Err(_) => return Ok(None),
4875 };
4876 match std::fs::read_to_string(&file_path) {
4877 Ok(s) => s,
4878 Err(_) => return Ok(None),
4879 }
4880 }
4881 };
4882
4883 let tokens =
4884 semantic_tokens::semantic_tokens_range(&source, range.start.line, range.end.line);
4885
4886 Ok(Some(SemanticTokensRangeResult::Tokens(tokens)))
4887 }
4888
4889 async fn semantic_tokens_full_delta(
4890 &self,
4891 params: SemanticTokensDeltaParams,
4892 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
4893 self.client
4894 .log_message(
4895 MessageType::INFO,
4896 "got textDocument/semanticTokens/full/delta request",
4897 )
4898 .await;
4899
4900 let uri = params.text_document.uri;
4901 let previous_result_id = params.previous_result_id;
4902
4903 let source = {
4904 let cache = self.text_cache.read().await;
4905 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4906 };
4907
4908 let source = match source {
4909 Some(s) => s,
4910 None => {
4911 let file_path = match uri.to_file_path() {
4912 Ok(p) => p,
4913 Err(_) => return Ok(None),
4914 };
4915 match std::fs::read_to_string(&file_path) {
4916 Ok(s) => s,
4917 Err(_) => return Ok(None),
4918 }
4919 }
4920 };
4921
4922 let mut new_tokens = semantic_tokens::semantic_tokens_full(&source);
4923
4924 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
4926 let new_result_id = id.to_string();
4927 new_tokens.result_id = Some(new_result_id.clone());
4928
4929 let uri_str = uri.to_string();
4930
4931 let old_tokens = {
4933 let cache = self.semantic_token_cache.read().await;
4934 cache
4935 .get(&uri_str)
4936 .filter(|(rid, _)| *rid == previous_result_id)
4937 .map(|(_, tokens)| tokens.clone())
4938 };
4939
4940 {
4942 let mut cache = self.semantic_token_cache.write().await;
4943 cache.insert(
4944 uri_str.into(),
4945 (new_result_id.clone(), new_tokens.data.clone()),
4946 );
4947 }
4948
4949 match old_tokens {
4950 Some(old) => {
4951 let edits = semantic_tokens::compute_delta(&old, &new_tokens.data);
4953 Ok(Some(SemanticTokensFullDeltaResult::TokensDelta(
4954 SemanticTokensDelta {
4955 result_id: Some(new_result_id),
4956 edits,
4957 },
4958 )))
4959 }
4960 None => {
4961 Ok(Some(SemanticTokensFullDeltaResult::Tokens(new_tokens)))
4963 }
4964 }
4965 }
4966
4967 async fn folding_range(
4968 &self,
4969 params: FoldingRangeParams,
4970 ) -> tower_lsp::jsonrpc::Result<Option<Vec<FoldingRange>>> {
4971 self.client
4972 .log_message(MessageType::INFO, "got textDocument/foldingRange request")
4973 .await;
4974
4975 let uri = params.text_document.uri;
4976
4977 let source = {
4978 let cache = self.text_cache.read().await;
4979 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4980 };
4981
4982 let source = match source {
4983 Some(s) => s,
4984 None => {
4985 let file_path = match uri.to_file_path() {
4986 Ok(p) => p,
4987 Err(_) => return Ok(None),
4988 };
4989 match std::fs::read_to_string(&file_path) {
4990 Ok(s) => s,
4991 Err(_) => return Ok(None),
4992 }
4993 }
4994 };
4995
4996 let ranges = folding::folding_ranges(&source);
4997
4998 if ranges.is_empty() {
4999 self.client
5000 .log_message(MessageType::INFO, "no folding ranges found")
5001 .await;
5002 Ok(None)
5003 } else {
5004 self.client
5005 .log_message(
5006 MessageType::INFO,
5007 format!("found {} folding ranges", ranges.len()),
5008 )
5009 .await;
5010 Ok(Some(ranges))
5011 }
5012 }
5013
5014 async fn selection_range(
5015 &self,
5016 params: SelectionRangeParams,
5017 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SelectionRange>>> {
5018 self.client
5019 .log_message(MessageType::INFO, "got textDocument/selectionRange request")
5020 .await;
5021
5022 let uri = params.text_document.uri;
5023
5024 let source = {
5025 let cache = self.text_cache.read().await;
5026 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
5027 };
5028
5029 let source = match source {
5030 Some(s) => s,
5031 None => {
5032 let file_path = match uri.to_file_path() {
5033 Ok(p) => p,
5034 Err(_) => return Ok(None),
5035 };
5036 match std::fs::read_to_string(&file_path) {
5037 Ok(s) => s,
5038 Err(_) => return Ok(None),
5039 }
5040 }
5041 };
5042
5043 let ranges = selection::selection_ranges(&source, ¶ms.positions);
5044
5045 if ranges.is_empty() {
5046 self.client
5047 .log_message(MessageType::INFO, "no selection ranges found")
5048 .await;
5049 Ok(None)
5050 } else {
5051 self.client
5052 .log_message(
5053 MessageType::INFO,
5054 format!("found {} selection ranges", ranges.len()),
5055 )
5056 .await;
5057 Ok(Some(ranges))
5058 }
5059 }
5060
5061 async fn inlay_hint(
5062 &self,
5063 params: InlayHintParams,
5064 ) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
5065 self.client
5066 .log_message(MessageType::INFO, "got textDocument/inlayHint request")
5067 .await;
5068
5069 let uri = params.text_document.uri;
5070 let range = params.range;
5071
5072 let file_path = match uri.to_file_path() {
5073 Ok(path) => path,
5074 Err(_) => {
5075 self.client
5076 .log_message(MessageType::ERROR, "invalid file uri")
5077 .await;
5078 return Ok(None);
5079 }
5080 };
5081
5082 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
5083 Some(bytes) => bytes,
5084 None => return Ok(None),
5085 };
5086
5087 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
5088 let cached_build = match cached_build {
5089 Some(cb) => cb,
5090 None => return Ok(None),
5091 };
5092
5093 let mut hints = inlay_hints::inlay_hints(&cached_build, &uri, range, &source_bytes);
5094
5095 let settings = self.settings.read().await;
5097 if !settings.inlay_hints.parameters {
5098 hints.retain(|h| h.kind != Some(InlayHintKind::PARAMETER));
5099 }
5100 if hints.is_empty() {
5101 self.client
5102 .log_message(MessageType::INFO, "no inlay hints found")
5103 .await;
5104 Ok(None)
5105 } else {
5106 self.client
5107 .log_message(
5108 MessageType::INFO,
5109 format!("found {} inlay hints", hints.len()),
5110 )
5111 .await;
5112 Ok(Some(hints))
5113 }
5114 }
5115
    /// Handles `textDocument/codeAction`.
    ///
    /// For each diagnostic in the request context, produces a quick-fix
    /// action when one is available:
    /// 1. a hard-coded "Remove unused import" fix for the string code
    ///    `unused-import`;
    /// 2. a data-driven fix looked up in `self.code_action_db` by numeric
    ///    error code, dispatched over the `FixKind` of the matching entry.
    /// Returns `Ok(None)` when no actions were produced.
    async fn code_action(
        &self,
        params: CodeActionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CodeActionResponse>> {
        use crate::code_actions::FixKind;

        let uri = &params.text_document.uri;

        // Full document text; edits are computed by re-parsing this source
        // around the diagnostic's range.
        let source: Option<String> = if let Ok(path) = uri.to_file_path() {
            self.get_source_bytes(uri, &path)
                .await
                .map(|b| String::from_utf8_lossy(&b).into_owned())
        } else {
            None
        };

        let db = &self.code_action_db;
        let mut actions: Vec<CodeActionOrCommand> = Vec::new();

        for diag in &params.context.diagnostics {
            // Special case: lint codes are strings; "unused-import" gets a
            // dedicated delete-the-import-directive fix.
            if let Some(NumberOrString::String(s)) = &diag.code {
                if s == "unused-import" {
                    if let Some(edit) = source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::DeleteNodeByKind {
                                node_kind: "import_directive",
                            },
                        )
                    }) {
                        let mut changes = HashMap::new();
                        changes.insert(uri.clone(), vec![edit]);
                        actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                            title: "Remove unused import".to_string(),
                            kind: Some(CodeActionKind::QUICKFIX),
                            diagnostics: Some(vec![diag.clone()]),
                            edit: Some(WorkspaceEdit {
                                changes: Some(changes),
                                ..Default::default()
                            }),
                            is_preferred: Some(true),
                            ..Default::default()
                        }));
                    }
                    continue;
                }
            }

            // All other fixes are keyed by a numeric error code carried as a
            // string; anything non-numeric is skipped.
            let code: ErrorCode = match &diag.code {
                Some(NumberOrString::String(s)) => match s.parse() {
                    Ok(n) => ErrorCode(n),
                    Err(_) => continue,
                },
                _ => continue,
            };

            if let Some(def) = db.get(&code) {
                // Translate the db entry's FixKind into a concrete TextEdit
                // (None when the fix cannot be computed for this source).
                let edit_opt: Option<TextEdit> = match &def.fix {
                    // NOTE(review): unlike every other arm, this one falls
                    // back to an empty source via unwrap_or("") instead of
                    // skipping when `source` is None — confirm this is the
                    // intended behavior for file-start insertions.
                    FixKind::Insert { text, anchor: _ } => {
                        goto::code_action_edit(
                            source.as_deref().unwrap_or(""),
                            diag.range,
                            goto::CodeActionKind::InsertAtFileStart { text },
                        )
                    }

                    FixKind::ReplaceToken {
                        replacement,
                        walk_to,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceToken {
                                replacement,
                                walk_to: walk_to.as_deref(),
                            },
                        )
                    }),

                    FixKind::DeleteToken => source.as_deref().and_then(|src| {
                        goto::code_action_edit(src, diag.range, goto::CodeActionKind::DeleteToken)
                    }),

                    // Only local variable declarations are deletable this way;
                    // other node kinds produce no edit.
                    FixKind::DeleteNode { node_kind } => {
                        if node_kind == "variable_declaration_statement" {
                            source.as_deref().and_then(|src| {
                                goto::code_action_edit(
                                    src,
                                    diag.range,
                                    goto::CodeActionKind::DeleteLocalVar,
                                )
                            })
                        } else {
                            None
                        }
                    }

                    FixKind::DeleteChildNode {
                        walk_to,
                        child_kinds,
                    } => {
                        // Borrow the owned Strings as &str for the goto API.
                        let ck: Vec<&str> = child_kinds.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::DeleteChildNode {
                                    walk_to,
                                    child_kinds: &ck,
                                },
                            )
                        })
                    }

                    FixKind::ReplaceChildNode {
                        walk_to,
                        child_kind,
                        replacement,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceChildNode {
                                walk_to,
                                child_kind,
                                replacement,
                            },
                        )
                    }),

                    FixKind::InsertBeforeNode {
                        walk_to,
                        before_child,
                        text,
                    } => {
                        let bc: Vec<&str> = before_child.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::InsertBeforeNode {
                                    walk_to,
                                    before_child: &bc,
                                    text,
                                },
                            )
                        })
                    }

                    // Custom fixes are handled by the per-code match below.
                    FixKind::Custom => None,
                };

                if let Some(edit) = edit_opt {
                    let mut changes = HashMap::new();
                    changes.insert(uri.clone(), vec![edit]);
                    actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                        title: def.title.clone(),
                        kind: Some(CodeActionKind::QUICKFIX),
                        diagnostics: Some(vec![diag.clone()]),
                        edit: Some(WorkspaceEdit {
                            changes: Some(changes),
                            ..Default::default()
                        }),
                        is_preferred: Some(true),
                        ..Default::default()
                    }));
                    continue;
                }

                // A non-Custom fix that produced no edit is done; only
                // FixKind::Custom falls through to the match below.
                if !matches!(def.fix, FixKind::Custom) {
                    continue;
                }
            }

            // Extension point for code-specific custom actions; currently
            // empty (no codes have a Custom handler).
            #[allow(clippy::match_single_binding)]
            match code {
                _ => {}
            }
        }

        if actions.is_empty() {
            Ok(None)
        } else {
            Ok(Some(actions))
        }
    }
5321
    /// Handles `workspace/willRenameFiles`.
    ///
    /// Before the client performs a rename, computes the import-path rewrites
    /// every project source file needs and returns them as a `WorkspaceEdit`.
    /// Also eagerly patches the server's own `text_cache` so follow-up
    /// requests see the rewritten imports. Returns `Ok(None)` when the
    /// feature is disabled, nothing is renamed, or no edits are needed.
    async fn will_rename_files(
        &self,
        params: RenameFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Respect the user setting: bail out before doing any work.
        if !self
            .settings
            .read()
            .await
            .file_operations
            .update_imports_on_rename
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willRenameFiles: updateImportsOnRename disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project source files on a blocking thread (fs walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willRenameFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Parse the (old, new) URI pairs into filesystem paths, skipping any
        // entry that is not a valid file URI.
        let raw_renames: Vec<(std::path::PathBuf, std::path::PathBuf)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                let old_path = old_uri.to_file_path().ok()?;
                let new_path = new_uri.to_file_path().ok()?;
                Some((old_path, new_path))
            })
            .collect();

        // A renamed folder expands into one rename per contained source file.
        let renames = file_operations::expand_folder_renames(&raw_renames, &source_files);

        if renames.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} rename(s) after folder expansion",
                    renames.len()
                ),
            )
            .await;

        // Find the source files NOT already in the text cache so they can be
        // loaded from disk in one batch below.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Read the missing files off the async executor.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            // Version 0 marks a disk-loaded entry; or_insert never overwrites
            // an entry the editor already owns.
            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                tc.entry(uri_str.into()).or_insert((0, content));
            }
        }

        // Compute the import edits with a read-only view of the cache.
        let text_cache = self.text_cache.clone();
        let result = {
            let tc = text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::rename_imports(
                &source_files,
                &renames,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal bookkeeping problems as one warning log.
        let stats = &result.stats;
        if stats.read_failures > 0 || stats.pathdiff_failures > 0 || stats.duplicate_renames > 0 {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willRenameFiles stats: read_failures={}, pathdiff_failures={}, \
                         duplicate_renames={}, no_parent={}, no_op_skips={}, dedup_skips={}",
                        stats.read_failures,
                        stats.pathdiff_failures,
                        stats.duplicate_renames,
                        stats.no_parent,
                        stats.no_op_skips,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;

        if all_edits.is_empty() {
            self.client
                .log_message(MessageType::INFO, "willRenameFiles: no import edits needed")
                .await;
            return Ok(None);
        }

        // Patch our own cache eagerly so the server's view matches what the
        // client is about to apply.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willRenameFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
5516
    /// Handles `workspace/didRenameFiles`.
    ///
    /// After a rename has happened: marks the project cache dirty, records
    /// the touched paths, migrates/evicts per-file caches keyed by the old
    /// URIs, and kicks off a background full project re-index.
    async fn did_rename_files(&self, params: RenameFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            // Record both old and new paths so the incremental cache sync
            // knows exactly which files changed.
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(old_uri) = Url::parse(&file.old_uri)
                    && let Ok(old_path) = old_uri.to_file_path()
                {
                    changed.insert(old_path.to_string_lossy().to_string());
                }
                if let Ok(new_uri) = Url::parse(&file.new_uri)
                    && let Ok(new_path) = new_uri.to_file_path()
                {
                    changed.insert(new_path.to_string_lossy().to_string());
                }
            }
        }

        let raw_uri_pairs: Vec<(Url, Url)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                Some((old_uri, new_uri))
            })
            .collect();

        // Expand folder renames into per-file renames, using the union of
        // cached paths and freshly discovered source files as the universe.
        let file_renames = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_renames_from_paths(&raw_uri_pairs, &all_paths)
        };

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didRenameFiles: migrating {} cache entry/entries",
                    file_renames.len()
                ),
            )
            .await;

        // text_cache entries carry over to the new key; derived caches below
        // are simply evicted and rebuilt on demand.
        {
            let mut tc = self.text_cache.write().await;
            for (old_key, new_key) in &file_renames {
                if let Some(entry) = tc.remove(old_key) {
                    tc.insert(new_key.clone().into(), entry);
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for (old_key, _) in &file_renames {
                ac.remove(old_key);
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for (old_key, _) in &file_renames {
                cc.remove(old_key);
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for (old_key, _) in &file_renames {
                sc.remove(old_key);
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for (old_key, _) in &file_renames {
                pending.remove(old_key);
            }
        }

        {
            // Invalidate library sub-caches touched by either side of a
            // rename.
            let affected_paths: Vec<std::path::PathBuf> = file_renames
                .iter()
                .flat_map(|(old_key, new_key)| {
                    let mut paths = Vec::new();
                    if let Ok(u) = Url::parse(old_key) {
                        if let Ok(p) = u.to_file_path() {
                            paths.push(p);
                        }
                    }
                    if let Ok(u) = Url::parse(new_key) {
                        if let Ok(p) = u.to_file_path() {
                            paths.push(p);
                        }
                    }
                    paths
                })
                .collect();
            self.invalidate_lib_sub_caches_if_affected(&affected_paths)
                .await;
        }

        let root_key = self.project_cache_key().await;

        // Clone everything the background task needs; the snapshot of
        // text_cache lets the indexer see unsaved buffer contents.
        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let path_interner = self.path_interner.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Re-index the whole project in the background; failures are only
        // logged, never surfaced to the client as errors.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *path_interner.write().await),
                    ));
                    let source_count = cached_build.nodes.len();
                    ast_cache
                        .write()
                        .await
                        .insert(cache_key.into(), cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didRenameFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didRenameFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
5699
    /// Handles `workspace/willDeleteFiles`.
    ///
    /// Before the client deletes files, computes edits that remove the import
    /// statements referencing the doomed files from every project source, and
    /// returns them as a `WorkspaceEdit`. Mirrors `will_rename_files`.
    /// Returns `Ok(None)` when the feature is disabled or no edits apply.
    async fn will_delete_files(
        &self,
        params: DeleteFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Respect the user setting before doing any work.
        if !update_imports_on_delete_enabled(&*self.settings.read().await) {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: updateImportsOnDelete disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover project sources on a blocking thread (fs walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willDeleteFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // URIs -> paths, skipping non-file URIs.
        let raw_deletes: Vec<std::path::PathBuf> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .filter_map(|u| u.to_file_path().ok())
            .collect();

        // A deleted folder expands into one delete target per contained file.
        let deletes = file_operations::expand_folder_deletes(&raw_deletes, &source_files);
        if deletes.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} delete target(s) after folder expansion",
                    deletes.len()
                ),
            )
            .await;

        // Load any source files missing from the text cache in one batch.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Disk reads happen off the async executor.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            // Version 0 marks a disk-loaded entry; never overwrite an
            // editor-owned entry.
            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                tc.entry(uri_str.into()).or_insert((0, content));
            }
        }

        // Compute the import-removal edits against the cached sources.
        let result = {
            let tc = self.text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::delete_imports(
                &source_files,
                &deletes,
                &project_root,
                &get_source_bytes,
            )
        };

        // Non-fatal bookkeeping problems become one warning log.
        let stats = &result.stats;
        if stats.read_failures > 0
            || stats.statement_range_failures > 0
            || stats.duplicate_deletes > 0
        {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willDeleteFiles stats: read_failures={}, statement_range_failures={}, \
                         duplicate_deletes={}, no_parent={}, dedup_skips={}",
                        stats.read_failures,
                        stats.statement_range_failures,
                        stats.duplicate_deletes,
                        stats.no_parent,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;
        if all_edits.is_empty() {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: no import-removal edits needed",
                )
                .await;
            return Ok(None);
        }

        // Patch our own cache eagerly so the server's view matches what the
        // client is about to apply.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willDeleteFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
5875
    /// Handles `workspace/didDeleteFiles`.
    ///
    /// After files are gone: marks the project cache dirty, clears the
    /// deleted files' diagnostics on the client, evicts every per-file cache
    /// entry keyed by a deleted URI, and triggers a background re-index.
    async fn did_delete_files(&self, params: DeleteFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            // Record the deleted paths for the incremental cache sync.
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(uri) = Url::parse(&file.uri)
                    && let Ok(path) = uri.to_file_path()
                {
                    changed.insert(path.to_string_lossy().to_string());
                }
            }
        }

        let raw_delete_uris: Vec<Url> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .collect();

        // Expand folder deletes into per-file deletes against the union of
        // cached paths and freshly discovered source files.
        let deleted_paths = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_deletes_from_paths(&raw_delete_uris, &all_paths)
        };

        // Keep both forms: string keys for cache eviction, Urls for
        // publishing empty diagnostics.
        let mut deleted_keys: HashSet<String> = HashSet::new();
        let mut deleted_uris: Vec<Url> = Vec::new();
        for path in deleted_paths {
            if let Ok(uri) = Url::from_file_path(&path) {
                deleted_keys.insert(uri.to_string());
                deleted_uris.push(uri);
            }
        }
        if deleted_keys.is_empty() {
            return;
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: deleting {} cache/diagnostic entry(ies)",
                    deleted_keys.len()
                ),
            )
            .await;

        // Clear stale diagnostics for files that no longer exist.
        for uri in &deleted_uris {
            self.client
                .publish_diagnostics(uri.clone(), vec![], None)
                .await;
        }

        // Evict every per-file cache, counting removals for the log below.
        let mut removed_text = 0usize;
        let mut removed_ast = 0usize;
        let mut removed_completion = 0usize;
        let mut removed_semantic = 0usize;
        let mut removed_pending_create = 0usize;
        {
            let mut tc = self.text_cache.write().await;
            for key in &deleted_keys {
                if tc.remove(key).is_some() {
                    removed_text += 1;
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for key in &deleted_keys {
                if ac.remove(key).is_some() {
                    removed_ast += 1;
                }
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for key in &deleted_keys {
                if cc.remove(key).is_some() {
                    removed_completion += 1;
                }
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for key in &deleted_keys {
                if sc.remove(key).is_some() {
                    removed_semantic += 1;
                }
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for key in &deleted_keys {
                if pending.remove(key) {
                    removed_pending_create += 1;
                }
            }
        }
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: removed caches text={} ast={} completion={} semantic={} pendingCreate={}",
                    removed_text,
                    removed_ast,
                    removed_completion,
                    removed_semantic,
                    removed_pending_create,
                ),
            )
            .await;

        {
            // Invalidate library sub-caches affected by the deletions.
            let affected_paths: Vec<std::path::PathBuf> = deleted_keys
                .iter()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            self.invalidate_lib_sub_caches_if_affected(&affected_paths)
                .await;
        }

        let root_key = self.project_cache_key().await;

        // Clone everything the background task needs; the text-cache snapshot
        // lets the indexer see unsaved buffer contents.
        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let path_interner = self.path_interner.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Background full re-index; failures are only logged.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *path_interner.write().await),
                    ));
                    let source_count = cached_build.nodes.len();
                    ast_cache
                        .write()
                        .await
                        .insert(cache_key.into(), cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didDeleteFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didDeleteFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
6072
6073 async fn will_create_files(
6074 &self,
6075 params: CreateFilesParams,
6076 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
6077 self.client
6078 .log_message(
6079 MessageType::INFO,
6080 format!("workspace/willCreateFiles: {} file(s)", params.files.len()),
6081 )
6082 .await;
6083 if !self
6084 .settings
6085 .read()
6086 .await
6087 .file_operations
6088 .template_on_create
6089 {
6090 self.client
6091 .log_message(
6092 MessageType::INFO,
6093 "willCreateFiles: templateOnCreate disabled",
6094 )
6095 .await;
6096 return Ok(None);
6097 }
6098 self.client
6099 .log_message(
6100 MessageType::INFO,
6101 "willCreateFiles: skipping pre-create edits; scaffolding via didCreateFiles",
6102 )
6103 .await;
6104 Ok(None)
6105 }
6106
    /// Handles `workspace/didCreateFiles`.
    ///
    /// For each newly created file that is empty (both in the open buffer and
    /// on disk), generates a Solidity scaffold and pushes it to the client
    /// via `workspace/applyEdit`. Then runs diagnostics on any created file
    /// that has content, invalidates affected caches, and triggers a
    /// background project re-index.
    async fn did_create_files(&self, params: CreateFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didCreateFiles: {} file(s)", params.files.len()),
            )
            .await;
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            // Record the created paths for the incremental cache sync.
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(uri) = Url::parse(&file.uri)
                    && let Ok(path) = uri.to_file_path()
                {
                    changed.insert(path.to_string_lossy().to_string());
                }
            }
        }
        // Scaffolding is opt-in via settings.
        if !self
            .settings
            .read()
            .await
            .file_operations
            .template_on_create
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "didCreateFiles: templateOnCreate disabled",
                )
                .await;
            return;
        }

        // Only the solc version is needed from the config; drop the guard
        // before the loop below.
        let config = self.foundry_config.read().await;
        let solc_version = config.solc_version.clone();
        drop(config);

        // apply_edits: what we ask the client to insert.
        // staged_content: same text, staged for our own cache once the client
        // confirms the edit was applied.
        let mut apply_edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
        let mut staged_content: HashMap<String, String> = HashMap::new();
        let mut created_uris: Vec<String> = Vec::new();
        {
            let tc = self.text_cache.read().await;
            for file_create in &params.files {
                let uri = match Url::parse(&file_create.uri) {
                    Ok(u) => u,
                    Err(_) => continue,
                };
                let uri_str = uri.to_string();

                // Scaffold only genuinely empty files: check both the open
                // buffer and the on-disk copy for non-whitespace content.
                // NOTE(review): the disk read here is blocking std::fs inside
                // an async fn.
                let open_has_content = tc
                    .get(&uri_str)
                    .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()));
                let path = match uri.to_file_path() {
                    Ok(p) => p,
                    Err(_) => continue,
                };
                let disk_has_content = std::fs::read_to_string(&path)
                    .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()));

                if open_has_content {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "didCreateFiles: skip {} (open buffer already has content)",
                                uri_str
                            ),
                        )
                        .await;
                    continue;
                }

                if disk_has_content {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "didCreateFiles: skip {} (disk file already has content)",
                                uri_str
                            ),
                        )
                        .await;
                    continue;
                }

                // Template chosen from the file name/path; None means no
                // scaffold applies to this file.
                let content =
                    match file_operations::generate_scaffold(&uri, solc_version.as_deref()) {
                        Some(s) => s,
                        None => continue,
                    };

                staged_content.insert(uri_str, content.clone());
                created_uris.push(uri.to_string());

                // Insert the scaffold at the very start of the (empty) file.
                apply_edits.entry(uri).or_default().push(TextEdit {
                    range: Range {
                        start: Position {
                            line: 0,
                            character: 0,
                        },
                        end: Position {
                            line: 0,
                            character: 0,
                        },
                    },
                    new_text: content,
                });
            }
        }

        if !apply_edits.is_empty() {
            {
                // Mark these URIs so later handlers know their content came
                // from the scaffolder.
                let mut pending = self.pending_create_scaffold.write().await;
                for uri in &created_uris {
                    pending.insert(uri.clone().into());
                }
            }

            let edit = WorkspaceEdit {
                changes: Some(apply_edits.clone()),
                document_changes: None,
                change_annotations: None,
            };
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "didCreateFiles: scaffolding {} empty file(s) via workspace/applyEdit",
                        apply_edits.len()
                    ),
                )
                .await;
            let apply_result = self.client.apply_edit(edit).await;
            let applied = apply_result.as_ref().is_ok_and(|r| r.applied);

            if applied {
                // Client accepted the edit: stage the scaffold text in our
                // cache (version 0 = not editor-versioned yet).
                let mut tc = self.text_cache.write().await;
                for (uri_str, content) in staged_content {
                    tc.insert(uri_str.into(), (0, content));
                }
            } else {
                // Rejection/failure is logged but deliberately not retried on
                // disk.
                if let Ok(resp) = &apply_result {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!(
                                "didCreateFiles: applyEdit rejected (no disk fallback): {:?}",
                                resp.failure_reason
                            ),
                        )
                        .await;
                } else if let Err(e) = &apply_result {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!("didCreateFiles: applyEdit failed (no disk fallback): {e}"),
                        )
                        .await;
                }
            }
        }

        // Run the change pipeline (diagnostics etc.) for each created file
        // that now has non-whitespace content in the cache.
        for file_create in &params.files {
            let Ok(uri) = Url::parse(&file_create.uri) else {
                continue;
            };
            let (version, content) = {
                let tc = self.text_cache.read().await;
                match tc.get(&uri.to_string()) {
                    Some((v, c)) => (*v, c.clone()),
                    None => continue,
                }
            };
            if !content.chars().any(|ch| !ch.is_whitespace()) {
                continue;
            }
            self.on_change(TextDocumentItem {
                uri,
                version,
                text: content,
                language_id: "solidity".to_string(),
            })
            .await;
        }

        {
            // Invalidate library sub-caches affected by the new files.
            let affected_paths: Vec<std::path::PathBuf> = params
                .files
                .iter()
                .filter_map(|f| Url::parse(&f.uri).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            self.invalidate_lib_sub_caches_if_affected(&affected_paths)
                .await;
        }

        let root_key = self.project_cache_key().await;

        // Clone everything the background task needs; the text-cache snapshot
        // lets the indexer see unsaved buffer contents.
        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let path_interner = self.path_interner.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Background full re-index; failures are only logged.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(
                        ast_data,
                        0,
                        Some(&mut *path_interner.write().await),
                    ));
                    let source_count = cached_build.nodes.len();
                    ast_cache
                        .write()
                        .await
                        .insert(cache_key.into(), cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didCreateFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didCreateFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
6366
6367 async fn prepare_call_hierarchy(
6370 &self,
6371 params: CallHierarchyPrepareParams,
6372 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyItem>>> {
6373 self.client
6374 .log_message(
6375 MessageType::INFO,
6376 "got textDocument/prepareCallHierarchy request",
6377 )
6378 .await;
6379
6380 let uri = params.text_document_position_params.text_document.uri;
6381 let position = params.text_document_position_params.position;
6382
6383 let file_path = match uri.to_file_path() {
6384 Ok(path) => path,
6385 Err(_) => {
6386 self.client
6387 .log_message(MessageType::ERROR, "invalid file uri")
6388 .await;
6389 return Ok(None);
6390 }
6391 };
6392
6393 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
6394 Some(bytes) => bytes,
6395 None => return Ok(None),
6396 };
6397
6398 let cached_build = match self.get_or_fetch_build(&uri, &file_path, true).await {
6399 Some(cb) => cb,
6400 None => return Ok(None),
6401 };
6402
6403 let path_str = match file_path.to_str() {
6404 Some(s) => s,
6405 None => return Ok(None),
6406 };
6407 let abs_path = match cached_build.path_to_abs.get(path_str) {
6408 Some(ap) => ap.clone(),
6409 None => {
6410 crate::types::AbsPath::new(path_str)
6412 }
6413 };
6414
6415 let byte_position = goto::pos_to_bytes(&source_bytes, position);
6416
6417 let callable_id = match crate::call_hierarchy::resolve_callable_at_position(
6419 &cached_build,
6420 abs_path.as_str(),
6421 byte_position,
6422 ) {
6423 Some(id) => id,
6424 None => {
6425 self.client
6426 .log_message(MessageType::INFO, "no callable found at cursor position")
6427 .await;
6428 return Ok(None);
6429 }
6430 };
6431
6432 let item = if let Some(decl) = cached_build.decl_index.get(&callable_id) {
6435 crate::call_hierarchy::decl_to_hierarchy_item(
6436 decl,
6437 callable_id,
6438 &cached_build.node_id_to_source_path,
6439 &cached_build.id_to_path_map,
6440 &cached_build.nodes,
6441 )
6442 } else if let Some(info) =
6443 crate::call_hierarchy::find_node_info(&cached_build.nodes, callable_id)
6444 {
6445 crate::call_hierarchy::node_info_to_hierarchy_item(
6446 callable_id,
6447 info,
6448 &cached_build.id_to_path_map,
6449 )
6450 } else {
6451 None
6452 };
6453
6454 match item {
6455 Some(it) => {
6456 self.client
6457 .log_message(
6458 MessageType::INFO,
6459 format!("prepared call hierarchy for: {}", it.name),
6460 )
6461 .await;
6462 Ok(Some(vec![it]))
6463 }
6464 None => {
6465 self.client
6466 .log_message(
6467 MessageType::INFO,
6468 "could not build CallHierarchyItem for callable",
6469 )
6470 .await;
6471 Ok(None)
6472 }
6473 }
6474 }
6475
6476 async fn incoming_calls(
6477 &self,
6478 params: CallHierarchyIncomingCallsParams,
6479 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyIncomingCall>>> {
6480 self.client
6481 .log_message(MessageType::INFO, "got callHierarchy/incomingCalls request")
6482 .await;
6483
6484 let item = ¶ms.item;
6485
6486 let node_id = match item
6488 .data
6489 .as_ref()
6490 .and_then(|d| d.get("nodeId"))
6491 .and_then(|v| v.as_i64())
6492 {
6493 Some(id) => crate::types::NodeId(id),
6494 None => {
6495 self.client
6496 .log_message(
6497 MessageType::ERROR,
6498 "missing nodeId in CallHierarchyItem data",
6499 )
6500 .await;
6501 return Ok(None);
6502 }
6503 };
6504
6505 let file_path = match item.uri.to_file_path() {
6507 Ok(p) => p,
6508 Err(_) => return Ok(None),
6509 };
6510 let file_build = match self.get_or_fetch_build(&item.uri, &file_path, true).await {
6511 Some(cb) => cb,
6512 None => return Ok(None),
6513 };
6514
6515 let project_build = self.ensure_project_cached_build().await;
6517
6518 let mut builds: Vec<&goto::CachedBuild> = vec![&file_build];
6520 if let Some(ref pb) = project_build {
6521 builds.push(pb);
6522 }
6523 let sub_caches = self.sub_caches.read().await;
6524 for sc in sub_caches.iter() {
6525 builds.push(sc);
6526 }
6527
6528 let target_name = &item.name;
6535 let target_sel = &item.selection_range;
6536 let target_abs = item
6537 .uri
6538 .as_ref()
6539 .strip_prefix("file://")
6540 .unwrap_or(item.uri.as_ref());
6541
6542 let target_name_offset = {
6546 let source_bytes = std::fs::read(target_abs).unwrap_or_default();
6547 goto::pos_to_bytes(&source_bytes, target_sel.start)
6548 };
6549
6550 let mut resolved_incoming: Vec<(CallHierarchyItem, (u32, u32), Range)> = Vec::new();
6555
6556 for build in &builds {
6557 let mut build_target_ids = crate::call_hierarchy::resolve_target_in_build(
6561 build,
6562 node_id,
6563 target_abs,
6564 target_name,
6565 target_name_offset,
6566 );
6567
6568 let snapshot: Vec<crate::types::NodeId> = build_target_ids.clone();
6570 for id in &snapshot {
6571 if let Some(related) = build.base_function_implementation.get(id) {
6572 for &related_id in related {
6573 if !build_target_ids.contains(&related_id) {
6574 build_target_ids.push(related_id);
6575 }
6576 }
6577 }
6578 }
6579
6580 if build_target_ids.is_empty() {
6581 continue;
6582 }
6583
6584 let calls = crate::call_hierarchy::incoming_calls(&build.nodes, &build_target_ids);
6585 for (caller_id, call_src) in calls {
6586 let call_range = match crate::call_hierarchy::call_src_to_range(
6587 &call_src,
6588 &build.id_to_path_map,
6589 ) {
6590 Some(r) => r,
6591 None => continue,
6592 };
6593 let caller_item = if let Some(decl) = build.decl_index.get(&caller_id) {
6596 crate::call_hierarchy::decl_to_hierarchy_item(
6597 decl,
6598 caller_id,
6599 &build.node_id_to_source_path,
6600 &build.id_to_path_map,
6601 &build.nodes,
6602 )
6603 } else if let Some(info) =
6604 crate::call_hierarchy::find_node_info(&build.nodes, caller_id)
6605 {
6606 crate::call_hierarchy::node_info_to_hierarchy_item(
6607 caller_id,
6608 info,
6609 &build.id_to_path_map,
6610 )
6611 } else {
6612 None
6613 };
6614 let Some(caller_item) = caller_item else {
6615 continue;
6616 };
6617 let pos = (
6618 caller_item.selection_range.start.line,
6619 caller_item.selection_range.start.character,
6620 );
6621 resolved_incoming.push((caller_item, pos, call_range));
6622 }
6623 }
6624
6625 if resolved_incoming.is_empty() {
6626 self.client
6627 .log_message(MessageType::INFO, "no incoming calls found")
6628 .await;
6629 return Ok(Some(vec![]));
6630 }
6631
6632 let mut grouped: HashMap<(u32, u32), (CallHierarchyItem, Vec<Range>)> = HashMap::new();
6635 for (caller_item, pos, call_range) in resolved_incoming {
6636 let entry = grouped
6637 .entry(pos)
6638 .or_insert_with(|| (caller_item, Vec::new()));
6639 if !entry.1.contains(&call_range) {
6640 entry.1.push(call_range);
6641 }
6642 }
6643
6644 let results: Vec<CallHierarchyIncomingCall> = grouped
6645 .into_values()
6646 .map(|(from, from_ranges)| CallHierarchyIncomingCall { from, from_ranges })
6647 .collect();
6648
6649 self.client
6650 .log_message(
6651 MessageType::INFO,
6652 format!("found {} incoming callers", results.len()),
6653 )
6654 .await;
6655 Ok(Some(results))
6656 }
6657
6658 async fn outgoing_calls(
6659 &self,
6660 params: CallHierarchyOutgoingCallsParams,
6661 ) -> tower_lsp::jsonrpc::Result<Option<Vec<CallHierarchyOutgoingCall>>> {
6662 self.client
6663 .log_message(MessageType::INFO, "got callHierarchy/outgoingCalls request")
6664 .await;
6665
6666 let item = ¶ms.item;
6667
6668 let node_id = match item
6670 .data
6671 .as_ref()
6672 .and_then(|d| d.get("nodeId"))
6673 .and_then(|v| v.as_i64())
6674 {
6675 Some(id) => crate::types::NodeId(id),
6676 None => {
6677 self.client
6678 .log_message(
6679 MessageType::ERROR,
6680 "missing nodeId in CallHierarchyItem data",
6681 )
6682 .await;
6683 return Ok(None);
6684 }
6685 };
6686
6687 let file_path = match item.uri.to_file_path() {
6689 Ok(p) => p,
6690 Err(_) => return Ok(None),
6691 };
6692 let file_build = match self.get_or_fetch_build(&item.uri, &file_path, true).await {
6693 Some(cb) => cb,
6694 None => return Ok(None),
6695 };
6696
6697 let project_build = self.ensure_project_cached_build().await;
6699
6700 let mut builds: Vec<&goto::CachedBuild> = vec![&file_build];
6702 if let Some(ref pb) = project_build {
6703 builds.push(pb);
6704 }
6705 let sub_caches = self.sub_caches.read().await;
6706 for sc in sub_caches.iter() {
6707 builds.push(sc);
6708 }
6709
6710 let target_name = &item.name;
6714 let target_sel = &item.selection_range;
6715 let target_abs = item
6716 .uri
6717 .as_ref()
6718 .strip_prefix("file://")
6719 .unwrap_or(item.uri.as_ref());
6720
6721 let target_name_offset = {
6723 let source_bytes = std::fs::read(target_abs).unwrap_or_default();
6724 goto::pos_to_bytes(&source_bytes, target_sel.start)
6725 };
6726
6727 let mut resolved_outgoing: Vec<(CallHierarchyItem, (u32, u32), Range)> = Vec::new();
6735
6736 for build in &builds {
6737 let build_caller_ids = crate::call_hierarchy::resolve_target_in_build(
6741 build,
6742 node_id,
6743 target_abs,
6744 target_name,
6745 target_name_offset,
6746 );
6747
6748 for &cid in &build_caller_ids {
6749 let calls = crate::call_hierarchy::outgoing_calls(&build.nodes, cid);
6750 for (callee_id, call_src) in calls {
6751 let call_range = match crate::call_hierarchy::call_src_to_range(
6752 &call_src,
6753 &build.id_to_path_map,
6754 ) {
6755 Some(r) => r,
6756 None => continue,
6757 };
6758 let callee_item = if let Some(decl) = build.decl_index.get(&callee_id) {
6761 crate::call_hierarchy::decl_to_hierarchy_item(
6762 decl,
6763 callee_id,
6764 &build.node_id_to_source_path,
6765 &build.id_to_path_map,
6766 &build.nodes,
6767 )
6768 } else if let Some(info) =
6769 crate::call_hierarchy::find_node_info(&build.nodes, callee_id)
6770 {
6771 crate::call_hierarchy::node_info_to_hierarchy_item(
6772 callee_id,
6773 info,
6774 &build.id_to_path_map,
6775 )
6776 } else {
6777 None
6778 };
6779 let Some(callee_item) = callee_item else {
6780 continue;
6781 };
6782 let pos = (
6783 callee_item.selection_range.start.line,
6784 callee_item.selection_range.start.character,
6785 );
6786 resolved_outgoing.push((callee_item, pos, call_range));
6787 }
6788 }
6789 }
6790
6791 if resolved_outgoing.is_empty() {
6792 return Ok(Some(vec![]));
6793 }
6794
6795 let mut grouped: HashMap<(u32, u32), (CallHierarchyItem, Vec<Range>)> = HashMap::new();
6798 for (callee_item, pos, call_range) in resolved_outgoing {
6799 let entry = grouped
6800 .entry(pos)
6801 .or_insert_with(|| (callee_item, Vec::new()));
6802 if !entry.1.contains(&call_range) {
6803 entry.1.push(call_range);
6804 }
6805 }
6806
6807 let mut results: Vec<CallHierarchyOutgoingCall> = grouped
6808 .into_values()
6809 .map(|(to, from_ranges)| CallHierarchyOutgoingCall { to, from_ranges })
6810 .collect();
6811
6812 results.sort_by(|a, b| {
6815 let a_first = a.from_ranges.first();
6816 let b_first = b.from_ranges.first();
6817 match (a_first, b_first) {
6818 (Some(a_r), Some(b_r)) => a_r
6819 .start
6820 .line
6821 .cmp(&b_r.start.line)
6822 .then_with(|| a_r.start.character.cmp(&b_r.start.character)),
6823 (Some(_), None) => std::cmp::Ordering::Less,
6824 (None, Some(_)) => std::cmp::Ordering::Greater,
6825 (None, None) => std::cmp::Ordering::Equal,
6826 }
6827 });
6828
6829 Ok(Some(results))
6830 }
6831}
6832
#[cfg(test)]
mod tests {
    use super::{
        start_or_mark_project_cache_sync_pending, stop_project_cache_sync_worker_or_reclaim,
        take_project_cache_sync_pending, try_claim_project_cache_dirty,
        update_imports_on_delete_enabled,
    };
    use std::sync::atomic::{AtomicBool, Ordering};

    /// Default settings opt in to import rewriting on delete.
    #[test]
    fn update_imports_on_delete_enabled_defaults_true() {
        let settings = crate::config::Settings::default();
        assert!(update_imports_on_delete_enabled(&settings));
    }

    /// Explicitly disabling the setting must be honored.
    #[test]
    fn update_imports_on_delete_enabled_respects_false() {
        let mut settings = crate::config::Settings::default();
        settings.file_operations.update_imports_on_delete = false;
        assert!(!update_imports_on_delete_enabled(&settings));
    }

    /// In a burst of sync requests only the first caller starts the worker;
    /// later callers merely leave the pending flag set.
    #[test]
    fn project_cache_sync_burst_only_first_starts_worker() {
        let pending_flag = AtomicBool::new(false);
        let running_flag = AtomicBool::new(false);

        // First request: claims the worker slot and marks work pending.
        assert!(start_or_mark_project_cache_sync_pending(
            &pending_flag,
            &running_flag
        ));
        assert!(pending_flag.load(Ordering::Acquire));
        assert!(running_flag.load(Ordering::Acquire));

        // Second request while running: no new worker, still pending.
        assert!(!start_or_mark_project_cache_sync_pending(
            &pending_flag,
            &running_flag
        ));
        assert!(pending_flag.load(Ordering::Acquire));
        assert!(running_flag.load(Ordering::Acquire));
    }

    /// Taking the pending flag clears it, so it fires exactly once.
    #[test]
    fn project_cache_sync_take_pending_is_one_shot() {
        let pending_flag = AtomicBool::new(true);
        assert!(take_project_cache_sync_pending(&pending_flag));
        assert!(!pending_flag.load(Ordering::Acquire));
        assert!(!take_project_cache_sync_pending(&pending_flag));
    }

    /// The worker either stops cleanly (nothing pending) or reclaims the
    /// running slot when new work raced in during shutdown.
    #[test]
    fn project_cache_sync_worker_stop_or_reclaim_handles_race() {
        let pending_flag = AtomicBool::new(false);
        let running_flag = AtomicBool::new(true);

        // Nothing pending: the worker stops and clears `running`.
        assert!(!stop_project_cache_sync_worker_or_reclaim(
            &pending_flag,
            &running_flag
        ));
        assert!(!running_flag.load(Ordering::Acquire));

        // Work arrived during shutdown: the worker reclaims the slot.
        pending_flag.store(true, Ordering::Release);
        running_flag.store(true, Ordering::Release);
        assert!(stop_project_cache_sync_worker_or_reclaim(
            &pending_flag,
            &running_flag
        ));
        assert!(running_flag.load(Ordering::Acquire));
    }

    /// Claiming the dirty flag is exclusive until it is set again.
    #[test]
    fn project_cache_dirty_claim_and_retry_cycle() {
        let dirty_flag = AtomicBool::new(true);

        assert!(try_claim_project_cache_dirty(&dirty_flag));
        assert!(!dirty_flag.load(Ordering::Acquire));

        // Already claimed: a second attempt must fail.
        assert!(!try_claim_project_cache_dirty(&dirty_flag));

        // Re-dirtied: claimable again.
        dirty_flag.store(true, Ordering::Release);
        assert!(try_claim_project_cache_dirty(&dirty_flag));
        assert!(!dirty_flag.load(Ordering::Acquire));
    }
}