1use crate::completion;
2use crate::config::{self, FoundryConfig, LintConfig, Settings};
3use crate::file_operations;
4use crate::folding;
5use crate::goto;
6use crate::highlight;
7use crate::hover;
8use crate::inlay_hints;
9use crate::links;
10use crate::references;
11use crate::rename;
12use crate::runner::{ForgeRunner, Runner};
13use crate::selection;
14use crate::semantic_tokens;
15use crate::symbols;
16use crate::utils;
17use std::collections::{HashMap, HashSet};
18use std::path::{Component, Path, PathBuf};
19use std::sync::Arc;
20use std::sync::atomic::{AtomicU64, Ordering};
21use tokio::sync::RwLock;
22use tower_lsp::{Client, LanguageServer, lsp_types::*};
23
/// Cached semantic tokens per document URI. The tuple appears to pair a
/// result id with the token list it describes — confirm against the
/// `semantic_tokens` module (not visible in this chunk).
type SemanticTokenCache = HashMap<String, (String, Vec<SemanticToken>)>;

/// Shared state of the Forge language server.
///
/// tower-lsp clones the server per request; every field is either a cheap
/// handle (`Client`), a plain flag, or an `Arc`, so clones are inexpensive
/// and all clones observe the same underlying state.
#[derive(Clone)]
pub struct ForgeLsp {
    /// Handle for talking back to the editor (logging, diagnostics, requests).
    client: Client,
    /// Compiler backend: solar or forge, selected at construction time.
    compiler: Arc<dyn Runner>,
    /// Cached builds keyed by document URI, plus one project-wide entry keyed
    /// by the project root URL (see `project_cache_key`).
    ast_cache: Arc<RwLock<HashMap<String, Arc<goto::CachedBuild>>>>,
    /// Latest known document text per URI, tagged with its LSP version.
    text_cache: Arc<RwLock<HashMap<String, (i32, String)>>>,
    /// Per-URI completion data, refreshed whenever a build succeeds.
    completion_cache: Arc<RwLock<HashMap<String, Arc<completion::CompletionCache>>>>,
    /// Lint configuration; consulted via `should_lint` before linting a file.
    lint_config: Arc<RwLock<LintConfig>>,
    /// Parsed foundry configuration for the workspace.
    foundry_config: Arc<RwLock<FoundryConfig>>,
    /// Capabilities advertised by the client — populated outside this chunk.
    client_capabilities: Arc<RwLock<Option<ClientCapabilities>>>,
    /// User-facing server settings (lint, project_index, file_operations, ...).
    settings: Arc<RwLock<Settings>>,
    /// When true, run solc directly for AST + diagnostics (forge as fallback).
    use_solc: bool,
    /// Semantic-token results per URI (see `SemanticTokenCache`).
    semantic_token_cache: Arc<RwLock<SemanticTokenCache>>,
    /// Monotonic counter — presumably for semantic-token result ids; confirm
    /// against the semantic_tokens handlers.
    semantic_token_id: Arc<AtomicU64>,
    /// Workspace root URI from the client; preferred project cache key.
    root_uri: Arc<RwLock<Option<Url>>>,
    /// One-shot guard so the background project index is kicked off only once.
    project_indexed: Arc<std::sync::atomic::AtomicBool>,
    // Coordination flags for the background cache-sync worker; manipulated via
    // the start/take/stop/claim helper functions defined below the impl.
    project_cache_dirty: Arc<std::sync::atomic::AtomicBool>,
    project_cache_sync_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_sync_pending: Arc<std::sync::atomic::AtomicBool>,
    project_cache_force_full_rebuild: Arc<std::sync::atomic::AtomicBool>,
    // Coordination flags for the background cache-upsert worker (same
    // start/take/stop protocol as the sync worker).
    project_cache_upsert_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_upsert_pending: Arc<std::sync::atomic::AtomicBool>,
    // File sets queued for the sync/upsert workers — consumed outside this chunk.
    project_cache_changed_files: Arc<RwLock<HashSet<String>>>,
    project_cache_upsert_files: Arc<RwLock<HashSet<String>>>,
    /// URIs awaiting scaffold content on create — managed outside this chunk.
    pending_create_scaffold: Arc<RwLock<HashSet<String>>>,
    /// Whether settings were supplied via the initialize request — set elsewhere.
    settings_from_init: Arc<std::sync::atomic::AtomicBool>,
    /// Per-URI watch channels feeding debounced didSave worker tasks.
    did_save_workers:
        Arc<RwLock<HashMap<String, tokio::sync::watch::Sender<Option<DidSaveTextDocumentParams>>>>>,
    /// Static table of code-action definitions loaded once at startup.
    code_action_db: Arc<HashMap<u32, crate::code_actions::CodeActionDef>>,
}
93
94impl ForgeLsp {
95 pub fn new(client: Client, use_solar: bool, use_solc: bool) -> Self {
96 let compiler: Arc<dyn Runner> = if use_solar {
97 Arc::new(crate::solar_runner::SolarRunner)
98 } else {
99 Arc::new(ForgeRunner)
100 };
101 let ast_cache = Arc::new(RwLock::new(HashMap::new()));
102 let text_cache = Arc::new(RwLock::new(HashMap::new()));
103 let completion_cache = Arc::new(RwLock::new(HashMap::new()));
104 let lint_config = Arc::new(RwLock::new(LintConfig::default()));
105 let foundry_config = Arc::new(RwLock::new(FoundryConfig::default()));
106 let client_capabilities = Arc::new(RwLock::new(None));
107 let settings = Arc::new(RwLock::new(Settings::default()));
108 Self {
109 client,
110 compiler,
111 ast_cache,
112 text_cache,
113 completion_cache,
114 lint_config,
115 foundry_config,
116 client_capabilities,
117 settings,
118 use_solc,
119 semantic_token_cache: Arc::new(RwLock::new(HashMap::new())),
120 semantic_token_id: Arc::new(AtomicU64::new(0)),
121 root_uri: Arc::new(RwLock::new(None)),
122 project_indexed: Arc::new(std::sync::atomic::AtomicBool::new(false)),
123 project_cache_dirty: Arc::new(std::sync::atomic::AtomicBool::new(false)),
124 project_cache_sync_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
125 project_cache_sync_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
126 project_cache_force_full_rebuild: Arc::new(std::sync::atomic::AtomicBool::new(false)),
127 project_cache_upsert_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
128 project_cache_upsert_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
129 project_cache_changed_files: Arc::new(RwLock::new(HashSet::new())),
130 project_cache_upsert_files: Arc::new(RwLock::new(HashSet::new())),
131 pending_create_scaffold: Arc::new(RwLock::new(HashSet::new())),
132 settings_from_init: Arc::new(std::sync::atomic::AtomicBool::new(false)),
133 did_save_workers: Arc::new(RwLock::new(HashMap::new())),
134 code_action_db: Arc::new(crate::code_actions::load()),
135 }
136 }
137
    /// Loads the foundry configuration applicable to `file_path`.
    ///
    /// Thin async wrapper around [`config::load_foundry_config`]; the load
    /// itself is synchronous (no `.await` in the body).
    async fn foundry_config_for_file(&self, file_path: &std::path::Path) -> FoundryConfig {
        config::load_foundry_config(file_path)
    }
148
    /// Returns the key under which the project-wide build is stored in
    /// `ast_cache`.
    ///
    /// Prefers the workspace root URI reported by the client. Otherwise falls
    /// back to the foundry config root: made absolute against the current
    /// working directory when relative, trimmed to its parent when it is not a
    /// directory, and rendered as a directory URL. Returns `None` when no
    /// usable root can be derived.
    async fn project_cache_key(&self) -> Option<String> {
        if let Some(uri) = self.root_uri.read().await.as_ref() {
            return Some(uri.to_string());
        }

        let mut root = self.foundry_config.read().await.root.clone();
        // Relative roots are resolved against the server's working directory.
        if !root.is_absolute()
            && let Ok(cwd) = std::env::current_dir()
        {
            root = cwd.join(root);
        }
        if !root.is_dir() {
            root = root.parent()?.to_path_buf();
        }
        Url::from_directory_path(root).ok().map(|u| u.to_string())
    }
169
    /// Returns the project-wide cached build, loading and reconciling it on
    /// demand.
    ///
    /// Fast path: the build is already in `ast_cache` under the project key.
    /// Slow path (solc mode with `full_project_scan` only): load the on-disk
    /// reference cache; if it is incomplete and files changed since it was
    /// written, re-index just the reverse-import closure of the changed files,
    /// merge that scoped build into the cached one, persist the result, and
    /// return the merged build. On any failure along the way, returns the
    /// best build available so far (or `None`).
    async fn ensure_project_cached_build(&self) -> Option<Arc<goto::CachedBuild>> {
        let root_key = self.project_cache_key().await?;
        if let Some(existing) = self.ast_cache.read().await.get(&root_key).cloned() {
            return Some(existing);
        }

        let settings = self.settings.read().await.clone();
        if !self.use_solc || !settings.project_index.full_project_scan {
            return None;
        }

        let foundry_config = self.foundry_config.read().await.clone();
        if !foundry_config.root.is_dir() {
            return None;
        }

        // Disk I/O and hashing run off the async runtime.
        let cache_mode = settings.project_index.cache_mode.clone();
        let cfg_for_load = foundry_config.clone();
        let load_res = tokio::task::spawn_blocking(move || {
            crate::project_cache::load_reference_cache_with_report(&cfg_for_load, cache_mode, true)
        })
        .await;

        let Ok(report) = load_res else {
            return None;
        };
        let Some(build) = report.build else {
            return None;
        };

        let source_count = build.nodes.len();
        let complete = report.complete;
        let duration_ms = report.duration_ms;
        let reused = report.file_count_reused;
        let hashed = report.file_count_hashed;
        let arc = Arc::new(build);
        self.ast_cache
            .write()
            .await
            .insert(root_key.clone(), arc.clone());
        // A loaded project cache counts as "indexed": suppress the background
        // indexing kick-off in on_change.
        self.project_indexed
            .store(true, std::sync::atomic::Ordering::Relaxed);
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "references warm-load: project cache loaded (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                    source_count, reused, hashed, complete, duration_ms
                ),
            )
            .await;

        if complete {
            return Some(arc);
        }

        // Partial cache: determine what changed on disk since it was written.
        let cfg_for_diff = foundry_config.clone();
        let changed = tokio::task::spawn_blocking(move || {
            crate::project_cache::changed_files_since_v2_cache(&cfg_for_diff, true)
        })
        .await
        .ok()
        .and_then(Result::ok)
        .unwrap_or_default();

        if changed.is_empty() {
            return Some(arc);
        }

        // Re-index everything that (transitively) imports a changed file; if
        // the closure comes back empty, fall back to the changed files alone.
        let remappings = crate::solc::resolve_remappings(&foundry_config).await;
        let cfg_for_plan = foundry_config.clone();
        let changed_for_plan = changed.clone();
        let remappings_for_plan = remappings.clone();
        let affected_set = tokio::task::spawn_blocking(move || {
            compute_reverse_import_closure(&cfg_for_plan, &changed_for_plan, &remappings_for_plan)
        })
        .await
        .ok()
        .unwrap_or_default();
        let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
        if affected_files.is_empty() {
            affected_files = changed;
        }

        // Unsaved editor buffers take precedence over on-disk contents.
        let text_cache_snapshot = self.text_cache.read().await.clone();
        match crate::solc::solc_project_index_scoped(
            &foundry_config,
            Some(&self.client),
            Some(&text_cache_snapshot),
            &affected_files,
        )
        .await
        {
            Ok(ast_data) => {
                let scoped_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                let mut merge_error: Option<String> = None;
                // Merge under the write lock so a concurrent insert is not lost.
                let merged = {
                    let mut cache = self.ast_cache.write().await;
                    let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                        let mut merged = (*existing).clone();
                        match merge_scoped_cached_build(&mut merged, (*scoped_build).clone()) {
                            Ok(_) => Arc::new(merged),
                            Err(e) => {
                                // Merge failed: fall back to the scoped build alone.
                                merge_error = Some(e);
                                scoped_build.clone()
                            }
                        }
                    } else {
                        scoped_build.clone()
                    };
                    cache.insert(root_key.clone(), merged.clone());
                    merged
                };
                if let Some(e) = merge_error {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!(
                                "references warm-load reconcile: merge failed, using scoped build: {}",
                                e
                            ),
                        )
                        .await;
                }

                // Persist the reconciled build off the runtime.
                let cfg_for_save = foundry_config.clone();
                let build_for_save = (*merged).clone();
                let save_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
                if let Ok(Ok(report)) = save_res {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-load reconcile: saved cache (affected={}, hashed_files={}, duration={}ms)",
                                affected_files.len(),
                                report.file_count_hashed,
                                report.duration_ms
                            ),
                        )
                        .await;
                }
                Some(merged)
            }
            Err(e) => {
                // Scoped re-index failed: serve the (possibly stale) loaded cache.
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!(
                            "references warm-load reconcile: scoped reindex failed: {}",
                            e
                        ),
                    )
                    .await;
                Some(arc)
            }
        }
    }
340
    /// Persists the in-memory project-wide build to the on-disk reference
    /// cache.
    ///
    /// No-op unless solc mode with `project_index.full_project_scan` is
    /// enabled and a build exists under the project cache key. The save runs
    /// on a blocking thread; `reason` is only used to flavor log messages.
    async fn flush_project_cache_to_disk(&self, reason: &str) {
        if !self.use_solc || !self.settings.read().await.project_index.full_project_scan {
            return;
        }
        let Some(root_key) = self.project_cache_key().await else {
            return;
        };
        let build = {
            let cache = self.ast_cache.read().await;
            cache.get(&root_key).cloned()
        };
        let Some(build) = build else {
            return;
        };

        let foundry_config = self.foundry_config.read().await.clone();
        // Clone out of the Arc so the blocking task owns its data.
        let build_for_save = (*build).clone();
        let res = tokio::task::spawn_blocking(move || {
            crate::project_cache::save_reference_cache_with_report(
                &foundry_config,
                &build_for_save,
                None,
            )
        })
        .await;

        match res {
            Ok(Ok(report)) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "project cache flush ({}): saved hashed_files={}, duration={}ms",
                            reason, report.file_count_hashed, report.duration_ms
                        ),
                    )
                    .await;
            }
            Ok(Err(e)) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) failed: {}", reason, e),
                    )
                    .await;
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) task failed: {}", reason, e),
                    )
                    .await;
            }
        }
    }
400
    /// Handles a document change: rebuilds diagnostics, refreshes caches, and
    /// publishes results.
    ///
    /// Pipeline:
    /// 1. Resolve the URI to a filesystem path (log + bail on failure).
    /// 2. Skip entirely when the content hashes to what we last built.
    /// 3. Run lint/build/AST: in solc mode one solc run yields both AST and
    ///    diagnostics (falling back to forge on error); otherwise forge
    ///    supplies build diagnostics and AST separately.
    /// 4. On an error-free build, refresh the AST and completion caches.
    /// 5. Update the text cache, never downgrading the document version.
    /// 6. Publish combined lint + build diagnostics.
    /// 7. After the first clean build (solc + full_project_scan), spawn a
    ///    one-shot background task that loads or rebuilds the whole-project
    ///    index with LSP work-done progress reporting.
    async fn on_change(&self, params: TextDocumentItem) {
        let uri = params.uri.clone();
        let version = params.version;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return;
            }
        };

        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path")
                    .await;
                return;
            }
        };

        // Short-circuit: identical content to the last successful build means
        // nothing to do. A stored hash of 0 means "unknown" and never matches.
        {
            use std::hash::{Hash, Hasher};
            let mut hasher = std::collections::hash_map::DefaultHasher::new();
            params.text.hash(&mut hasher);
            let incoming_hash = hasher.finish();

            let cache = self.ast_cache.read().await;
            if let Some(cached) = cache.get(&uri.to_string()) {
                if cached.content_hash != 0 && cached.content_hash == incoming_hash {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            "on_change: content unchanged since last build, skipping rebuild",
                        )
                        .await;
                    return;
                }
            }
        }

        // Lint only when both the lint config's path filters and the user
        // settings allow it; snapshot the settings outside the joins below.
        let (should_lint, lint_settings) = {
            let lint_cfg = self.lint_config.read().await;
            let settings = self.settings.read().await;
            let enabled = lint_cfg.should_lint(&file_path) && settings.lint.enabled;
            let ls = settings.lint.clone();
            (enabled, ls)
        };

        let (lint_result, build_result, ast_result) = if self.use_solc {
            // One solc run produces both the AST and the build diagnostics.
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            let solc_future = crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client));

            if should_lint {
                let (lint, solc) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    solc_future
                );
                match solc {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (Some(lint), Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        // solc failed: forge provides both diagnostics and AST.
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (Some(lint), build, ast)
                    }
                }
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                match solc_future.await {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (None, Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (None, build, ast)
                    }
                }
            }
        } else {
            if should_lint {
                // Forge only: lint, build diagnostics and AST run concurrently.
                let (lint, build, ast) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (Some(lint), build, ast)
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                let (build, ast) = tokio::join!(
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (None, build, ast)
            }
        };

        // A build "succeeds" when no diagnostic is ERROR severity.
        let build_succeeded = matches!(&build_result, Ok(diagnostics) if diagnostics.iter().all(|d| d.severity != Some(DiagnosticSeverity::ERROR)));

        // Hash the built text so identical future edits can skip rebuilding.
        let content_hash = {
            use std::hash::{Hash, Hasher};
            let mut hasher = std::collections::hash_map::DefaultHasher::new();
            params.text.hash(&mut hasher);
            hasher.finish()
        };

        if build_succeeded {
            if let Ok(ast_data) = ast_result {
                let mut cached_build = goto::CachedBuild::new(ast_data, version);
                cached_build.content_hash = content_hash;
                let cached_build = Arc::new(cached_build);
                let mut cache = self.ast_cache.write().await;
                cache.insert(uri.to_string(), cached_build.clone());
                drop(cache);

                // Keep the completion cache in lockstep with the new build.
                {
                    let mut cc = self.completion_cache.write().await;
                    cc.insert(uri.to_string(), cached_build.completion_cache.clone());
                }
                self.client
                    .log_message(MessageType::INFO, "Build successful, AST cache updated")
                    .await;
            } else if let Err(e) = ast_result {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("Build succeeded but failed to get AST: {e}"),
                    )
                    .await;
            }
        } else {
            // Keep the last good AST so navigation still works mid-edit.
            self.client
                .log_message(
                    MessageType::INFO,
                    "Build errors detected, keeping existing AST cache",
                )
                .await;
        }

        // Record the document text, but never replace a newer version with an
        // older one (out-of-order change notifications).
        {
            let mut text_cache = self.text_cache.write().await;
            let uri_str = uri.to_string();
            let existing_version = text_cache.get(&uri_str).map(|(v, _)| *v).unwrap_or(-1);
            if version >= existing_version {
                text_cache.insert(uri_str, (version, params.text));
            }
        }

        let mut all_diagnostics = vec![];

        if let Some(lint_result) = lint_result {
            match lint_result {
                Ok(mut lints) => {
                    // Drop diagnostics whose code the user excluded in settings.
                    if !lint_settings.exclude.is_empty() {
                        lints.retain(|d| {
                            if let Some(NumberOrString::String(code)) = &d.code {
                                !lint_settings.exclude.iter().any(|ex| ex == code)
                            } else {
                                true
                            }
                        });
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("found {} lint diagnostics", lints.len()),
                        )
                        .await;
                    all_diagnostics.append(&mut lints);
                }
                Err(e) => {
                    self.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Forge lint diagnostics failed: {e}"),
                        )
                        .await;
                }
            }
        }

        match build_result {
            Ok(mut builds) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("found {} build diagnostics", builds.len()),
                    )
                    .await;
                all_diagnostics.append(&mut builds);
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Forge build diagnostics failed: {e}"),
                    )
                    .await;
            }
        }

        // Some clients render empty messages poorly; give them a placeholder.
        for diag in &mut all_diagnostics {
            if diag.message.is_empty() {
                diag.message = "Unknown issue".to_string();
            }
        }

        self.client
            .publish_diagnostics(uri, all_diagnostics, None)
            .await;

        if build_succeeded {
            // Ask the client to re-request inlay hints in the background.
            let client = self.client.clone();
            tokio::spawn(async move {
                let _ = client.inlay_hint_refresh().await;
            });
        }

        // One-shot background project indexing after the first clean build.
        if build_succeeded
            && self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && !self
                .project_indexed
                .load(std::sync::atomic::Ordering::Relaxed)
        {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Report progress under a dedicated work-done token.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // First try the persisted reference cache (off-runtime I/O).
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete cache means no re-index is needed.
                            if report.complete {
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache miss or partial: run a full project index with solc.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
                        // Preserve entries the partial cache had that the fresh
                        // index did not produce.
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        let cached_build = Arc::new(new_build);
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!("project index: cached {} source files", source_count),
                            )
                            .await;

                        // Persist asynchronously; indexing result is already live.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index: cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: failed to persist cache: {e}"),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: cache save task failed: {e}"),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("project index failed: {e}"))
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some("Indexing failed".to_string()),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
937
938 async fn get_or_fetch_build(
947 &self,
948 uri: &Url,
949 file_path: &std::path::Path,
950 insert_on_miss: bool,
951 ) -> Option<Arc<goto::CachedBuild>> {
952 let uri_str = uri.to_string();
953
954 {
957 let cache = self.ast_cache.read().await;
958 if let Some(cached) = cache.get(&uri_str) {
959 return Some(cached.clone());
960 }
961 }
962
963 if !insert_on_miss {
967 return None;
968 }
969
970 let path_str = file_path.to_str()?;
972 let ast_result = if self.use_solc {
973 let foundry_cfg = self.foundry_config_for_file(&file_path).await;
974 match crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client)).await {
975 Ok(data) => Ok(data),
976 Err(_) => self.compiler.ast(path_str).await,
977 }
978 } else {
979 self.compiler.ast(path_str).await
980 };
981 match ast_result {
982 Ok(data) => {
983 let build = Arc::new(goto::CachedBuild::new(data, 0));
986 let mut cache = self.ast_cache.write().await;
987 cache.insert(uri_str.clone(), build.clone());
988 Some(build)
989 }
990 Err(e) => {
991 self.client
992 .log_message(MessageType::ERROR, format!("failed to get AST: {e}"))
993 .await;
994 None
995 }
996 }
997 }
998
999 async fn get_source_bytes(&self, uri: &Url, file_path: &std::path::Path) -> Option<Vec<u8>> {
1002 {
1003 let text_cache = self.text_cache.read().await;
1004 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
1005 return Some(content.as_bytes().to_vec());
1006 }
1007 }
1008 match std::fs::read(file_path) {
1009 Ok(bytes) => Some(bytes),
1010 Err(e) => {
1011 if e.kind() == std::io::ErrorKind::NotFound {
1012 self.client
1015 .log_message(
1016 MessageType::INFO,
1017 format!("file not found yet (transient): {e}"),
1018 )
1019 .await;
1020 } else {
1021 self.client
1022 .log_message(MessageType::ERROR, format!("failed to read file: {e}"))
1023 .await;
1024 }
1025 None
1026 }
1027 }
1028 }
1029}
1030
/// Whether deleting a file should trigger rewriting of imports that referenced
/// it, per the user's file-operations settings.
fn update_imports_on_delete_enabled(settings: &crate::config::Settings) -> bool {
    settings.file_operations.update_imports_on_delete
}
1034
/// Queues a cache-sync request and reports whether the caller should become
/// the sync worker (i.e. it won the `running` flag).
fn start_or_mark_project_cache_sync_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Record the request first so an already-running worker will pick it up.
    pending.store(true, Ordering::Release);
    // The caller becomes the worker only if it flips `running` false -> true.
    let claimed = running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    claimed.is_ok()
}
1044
/// Atomically clears the pending-sync flag, reporting whether work was queued.
fn take_project_cache_sync_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    let had_work = pending.swap(false, Ordering::AcqRel);
    had_work
}
1048
/// Releases the sync-worker role, then reclaims it iff more work arrived.
///
/// Returns `true` when the caller should keep working (it re-won `running`
/// after observing a pending request), `false` when it may stop.
fn stop_project_cache_sync_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Drop the worker slot first so another thread may take over.
    running.store(false, Ordering::Release);
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1059
/// Atomically claims the dirty flag: clears it and returns `true` only when
/// it was set, so exactly one caller wins each dirty cycle.
fn try_claim_project_cache_dirty(dirty: &std::sync::atomic::AtomicBool) -> bool {
    let claim = dirty.compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
1065
/// Queues an upsert request and reports whether the caller should become the
/// upsert worker (same protocol as the sync-worker variant).
fn start_or_mark_project_cache_upsert_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Always record the request so an active worker will see it.
    pending.store(true, Ordering::Release);
    matches!(
        running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire),
        Ok(_)
    )
}
1075
/// Atomically consumes the pending-upsert flag, returning its prior value.
fn take_project_cache_upsert_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    // Equivalent to `swap(false)`: succeeds (and clears) only when it was set.
    pending
        .compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1079
/// Releases the upsert-worker role, then reclaims it iff more work arrived.
///
/// Returns `true` when the caller should keep working, `false` when it may
/// stop (no pending request, or another thread won the reclaim race).
fn stop_project_cache_upsert_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Give up the worker role before checking for late-arriving requests.
    running.store(false, Ordering::Release);
    match pending.load(Ordering::Acquire) {
        false => false,
        true => running
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_ok(),
    }
}
1090
/// Lexically normalizes `path`: drops `.` segments and resolves `..` against
/// the preceding component, without touching the filesystem.
///
/// Note: `..` at the root is silently dropped (`PathBuf::pop` on an empty
/// buffer is a no-op), which is the expected behavior for purely lexical
/// resolution.
fn lexical_normalize(path: &Path) -> PathBuf {
    let mut out = PathBuf::new();
    for comp in path.components() {
        match comp {
            Component::CurDir => {}
            Component::ParentDir => {
                out.pop();
            }
            Component::RootDir => out.push(comp.as_os_str()),
            Component::Prefix(_) => out.push(comp.as_os_str()),
            Component::Normal(seg) => out.push(seg),
        }
    }
    out
}

/// Resolves a Solidity `import` spec to an absolute, lexically normalized path.
///
/// Resolution order follows solc semantics:
/// 1. `./` / `../` specs are resolved relative to the importing file;
/// 2. otherwise remappings of the form `prefix=target` are applied, choosing
///    the remapping with the LONGEST matching prefix (solc's documented rule;
///    the previous implementation took the first match in list order), with
///    the target joined onto `project_root`;
/// 3. otherwise the spec is joined directly onto `project_root`.
fn resolve_import_spec_to_abs(
    project_root: &Path,
    importer_abs: &Path,
    import_path: &str,
    remappings: &[String],
) -> Option<PathBuf> {
    if import_path.starts_with("./") || import_path.starts_with("../") {
        let base = importer_abs.parent()?;
        return Some(lexical_normalize(&base.join(import_path)));
    }

    // Longest matching prefix wins; malformed/empty remappings are skipped.
    let best = remappings
        .iter()
        .filter_map(|remap| {
            let (prefix, target) = remap.split_once('=')?;
            if prefix.is_empty() || target.is_empty() {
                return None;
            }
            import_path
                .strip_prefix(prefix)
                .map(|suffix| (prefix.len(), target, suffix))
        })
        .max_by_key(|(prefix_len, _, _)| *prefix_len);
    if let Some((_, target, suffix)) = best {
        return Some(lexical_normalize(
            &project_root.join(format!("{target}{suffix}")),
        ));
    }

    Some(lexical_normalize(&project_root.join(import_path)))
}
1135
1136fn compute_reverse_import_closure(
1137 config: &FoundryConfig,
1138 changed_abs: &[PathBuf],
1139 remappings: &[String],
1140) -> HashSet<PathBuf> {
1141 let source_files = crate::solc::discover_source_files(config);
1142 let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
1143
1144 for importer in &source_files {
1145 let Ok(bytes) = std::fs::read(importer) else {
1146 continue;
1147 };
1148 for imp in links::ts_find_imports(&bytes) {
1149 let Some(imported_abs) =
1150 resolve_import_spec_to_abs(&config.root, importer, &imp.path, remappings)
1151 else {
1152 continue;
1153 };
1154 if !imported_abs.starts_with(&config.root) {
1155 continue;
1156 }
1157 reverse_edges
1158 .entry(imported_abs)
1159 .or_default()
1160 .insert(importer.clone());
1161 }
1162 }
1163
1164 let mut affected: HashSet<PathBuf> = HashSet::new();
1165 let mut queue: std::collections::VecDeque<PathBuf> = std::collections::VecDeque::new();
1166
1167 for path in changed_abs {
1168 if !path.starts_with(&config.root) {
1169 continue;
1170 }
1171 let normalized = lexical_normalize(path);
1172 if affected.insert(normalized.clone()) {
1173 queue.push_back(normalized);
1174 }
1175 }
1176
1177 while let Some(current) = queue.pop_front() {
1178 if let Some(importers) = reverse_edges.get(¤t) {
1179 for importer in importers {
1180 if affected.insert(importer.clone()) {
1181 queue.push_back(importer.clone());
1182 }
1183 }
1184 }
1185 }
1186
1187 let source_set: HashSet<PathBuf> = source_files.into_iter().collect();
1189 affected
1190 .into_iter()
1191 .filter(|p| source_set.contains(p) && p.is_file())
1192 .collect()
1193}
1194
/// Extracts the trailing file-id component of a `start:length:file` src
/// string. Returns `None` when the final segment is empty.
fn src_file_id(src: &str) -> Option<&str> {
    match src.rfind(':') {
        Some(pos) => {
            let id = &src[pos + 1..];
            (!id.is_empty()).then_some(id)
        }
        // No separator: the whole (non-empty) string is the id.
        None => (!src.is_empty()).then_some(src),
    }
}

/// Rewrites the file-id portion of `src` through `id_remap`, leaving the
/// offset/length prefix untouched. Unmapped or already-correct ids pass
/// through unchanged.
fn remap_src_file_id(src: &str, id_remap: &HashMap<String, String>) -> String {
    let replacement = src_file_id(src)
        .and_then(|old| id_remap.get(old).map(|new| (old, new)))
        .filter(|(old, new)| new.as_str() != *old);
    match replacement {
        Some((old, new)) => {
            // Keep everything up to the old id, then append the new id.
            let keep = src.len().saturating_sub(old.len());
            format!("{}{}", &src[..keep], new)
        }
        None => src.to_string(),
    }
}
1212
1213fn remap_node_info_file_ids(info: &mut goto::NodeInfo, id_remap: &HashMap<String, String>) {
1214 info.src = remap_src_file_id(&info.src, id_remap);
1215 if let Some(loc) = info.name_location.as_mut() {
1216 *loc = remap_src_file_id(loc, id_remap);
1217 }
1218 for loc in &mut info.name_locations {
1219 *loc = remap_src_file_id(loc, id_remap);
1220 }
1221 if let Some(loc) = info.member_location.as_mut() {
1222 *loc = remap_src_file_id(loc, id_remap);
1223 }
1224}
1225
1226fn doc_key_path(key: &hover::DocKey) -> Option<&str> {
1227 match key {
1228 hover::DocKey::Contract(k) | hover::DocKey::StateVar(k) | hover::DocKey::Method(k) => {
1229 k.split_once(':').map(|(path, _)| path)
1230 }
1231 hover::DocKey::Func(_) | hover::DocKey::Event(_) => None,
1232 }
1233}
1234
/// Merges a scoped (partial) rebuild into an existing full project build.
///
/// `scoped` covers only the files it recompiled (`scoped.nodes` keys). The
/// merge first evicts every index entry belonging to those files from
/// `existing`, then splices in the scoped data, remapping the scoped
/// build's solc file-ids onto the ids the existing build already uses for
/// the same paths.
///
/// Returns the number of affected source paths on success, or an error if a
/// declaration-id collision is detected in a file *outside* the affected
/// set — in that case `existing` may be partially unusable and the caller
/// should fall back to a full rebuild (the collision check runs before any
/// mutation, so in practice `existing` is untouched on `Err`).
fn merge_scoped_cached_build(
    existing: &mut goto::CachedBuild,
    mut scoped: goto::CachedBuild,
) -> Result<usize, String> {
    // Paths are keyed two ways: `nodes` uses project-relative paths,
    // `node_id_to_source_path` uses absolute paths — track both sets.
    let affected_paths: HashSet<String> = scoped.nodes.keys().cloned().collect();
    if affected_paths.is_empty() {
        return Ok(0);
    }
    let affected_abs_paths: HashSet<String> = scoped.path_to_abs.values().cloned().collect();

    // Safety gate before mutating anything: a scoped decl id that already
    // exists in an UNAFFECTED file means solc reused node ids across
    // compilations and a blind extend would corrupt the index.
    for scoped_id in scoped.decl_index.keys() {
        if existing.decl_index.contains_key(scoped_id)
            && let Some(path) = existing.node_id_to_source_path.get(scoped_id)
            && !affected_abs_paths.contains(path)
        {
            return Err(format!(
                "decl id collision for id={} in unaffected path {}",
                scoped_id, path
            ));
        }
    }

    // Build path -> canonical-file-id from the existing build. If a path has
    // several ids, the first one seen wins (or_insert_with keeps it).
    let mut path_to_existing_id: HashMap<String, String> = HashMap::new();
    for (id, path) in &existing.id_to_path_map {
        path_to_existing_id
            .entry(path.clone())
            .or_insert_with(|| id.clone());
    }
    // Fresh ids for paths the existing build has never seen: numeric,
    // starting just past the largest numeric id already in use.
    let mut used_ids: HashSet<String> = existing.id_to_path_map.keys().cloned().collect();
    let mut next_id = used_ids
        .iter()
        .filter_map(|k| k.parse::<u64>().ok())
        .max()
        .unwrap_or(0)
        .saturating_add(1);

    // scoped-file-id -> canonical-file-id for every file in the scoped build.
    let mut id_remap: HashMap<String, String> = HashMap::new();
    for (scoped_id, path) in &scoped.id_to_path_map {
        let canonical = if let Some(id) = path_to_existing_id.get(path) {
            id.clone()
        } else {
            // Loop guards against non-numeric ids already occupying a
            // candidate slot.
            let id = loop {
                let candidate = next_id.to_string();
                next_id = next_id.saturating_add(1);
                if used_ids.insert(candidate.clone()) {
                    break candidate;
                }
            };
            path_to_existing_id.insert(path.clone(), id.clone());
            id
        };
        id_remap.insert(scoped_id.clone(), canonical);
    }

    // Rewrite every src-location embedded in the scoped node data so it
    // references canonical file-ids.
    for file_nodes in scoped.nodes.values_mut() {
        for info in file_nodes.values_mut() {
            remap_node_info_file_ids(info, &id_remap);
        }
    }
    let scoped_external_refs: HashMap<String, crate::types::NodeId> = scoped
        .external_refs
        .into_iter()
        .map(|(src, decl_id)| (remap_src_file_id(&src, &id_remap), decl_id))
        .collect();

    // Evict everything keyed to an affected file. external_refs entries whose
    // file-id can't be resolved are conservatively kept (unwrap_or(true)).
    let old_id_to_path = existing.id_to_path_map.clone();
    existing.external_refs.retain(|src, _| {
        src_file_id(src)
            .and_then(|fid| old_id_to_path.get(fid))
            .map(|path| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing
        .nodes
        .retain(|path, _| !affected_paths.contains(path));
    existing
        .path_to_abs
        .retain(|path, _| !affected_paths.contains(path));
    existing
        .id_to_path_map
        .retain(|_, path| !affected_paths.contains(path));

    // NOTE: decl_index is pruned via node_id_to_source_path, so the
    // source-path retain must run first — order matters here.
    existing
        .node_id_to_source_path
        .retain(|_, path| !affected_abs_paths.contains(path));
    existing
        .decl_index
        .retain(|id, _| match existing.node_id_to_source_path.get(id) {
            Some(path) => !affected_abs_paths.contains(path),
            None => true,
        });
    existing
        .hint_index
        .retain(|abs_path, _| !affected_abs_paths.contains(abs_path));
    // gas_index keys look like "<path>:<rest>"; keys without a ':' are kept.
    existing.gas_index.retain(|k, _| {
        k.split_once(':')
            .map(|(path, _)| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing.doc_index.retain(|k, _| {
        doc_key_path(k)
            .map(|p| !affected_paths.contains(p))
            .unwrap_or(true)
    });

    // Splice in the (now-remapped) scoped data.
    existing.nodes.extend(scoped.nodes);
    existing.path_to_abs.extend(scoped.path_to_abs);
    existing.external_refs.extend(scoped_external_refs);
    for (old_id, path) in scoped.id_to_path_map {
        let canonical = id_remap.get(&old_id).cloned().unwrap_or(old_id);
        existing.id_to_path_map.insert(canonical, path);
    }
    existing.decl_index.extend(scoped.decl_index);
    existing
        .node_id_to_source_path
        .extend(scoped.node_id_to_source_path);
    existing.gas_index.extend(scoped.gas_index);
    existing.hint_index.extend(scoped.hint_index);
    existing.doc_index.extend(scoped.doc_index);

    Ok(affected_paths.len())
}
1360
/// Handles `textDocument/didSave` for one server instance.
///
/// Steps, in order:
/// 1. Resolve the saved text: `params.text` (server requests `include_text`),
///    else the in-memory text cache, else the file on disk.
/// 2. If the file was created from a template and saved empty, re-apply the
///    scaffold through a `workspace/applyEdit` round-trip.
/// 3. Record the saved path for both cache workers, then run the normal
///    `on_change` pipeline.
/// 4. Conditionally spawn a debounced v2 cache "upsert" worker (350 ms tick).
/// 5. Conditionally spawn a debounced project-cache sync worker (700 ms tick)
///    that does either a scoped reindex + merge or a full rebuild + persist.
///
/// Worker lifecycle: `start_or_mark_*_pending` either claims the right to
/// spawn a worker or just marks work pending for the one already running;
/// each worker loops until `stop_*_worker_or_reclaim` lets it exit.
async fn run_did_save(this: ForgeLsp, params: DidSaveTextDocumentParams) {
    this.client
        .log_message(MessageType::INFO, "file saved")
        .await;

    // (1) Resolve saved content; bail out if none of the three sources work.
    let mut text_content = if let Some(text) = params.text {
        text
    } else {
        let cached = {
            let text_cache = this.text_cache.read().await;
            text_cache
                .get(params.text_document.uri.as_str())
                .map(|(_, content)| content.clone())
        };
        if let Some(content) = cached {
            content
        } else {
            match std::fs::read_to_string(params.text_document.uri.path()) {
                Ok(content) => content,
                Err(e) => {
                    this.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Failed to read file on save: {e}"),
                        )
                        .await;
                    return;
                }
            }
        }
    };

    // (2) Scaffold recovery: only for files still pending a create-scaffold
    // whose saved content is entirely whitespace.
    let uri_str = params.text_document.uri.to_string();
    let template_on_create = this
        .settings
        .read()
        .await
        .file_operations
        .template_on_create;
    let needs_recover_scaffold = {
        let pending = this.pending_create_scaffold.read().await;
        template_on_create
            && pending.contains(&uri_str)
            && !text_content.chars().any(|ch| !ch.is_whitespace())
    };
    if needs_recover_scaffold {
        let solc_version = this.foundry_config.read().await.solc_version.clone();
        if let Some(scaffold) =
            file_operations::generate_scaffold(&params.text_document.uri, solc_version.as_deref())
        {
            // Replace the whole (whitespace-only) document with the scaffold.
            let end = utils::byte_offset_to_position(&text_content, text_content.len());
            let edit = WorkspaceEdit {
                changes: Some(HashMap::from([(
                    params.text_document.uri.clone(),
                    vec![TextEdit {
                        range: Range {
                            start: Position::default(),
                            end,
                        },
                        new_text: scaffold.clone(),
                    }],
                )])),
                document_changes: None,
                change_annotations: None,
            };
            // Only commit local state once the client confirms it applied
            // the edit; the pending flag is cleared on success so the
            // scaffold is applied at most once.
            if this
                .client
                .apply_edit(edit)
                .await
                .as_ref()
                .is_ok_and(|r| r.applied)
            {
                text_content = scaffold.clone();
                let version = this
                    .text_cache
                    .read()
                    .await
                    .get(params.text_document.uri.as_str())
                    .map(|(v, _)| *v)
                    .unwrap_or_default();
                this.text_cache
                    .write()
                    .await
                    .insert(uri_str.clone(), (version, scaffold));
                this.pending_create_scaffold.write().await.remove(&uri_str);
                this.client
                    .log_message(
                        MessageType::INFO,
                        format!("didSave: recovered scaffold for {}", uri_str),
                    )
                    .await;
            }
        }
    }

    let version = this
        .text_cache
        .read()
        .await
        .get(params.text_document.uri.as_str())
        .map(|(version, _)| *version)
        .unwrap_or_default();

    // (3) Queue the absolute path for both cache workers, then run the
    // regular change pipeline (diagnostics etc.).
    let saved_uri = params.text_document.uri.clone();
    if let Ok(saved_file_path) = saved_uri.to_file_path() {
        let saved_abs = saved_file_path.to_string_lossy().to_string();
        this.project_cache_changed_files
            .write()
            .await
            .insert(saved_abs.clone());
        this.project_cache_upsert_files
            .write()
            .await
            .insert(saved_abs);
    }
    this.on_change(TextDocumentItem {
        uri: saved_uri.clone(),
        text: text_content,
        version,
        language_id: "".to_string(),
    })
    .await;

    let settings_snapshot = this.settings.read().await.clone();

    // (4) Debounced v2 cache upsert — only with solc-backed indexing and a
    // v2-capable cache mode.
    if this.use_solc
        && settings_snapshot.project_index.full_project_scan
        && matches!(
            settings_snapshot.project_index.cache_mode,
            crate::config::ProjectIndexCacheMode::V2 | crate::config::ProjectIndexCacheMode::Auto
        )
    {
        // true => we won the race to spawn the worker; false => an existing
        // worker was just marked pending.
        if start_or_mark_project_cache_upsert_pending(
            &this.project_cache_upsert_pending,
            &this.project_cache_upsert_running,
        ) {
            let upsert_files = this.project_cache_upsert_files.clone();
            let ast_cache = this.ast_cache.clone();
            let client = this.client.clone();
            let running_flag = this.project_cache_upsert_running.clone();
            let pending_flag = this.project_cache_upsert_pending.clone();

            tokio::spawn(async move {
                loop {
                    // Debounce window: batch saves arriving within 350 ms.
                    tokio::time::sleep(std::time::Duration::from_millis(350)).await;

                    if !take_project_cache_upsert_pending(&pending_flag) {
                        // Nothing pending: exit unless new work raced in
                        // while we were deciding to stop.
                        if stop_project_cache_upsert_worker_or_reclaim(&pending_flag, &running_flag)
                        {
                            continue;
                        }
                        break;
                    }

                    let changed_paths: Vec<String> = {
                        let mut paths = upsert_files.write().await;
                        paths.drain().collect()
                    };
                    if changed_paths.is_empty() {
                        continue;
                    }

                    // Snapshot (config, build) pairs for every changed path
                    // that has a cached build covering it.
                    let mut work_items: Vec<(
                        crate::config::FoundryConfig,
                        crate::goto::CachedBuild,
                    )> = Vec::new();
                    {
                        let cache = ast_cache.read().await;
                        for abs_str in changed_paths {
                            let path = PathBuf::from(&abs_str);
                            let Ok(uri) = Url::from_file_path(&path) else {
                                continue;
                            };
                            let uri_key = uri.to_string();
                            let Some(build) = cache.get(&uri_key).cloned() else {
                                continue;
                            };
                            if !build.nodes.contains_key(&abs_str) {
                                continue;
                            }
                            let cfg = crate::config::load_foundry_config(&path);
                            work_items.push((cfg, (*build).clone()));
                        }
                    }

                    if work_items.is_empty() {
                        continue;
                    }

                    // Disk I/O + hashing happens off the async runtime.
                    let res = tokio::task::spawn_blocking(move || {
                        let mut total_files = 0usize;
                        let mut total_ms = 0u128;
                        let mut failures: Vec<String> = Vec::new();
                        for (cfg, build) in work_items {
                            match crate::project_cache::upsert_reference_cache_v2_with_report(
                                &cfg, &build,
                            ) {
                                Ok(report) => {
                                    total_files += report.file_count_hashed;
                                    total_ms += report.duration_ms;
                                }
                                Err(e) => failures.push(e),
                            }
                        }
                        (total_files, total_ms, failures)
                    })
                    .await;

                    // Upserts are best-effort: failures are logged, never
                    // retried from here.
                    match res {
                        Ok((total_files, total_ms, failures)) => {
                            if !failures.is_empty() {
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!(
                                            "project cache v2 upsert: {} failure(s), first={}",
                                            failures.len(),
                                            failures[0]
                                        ),
                                    )
                                    .await;
                            } else {
                                client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "project cache v2 upsert (debounced): touched_files={}, duration={}ms",
                                            total_files, total_ms
                                        ),
                                    )
                                    .await;
                            }
                        }
                        Err(e) => {
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("project cache v2 upsert task failed: {e}"),
                                )
                                .await;
                        }
                    }
                }
            });
        }
    }

    // (5) Debounced project-cache sync — only when the index is dirty.
    if this.use_solc
        && settings_snapshot.project_index.full_project_scan
        && this.project_cache_dirty.load(Ordering::Acquire)
    {
        if start_or_mark_project_cache_sync_pending(
            &this.project_cache_sync_pending,
            &this.project_cache_sync_running,
        ) {
            let foundry_config = this.foundry_config.read().await.clone();
            let root_key = this.project_cache_key().await;
            let ast_cache = this.ast_cache.clone();
            let text_cache = this.text_cache.clone();
            let client = this.client.clone();
            let dirty_flag = this.project_cache_dirty.clone();
            let running_flag = this.project_cache_sync_running.clone();
            let pending_flag = this.project_cache_sync_pending.clone();
            let changed_files = this.project_cache_changed_files.clone();
            let aggressive_scoped = settings_snapshot.project_index.incremental_edit_reindex;
            let force_full_rebuild_flag = this.project_cache_force_full_rebuild.clone();

            tokio::spawn(async move {
                loop {
                    // Longer debounce than the upsert worker (700 ms).
                    tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                    if !take_project_cache_sync_pending(&pending_flag) {
                        if stop_project_cache_sync_worker_or_reclaim(&pending_flag, &running_flag) {
                            continue;
                        }
                        break;
                    }

                    // Only one pass may own the dirty flag at a time; failed
                    // passes re-set it below so work is retried next save.
                    if !try_claim_project_cache_dirty(&dirty_flag) {
                        continue;
                    }

                    let Some(cache_key) = &root_key else {
                        dirty_flag.store(true, Ordering::Release);
                        continue;
                    };
                    if !foundry_config.root.is_dir() {
                        dirty_flag.store(true, Ordering::Release);
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!(
                                    "didSave cache sync: invalid project root {}, deferring",
                                    foundry_config.root.display()
                                ),
                            )
                            .await;
                        continue;
                    }

                    let mut scoped_ok = false;

                    // A one-shot request (e.g. after a rename) can force the
                    // full rebuild path; swap clears it atomically.
                    let force_full = force_full_rebuild_flag.swap(false, Ordering::AcqRel);

                    // Scoped path: reindex only the saved files plus their
                    // reverse-import closure, then merge into the full build.
                    if aggressive_scoped && !force_full {
                        let changed_abs: Vec<PathBuf> = {
                            let mut changed = changed_files.write().await;
                            let drained =
                                changed.drain().map(PathBuf::from).collect::<Vec<PathBuf>>();
                            drained
                        };
                        if !changed_abs.is_empty() {
                            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
                            let cfg_for_plan = foundry_config.clone();
                            let changed_for_plan = changed_abs.clone();
                            let remappings_for_plan = remappings.clone();
                            let plan_res = tokio::task::spawn_blocking(move || {
                                compute_reverse_import_closure(
                                    &cfg_for_plan,
                                    &changed_for_plan,
                                    &remappings_for_plan,
                                )
                            })
                            .await;

                            let affected_files = match plan_res {
                                Ok(set) => set.into_iter().collect::<Vec<PathBuf>>(),
                                Err(_) => Vec::new(),
                            };
                            if !affected_files.is_empty() {
                                client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "didSave cache sync: aggressive scoped reindex (affected={})",
                                            affected_files.len(),
                                        ),
                                    )
                                    .await;

                                let text_cache_snapshot = text_cache.read().await.clone();
                                match crate::solc::solc_project_index_scoped(
                                    &foundry_config,
                                    Some(&client),
                                    Some(&text_cache_snapshot),
                                    &affected_files,
                                )
                                .await
                                {
                                    Ok(ast_data) => {
                                        let scoped_build =
                                            Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                                        let source_count = scoped_build.nodes.len();
                                        // Local outcome enum keeps the write
                                        // lock scope small: decide under the
                                        // lock, log after releasing it.
                                        enum ScopedApply {
                                            Merged { affected_count: usize },
                                            Stored,
                                            Failed(String),
                                        }
                                        let apply_outcome = {
                                            let mut cache = ast_cache.write().await;
                                            if let Some(existing) = cache.get(cache_key).cloned() {
                                                let mut merged = (*existing).clone();
                                                match merge_scoped_cached_build(
                                                    &mut merged,
                                                    (*scoped_build).clone(),
                                                ) {
                                                    Ok(affected_count) => {
                                                        cache.insert(
                                                            cache_key.clone(),
                                                            Arc::new(merged),
                                                        );
                                                        ScopedApply::Merged { affected_count }
                                                    }
                                                    Err(e) => ScopedApply::Failed(e),
                                                }
                                            } else {
                                                // No full build yet: the
                                                // scoped build becomes it.
                                                cache.insert(cache_key.clone(), scoped_build);
                                                ScopedApply::Stored
                                            }
                                        };

                                        match apply_outcome {
                                            ScopedApply::Merged { affected_count } => {
                                                client
                                                    .log_message(
                                                        MessageType::INFO,
                                                        format!(
                                                            "didSave cache sync: scoped merge applied (scoped_sources={}, affected_paths={})",
                                                            source_count, affected_count
                                                        ),
                                                    )
                                                    .await;
                                                scoped_ok = true;
                                            }
                                            ScopedApply::Stored => {
                                                client
                                                    .log_message(
                                                        MessageType::INFO,
                                                        format!(
                                                            "didSave cache sync: scoped cache stored (scoped_sources={})",
                                                            source_count
                                                        ),
                                                    )
                                                    .await;
                                                scoped_ok = true;
                                            }
                                            ScopedApply::Failed(e) => {
                                                client
                                                    .log_message(
                                                        MessageType::WARNING,
                                                        format!(
                                                            "didSave cache sync: scoped merge rejected, will retry scoped on next save: {e}"
                                                        ),
                                                    )
                                                    .await;
                                                // Re-mark dirty so the next
                                                // save retries.
                                                dirty_flag.store(true, Ordering::Release);
                                            }
                                        }
                                    }
                                    Err(e) => {
                                        client
                                            .log_message(
                                                MessageType::WARNING,
                                                format!(
                                                    "didSave cache sync: scoped reindex failed, will retry scoped on next save: {e}"
                                                ),
                                            )
                                            .await;
                                        dirty_flag.store(true, Ordering::Release);
                                    }
                                }
                            } else {
                                client
                                    .log_message(
                                        MessageType::INFO,
                                        "didSave cache sync: no affected files from scoped planner",
                                    )
                                    .await;
                            }
                        }
                    }

                    if scoped_ok {
                        continue;
                    }
                    // In aggressive mode we never fall through to a full
                    // rebuild from this worker; failures retry scoped.
                    if aggressive_scoped {
                        continue;
                    }

                    // Full path: rebuild the whole project index from disk,
                    // merge anything missing from the previous build, then
                    // persist to the on-disk reference cache.
                    client
                        .log_message(
                            MessageType::INFO,
                            "didSave cache sync: rebuilding project index from disk",
                        )
                        .await;

                    match crate::solc::solc_project_index(&foundry_config, Some(&client), None)
                        .await
                    {
                        Ok(ast_data) => {
                            let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
                            if let Some(prev) = ast_cache.read().await.get(cache_key) {
                                new_build.merge_missing_from(prev);
                            }
                            let source_count = new_build.nodes.len();
                            let cached_build = Arc::new(new_build);
                            let build_for_save = (*cached_build).clone();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), cached_build);

                            let cfg_for_save = foundry_config.clone();
                            let save_res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;

                            match save_res {
                                Ok(Ok(report)) => {
                                    // Persisted successfully: drop the queued
                                    // changed paths, they are now covered.
                                    changed_files.write().await.clear();
                                    client
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "didSave cache sync: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                source_count, report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "didSave cache sync: persist failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "didSave cache sync: save task failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }
                        }
                        Err(e) => {
                            dirty_flag.store(true, Ordering::Release);
                            client
                                .log_message(
                                    MessageType::WARNING,
                                    format!("didSave cache sync: re-index failed, will retry: {e}"),
                                )
                                .await;
                        }
                    }
                }
            });
        }
    }
}
1914
1915#[tower_lsp::async_trait]
1916impl LanguageServer for ForgeLsp {
    /// LSP `initialize` handler.
    ///
    /// Records the client's capabilities, parses `initialization_options`
    /// into server settings, captures the workspace root, loads lint and
    /// Foundry configuration from that root, negotiates the position
    /// encoding, and returns the server's capability advertisement.
    async fn initialize(
        &self,
        params: InitializeParams,
    ) -> tower_lsp::jsonrpc::Result<InitializeResult> {
        // Stash client capabilities for later feature checks (scoped block
        // so the write lock is released immediately).
        {
            let mut caps = self.client_capabilities.write().await;
            *caps = Some(params.capabilities.clone());
        }

        // Settings may arrive via initialization_options; remember that so
        // `initialized` skips the workspace/configuration fallback.
        if let Some(init_opts) = &params.initialization_options {
            let s = config::parse_settings(init_opts);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "settings: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
                        s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
                    ),
                )
                .await;
            let mut settings = self.settings.write().await;
            *settings = s;
            self.settings_from_init
                .store(true, std::sync::atomic::Ordering::Relaxed);
        }

        if let Some(uri) = params.root_uri.as_ref() {
            let mut root = self.root_uri.write().await;
            *root = Some(uri.clone());
        }

        // Load foundry.toml-derived config when the root maps to a local
        // file path (non-file schemes are skipped).
        if let Some(root_uri) = params
            .root_uri
            .as_ref()
            .and_then(|uri| uri.to_file_path().ok())
        {
            let lint_cfg = config::load_lint_config(&root_uri);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "loaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                        lint_cfg.lint_on_build,
                        lint_cfg.ignore_patterns.len()
                    ),
                )
                .await;
            let mut config = self.lint_config.write().await;
            *config = lint_cfg;

            let foundry_cfg = config::load_foundry_config(&root_uri);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!(
                        "loaded foundry.toml project config: solc_version={:?}, remappings={}",
                        foundry_cfg.solc_version,
                        foundry_cfg.remappings.len()
                    ),
                )
                .await;
            if foundry_cfg.via_ir {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
                    )
                    .await;
            }
            let mut fc = self.foundry_config.write().await;
            *fc = foundry_cfg;
        }

        // Negotiate UTF-8/UTF-16 position encoding from the client's
        // advertised list and store it process-wide.
        let client_encodings = params
            .capabilities
            .general
            .as_ref()
            .and_then(|g| g.position_encodings.as_deref());
        let encoding = utils::PositionEncoding::negotiate(client_encodings);
        utils::set_encoding(encoding);

        Ok(InitializeResult {
            server_info: Some(ServerInfo {
                name: "Solidity Language Server".to_string(),
                version: Some(env!("LONG_VERSION").to_string()),
            }),
            capabilities: ServerCapabilities {
                position_encoding: Some(encoding.into()),
                // "." for members, quote chars and "/" for import paths.
                completion_provider: Some(CompletionOptions {
                    trigger_characters: Some(vec![
                        ".".to_string(),
                        "\"".to_string(),
                        "'".to_string(),
                        "/".to_string(),
                    ]),
                    resolve_provider: Some(false),
                    ..Default::default()
                }),
                signature_help_provider: Some(SignatureHelpOptions {
                    trigger_characters: Some(vec![
                        "(".to_string(),
                        ",".to_string(),
                        "[".to_string(),
                    ]),
                    retrigger_characters: None,
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                definition_provider: Some(OneOf::Left(true)),
                declaration_provider: Some(DeclarationCapability::Simple(true)),
                references_provider: Some(OneOf::Left(true)),
                // prepare_provider lets the client validate rename targets
                // before prompting for a new name.
                rename_provider: Some(OneOf::Right(RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: Some(true),
                    },
                })),
                workspace_symbol_provider: Some(OneOf::Left(true)),
                document_symbol_provider: Some(OneOf::Left(true)),
                document_highlight_provider: Some(OneOf::Left(true)),
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                document_link_provider: Some(DocumentLinkOptions {
                    resolve_provider: Some(false),
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                document_formatting_provider: Some(OneOf::Left(true)),
                code_action_provider: Some(CodeActionProviderCapability::Options(
                    CodeActionOptions {
                        code_action_kinds: Some(vec![CodeActionKind::QUICKFIX]),
                        resolve_provider: Some(false),
                        work_done_progress_options: WorkDoneProgressOptions {
                            work_done_progress: None,
                        },
                    },
                )),
                code_lens_provider: None,
                folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
                selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
                inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
                    InlayHintOptions {
                        resolve_provider: Some(false),
                        work_done_progress_options: WorkDoneProgressOptions {
                            work_done_progress: None,
                        },
                    },
                ))),
                // Full + delta semantic tokens, plus ranged requests.
                semantic_tokens_provider: Some(
                    SemanticTokensServerCapabilities::SemanticTokensOptions(
                        SemanticTokensOptions {
                            legend: semantic_tokens::legend(),
                            full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            range: Some(true),
                            work_done_progress_options: WorkDoneProgressOptions {
                                work_done_progress: None,
                            },
                        },
                    ),
                ),
                // FULL sync: the client sends the whole document per change;
                // include_text makes didSave carry the saved content too.
                text_document_sync: Some(TextDocumentSyncCapability::Options(
                    TextDocumentSyncOptions {
                        will_save: Some(true),
                        will_save_wait_until: None,
                        open_close: Some(true),
                        save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
                            include_text: Some(true),
                        })),
                        change: Some(TextDocumentSyncKind::FULL),
                    },
                )),
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: None,
                    // File-operation notifications: rename/delete watch both
                    // .sol files and folders (folder moves can relocate many
                    // sources); create only watches .sol files.
                    file_operations: Some(WorkspaceFileOperationsServerCapabilities {
                        will_rename: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        did_rename: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        will_delete: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        did_delete: Some(FileOperationRegistrationOptions {
                            filters: vec![
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**/*.sol".to_string(),
                                        matches: Some(FileOperationPatternKind::File),
                                        options: None,
                                    },
                                },
                                FileOperationFilter {
                                    scheme: Some("file".to_string()),
                                    pattern: FileOperationPattern {
                                        glob: "**".to_string(),
                                        matches: Some(FileOperationPatternKind::Folder),
                                        options: None,
                                    },
                                },
                            ],
                        }),
                        will_create: Some(FileOperationRegistrationOptions {
                            filters: vec![FileOperationFilter {
                                scheme: Some("file".to_string()),
                                pattern: FileOperationPattern {
                                    glob: "**/*.sol".to_string(),
                                    matches: Some(FileOperationPatternKind::File),
                                    options: None,
                                },
                            }],
                        }),
                        did_create: Some(FileOperationRegistrationOptions {
                            filters: vec![FileOperationFilter {
                                scheme: Some("file".to_string()),
                                pattern: FileOperationPattern {
                                    glob: "**/*.sol".to_string(),
                                    matches: Some(FileOperationPatternKind::File),
                                    options: None,
                                },
                            }],
                        }),
                        ..Default::default()
                    }),
                }),
                execute_command_provider: Some(ExecuteCommandOptions {
                    commands: vec![
                        "solidity.clearCache".to_string(),
                        "solidity.reindex".to_string(),
                    ],
                    work_done_progress_options: WorkDoneProgressOptions {
                        work_done_progress: None,
                    },
                }),
                ..ServerCapabilities::default()
            },
        })
    }
2215
2216 async fn initialized(&self, _: InitializedParams) {
2217 self.client
2218 .log_message(MessageType::INFO, "lsp server initialized.")
2219 .await;
2220
2221 let supports_dynamic = self
2223 .client_capabilities
2224 .read()
2225 .await
2226 .as_ref()
2227 .and_then(|caps| caps.workspace.as_ref())
2228 .and_then(|ws| ws.did_change_watched_files.as_ref())
2229 .and_then(|dcwf| dcwf.dynamic_registration)
2230 .unwrap_or(false);
2231
2232 if supports_dynamic {
2233 let registration = Registration {
2234 id: "foundry-toml-watcher".to_string(),
2235 method: "workspace/didChangeWatchedFiles".to_string(),
2236 register_options: Some(
2237 serde_json::to_value(DidChangeWatchedFilesRegistrationOptions {
2238 watchers: vec![
2239 FileSystemWatcher {
2240 glob_pattern: GlobPattern::String("**/foundry.toml".to_string()),
2241 kind: Some(WatchKind::all()),
2242 },
2243 FileSystemWatcher {
2244 glob_pattern: GlobPattern::String("**/remappings.txt".to_string()),
2245 kind: Some(WatchKind::all()),
2246 },
2247 ],
2248 })
2249 .unwrap(),
2250 ),
2251 };
2252
2253 if let Err(e) = self.client.register_capability(vec![registration]).await {
2254 self.client
2255 .log_message(
2256 MessageType::WARNING,
2257 format!("failed to register foundry.toml watcher: {e}"),
2258 )
2259 .await;
2260 } else {
2261 self.client
2262 .log_message(MessageType::INFO, "registered foundry.toml file watcher")
2263 .await;
2264 }
2265 }
2266
2267 if !self
2272 .settings_from_init
2273 .load(std::sync::atomic::Ordering::Relaxed)
2274 {
2275 let supports_config = self
2276 .client_capabilities
2277 .read()
2278 .await
2279 .as_ref()
2280 .and_then(|caps| caps.workspace.as_ref())
2281 .and_then(|ws| ws.configuration)
2282 .unwrap_or(false);
2283
2284 if supports_config {
2285 match self
2286 .client
2287 .configuration(vec![ConfigurationItem {
2288 scope_uri: None,
2289 section: Some("solidity-language-server".to_string()),
2290 }])
2291 .await
2292 {
2293 Ok(values) => {
2294 if let Some(val) = values.into_iter().next() {
2295 if !val.is_null() {
2296 let s = config::parse_settings(&val);
2297 self.client
2298 .log_message(
2299 MessageType::INFO,
2300 format!(
2301 "settings (workspace/configuration): lint.enabled={}, lint.exclude={:?}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}",
2302 s.lint.enabled, s.lint.exclude, s.project_index.full_project_scan, s.project_index.cache_mode,
2303 ),
2304 )
2305 .await;
2306 let mut settings = self.settings.write().await;
2307 *settings = s;
2308 }
2309 }
2310 }
2311 Err(e) => {
2312 self.client
2313 .log_message(
2314 MessageType::WARNING,
2315 format!("workspace/configuration request failed: {e}"),
2316 )
2317 .await;
2318 }
2319 }
2320 }
2321 }
2322
2323 if self.use_solc && self.settings.read().await.project_index.full_project_scan {
2327 let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
2328 self.project_indexed
2329 .store(true, std::sync::atomic::Ordering::Relaxed);
2330 let foundry_config = self.foundry_config.read().await.clone();
2331 let cache_key = self.project_cache_key().await;
2332 let ast_cache = self.ast_cache.clone();
2333 let client = self.client.clone();
2334
2335 tokio::spawn(async move {
2336 let Some(cache_key) = cache_key else {
2337 return;
2338 };
2339 if !foundry_config.root.is_dir() {
2340 client
2341 .log_message(
2342 MessageType::INFO,
2343 format!(
2344 "project index: {} not found, skipping eager index",
2345 foundry_config.root.display(),
2346 ),
2347 )
2348 .await;
2349 return;
2350 }
2351
2352 let token = NumberOrString::String("solidity/projectIndex".to_string());
2353 let _ = client
2354 .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
2355 token: token.clone(),
2356 })
2357 .await;
2358
2359 client
2360 .send_notification::<notification::Progress>(ProgressParams {
2361 token: token.clone(),
2362 value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
2363 WorkDoneProgressBegin {
2364 title: "Indexing project".to_string(),
2365 message: Some("Discovering source files...".to_string()),
2366 cancellable: Some(false),
2367 percentage: None,
2368 },
2369 )),
2370 })
2371 .await;
2372
2373 let cfg_for_load = foundry_config.clone();
2375 let cache_mode_for_load = cache_mode.clone();
2376 let load_res = tokio::task::spawn_blocking(move || {
2377 crate::project_cache::load_reference_cache_with_report(
2378 &cfg_for_load,
2379 cache_mode_for_load,
2380 true,
2381 )
2382 })
2383 .await;
2384 match load_res {
2385 Ok(report) => {
2386 if let Some(cached_build) = report.build {
2387 let source_count = cached_build.nodes.len();
2388 ast_cache
2389 .write()
2390 .await
2391 .insert(cache_key.clone(), Arc::new(cached_build));
2392 client
2393 .log_message(
2394 MessageType::INFO,
2395 format!(
2396 "project index (eager): cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
2397 source_count,
2398 report.file_count_reused,
2399 report.file_count_hashed,
2400 report.complete,
2401 report.duration_ms
2402 ),
2403 )
2404 .await;
2405 if report.complete {
2406 client
2407 .send_notification::<notification::Progress>(ProgressParams {
2408 token: token.clone(),
2409 value: ProgressParamsValue::WorkDone(
2410 WorkDoneProgress::End(WorkDoneProgressEnd {
2411 message: Some(format!(
2412 "Loaded {} source files from cache",
2413 source_count
2414 )),
2415 }),
2416 ),
2417 })
2418 .await;
2419 return;
2420 }
2421 }
2422
2423 client
2424 .log_message(
2425 MessageType::INFO,
2426 format!(
2427 "project index (eager): cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
2428 report
2429 .miss_reason
2430 .unwrap_or_else(|| "unknown".to_string()),
2431 report.file_count_reused,
2432 report.file_count_hashed,
2433 report.duration_ms
2434 ),
2435 )
2436 .await;
2437 }
2438 Err(e) => {
2439 client
2440 .log_message(
2441 MessageType::WARNING,
2442 format!("project index (eager): cache load task failed: {e}"),
2443 )
2444 .await;
2445 }
2446 }
2447
2448 match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
2449 Ok(ast_data) => {
2450 let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
2451 if let Some(prev) = ast_cache.read().await.get(&cache_key) {
2452 new_build.merge_missing_from(prev);
2453 }
2454 let source_count = new_build.nodes.len();
2455 let cached_build = Arc::new(new_build);
2456 let build_for_save = (*cached_build).clone();
2457 ast_cache
2458 .write()
2459 .await
2460 .insert(cache_key.clone(), cached_build);
2461 client
2462 .log_message(
2463 MessageType::INFO,
2464 format!(
2465 "project index (eager): cached {} source files",
2466 source_count
2467 ),
2468 )
2469 .await;
2470
2471 let cfg_for_save = foundry_config.clone();
2472 let client_for_save = client.clone();
2473 tokio::spawn(async move {
2474 let res = tokio::task::spawn_blocking(move || {
2475 crate::project_cache::save_reference_cache_with_report(
2476 &cfg_for_save,
2477 &build_for_save,
2478 None,
2479 )
2480 })
2481 .await;
2482 match res {
2483 Ok(Ok(report)) => {
2484 client_for_save
2485 .log_message(
2486 MessageType::INFO,
2487 format!(
2488 "project index (eager): cache save complete (hashed_files={}, duration={}ms)",
2489 report.file_count_hashed, report.duration_ms
2490 ),
2491 )
2492 .await;
2493 }
2494 Ok(Err(e)) => {
2495 client_for_save
2496 .log_message(
2497 MessageType::WARNING,
2498 format!(
2499 "project index (eager): failed to persist cache: {e}"
2500 ),
2501 )
2502 .await;
2503 }
2504 Err(e) => {
2505 client_for_save
2506 .log_message(
2507 MessageType::WARNING,
2508 format!(
2509 "project index (eager): cache save task failed: {e}"
2510 ),
2511 )
2512 .await;
2513 }
2514 }
2515 });
2516
2517 client
2518 .send_notification::<notification::Progress>(ProgressParams {
2519 token: token.clone(),
2520 value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
2521 WorkDoneProgressEnd {
2522 message: Some(format!(
2523 "Indexed {} source files",
2524 source_count
2525 )),
2526 },
2527 )),
2528 })
2529 .await;
2530 }
2531 Err(e) => {
2532 client
2533 .log_message(
2534 MessageType::WARNING,
2535 format!("project index (eager): failed: {e}"),
2536 )
2537 .await;
2538
2539 client
2540 .send_notification::<notification::Progress>(ProgressParams {
2541 token,
2542 value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
2543 WorkDoneProgressEnd {
2544 message: Some(format!("Index failed: {e}")),
2545 },
2546 )),
2547 })
2548 .await;
2549 }
2550 }
2551 });
2552 }
2553 }
2554
    /// Handles `workspace/executeCommand`.
    ///
    /// Supported commands:
    /// - `solidity.clearCache`: removes the on-disk project cache directory and
    ///   evicts the in-memory project-wide AST build.
    /// - `solidity.reindex`: evicts the in-memory build, marks the project cache
    ///   dirty, and (if no sync worker is running) spawns a background worker
    ///   that rebuilds the full index and persists it to disk.
    ///
    /// Any other command returns `method_not_found`.
    async fn execute_command(
        &self,
        params: ExecuteCommandParams,
    ) -> tower_lsp::jsonrpc::Result<Option<serde_json::Value>> {
        match params.command.as_str() {
            "solidity.clearCache" => {
                let root = self.foundry_config.read().await.root.clone();
                let cache_dir = crate::project_cache::cache_dir(&root);

                // Remove the on-disk cache; a directory that does not exist
                // counts as success.
                let disk_result = if cache_dir.exists() {
                    std::fs::remove_dir_all(&cache_dir).map_err(|e| format!("{e}"))
                } else {
                    Ok(())
                };

                // Evict the in-memory project build regardless of the disk outcome,
                // so memory and disk never disagree after this command.
                if let Some(root_key) = self.project_cache_key().await {
                    self.ast_cache.write().await.remove(&root_key);
                }

                match disk_result {
                    Ok(()) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "solidity.clearCache: removed {} and cleared in-memory cache",
                                    cache_dir.display()
                                ),
                            )
                            .await;
                        Ok(Some(serde_json::json!({ "success": true })))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::ERROR,
                                format!("solidity.clearCache: failed to remove cache dir: {e}"),
                            )
                            .await;
                        Err(tower_lsp::jsonrpc::Error {
                            code: tower_lsp::jsonrpc::ErrorCode::InternalError,
                            message: std::borrow::Cow::Owned(e),
                            data: None,
                        })
                    }
                }
            }

            "solidity.reindex" => {
                // Drop the cached project build and flag a full rebuild for the
                // background cache-sync machinery.
                if let Some(root_key) = self.project_cache_key().await {
                    self.ast_cache.write().await.remove(&root_key);
                }
                self.project_cache_dirty
                    .store(true, std::sync::atomic::Ordering::Relaxed);
                self.project_cache_force_full_rebuild
                    .store(true, std::sync::atomic::Ordering::Release);

                // Returns true only when no sync worker is currently running;
                // otherwise the pending flag tells the live worker to run again.
                if start_or_mark_project_cache_sync_pending(
                    &self.project_cache_sync_pending,
                    &self.project_cache_sync_running,
                ) {
                    // Snapshot everything the detached worker needs.
                    let foundry_config = self.foundry_config.read().await.clone();
                    let root_key = self.project_cache_key().await;
                    let ast_cache = self.ast_cache.clone();
                    let client = self.client.clone();
                    let dirty_flag = self.project_cache_dirty.clone();
                    let running_flag = self.project_cache_sync_running.clone();
                    let pending_flag = self.project_cache_sync_pending.clone();
                    let changed_files = self.project_cache_changed_files.clone();
                    let force_full_rebuild_flag = self.project_cache_force_full_rebuild.clone();

                    tokio::spawn(async move {
                        loop {
                            // Debounce so bursts of reindex requests coalesce
                            // into one rebuild pass.
                            tokio::time::sleep(std::time::Duration::from_millis(700)).await;

                            if !take_project_cache_sync_pending(&pending_flag) {
                                // Nothing pending: try to retire the worker; if a
                                // request raced in during shutdown, keep looping.
                                if stop_project_cache_sync_worker_or_reclaim(
                                    &pending_flag,
                                    &running_flag,
                                ) {
                                    continue;
                                }
                                break;
                            }

                            // Only one pass may consume the dirty flag.
                            if !try_claim_project_cache_dirty(&dirty_flag) {
                                continue;
                            }

                            // On any failure below the dirty flag is restored so a
                            // later pass retries the work.
                            let Some(cache_key) = &root_key else {
                                dirty_flag.store(true, Ordering::Release);
                                continue;
                            };
                            if !foundry_config.root.is_dir() {
                                dirty_flag.store(true, Ordering::Release);
                                client
                                    .log_message(
                                        MessageType::WARNING,
                                        format!(
                                            "solidity.reindex cache sync: invalid project root {}, deferring",
                                            foundry_config.root.display()
                                        ),
                                    )
                                    .await;
                                continue;
                            }

                            client
                                .log_message(
                                    MessageType::INFO,
                                    "solidity.reindex: rebuilding project index from disk",
                                )
                                .await;

                            match crate::solc::solc_project_index(
                                &foundry_config,
                                Some(&client),
                                None,
                            )
                            .await
                            {
                                Ok(ast_data) => {
                                    let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
                                    // Carry over entries the fresh index did not
                                    // produce, rather than losing them.
                                    if let Some(prev) = ast_cache.read().await.get(cache_key) {
                                        new_build.merge_missing_from(prev);
                                    }
                                    let source_count = new_build.nodes.len();
                                    let cached_build = Arc::new(new_build);
                                    let build_for_save = (*cached_build).clone();
                                    ast_cache
                                        .write()
                                        .await
                                        .insert(cache_key.clone(), cached_build);

                                    let cfg_for_save = foundry_config.clone();
                                    // Persisting hashes files on disk; keep it off
                                    // the async executor.
                                    let save_res = tokio::task::spawn_blocking(move || {
                                        crate::project_cache::save_reference_cache_with_report(
                                            &cfg_for_save,
                                            &build_for_save,
                                            None,
                                        )
                                    })
                                    .await;

                                    match save_res {
                                        Ok(Ok(report)) => {
                                            // The persisted snapshot supersedes all
                                            // previously recorded file changes.
                                            changed_files.write().await.clear();
                                            force_full_rebuild_flag.store(false, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::INFO,
                                                    format!(
                                                        "solidity.reindex: persisted cache (sources={}, hashed_files={}, duration={}ms)",
                                                        source_count, report.file_count_hashed, report.duration_ms
                                                    ),
                                                )
                                                .await;
                                        }
                                        Ok(Err(e)) => {
                                            // Persist failed: re-mark dirty to retry.
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: persist failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                        Err(e) => {
                                            // spawn_blocking join error (panic/cancel).
                                            dirty_flag.store(true, Ordering::Release);
                                            client
                                                .log_message(
                                                    MessageType::WARNING,
                                                    format!(
                                                        "solidity.reindex: save task failed, will retry: {e}"
                                                    ),
                                                )
                                                .await;
                                        }
                                    }
                                }
                                Err(e) => {
                                    // Indexing failed entirely: re-mark dirty to retry.
                                    dirty_flag.store(true, Ordering::Release);
                                    client
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "solidity.reindex: re-index failed, will retry: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }

                            // Retire the worker unless another request arrived
                            // while this pass was running.
                            if stop_project_cache_sync_worker_or_reclaim(
                                &pending_flag,
                                &running_flag,
                            ) {
                                continue;
                            }
                            break;
                        }
                    });
                }

                self.client
                    .log_message(
                        MessageType::INFO,
                        "solidity.reindex: in-memory cache evicted, background reindex triggered",
                    )
                    .await;
                Ok(Some(serde_json::json!({ "success": true })))
            }

            _ => Err(tower_lsp::jsonrpc::Error::method_not_found()),
        }
    }
2804
2805 async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
2806 self.flush_project_cache_to_disk("shutdown").await;
2807 self.client
2808 .log_message(MessageType::INFO, "lsp server shutting down.")
2809 .await;
2810 Ok(())
2811 }
2812
2813 async fn did_open(&self, params: DidOpenTextDocumentParams) {
2814 self.client
2815 .log_message(MessageType::INFO, "file opened")
2816 .await;
2817
2818 let mut td = params.text_document;
2819 let template_on_create = self
2820 .settings
2821 .read()
2822 .await
2823 .file_operations
2824 .template_on_create;
2825
2826 let should_attempt_scaffold = template_on_create
2829 && td.text.chars().all(|ch| ch.is_whitespace())
2830 && td.uri.scheme() == "file"
2831 && td
2832 .uri
2833 .to_file_path()
2834 .ok()
2835 .and_then(|p| p.extension().map(|e| e == "sol"))
2836 .unwrap_or(false);
2837
2838 if should_attempt_scaffold {
2839 let uri_str = td.uri.to_string();
2840 let create_flow_pending = {
2841 let pending = self.pending_create_scaffold.read().await;
2842 pending.contains(&uri_str)
2843 };
2844 if create_flow_pending {
2845 self.client
2846 .log_message(
2847 MessageType::INFO,
2848 format!(
2849 "didOpen: skip scaffold for {} (didCreateFiles scaffold pending)",
2850 uri_str
2851 ),
2852 )
2853 .await;
2854 } else {
2855 let cache_has_content = {
2856 let tc = self.text_cache.read().await;
2857 tc.get(&uri_str)
2858 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()))
2859 };
2860
2861 if !cache_has_content {
2862 let file_has_content = td.uri.to_file_path().ok().is_some_and(|p| {
2863 std::fs::read_to_string(&p)
2864 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()))
2865 });
2866
2867 if !file_has_content {
2868 let solc_version = self.foundry_config.read().await.solc_version.clone();
2869 if let Some(scaffold) =
2870 file_operations::generate_scaffold(&td.uri, solc_version.as_deref())
2871 {
2872 let end = utils::byte_offset_to_position(&td.text, td.text.len());
2873 let edit = WorkspaceEdit {
2874 changes: Some(HashMap::from([(
2875 td.uri.clone(),
2876 vec![TextEdit {
2877 range: Range {
2878 start: Position::default(),
2879 end,
2880 },
2881 new_text: scaffold.clone(),
2882 }],
2883 )])),
2884 document_changes: None,
2885 change_annotations: None,
2886 };
2887 if self
2888 .client
2889 .apply_edit(edit)
2890 .await
2891 .as_ref()
2892 .is_ok_and(|r| r.applied)
2893 {
2894 td.text = scaffold;
2895 self.client
2896 .log_message(
2897 MessageType::INFO,
2898 format!("didOpen: scaffolded empty file {}", uri_str),
2899 )
2900 .await;
2901 }
2902 }
2903 }
2904 }
2905 }
2906 }
2907
2908 self.on_change(td).await
2909 }
2910
2911 async fn did_change(&self, params: DidChangeTextDocumentParams) {
2912 self.client
2913 .log_message(MessageType::INFO, "file changed")
2914 .await;
2915
2916 if let Some(change) = params.content_changes.into_iter().next() {
2918 let has_substantive_content = change.text.chars().any(|ch| !ch.is_whitespace());
2919 let mut text_cache = self.text_cache.write().await;
2920 text_cache.insert(
2921 params.text_document.uri.to_string(),
2922 (params.text_document.version, change.text),
2923 );
2924 drop(text_cache);
2925
2926 if has_substantive_content {
2927 self.pending_create_scaffold
2928 .write()
2929 .await
2930 .remove(params.text_document.uri.as_str());
2931 }
2932 }
2933 }
2934
2935 async fn did_save(&self, params: DidSaveTextDocumentParams) {
2936 let uri_key = params.text_document.uri.to_string();
2941
2942 {
2944 let workers = self.did_save_workers.read().await;
2945 if let Some(tx) = workers.get(&uri_key) {
2946 if tx.send(Some(params.clone())).is_ok() {
2949 return;
2950 }
2951 }
2952 }
2953
2954 let (tx, mut rx) = tokio::sync::watch::channel(Some(params));
2957 self.did_save_workers.write().await.insert(uri_key, tx);
2958
2959 let this = self.clone();
2960 tokio::spawn(async move {
2961 loop {
2962 if rx.changed().await.is_err() {
2964 break;
2967 }
2968 let params = match rx.borrow_and_update().clone() {
2969 Some(p) => p,
2970 None => continue,
2971 };
2972 run_did_save(this.clone(), params).await;
2973 }
2974 });
2975 }
2976
2977 async fn will_save(&self, params: WillSaveTextDocumentParams) {
2978 self.client
2979 .log_message(
2980 MessageType::INFO,
2981 format!(
2982 "file will save reason:{:?} {}",
2983 params.reason, params.text_document.uri
2984 ),
2985 )
2986 .await;
2987 }
2988
    /// Handles `textDocument/formatting`.
    ///
    /// Runs the external formatter on the file and, when the output differs from
    /// the current document text, returns a single whole-document `TextEdit`
    /// (and pre-updates the text cache to the formatted content). Returns
    /// `Ok(None)` on any failure or when the document is already formatted.
    async fn formatting(
        &self,
        params: DocumentFormattingParams,
    ) -> tower_lsp::jsonrpc::Result<Option<Vec<TextEdit>>> {
        self.client
            .log_message(MessageType::INFO, "formatting request")
            .await;

        let uri = params.text_document.uri;
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI for formatting")
                    .await;
                return Ok(None);
            }
        };
        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path for formatting")
                    .await;
                return Ok(None);
            }
        };

        // Prefer the in-memory buffer for the "before" text; fall back to disk.
        let original_content = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, content)) = text_cache.get(&uri.to_string()) {
                content.clone()
            } else {
                match std::fs::read_to_string(&file_path) {
                    Ok(content) => content,
                    Err(_) => {
                        self.client
                            .log_message(MessageType::ERROR, "Failed to read file for formatting")
                            .await;
                        return Ok(None);
                    }
                }
            }
        };

        // NOTE(review): the formatter is invoked on the file *path*, so it reads
        // the on-disk content; if the buffer is dirty, `original_content` (from
        // the cache) and the formatter's input can diverge — confirm formatting
        // is only expected on saved documents.
        let formatted_content = match self.compiler.format(path_str).await {
            Ok(content) => content,
            Err(e) => {
                self.client
                    .log_message(MessageType::WARNING, format!("Formatting failed: {e}"))
                    .await;
                return Ok(None);
            }
        };

        if original_content != formatted_content {
            // The returned edit replaces the entire original document.
            let end = utils::byte_offset_to_position(&original_content, original_content.len());

            // Keep the text cache consistent with what the client is about to
            // apply, preserving the last-known version number (0 if unknown).
            {
                let mut text_cache = self.text_cache.write().await;
                let version = text_cache
                    .get(&uri.to_string())
                    .map(|(v, _)| *v)
                    .unwrap_or(0);
                text_cache.insert(uri.to_string(), (version, formatted_content.clone()));
            }

            let edit = TextEdit {
                range: Range {
                    start: Position::default(),
                    end,
                },
                new_text: formatted_content,
            };
            Ok(Some(vec![edit]))
        } else {
            Ok(None)
        }
    }
3073
3074 async fn did_close(&self, params: DidCloseTextDocumentParams) {
3075 self.flush_project_cache_to_disk("didClose").await;
3076 let uri = params.text_document.uri.to_string();
3077 self.ast_cache.write().await.remove(&uri);
3078 self.text_cache.write().await.remove(&uri);
3079 self.completion_cache.write().await.remove(&uri);
3080 self.client
3081 .log_message(MessageType::INFO, "file closed, caches cleared.")
3082 .await;
3083 }
3084
3085 async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
3086 let s = config::parse_settings(¶ms.settings);
3087 self.client
3088 .log_message(
3089 MessageType::INFO,
3090 format!(
3091 "settings updated: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
3092 s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
3093 ),
3094 )
3095 .await;
3096 let mut settings = self.settings.write().await;
3097 *settings = s;
3098
3099 let client = self.client.clone();
3101 tokio::spawn(async move {
3102 let _ = client.inlay_hint_refresh().await;
3103 });
3104 }
3105 async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
3106 self.client
3107 .log_message(MessageType::INFO, "workdspace folders changed.")
3108 .await;
3109 }
3110
    /// Handles `workspace/didChangeWatchedFiles`.
    ///
    /// Reloads lint and project configuration when a changed file is
    /// `foundry.toml`; logs (but does not act on) changes to `remappings.txt`.
    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
        self.client
            .log_message(MessageType::INFO, "watched files have changed.")
            .await;

        for change in &params.changes {
            // Non-file URIs are skipped.
            let path = match change.uri.to_file_path() {
                Ok(p) => p,
                Err(_) => continue,
            };

            let filename = path.file_name().and_then(|n| n.to_str());

            if filename == Some("foundry.toml") {
                // Reload the lint section first.
                let lint_cfg = config::load_lint_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                            lint_cfg.lint_on_build,
                            lint_cfg.ignore_patterns.len()
                        ),
                    )
                    .await;
                let mut lc = self.lint_config.write().await;
                *lc = lint_cfg;

                // Then the project-level config (solc version, remappings, ...).
                let foundry_cfg = config::load_foundry_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml project config: solc_version={:?}, remappings={}",
                            foundry_cfg.solc_version,
                            foundry_cfg.remappings.len()
                        ),
                    )
                    .await;
                if foundry_cfg.via_ir {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
                        )
                        .await;
                }
                let mut fc = self.foundry_config.write().await;
                *fc = foundry_cfg;
                // NOTE(review): this break stops processing the rest of the
                // batch, so a remappings.txt change arriving in the same
                // notification is not logged — presumably fine because one
                // foundry.toml reload covers it; confirm that is intentional.
                break;
            }

            if filename == Some("remappings.txt") {
                self.client
                    .log_message(
                        MessageType::INFO,
                        "remappings.txt changed, config may need refresh",
                    )
                    .await;
            }
        }
    }
3176
    /// Handles `textDocument/completion`.
    ///
    /// Special-cases two string contexts before general completion: the
    /// assembly-flags string (offers `"memory-safe"`) and import path strings
    /// (offers project-relative `.sol` paths). Otherwise delegates to the
    /// completion engine, feeding it the per-file completion cache when
    /// available, falling back to the project-wide cache.
    async fn completion(
        &self,
        params: CompletionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        let trigger_char = params
            .context
            .as_ref()
            .and_then(|ctx| ctx.trigger_character.as_deref());

        // Document text: prefer the live buffer, else read from disk (empty on
        // read failure); non-file URIs produce no completions.
        let source_text = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, text)) = text_cache.get(&uri.to_string()) {
                text.clone()
            } else {
                match uri.to_file_path() {
                    Ok(path) => std::fs::read_to_string(&path).unwrap_or_default(),
                    Err(_) => return Ok(None),
                }
            }
        };

        // Per-document completion cache, if one has been built for this URI.
        let local_cached: Option<Arc<completion::CompletionCache>> = {
            let comp_cache = self.completion_cache.read().await;
            comp_cache.get(&uri.to_string()).cloned()
        };

        // Project-wide completion cache attached to the root build, if any.
        let root_cached: Option<Arc<completion::CompletionCache>> = {
            let root_key = self.project_cache_key().await;
            match root_key {
                Some(root_key) => {
                    let ast_cache = self.ast_cache.read().await;
                    ast_cache
                        .get(&root_key)
                        .map(|root_build| root_build.completion_cache.clone())
                }
                None => None,
            }
        };

        // Local cache wins; the project cache is the fallback.
        let cached = local_cached.or(root_cached.clone());

        if cached.is_none() {
            // No cache for this request: opportunistically populate the
            // per-document completion cache from the per-file AST build in the
            // background so the *next* request is warm.
            let ast_cache = self.ast_cache.clone();
            let completion_cache = self.completion_cache.clone();
            let uri_string = uri.to_string();
            tokio::spawn(async move {
                let cached_build = {
                    let cache = ast_cache.read().await;
                    match cache.get(&uri_string) {
                        Some(v) => v.clone(),
                        None => return,
                    }
                };
                completion_cache
                    .write()
                    .await
                    .insert(uri_string, cached_build.completion_cache.clone());
            });
        }

        let cache_ref = cached.as_deref();

        // Map this document's path to its file id inside the cache, if known.
        let file_id = {
            let uri_path = uri.to_file_path().ok();
            cache_ref.and_then(|c| {
                uri_path.as_ref().and_then(|p| {
                    let path_str = p.to_str()?;
                    c.path_to_file_id.get(path_str).copied()
                })
            })
        };

        let current_file_path = uri
            .to_file_path()
            .ok()
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        // When triggered by a quote, probe one column to the right of the
        // reported position — assumes the client reports the position before
        // the just-typed quote; TODO confirm across clients.
        let check_pos = if matches!(trigger_char, Some("\"") | Some("'")) {
            Position {
                line: position.line,
                character: position.character.saturating_add(1),
            }
        } else {
            position
        };

        // Inside an `assembly ("...")` flags string: offer only "memory-safe",
        // replacing whatever was typed so far on that line.
        if let Some(asm_range) =
            links::ts_cursor_in_assembly_flags(source_text.as_bytes(), check_pos)
        {
            let text_edit = CompletionTextEdit::Edit(TextEdit {
                range: Range {
                    start: Position {
                        line: position.line,
                        character: asm_range.start.character,
                    },
                    end: Position {
                        line: position.line,
                        character: check_pos.character,
                    },
                },
                new_text: "memory-safe".to_string(),
            });
            let item = CompletionItem {
                label: "memory-safe".to_string(),
                kind: Some(CompletionItemKind::VALUE),
                detail: Some("Solidity assembly dialect".to_string()),
                filter_text: Some("memory-safe".to_string()),
                text_edit: Some(text_edit),
                ..Default::default()
            };
            return Ok(Some(CompletionResponse::List(CompletionList {
                is_incomplete: false,
                items: vec![item],
            })));
        }

        // Inside an import string: offer resolvable `.sol` paths. The list is
        // marked incomplete so the client re-queries as the user types.
        if let Some(import_range) =
            links::ts_cursor_in_import_string(source_text.as_bytes(), check_pos)
        {
            if let Ok(current_file) = uri.to_file_path() {
                let foundry_cfg = self.foundry_config.read().await.clone();
                let project_root = foundry_cfg.root.clone();
                let remappings = crate::solc::resolve_remappings(&foundry_cfg).await;
                let typed_range = Some((
                    position.line,
                    import_range.start.character,
                    check_pos.character,
                ));
                let items = completion::all_sol_import_paths(
                    &current_file,
                    &project_root,
                    &remappings,
                    typed_range,
                );
                return Ok(Some(CompletionResponse::List(CompletionList {
                    is_incomplete: true,
                    items,
                })));
            }
            return Ok(None);
        }

        // A quote trigger outside the two string contexts above means some
        // other string literal — no completions there.
        if matches!(trigger_char, Some("\"") | Some("'")) {
            return Ok(None);
        }

        // Top-level importable symbols are only appended for non-member
        // completion (a `.` trigger means member access).
        let tail_candidates = if trigger_char == Some(".") {
            vec![]
        } else {
            root_cached.as_deref().map_or_else(Vec::new, |c| {
                completion::top_level_importable_completion_candidates(
                    c,
                    current_file_path.as_deref(),
                    &source_text,
                )
            })
        };

        let result = completion::handle_completion_with_tail_candidates(
            cache_ref,
            &source_text,
            position,
            trigger_char,
            file_id,
            tail_candidates,
        );
        Ok(result)
    }
3379
    /// Handles `textDocument/definition`.
    ///
    /// Strategy depends on whether the buffer is "dirty" (text cache version
    /// newer than the cached build's version): when dirty, a tree-sitter lookup
    /// runs first with an AST name-based fallback; when clean, the AST lookup
    /// runs first with a tree-sitter fallback. Tree-sitter hits are validated
    /// against the identifier under the cursor before being returned.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/definition request")
            .await;

        let uri = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file uri")
                    .await;
                return Ok(None);
            }
        };

        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        let source_text = String::from_utf8_lossy(&source_bytes).to_string();

        // Identifier under the cursor, used both for validation and for the
        // name-based AST fallback.
        let cursor_name = goto::cursor_context(&source_text, position).map(|ctx| ctx.name);

        // Dirty = the edited text is newer than the build the AST answers come
        // from, so AST positions may be stale.
        let (is_dirty, cached_build) = {
            let text_version = self
                .text_cache
                .read()
                .await
                .get(&uri.to_string())
                .map(|(v, _)| *v)
                .unwrap_or(0);
            let cb = self.get_or_fetch_build(&uri, &file_path, false).await;
            let build_version = cb.as_ref().map(|b| b.build_version).unwrap_or(0);
            (text_version > build_version, cb)
        };

        // Confirms a tree-sitter location actually points at the cursor's
        // identifier; unverifiable cases (no cursor name, unreadable target)
        // pass by default.
        let validate_ts = |loc: &Location| -> bool {
            let Some(ref name) = cursor_name else {
                return true;
            };
            let target_src = if loc.uri == uri {
                Some(source_text.clone())
            } else {
                loc.uri
                    .to_file_path()
                    .ok()
                    .and_then(|p| std::fs::read_to_string(&p).ok())
            };
            match target_src {
                Some(src) => goto::validate_goto_target(&src, loc, name),
                None => true,
            }
        };

        if is_dirty {
            self.client
                .log_message(MessageType::INFO, "file is dirty, trying tree-sitter first")
                .await;

            // Tree-sitter works on the live text, so it is preferred when the
            // AST build is stale.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(
                        MessageType::INFO,
                        "tree-sitter result failed validation, trying AST fallback",
                    )
                    .await;
            }

            // Fallback: look the identifier up by name in the (stale) AST;
            // name-based lookup tolerates position drift.
            if let Some(ref cb) = cached_build
                && let Some(ref name) = cursor_name
            {
                let byte_hint = goto::pos_to_bytes(&source_bytes, position);
                if let Some(location) = goto::goto_declaration_by_name(cb, &uri, name, byte_hint) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (AST by name) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
            }
        } else {
            // Clean buffer: AST positions are trustworthy, so try them first.
            if let Some(ref cb) = cached_build
                && let Some(location) =
                    goto::goto_declaration_cached(cb, &uri, position, &source_bytes)
            {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "found definition (AST) at {}:{}",
                            location.uri, location.range.start.line
                        ),
                    )
                    .await;
                return Ok(Some(GotoDefinitionResponse::from(location)));
            }

            // AST missed: fall back to tree-sitter.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter fallback) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(MessageType::INFO, "tree-sitter fallback failed validation")
                    .await;
            }
        }

        self.client
            .log_message(MessageType::INFO, "no definition found")
            .await;
        Ok(None)
    }
3561
3562 async fn goto_declaration(
3563 &self,
3564 params: request::GotoDeclarationParams,
3565 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoDeclarationResponse>> {
3566 self.client
3567 .log_message(MessageType::INFO, "got textDocument/declaration request")
3568 .await;
3569
3570 let uri = params.text_document_position_params.text_document.uri;
3571 let position = params.text_document_position_params.position;
3572
3573 let file_path = match uri.to_file_path() {
3574 Ok(path) => path,
3575 Err(_) => {
3576 self.client
3577 .log_message(MessageType::ERROR, "invalid file uri")
3578 .await;
3579 return Ok(None);
3580 }
3581 };
3582
3583 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3584 Some(bytes) => bytes,
3585 None => return Ok(None),
3586 };
3587
3588 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3589 let cached_build = match cached_build {
3590 Some(cb) => cb,
3591 None => return Ok(None),
3592 };
3593
3594 if let Some(location) =
3595 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
3596 {
3597 self.client
3598 .log_message(
3599 MessageType::INFO,
3600 format!(
3601 "found declaration at {}:{}",
3602 location.uri, location.range.start.line
3603 ),
3604 )
3605 .await;
3606 Ok(Some(request::GotoDeclarationResponse::from(location)))
3607 } else {
3608 self.client
3609 .log_message(MessageType::INFO, "no declaration found")
3610 .await;
3611 Ok(None)
3612 }
3613 }
3614
    /// Handles `textDocument/references`.
    ///
    /// Collects references from the per-file build, then extends them with
    /// project-wide matches from the root build. If the current file is missing
    /// from the project build (and full-project scanning via solc is enabled),
    /// a scoped reindex of the file's reverse-import closure is run first and
    /// merged into the cached project build. Results are deduplicated by exact
    /// location.
    async fn references(
        &self,
        params: ReferenceParams,
    ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
        self.client
            .log_message(MessageType::INFO, "Got a textDocument/references request")
            .await;

        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return Ok(None);
            }
        };
        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };
        let file_build = self.get_or_fetch_build(&uri, &file_path, true).await;
        let file_build = match file_build {
            Some(cb) => cb,
            None => return Ok(None),
        };
        let mut project_build = self.ensure_project_cached_build().await;
        let current_abs = file_path.to_string_lossy().to_string();
        // Warm-refresh: project build exists but does not know this file yet.
        if self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && project_build
                .as_ref()
                .is_some_and(|b| !b.nodes.contains_key(&current_abs))
        {
            let foundry_config = self.foundry_config_for_file(&file_path).await;
            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
            let changed = vec![PathBuf::from(&current_abs)];
            let cfg_for_plan = foundry_config.clone();
            let remappings_for_plan = remappings.clone();
            // Compute which files (transitively) import this one; CPU/disk
            // bound, so run off the async executor.
            let affected_set = tokio::task::spawn_blocking(move || {
                compute_reverse_import_closure(&cfg_for_plan, &changed, &remappings_for_plan)
            })
            .await
            .ok()
            .unwrap_or_default();
            let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
            // At minimum, reindex the file itself.
            if affected_files.is_empty() {
                affected_files.push(PathBuf::from(&current_abs));
            }
            let text_cache_snapshot = self.text_cache.read().await.clone();
            match crate::solc::solc_project_index_scoped(
                &foundry_config,
                Some(&self.client),
                Some(&text_cache_snapshot),
                &affected_files,
            )
            .await
            {
                Ok(ast_data) => {
                    let scoped_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    if let Some(root_key) = self.project_cache_key().await {
                        // Merge the scoped result into the existing project
                        // build under the write lock; if the merge fails, the
                        // scoped build alone replaces the entry.
                        let merged = {
                            let mut cache = self.ast_cache.write().await;
                            let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                                let mut merged = (*existing).clone();
                                match merge_scoped_cached_build(
                                    &mut merged,
                                    (*scoped_build).clone(),
                                ) {
                                    Ok(_) => Arc::new(merged),
                                    Err(_) => scoped_build.clone(),
                                }
                            } else {
                                scoped_build.clone()
                            };
                            cache.insert(root_key, merged.clone());
                            merged
                        };
                        project_build = Some(merged);
                    } else {
                        project_build = Some(scoped_build);
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-refresh: scoped reindex applied (affected={})",
                                affected_files.len()
                            ),
                        )
                        .await;
                }
                Err(e) => {
                    // Non-fatal: continue with whatever project build we have.
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!("references warm-refresh: scoped reindex failed: {e}"),
                        )
                        .await;
                }
            }
        }

        // References within the current file's own build.
        let mut locations = references::goto_references_cached(
            &file_build,
            &uri,
            position,
            &source_bytes,
            None,
            params.context.include_declaration,
        );

        // Resolve the definition's absolute path + byte offset, then gather
        // project-wide references to that exact target.
        if let Some((def_abs_path, def_byte_offset)) =
            references::resolve_target_location(&file_build, &uri, position, &source_bytes)
        {
            if let Some(project_build) = project_build {
                let other_locations = references::goto_references_for_target(
                    &project_build,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                );
                locations.extend(other_locations);
            }
        }

        // Deduplicate by (uri, exact range); `retain` keeps the first occurrence.
        let mut seen = std::collections::HashSet::new();
        locations.retain(|loc| {
            seen.insert((
                loc.uri.clone(),
                loc.range.start.line,
                loc.range.start.character,
                loc.range.end.line,
                loc.range.end.character,
            ))
        });

        if locations.is_empty() {
            self.client
                .log_message(MessageType::INFO, "No references found")
                .await;
            Ok(None)
        } else {
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("Found {} references", locations.len()),
                )
                .await;
            Ok(Some(locations))
        }
    }
3774
3775 async fn prepare_rename(
3776 &self,
3777 params: TextDocumentPositionParams,
3778 ) -> tower_lsp::jsonrpc::Result<Option<PrepareRenameResponse>> {
3779 self.client
3780 .log_message(MessageType::INFO, "got textDocument/prepareRename request")
3781 .await;
3782
3783 let uri = params.text_document.uri;
3784 let position = params.position;
3785
3786 let file_path = match uri.to_file_path() {
3787 Ok(path) => path,
3788 Err(_) => {
3789 self.client
3790 .log_message(MessageType::ERROR, "invalid file uri")
3791 .await;
3792 return Ok(None);
3793 }
3794 };
3795
3796 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3797 Some(bytes) => bytes,
3798 None => return Ok(None),
3799 };
3800
3801 if let Some(range) = rename::get_identifier_range(&source_bytes, position) {
3802 self.client
3803 .log_message(
3804 MessageType::INFO,
3805 format!(
3806 "prepare rename range: {}:{}",
3807 range.start.line, range.start.character
3808 ),
3809 )
3810 .await;
3811 Ok(Some(PrepareRenameResponse::Range(range)))
3812 } else {
3813 self.client
3814 .log_message(MessageType::INFO, "no identifier found for prepare rename")
3815 .await;
3816 Ok(None)
3817 }
3818 }
3819
3820 async fn rename(
3821 &self,
3822 params: RenameParams,
3823 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
3824 self.client
3825 .log_message(MessageType::INFO, "got textDocument/rename request")
3826 .await;
3827
3828 let uri = params.text_document_position.text_document.uri;
3829 let position = params.text_document_position.position;
3830 let new_name = params.new_name;
3831 let file_path = match uri.to_file_path() {
3832 Ok(p) => p,
3833 Err(_) => {
3834 self.client
3835 .log_message(MessageType::ERROR, "invalid file uri")
3836 .await;
3837 return Ok(None);
3838 }
3839 };
3840 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3841 Some(bytes) => bytes,
3842 None => return Ok(None),
3843 };
3844
3845 let current_identifier = match rename::get_identifier_at_position(&source_bytes, position) {
3846 Some(id) => id,
3847 None => {
3848 self.client
3849 .log_message(MessageType::ERROR, "No identifier found at position")
3850 .await;
3851 return Ok(None);
3852 }
3853 };
3854
3855 if !utils::is_valid_solidity_identifier(&new_name) {
3856 return Err(tower_lsp::jsonrpc::Error::invalid_params(
3857 "new name is not a valid solidity identifier",
3858 ));
3859 }
3860
3861 if new_name == current_identifier {
3862 self.client
3863 .log_message(
3864 MessageType::INFO,
3865 "new name is the same as current identifier",
3866 )
3867 .await;
3868 return Ok(None);
3869 }
3870
3871 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3872 let cached_build = match cached_build {
3873 Some(cb) => cb,
3874 None => return Ok(None),
3875 };
3876 let other_builds: Vec<Arc<goto::CachedBuild>> = {
3877 let cache = self.ast_cache.read().await;
3878 cache
3879 .iter()
3880 .filter(|(key, _)| **key != uri.to_string())
3881 .map(|(_, v)| v.clone())
3882 .collect()
3883 };
3884 let other_refs: Vec<&goto::CachedBuild> = other_builds.iter().map(|v| v.as_ref()).collect();
3885
3886 let text_buffers: HashMap<String, Vec<u8>> = {
3890 let text_cache = self.text_cache.read().await;
3891 text_cache
3892 .iter()
3893 .map(|(uri, (_, content))| (uri.clone(), content.as_bytes().to_vec()))
3894 .collect()
3895 };
3896
3897 match rename::rename_symbol(
3898 &cached_build,
3899 &uri,
3900 position,
3901 &source_bytes,
3902 new_name,
3903 &other_refs,
3904 &text_buffers,
3905 ) {
3906 Some(workspace_edit) => {
3907 self.client
3908 .log_message(
3909 MessageType::INFO,
3910 format!(
3911 "created rename edit with {} file(s), {} total change(s)",
3912 workspace_edit
3913 .changes
3914 .as_ref()
3915 .map(|c| c.len())
3916 .unwrap_or(0),
3917 workspace_edit
3918 .changes
3919 .as_ref()
3920 .map(|c| c.values().map(|v| v.len()).sum::<usize>())
3921 .unwrap_or(0)
3922 ),
3923 )
3924 .await;
3925
3926 Ok(Some(workspace_edit))
3931 }
3932
3933 None => {
3934 self.client
3935 .log_message(MessageType::INFO, "No locations found for renaming")
3936 .await;
3937 Ok(None)
3938 }
3939 }
3940 }
3941
3942 async fn symbol(
3943 &self,
3944 params: WorkspaceSymbolParams,
3945 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SymbolInformation>>> {
3946 self.client
3947 .log_message(MessageType::INFO, "got workspace/symbol request")
3948 .await;
3949
3950 let files: Vec<(Url, String)> = {
3952 let cache = self.text_cache.read().await;
3953 cache
3954 .iter()
3955 .filter(|(uri_str, _)| uri_str.ends_with(".sol"))
3956 .filter_map(|(uri_str, (_, content))| {
3957 Url::parse(uri_str).ok().map(|uri| (uri, content.clone()))
3958 })
3959 .collect()
3960 };
3961
3962 let mut all_symbols = symbols::extract_workspace_symbols(&files);
3963 if !params.query.is_empty() {
3964 let query = params.query.to_lowercase();
3965 all_symbols.retain(|symbol| symbol.name.to_lowercase().contains(&query));
3966 }
3967 if all_symbols.is_empty() {
3968 self.client
3969 .log_message(MessageType::INFO, "No symbols found")
3970 .await;
3971 Ok(None)
3972 } else {
3973 self.client
3974 .log_message(
3975 MessageType::INFO,
3976 format!("found {} symbols", all_symbols.len()),
3977 )
3978 .await;
3979 Ok(Some(all_symbols))
3980 }
3981 }
3982
3983 async fn document_symbol(
3984 &self,
3985 params: DocumentSymbolParams,
3986 ) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
3987 self.client
3988 .log_message(MessageType::INFO, "got textDocument/documentSymbol request")
3989 .await;
3990 let uri = params.text_document.uri;
3991 let file_path = match uri.to_file_path() {
3992 Ok(path) => path,
3993 Err(_) => {
3994 self.client
3995 .log_message(MessageType::ERROR, "invalid file uri")
3996 .await;
3997 return Ok(None);
3998 }
3999 };
4000
4001 let source = {
4003 let cache = self.text_cache.read().await;
4004 cache
4005 .get(&uri.to_string())
4006 .map(|(_, content)| content.clone())
4007 };
4008 let source = match source {
4009 Some(s) => s,
4010 None => match std::fs::read_to_string(&file_path) {
4011 Ok(s) => s,
4012 Err(_) => return Ok(None),
4013 },
4014 };
4015
4016 let symbols = symbols::extract_document_symbols(&source);
4017 if symbols.is_empty() {
4018 self.client
4019 .log_message(MessageType::INFO, "no document symbols found")
4020 .await;
4021 Ok(None)
4022 } else {
4023 self.client
4024 .log_message(
4025 MessageType::INFO,
4026 format!("found {} document symbols", symbols.len()),
4027 )
4028 .await;
4029 Ok(Some(DocumentSymbolResponse::Nested(symbols)))
4030 }
4031 }
4032
4033 async fn document_highlight(
4034 &self,
4035 params: DocumentHighlightParams,
4036 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentHighlight>>> {
4037 self.client
4038 .log_message(
4039 MessageType::INFO,
4040 "got textDocument/documentHighlight request",
4041 )
4042 .await;
4043
4044 let uri = params.text_document_position_params.text_document.uri;
4045 let position = params.text_document_position_params.position;
4046
4047 let source = {
4048 let cache = self.text_cache.read().await;
4049 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4050 };
4051
4052 let source = match source {
4053 Some(s) => s,
4054 None => {
4055 let file_path = match uri.to_file_path() {
4056 Ok(p) => p,
4057 Err(_) => return Ok(None),
4058 };
4059 match std::fs::read_to_string(&file_path) {
4060 Ok(s) => s,
4061 Err(_) => return Ok(None),
4062 }
4063 }
4064 };
4065
4066 let highlights = highlight::document_highlights(&source, position);
4067
4068 if highlights.is_empty() {
4069 self.client
4070 .log_message(MessageType::INFO, "no document highlights found")
4071 .await;
4072 Ok(None)
4073 } else {
4074 self.client
4075 .log_message(
4076 MessageType::INFO,
4077 format!("found {} document highlights", highlights.len()),
4078 )
4079 .await;
4080 Ok(Some(highlights))
4081 }
4082 }
4083
4084 async fn hover(&self, params: HoverParams) -> tower_lsp::jsonrpc::Result<Option<Hover>> {
4085 self.client
4086 .log_message(MessageType::INFO, "got textDocument/hover request")
4087 .await;
4088
4089 let uri = params.text_document_position_params.text_document.uri;
4090 let position = params.text_document_position_params.position;
4091
4092 let file_path = match uri.to_file_path() {
4093 Ok(path) => path,
4094 Err(_) => {
4095 self.client
4096 .log_message(MessageType::ERROR, "invalid file uri")
4097 .await;
4098 return Ok(None);
4099 }
4100 };
4101
4102 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4103 Some(bytes) => bytes,
4104 None => return Ok(None),
4105 };
4106
4107 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4108 let cached_build = match cached_build {
4109 Some(cb) => cb,
4110 None => return Ok(None),
4111 };
4112
4113 let result = hover::hover_info(&cached_build, &uri, position, &source_bytes);
4114
4115 if result.is_some() {
4116 self.client
4117 .log_message(MessageType::INFO, "hover info found")
4118 .await;
4119 } else {
4120 self.client
4121 .log_message(MessageType::INFO, "no hover info found")
4122 .await;
4123 }
4124
4125 Ok(result)
4126 }
4127
4128 async fn signature_help(
4129 &self,
4130 params: SignatureHelpParams,
4131 ) -> tower_lsp::jsonrpc::Result<Option<SignatureHelp>> {
4132 self.client
4133 .log_message(MessageType::INFO, "got textDocument/signatureHelp request")
4134 .await;
4135
4136 let uri = params.text_document_position_params.text_document.uri;
4137 let position = params.text_document_position_params.position;
4138
4139 let file_path = match uri.to_file_path() {
4140 Ok(path) => path,
4141 Err(_) => {
4142 self.client
4143 .log_message(MessageType::ERROR, "invalid file uri")
4144 .await;
4145 return Ok(None);
4146 }
4147 };
4148
4149 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4150 Some(bytes) => bytes,
4151 None => return Ok(None),
4152 };
4153
4154 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4155 let cached_build = match cached_build {
4156 Some(cb) => cb,
4157 None => return Ok(None),
4158 };
4159
4160 let result = hover::signature_help(&cached_build, &source_bytes, position);
4161
4162 Ok(result)
4163 }
4164
4165 async fn document_link(
4166 &self,
4167 params: DocumentLinkParams,
4168 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
4169 self.client
4170 .log_message(MessageType::INFO, "got textDocument/documentLink request")
4171 .await;
4172
4173 let uri = params.text_document.uri;
4174 let file_path = match uri.to_file_path() {
4175 Ok(path) => path,
4176 Err(_) => {
4177 self.client
4178 .log_message(MessageType::ERROR, "invalid file uri")
4179 .await;
4180 return Ok(None);
4181 }
4182 };
4183
4184 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4185 Some(bytes) => bytes,
4186 None => return Ok(None),
4187 };
4188
4189 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4190 let cached_build = match cached_build {
4191 Some(cb) => cb,
4192 None => return Ok(None),
4193 };
4194
4195 let result = links::document_links(&cached_build, &uri, &source_bytes);
4196
4197 if result.is_empty() {
4198 self.client
4199 .log_message(MessageType::INFO, "no document links found")
4200 .await;
4201 Ok(None)
4202 } else {
4203 self.client
4204 .log_message(
4205 MessageType::INFO,
4206 format!("found {} document links", result.len()),
4207 )
4208 .await;
4209 Ok(Some(result))
4210 }
4211 }
4212
4213 async fn semantic_tokens_full(
4214 &self,
4215 params: SemanticTokensParams,
4216 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
4217 self.client
4218 .log_message(
4219 MessageType::INFO,
4220 "got textDocument/semanticTokens/full request",
4221 )
4222 .await;
4223
4224 let uri = params.text_document.uri;
4225 let source = {
4226 let cache = self.text_cache.read().await;
4227 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4228 };
4229
4230 let source = match source {
4231 Some(s) => s,
4232 None => {
4233 let file_path = match uri.to_file_path() {
4235 Ok(p) => p,
4236 Err(_) => return Ok(None),
4237 };
4238 match std::fs::read_to_string(&file_path) {
4239 Ok(s) => s,
4240 Err(_) => return Ok(None),
4241 }
4242 }
4243 };
4244
4245 let mut tokens = semantic_tokens::semantic_tokens_full(&source);
4246
4247 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
4249 let result_id = id.to_string();
4250 tokens.result_id = Some(result_id.clone());
4251
4252 {
4253 let mut cache = self.semantic_token_cache.write().await;
4254 cache.insert(uri.to_string(), (result_id, tokens.data.clone()));
4255 }
4256
4257 Ok(Some(SemanticTokensResult::Tokens(tokens)))
4258 }
4259
4260 async fn semantic_tokens_range(
4261 &self,
4262 params: SemanticTokensRangeParams,
4263 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensRangeResult>> {
4264 self.client
4265 .log_message(
4266 MessageType::INFO,
4267 "got textDocument/semanticTokens/range request",
4268 )
4269 .await;
4270
4271 let uri = params.text_document.uri;
4272 let range = params.range;
4273 let source = {
4274 let cache = self.text_cache.read().await;
4275 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4276 };
4277
4278 let source = match source {
4279 Some(s) => s,
4280 None => {
4281 let file_path = match uri.to_file_path() {
4282 Ok(p) => p,
4283 Err(_) => return Ok(None),
4284 };
4285 match std::fs::read_to_string(&file_path) {
4286 Ok(s) => s,
4287 Err(_) => return Ok(None),
4288 }
4289 }
4290 };
4291
4292 let tokens =
4293 semantic_tokens::semantic_tokens_range(&source, range.start.line, range.end.line);
4294
4295 Ok(Some(SemanticTokensRangeResult::Tokens(tokens)))
4296 }
4297
4298 async fn semantic_tokens_full_delta(
4299 &self,
4300 params: SemanticTokensDeltaParams,
4301 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
4302 self.client
4303 .log_message(
4304 MessageType::INFO,
4305 "got textDocument/semanticTokens/full/delta request",
4306 )
4307 .await;
4308
4309 let uri = params.text_document.uri;
4310 let previous_result_id = params.previous_result_id;
4311
4312 let source = {
4313 let cache = self.text_cache.read().await;
4314 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4315 };
4316
4317 let source = match source {
4318 Some(s) => s,
4319 None => {
4320 let file_path = match uri.to_file_path() {
4321 Ok(p) => p,
4322 Err(_) => return Ok(None),
4323 };
4324 match std::fs::read_to_string(&file_path) {
4325 Ok(s) => s,
4326 Err(_) => return Ok(None),
4327 }
4328 }
4329 };
4330
4331 let mut new_tokens = semantic_tokens::semantic_tokens_full(&source);
4332
4333 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
4335 let new_result_id = id.to_string();
4336 new_tokens.result_id = Some(new_result_id.clone());
4337
4338 let uri_str = uri.to_string();
4339
4340 let old_tokens = {
4342 let cache = self.semantic_token_cache.read().await;
4343 cache
4344 .get(&uri_str)
4345 .filter(|(rid, _)| *rid == previous_result_id)
4346 .map(|(_, tokens)| tokens.clone())
4347 };
4348
4349 {
4351 let mut cache = self.semantic_token_cache.write().await;
4352 cache.insert(uri_str, (new_result_id.clone(), new_tokens.data.clone()));
4353 }
4354
4355 match old_tokens {
4356 Some(old) => {
4357 let edits = semantic_tokens::compute_delta(&old, &new_tokens.data);
4359 Ok(Some(SemanticTokensFullDeltaResult::TokensDelta(
4360 SemanticTokensDelta {
4361 result_id: Some(new_result_id),
4362 edits,
4363 },
4364 )))
4365 }
4366 None => {
4367 Ok(Some(SemanticTokensFullDeltaResult::Tokens(new_tokens)))
4369 }
4370 }
4371 }
4372
4373 async fn folding_range(
4374 &self,
4375 params: FoldingRangeParams,
4376 ) -> tower_lsp::jsonrpc::Result<Option<Vec<FoldingRange>>> {
4377 self.client
4378 .log_message(MessageType::INFO, "got textDocument/foldingRange request")
4379 .await;
4380
4381 let uri = params.text_document.uri;
4382
4383 let source = {
4384 let cache = self.text_cache.read().await;
4385 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4386 };
4387
4388 let source = match source {
4389 Some(s) => s,
4390 None => {
4391 let file_path = match uri.to_file_path() {
4392 Ok(p) => p,
4393 Err(_) => return Ok(None),
4394 };
4395 match std::fs::read_to_string(&file_path) {
4396 Ok(s) => s,
4397 Err(_) => return Ok(None),
4398 }
4399 }
4400 };
4401
4402 let ranges = folding::folding_ranges(&source);
4403
4404 if ranges.is_empty() {
4405 self.client
4406 .log_message(MessageType::INFO, "no folding ranges found")
4407 .await;
4408 Ok(None)
4409 } else {
4410 self.client
4411 .log_message(
4412 MessageType::INFO,
4413 format!("found {} folding ranges", ranges.len()),
4414 )
4415 .await;
4416 Ok(Some(ranges))
4417 }
4418 }
4419
4420 async fn selection_range(
4421 &self,
4422 params: SelectionRangeParams,
4423 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SelectionRange>>> {
4424 self.client
4425 .log_message(MessageType::INFO, "got textDocument/selectionRange request")
4426 .await;
4427
4428 let uri = params.text_document.uri;
4429
4430 let source = {
4431 let cache = self.text_cache.read().await;
4432 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4433 };
4434
4435 let source = match source {
4436 Some(s) => s,
4437 None => {
4438 let file_path = match uri.to_file_path() {
4439 Ok(p) => p,
4440 Err(_) => return Ok(None),
4441 };
4442 match std::fs::read_to_string(&file_path) {
4443 Ok(s) => s,
4444 Err(_) => return Ok(None),
4445 }
4446 }
4447 };
4448
4449 let ranges = selection::selection_ranges(&source, ¶ms.positions);
4450
4451 if ranges.is_empty() {
4452 self.client
4453 .log_message(MessageType::INFO, "no selection ranges found")
4454 .await;
4455 Ok(None)
4456 } else {
4457 self.client
4458 .log_message(
4459 MessageType::INFO,
4460 format!("found {} selection ranges", ranges.len()),
4461 )
4462 .await;
4463 Ok(Some(ranges))
4464 }
4465 }
4466
4467 async fn inlay_hint(
4468 &self,
4469 params: InlayHintParams,
4470 ) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
4471 self.client
4472 .log_message(MessageType::INFO, "got textDocument/inlayHint request")
4473 .await;
4474
4475 let uri = params.text_document.uri;
4476 let range = params.range;
4477
4478 let file_path = match uri.to_file_path() {
4479 Ok(path) => path,
4480 Err(_) => {
4481 self.client
4482 .log_message(MessageType::ERROR, "invalid file uri")
4483 .await;
4484 return Ok(None);
4485 }
4486 };
4487
4488 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4489 Some(bytes) => bytes,
4490 None => return Ok(None),
4491 };
4492
4493 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4494 let cached_build = match cached_build {
4495 Some(cb) => cb,
4496 None => return Ok(None),
4497 };
4498
4499 let mut hints = inlay_hints::inlay_hints(&cached_build, &uri, range, &source_bytes);
4500
4501 let settings = self.settings.read().await;
4503 if !settings.inlay_hints.parameters {
4504 hints.retain(|h| h.kind != Some(InlayHintKind::PARAMETER));
4505 }
4506 if !settings.inlay_hints.gas_estimates {
4507 hints.retain(|h| h.kind != Some(InlayHintKind::TYPE));
4508 }
4509
4510 if hints.is_empty() {
4511 self.client
4512 .log_message(MessageType::INFO, "no inlay hints found")
4513 .await;
4514 Ok(None)
4515 } else {
4516 self.client
4517 .log_message(
4518 MessageType::INFO,
4519 format!("found {} inlay hints", hints.len()),
4520 )
4521 .await;
4522 Ok(Some(hints))
4523 }
4524 }
4525
    /// Handles `textDocument/codeAction`: for each diagnostic in the request,
    /// looks up a quick-fix definition and, when a concrete `TextEdit` can be
    /// produced from the current source, offers it as a preferred QUICKFIX
    /// action. The special string code "unused-import" is handled directly;
    /// all other handled codes are numeric strings looked up in
    /// `self.code_action_db`.
    async fn code_action(
        &self,
        params: CodeActionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CodeActionResponse>> {
        use crate::code_actions::FixKind;

        let uri = &params.text_document.uri;

        // Current document text (cached buffer or disk via get_source_bytes);
        // lossy UTF-8 so a stray invalid byte doesn't kill the whole request.
        let source: Option<String> = if let Ok(path) = uri.to_file_path() {
            self.get_source_bytes(uri, &path)
                .await
                .map(|b| String::from_utf8_lossy(&b).into_owned())
        } else {
            None
        };

        let db = &self.code_action_db;
        let mut actions: Vec<CodeActionOrCommand> = Vec::new();

        for diag in &params.context.diagnostics {
            // Special-cased string diagnostic code: remove an unused import by
            // deleting the whole `import_directive` node at the diagnostic.
            if let Some(NumberOrString::String(s)) = &diag.code {
                if s == "unused-import" {
                    if let Some(edit) = source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::DeleteNodeByKind {
                                node_kind: "import_directive",
                            },
                        )
                    }) {
                        let mut changes = HashMap::new();
                        changes.insert(uri.clone(), vec![edit]);
                        actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                            title: "Remove unused import".to_string(),
                            kind: Some(CodeActionKind::QUICKFIX),
                            diagnostics: Some(vec![diag.clone()]),
                            edit: Some(WorkspaceEdit {
                                changes: Some(changes),
                                ..Default::default()
                            }),
                            is_preferred: Some(true),
                            ..Default::default()
                        }));
                    }
                    // Whether or not an edit was produced, this diagnostic is
                    // fully handled here.
                    continue;
                }
            }

            // All remaining handled codes are numeric strings. NOTE(review):
            // `NumberOrString::Number` codes are skipped entirely here —
            // presumably the diagnostics producer always emits string codes;
            // confirm against the diagnostics path.
            let code: u32 = match &diag.code {
                Some(NumberOrString::String(s)) => match s.parse() {
                    Ok(n) => n,
                    Err(_) => continue,
                },
                _ => continue,
            };

            if let Some(def) = db.get(&code) {
                // Translate the declarative FixKind from the DB into a concrete
                // TextEdit against the current source, when possible.
                let edit_opt: Option<TextEdit> = match &def.fix {
                    // Insert fixes fall back to an empty source: the edit is an
                    // insertion at file start, so no source text is required.
                    FixKind::Insert { text, anchor: _ } => {
                        goto::code_action_edit(
                            source.as_deref().unwrap_or(""),
                            diag.range,
                            goto::CodeActionKind::InsertAtFileStart { text },
                        )
                    }

                    FixKind::ReplaceToken {
                        replacement,
                        walk_to,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceToken {
                                replacement,
                                walk_to: walk_to.as_deref(),
                            },
                        )
                    }),

                    FixKind::DeleteToken => source.as_deref().and_then(|src| {
                        goto::code_action_edit(src, diag.range, goto::CodeActionKind::DeleteToken)
                    }),

                    // Node deletion is only supported for local variable
                    // declaration statements; other node kinds get no edit.
                    FixKind::DeleteNode { node_kind } => {
                        if node_kind == "variable_declaration_statement" {
                            source.as_deref().and_then(|src| {
                                goto::code_action_edit(
                                    src,
                                    diag.range,
                                    goto::CodeActionKind::DeleteLocalVar,
                                )
                            })
                        } else {
                            None
                        }
                    }

                    FixKind::DeleteChildNode {
                        walk_to,
                        child_kinds,
                    } => {
                        // Borrow the owned kind strings as &str for the API.
                        let ck: Vec<&str> = child_kinds.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::DeleteChildNode {
                                    walk_to,
                                    child_kinds: &ck,
                                },
                            )
                        })
                    }

                    FixKind::ReplaceChildNode {
                        walk_to,
                        child_kind,
                        replacement,
                    } => source.as_deref().and_then(|src| {
                        goto::code_action_edit(
                            src,
                            diag.range,
                            goto::CodeActionKind::ReplaceChildNode {
                                walk_to,
                                child_kind,
                                replacement,
                            },
                        )
                    }),

                    FixKind::InsertBeforeNode {
                        walk_to,
                        before_child,
                        text,
                    } => {
                        let bc: Vec<&str> = before_child.iter().map(|s| s.as_str()).collect();
                        source.as_deref().and_then(|src| {
                            goto::code_action_edit(
                                src,
                                diag.range,
                                goto::CodeActionKind::InsertBeforeNode {
                                    walk_to,
                                    before_child: &bc,
                                    text,
                                },
                            )
                        })
                    }

                    // Custom fixes have no declarative edit; they fall through
                    // to the per-code match below.
                    FixKind::Custom => None,
                };

                if let Some(edit) = edit_opt {
                    let mut changes = HashMap::new();
                    changes.insert(uri.clone(), vec![edit]);
                    actions.push(CodeActionOrCommand::CodeAction(CodeAction {
                        title: def.title.clone(),
                        kind: Some(CodeActionKind::QUICKFIX),
                        diagnostics: Some(vec![diag.clone()]),
                        edit: Some(WorkspaceEdit {
                            changes: Some(changes),
                            ..Default::default()
                        }),
                        is_preferred: Some(true),
                        ..Default::default()
                    }));
                    continue;
                }

                // A non-Custom fix that produced no edit is done; only Custom
                // fixes proceed to the hand-written dispatch below.
                if !matches!(def.fix, FixKind::Custom) {
                    continue;
                }
            }

            // Per-code custom handlers; currently empty (placeholder kept so
            // future Custom fixes slot in here).
            #[allow(clippy::match_single_binding)]
            match code {
                _ => {}
            }
        }

        if actions.is_empty() {
            Ok(None)
        } else {
            Ok(Some(actions))
        }
    }
4731
    /// Handles `workspace/willRenameFiles`: before the client performs the
    /// rename(s), compute a `WorkspaceEdit` that rewrites import paths in
    /// every affected Solidity source, and patch the in-memory text cache so
    /// subsequent requests already see the updated imports.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no sources are found,
    /// the renames expand to nothing, or no import edits are needed.
    async fn will_rename_files(
        &self,
        params: RenameFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Respect the user setting before doing any work.
        if !self
            .settings
            .read()
            .await
            .file_operations
            .update_imports_on_rename
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willRenameFiles: updateImportsOnRename disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project source files off the async runtime (filesystem
        // walk happens in a blocking task).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willRenameFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Raw (old, new) path pairs from the request; entries with unparsable
        // URIs are silently dropped.
        let raw_renames: Vec<(std::path::PathBuf, std::path::PathBuf)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                let old_path = old_uri.to_file_path().ok()?;
                let new_path = new_uri.to_file_path().ok()?;
                Some((old_path, new_path))
            })
            .collect();

        // A folder rename becomes one rename per contained source file.
        let renames = file_operations::expand_folder_renames(&raw_renames, &source_files);

        if renames.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} rename(s) after folder expansion",
                    renames.len()
                ),
            )
            .await;

        // Determine which source files are not yet in the text cache so their
        // contents can be loaded from disk before computing edits.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Bulk-read the missing files off the runtime; unreadable files
            // are skipped (best-effort).
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                // or_insert: never clobber a buffer the editor already owns.
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute the import-path edits, reading file contents through the
        // (now fully populated) text cache.
        let text_cache = self.text_cache.clone();
        let result = {
            let tc = text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::rename_imports(
                &source_files,
                &renames,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal anomalies from the edit computation in the log.
        let stats = &result.stats;
        if stats.read_failures > 0 || stats.pathdiff_failures > 0 || stats.duplicate_renames > 0 {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willRenameFiles stats: read_failures={}, pathdiff_failures={}, \
                         duplicate_renames={}, no_parent={}, no_op_skips={}, dedup_skips={}",
                        stats.read_failures,
                        stats.pathdiff_failures,
                        stats.duplicate_renames,
                        stats.no_parent,
                        stats.no_op_skips,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;

        if all_edits.is_empty() {
            self.client
                .log_message(MessageType::INFO, "willRenameFiles: no import edits needed")
                .await;
            return Ok(None);
        }

        // Apply the edits to the in-memory cache as well, so features that run
        // before the client saves the files see consistent imports.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willRenameFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
4926
4927 async fn did_rename_files(&self, params: RenameFilesParams) {
4928 self.client
4929 .log_message(
4930 MessageType::INFO,
4931 format!("workspace/didRenameFiles: {} file(s)", params.files.len()),
4932 )
4933 .await;
4934 self.project_cache_dirty.store(true, Ordering::Release);
4935 {
4936 let mut changed = self.project_cache_changed_files.write().await;
4937 for file in ¶ms.files {
4938 if let Ok(old_uri) = Url::parse(&file.old_uri)
4939 && let Ok(old_path) = old_uri.to_file_path()
4940 {
4941 changed.insert(old_path.to_string_lossy().to_string());
4942 }
4943 if let Ok(new_uri) = Url::parse(&file.new_uri)
4944 && let Ok(new_path) = new_uri.to_file_path()
4945 {
4946 changed.insert(new_path.to_string_lossy().to_string());
4947 }
4948 }
4949 }
4950
4951 let raw_uri_pairs: Vec<(Url, Url)> = params
4953 .files
4954 .iter()
4955 .filter_map(|fr| {
4956 let old_uri = Url::parse(&fr.old_uri).ok()?;
4957 let new_uri = Url::parse(&fr.new_uri).ok()?;
4958 Some((old_uri, new_uri))
4959 })
4960 .collect();
4961
4962 let file_renames = {
4963 let tc = self.text_cache.read().await;
4964 let cache_paths: Vec<std::path::PathBuf> = tc
4965 .keys()
4966 .filter_map(|k| Url::parse(k).ok())
4967 .filter_map(|u| u.to_file_path().ok())
4968 .collect();
4969 drop(tc);
4970
4971 let cfg = self.foundry_config.read().await.clone();
4974 let discovered_paths =
4975 tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
4976 .await
4977 .unwrap_or_default();
4978
4979 let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
4980 all_paths.extend(cache_paths);
4981 let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();
4982
4983 file_operations::expand_folder_renames_from_paths(&raw_uri_pairs, &all_paths)
4984 };
4985
4986 self.client
4987 .log_message(
4988 MessageType::INFO,
4989 format!(
4990 "didRenameFiles: migrating {} cache entry/entries",
4991 file_renames.len()
4992 ),
4993 )
4994 .await;
4995
4996 {
5000 let mut tc = self.text_cache.write().await;
5001 for (old_key, new_key) in &file_renames {
5002 if let Some(entry) = tc.remove(old_key) {
5003 tc.insert(new_key.clone(), entry);
5004 }
5005 }
5006 }
5007 {
5008 let mut ac = self.ast_cache.write().await;
5009 for (old_key, _) in &file_renames {
5010 ac.remove(old_key);
5011 }
5012 }
5013 {
5014 let mut cc = self.completion_cache.write().await;
5015 for (old_key, _) in &file_renames {
5016 cc.remove(old_key);
5017 }
5018 }
5019
5020 let root_key = self.project_cache_key().await;
5023 if let Some(ref key) = root_key {
5024 self.ast_cache.write().await.remove(key);
5025 }
5026
5027 let foundry_config = self.foundry_config.read().await.clone();
5028 let ast_cache = self.ast_cache.clone();
5029 let client = self.client.clone();
5030 let text_cache_snapshot = self.text_cache.read().await.clone();
5034
5035 tokio::spawn(async move {
5036 let Some(cache_key) = root_key else {
5037 return;
5038 };
5039 match crate::solc::solc_project_index(
5040 &foundry_config,
5041 Some(&client),
5042 Some(&text_cache_snapshot),
5043 )
5044 .await
5045 {
5046 Ok(ast_data) => {
5047 let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
5048 let source_count = cached_build.nodes.len();
5049 ast_cache.write().await.insert(cache_key, cached_build);
5050 client
5051 .log_message(
5052 MessageType::INFO,
5053 format!("didRenameFiles: re-indexed {} source files", source_count),
5054 )
5055 .await;
5056 }
5057 Err(e) => {
5058 client
5059 .log_message(
5060 MessageType::WARNING,
5061 format!("didRenameFiles: re-index failed: {e}"),
5062 )
5063 .await;
5064 }
5065 }
5066 });
5067 }
5068
    /// Handles `workspace/willDeleteFiles`: before the client deletes the
    /// files, returns a `WorkspaceEdit` that removes `import` statements
    /// referencing the soon-to-be-deleted files from every project source.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no sources are found,
    /// or no imports need removing.
    async fn will_delete_files(
        &self,
        params: DeleteFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        // Honor the user setting gating this feature.
        if !update_imports_on_delete_enabled(&*self.settings.read().await) {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: updateImportsOnDelete disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project sources; the walk hits the filesystem, so run
        // it on the blocking thread pool.
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willDeleteFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Deleting a folder arrives as a single URI; expand it to the
        // individual source files underneath it.
        let raw_deletes: Vec<std::path::PathBuf> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .filter_map(|u| u.to_file_path().ok())
            .collect();

        let deletes = file_operations::expand_folder_deletes(&raw_deletes, &source_files);
        if deletes.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} delete target(s) after folder expansion",
                    deletes.len()
                ),
            )
            .await;

        // Source files missing from the text cache (never opened); their
        // content must be read from disk before edits can be computed.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Bulk-read the missing files off the async runtime, then seed the
            // cache at version 0 without clobbering any concurrent inserts
            // (entry().or_insert keeps an existing newer entry).
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute the import-removal edits, sourcing file bytes from the
        // now fully primed text cache via the closure below.
        let result = {
            let tc = self.text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::delete_imports(
                &source_files,
                &deletes,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface any anomalies encountered while computing the edits.
        let stats = &result.stats;
        if stats.read_failures > 0
            || stats.statement_range_failures > 0
            || stats.duplicate_deletes > 0
        {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willDeleteFiles stats: read_failures={}, statement_range_failures={}, \
                        duplicate_deletes={}, no_parent={}, dedup_skips={}",
                        stats.read_failures,
                        stats.statement_range_failures,
                        stats.duplicate_deletes,
                        stats.no_parent,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;
        if all_edits.is_empty() {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: no import-removal edits needed",
                )
                .await;
            return Ok(None);
        }

        // Patch our own cached copies immediately so follow-up requests see
        // the post-edit text even before the client applies the edit.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willDeleteFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
5244
5245 async fn did_delete_files(&self, params: DeleteFilesParams) {
5246 self.client
5247 .log_message(
5248 MessageType::INFO,
5249 format!("workspace/didDeleteFiles: {} file(s)", params.files.len()),
5250 )
5251 .await;
5252 self.project_cache_dirty.store(true, Ordering::Release);
5253 {
5254 let mut changed = self.project_cache_changed_files.write().await;
5255 for file in ¶ms.files {
5256 if let Ok(uri) = Url::parse(&file.uri)
5257 && let Ok(path) = uri.to_file_path()
5258 {
5259 changed.insert(path.to_string_lossy().to_string());
5260 }
5261 }
5262 }
5263
5264 let raw_delete_uris: Vec<Url> = params
5265 .files
5266 .iter()
5267 .filter_map(|fd| Url::parse(&fd.uri).ok())
5268 .collect();
5269
5270 let deleted_paths = {
5271 let tc = self.text_cache.read().await;
5272 let cache_paths: Vec<std::path::PathBuf> = tc
5273 .keys()
5274 .filter_map(|k| Url::parse(k).ok())
5275 .filter_map(|u| u.to_file_path().ok())
5276 .collect();
5277 drop(tc);
5278
5279 let cfg = self.foundry_config.read().await.clone();
5280 let discovered_paths =
5281 tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
5282 .await
5283 .unwrap_or_default();
5284
5285 let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
5286 all_paths.extend(cache_paths);
5287 let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();
5288
5289 file_operations::expand_folder_deletes_from_paths(&raw_delete_uris, &all_paths)
5290 };
5291
5292 let mut deleted_keys: HashSet<String> = HashSet::new();
5293 let mut deleted_uris: Vec<Url> = Vec::new();
5294 for path in deleted_paths {
5295 if let Ok(uri) = Url::from_file_path(&path) {
5296 deleted_keys.insert(uri.to_string());
5297 deleted_uris.push(uri);
5298 }
5299 }
5300 if deleted_keys.is_empty() {
5301 return;
5302 }
5303
5304 self.client
5305 .log_message(
5306 MessageType::INFO,
5307 format!(
5308 "didDeleteFiles: deleting {} cache/diagnostic entry(ies)",
5309 deleted_keys.len()
5310 ),
5311 )
5312 .await;
5313
5314 for uri in &deleted_uris {
5315 self.client
5316 .publish_diagnostics(uri.clone(), vec![], None)
5317 .await;
5318 }
5319
5320 let mut removed_text = 0usize;
5321 let mut removed_ast = 0usize;
5322 let mut removed_completion = 0usize;
5323 let mut removed_semantic = 0usize;
5324 let mut removed_pending_create = 0usize;
5325 {
5326 let mut tc = self.text_cache.write().await;
5327 for key in &deleted_keys {
5328 if tc.remove(key).is_some() {
5329 removed_text += 1;
5330 }
5331 }
5332 }
5333 {
5334 let mut ac = self.ast_cache.write().await;
5335 for key in &deleted_keys {
5336 if ac.remove(key).is_some() {
5337 removed_ast += 1;
5338 }
5339 }
5340 }
5341 {
5342 let mut cc = self.completion_cache.write().await;
5343 for key in &deleted_keys {
5344 if cc.remove(key).is_some() {
5345 removed_completion += 1;
5346 }
5347 }
5348 }
5349 {
5350 let mut sc = self.semantic_token_cache.write().await;
5351 for key in &deleted_keys {
5352 if sc.remove(key).is_some() {
5353 removed_semantic += 1;
5354 }
5355 }
5356 }
5357 {
5358 let mut pending = self.pending_create_scaffold.write().await;
5359 for key in &deleted_keys {
5360 if pending.remove(key) {
5361 removed_pending_create += 1;
5362 }
5363 }
5364 }
5365 self.client
5366 .log_message(
5367 MessageType::INFO,
5368 format!(
5369 "didDeleteFiles: removed caches text={} ast={} completion={} semantic={} pendingCreate={}",
5370 removed_text,
5371 removed_ast,
5372 removed_completion,
5373 removed_semantic,
5374 removed_pending_create,
5375 ),
5376 )
5377 .await;
5378
5379 let root_key = self.project_cache_key().await;
5380 if let Some(ref key) = root_key {
5381 self.ast_cache.write().await.remove(key);
5382 }
5383
5384 let foundry_config = self.foundry_config.read().await.clone();
5385 let ast_cache = self.ast_cache.clone();
5386 let client = self.client.clone();
5387 let text_cache_snapshot = self.text_cache.read().await.clone();
5388
5389 tokio::spawn(async move {
5390 let Some(cache_key) = root_key else {
5391 return;
5392 };
5393 match crate::solc::solc_project_index(
5394 &foundry_config,
5395 Some(&client),
5396 Some(&text_cache_snapshot),
5397 )
5398 .await
5399 {
5400 Ok(ast_data) => {
5401 let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
5402 let source_count = cached_build.nodes.len();
5403 ast_cache.write().await.insert(cache_key, cached_build);
5404 client
5405 .log_message(
5406 MessageType::INFO,
5407 format!("didDeleteFiles: re-indexed {} source files", source_count),
5408 )
5409 .await;
5410 }
5411 Err(e) => {
5412 client
5413 .log_message(
5414 MessageType::WARNING,
5415 format!("didDeleteFiles: re-index failed: {e}"),
5416 )
5417 .await;
5418 }
5419 }
5420 });
5421 }
5422
5423 async fn will_create_files(
5424 &self,
5425 params: CreateFilesParams,
5426 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
5427 self.client
5428 .log_message(
5429 MessageType::INFO,
5430 format!("workspace/willCreateFiles: {} file(s)", params.files.len()),
5431 )
5432 .await;
5433 if !self
5434 .settings
5435 .read()
5436 .await
5437 .file_operations
5438 .template_on_create
5439 {
5440 self.client
5441 .log_message(
5442 MessageType::INFO,
5443 "willCreateFiles: templateOnCreate disabled",
5444 )
5445 .await;
5446 return Ok(None);
5447 }
5448 self.client
5449 .log_message(
5450 MessageType::INFO,
5451 "willCreateFiles: skipping pre-create edits; scaffolding via didCreateFiles",
5452 )
5453 .await;
5454 Ok(None)
5455 }
5456
5457 async fn did_create_files(&self, params: CreateFilesParams) {
5458 self.client
5459 .log_message(
5460 MessageType::INFO,
5461 format!("workspace/didCreateFiles: {} file(s)", params.files.len()),
5462 )
5463 .await;
5464 self.project_cache_dirty.store(true, Ordering::Release);
5465 {
5466 let mut changed = self.project_cache_changed_files.write().await;
5467 for file in ¶ms.files {
5468 if let Ok(uri) = Url::parse(&file.uri)
5469 && let Ok(path) = uri.to_file_path()
5470 {
5471 changed.insert(path.to_string_lossy().to_string());
5472 }
5473 }
5474 }
5475 if !self
5476 .settings
5477 .read()
5478 .await
5479 .file_operations
5480 .template_on_create
5481 {
5482 self.client
5483 .log_message(
5484 MessageType::INFO,
5485 "didCreateFiles: templateOnCreate disabled",
5486 )
5487 .await;
5488 return;
5489 }
5490
5491 let config = self.foundry_config.read().await;
5492 let solc_version = config.solc_version.clone();
5493 drop(config);
5494
5495 let mut apply_edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
5500 let mut staged_content: HashMap<String, String> = HashMap::new();
5501 let mut created_uris: Vec<String> = Vec::new();
5502 {
5503 let tc = self.text_cache.read().await;
5504 for file_create in ¶ms.files {
5505 let uri = match Url::parse(&file_create.uri) {
5506 Ok(u) => u,
5507 Err(_) => continue,
5508 };
5509 let uri_str = uri.to_string();
5510
5511 let open_has_content = tc
5512 .get(&uri_str)
5513 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()));
5514 let path = match uri.to_file_path() {
5515 Ok(p) => p,
5516 Err(_) => continue,
5517 };
5518 let disk_has_content = std::fs::read_to_string(&path)
5519 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()));
5520
5521 if open_has_content {
5524 self.client
5525 .log_message(
5526 MessageType::INFO,
5527 format!(
5528 "didCreateFiles: skip {} (open buffer already has content)",
5529 uri_str
5530 ),
5531 )
5532 .await;
5533 continue;
5534 }
5535
5536 if disk_has_content {
5538 self.client
5539 .log_message(
5540 MessageType::INFO,
5541 format!(
5542 "didCreateFiles: skip {} (disk file already has content)",
5543 uri_str
5544 ),
5545 )
5546 .await;
5547 continue;
5548 }
5549
5550 let content =
5551 match file_operations::generate_scaffold(&uri, solc_version.as_deref()) {
5552 Some(s) => s,
5553 None => continue,
5554 };
5555
5556 staged_content.insert(uri_str, content.clone());
5557 created_uris.push(uri.to_string());
5558
5559 apply_edits.entry(uri).or_default().push(TextEdit {
5560 range: Range {
5561 start: Position {
5562 line: 0,
5563 character: 0,
5564 },
5565 end: Position {
5566 line: 0,
5567 character: 0,
5568 },
5569 },
5570 new_text: content,
5571 });
5572 }
5573 }
5574
5575 if !apply_edits.is_empty() {
5576 {
5577 let mut pending = self.pending_create_scaffold.write().await;
5578 for uri in &created_uris {
5579 pending.insert(uri.clone());
5580 }
5581 }
5582
5583 let edit = WorkspaceEdit {
5584 changes: Some(apply_edits.clone()),
5585 document_changes: None,
5586 change_annotations: None,
5587 };
5588 self.client
5589 .log_message(
5590 MessageType::INFO,
5591 format!(
5592 "didCreateFiles: scaffolding {} empty file(s) via workspace/applyEdit",
5593 apply_edits.len()
5594 ),
5595 )
5596 .await;
5597 let apply_result = self.client.apply_edit(edit).await;
5598 let applied = apply_result.as_ref().is_ok_and(|r| r.applied);
5599
5600 if applied {
5601 let mut tc = self.text_cache.write().await;
5602 for (uri_str, content) in staged_content {
5603 tc.insert(uri_str, (0, content));
5604 }
5605 } else {
5606 if let Ok(resp) = &apply_result {
5607 self.client
5608 .log_message(
5609 MessageType::WARNING,
5610 format!(
5611 "didCreateFiles: applyEdit rejected (no disk fallback): {:?}",
5612 resp.failure_reason
5613 ),
5614 )
5615 .await;
5616 } else if let Err(e) = &apply_result {
5617 self.client
5618 .log_message(
5619 MessageType::WARNING,
5620 format!("didCreateFiles: applyEdit failed (no disk fallback): {e}"),
5621 )
5622 .await;
5623 }
5624 }
5625 }
5626
5627 for file_create in ¶ms.files {
5631 let Ok(uri) = Url::parse(&file_create.uri) else {
5632 continue;
5633 };
5634 let (version, content) = {
5635 let tc = self.text_cache.read().await;
5636 match tc.get(&uri.to_string()) {
5637 Some((v, c)) => (*v, c.clone()),
5638 None => continue,
5639 }
5640 };
5641 if !content.chars().any(|ch| !ch.is_whitespace()) {
5642 continue;
5643 }
5644 self.on_change(TextDocumentItem {
5645 uri,
5646 version,
5647 text: content,
5648 language_id: "solidity".to_string(),
5649 })
5650 .await;
5651 }
5652
5653 let root_key = self.project_cache_key().await;
5655 if let Some(ref key) = root_key {
5656 self.ast_cache.write().await.remove(key);
5657 }
5658
5659 let foundry_config = self.foundry_config.read().await.clone();
5660 let ast_cache = self.ast_cache.clone();
5661 let client = self.client.clone();
5662 let text_cache_snapshot = self.text_cache.read().await.clone();
5663
5664 tokio::spawn(async move {
5665 let Some(cache_key) = root_key else {
5666 return;
5667 };
5668 match crate::solc::solc_project_index(
5669 &foundry_config,
5670 Some(&client),
5671 Some(&text_cache_snapshot),
5672 )
5673 .await
5674 {
5675 Ok(ast_data) => {
5676 let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
5677 let source_count = cached_build.nodes.len();
5678 ast_cache.write().await.insert(cache_key, cached_build);
5679 client
5680 .log_message(
5681 MessageType::INFO,
5682 format!("didCreateFiles: re-indexed {} source files", source_count),
5683 )
5684 .await;
5685 }
5686 Err(e) => {
5687 client
5688 .log_message(
5689 MessageType::WARNING,
5690 format!("didCreateFiles: re-index failed: {e}"),
5691 )
5692 .await;
5693 }
5694 }
5695 });
5696 }
5697}
5698
#[cfg(test)]
mod tests {
    use super::{
        start_or_mark_project_cache_sync_pending, stop_project_cache_sync_worker_or_reclaim,
        take_project_cache_sync_pending, try_claim_project_cache_dirty,
        update_imports_on_delete_enabled,
    };
    use std::sync::atomic::{AtomicBool, Ordering};

    #[test]
    fn update_imports_on_delete_enabled_defaults_true() {
        let defaults = crate::config::Settings::default();
        assert!(update_imports_on_delete_enabled(&defaults));
    }

    #[test]
    fn update_imports_on_delete_enabled_respects_false() {
        let mut settings = crate::config::Settings::default();
        settings.file_operations.update_imports_on_delete = false;
        assert!(!update_imports_on_delete_enabled(&settings));
    }

    #[test]
    fn project_cache_sync_burst_only_first_starts_worker() {
        let (pending, running) = (AtomicBool::new(false), AtomicBool::new(false));

        // First caller in a burst claims the worker slot and sets both flags.
        assert!(start_or_mark_project_cache_sync_pending(&pending, &running));
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));

        // Later callers only re-mark pending; no second worker starts.
        assert!(!start_or_mark_project_cache_sync_pending(
            &pending, &running
        ));
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_sync_take_pending_is_one_shot() {
        let pending = AtomicBool::new(true);

        // Taking the flag consumes it; a second take sees nothing.
        assert!(take_project_cache_sync_pending(&pending));
        assert!(!pending.load(Ordering::Acquire));
        assert!(!take_project_cache_sync_pending(&pending));
    }

    #[test]
    fn project_cache_sync_worker_stop_or_reclaim_handles_race() {
        let (pending, running) = (AtomicBool::new(false), AtomicBool::new(true));

        // Nothing pending: the worker stops and clears the running flag.
        assert!(!stop_project_cache_sync_worker_or_reclaim(
            &pending, &running
        ));
        assert!(!running.load(Ordering::Acquire));

        // Work raced in while stopping: the worker reclaims itself.
        pending.store(true, Ordering::Release);
        running.store(true, Ordering::Release);
        assert!(stop_project_cache_sync_worker_or_reclaim(
            &pending, &running
        ));
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_dirty_claim_and_retry_cycle() {
        let dirty = AtomicBool::new(true);

        // First claim wins and clears the flag.
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));

        // Nothing left to claim until the flag is raised again.
        assert!(!try_claim_project_cache_dirty(&dirty));
        dirty.store(true, Ordering::Release);
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));
    }
}