1use crate::completion;
2use crate::config::{self, FoundryConfig, LintConfig, Settings};
3use crate::file_operations;
4use crate::folding;
5use crate::goto;
6use crate::highlight;
7use crate::hover;
8use crate::inlay_hints;
9use crate::links;
10use crate::references;
11use crate::rename;
12use crate::runner::{ForgeRunner, Runner};
13use crate::selection;
14use crate::semantic_tokens;
15use crate::symbols;
16use crate::utils;
17use std::collections::{HashMap, HashSet};
18use std::path::{Component, Path, PathBuf};
19use std::sync::Arc;
20use std::sync::atomic::{AtomicU64, Ordering};
21use tokio::sync::RwLock;
22use tower_lsp::{Client, LanguageServer, lsp_types::*};
23
/// Cache of computed semantic tokens, keyed by String (presumably the
/// document URI; confirm against callers) mapping to a (result id, token
/// list) pair.
type SemanticTokenCache = HashMap<String, (String, Vec<SemanticToken>)>;
26
/// Core state of the Forge/Solidity language server.
///
/// Every shared field is wrapped in `Arc` so it can be cloned into spawned
/// background tasks; `RwLock` guards the mutable maps and the `AtomicBool`s
/// act as lightweight coordination flags for the project-cache workers.
pub struct ForgeLsp {
    // Handle for talking back to the editor (diagnostics, logs, progress).
    client: Client,
    // Compiler backend (forge or solar) used for lints, builds and ASTs.
    compiler: Arc<dyn Runner>,
    // Cached AST builds keyed by document URI; the whole-project build is
    // stored under the project-root key from `project_cache_key`.
    ast_cache: Arc<RwLock<HashMap<String, Arc<goto::CachedBuild>>>>,
    // Latest document text keyed by URI, with its LSP version number.
    text_cache: Arc<RwLock<HashMap<String, (i32, String)>>>,
    // Per-document completion caches derived from the cached builds.
    completion_cache: Arc<RwLock<HashMap<String, Arc<completion::CompletionCache>>>>,
    // Which files should be linted and how.
    lint_config: Arc<RwLock<LintConfig>>,
    // Parsed foundry.toml settings (root, ignored error codes, ...).
    foundry_config: Arc<RwLock<FoundryConfig>>,
    // Capabilities announced by the client at initialize time.
    client_capabilities: Arc<RwLock<Option<ClientCapabilities>>>,
    // User-configurable server settings.
    settings: Arc<RwLock<Settings>>,
    // When true, use solc directly for AST + diagnostics (forge fallback).
    use_solc: bool,
    // Semantic tokens per document; see `SemanticTokenCache`.
    semantic_token_cache: Arc<RwLock<SemanticTokenCache>>,
    // Monotonic counter, presumably for semantic-token result ids — confirm.
    semantic_token_id: Arc<AtomicU64>,
    // Workspace root URI supplied by the client, if any.
    root_uri: Arc<RwLock<Option<Url>>>,
    // Set once the whole-project index has run (or been warm-loaded).
    project_indexed: Arc<std::sync::atomic::AtomicBool>,
    // True when the in-memory project cache has unsaved changes.
    project_cache_dirty: Arc<std::sync::atomic::AtomicBool>,
    // Worker-coordination flags: running = a worker owns the job,
    // pending = another request arrived while a worker was active.
    project_cache_sync_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_sync_pending: Arc<std::sync::atomic::AtomicBool>,
    project_cache_upsert_running: Arc<std::sync::atomic::AtomicBool>,
    project_cache_upsert_pending: Arc<std::sync::atomic::AtomicBool>,
    // Files queued for the sync / upsert workers, respectively.
    project_cache_changed_files: Arc<RwLock<HashSet<String>>>,
    project_cache_upsert_files: Arc<RwLock<HashSet<String>>>,
    // Paths awaiting scaffold content after a create event — TODO confirm
    // exact semantics against the file_operations handlers.
    pending_create_scaffold: Arc<RwLock<HashSet<String>>>,
    // True when settings came from initializationOptions (vs. didChange
    // configuration) — presumably; verify against the initialize handler.
    settings_from_init: Arc<std::sync::atomic::AtomicBool>,
}
78
79impl ForgeLsp {
80 pub fn new(client: Client, use_solar: bool, use_solc: bool) -> Self {
81 let compiler: Arc<dyn Runner> = if use_solar {
82 Arc::new(crate::solar_runner::SolarRunner)
83 } else {
84 Arc::new(ForgeRunner)
85 };
86 let ast_cache = Arc::new(RwLock::new(HashMap::new()));
87 let text_cache = Arc::new(RwLock::new(HashMap::new()));
88 let completion_cache = Arc::new(RwLock::new(HashMap::new()));
89 let lint_config = Arc::new(RwLock::new(LintConfig::default()));
90 let foundry_config = Arc::new(RwLock::new(FoundryConfig::default()));
91 let client_capabilities = Arc::new(RwLock::new(None));
92 let settings = Arc::new(RwLock::new(Settings::default()));
93 Self {
94 client,
95 compiler,
96 ast_cache,
97 text_cache,
98 completion_cache,
99 lint_config,
100 foundry_config,
101 client_capabilities,
102 settings,
103 use_solc,
104 semantic_token_cache: Arc::new(RwLock::new(HashMap::new())),
105 semantic_token_id: Arc::new(AtomicU64::new(0)),
106 root_uri: Arc::new(RwLock::new(None)),
107 project_indexed: Arc::new(std::sync::atomic::AtomicBool::new(false)),
108 project_cache_dirty: Arc::new(std::sync::atomic::AtomicBool::new(false)),
109 project_cache_sync_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
110 project_cache_sync_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
111 project_cache_upsert_running: Arc::new(std::sync::atomic::AtomicBool::new(false)),
112 project_cache_upsert_pending: Arc::new(std::sync::atomic::AtomicBool::new(false)),
113 project_cache_changed_files: Arc::new(RwLock::new(HashSet::new())),
114 project_cache_upsert_files: Arc::new(RwLock::new(HashSet::new())),
115 pending_create_scaffold: Arc::new(RwLock::new(HashSet::new())),
116 settings_from_init: Arc::new(std::sync::atomic::AtomicBool::new(false)),
117 }
118 }
119
    /// Loads the Foundry configuration applicable to `file_path` via
    /// `config::load_foundry_config`.
    ///
    /// NOTE(review): no `.await` here — the loader appears synchronous, so it
    /// briefly blocks the async executor; presumably fine for small config
    /// files, confirm if it ever scans large trees.
    async fn foundry_config_for_file(&self, file_path: &std::path::Path) -> FoundryConfig {
        config::load_foundry_config(file_path)
    }
130
131 async fn project_cache_key(&self) -> Option<String> {
136 if let Some(uri) = self.root_uri.read().await.as_ref() {
137 return Some(uri.to_string());
138 }
139
140 let mut root = self.foundry_config.read().await.root.clone();
141 if !root.is_absolute()
142 && let Ok(cwd) = std::env::current_dir()
143 {
144 root = cwd.join(root);
145 }
146 if !root.is_dir() {
147 root = root.parent()?.to_path_buf();
148 }
149 Url::from_directory_path(root).ok().map(|u| u.to_string())
150 }
151
    /// Returns the project-wide cached build, constructing it on demand.
    ///
    /// Lookup order:
    /// 1. In-memory `ast_cache` entry under the project-root key.
    /// 2. On-disk reference cache (loaded on a blocking thread); used as-is
    ///    when the load reports `complete`.
    /// 3. If the disk cache was partial, files changed since it was written
    ///    are expanded to their reverse-import closure, re-indexed with solc,
    ///    merged into the cached build, and the merged result is saved back.
    ///
    /// Returns `None` when solc mode / full-project scan is disabled, the
    /// project root is missing, or the cache could not be loaded at all.
    async fn ensure_project_cached_build(&self) -> Option<Arc<goto::CachedBuild>> {
        let root_key = self.project_cache_key().await?;
        // Fast path: already warm in memory.
        if let Some(existing) = self.ast_cache.read().await.get(&root_key).cloned() {
            return Some(existing);
        }

        // Warm-loading only applies in solc mode with full-project scan on.
        let settings = self.settings.read().await.clone();
        if !self.use_solc || !settings.project_index.full_project_scan {
            return None;
        }

        let foundry_config = self.foundry_config.read().await.clone();
        if !foundry_config.root.is_dir() {
            return None;
        }

        // Cache loading does hashing + file IO: keep it off the executor.
        let cache_mode = settings.project_index.cache_mode.clone();
        let cfg_for_load = foundry_config.clone();
        let load_res = tokio::task::spawn_blocking(move || {
            crate::project_cache::load_reference_cache_with_report(&cfg_for_load, cache_mode, true)
        })
        .await;

        let Ok(report) = load_res else {
            return None;
        };
        let Some(build) = report.build else {
            return None;
        };

        // Publish the (possibly partial) build immediately so readers have
        // something to work with while reconciliation proceeds below.
        let source_count = build.nodes.len();
        let complete = report.complete;
        let duration_ms = report.duration_ms;
        let reused = report.file_count_reused;
        let hashed = report.file_count_hashed;
        let arc = Arc::new(build);
        self.ast_cache
            .write()
            .await
            .insert(root_key.clone(), arc.clone());
        // Mark indexed so on_change's background indexer does not re-run.
        self.project_indexed
            .store(true, std::sync::atomic::Ordering::Relaxed);
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "references warm-load: project cache loaded (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                    source_count, reused, hashed, complete, duration_ms
                ),
            )
            .await;

        if complete {
            return Some(arc);
        }

        // Partial cache: determine which files changed since it was written.
        let cfg_for_diff = foundry_config.clone();
        let changed = tokio::task::spawn_blocking(move || {
            crate::project_cache::changed_files_since_v2_cache(&cfg_for_diff, true)
        })
        .await
        .ok()
        .and_then(Result::ok)
        .unwrap_or_default();

        if changed.is_empty() {
            return Some(arc);
        }

        // Expand to everything that (transitively) imports a changed file.
        let remappings = crate::solc::resolve_remappings(&foundry_config).await;
        let cfg_for_plan = foundry_config.clone();
        let changed_for_plan = changed.clone();
        let remappings_for_plan = remappings.clone();
        let affected_set = tokio::task::spawn_blocking(move || {
            compute_reverse_import_closure(&cfg_for_plan, &changed_for_plan, &remappings_for_plan)
        })
        .await
        .ok()
        .unwrap_or_default();
        let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
        // Closure computation failing/empty: fall back to the raw changed set.
        if affected_files.is_empty() {
            affected_files = changed;
        }

        // Re-index only the affected files, overlaying open-editor text.
        let text_cache_snapshot = self.text_cache.read().await.clone();
        match crate::solc::solc_project_index_scoped(
            &foundry_config,
            Some(&self.client),
            Some(&text_cache_snapshot),
            &affected_files,
        )
        .await
        {
            Ok(ast_data) => {
                let scoped_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                let mut merge_error: Option<String> = None;
                // Merge the scoped build into whatever is cached right now;
                // on merge failure fall back to the scoped build alone.
                let merged = {
                    let mut cache = self.ast_cache.write().await;
                    let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                        let mut merged = (*existing).clone();
                        match merge_scoped_cached_build(&mut merged, (*scoped_build).clone()) {
                            Ok(_) => Arc::new(merged),
                            Err(e) => {
                                merge_error = Some(e);
                                scoped_build.clone()
                            }
                        }
                    } else {
                        scoped_build.clone()
                    };
                    cache.insert(root_key.clone(), merged.clone());
                    merged
                };
                if let Some(e) = merge_error {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!(
                                "references warm-load reconcile: merge failed, using scoped build: {}",
                                e
                            ),
                        )
                        .await;
                }

                // Persist the reconciled build (blocking thread again).
                let cfg_for_save = foundry_config.clone();
                let build_for_save = (*merged).clone();
                let save_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::save_reference_cache_with_report(
                        &cfg_for_save,
                        &build_for_save,
                        None,
                    )
                })
                .await;
                if let Ok(Ok(report)) = save_res {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-load reconcile: saved cache (affected={}, hashed_files={}, duration={}ms)",
                                affected_files.len(),
                                report.file_count_hashed,
                                report.duration_ms
                            ),
                        )
                        .await;
                }
                Some(merged)
            }
            Err(e) => {
                // Scoped reindex failed: keep serving the partial build.
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!(
                            "references warm-load reconcile: scoped reindex failed: {}",
                            e
                        ),
                    )
                    .await;
                Some(arc)
            }
        }
    }
322
    /// Persists the in-memory project-wide build to the on-disk reference
    /// cache. `reason` only appears in log messages. No-ops unless solc mode
    /// with full-project scan is enabled and a project build is cached.
    async fn flush_project_cache_to_disk(&self, reason: &str) {
        if !self.use_solc || !self.settings.read().await.project_index.full_project_scan {
            return;
        }
        let Some(root_key) = self.project_cache_key().await else {
            return;
        };
        // Take the build under a short-lived read lock.
        let build = {
            let cache = self.ast_cache.read().await;
            cache.get(&root_key).cloned()
        };
        let Some(build) = build else {
            return;
        };

        // Saving hashes files and writes to disk: run on a blocking thread.
        let foundry_config = self.foundry_config.read().await.clone();
        let build_for_save = (*build).clone();
        let res = tokio::task::spawn_blocking(move || {
            crate::project_cache::save_reference_cache_with_report(
                &foundry_config,
                &build_for_save,
                None,
            )
        })
        .await;

        // Outer Err = the blocking task itself failed (panic/cancel);
        // inner Err = the save reported a failure.
        match res {
            Ok(Ok(report)) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "project cache flush ({}): saved hashed_files={}, duration={}ms",
                            reason, report.file_count_hashed, report.duration_ms
                        ),
                    )
                    .await;
            }
            Ok(Err(e)) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) failed: {}", reason, e),
                    )
                    .await;
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("project cache flush ({}) task failed: {}", reason, e),
                    )
                    .await;
            }
        }
    }
382
    /// Full document-change pipeline: runs lint + build + AST generation for
    /// the changed file, refreshes the in-memory caches, publishes the merged
    /// diagnostics, and (once per session, in solc mode) kicks off the
    /// background whole-project index.
    async fn on_change(&self, params: TextDocumentItem) {
        let uri = params.uri.clone();
        let version = params.version;

        // LSP URIs may not be file-backed (e.g. untitled buffers): bail out.
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return;
            }
        };

        let path_str = match file_path.to_str() {
            Some(s) => s,
            None => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file path")
                    .await;
                return;
            }
        };

        // Snapshot lint applicability + settings under short-lived read locks.
        let (should_lint, lint_settings) = {
            let lint_cfg = self.lint_config.read().await;
            let settings = self.settings.read().await;
            let enabled = lint_cfg.should_lint(&file_path) && settings.lint.enabled;
            let ls = settings.lint.clone();
            (enabled, ls)
        };

        // Produce (lint, build, ast) results. In solc mode one solc run
        // yields both the AST and the build diagnostics; when solc fails we
        // fall back to the forge runner.
        let (lint_result, build_result, ast_result) = if self.use_solc {
            let foundry_cfg = self.foundry_config_for_file(&file_path).await;
            let solc_future = crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client));

            if should_lint {
                // Lint and solc run concurrently.
                let (lint, solc) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    solc_future
                );
                match solc {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        // Current text is needed to map solc output to ranges.
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (Some(lint), Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (Some(lint), build, ast)
                    }
                }
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                match solc_future.await {
                    Ok(data) => {
                        self.client
                            .log_message(
                                MessageType::INFO,
                                "solc: AST + diagnostics from single run",
                            )
                            .await;
                        let content = tokio::fs::read_to_string(&file_path)
                            .await
                            .unwrap_or_default();
                        let build_diags = crate::build::build_output_to_diagnostics(
                            &data,
                            &file_path,
                            &content,
                            &foundry_cfg.ignored_error_codes,
                        );
                        (None, Ok(build_diags), Ok(data))
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("solc failed, falling back to forge: {e}"),
                            )
                            .await;
                        let (build, ast) = tokio::join!(
                            self.compiler.get_build_diagnostics(&uri),
                            self.compiler.ast(path_str)
                        );
                        (None, build, ast)
                    }
                }
            }
        } else {
            // Pure forge/solar path: lint, build and AST via the runner.
            if should_lint {
                let (lint, build, ast) = tokio::join!(
                    self.compiler.get_lint_diagnostics(&uri, &lint_settings),
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (Some(lint), build, ast)
            } else {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("skipping lint for ignored file: {path_str}"),
                    )
                    .await;
                let (build, ast) = tokio::join!(
                    self.compiler.get_build_diagnostics(&uri),
                    self.compiler.ast(path_str)
                );
                (None, build, ast)
            }
        };

        // A build "succeeded" when no diagnostic has ERROR severity.
        let build_succeeded = matches!(&build_result, Ok(diagnostics) if diagnostics.iter().all(|d| d.severity != Some(DiagnosticSeverity::ERROR)));

        if build_succeeded {
            if let Ok(ast_data) = ast_result {
                // Replace the per-document AST and completion caches.
                let cached_build = Arc::new(goto::CachedBuild::new(ast_data, version));
                let mut cache = self.ast_cache.write().await;
                cache.insert(uri.to_string(), cached_build.clone());
                drop(cache);

                {
                    let mut cc = self.completion_cache.write().await;
                    cc.insert(uri.to_string(), cached_build.completion_cache.clone());
                }
                self.client
                    .log_message(MessageType::INFO, "Build successful, AST cache updated")
                    .await;
            } else if let Err(e) = ast_result {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("Build succeeded but failed to get AST: {e}"),
                    )
                    .await;
            }
        } else {
            // Prefer a stale-but-valid AST over one from broken input.
            self.client
                .log_message(
                    MessageType::INFO,
                    "Build errors detected, keeping existing AST cache",
                )
                .await;
        }

        // Record the latest text, guarding against out-of-order versions.
        {
            let mut text_cache = self.text_cache.write().await;
            let uri_str = uri.to_string();
            let existing_version = text_cache.get(&uri_str).map(|(v, _)| *v).unwrap_or(-1);
            if version >= existing_version {
                text_cache.insert(uri_str, (version, params.text));
            }
        }

        let mut all_diagnostics = vec![];

        if let Some(lint_result) = lint_result {
            match lint_result {
                Ok(mut lints) => {
                    // Drop lint codes the user explicitly excluded.
                    if !lint_settings.exclude.is_empty() {
                        lints.retain(|d| {
                            if let Some(NumberOrString::String(code)) = &d.code {
                                !lint_settings.exclude.iter().any(|ex| ex == code)
                            } else {
                                true
                            }
                        });
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!("found {} lint diagnostics", lints.len()),
                        )
                        .await;
                    all_diagnostics.append(&mut lints);
                }
                Err(e) => {
                    self.client
                        .log_message(
                            MessageType::ERROR,
                            format!("Forge lint diagnostics failed: {e}"),
                        )
                        .await;
                }
            }
        }

        match build_result {
            Ok(mut builds) => {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!("found {} build diagnostics", builds.len()),
                    )
                    .await;
                all_diagnostics.append(&mut builds);
            }
            Err(e) => {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("Forge build diagnostics failed: {e}"),
                    )
                    .await;
            }
        }

        // Some clients render empty messages poorly; give a placeholder.
        for diag in &mut all_diagnostics {
            if diag.message.is_empty() {
                diag.message = "Unknown issue".to_string();
            }
        }

        self.client
            .publish_diagnostics(uri, all_diagnostics, None)
            .await;

        // A successful build may change inlay hints; ask the client to
        // refetch them (fire-and-forget).
        if build_succeeded {
            let client = self.client.clone();
            tokio::spawn(async move {
                let _ = client.inlay_hint_refresh().await;
            });
        }

        // One-shot background whole-project index (solc + full scan only).
        if build_succeeded
            && self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && !self
                .project_indexed
                .load(std::sync::atomic::Ordering::Relaxed)
        {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            // Set the flag before spawning so concurrent on_change calls
            // don't race into a second index run.
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Server-initiated progress: create the token, then Begin.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the on-disk reference cache first (blocking IO
                // off-thread).
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index: cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // A complete cache makes the solc run unnecessary.
                            if report.complete {
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index: cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index: cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache absent or partial: run the full solc project index.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
                        // Keep entries from a partial cache load that the
                        // fresh run did not cover.
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        let cached_build = Arc::new(new_build);
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!("project index: cached {} source files", source_count),
                            )
                            .await;

                        // Persist asynchronously; results are already live
                        // in memory.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index: cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: failed to persist cache: {e}"),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!("project index: cache save task failed: {e}"),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(MessageType::WARNING, format!("project index failed: {e}"))
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some("Indexing failed".to_string()),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
884
885 async fn get_or_fetch_build(
894 &self,
895 uri: &Url,
896 file_path: &std::path::Path,
897 insert_on_miss: bool,
898 ) -> Option<Arc<goto::CachedBuild>> {
899 let uri_str = uri.to_string();
900
901 {
904 let cache = self.ast_cache.read().await;
905 if let Some(cached) = cache.get(&uri_str) {
906 return Some(cached.clone());
907 }
908 }
909
910 if !insert_on_miss {
914 return None;
915 }
916
917 let path_str = file_path.to_str()?;
919 let ast_result = if self.use_solc {
920 let foundry_cfg = self.foundry_config_for_file(&file_path).await;
921 match crate::solc::solc_ast(path_str, &foundry_cfg, Some(&self.client)).await {
922 Ok(data) => Ok(data),
923 Err(_) => self.compiler.ast(path_str).await,
924 }
925 } else {
926 self.compiler.ast(path_str).await
927 };
928 match ast_result {
929 Ok(data) => {
930 let build = Arc::new(goto::CachedBuild::new(data, 0));
933 let mut cache = self.ast_cache.write().await;
934 cache.insert(uri_str.clone(), build.clone());
935 Some(build)
936 }
937 Err(e) => {
938 self.client
939 .log_message(MessageType::ERROR, format!("failed to get AST: {e}"))
940 .await;
941 None
942 }
943 }
944 }
945
946 async fn get_source_bytes(&self, uri: &Url, file_path: &std::path::Path) -> Option<Vec<u8>> {
949 {
950 let text_cache = self.text_cache.read().await;
951 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
952 return Some(content.as_bytes().to_vec());
953 }
954 }
955 match std::fs::read(file_path) {
956 Ok(bytes) => Some(bytes),
957 Err(e) => {
958 if e.kind() == std::io::ErrorKind::NotFound {
959 self.client
962 .log_message(
963 MessageType::INFO,
964 format!("file not found yet (transient): {e}"),
965 )
966 .await;
967 } else {
968 self.client
969 .log_message(MessageType::ERROR, format!("failed to read file: {e}"))
970 .await;
971 }
972 None
973 }
974 }
975 }
976}
977
/// True when the `file_operations.update_imports_on_delete` setting is
/// enabled (rewrite import paths after a file is deleted).
fn update_imports_on_delete_enabled(settings: &crate::config::Settings) -> bool {
    settings.file_operations.update_imports_on_delete
}
981
/// Records that a project-cache sync is wanted and attempts to become the
/// worker. Always raises `pending`; returns `true` only for the caller that
/// flips `running` from unset to set and must therefore start the worker.
fn start_or_mark_project_cache_sync_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    // Publish the request first so an already-live worker is guaranteed to
    // observe it before it shuts down.
    pending.store(true, Ordering::Release);
    let claim = running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire);
    claim.is_ok()
}
991
/// Consumes the sync request flag: returns whether a request was queued,
/// clearing it in the same atomic operation.
fn take_project_cache_sync_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    pending
        .compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
995
/// Worker shutdown handshake: release the `running` claim, then re-acquire
/// it iff a request arrived while the worker was finishing. Returns `true`
/// when the caller reclaimed the worker role and must keep working.
fn stop_project_cache_sync_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    running.store(false, Ordering::Release);
    if !pending.load(Ordering::Acquire) {
        return false;
    }
    running
        .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
        .is_ok()
}
1006
/// Atomically claims the dirty flag: exactly one caller gets `true` when the
/// cache was marked dirty; the flag is cleared in the same step.
fn try_claim_project_cache_dirty(dirty: &std::sync::atomic::AtomicBool) -> bool {
    matches!(
        dirty.compare_exchange(true, false, Ordering::AcqRel, Ordering::Acquire),
        Ok(_)
    )
}
1012
/// Upsert-worker counterpart of the sync starter: raise `pending`
/// unconditionally, then try to claim the worker role. Returns `true` for
/// the single caller that transitions `running` from unset to set.
fn start_or_mark_project_cache_upsert_pending(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    pending.store(true, Ordering::Release);
    // On success compare_exchange returns the previous value, i.e. Ok(false).
    matches!(
        running.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire),
        Ok(false)
    )
}
1022
/// Consumes the upsert request flag, returning whether one was queued.
fn take_project_cache_upsert_pending(pending: &std::sync::atomic::AtomicBool) -> bool {
    let had_request = pending.swap(false, Ordering::AcqRel);
    had_request
}
1026
/// Upsert-worker shutdown handshake: drop the `running` claim, then take it
/// back iff another request arrived meanwhile. `true` means keep working.
fn stop_project_cache_upsert_worker_or_reclaim(
    pending: &std::sync::atomic::AtomicBool,
    running: &std::sync::atomic::AtomicBool,
) -> bool {
    running.store(false, Ordering::Release);
    let has_request = pending.load(Ordering::Acquire);
    has_request
        && running
            .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire)
            .is_ok()
}
1037
/// Purely lexical path normalization: drops `.`, resolves `..` by popping
/// the previously accumulated component, and keeps root/prefix components.
/// Never touches the filesystem, so symlinks are not resolved; leading `..`
/// segments of a relative path are silently dropped (pop on empty is a
/// no-op), matching the original behavior.
fn lexical_normalize(path: &Path) -> PathBuf {
    path.components().fold(PathBuf::new(), |mut acc, comp| {
        match comp {
            // `.` contributes nothing.
            Component::CurDir => {}
            // `..` removes the last segment; no-op at the root or when empty.
            Component::ParentDir => {
                acc.pop();
            }
            // Root, prefix and normal segments are kept verbatim.
            other => acc.push(other.as_os_str()),
        }
        acc
    })
}
1053
1054fn resolve_import_spec_to_abs(
1055 project_root: &Path,
1056 importer_abs: &Path,
1057 import_path: &str,
1058 remappings: &[String],
1059) -> Option<PathBuf> {
1060 if import_path.starts_with("./") || import_path.starts_with("../") {
1061 let base = importer_abs.parent()?;
1062 return Some(lexical_normalize(&base.join(import_path)));
1063 }
1064
1065 for remap in remappings {
1066 let mut it = remap.splitn(2, '=');
1067 let prefix = it.next().unwrap_or_default();
1068 let target = it.next().unwrap_or_default();
1069 if prefix.is_empty() || target.is_empty() {
1070 continue;
1071 }
1072 if import_path.starts_with(prefix) {
1073 let suffix = import_path.strip_prefix(prefix).unwrap_or_default();
1074 return Some(lexical_normalize(
1075 &project_root.join(format!("{target}{suffix}")),
1076 ));
1077 }
1078 }
1079
1080 Some(lexical_normalize(&project_root.join(import_path)))
1081}
1082
1083fn compute_reverse_import_closure(
1084 config: &FoundryConfig,
1085 changed_abs: &[PathBuf],
1086 remappings: &[String],
1087) -> HashSet<PathBuf> {
1088 let source_files = crate::solc::discover_source_files(config);
1089 let mut reverse_edges: HashMap<PathBuf, HashSet<PathBuf>> = HashMap::new();
1090
1091 for importer in &source_files {
1092 let Ok(bytes) = std::fs::read(importer) else {
1093 continue;
1094 };
1095 for imp in links::ts_find_imports(&bytes) {
1096 let Some(imported_abs) =
1097 resolve_import_spec_to_abs(&config.root, importer, &imp.path, remappings)
1098 else {
1099 continue;
1100 };
1101 if !imported_abs.starts_with(&config.root) {
1102 continue;
1103 }
1104 reverse_edges
1105 .entry(imported_abs)
1106 .or_default()
1107 .insert(importer.clone());
1108 }
1109 }
1110
1111 let mut affected: HashSet<PathBuf> = HashSet::new();
1112 let mut queue: std::collections::VecDeque<PathBuf> = std::collections::VecDeque::new();
1113
1114 for path in changed_abs {
1115 if !path.starts_with(&config.root) {
1116 continue;
1117 }
1118 let normalized = lexical_normalize(path);
1119 if affected.insert(normalized.clone()) {
1120 queue.push_back(normalized);
1121 }
1122 }
1123
1124 while let Some(current) = queue.pop_front() {
1125 if let Some(importers) = reverse_edges.get(¤t) {
1126 for importer in importers {
1127 if affected.insert(importer.clone()) {
1128 queue.push_back(importer.clone());
1129 }
1130 }
1131 }
1132 }
1133
1134 let source_set: HashSet<PathBuf> = source_files.into_iter().collect();
1136 affected
1137 .into_iter()
1138 .filter(|p| source_set.contains(p) && p.is_file())
1139 .collect()
1140}
1141
/// Extracts the trailing file id from a `start:length:file` src string;
/// `None` when the final `:`-separated segment is empty.
fn src_file_id(src: &str) -> Option<&str> {
    let last = match src.rsplit_once(':') {
        Some((_, tail)) => tail,
        None => src,
    };
    if last.is_empty() { None } else { Some(last) }
}
1145
1146fn remap_src_file_id(src: &str, id_remap: &HashMap<String, String>) -> String {
1147 let Some(old_id) = src_file_id(src) else {
1148 return src.to_string();
1149 };
1150 let Some(new_id) = id_remap.get(old_id) else {
1151 return src.to_string();
1152 };
1153 if new_id == old_id {
1154 return src.to_string();
1155 }
1156 let prefix_len = src.len().saturating_sub(old_id.len());
1157 format!("{}{}", &src[..prefix_len], new_id)
1158}
1159
1160fn remap_node_info_file_ids(info: &mut goto::NodeInfo, id_remap: &HashMap<String, String>) {
1161 info.src = remap_src_file_id(&info.src, id_remap);
1162 if let Some(loc) = info.name_location.as_mut() {
1163 *loc = remap_src_file_id(loc, id_remap);
1164 }
1165 for loc in &mut info.name_locations {
1166 *loc = remap_src_file_id(loc, id_remap);
1167 }
1168 if let Some(loc) = info.member_location.as_mut() {
1169 *loc = remap_src_file_id(loc, id_remap);
1170 }
1171}
1172
1173fn doc_key_path(key: &hover::DocKey) -> Option<&str> {
1174 match key {
1175 hover::DocKey::Contract(k) | hover::DocKey::StateVar(k) | hover::DocKey::Method(k) => {
1176 k.split_once(':').map(|(path, _)| path)
1177 }
1178 hover::DocKey::Func(_) | hover::DocKey::Event(_) => None,
1179 }
1180}
1181
/// Merge a scoped (partial) rebuild into an existing cached build.
///
/// `scoped` holds freshly indexed data for a subset of source files. The merge:
/// 1. rejects the scoped build if any of its declaration ids collide with a
///    declaration from a file *outside* the affected set (merging would corrupt
///    the index),
/// 2. remaps the scoped build's per-file source ids onto the existing build's
///    id space (reusing the existing id for a path when there is one, minting
///    fresh numeric ids otherwise),
/// 3. evicts every existing entry belonging to the affected files, and
/// 4. extends `existing` with the remapped scoped data.
///
/// Returns the number of affected source paths on success, or an error string
/// describing the decl-id collision that made the merge unsafe.
fn merge_scoped_cached_build(
    existing: &mut goto::CachedBuild,
    mut scoped: goto::CachedBuild,
) -> Result<usize, String> {
    // Files covered by the scoped rebuild, keyed the same way `nodes` is.
    let affected_paths: HashSet<String> = scoped.nodes.keys().cloned().collect();
    if affected_paths.is_empty() {
        return Ok(0);
    }
    // The same affected set as absolute paths (some indexes key on these).
    let affected_abs_paths: HashSet<String> = scoped.path_to_abs.values().cloned().collect();

    // Safety check before mutating anything: a scoped decl id that already
    // exists in a file we are NOT replacing means the two builds disagree on
    // id assignment; bail out so the caller can fall back to a full rebuild.
    for scoped_id in scoped.decl_index.keys() {
        if existing.decl_index.contains_key(scoped_id)
            && let Some(path) = existing.node_id_to_source_path.get(scoped_id)
            && !affected_abs_paths.contains(path)
        {
            return Err(format!(
                "decl id collision for id={} in unaffected path {}",
                scoped_id, path
            ));
        }
    }

    // Invert the existing id->path map so scoped files that already have an
    // id can reuse it (first id wins when a path appears under several ids).
    let mut path_to_existing_id: HashMap<String, String> = HashMap::new();
    for (id, path) in &existing.id_to_path_map {
        path_to_existing_id
            .entry(path.clone())
            .or_insert_with(|| id.clone());
    }
    // Track ids already taken; mint new numeric ids above the current maximum.
    let mut used_ids: HashSet<String> = existing.id_to_path_map.keys().cloned().collect();
    let mut next_id = used_ids
        .iter()
        .filter_map(|k| k.parse::<u64>().ok())
        .max()
        .unwrap_or(0)
        .saturating_add(1);

    // Build the scoped-id -> canonical-id remap: reuse the existing id for a
    // path when one exists, otherwise allocate the next unused numeric id.
    let mut id_remap: HashMap<String, String> = HashMap::new();
    for (scoped_id, path) in &scoped.id_to_path_map {
        let canonical = if let Some(id) = path_to_existing_id.get(path) {
            id.clone()
        } else {
            let id = loop {
                let candidate = next_id.to_string();
                next_id = next_id.saturating_add(1);
                if used_ids.insert(candidate.clone()) {
                    break candidate;
                }
            };
            path_to_existing_id.insert(path.clone(), id.clone());
            id
        };
        id_remap.insert(scoped_id.clone(), canonical);
    }

    // Rewrite every file id embedded in the scoped node infos and external refs.
    for file_nodes in scoped.nodes.values_mut() {
        for info in file_nodes.values_mut() {
            remap_node_info_file_ids(info, &id_remap);
        }
    }
    let scoped_external_refs: HashMap<String, crate::types::NodeId> = scoped
        .external_refs
        .into_iter()
        .map(|(src, decl_id)| (remap_src_file_id(&src, &id_remap), decl_id))
        .collect();

    // Evict everything the affected files contributed to the existing build.
    // External-ref keys embed a file id, so resolve each through the pre-merge
    // id->path map; entries whose id can't be resolved are kept.
    let old_id_to_path = existing.id_to_path_map.clone();
    existing.external_refs.retain(|src, _| {
        src_file_id(src)
            .and_then(|fid| old_id_to_path.get(fid))
            .map(|path| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing
        .nodes
        .retain(|path, _| !affected_paths.contains(path));
    existing
        .path_to_abs
        .retain(|path, _| !affected_paths.contains(path));
    existing
        .id_to_path_map
        .retain(|_, path| !affected_paths.contains(path));

    // These indexes key on absolute paths (directly or via node ids).
    // NOTE(review): decl_index is pruned AFTER node_id_to_source_path, so ids
    // belonging to affected files resolve to None here and fall into the
    // `None => true` (keep) arm; the extend below overwrites still-live ids,
    // but declarations deleted by the edit could linger — confirm this
    // ordering is intended.
    existing
        .node_id_to_source_path
        .retain(|_, path| !affected_abs_paths.contains(path));
    existing
        .decl_index
        .retain(|id, _| match existing.node_id_to_source_path.get(id) {
            Some(path) => !affected_abs_paths.contains(path),
            None => true,
        });
    existing
        .hint_index
        .retain(|abs_path, _| !affected_abs_paths.contains(abs_path));
    // Gas-index keys look like "<path>:<member>"; doc keys carry their own path.
    existing.gas_index.retain(|k, _| {
        k.split_once(':')
            .map(|(path, _)| !affected_paths.contains(path))
            .unwrap_or(true)
    });
    existing.doc_index.retain(|k, _| {
        doc_key_path(k)
            .map(|p| !affected_paths.contains(p))
            .unwrap_or(true)
    });

    // Splice in the remapped scoped data.
    existing.nodes.extend(scoped.nodes);
    existing.path_to_abs.extend(scoped.path_to_abs);
    existing.external_refs.extend(scoped_external_refs);
    for (old_id, path) in scoped.id_to_path_map {
        let canonical = id_remap.get(&old_id).cloned().unwrap_or(old_id);
        existing.id_to_path_map.insert(canonical, path);
    }
    existing.decl_index.extend(scoped.decl_index);
    existing
        .node_id_to_source_path
        .extend(scoped.node_id_to_source_path);
    existing.gas_index.extend(scoped.gas_index);
    existing.hint_index.extend(scoped.hint_index);
    existing.doc_index.extend(scoped.doc_index);

    Ok(affected_paths.len())
}
1307
1308#[tower_lsp::async_trait]
1309impl LanguageServer for ForgeLsp {
1310 async fn initialize(
1311 &self,
1312 params: InitializeParams,
1313 ) -> tower_lsp::jsonrpc::Result<InitializeResult> {
1314 {
1316 let mut caps = self.client_capabilities.write().await;
1317 *caps = Some(params.capabilities.clone());
1318 }
1319
1320 if let Some(init_opts) = ¶ms.initialization_options {
1322 let s = config::parse_settings(init_opts);
1323 self.client
1324 .log_message(
1325 MessageType::INFO,
1326 format!(
1327 "settings: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
1328 s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
1329 ),
1330 )
1331 .await;
1332 let mut settings = self.settings.write().await;
1333 *settings = s;
1334 self.settings_from_init
1335 .store(true, std::sync::atomic::Ordering::Relaxed);
1336 }
1337
1338 if let Some(uri) = params.root_uri.as_ref() {
1340 let mut root = self.root_uri.write().await;
1341 *root = Some(uri.clone());
1342 }
1343
1344 if let Some(root_uri) = params
1346 .root_uri
1347 .as_ref()
1348 .and_then(|uri| uri.to_file_path().ok())
1349 {
1350 let lint_cfg = config::load_lint_config(&root_uri);
1351 self.client
1352 .log_message(
1353 MessageType::INFO,
1354 format!(
1355 "loaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
1356 lint_cfg.lint_on_build,
1357 lint_cfg.ignore_patterns.len()
1358 ),
1359 )
1360 .await;
1361 let mut config = self.lint_config.write().await;
1362 *config = lint_cfg;
1363
1364 let foundry_cfg = config::load_foundry_config(&root_uri);
1365 self.client
1366 .log_message(
1367 MessageType::INFO,
1368 format!(
1369 "loaded foundry.toml project config: solc_version={:?}, remappings={}",
1370 foundry_cfg.solc_version,
1371 foundry_cfg.remappings.len()
1372 ),
1373 )
1374 .await;
1375 if foundry_cfg.via_ir {
1376 self.client
1377 .log_message(
1378 MessageType::WARNING,
1379 "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
1380 )
1381 .await;
1382 }
1383 let mut fc = self.foundry_config.write().await;
1384 *fc = foundry_cfg;
1385 }
1386
1387 let client_encodings = params
1389 .capabilities
1390 .general
1391 .as_ref()
1392 .and_then(|g| g.position_encodings.as_deref());
1393 let encoding = utils::PositionEncoding::negotiate(client_encodings);
1394 utils::set_encoding(encoding);
1395
1396 Ok(InitializeResult {
1397 server_info: Some(ServerInfo {
1398 name: "Solidity Language Server".to_string(),
1399 version: Some(env!("LONG_VERSION").to_string()),
1400 }),
1401 capabilities: ServerCapabilities {
1402 position_encoding: Some(encoding.into()),
1403 completion_provider: Some(CompletionOptions {
1404 trigger_characters: Some(vec![".".to_string()]),
1405 resolve_provider: Some(false),
1406 ..Default::default()
1407 }),
1408 signature_help_provider: Some(SignatureHelpOptions {
1409 trigger_characters: Some(vec![
1410 "(".to_string(),
1411 ",".to_string(),
1412 "[".to_string(),
1413 ]),
1414 retrigger_characters: None,
1415 work_done_progress_options: WorkDoneProgressOptions {
1416 work_done_progress: None,
1417 },
1418 }),
1419 definition_provider: Some(OneOf::Left(true)),
1420 declaration_provider: Some(DeclarationCapability::Simple(true)),
1421 references_provider: Some(OneOf::Left(true)),
1422 rename_provider: Some(OneOf::Right(RenameOptions {
1423 prepare_provider: Some(true),
1424 work_done_progress_options: WorkDoneProgressOptions {
1425 work_done_progress: Some(true),
1426 },
1427 })),
1428 workspace_symbol_provider: Some(OneOf::Left(true)),
1429 document_symbol_provider: Some(OneOf::Left(true)),
1430 document_highlight_provider: Some(OneOf::Left(true)),
1431 hover_provider: Some(HoverProviderCapability::Simple(true)),
1432 document_link_provider: Some(DocumentLinkOptions {
1433 resolve_provider: Some(false),
1434 work_done_progress_options: WorkDoneProgressOptions {
1435 work_done_progress: None,
1436 },
1437 }),
1438 document_formatting_provider: Some(OneOf::Left(true)),
1439 code_lens_provider: None,
1440 folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
1441 selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
1442 inlay_hint_provider: Some(OneOf::Right(InlayHintServerCapabilities::Options(
1443 InlayHintOptions {
1444 resolve_provider: Some(false),
1445 work_done_progress_options: WorkDoneProgressOptions {
1446 work_done_progress: None,
1447 },
1448 },
1449 ))),
1450 semantic_tokens_provider: Some(
1451 SemanticTokensServerCapabilities::SemanticTokensOptions(
1452 SemanticTokensOptions {
1453 legend: semantic_tokens::legend(),
1454 full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
1455 range: Some(true),
1456 work_done_progress_options: WorkDoneProgressOptions {
1457 work_done_progress: None,
1458 },
1459 },
1460 ),
1461 ),
1462 text_document_sync: Some(TextDocumentSyncCapability::Options(
1463 TextDocumentSyncOptions {
1464 will_save: Some(true),
1465 will_save_wait_until: None,
1466 open_close: Some(true),
1467 save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
1468 include_text: Some(true),
1469 })),
1470 change: Some(TextDocumentSyncKind::FULL),
1471 },
1472 )),
1473 workspace: Some(WorkspaceServerCapabilities {
1474 workspace_folders: None,
1475 file_operations: Some(WorkspaceFileOperationsServerCapabilities {
1476 will_rename: Some(FileOperationRegistrationOptions {
1477 filters: vec![
1478 FileOperationFilter {
1480 scheme: Some("file".to_string()),
1481 pattern: FileOperationPattern {
1482 glob: "**/*.sol".to_string(),
1483 matches: Some(FileOperationPatternKind::File),
1484 options: None,
1485 },
1486 },
1487 FileOperationFilter {
1489 scheme: Some("file".to_string()),
1490 pattern: FileOperationPattern {
1491 glob: "**".to_string(),
1492 matches: Some(FileOperationPatternKind::Folder),
1493 options: None,
1494 },
1495 },
1496 ],
1497 }),
1498 did_rename: Some(FileOperationRegistrationOptions {
1499 filters: vec![
1500 FileOperationFilter {
1501 scheme: Some("file".to_string()),
1502 pattern: FileOperationPattern {
1503 glob: "**/*.sol".to_string(),
1504 matches: Some(FileOperationPatternKind::File),
1505 options: None,
1506 },
1507 },
1508 FileOperationFilter {
1509 scheme: Some("file".to_string()),
1510 pattern: FileOperationPattern {
1511 glob: "**".to_string(),
1512 matches: Some(FileOperationPatternKind::Folder),
1513 options: None,
1514 },
1515 },
1516 ],
1517 }),
1518 will_delete: Some(FileOperationRegistrationOptions {
1519 filters: vec![
1520 FileOperationFilter {
1521 scheme: Some("file".to_string()),
1522 pattern: FileOperationPattern {
1523 glob: "**/*.sol".to_string(),
1524 matches: Some(FileOperationPatternKind::File),
1525 options: None,
1526 },
1527 },
1528 FileOperationFilter {
1529 scheme: Some("file".to_string()),
1530 pattern: FileOperationPattern {
1531 glob: "**".to_string(),
1532 matches: Some(FileOperationPatternKind::Folder),
1533 options: None,
1534 },
1535 },
1536 ],
1537 }),
1538 did_delete: Some(FileOperationRegistrationOptions {
1539 filters: vec![
1540 FileOperationFilter {
1541 scheme: Some("file".to_string()),
1542 pattern: FileOperationPattern {
1543 glob: "**/*.sol".to_string(),
1544 matches: Some(FileOperationPatternKind::File),
1545 options: None,
1546 },
1547 },
1548 FileOperationFilter {
1549 scheme: Some("file".to_string()),
1550 pattern: FileOperationPattern {
1551 glob: "**".to_string(),
1552 matches: Some(FileOperationPatternKind::Folder),
1553 options: None,
1554 },
1555 },
1556 ],
1557 }),
1558 will_create: Some(FileOperationRegistrationOptions {
1559 filters: vec![FileOperationFilter {
1560 scheme: Some("file".to_string()),
1561 pattern: FileOperationPattern {
1562 glob: "**/*.sol".to_string(),
1563 matches: Some(FileOperationPatternKind::File),
1564 options: None,
1565 },
1566 }],
1567 }),
1568 did_create: Some(FileOperationRegistrationOptions {
1569 filters: vec![FileOperationFilter {
1570 scheme: Some("file".to_string()),
1571 pattern: FileOperationPattern {
1572 glob: "**/*.sol".to_string(),
1573 matches: Some(FileOperationPatternKind::File),
1574 options: None,
1575 },
1576 }],
1577 }),
1578 ..Default::default()
1579 }),
1580 }),
1581 ..ServerCapabilities::default()
1582 },
1583 })
1584 }
1585
    /// LSP `initialized` notification handler.
    ///
    /// Post-handshake setup: registers a dynamic file watcher for
    /// `foundry.toml` / `remappings.txt` when the client allows it, pulls
    /// settings via `workspace/configuration` if none arrived with
    /// `initialize`, and — when solc mode with full project scan is enabled —
    /// spawns a background eager project index with editor progress reporting.
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "lsp server initialized.")
            .await;

        // Does the client support dynamic registration of file watchers?
        let supports_dynamic = self
            .client_capabilities
            .read()
            .await
            .as_ref()
            .and_then(|caps| caps.workspace.as_ref())
            .and_then(|ws| ws.did_change_watched_files.as_ref())
            .and_then(|dcwf| dcwf.dynamic_registration)
            .unwrap_or(false);

        if supports_dynamic {
            // Watch config files so lint/remapping settings can be reloaded
            // when they change on disk.
            let registration = Registration {
                id: "foundry-toml-watcher".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(
                    serde_json::to_value(DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/foundry.toml".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                            FileSystemWatcher {
                                glob_pattern: GlobPattern::String("**/remappings.txt".to_string()),
                                kind: Some(WatchKind::all()),
                            },
                        ],
                    })
                    .unwrap(),
                ),
            };

            // Registration failure is non-fatal; just log either way.
            if let Err(e) = self.client.register_capability(vec![registration]).await {
                self.client
                    .log_message(
                        MessageType::WARNING,
                        format!("failed to register foundry.toml watcher: {e}"),
                    )
                    .await;
            } else {
                self.client
                    .log_message(MessageType::INFO, "registered foundry.toml file watcher")
                    .await;
            }
        }

        // If initialize carried no settings, fall back to requesting them via
        // workspace/configuration (when the client supports that request).
        if !self
            .settings_from_init
            .load(std::sync::atomic::Ordering::Relaxed)
        {
            let supports_config = self
                .client_capabilities
                .read()
                .await
                .as_ref()
                .and_then(|caps| caps.workspace.as_ref())
                .and_then(|ws| ws.configuration)
                .unwrap_or(false);

            if supports_config {
                match self
                    .client
                    .configuration(vec![ConfigurationItem {
                        scope_uri: None,
                        section: Some("solidity-language-server".to_string()),
                    }])
                    .await
                {
                    Ok(values) => {
                        // A null value means the client has no stored config;
                        // keep the defaults in that case.
                        if let Some(val) = values.into_iter().next() {
                            if !val.is_null() {
                                let s = config::parse_settings(&val);
                                self.client
                                    .log_message(
                                        MessageType::INFO,
                                        format!(
                                            "settings (workspace/configuration): lint.enabled={}, lint.exclude={:?}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}",
                                            s.lint.enabled, s.lint.exclude, s.project_index.full_project_scan, s.project_index.cache_mode,
                                        ),
                                    )
                                    .await;
                                let mut settings = self.settings.write().await;
                                *settings = s;
                            }
                        }
                    }
                    Err(e) => {
                        self.client
                            .log_message(
                                MessageType::WARNING,
                                format!("workspace/configuration request failed: {e}"),
                            )
                            .await;
                    }
                }
            }
        }

        // Eager background project index (solc mode + full project scan only).
        if self.use_solc && self.settings.read().await.project_index.full_project_scan {
            let cache_mode = self.settings.read().await.project_index.cache_mode.clone();
            // Flag the project as indexed up-front so concurrent requests
            // don't also trigger an index pass.
            self.project_indexed
                .store(true, std::sync::atomic::Ordering::Relaxed);
            let foundry_config = self.foundry_config.read().await.clone();
            let cache_key = self.project_cache_key().await;
            let ast_cache = self.ast_cache.clone();
            let client = self.client.clone();

            tokio::spawn(async move {
                let Some(cache_key) = cache_key else {
                    return;
                };
                if !foundry_config.root.is_dir() {
                    client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "project index: {} not found, skipping eager index",
                                foundry_config.root.display(),
                            ),
                        )
                        .await;
                    return;
                }

                // Surface progress in the editor UI while indexing runs.
                let token = NumberOrString::String("solidity/projectIndex".to_string());
                let _ = client
                    .send_request::<request::WorkDoneProgressCreate>(WorkDoneProgressCreateParams {
                        token: token.clone(),
                    })
                    .await;

                client
                    .send_notification::<notification::Progress>(ProgressParams {
                        token: token.clone(),
                        value: ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(
                            WorkDoneProgressBegin {
                                title: "Indexing project".to_string(),
                                message: Some("Discovering source files...".to_string()),
                                cancellable: Some(false),
                                percentage: None,
                            },
                        )),
                    })
                    .await;

                // Try the on-disk reference cache first; a complete hit lets
                // us skip the expensive solc run entirely. The load is file
                // I/O + hashing, so it runs on the blocking pool.
                let cfg_for_load = foundry_config.clone();
                let cache_mode_for_load = cache_mode.clone();
                let load_res = tokio::task::spawn_blocking(move || {
                    crate::project_cache::load_reference_cache_with_report(
                        &cfg_for_load,
                        cache_mode_for_load,
                        true,
                    )
                })
                .await;
                match load_res {
                    Ok(report) => {
                        if let Some(cached_build) = report.build {
                            let source_count = cached_build.nodes.len();
                            ast_cache
                                .write()
                                .await
                                .insert(cache_key.clone(), Arc::new(cached_build));
                            client
                                .log_message(
                                    MessageType::INFO,
                                    format!(
                                        "project index (eager): cache load hit (sources={}, reused_files={}/{}, complete={}, duration={}ms)",
                                        source_count,
                                        report.file_count_reused,
                                        report.file_count_hashed,
                                        report.complete,
                                        report.duration_ms
                                    ),
                                )
                                .await;
                            // Only a *complete* cache hit ends the index pass;
                            // a partial build is kept and merged after solc runs.
                            if report.complete {
                                client
                                    .send_notification::<notification::Progress>(ProgressParams {
                                        token: token.clone(),
                                        value: ProgressParamsValue::WorkDone(
                                            WorkDoneProgress::End(WorkDoneProgressEnd {
                                                message: Some(format!(
                                                    "Loaded {} source files from cache",
                                                    source_count
                                                )),
                                            }),
                                        ),
                                    })
                                    .await;
                                return;
                            }
                        }

                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index (eager): cache load miss/partial (reason={}, reused_files={}/{}, duration={}ms)",
                                    report
                                        .miss_reason
                                        .unwrap_or_else(|| "unknown".to_string()),
                                    report.file_count_reused,
                                    report.file_count_hashed,
                                    report.duration_ms
                                ),
                            )
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index (eager): cache load task failed: {e}"),
                            )
                            .await;
                    }
                }

                // Cache miss/partial: run the full solc index, then merge in
                // any entries a partial cache load already put in ast_cache.
                match crate::solc::solc_project_index(&foundry_config, Some(&client), None).await {
                    Ok(ast_data) => {
                        let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
                        if let Some(prev) = ast_cache.read().await.get(&cache_key) {
                            new_build.merge_missing_from(prev);
                        }
                        let source_count = new_build.nodes.len();
                        let cached_build = Arc::new(new_build);
                        let build_for_save = (*cached_build).clone();
                        ast_cache
                            .write()
                            .await
                            .insert(cache_key.clone(), cached_build);
                        client
                            .log_message(
                                MessageType::INFO,
                                format!(
                                    "project index (eager): cached {} source files",
                                    source_count
                                ),
                            )
                            .await;

                        // Persist the fresh build to disk off the hot path.
                        let cfg_for_save = foundry_config.clone();
                        let client_for_save = client.clone();
                        tokio::spawn(async move {
                            let res = tokio::task::spawn_blocking(move || {
                                crate::project_cache::save_reference_cache_with_report(
                                    &cfg_for_save,
                                    &build_for_save,
                                    None,
                                )
                            })
                            .await;
                            match res {
                                Ok(Ok(report)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::INFO,
                                            format!(
                                                "project index (eager): cache save complete (hashed_files={}, duration={}ms)",
                                                report.file_count_hashed, report.duration_ms
                                            ),
                                        )
                                        .await;
                                }
                                Ok(Err(e)) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index (eager): failed to persist cache: {e}"
                                            ),
                                        )
                                        .await;
                                }
                                Err(e) => {
                                    client_for_save
                                        .log_message(
                                            MessageType::WARNING,
                                            format!(
                                                "project index (eager): cache save task failed: {e}"
                                            ),
                                        )
                                        .await;
                                }
                            }
                        });

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token: token.clone(),
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!(
                                            "Indexed {} source files",
                                            source_count
                                        )),
                                    },
                                )),
                            })
                            .await;
                    }
                    Err(e) => {
                        client
                            .log_message(
                                MessageType::WARNING,
                                format!("project index (eager): failed: {e}"),
                            )
                            .await;

                        client
                            .send_notification::<notification::Progress>(ProgressParams {
                                token,
                                value: ProgressParamsValue::WorkDone(WorkDoneProgress::End(
                                    WorkDoneProgressEnd {
                                        message: Some(format!("Index failed: {e}")),
                                    },
                                )),
                            })
                            .await;
                    }
                }
            });
        }
    }
1924
1925 async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
1926 self.flush_project_cache_to_disk("shutdown").await;
1927 self.client
1928 .log_message(MessageType::INFO, "lsp server shutting down.")
1929 .await;
1930 Ok(())
1931 }
1932
1933 async fn did_open(&self, params: DidOpenTextDocumentParams) {
1934 self.client
1935 .log_message(MessageType::INFO, "file opened")
1936 .await;
1937
1938 let mut td = params.text_document;
1939 let template_on_create = self
1940 .settings
1941 .read()
1942 .await
1943 .file_operations
1944 .template_on_create;
1945
1946 let should_attempt_scaffold = template_on_create
1949 && td.text.chars().all(|ch| ch.is_whitespace())
1950 && td.uri.scheme() == "file"
1951 && td
1952 .uri
1953 .to_file_path()
1954 .ok()
1955 .and_then(|p| p.extension().map(|e| e == "sol"))
1956 .unwrap_or(false);
1957
1958 if should_attempt_scaffold {
1959 let uri_str = td.uri.to_string();
1960 let create_flow_pending = {
1961 let pending = self.pending_create_scaffold.read().await;
1962 pending.contains(&uri_str)
1963 };
1964 if create_flow_pending {
1965 self.client
1966 .log_message(
1967 MessageType::INFO,
1968 format!(
1969 "didOpen: skip scaffold for {} (didCreateFiles scaffold pending)",
1970 uri_str
1971 ),
1972 )
1973 .await;
1974 } else {
1975 let cache_has_content = {
1976 let tc = self.text_cache.read().await;
1977 tc.get(&uri_str)
1978 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()))
1979 };
1980
1981 if !cache_has_content {
1982 let file_has_content = td.uri.to_file_path().ok().is_some_and(|p| {
1983 std::fs::read_to_string(&p)
1984 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()))
1985 });
1986
1987 if !file_has_content {
1988 let solc_version = self.foundry_config.read().await.solc_version.clone();
1989 if let Some(scaffold) =
1990 file_operations::generate_scaffold(&td.uri, solc_version.as_deref())
1991 {
1992 let end = utils::byte_offset_to_position(&td.text, td.text.len());
1993 let edit = WorkspaceEdit {
1994 changes: Some(HashMap::from([(
1995 td.uri.clone(),
1996 vec![TextEdit {
1997 range: Range {
1998 start: Position::default(),
1999 end,
2000 },
2001 new_text: scaffold.clone(),
2002 }],
2003 )])),
2004 document_changes: None,
2005 change_annotations: None,
2006 };
2007 if self
2008 .client
2009 .apply_edit(edit)
2010 .await
2011 .as_ref()
2012 .is_ok_and(|r| r.applied)
2013 {
2014 td.text = scaffold;
2015 self.client
2016 .log_message(
2017 MessageType::INFO,
2018 format!("didOpen: scaffolded empty file {}", uri_str),
2019 )
2020 .await;
2021 }
2022 }
2023 }
2024 }
2025 }
2026 }
2027
2028 self.on_change(td).await
2029 }
2030
2031 async fn did_change(&self, params: DidChangeTextDocumentParams) {
2032 self.client
2033 .log_message(MessageType::INFO, "file changed")
2034 .await;
2035
2036 if let Some(change) = params.content_changes.into_iter().next() {
2038 let has_substantive_content = change.text.chars().any(|ch| !ch.is_whitespace());
2039 let mut text_cache = self.text_cache.write().await;
2040 text_cache.insert(
2041 params.text_document.uri.to_string(),
2042 (params.text_document.version, change.text),
2043 );
2044 drop(text_cache);
2045
2046 if has_substantive_content {
2047 self.pending_create_scaffold
2048 .write()
2049 .await
2050 .remove(params.text_document.uri.as_str());
2051 }
2052 }
2053 }
2054
2055 async fn did_save(&self, params: DidSaveTextDocumentParams) {
2056 self.client
2057 .log_message(MessageType::INFO, "file saved")
2058 .await;
2059
2060 let mut text_content = if let Some(text) = params.text {
2061 text
2062 } else {
2063 let cached = {
2065 let text_cache = self.text_cache.read().await;
2066 text_cache
2067 .get(params.text_document.uri.as_str())
2068 .map(|(_, content)| content.clone())
2069 };
2070 if let Some(content) = cached {
2071 content
2072 } else {
2073 match std::fs::read_to_string(params.text_document.uri.path()) {
2074 Ok(content) => content,
2075 Err(e) => {
2076 self.client
2077 .log_message(
2078 MessageType::ERROR,
2079 format!("Failed to read file on save: {e}"),
2080 )
2081 .await;
2082 return;
2083 }
2084 }
2085 }
2086 };
2087
2088 let uri_str = params.text_document.uri.to_string();
2092 let template_on_create = self
2093 .settings
2094 .read()
2095 .await
2096 .file_operations
2097 .template_on_create;
2098 let needs_recover_scaffold = {
2099 let pending = self.pending_create_scaffold.read().await;
2100 template_on_create
2101 && pending.contains(&uri_str)
2102 && !text_content.chars().any(|ch| !ch.is_whitespace())
2103 };
2104 if needs_recover_scaffold {
2105 let solc_version = self.foundry_config.read().await.solc_version.clone();
2106 if let Some(scaffold) = file_operations::generate_scaffold(
2107 ¶ms.text_document.uri,
2108 solc_version.as_deref(),
2109 ) {
2110 let end = utils::byte_offset_to_position(&text_content, text_content.len());
2111 let edit = WorkspaceEdit {
2112 changes: Some(HashMap::from([(
2113 params.text_document.uri.clone(),
2114 vec![TextEdit {
2115 range: Range {
2116 start: Position::default(),
2117 end,
2118 },
2119 new_text: scaffold.clone(),
2120 }],
2121 )])),
2122 document_changes: None,
2123 change_annotations: None,
2124 };
2125 if self
2126 .client
2127 .apply_edit(edit)
2128 .await
2129 .as_ref()
2130 .is_ok_and(|r| r.applied)
2131 {
2132 text_content = scaffold.clone();
2133 let version = self
2134 .text_cache
2135 .read()
2136 .await
2137 .get(params.text_document.uri.as_str())
2138 .map(|(v, _)| *v)
2139 .unwrap_or_default();
2140 self.text_cache
2141 .write()
2142 .await
2143 .insert(uri_str.clone(), (version, scaffold));
2144 self.pending_create_scaffold.write().await.remove(&uri_str);
2145 self.client
2146 .log_message(
2147 MessageType::INFO,
2148 format!("didSave: recovered scaffold for {}", uri_str),
2149 )
2150 .await;
2151 }
2152 }
2153 }
2154
2155 let version = self
2156 .text_cache
2157 .read()
2158 .await
2159 .get(params.text_document.uri.as_str())
2160 .map(|(version, _)| *version)
2161 .unwrap_or_default();
2162
2163 let saved_uri = params.text_document.uri.clone();
2164 if let Ok(saved_file_path) = saved_uri.to_file_path() {
2165 let saved_abs = saved_file_path.to_string_lossy().to_string();
2166 self.project_cache_changed_files
2167 .write()
2168 .await
2169 .insert(saved_abs.clone());
2170 self.project_cache_upsert_files
2171 .write()
2172 .await
2173 .insert(saved_abs);
2174 }
2175 self.on_change(TextDocumentItem {
2176 uri: saved_uri.clone(),
2177 text: text_content,
2178 version,
2179 language_id: "".to_string(),
2180 })
2181 .await;
2182
2183 let settings_snapshot = self.settings.read().await.clone();
2184
2185 if self.use_solc
2188 && settings_snapshot.project_index.full_project_scan
2189 && matches!(
2190 settings_snapshot.project_index.cache_mode,
2191 crate::config::ProjectIndexCacheMode::V2
2192 | crate::config::ProjectIndexCacheMode::Auto
2193 )
2194 && let Ok(saved_file_path) = saved_uri.to_file_path()
2195 {
2196 let saved_abs = saved_file_path.to_string_lossy().to_string();
2197 let uri_key = saved_uri.to_string();
2198 let build_opt = { self.ast_cache.read().await.get(&uri_key).cloned() };
2199 if let Some(build) = build_opt {
2200 if build.nodes.contains_key(&saved_abs) {
2201 let cfg = crate::config::load_foundry_config(&saved_file_path);
2202 let build_for_upsert = (*build).clone();
2203 let upsert_res = tokio::task::spawn_blocking(move || {
2204 crate::project_cache::upsert_reference_cache_v2_with_report(
2205 &cfg,
2206 &build_for_upsert,
2207 )
2208 })
2209 .await;
2210 match upsert_res {
2211 Ok(Ok(report)) => {
2212 self.project_cache_upsert_files
2213 .write()
2214 .await
2215 .remove(&saved_abs);
2216 self.client
2217 .log_message(
2218 MessageType::INFO,
2219 format!(
2220 "project cache v2 upsert (immediate): touched_files={}, duration={}ms",
2221 report.file_count_hashed, report.duration_ms
2222 ),
2223 )
2224 .await;
2225 }
2226 Ok(Err(e)) => {
2227 self.client
2228 .log_message(
2229 MessageType::WARNING,
2230 format!("project cache v2 upsert (immediate) failed: {}", e),
2231 )
2232 .await;
2233 }
2234 Err(e) => {
2235 self.client
2236 .log_message(
2237 MessageType::WARNING,
2238 format!(
2239 "project cache v2 upsert (immediate) task failed: {}",
2240 e
2241 ),
2242 )
2243 .await;
2244 }
2245 }
2246 }
2247 }
2248 }
2249
2250 if self.use_solc
2254 && settings_snapshot.project_index.full_project_scan
2255 && matches!(
2256 settings_snapshot.project_index.cache_mode,
2257 crate::config::ProjectIndexCacheMode::V2
2258 | crate::config::ProjectIndexCacheMode::Auto
2259 )
2260 {
2261 if start_or_mark_project_cache_upsert_pending(
2262 &self.project_cache_upsert_pending,
2263 &self.project_cache_upsert_running,
2264 ) {
2265 let upsert_files = self.project_cache_upsert_files.clone();
2266 let ast_cache = self.ast_cache.clone();
2267 let client = self.client.clone();
2268 let running_flag = self.project_cache_upsert_running.clone();
2269 let pending_flag = self.project_cache_upsert_pending.clone();
2270
2271 tokio::spawn(async move {
2272 loop {
2273 tokio::time::sleep(std::time::Duration::from_millis(350)).await;
2274
2275 if !take_project_cache_upsert_pending(&pending_flag) {
2276 if stop_project_cache_upsert_worker_or_reclaim(
2277 &pending_flag,
2278 &running_flag,
2279 ) {
2280 continue;
2281 }
2282 break;
2283 }
2284
2285 let changed_paths: Vec<String> = {
2286 let mut paths = upsert_files.write().await;
2287 paths.drain().collect()
2288 };
2289 if changed_paths.is_empty() {
2290 continue;
2291 }
2292
2293 let mut work_items: Vec<(
2294 crate::config::FoundryConfig,
2295 crate::goto::CachedBuild,
2296 )> = Vec::new();
2297 {
2298 let cache = ast_cache.read().await;
2299 for abs_str in changed_paths {
2300 let path = PathBuf::from(&abs_str);
2301 let Ok(uri) = Url::from_file_path(&path) else {
2302 continue;
2303 };
2304 let uri_key = uri.to_string();
2305 let Some(build) = cache.get(&uri_key).cloned() else {
2306 continue;
2307 };
2308 if !build.nodes.contains_key(&abs_str) {
2310 continue;
2311 }
2312 let cfg = crate::config::load_foundry_config(&path);
2313 work_items.push((cfg, (*build).clone()));
2314 }
2315 }
2316
2317 if work_items.is_empty() {
2318 continue;
2319 }
2320
2321 let res = tokio::task::spawn_blocking(move || {
2322 let mut total_files = 0usize;
2323 let mut total_ms = 0u128;
2324 let mut failures: Vec<String> = Vec::new();
2325 for (cfg, build) in work_items {
2326 match crate::project_cache::upsert_reference_cache_v2_with_report(
2327 &cfg, &build,
2328 ) {
2329 Ok(report) => {
2330 total_files += report.file_count_hashed;
2331 total_ms += report.duration_ms;
2332 }
2333 Err(e) => failures.push(e),
2334 }
2335 }
2336 (total_files, total_ms, failures)
2337 })
2338 .await;
2339
2340 match res {
2341 Ok((total_files, total_ms, failures)) => {
2342 if !failures.is_empty() {
2343 client
2344 .log_message(
2345 MessageType::WARNING,
2346 format!(
2347 "project cache v2 upsert: {} failure(s), first={}",
2348 failures.len(),
2349 failures[0]
2350 ),
2351 )
2352 .await;
2353 } else {
2354 client
2355 .log_message(
2356 MessageType::INFO,
2357 format!(
2358 "project cache v2 upsert (debounced): touched_files={}, duration={}ms",
2359 total_files, total_ms
2360 ),
2361 )
2362 .await;
2363 }
2364 }
2365 Err(e) => {
2366 client
2367 .log_message(
2368 MessageType::WARNING,
2369 format!("project cache v2 upsert task failed: {e}"),
2370 )
2371 .await;
2372 }
2373 }
2374 }
2375 });
2376 }
2377 }
2378
2379 if self.use_solc
2382 && settings_snapshot.project_index.full_project_scan
2383 && self.project_cache_dirty.load(Ordering::Acquire)
2384 {
2385 if start_or_mark_project_cache_sync_pending(
2386 &self.project_cache_sync_pending,
2387 &self.project_cache_sync_running,
2388 ) {
2389 let foundry_config = self.foundry_config.read().await.clone();
2390 let root_key = self.project_cache_key().await;
2391 let ast_cache = self.ast_cache.clone();
2392 let text_cache = self.text_cache.clone();
2393 let client = self.client.clone();
2394 let dirty_flag = self.project_cache_dirty.clone();
2395 let running_flag = self.project_cache_sync_running.clone();
2396 let pending_flag = self.project_cache_sync_pending.clone();
2397 let changed_files = self.project_cache_changed_files.clone();
2398 let aggressive_scoped = settings_snapshot.project_index.incremental_edit_reindex;
2399
2400 tokio::spawn(async move {
2401 loop {
2402 tokio::time::sleep(std::time::Duration::from_millis(700)).await;
2404
2405 if !take_project_cache_sync_pending(&pending_flag) {
2406 if stop_project_cache_sync_worker_or_reclaim(
2410 &pending_flag,
2411 &running_flag,
2412 ) {
2413 continue;
2414 }
2415 break;
2416 }
2417
2418 if !try_claim_project_cache_dirty(&dirty_flag) {
2419 continue;
2420 }
2421
2422 let Some(cache_key) = &root_key else {
2423 dirty_flag.store(true, Ordering::Release);
2424 continue;
2425 };
2426 if !foundry_config.root.is_dir() {
2427 dirty_flag.store(true, Ordering::Release);
2428 client
2429 .log_message(
2430 MessageType::WARNING,
2431 format!(
2432 "didSave cache sync: invalid project root {}, deferring",
2433 foundry_config.root.display()
2434 ),
2435 )
2436 .await;
2437 continue;
2438 }
2439
2440 let mut scoped_ok = false;
2441
2442 if aggressive_scoped {
2443 let changed_abs: Vec<PathBuf> = {
2444 let mut changed = changed_files.write().await;
2445 let drained =
2446 changed.drain().map(PathBuf::from).collect::<Vec<PathBuf>>();
2447 drained
2448 };
2449 if !changed_abs.is_empty() {
2450 let remappings =
2451 crate::solc::resolve_remappings(&foundry_config).await;
2452 let cfg_for_plan = foundry_config.clone();
2453 let changed_for_plan = changed_abs.clone();
2454 let remappings_for_plan = remappings.clone();
2455 let plan_res = tokio::task::spawn_blocking(move || {
2456 compute_reverse_import_closure(
2457 &cfg_for_plan,
2458 &changed_for_plan,
2459 &remappings_for_plan,
2460 )
2461 })
2462 .await;
2463
2464 let affected_files = match plan_res {
2465 Ok(set) => set.into_iter().collect::<Vec<PathBuf>>(),
2466 Err(_) => Vec::new(),
2467 };
2468 if !affected_files.is_empty() {
2469 client
2470 .log_message(
2471 MessageType::INFO,
2472 format!(
2473 "didSave cache sync: aggressive scoped reindex (affected={})",
2474 affected_files.len(),
2475 ),
2476 )
2477 .await;
2478
2479 let text_cache_snapshot = text_cache.read().await.clone();
2480 match crate::solc::solc_project_index_scoped(
2481 &foundry_config,
2482 Some(&client),
2483 Some(&text_cache_snapshot),
2484 &affected_files,
2485 )
2486 .await
2487 {
2488 Ok(ast_data) => {
2489 let scoped_build = Arc::new(
2490 crate::goto::CachedBuild::new(ast_data, 0),
2491 );
2492 let source_count = scoped_build.nodes.len();
2493 enum ScopedApply {
2494 Merged { affected_count: usize },
2495 Stored,
2496 Failed(String),
2497 }
2498 let apply_outcome = {
2499 let mut cache = ast_cache.write().await;
2500 if let Some(existing) =
2501 cache.get(cache_key).cloned()
2502 {
2503 let mut merged = (*existing).clone();
2504 match merge_scoped_cached_build(
2505 &mut merged,
2506 (*scoped_build).clone(),
2507 ) {
2508 Ok(affected_count) => {
2509 cache.insert(
2510 cache_key.clone(),
2511 Arc::new(merged),
2512 );
2513 ScopedApply::Merged { affected_count }
2514 }
2515 Err(e) => ScopedApply::Failed(e),
2516 }
2517 } else {
2518 cache.insert(cache_key.clone(), scoped_build);
2519 ScopedApply::Stored
2520 }
2521 };
2522
2523 match apply_outcome {
2524 ScopedApply::Merged { affected_count } => {
2525 client
2526 .log_message(
2527 MessageType::INFO,
2528 format!(
2529 "didSave cache sync: scoped merge applied (scoped_sources={}, affected_paths={})",
2530 source_count, affected_count
2531 ),
2532 )
2533 .await;
2534 scoped_ok = true;
2535 }
2536 ScopedApply::Stored => {
2537 client
2538 .log_message(
2539 MessageType::INFO,
2540 format!(
2541 "didSave cache sync: scoped cache stored (scoped_sources={})",
2542 source_count
2543 ),
2544 )
2545 .await;
2546 scoped_ok = true;
2547 }
2548 ScopedApply::Failed(e) => {
2549 client
2550 .log_message(
2551 MessageType::WARNING,
2552 format!(
2553 "didSave cache sync: scoped merge rejected, will retry scoped on next save: {e}"
2554 ),
2555 )
2556 .await;
2557 dirty_flag.store(true, Ordering::Release);
2558 }
2559 }
2560 }
2561 Err(e) => {
2562 client
2563 .log_message(
2564 MessageType::WARNING,
2565 format!(
2566 "didSave cache sync: scoped reindex failed, will retry scoped on next save: {e}"
2567 ),
2568 )
2569 .await;
2570 dirty_flag.store(true, Ordering::Release);
2571 }
2572 }
2573 } else {
2574 client
2575 .log_message(
2576 MessageType::INFO,
2577 "didSave cache sync: no affected files from scoped planner",
2578 )
2579 .await;
2580 }
2581 }
2582 }
2583
2584 if scoped_ok {
2585 continue;
2586 }
2587 if aggressive_scoped {
2588 continue;
2589 }
2590
2591 client
2592 .log_message(
2593 MessageType::INFO,
2594 "didSave cache sync: rebuilding project index from disk",
2595 )
2596 .await;
2597
2598 match crate::solc::solc_project_index(&foundry_config, Some(&client), None)
2599 .await
2600 {
2601 Ok(ast_data) => {
2602 let mut new_build = crate::goto::CachedBuild::new(ast_data, 0);
2603 if let Some(prev) = ast_cache.read().await.get(cache_key) {
2604 new_build.merge_missing_from(prev);
2605 }
2606 let source_count = new_build.nodes.len();
2607 let cached_build = Arc::new(new_build);
2608 let build_for_save = (*cached_build).clone();
2609 ast_cache
2610 .write()
2611 .await
2612 .insert(cache_key.clone(), cached_build);
2613
2614 let cfg_for_save = foundry_config.clone();
2615 let save_res = tokio::task::spawn_blocking(move || {
2616 crate::project_cache::save_reference_cache_with_report(
2617 &cfg_for_save,
2618 &build_for_save,
2619 None,
2620 )
2621 })
2622 .await;
2623
2624 match save_res {
2625 Ok(Ok(report)) => {
2626 changed_files.write().await.clear();
2627 client
2628 .log_message(
2629 MessageType::INFO,
2630 format!(
2631 "didSave cache sync: persisted cache (sources={}, hashed_files={}, duration={}ms)",
2632 source_count, report.file_count_hashed, report.duration_ms
2633 ),
2634 )
2635 .await;
2636 }
2637 Ok(Err(e)) => {
2638 dirty_flag.store(true, Ordering::Release);
2639 client
2640 .log_message(
2641 MessageType::WARNING,
2642 format!(
2643 "didSave cache sync: persist failed, will retry: {e}"
2644 ),
2645 )
2646 .await;
2647 }
2648 Err(e) => {
2649 dirty_flag.store(true, Ordering::Release);
2650 client
2651 .log_message(
2652 MessageType::WARNING,
2653 format!(
2654 "didSave cache sync: save task failed, will retry: {e}"
2655 ),
2656 )
2657 .await;
2658 }
2659 }
2660 }
2661 Err(e) => {
2662 dirty_flag.store(true, Ordering::Release);
2663 client
2664 .log_message(
2665 MessageType::WARNING,
2666 format!(
2667 "didSave cache sync: re-index failed, will retry: {e}"
2668 ),
2669 )
2670 .await;
2671 }
2672 }
2673 }
2674 });
2675 }
2676 }
2677 }
2678
2679 async fn will_save(&self, params: WillSaveTextDocumentParams) {
2680 self.client
2681 .log_message(
2682 MessageType::INFO,
2683 format!(
2684 "file will save reason:{:?} {}",
2685 params.reason, params.text_document.uri
2686 ),
2687 )
2688 .await;
2689 }
2690
2691 async fn formatting(
2692 &self,
2693 params: DocumentFormattingParams,
2694 ) -> tower_lsp::jsonrpc::Result<Option<Vec<TextEdit>>> {
2695 self.client
2696 .log_message(MessageType::INFO, "formatting request")
2697 .await;
2698
2699 let uri = params.text_document.uri;
2700 let file_path = match uri.to_file_path() {
2701 Ok(path) => path,
2702 Err(_) => {
2703 self.client
2704 .log_message(MessageType::ERROR, "Invalid file URI for formatting")
2705 .await;
2706 return Ok(None);
2707 }
2708 };
2709 let path_str = match file_path.to_str() {
2710 Some(s) => s,
2711 None => {
2712 self.client
2713 .log_message(MessageType::ERROR, "Invalid file path for formatting")
2714 .await;
2715 return Ok(None);
2716 }
2717 };
2718
2719 let original_content = {
2721 let text_cache = self.text_cache.read().await;
2722 if let Some((_, content)) = text_cache.get(&uri.to_string()) {
2723 content.clone()
2724 } else {
2725 match std::fs::read_to_string(&file_path) {
2727 Ok(content) => content,
2728 Err(_) => {
2729 self.client
2730 .log_message(MessageType::ERROR, "Failed to read file for formatting")
2731 .await;
2732 return Ok(None);
2733 }
2734 }
2735 }
2736 };
2737
2738 let formatted_content = match self.compiler.format(path_str).await {
2740 Ok(content) => content,
2741 Err(e) => {
2742 self.client
2743 .log_message(MessageType::WARNING, format!("Formatting failed: {e}"))
2744 .await;
2745 return Ok(None);
2746 }
2747 };
2748
2749 if original_content != formatted_content {
2751 let end = utils::byte_offset_to_position(&original_content, original_content.len());
2752
2753 {
2755 let mut text_cache = self.text_cache.write().await;
2756 let version = text_cache
2757 .get(&uri.to_string())
2758 .map(|(v, _)| *v)
2759 .unwrap_or(0);
2760 text_cache.insert(uri.to_string(), (version, formatted_content.clone()));
2761 }
2762
2763 let edit = TextEdit {
2764 range: Range {
2765 start: Position::default(),
2766 end,
2767 },
2768 new_text: formatted_content,
2769 };
2770 Ok(Some(vec![edit]))
2771 } else {
2772 Ok(None)
2773 }
2774 }
2775
2776 async fn did_close(&self, params: DidCloseTextDocumentParams) {
2777 self.flush_project_cache_to_disk("didClose").await;
2778 let uri = params.text_document.uri.to_string();
2779 self.ast_cache.write().await.remove(&uri);
2780 self.text_cache.write().await.remove(&uri);
2781 self.completion_cache.write().await.remove(&uri);
2782 self.client
2783 .log_message(MessageType::INFO, "file closed, caches cleared.")
2784 .await;
2785 }
2786
2787 async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
2788 let s = config::parse_settings(¶ms.settings);
2789 self.client
2790 .log_message(
2791 MessageType::INFO,
2792 format!(
2793 "settings updated: inlayHints.parameters={}, inlayHints.gasEstimates={}, lint.enabled={}, lint.severity={:?}, lint.only={:?}, lint.exclude={:?}, fileOperations.templateOnCreate={}, fileOperations.updateImportsOnRename={}, fileOperations.updateImportsOnDelete={}, projectIndex.fullProjectScan={}, projectIndex.cacheMode={:?}, projectIndex.incrementalEditReindex={}",
2794 s.inlay_hints.parameters, s.inlay_hints.gas_estimates, s.lint.enabled, s.lint.severity, s.lint.only, s.lint.exclude, s.file_operations.template_on_create, s.file_operations.update_imports_on_rename, s.file_operations.update_imports_on_delete, s.project_index.full_project_scan, s.project_index.cache_mode, s.project_index.incremental_edit_reindex,
2795 ),
2796 )
2797 .await;
2798 let mut settings = self.settings.write().await;
2799 *settings = s;
2800
2801 let client = self.client.clone();
2803 tokio::spawn(async move {
2804 let _ = client.inlay_hint_refresh().await;
2805 });
2806 }
2807 async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
2808 self.client
2809 .log_message(MessageType::INFO, "workdspace folders changed.")
2810 .await;
2811 }
2812
    /// Handles `workspace/didChangeWatchedFiles`.
    ///
    /// Only two watched files get special treatment:
    /// * `foundry.toml` — the lint config and project config are re-read
    ///   from the TOML and swapped into shared state; a warning is logged
    ///   when `via_ir` is enabled (gas-estimate inlay hints are disabled in
    ///   that mode per the message below).
    /// * `remappings.txt` — the change is only logged; nothing is reloaded
    ///   here.
    ///
    /// NOTE(review): the `break` after a foundry.toml reload stops processing
    /// the rest of this batch, so a `remappings.txt` event arriving in the
    /// same notification after a foundry.toml event is not logged — confirm
    /// this is intentional (it does avoid reloading the TOML twice).
    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
        self.client
            .log_message(MessageType::INFO, "watched files have changed.")
            .await;

        for change in &params.changes {
            // Skip events whose URI does not map to a local path.
            let path = match change.uri.to_file_path() {
                Ok(p) => p,
                Err(_) => continue,
            };

            let filename = path.file_name().and_then(|n| n.to_str());

            if filename == Some("foundry.toml") {
                // Reload lint settings from the edited TOML.
                let lint_cfg = config::load_lint_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml lint config: lint_on_build={}, ignore_patterns={}",
                            lint_cfg.lint_on_build,
                            lint_cfg.ignore_patterns.len()
                        ),
                    )
                    .await;
                let mut lc = self.lint_config.write().await;
                *lc = lint_cfg;

                // Reload project-level config (solc version, remappings, ...).
                let foundry_cfg = config::load_foundry_config_from_toml(&path);
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "reloaded foundry.toml project config: solc_version={:?}, remappings={}",
                            foundry_cfg.solc_version,
                            foundry_cfg.remappings.len()
                        ),
                    )
                    .await;
                if foundry_cfg.via_ir {
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            "via_ir is enabled in foundry.toml — gas estimate inlay hints are disabled to avoid slow compilation",
                        )
                        .await;
                }
                let mut fc = self.foundry_config.write().await;
                *fc = foundry_cfg;
                // Stop after the first foundry.toml in this batch.
                break;
            }

            if filename == Some("remappings.txt") {
                // Log only; remappings are resolved elsewhere on demand.
                self.client
                    .log_message(
                        MessageType::INFO,
                        "remappings.txt changed, config may need refresh",
                    )
                    .await;
            }
        }
    }
2878
    /// Handles `textDocument/completion`.
    ///
    /// Cache strategy: prefer the per-file completion cache, then fall back
    /// to the project-root build's completion cache. When neither is
    /// available, a background task is spawned to warm the per-file cache
    /// from the AST cache (this request still proceeds without it).
    async fn completion(
        &self,
        params: CompletionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;

        // Trigger character, if the client sent one (e.g. ".").
        let trigger_char = params
            .context
            .as_ref()
            .and_then(|ctx| ctx.trigger_character.as_deref());

        // Live buffer text, or on-disk content as a fallback.
        let source_text = {
            let text_cache = self.text_cache.read().await;
            if let Some((_, text)) = text_cache.get(&uri.to_string()) {
                text.clone()
            } else {
                match uri.to_file_path() {
                    Ok(path) => std::fs::read_to_string(&path).unwrap_or_default(),
                    Err(_) => return Ok(None),
                }
            }
        };

        // Per-file completion cache, if one was already built.
        let local_cached: Option<Arc<completion::CompletionCache>> = {
            let comp_cache = self.completion_cache.read().await;
            comp_cache.get(&uri.to_string()).cloned()
        };

        // Project-wide completion cache attached to the root build.
        let root_cached: Option<Arc<completion::CompletionCache>> = {
            let root_key = self.project_cache_key().await;
            match root_key {
                Some(root_key) => {
                    let ast_cache = self.ast_cache.read().await;
                    ast_cache
                        .get(&root_key)
                        .map(|root_build| root_build.completion_cache.clone())
                }
                None => None,
            }
        };

        // Per-file cache wins over the project cache.
        let cached = local_cached.or(root_cached.clone());

        if cached.is_none() {
            // Warm the per-file completion cache in the background so the
            // next request can hit it; this request continues uncached.
            let ast_cache = self.ast_cache.clone();
            let completion_cache = self.completion_cache.clone();
            let uri_string = uri.to_string();
            tokio::spawn(async move {
                let cached_build = {
                    let cache = ast_cache.read().await;
                    match cache.get(&uri_string) {
                        Some(v) => v.clone(),
                        None => return,
                    }
                };
                completion_cache
                    .write()
                    .await
                    .insert(uri_string, cached_build.completion_cache.clone());
            });
        }

        let cache_ref = cached.as_deref();

        // Map this document's path to its file id inside the cache, if any.
        let file_id = {
            let uri_path = uri.to_file_path().ok();
            cache_ref.and_then(|c| {
                uri_path.as_ref().and_then(|p| {
                    let path_str = p.to_str()?;
                    c.path_to_file_id.get(path_str).copied()
                })
            })
        };

        let current_file_path = uri
            .to_file_path()
            .ok()
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        // Importable top-level symbols from the project cache are appended as
        // "tail" candidates — except after ".", where only member completion
        // makes sense.
        let tail_candidates = if trigger_char == Some(".") {
            vec![]
        } else {
            root_cached.as_deref().map_or_else(Vec::new, |c| {
                completion::top_level_importable_completion_candidates(
                    c,
                    current_file_path.as_deref(),
                    &source_text,
                )
            })
        };

        let result = completion::handle_completion_with_tail_candidates(
            cache_ref,
            &source_text,
            position,
            trigger_char,
            file_id,
            tail_candidates,
        );
        Ok(result)
    }
2987
    /// Handles `textDocument/definition`.
    ///
    /// Resolution order depends on whether the buffer is "dirty" (live text
    /// version newer than the version the cached AST build was made from):
    /// * dirty — tree-sitter on the live text first (validated against the
    ///   identifier under the cursor), then a name-based AST lookup;
    /// * clean — exact position-based AST lookup first, then a validated
    ///   tree-sitter fallback.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
        self.client
            .log_message(MessageType::INFO, "got textDocument/definition request")
            .await;

        let uri = params.text_document_position_params.text_document.uri;
        let position = params.text_document_position_params.position;

        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file uri")
                    .await;
                return Ok(None);
            }
        };

        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };

        let source_text = String::from_utf8_lossy(&source_bytes).to_string();

        // Identifier under the cursor; used for validating tree-sitter hits
        // and for the name-based AST fallback on dirty buffers.
        let cursor_name = goto::cursor_context(&source_text, position).map(|ctx| ctx.name);

        // A build older than the live text means AST byte offsets may be
        // stale, so exact position-based lookups cannot be trusted.
        let (is_dirty, cached_build) = {
            let text_version = self
                .text_cache
                .read()
                .await
                .get(&uri.to_string())
                .map(|(v, _)| *v)
                .unwrap_or(0);
            let cb = self.get_or_fetch_build(&uri, &file_path, false).await;
            let build_version = cb.as_ref().map(|b| b.build_version).unwrap_or(0);
            (text_version > build_version, cb)
        };

        // Checks that a tree-sitter location really points at `cursor_name`.
        // Without a cursor name — or when the target file can't be read — the
        // hit is accepted rather than discarded.
        let validate_ts = |loc: &Location| -> bool {
            let Some(ref name) = cursor_name else {
                return true;
            };
            // Same-file targets are checked against the in-memory text;
            // cross-file targets are read from disk.
            let target_src = if loc.uri == uri {
                Some(source_text.clone())
            } else {
                loc.uri
                    .to_file_path()
                    .ok()
                    .and_then(|p| std::fs::read_to_string(&p).ok())
            };
            match target_src {
                Some(src) => goto::validate_goto_target(&src, loc, name),
                None => true,
            }
        };

        if is_dirty {
            self.client
                .log_message(MessageType::INFO, "file is dirty, trying tree-sitter first")
                .await;

            // Tree-sitter works on the live text, so it is the primary
            // strategy while the AST is stale.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(
                        MessageType::INFO,
                        "tree-sitter result failed validation, trying AST fallback",
                    )
                    .await;
            }

            // Fallback: search the (stale) AST by identifier name, using the
            // cursor byte offset only as a disambiguation hint.
            if let Some(ref cb) = cached_build
                && let Some(ref name) = cursor_name
            {
                let byte_hint = goto::pos_to_bytes(&source_bytes, position);
                if let Some(location) = goto::goto_declaration_by_name(cb, &uri, name, byte_hint) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (AST by name) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
            }
        } else {
            // Clean buffer: exact position-based AST lookup is reliable.
            if let Some(ref cb) = cached_build
                && let Some(location) =
                    goto::goto_declaration_cached(cb, &uri, position, &source_bytes)
            {
                self.client
                    .log_message(
                        MessageType::INFO,
                        format!(
                            "found definition (AST) at {}:{}",
                            location.uri, location.range.start.line
                        ),
                    )
                    .await;
                return Ok(Some(GotoDefinitionResponse::from(location)));
            }

            // AST had nothing — try tree-sitter as a secondary strategy.
            let ts_result = {
                let comp_cache = self.completion_cache.read().await;
                let text_cache = self.text_cache.read().await;
                if let Some(cc) = comp_cache.get(&uri.to_string()) {
                    goto::goto_definition_ts(&source_text, position, &uri, cc, &text_cache)
                } else {
                    None
                }
            };

            if let Some(location) = ts_result {
                if validate_ts(&location) {
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "found definition (tree-sitter fallback) at {}:{}",
                                location.uri, location.range.start.line
                            ),
                        )
                        .await;
                    return Ok(Some(GotoDefinitionResponse::from(location)));
                }
                self.client
                    .log_message(MessageType::INFO, "tree-sitter fallback failed validation")
                    .await;
            }
        }

        self.client
            .log_message(MessageType::INFO, "no definition found")
            .await;
        Ok(None)
    }
3169
3170 async fn goto_declaration(
3171 &self,
3172 params: request::GotoDeclarationParams,
3173 ) -> tower_lsp::jsonrpc::Result<Option<request::GotoDeclarationResponse>> {
3174 self.client
3175 .log_message(MessageType::INFO, "got textDocument/declaration request")
3176 .await;
3177
3178 let uri = params.text_document_position_params.text_document.uri;
3179 let position = params.text_document_position_params.position;
3180
3181 let file_path = match uri.to_file_path() {
3182 Ok(path) => path,
3183 Err(_) => {
3184 self.client
3185 .log_message(MessageType::ERROR, "invalid file uri")
3186 .await;
3187 return Ok(None);
3188 }
3189 };
3190
3191 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3192 Some(bytes) => bytes,
3193 None => return Ok(None),
3194 };
3195
3196 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3197 let cached_build = match cached_build {
3198 Some(cb) => cb,
3199 None => return Ok(None),
3200 };
3201
3202 if let Some(location) =
3203 goto::goto_declaration_cached(&cached_build, &uri, position, &source_bytes)
3204 {
3205 self.client
3206 .log_message(
3207 MessageType::INFO,
3208 format!(
3209 "found declaration at {}:{}",
3210 location.uri, location.range.start.line
3211 ),
3212 )
3213 .await;
3214 Ok(Some(request::GotoDeclarationResponse::from(location)))
3215 } else {
3216 self.client
3217 .log_message(MessageType::INFO, "no declaration found")
3218 .await;
3219 Ok(None)
3220 }
3221 }
3222
    /// Handles `textDocument/references`.
    ///
    /// Gathers references from the current file's build, then widens the
    /// search to the project-wide build. When the project build does not yet
    /// contain the current file, a synchronous "warm refresh" runs a scoped
    /// solc reindex over the file's reverse-import closure and merges the
    /// result into the project cache before searching. Results from both
    /// passes are deduplicated by (uri, range).
    async fn references(
        &self,
        params: ReferenceParams,
    ) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
        self.client
            .log_message(MessageType::INFO, "Got a textDocument/references request")
            .await;

        let uri = params.text_document_position.text_document.uri;
        let position = params.text_document_position.position;
        let file_path = match uri.to_file_path() {
            Ok(path) => path,
            Err(_) => {
                self.client
                    .log_message(MessageType::ERROR, "Invalid file URI")
                    .await;
                return Ok(None);
            }
        };
        let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
            Some(bytes) => bytes,
            None => return Ok(None),
        };
        let file_build = self.get_or_fetch_build(&uri, &file_path, true).await;
        let file_build = match file_build {
            Some(cb) => cb,
            None => return Ok(None),
        };
        let mut project_build = self.ensure_project_cached_build().await;
        let current_abs = file_path.to_string_lossy().to_string();

        // Warm refresh: only when solc indexing is on, full project scan is
        // enabled, and the project build exists but is missing this file.
        if self.use_solc
            && self.settings.read().await.project_index.full_project_scan
            && project_build
                .as_ref()
                .is_some_and(|b| !b.nodes.contains_key(&current_abs))
        {
            let foundry_config = self.foundry_config_for_file(&file_path).await;
            let remappings = crate::solc::resolve_remappings(&foundry_config).await;
            let changed = vec![PathBuf::from(&current_abs)];
            let cfg_for_plan = foundry_config.clone();
            let remappings_for_plan = remappings.clone();
            // Plan which files are affected (reverse import closure) off the
            // async runtime; planning failure degrades to just this file.
            let affected_set = tokio::task::spawn_blocking(move || {
                compute_reverse_import_closure(&cfg_for_plan, &changed, &remappings_for_plan)
            })
            .await
            .ok()
            .unwrap_or_default();
            let mut affected_files: Vec<PathBuf> = affected_set.into_iter().collect();
            if affected_files.is_empty() {
                affected_files.push(PathBuf::from(&current_abs));
            }
            let text_cache_snapshot = self.text_cache.read().await.clone();
            match crate::solc::solc_project_index_scoped(
                &foundry_config,
                Some(&self.client),
                Some(&text_cache_snapshot),
                &affected_files,
            )
            .await
            {
                Ok(ast_data) => {
                    let scoped_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    if let Some(root_key) = self.project_cache_key().await {
                        // Merge the scoped build into the existing project
                        // build; on merge failure (or no existing build) the
                        // scoped build alone replaces the cache entry.
                        let merged = {
                            let mut cache = self.ast_cache.write().await;
                            let merged = if let Some(existing) = cache.get(&root_key).cloned() {
                                let mut merged = (*existing).clone();
                                match merge_scoped_cached_build(
                                    &mut merged,
                                    (*scoped_build).clone(),
                                ) {
                                    Ok(_) => Arc::new(merged),
                                    Err(_) => scoped_build.clone(),
                                }
                            } else {
                                scoped_build.clone()
                            };
                            cache.insert(root_key, merged.clone());
                            merged
                        };
                        project_build = Some(merged);
                    } else {
                        // No root key: use the scoped build for this request
                        // only, without touching the shared cache.
                        project_build = Some(scoped_build);
                    }
                    self.client
                        .log_message(
                            MessageType::INFO,
                            format!(
                                "references warm-refresh: scoped reindex applied (affected={})",
                                affected_files.len()
                            ),
                        )
                        .await;
                }
                Err(e) => {
                    // Non-fatal: references still come from file_build and
                    // whatever project build was already cached.
                    self.client
                        .log_message(
                            MessageType::WARNING,
                            format!("references warm-refresh: scoped reindex failed: {e}"),
                        )
                        .await;
                }
            }
        }

        // Pass 1: references within the current file's build.
        let mut locations = references::goto_references_cached(
            &file_build,
            &uri,
            position,
            &source_bytes,
            None,
            params.context.include_declaration,
        );

        // Pass 2: resolve the definition target, then search the project
        // build for references to the same target in other files.
        if let Some((def_abs_path, def_byte_offset)) =
            references::resolve_target_location(&file_build, &uri, position, &source_bytes)
        {
            if let Some(project_build) = project_build {
                let other_locations = references::goto_references_for_target(
                    &project_build,
                    &def_abs_path,
                    def_byte_offset,
                    None,
                    params.context.include_declaration,
                );
                locations.extend(other_locations);
            }
        }

        // Deduplicate by (uri, exact range); retain keeps first occurrence.
        let mut seen = std::collections::HashSet::new();
        locations.retain(|loc| {
            seen.insert((
                loc.uri.clone(),
                loc.range.start.line,
                loc.range.start.character,
                loc.range.end.line,
                loc.range.end.character,
            ))
        });

        if locations.is_empty() {
            self.client
                .log_message(MessageType::INFO, "No references found")
                .await;
            Ok(None)
        } else {
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("Found {} references", locations.len()),
                )
                .await;
            Ok(Some(locations))
        }
    }
3382
3383 async fn prepare_rename(
3384 &self,
3385 params: TextDocumentPositionParams,
3386 ) -> tower_lsp::jsonrpc::Result<Option<PrepareRenameResponse>> {
3387 self.client
3388 .log_message(MessageType::INFO, "got textDocument/prepareRename request")
3389 .await;
3390
3391 let uri = params.text_document.uri;
3392 let position = params.position;
3393
3394 let file_path = match uri.to_file_path() {
3395 Ok(path) => path,
3396 Err(_) => {
3397 self.client
3398 .log_message(MessageType::ERROR, "invalid file uri")
3399 .await;
3400 return Ok(None);
3401 }
3402 };
3403
3404 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3405 Some(bytes) => bytes,
3406 None => return Ok(None),
3407 };
3408
3409 if let Some(range) = rename::get_identifier_range(&source_bytes, position) {
3410 self.client
3411 .log_message(
3412 MessageType::INFO,
3413 format!(
3414 "prepare rename range: {}:{}",
3415 range.start.line, range.start.character
3416 ),
3417 )
3418 .await;
3419 Ok(Some(PrepareRenameResponse::Range(range)))
3420 } else {
3421 self.client
3422 .log_message(MessageType::INFO, "no identifier found for prepare rename")
3423 .await;
3424 Ok(None)
3425 }
3426 }
3427
3428 async fn rename(
3429 &self,
3430 params: RenameParams,
3431 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
3432 self.client
3433 .log_message(MessageType::INFO, "got textDocument/rename request")
3434 .await;
3435
3436 let uri = params.text_document_position.text_document.uri;
3437 let position = params.text_document_position.position;
3438 let new_name = params.new_name;
3439 let file_path = match uri.to_file_path() {
3440 Ok(p) => p,
3441 Err(_) => {
3442 self.client
3443 .log_message(MessageType::ERROR, "invalid file uri")
3444 .await;
3445 return Ok(None);
3446 }
3447 };
3448 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3449 Some(bytes) => bytes,
3450 None => return Ok(None),
3451 };
3452
3453 let current_identifier = match rename::get_identifier_at_position(&source_bytes, position) {
3454 Some(id) => id,
3455 None => {
3456 self.client
3457 .log_message(MessageType::ERROR, "No identifier found at position")
3458 .await;
3459 return Ok(None);
3460 }
3461 };
3462
3463 if !utils::is_valid_solidity_identifier(&new_name) {
3464 return Err(tower_lsp::jsonrpc::Error::invalid_params(
3465 "new name is not a valid solidity identifier",
3466 ));
3467 }
3468
3469 if new_name == current_identifier {
3470 self.client
3471 .log_message(
3472 MessageType::INFO,
3473 "new name is the same as current identifier",
3474 )
3475 .await;
3476 return Ok(None);
3477 }
3478
3479 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3480 let cached_build = match cached_build {
3481 Some(cb) => cb,
3482 None => return Ok(None),
3483 };
3484 let other_builds: Vec<Arc<goto::CachedBuild>> = {
3485 let cache = self.ast_cache.read().await;
3486 cache
3487 .iter()
3488 .filter(|(key, _)| **key != uri.to_string())
3489 .map(|(_, v)| v.clone())
3490 .collect()
3491 };
3492 let other_refs: Vec<&goto::CachedBuild> = other_builds.iter().map(|v| v.as_ref()).collect();
3493
3494 let text_buffers: HashMap<String, Vec<u8>> = {
3498 let text_cache = self.text_cache.read().await;
3499 text_cache
3500 .iter()
3501 .map(|(uri, (_, content))| (uri.clone(), content.as_bytes().to_vec()))
3502 .collect()
3503 };
3504
3505 match rename::rename_symbol(
3506 &cached_build,
3507 &uri,
3508 position,
3509 &source_bytes,
3510 new_name,
3511 &other_refs,
3512 &text_buffers,
3513 ) {
3514 Some(workspace_edit) => {
3515 self.client
3516 .log_message(
3517 MessageType::INFO,
3518 format!(
3519 "created rename edit with {} file(s), {} total change(s)",
3520 workspace_edit
3521 .changes
3522 .as_ref()
3523 .map(|c| c.len())
3524 .unwrap_or(0),
3525 workspace_edit
3526 .changes
3527 .as_ref()
3528 .map(|c| c.values().map(|v| v.len()).sum::<usize>())
3529 .unwrap_or(0)
3530 ),
3531 )
3532 .await;
3533
3534 Ok(Some(workspace_edit))
3539 }
3540
3541 None => {
3542 self.client
3543 .log_message(MessageType::INFO, "No locations found for renaming")
3544 .await;
3545 Ok(None)
3546 }
3547 }
3548 }
3549
3550 async fn symbol(
3551 &self,
3552 params: WorkspaceSymbolParams,
3553 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SymbolInformation>>> {
3554 self.client
3555 .log_message(MessageType::INFO, "got workspace/symbol request")
3556 .await;
3557
3558 let files: Vec<(Url, String)> = {
3560 let cache = self.text_cache.read().await;
3561 cache
3562 .iter()
3563 .filter(|(uri_str, _)| uri_str.ends_with(".sol"))
3564 .filter_map(|(uri_str, (_, content))| {
3565 Url::parse(uri_str).ok().map(|uri| (uri, content.clone()))
3566 })
3567 .collect()
3568 };
3569
3570 let mut all_symbols = symbols::extract_workspace_symbols(&files);
3571 if !params.query.is_empty() {
3572 let query = params.query.to_lowercase();
3573 all_symbols.retain(|symbol| symbol.name.to_lowercase().contains(&query));
3574 }
3575 if all_symbols.is_empty() {
3576 self.client
3577 .log_message(MessageType::INFO, "No symbols found")
3578 .await;
3579 Ok(None)
3580 } else {
3581 self.client
3582 .log_message(
3583 MessageType::INFO,
3584 format!("found {} symbols", all_symbols.len()),
3585 )
3586 .await;
3587 Ok(Some(all_symbols))
3588 }
3589 }
3590
3591 async fn document_symbol(
3592 &self,
3593 params: DocumentSymbolParams,
3594 ) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
3595 self.client
3596 .log_message(MessageType::INFO, "got textDocument/documentSymbol request")
3597 .await;
3598 let uri = params.text_document.uri;
3599 let file_path = match uri.to_file_path() {
3600 Ok(path) => path,
3601 Err(_) => {
3602 self.client
3603 .log_message(MessageType::ERROR, "invalid file uri")
3604 .await;
3605 return Ok(None);
3606 }
3607 };
3608
3609 let source = {
3611 let cache = self.text_cache.read().await;
3612 cache
3613 .get(&uri.to_string())
3614 .map(|(_, content)| content.clone())
3615 };
3616 let source = match source {
3617 Some(s) => s,
3618 None => match std::fs::read_to_string(&file_path) {
3619 Ok(s) => s,
3620 Err(_) => return Ok(None),
3621 },
3622 };
3623
3624 let symbols = symbols::extract_document_symbols(&source);
3625 if symbols.is_empty() {
3626 self.client
3627 .log_message(MessageType::INFO, "no document symbols found")
3628 .await;
3629 Ok(None)
3630 } else {
3631 self.client
3632 .log_message(
3633 MessageType::INFO,
3634 format!("found {} document symbols", symbols.len()),
3635 )
3636 .await;
3637 Ok(Some(DocumentSymbolResponse::Nested(symbols)))
3638 }
3639 }
3640
3641 async fn document_highlight(
3642 &self,
3643 params: DocumentHighlightParams,
3644 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentHighlight>>> {
3645 self.client
3646 .log_message(
3647 MessageType::INFO,
3648 "got textDocument/documentHighlight request",
3649 )
3650 .await;
3651
3652 let uri = params.text_document_position_params.text_document.uri;
3653 let position = params.text_document_position_params.position;
3654
3655 let source = {
3656 let cache = self.text_cache.read().await;
3657 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3658 };
3659
3660 let source = match source {
3661 Some(s) => s,
3662 None => {
3663 let file_path = match uri.to_file_path() {
3664 Ok(p) => p,
3665 Err(_) => return Ok(None),
3666 };
3667 match std::fs::read_to_string(&file_path) {
3668 Ok(s) => s,
3669 Err(_) => return Ok(None),
3670 }
3671 }
3672 };
3673
3674 let highlights = highlight::document_highlights(&source, position);
3675
3676 if highlights.is_empty() {
3677 self.client
3678 .log_message(MessageType::INFO, "no document highlights found")
3679 .await;
3680 Ok(None)
3681 } else {
3682 self.client
3683 .log_message(
3684 MessageType::INFO,
3685 format!("found {} document highlights", highlights.len()),
3686 )
3687 .await;
3688 Ok(Some(highlights))
3689 }
3690 }
3691
3692 async fn hover(&self, params: HoverParams) -> tower_lsp::jsonrpc::Result<Option<Hover>> {
3693 self.client
3694 .log_message(MessageType::INFO, "got textDocument/hover request")
3695 .await;
3696
3697 let uri = params.text_document_position_params.text_document.uri;
3698 let position = params.text_document_position_params.position;
3699
3700 let file_path = match uri.to_file_path() {
3701 Ok(path) => path,
3702 Err(_) => {
3703 self.client
3704 .log_message(MessageType::ERROR, "invalid file uri")
3705 .await;
3706 return Ok(None);
3707 }
3708 };
3709
3710 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3711 Some(bytes) => bytes,
3712 None => return Ok(None),
3713 };
3714
3715 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3716 let cached_build = match cached_build {
3717 Some(cb) => cb,
3718 None => return Ok(None),
3719 };
3720
3721 let result = hover::hover_info(&cached_build, &uri, position, &source_bytes);
3722
3723 if result.is_some() {
3724 self.client
3725 .log_message(MessageType::INFO, "hover info found")
3726 .await;
3727 } else {
3728 self.client
3729 .log_message(MessageType::INFO, "no hover info found")
3730 .await;
3731 }
3732
3733 Ok(result)
3734 }
3735
3736 async fn signature_help(
3737 &self,
3738 params: SignatureHelpParams,
3739 ) -> tower_lsp::jsonrpc::Result<Option<SignatureHelp>> {
3740 self.client
3741 .log_message(MessageType::INFO, "got textDocument/signatureHelp request")
3742 .await;
3743
3744 let uri = params.text_document_position_params.text_document.uri;
3745 let position = params.text_document_position_params.position;
3746
3747 let file_path = match uri.to_file_path() {
3748 Ok(path) => path,
3749 Err(_) => {
3750 self.client
3751 .log_message(MessageType::ERROR, "invalid file uri")
3752 .await;
3753 return Ok(None);
3754 }
3755 };
3756
3757 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3758 Some(bytes) => bytes,
3759 None => return Ok(None),
3760 };
3761
3762 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3763 let cached_build = match cached_build {
3764 Some(cb) => cb,
3765 None => return Ok(None),
3766 };
3767
3768 let result = hover::signature_help(&cached_build, &source_bytes, position);
3769
3770 Ok(result)
3771 }
3772
3773 async fn document_link(
3774 &self,
3775 params: DocumentLinkParams,
3776 ) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
3777 self.client
3778 .log_message(MessageType::INFO, "got textDocument/documentLink request")
3779 .await;
3780
3781 let uri = params.text_document.uri;
3782 let file_path = match uri.to_file_path() {
3783 Ok(path) => path,
3784 Err(_) => {
3785 self.client
3786 .log_message(MessageType::ERROR, "invalid file uri")
3787 .await;
3788 return Ok(None);
3789 }
3790 };
3791
3792 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
3793 Some(bytes) => bytes,
3794 None => return Ok(None),
3795 };
3796
3797 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
3798 let cached_build = match cached_build {
3799 Some(cb) => cb,
3800 None => return Ok(None),
3801 };
3802
3803 let result = links::document_links(&cached_build, &uri, &source_bytes);
3804
3805 if result.is_empty() {
3806 self.client
3807 .log_message(MessageType::INFO, "no document links found")
3808 .await;
3809 Ok(None)
3810 } else {
3811 self.client
3812 .log_message(
3813 MessageType::INFO,
3814 format!("found {} document links", result.len()),
3815 )
3816 .await;
3817 Ok(Some(result))
3818 }
3819 }
3820
3821 async fn semantic_tokens_full(
3822 &self,
3823 params: SemanticTokensParams,
3824 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensResult>> {
3825 self.client
3826 .log_message(
3827 MessageType::INFO,
3828 "got textDocument/semanticTokens/full request",
3829 )
3830 .await;
3831
3832 let uri = params.text_document.uri;
3833 let source = {
3834 let cache = self.text_cache.read().await;
3835 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3836 };
3837
3838 let source = match source {
3839 Some(s) => s,
3840 None => {
3841 let file_path = match uri.to_file_path() {
3843 Ok(p) => p,
3844 Err(_) => return Ok(None),
3845 };
3846 match std::fs::read_to_string(&file_path) {
3847 Ok(s) => s,
3848 Err(_) => return Ok(None),
3849 }
3850 }
3851 };
3852
3853 let mut tokens = semantic_tokens::semantic_tokens_full(&source);
3854
3855 let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
3857 let result_id = id.to_string();
3858 tokens.result_id = Some(result_id.clone());
3859
3860 {
3861 let mut cache = self.semantic_token_cache.write().await;
3862 cache.insert(uri.to_string(), (result_id, tokens.data.clone()));
3863 }
3864
3865 Ok(Some(SemanticTokensResult::Tokens(tokens)))
3866 }
3867
3868 async fn semantic_tokens_range(
3869 &self,
3870 params: SemanticTokensRangeParams,
3871 ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensRangeResult>> {
3872 self.client
3873 .log_message(
3874 MessageType::INFO,
3875 "got textDocument/semanticTokens/range request",
3876 )
3877 .await;
3878
3879 let uri = params.text_document.uri;
3880 let range = params.range;
3881 let source = {
3882 let cache = self.text_cache.read().await;
3883 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3884 };
3885
3886 let source = match source {
3887 Some(s) => s,
3888 None => {
3889 let file_path = match uri.to_file_path() {
3890 Ok(p) => p,
3891 Err(_) => return Ok(None),
3892 };
3893 match std::fs::read_to_string(&file_path) {
3894 Ok(s) => s,
3895 Err(_) => return Ok(None),
3896 }
3897 }
3898 };
3899
3900 let tokens =
3901 semantic_tokens::semantic_tokens_range(&source, range.start.line, range.end.line);
3902
3903 Ok(Some(SemanticTokensRangeResult::Tokens(tokens)))
3904 }
3905
    /// Handles `textDocument/semanticTokens/full/delta`.
    ///
    /// Re-tokenizes the whole file, then tries to express the result as a
    /// delta against the snapshot identified by `previous_result_id`. If the
    /// cached snapshot is missing or its id no longer matches, the full token
    /// set is returned instead (the LSP-sanctioned fallback).
    async fn semantic_tokens_full_delta(
        &self,
        params: SemanticTokensDeltaParams,
    ) -> tower_lsp::jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
        self.client
            .log_message(
                MessageType::INFO,
                "got textDocument/semanticTokens/full/delta request",
            )
            .await;

        let uri = params.text_document.uri;
        let previous_result_id = params.previous_result_id;

        // Prefer the in-memory buffer; fall back to reading from disk.
        let source = {
            let cache = self.text_cache.read().await;
            cache.get(&uri.to_string()).map(|(_, s)| s.clone())
        };

        let source = match source {
            Some(s) => s,
            None => {
                let file_path = match uri.to_file_path() {
                    Ok(p) => p,
                    Err(_) => return Ok(None),
                };
                match std::fs::read_to_string(&file_path) {
                    Ok(s) => s,
                    Err(_) => return Ok(None),
                }
            }
        };

        let mut new_tokens = semantic_tokens::semantic_tokens_full(&source);

        // Stamp the fresh tokenization with the next id from the shared counter.
        let id = self.semantic_token_id.fetch_add(1, Ordering::Relaxed);
        let new_result_id = id.to_string();
        new_tokens.result_id = Some(new_result_id.clone());

        let uri_str = uri.to_string();

        // Grab the previous snapshot *before* overwriting the cache entry, and
        // only if its stored id matches what the client claims to hold.
        let old_tokens = {
            let cache = self.semantic_token_cache.read().await;
            cache
                .get(&uri_str)
                .filter(|(rid, _)| *rid == previous_result_id)
                .map(|(_, tokens)| tokens.clone())
        };

        // Replace the cached snapshot unconditionally so the next delta request
        // diffs against this result.
        {
            let mut cache = self.semantic_token_cache.write().await;
            cache.insert(uri_str, (new_result_id.clone(), new_tokens.data.clone()));
        }

        match old_tokens {
            Some(old) => {
                let edits = semantic_tokens::compute_delta(&old, &new_tokens.data);
                Ok(Some(SemanticTokensFullDeltaResult::TokensDelta(
                    SemanticTokensDelta {
                        result_id: Some(new_result_id),
                        edits,
                    },
                )))
            }
            None => {
                // No usable baseline — send the full token set.
                Ok(Some(SemanticTokensFullDeltaResult::Tokens(new_tokens)))
            }
        }
    }
3980
3981 async fn folding_range(
3982 &self,
3983 params: FoldingRangeParams,
3984 ) -> tower_lsp::jsonrpc::Result<Option<Vec<FoldingRange>>> {
3985 self.client
3986 .log_message(MessageType::INFO, "got textDocument/foldingRange request")
3987 .await;
3988
3989 let uri = params.text_document.uri;
3990
3991 let source = {
3992 let cache = self.text_cache.read().await;
3993 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
3994 };
3995
3996 let source = match source {
3997 Some(s) => s,
3998 None => {
3999 let file_path = match uri.to_file_path() {
4000 Ok(p) => p,
4001 Err(_) => return Ok(None),
4002 };
4003 match std::fs::read_to_string(&file_path) {
4004 Ok(s) => s,
4005 Err(_) => return Ok(None),
4006 }
4007 }
4008 };
4009
4010 let ranges = folding::folding_ranges(&source);
4011
4012 if ranges.is_empty() {
4013 self.client
4014 .log_message(MessageType::INFO, "no folding ranges found")
4015 .await;
4016 Ok(None)
4017 } else {
4018 self.client
4019 .log_message(
4020 MessageType::INFO,
4021 format!("found {} folding ranges", ranges.len()),
4022 )
4023 .await;
4024 Ok(Some(ranges))
4025 }
4026 }
4027
4028 async fn selection_range(
4029 &self,
4030 params: SelectionRangeParams,
4031 ) -> tower_lsp::jsonrpc::Result<Option<Vec<SelectionRange>>> {
4032 self.client
4033 .log_message(MessageType::INFO, "got textDocument/selectionRange request")
4034 .await;
4035
4036 let uri = params.text_document.uri;
4037
4038 let source = {
4039 let cache = self.text_cache.read().await;
4040 cache.get(&uri.to_string()).map(|(_, s)| s.clone())
4041 };
4042
4043 let source = match source {
4044 Some(s) => s,
4045 None => {
4046 let file_path = match uri.to_file_path() {
4047 Ok(p) => p,
4048 Err(_) => return Ok(None),
4049 };
4050 match std::fs::read_to_string(&file_path) {
4051 Ok(s) => s,
4052 Err(_) => return Ok(None),
4053 }
4054 }
4055 };
4056
4057 let ranges = selection::selection_ranges(&source, ¶ms.positions);
4058
4059 if ranges.is_empty() {
4060 self.client
4061 .log_message(MessageType::INFO, "no selection ranges found")
4062 .await;
4063 Ok(None)
4064 } else {
4065 self.client
4066 .log_message(
4067 MessageType::INFO,
4068 format!("found {} selection ranges", ranges.len()),
4069 )
4070 .await;
4071 Ok(Some(ranges))
4072 }
4073 }
4074
4075 async fn inlay_hint(
4076 &self,
4077 params: InlayHintParams,
4078 ) -> tower_lsp::jsonrpc::Result<Option<Vec<InlayHint>>> {
4079 self.client
4080 .log_message(MessageType::INFO, "got textDocument/inlayHint request")
4081 .await;
4082
4083 let uri = params.text_document.uri;
4084 let range = params.range;
4085
4086 let file_path = match uri.to_file_path() {
4087 Ok(path) => path,
4088 Err(_) => {
4089 self.client
4090 .log_message(MessageType::ERROR, "invalid file uri")
4091 .await;
4092 return Ok(None);
4093 }
4094 };
4095
4096 let source_bytes = match self.get_source_bytes(&uri, &file_path).await {
4097 Some(bytes) => bytes,
4098 None => return Ok(None),
4099 };
4100
4101 let cached_build = self.get_or_fetch_build(&uri, &file_path, false).await;
4102 let cached_build = match cached_build {
4103 Some(cb) => cb,
4104 None => return Ok(None),
4105 };
4106
4107 let mut hints = inlay_hints::inlay_hints(&cached_build, &uri, range, &source_bytes);
4108
4109 let settings = self.settings.read().await;
4111 if !settings.inlay_hints.parameters {
4112 hints.retain(|h| h.kind != Some(InlayHintKind::PARAMETER));
4113 }
4114 if !settings.inlay_hints.gas_estimates {
4115 hints.retain(|h| h.kind != Some(InlayHintKind::TYPE));
4116 }
4117
4118 if hints.is_empty() {
4119 self.client
4120 .log_message(MessageType::INFO, "no inlay hints found")
4121 .await;
4122 Ok(None)
4123 } else {
4124 self.client
4125 .log_message(
4126 MessageType::INFO,
4127 format!("found {} inlay hints", hints.len()),
4128 )
4129 .await;
4130 Ok(Some(hints))
4131 }
4132 }
4133
    /// Handles `workspace/willRenameFiles`.
    ///
    /// When `updateImportsOnRename` is enabled, computes the text edits needed
    /// to rewrite import paths across the project for the renamed files (and
    /// every file under a renamed folder), patches the in-memory text cache
    /// with those edits, and returns them as a `WorkspaceEdit` so the client
    /// applies the same edits to its buffers/disk.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no source files are
    /// discovered, nothing survives folder expansion, or no edits are needed.
    async fn will_rename_files(
        &self,
        params: RenameFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        if !self
            .settings
            .read()
            .await
            .file_operations
            .update_imports_on_rename
        {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willRenameFiles: updateImportsOnRename disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover all project source files off the async runtime (file-system
        // walking is blocking work).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willRenameFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Convert the client's uri pairs to filesystem path pairs; entries that
        // fail to parse or are not file uris are silently dropped.
        let raw_renames: Vec<(std::path::PathBuf, std::path::PathBuf)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                let old_path = old_uri.to_file_path().ok()?;
                let new_path = new_uri.to_file_path().ok()?;
                Some((old_path, new_path))
            })
            .collect();

        // A renamed folder expands into one rename per source file under it.
        let renames = file_operations::expand_folder_renames(&raw_renames, &source_files);

        if renames.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} rename(s) after folder expansion",
                    renames.len()
                ),
            )
            .await;

        // Find source files that are not yet in the text cache so they can be
        // loaded from disk before edits are computed.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            // Bulk-read missing files off the runtime; unreadable files are
            // skipped.
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                // `or_insert` keeps any entry that appeared concurrently; 0 is
                // presumably a placeholder document version — TODO confirm.
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute the import-rewrite edits while holding a read lock on the
        // text cache; the closure serves file contents from the cache only.
        let text_cache = self.text_cache.clone();
        let result = {
            let tc = text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::rename_imports(
                &source_files,
                &renames,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal anomalies from the edit computation as a warning.
        let stats = &result.stats;
        if stats.read_failures > 0 || stats.pathdiff_failures > 0 || stats.duplicate_renames > 0 {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willRenameFiles stats: read_failures={}, pathdiff_failures={}, \
                         duplicate_renames={}, no_parent={}, no_op_skips={}, dedup_skips={}",
                        stats.read_failures,
                        stats.pathdiff_failures,
                        stats.duplicate_renames,
                        stats.no_parent,
                        stats.no_op_skips,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;

        if all_edits.is_empty() {
            self.client
                .log_message(MessageType::INFO, "willRenameFiles: no import edits needed")
                .await;
            return Ok(None);
        }

        // Apply the edits to our own text cache so server state matches what
        // the client will show after it applies the returned WorkspaceEdit.
        // NOTE(review): log_message is awaited while the write guard is held.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willRenameFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willRenameFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
4328
    /// Handles `workspace/didRenameFiles` (notification — no response).
    ///
    /// Marks the project cache dirty, records both old and new paths as
    /// changed, migrates text-cache entries to the new uris, evicts stale
    /// per-file AST/completion entries, and kicks off a background re-index of
    /// the whole project.
    async fn did_rename_files(&self, params: RenameFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didRenameFiles: {} file(s)", params.files.len()),
            )
            .await;
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            // Record both sides of every rename so the cache sync knows which
            // files were touched.
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(old_uri) = Url::parse(&file.old_uri)
                    && let Ok(old_path) = old_uri.to_file_path()
                {
                    changed.insert(old_path.to_string_lossy().to_string());
                }
                if let Ok(new_uri) = Url::parse(&file.new_uri)
                    && let Ok(new_path) = new_uri.to_file_path()
                {
                    changed.insert(new_path.to_string_lossy().to_string());
                }
            }
        }

        let raw_uri_pairs: Vec<(Url, Url)> = params
            .files
            .iter()
            .filter_map(|fr| {
                let old_uri = Url::parse(&fr.old_uri).ok()?;
                let new_uri = Url::parse(&fr.new_uri).ok()?;
                Some((old_uri, new_uri))
            })
            .collect();

        // Expand folder renames into per-file renames using the union of
        // currently-cached paths and freshly-discovered project sources.
        let file_renames = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            // Release the read guard before the spawn_blocking await below.
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_renames_from_paths(&raw_uri_pairs, &all_paths)
        };

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didRenameFiles: migrating {} cache entry/entries",
                    file_renames.len()
                ),
            )
            .await;

        // Move text-cache entries to their new keys (content is unchanged).
        {
            let mut tc = self.text_cache.write().await;
            for (old_key, new_key) in &file_renames {
                if let Some(entry) = tc.remove(old_key) {
                    tc.insert(new_key.clone(), entry);
                }
            }
        }
        // AST and completion entries are evicted rather than migrated; they
        // will be rebuilt on demand.
        {
            let mut ac = self.ast_cache.write().await;
            for (old_key, _) in &file_renames {
                ac.remove(old_key);
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for (old_key, _) in &file_renames {
                cc.remove(old_key);
            }
        }

        // Drop the whole-project AST entry; it is rebuilt by the spawned task.
        let root_key = self.project_cache_key().await;
        if let Some(ref key) = root_key {
            self.ast_cache.write().await.remove(key);
        }

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Re-index in the background so the notification handler returns fast.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    let source_count = cached_build.nodes.len();
                    ast_cache.write().await.insert(cache_key, cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didRenameFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didRenameFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
4470
    /// Handles `workspace/willDeleteFiles`.
    ///
    /// When import-updating on delete is enabled, computes edits that remove
    /// import statements referring to the soon-to-be-deleted files (folders
    /// expand to every source file under them), patches the in-memory text
    /// cache, and returns the edits as a `WorkspaceEdit` for the client.
    ///
    /// Returns `Ok(None)` when the feature is disabled, no sources are found,
    /// folder expansion yields nothing, or no edits are needed.
    async fn will_delete_files(
        &self,
        params: DeleteFilesParams,
    ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/willDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        if !update_imports_on_delete_enabled(&*self.settings.read().await) {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: updateImportsOnDelete disabled",
                )
                .await;
            return Ok(None);
        }

        // Discover project sources off the runtime (blocking fs walk).
        let config = self.foundry_config.read().await.clone();
        let project_root = config.root.clone();
        let source_files: Vec<String> = tokio::task::spawn_blocking(move || {
            crate::solc::discover_source_files(&config)
                .into_iter()
                .filter_map(|p| p.to_str().map(String::from))
                .collect()
        })
        .await
        .unwrap_or_default();

        if source_files.is_empty() {
            self.client
                .log_message(
                    MessageType::WARNING,
                    "willDeleteFiles: no source files found",
                )
                .await;
            return Ok(None);
        }

        // Unparseable or non-file uris are dropped silently.
        let raw_deletes: Vec<std::path::PathBuf> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .filter_map(|u| u.to_file_path().ok())
            .collect();

        // A deleted folder expands into one target per source file under it.
        let deletes = file_operations::expand_folder_deletes(&raw_deletes, &source_files);
        if deletes.is_empty() {
            return Ok(None);
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} delete target(s) after folder expansion",
                    deletes.len()
                ),
            )
            .await;

        // Pre-load any source files missing from the text cache so edit
        // computation can serve everything from memory.
        let files_to_read: Vec<(String, String)> = {
            let tc = self.text_cache.read().await;
            source_files
                .iter()
                .filter_map(|fs_path| {
                    let uri = Url::from_file_path(fs_path).ok()?;
                    let uri_str = uri.to_string();
                    if tc.contains_key(&uri_str) {
                        None
                    } else {
                        Some((uri_str, fs_path.clone()))
                    }
                })
                .collect()
        };

        if !files_to_read.is_empty() {
            let loaded: Vec<(String, String)> = tokio::task::spawn_blocking(move || {
                files_to_read
                    .into_iter()
                    .filter_map(|(uri_str, fs_path)| {
                        let content = std::fs::read_to_string(&fs_path).ok()?;
                        Some((uri_str, content))
                    })
                    .collect()
            })
            .await
            .unwrap_or_default();

            let mut tc = self.text_cache.write().await;
            for (uri_str, content) in loaded {
                // `or_insert` keeps any concurrently-added entry; 0 is
                // presumably a placeholder document version — TODO confirm.
                tc.entry(uri_str).or_insert((0, content));
            }
        }

        // Compute import-removal edits; the closure serves bytes from the
        // text cache only (read lock held for the whole computation).
        let result = {
            let tc = self.text_cache.read().await;
            let get_source_bytes = |fs_path: &str| -> Option<Vec<u8>> {
                let uri = Url::from_file_path(fs_path).ok()?;
                let (_, content) = tc.get(&uri.to_string())?;
                Some(content.as_bytes().to_vec())
            };

            file_operations::delete_imports(
                &source_files,
                &deletes,
                &project_root,
                &get_source_bytes,
            )
        };

        // Surface non-fatal anomalies from the computation as a warning.
        let stats = &result.stats;
        if stats.read_failures > 0
            || stats.statement_range_failures > 0
            || stats.duplicate_deletes > 0
        {
            self.client
                .log_message(
                    MessageType::WARNING,
                    format!(
                        "willDeleteFiles stats: read_failures={}, statement_range_failures={}, \
                         duplicate_deletes={}, no_parent={}, dedup_skips={}",
                        stats.read_failures,
                        stats.statement_range_failures,
                        stats.duplicate_deletes,
                        stats.no_parent,
                        stats.dedup_skips,
                    ),
                )
                .await;
        }

        let all_edits = result.edits;
        if all_edits.is_empty() {
            self.client
                .log_message(
                    MessageType::INFO,
                    "willDeleteFiles: no import-removal edits needed",
                )
                .await;
            return Ok(None);
        }

        // Keep our text cache consistent with what the client will show once
        // it applies the returned edits.
        {
            let mut tc = self.text_cache.write().await;
            let patched = file_operations::apply_edits_to_cache(&all_edits, &mut tc);
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("willDeleteFiles: patched {} cached file(s)", patched),
                )
                .await;
        }

        let total_edits: usize = all_edits.values().map(|v| v.len()).sum();
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "willDeleteFiles: {} edit(s) across {} file(s)",
                    total_edits,
                    all_edits.len()
                ),
            )
            .await;

        Ok(Some(WorkspaceEdit {
            changes: Some(all_edits),
            document_changes: None,
            change_annotations: None,
        }))
    }
4646
    /// Handles `workspace/didDeleteFiles` (notification — no response).
    ///
    /// Marks the project cache dirty, clears published diagnostics for each
    /// deleted file, evicts the deleted uris from every per-file cache
    /// (text, AST, completion, semantic tokens, pending-scaffold), and kicks
    /// off a background re-index of the whole project.
    async fn did_delete_files(&self, params: DeleteFilesParams) {
        self.client
            .log_message(
                MessageType::INFO,
                format!("workspace/didDeleteFiles: {} file(s)", params.files.len()),
            )
            .await;
        self.project_cache_dirty.store(true, Ordering::Release);
        {
            // Record the deleted paths for the next project-cache sync.
            let mut changed = self.project_cache_changed_files.write().await;
            for file in &params.files {
                if let Ok(uri) = Url::parse(&file.uri)
                    && let Ok(path) = uri.to_file_path()
                {
                    changed.insert(path.to_string_lossy().to_string());
                }
            }
        }

        let raw_delete_uris: Vec<Url> = params
            .files
            .iter()
            .filter_map(|fd| Url::parse(&fd.uri).ok())
            .collect();

        // Expand folder deletions into per-file paths using the union of
        // cached paths and freshly-discovered project sources.
        let deleted_paths = {
            let tc = self.text_cache.read().await;
            let cache_paths: Vec<std::path::PathBuf> = tc
                .keys()
                .filter_map(|k| Url::parse(k).ok())
                .filter_map(|u| u.to_file_path().ok())
                .collect();
            // Release the read guard before the spawn_blocking await below.
            drop(tc);

            let cfg = self.foundry_config.read().await.clone();
            let discovered_paths =
                tokio::task::spawn_blocking(move || crate::solc::discover_source_files(&cfg))
                    .await
                    .unwrap_or_default();

            let mut all_paths: HashSet<std::path::PathBuf> = discovered_paths.into_iter().collect();
            all_paths.extend(cache_paths);
            let all_paths: Vec<std::path::PathBuf> = all_paths.into_iter().collect();

            file_operations::expand_folder_deletes_from_paths(&raw_delete_uris, &all_paths)
        };

        // Cache keys are uri strings; keep the parsed uris too for clearing
        // diagnostics.
        let mut deleted_keys: HashSet<String> = HashSet::new();
        let mut deleted_uris: Vec<Url> = Vec::new();
        for path in deleted_paths {
            if let Ok(uri) = Url::from_file_path(&path) {
                deleted_keys.insert(uri.to_string());
                deleted_uris.push(uri);
            }
        }
        if deleted_keys.is_empty() {
            return;
        }

        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: deleting {} cache/diagnostic entry(ies)",
                    deleted_keys.len()
                ),
            )
            .await;

        // Publish an empty diagnostic set so the client drops any stale
        // squiggles for files that no longer exist.
        for uri in &deleted_uris {
            self.client
                .publish_diagnostics(uri.clone(), vec![], None)
                .await;
        }

        // Evict the deleted uris from each cache, counting removals per cache
        // for the summary log below. Each lock is taken briefly and in turn.
        let mut removed_text = 0usize;
        let mut removed_ast = 0usize;
        let mut removed_completion = 0usize;
        let mut removed_semantic = 0usize;
        let mut removed_pending_create = 0usize;
        {
            let mut tc = self.text_cache.write().await;
            for key in &deleted_keys {
                if tc.remove(key).is_some() {
                    removed_text += 1;
                }
            }
        }
        {
            let mut ac = self.ast_cache.write().await;
            for key in &deleted_keys {
                if ac.remove(key).is_some() {
                    removed_ast += 1;
                }
            }
        }
        {
            let mut cc = self.completion_cache.write().await;
            for key in &deleted_keys {
                if cc.remove(key).is_some() {
                    removed_completion += 1;
                }
            }
        }
        {
            let mut sc = self.semantic_token_cache.write().await;
            for key in &deleted_keys {
                if sc.remove(key).is_some() {
                    removed_semantic += 1;
                }
            }
        }
        {
            let mut pending = self.pending_create_scaffold.write().await;
            for key in &deleted_keys {
                if pending.remove(key) {
                    removed_pending_create += 1;
                }
            }
        }
        self.client
            .log_message(
                MessageType::INFO,
                format!(
                    "didDeleteFiles: removed caches text={} ast={} completion={} semantic={} pendingCreate={}",
                    removed_text,
                    removed_ast,
                    removed_completion,
                    removed_semantic,
                    removed_pending_create,
                ),
            )
            .await;

        // Drop the whole-project AST entry; the spawned task rebuilds it.
        let root_key = self.project_cache_key().await;
        if let Some(ref key) = root_key {
            self.ast_cache.write().await.remove(key);
        }

        let foundry_config = self.foundry_config.read().await.clone();
        let ast_cache = self.ast_cache.clone();
        let client = self.client.clone();
        let text_cache_snapshot = self.text_cache.read().await.clone();

        // Re-index in the background so the notification handler returns fast.
        tokio::spawn(async move {
            let Some(cache_key) = root_key else {
                return;
            };
            match crate::solc::solc_project_index(
                &foundry_config,
                Some(&client),
                Some(&text_cache_snapshot),
            )
            .await
            {
                Ok(ast_data) => {
                    let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
                    let source_count = cached_build.nodes.len();
                    ast_cache.write().await.insert(cache_key, cached_build);
                    client
                        .log_message(
                            MessageType::INFO,
                            format!("didDeleteFiles: re-indexed {} source files", source_count),
                        )
                        .await;
                }
                Err(e) => {
                    client
                        .log_message(
                            MessageType::WARNING,
                            format!("didDeleteFiles: re-index failed: {e}"),
                        )
                        .await;
                }
            }
        });
    }
4824
4825 async fn will_create_files(
4826 &self,
4827 params: CreateFilesParams,
4828 ) -> tower_lsp::jsonrpc::Result<Option<WorkspaceEdit>> {
4829 self.client
4830 .log_message(
4831 MessageType::INFO,
4832 format!("workspace/willCreateFiles: {} file(s)", params.files.len()),
4833 )
4834 .await;
4835 if !self
4836 .settings
4837 .read()
4838 .await
4839 .file_operations
4840 .template_on_create
4841 {
4842 self.client
4843 .log_message(
4844 MessageType::INFO,
4845 "willCreateFiles: templateOnCreate disabled",
4846 )
4847 .await;
4848 return Ok(None);
4849 }
4850 self.client
4851 .log_message(
4852 MessageType::INFO,
4853 "willCreateFiles: skipping pre-create edits; scaffolding via didCreateFiles",
4854 )
4855 .await;
4856 Ok(None)
4857 }
4858
4859 async fn did_create_files(&self, params: CreateFilesParams) {
4860 self.client
4861 .log_message(
4862 MessageType::INFO,
4863 format!("workspace/didCreateFiles: {} file(s)", params.files.len()),
4864 )
4865 .await;
4866 self.project_cache_dirty.store(true, Ordering::Release);
4867 {
4868 let mut changed = self.project_cache_changed_files.write().await;
4869 for file in ¶ms.files {
4870 if let Ok(uri) = Url::parse(&file.uri)
4871 && let Ok(path) = uri.to_file_path()
4872 {
4873 changed.insert(path.to_string_lossy().to_string());
4874 }
4875 }
4876 }
4877 if !self
4878 .settings
4879 .read()
4880 .await
4881 .file_operations
4882 .template_on_create
4883 {
4884 self.client
4885 .log_message(
4886 MessageType::INFO,
4887 "didCreateFiles: templateOnCreate disabled",
4888 )
4889 .await;
4890 return;
4891 }
4892
4893 let config = self.foundry_config.read().await;
4894 let solc_version = config.solc_version.clone();
4895 drop(config);
4896
4897 let mut apply_edits: HashMap<Url, Vec<TextEdit>> = HashMap::new();
4902 let mut staged_content: HashMap<String, String> = HashMap::new();
4903 let mut created_uris: Vec<String> = Vec::new();
4904 {
4905 let tc = self.text_cache.read().await;
4906 for file_create in ¶ms.files {
4907 let uri = match Url::parse(&file_create.uri) {
4908 Ok(u) => u,
4909 Err(_) => continue,
4910 };
4911 let uri_str = uri.to_string();
4912
4913 let open_has_content = tc
4914 .get(&uri_str)
4915 .map_or(false, |(_, c)| c.chars().any(|ch| !ch.is_whitespace()));
4916 let path = match uri.to_file_path() {
4917 Ok(p) => p,
4918 Err(_) => continue,
4919 };
4920 let disk_has_content = std::fs::read_to_string(&path)
4921 .map_or(false, |c| c.chars().any(|ch| !ch.is_whitespace()));
4922
4923 if open_has_content {
4926 self.client
4927 .log_message(
4928 MessageType::INFO,
4929 format!(
4930 "didCreateFiles: skip {} (open buffer already has content)",
4931 uri_str
4932 ),
4933 )
4934 .await;
4935 continue;
4936 }
4937
4938 if disk_has_content {
4940 self.client
4941 .log_message(
4942 MessageType::INFO,
4943 format!(
4944 "didCreateFiles: skip {} (disk file already has content)",
4945 uri_str
4946 ),
4947 )
4948 .await;
4949 continue;
4950 }
4951
4952 let content =
4953 match file_operations::generate_scaffold(&uri, solc_version.as_deref()) {
4954 Some(s) => s,
4955 None => continue,
4956 };
4957
4958 staged_content.insert(uri_str, content.clone());
4959 created_uris.push(uri.to_string());
4960
4961 apply_edits.entry(uri).or_default().push(TextEdit {
4962 range: Range {
4963 start: Position {
4964 line: 0,
4965 character: 0,
4966 },
4967 end: Position {
4968 line: 0,
4969 character: 0,
4970 },
4971 },
4972 new_text: content,
4973 });
4974 }
4975 }
4976
4977 if !apply_edits.is_empty() {
4978 {
4979 let mut pending = self.pending_create_scaffold.write().await;
4980 for uri in &created_uris {
4981 pending.insert(uri.clone());
4982 }
4983 }
4984
4985 let edit = WorkspaceEdit {
4986 changes: Some(apply_edits.clone()),
4987 document_changes: None,
4988 change_annotations: None,
4989 };
4990 self.client
4991 .log_message(
4992 MessageType::INFO,
4993 format!(
4994 "didCreateFiles: scaffolding {} empty file(s) via workspace/applyEdit",
4995 apply_edits.len()
4996 ),
4997 )
4998 .await;
4999 let apply_result = self.client.apply_edit(edit).await;
5000 let applied = apply_result.as_ref().is_ok_and(|r| r.applied);
5001
5002 if applied {
5003 let mut tc = self.text_cache.write().await;
5004 for (uri_str, content) in staged_content {
5005 tc.insert(uri_str, (0, content));
5006 }
5007 } else {
5008 if let Ok(resp) = &apply_result {
5009 self.client
5010 .log_message(
5011 MessageType::WARNING,
5012 format!(
5013 "didCreateFiles: applyEdit rejected (no disk fallback): {:?}",
5014 resp.failure_reason
5015 ),
5016 )
5017 .await;
5018 } else if let Err(e) = &apply_result {
5019 self.client
5020 .log_message(
5021 MessageType::WARNING,
5022 format!("didCreateFiles: applyEdit failed (no disk fallback): {e}"),
5023 )
5024 .await;
5025 }
5026 }
5027 }
5028
5029 for file_create in ¶ms.files {
5033 let Ok(uri) = Url::parse(&file_create.uri) else {
5034 continue;
5035 };
5036 let (version, content) = {
5037 let tc = self.text_cache.read().await;
5038 match tc.get(&uri.to_string()) {
5039 Some((v, c)) => (*v, c.clone()),
5040 None => continue,
5041 }
5042 };
5043 if !content.chars().any(|ch| !ch.is_whitespace()) {
5044 continue;
5045 }
5046 self.on_change(TextDocumentItem {
5047 uri,
5048 version,
5049 text: content,
5050 language_id: "solidity".to_string(),
5051 })
5052 .await;
5053 }
5054
5055 let root_key = self.project_cache_key().await;
5057 if let Some(ref key) = root_key {
5058 self.ast_cache.write().await.remove(key);
5059 }
5060
5061 let foundry_config = self.foundry_config.read().await.clone();
5062 let ast_cache = self.ast_cache.clone();
5063 let client = self.client.clone();
5064 let text_cache_snapshot = self.text_cache.read().await.clone();
5065
5066 tokio::spawn(async move {
5067 let Some(cache_key) = root_key else {
5068 return;
5069 };
5070 match crate::solc::solc_project_index(
5071 &foundry_config,
5072 Some(&client),
5073 Some(&text_cache_snapshot),
5074 )
5075 .await
5076 {
5077 Ok(ast_data) => {
5078 let cached_build = Arc::new(crate::goto::CachedBuild::new(ast_data, 0));
5079 let source_count = cached_build.nodes.len();
5080 ast_cache.write().await.insert(cache_key, cached_build);
5081 client
5082 .log_message(
5083 MessageType::INFO,
5084 format!("didCreateFiles: re-indexed {} source files", source_count),
5085 )
5086 .await;
5087 }
5088 Err(e) => {
5089 client
5090 .log_message(
5091 MessageType::WARNING,
5092 format!("didCreateFiles: re-index failed: {e}"),
5093 )
5094 .await;
5095 }
5096 }
5097 });
5098 }
5099}
5100
#[cfg(test)]
mod tests {
    use super::{
        start_or_mark_project_cache_sync_pending, stop_project_cache_sync_worker_or_reclaim,
        take_project_cache_sync_pending, try_claim_project_cache_dirty,
        update_imports_on_delete_enabled,
    };
    use std::sync::atomic::{AtomicBool, Ordering};

    #[test]
    fn update_imports_on_delete_enabled_defaults_true() {
        // Default settings must opt in to import rewriting on delete.
        let defaults = crate::config::Settings::default();
        assert!(update_imports_on_delete_enabled(&defaults));
    }

    #[test]
    fn update_imports_on_delete_enabled_respects_false() {
        // Explicitly disabling the setting must be honored.
        let mut settings = crate::config::Settings::default();
        settings.file_operations.update_imports_on_delete = false;
        assert!(!update_imports_on_delete_enabled(&settings));
    }

    #[test]
    fn project_cache_sync_burst_only_first_starts_worker() {
        let pending = AtomicBool::new(false);
        let running = AtomicBool::new(false);

        // The first caller in a burst claims the worker slot...
        let first = start_or_mark_project_cache_sync_pending(&pending, &running);
        assert!(first);
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));

        // ...while later callers only leave the pending marker set.
        let second = start_or_mark_project_cache_sync_pending(&pending, &running);
        assert!(!second);
        assert!(pending.load(Ordering::Acquire));
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_sync_take_pending_is_one_shot() {
        let pending = AtomicBool::new(true);

        // Taking the flag consumes it; a second take finds nothing.
        assert!(take_project_cache_sync_pending(&pending));
        assert!(!pending.load(Ordering::Acquire));
        assert!(!take_project_cache_sync_pending(&pending));
    }

    #[test]
    fn project_cache_sync_worker_stop_or_reclaim_handles_race() {
        let pending = AtomicBool::new(false);
        let running = AtomicBool::new(true);

        // No pending work: the worker stops and releases the running flag.
        let reclaimed = stop_project_cache_sync_worker_or_reclaim(&pending, &running);
        assert!(!reclaimed);
        assert!(!running.load(Ordering::Acquire));

        // Pending work raced in: the worker reclaims its slot and keeps going.
        pending.store(true, Ordering::Release);
        running.store(true, Ordering::Release);
        let reclaimed = stop_project_cache_sync_worker_or_reclaim(&pending, &running);
        assert!(reclaimed);
        assert!(running.load(Ordering::Acquire));
    }

    #[test]
    fn project_cache_dirty_claim_and_retry_cycle() {
        let dirty = AtomicBool::new(true);

        // A dirty flag can be claimed exactly once...
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));
        assert!(!try_claim_project_cache_dirty(&dirty));

        // ...until something marks it dirty again.
        dirty.store(true, Ordering::Release);
        assert!(try_claim_project_cache_dirty(&dirty));
        assert!(!dirty.load(Ordering::Acquire));
    }
}