1use std::collections::HashMap;
4use std::path::{Path, PathBuf};
5use std::sync::Arc;
6
7use crate::features::commands::{self, execute_command};
8use crate::features::document_links::collect_document_links;
9use crate::features::document_symbols::{collect_document_symbols, LexDocumentSymbol};
10use crate::features::folding_ranges::{folding_ranges as collect_folding_ranges, LexFoldingRange};
11use crate::features::formatting::{self, LineRange as FormattingLineRange, TextEditSpan};
12use crate::features::go_to_definition::goto_definition;
13use crate::features::hover::{hover as compute_hover, HoverResult};
14use crate::features::references::find_references;
15use crate::features::semantic_tokens::{
16 collect_semantic_tokens, LexSemanticToken, SEMANTIC_TOKEN_KINDS,
17};
18use clapfig::{Boundary, Clapfig, SearchPath};
19use lex_analysis::completion::{completion_items, CompletionCandidate, CompletionWorkspace};
20use lex_analysis::diagnostics::{
21 analyze as analyze_diagnostics, AnalysisDiagnostic, DiagnosticKind,
22};
23use lex_babel::formats::lex::formatting_rules::FormattingRules;
24use lex_babel::templates::{
25 build_asset_snippet, build_verbatim_snippet, AssetSnippetRequest, VerbatimSnippetRequest,
26};
27use lex_config::{LexConfig, CONFIG_FILE_NAME};
28use lex_core::lex::ast::links::{DocumentLink as AstDocumentLink, LinkType};
29use lex_core::lex::ast::range::SourceLocation;
30use lex_core::lex::ast::{Document, Position as AstPosition, Range as AstRange};
31use lex_core::lex::parsing;
32use serde_json::{json, Value};
33use tokio::sync::RwLock;
34use tower_lsp::async_trait;
35use tower_lsp::jsonrpc::{Error, Result};
36use tower_lsp::lsp_types::{
37 CodeActionParams, CodeActionProviderCapability, CodeActionResponse, CompletionItem,
38 CompletionOptions, CompletionParams, CompletionResponse, DidChangeConfigurationParams,
39 DocumentFormattingParams, DocumentLink, DocumentLinkOptions, DocumentLinkParams,
40 DocumentRangeFormattingParams, DocumentSymbol, DocumentSymbolParams, DocumentSymbolResponse,
41 ExecuteCommandOptions, ExecuteCommandParams, FoldingRange, FoldingRangeParams,
42 FoldingRangeProviderCapability, GotoDefinitionParams, GotoDefinitionResponse, Hover,
43 HoverContents, HoverParams, HoverProviderCapability, InitializeParams, InitializeResult,
44 InitializedParams, Location, MarkupContent, MarkupKind, OneOf, Position, Range,
45 ReferenceParams, SemanticToken, SemanticTokenType, SemanticTokens, SemanticTokensFullOptions,
46 SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams, SemanticTokensResult,
47 ServerCapabilities, ServerInfo, TextDocumentItem, TextDocumentSyncCapability,
48 TextDocumentSyncKind, TextEdit, Url, WorkDoneProgressOptions,
49};
50use tower_lsp::Client;
51
52use tower_lsp::lsp_types::Diagnostic;
53
54use tower_lsp::lsp_types::MessageType;
55
/// Minimal abstraction over the client-notification surface this server uses,
/// allowing tests to substitute a mock for `tower_lsp::Client`.
#[async_trait]
pub trait LspClient: Send + Sync + Clone + 'static {
    /// Publish the complete diagnostic set for `uri`, replacing any previous set.
    async fn publish_diagnostics(&self, uri: Url, diags: Vec<Diagnostic>, version: Option<i32>);
    /// Show a message in the editor's UI at the given severity.
    async fn show_message(&self, typ: MessageType, message: String);
}
61
/// Production implementation: forwards each call to the real `tower_lsp::Client`.
#[async_trait]
impl LspClient for Client {
    async fn publish_diagnostics(&self, uri: Url, diags: Vec<Diagnostic>, version: Option<i32>) {
        self.publish_diagnostics(uri, diags, version).await;
    }

    async fn show_message(&self, typ: MessageType, message: String) {
        self.show_message(typ, message).await;
    }
}
72
/// Pluggable implementation of every language feature the server offers.
/// All methods are synchronous and operate on an already-parsed [`Document`];
/// the server handles async plumbing, caching, and LSP type conversion.
pub trait FeatureProvider: Send + Sync + 'static {
    /// Semantic tokens for the whole document.
    fn semantic_tokens(&self, document: &Document) -> Vec<LexSemanticToken>;
    /// Hierarchical outline symbols.
    fn document_symbols(&self, document: &Document) -> Vec<LexDocumentSymbol>;
    /// Foldable regions.
    fn folding_ranges(&self, document: &Document) -> Vec<LexFoldingRange>;
    /// Hover content at `position`, if any.
    fn hover(&self, document: &Document, position: AstPosition) -> Option<HoverResult>;
    /// Definition target ranges for the item at `position` (empty = none).
    fn goto_definition(&self, document: &Document, position: AstPosition) -> Vec<AstRange>;
    /// All reference ranges for the item at `position`.
    fn references(
        &self,
        document: &Document,
        position: AstPosition,
        include_declaration: bool,
    ) -> Vec<AstRange>;
    /// Clickable links found in the document.
    fn document_links(&self, document: &Document) -> Vec<AstDocumentLink>;
    /// Whole-document formatting edits as byte spans into `source`.
    fn format_document(
        &self,
        document: &Document,
        source: &str,
        rules: Option<FormattingRules>,
    ) -> Vec<TextEditSpan>;
    /// Formatting edits restricted to a half-open line range.
    fn format_range(
        &self,
        document: &Document,
        source: &str,
        range: FormattingLineRange,
        rules: Option<FormattingRules>,
    ) -> Vec<TextEditSpan>;
    /// Completion candidates at `position`; `current_line`, `workspace`, and
    /// `trigger_char` provide extra context when available.
    fn completion(
        &self,
        document: &Document,
        position: AstPosition,
        current_line: Option<&str>,
        workspace: Option<&CompletionWorkspace>,
        trigger_char: Option<&str>,
    ) -> Vec<CompletionCandidate>;
    /// Execute a server command by name with raw JSON arguments.
    fn execute_command(&self, command: &str, arguments: &[Value]) -> Result<Option<Value>>;
}
109
/// Stock [`FeatureProvider`] that forwards every request to the free
/// functions in `crate::features` and `lex_analysis`.
#[derive(Default)]
pub struct DefaultFeatureProvider;

impl DefaultFeatureProvider {
    /// Create a provider; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self
    }
}
118
119#[async_trait]
120impl FeatureProvider for DefaultFeatureProvider {
121 fn semantic_tokens(&self, document: &Document) -> Vec<LexSemanticToken> {
122 collect_semantic_tokens(document)
123 }
124
125 fn document_symbols(&self, document: &Document) -> Vec<LexDocumentSymbol> {
126 collect_document_symbols(document)
127 }
128
129 fn folding_ranges(&self, document: &Document) -> Vec<LexFoldingRange> {
130 collect_folding_ranges(document)
131 }
132
133 fn hover(&self, document: &Document, position: AstPosition) -> Option<HoverResult> {
134 compute_hover(document, position)
135 }
136
137 fn goto_definition(&self, document: &Document, position: AstPosition) -> Vec<AstRange> {
138 goto_definition(document, position)
139 }
140
141 fn references(
142 &self,
143 document: &Document,
144 position: AstPosition,
145 include_declaration: bool,
146 ) -> Vec<AstRange> {
147 find_references(document, position, include_declaration)
148 }
149
150 fn document_links(&self, document: &Document) -> Vec<AstDocumentLink> {
151 collect_document_links(document)
152 }
153
154 fn format_document(
155 &self,
156 document: &Document,
157 source: &str,
158 rules: Option<FormattingRules>,
159 ) -> Vec<TextEditSpan> {
160 formatting::format_document(document, source, rules)
161 }
162
163 fn format_range(
164 &self,
165 document: &Document,
166 source: &str,
167 range: FormattingLineRange,
168 rules: Option<FormattingRules>,
169 ) -> Vec<TextEditSpan> {
170 formatting::format_range(document, source, range, rules)
171 }
172
173 fn completion(
174 &self,
175 document: &Document,
176 position: AstPosition,
177 current_line: Option<&str>,
178 workspace: Option<&CompletionWorkspace>,
179 trigger_char: Option<&str>,
180 ) -> Vec<CompletionCandidate> {
181 completion_items(document, position, current_line, workspace, trigger_char)
182 }
183
184 fn execute_command(&self, command: &str, arguments: &[Value]) -> Result<Option<Value>> {
185 execute_command(command, arguments)
186 }
187}
188
/// A successfully parsed open document: the AST together with the exact
/// source text it was parsed from, so byte offsets in the AST stay valid.
#[derive(Clone)]
struct DocumentEntry {
    // Parsed AST, shared cheaply across feature requests.
    document: Arc<Document>,
    // Full source text backing `document`.
    text: Arc<String>,
}
194
/// Cache of open documents keyed by URI. A `None` value marks a document
/// whose latest text failed to parse — the URI is still open, but there is
/// no usable AST for it.
#[derive(Default)]
struct DocumentStore {
    entries: RwLock<HashMap<Url, Option<DocumentEntry>>>,
}
199
200impl DocumentStore {
201 async fn upsert(&self, uri: Url, text: String) -> Option<DocumentEntry> {
202 let parsed = match parsing::parse_document(&text) {
203 Ok(document) => Some(DocumentEntry {
204 document: Arc::new(document),
205 text: Arc::new(text),
206 }),
207 Err(_) => None,
208 };
209 self.entries.write().await.insert(uri, parsed.clone());
210 parsed
211 }
212
213 async fn get(&self, uri: &Url) -> Option<DocumentEntry> {
214 self.entries.read().await.get(uri).cloned().flatten()
215 }
216
217 async fn remove(&self, uri: &Url) {
218 self.entries.write().await.remove(uri);
219 }
220}
221
222fn document_directory_from_uri(uri: &Url) -> Option<PathBuf> {
223 uri.to_file_path()
224 .ok()
225 .and_then(|path| path.parent().map(|parent| parent.to_path_buf()))
226}
227
228fn indent_level_from_position(
229 entry: &DocumentEntry,
230 position: &Position,
231 rules: &FormattingRules,
232) -> usize {
233 let indent_unit = rules.indent_string.as_str();
234 if indent_unit.is_empty() {
235 return 0;
236 }
237 let indent_len = indent_unit.len();
238 let line = entry.text.lines().nth(position.line as usize).unwrap_or("");
239 let prefix: String = line.chars().take(position.character as usize).collect();
240 let mut level = 0;
241 let mut remainder = prefix.as_str();
242 while remainder.starts_with(indent_unit) {
243 level += 1;
244 remainder = &remainder[indent_len..];
245 }
246 level
247}
248
/// Build the semantic-token legend advertised at initialize time.
/// Index positions here must line up with the `token_type` indices produced
/// by `encode_semantic_tokens`, since both derive from `SEMANTIC_TOKEN_KINDS`.
fn semantic_tokens_legend() -> SemanticTokensLegend {
    SemanticTokensLegend {
        token_types: SEMANTIC_TOKEN_KINDS
            .iter()
            .map(|kind| SemanticTokenType::new(kind.as_str()))
            .collect(),
        token_modifiers: Vec::new(),
    }
}
258
/// The lex language server. Generic over the client (`C`) and the feature
/// implementation (`P`) so both can be mocked in tests.
pub struct LexLanguageServer<C = Client, P = DefaultFeatureProvider> {
    // Client handle used to push diagnostics/messages back to the editor.
    _client: C,
    // Cache of open documents and their parsed ASTs.
    documents: DocumentStore,
    // Language-feature implementation.
    features: Arc<P>,
    // Workspace roots discovered at initialize time (used for completion context).
    workspace_roots: RwLock<Vec<PathBuf>>,
    // Effective configuration; reloaded on initialize and configuration changes.
    config: RwLock<LexConfig>,
}
266
impl LexLanguageServer<Client, DefaultFeatureProvider> {
    /// Production constructor: real client, default feature set.
    pub fn new(client: Client) -> Self {
        Self::with_features(client, Arc::new(DefaultFeatureProvider::new()))
    }
}
272
impl<C, P> LexLanguageServer<C, P>
where
    C: LspClient,
    P: FeatureProvider,
{
    /// Construct a server with an explicit feature provider (used by tests).
    /// Config starts from defaults/global paths; `initialize` reloads it with
    /// the workspace root once that is known.
    pub fn with_features(client: C, features: Arc<P>) -> Self {
        let config = load_config(None);
        Self {
            _client: client,
            documents: DocumentStore::default(),
            features,
            workspace_roots: RwLock::new(Vec::new()),
            config: RwLock::new(config),
        }
    }

    /// (Re)parse `text`, cache it under `uri`, and publish fresh diagnostics.
    /// On parse failure the cache records `None` and no diagnostics are
    /// published, leaving any previously published set in place.
    async fn parse_and_store(&self, uri: Url, text: String) {
        if let Some(entry) = self.documents.upsert(uri.clone(), text).await {
            let analysis_diags = analyze_diagnostics(&entry.document);
            let diagnostics: Vec<_> = analysis_diags.into_iter().map(to_lsp_diagnostic).collect();

            self._client
                .publish_diagnostics(uri, diagnostics, None)
                .await;
        }
    }

    /// Cached entry (AST + text) for `uri`, if parsed successfully.
    async fn document_entry(&self, uri: &Url) -> Option<DocumentEntry> {
        self.documents.get(uri).await
    }

    /// Cached AST for `uri`, if parsed successfully.
    async fn document(&self, uri: &Url) -> Option<Arc<Document>> {
        self.document_entry(uri).await.map(|entry| entry.document)
    }

    /// Derive workspace roots from initialize params: prefer workspace
    /// folders, then the deprecated root_uri/root_path fields, finally the
    /// process working directory.
    #[allow(deprecated)]
    async fn update_workspace_roots(&self, params: &InitializeParams) {
        let mut roots = Vec::new();

        if let Some(folders) = params.workspace_folders.as_ref() {
            for folder in folders {
                if let Ok(path) = folder.uri.to_file_path() {
                    roots.push(path);
                }
            }
        }

        if roots.is_empty() {
            if let Some(root_uri) = params.root_uri.as_ref() {
                if let Ok(path) = root_uri.to_file_path() {
                    roots.push(path);
                }
            } else if let Some(root_path) = params.root_path.as_ref() {
                roots.push(PathBuf::from(root_path));
            } else if let Ok(current_dir) = std::env::current_dir() {
                roots.push(current_dir);
            }
        }

        *self.workspace_roots.write().await = roots;
    }

    /// Build the completion workspace for `uri`: the deepest matching
    /// workspace root, falling back to the document's own directory, then to
    /// the document path itself.
    async fn workspace_context_for_uri(&self, uri: &Url) -> Option<CompletionWorkspace> {
        let document_path = uri.to_file_path().ok()?;
        let roots = self.workspace_roots.read().await;
        let project_root = best_matching_root(&roots, &document_path)
            .or_else(|| document_directory_from_uri(uri))
            .or_else(|| document_path.parent().map(|path| path.to_path_buf()))
            .unwrap_or_else(|| document_path.clone());

        Some(CompletionWorkspace {
            project_root,
            document_path,
        })
    }

    /// Start from the configured formatting rules and layer the client's
    /// per-request formatting options on top.
    async fn resolve_formatting_rules(&self, options: &FormattingOptions) -> FormattingRules {
        let config = self.config.read().await;
        let mut rules = FormattingRules::from(&config.formatting.rules);

        apply_formatting_overrides(&mut rules, options);

        rules
    }
}
361
/// Load configuration via Clapfig. With a known workspace root, search the
/// platform config dirs plus that root; otherwise walk ancestors up to a
/// `.git` marker and the cwd. If loading fails (e.g. malformed file), fall
/// back to compiled defaults with env lookup disabled.
fn load_config(workspace_root: Option<&Path>) -> LexConfig {
    let mut search_paths = vec![SearchPath::Platform];
    if let Some(root) = workspace_root {
        search_paths.push(SearchPath::Path(root.to_path_buf()));
    } else {
        search_paths.push(SearchPath::Ancestors(Boundary::Marker(".git")));
        search_paths.push(SearchPath::Cwd);
    }
    Clapfig::builder::<LexConfig>()
        .app_name("lex")
        .file_name(CONFIG_FILE_NAME)
        .search_paths(search_paths)
        .load()
        .unwrap_or_else(|_| {
            // Last resort: no files, no env — only compiled-in defaults.
            Clapfig::builder::<LexConfig>()
                .app_name("lex")
                .no_env()
                .search_paths(vec![])
                .load()
                .expect("compiled defaults must load")
        })
}
386
/// Pick the deepest workspace root that is an ancestor of `document_path`
/// (deepest = most path components), or `None` if no root contains it.
fn best_matching_root(roots: &[PathBuf], document_path: &Path) -> Option<PathBuf> {
    let mut best: Option<&PathBuf> = None;
    for root in roots {
        if !document_path.starts_with(root) {
            continue;
        }
        let depth = root.components().count();
        if best.map_or(true, |current| depth >= current.components().count()) {
            best = Some(root);
        }
    }
    best.cloned()
}
394
/// Convert an AST position to an LSP position.
/// NOTE(review): assumes AST line/column are already zero-based like LSP's —
/// confirm against `lex_core`'s position convention.
fn to_lsp_position(position: &AstPosition) -> Position {
    Position::new(position.line as u32, position.column as u32)
}
398
/// Convert an AST range to an LSP range (both endpoints converted as-is).
fn to_lsp_range(range: &AstRange) -> Range {
    Range {
        start: to_lsp_position(&range.start),
        end: to_lsp_position(&range.end),
    }
}
405
/// Pair a (cloned) document URI with an AST range as an LSP `Location`.
fn to_lsp_location(uri: &Url, range: &AstRange) -> Location {
    Location {
        uri: uri.clone(),
        range: to_lsp_range(range),
    }
}
412
/// Convert byte-offset formatting spans into LSP `TextEdit`s by mapping each
/// span boundary to a line/column position via `SourceLocation`. The locator
/// is built only when there is at least one span to convert.
fn spans_to_text_edits(text: &str, spans: Vec<TextEditSpan>) -> Vec<TextEdit> {
    if spans.is_empty() {
        return Vec::new();
    }
    let locator = SourceLocation::new(text);
    spans
        .into_iter()
        .map(|span| TextEdit {
            range: Range {
                start: to_lsp_position(&locator.byte_to_position(span.start)),
                end: to_lsp_position(&locator.byte_to_position(span.end)),
            },
            new_text: span.new_text,
        })
        .collect()
}
429
430fn to_formatting_line_range(range: &Range) -> FormattingLineRange {
431 let start = range.start.line as usize;
432 let mut end = range.end.line as usize;
433 if range.end.character > 0 || end == start {
434 end += 1;
435 }
436 FormattingLineRange { start, end }
437}
438
439use lsp_types::{FormattingOptions, FormattingProperty};
440
/// Overlay client-supplied formatting properties (keys in the `lex.*`
/// namespace) onto `rules`. Unknown keys and type-mismatched values are
/// silently ignored; numeric values are clamped at zero before the cast.
fn apply_formatting_overrides(rules: &mut FormattingRules, options: &FormattingOptions) {
    for (key, value) in &options.properties {
        match key.as_str() {
            "lex.session_blank_lines_before" => {
                if let FormattingProperty::Number(n) = value {
                    rules.session_blank_lines_before = (*n).max(0) as usize;
                }
            }
            "lex.session_blank_lines_after" => {
                if let FormattingProperty::Number(n) = value {
                    rules.session_blank_lines_after = (*n).max(0) as usize;
                }
            }
            "lex.normalize_seq_markers" => {
                if let FormattingProperty::Bool(b) = value {
                    rules.normalize_seq_markers = *b;
                }
            }
            "lex.unordered_seq_marker" => {
                // Only the first character of the supplied string is used.
                if let FormattingProperty::String(s) = value {
                    if let Some(c) = s.chars().next() {
                        rules.unordered_seq_marker = c;
                    }
                }
            }
            "lex.max_blank_lines" => {
                if let FormattingProperty::Number(n) = value {
                    rules.max_blank_lines = (*n).max(0) as usize;
                }
            }
            "lex.indent_string" => {
                if let FormattingProperty::String(s) = value {
                    rules.indent_string = s.clone();
                }
            }
            "lex.preserve_trailing_blanks" => {
                if let FormattingProperty::Bool(b) = value {
                    rules.preserve_trailing_blanks = *b;
                }
            }
            "lex.normalize_verbatim_markers" => {
                if let FormattingProperty::Bool(b) = value {
                    rules.normalize_verbatim_markers = *b;
                }
            }
            _ => {}
        }
    }
}
502
/// Convert an LSP position to the AST position type (widening casts only).
fn from_lsp_position(position: Position) -> AstPosition {
    AstPosition::new(position.line as usize, position.character as usize)
}
506
/// Encode semantic tokens into the LSP wire format: each token is expressed
/// as a (delta_line, delta_start, length, type, modifiers) tuple relative to
/// the previous token. Multi-line tokens are first split into per-line
/// segments; zero-length segments are dropped.
fn encode_semantic_tokens(tokens: &[LexSemanticToken], text: &str) -> Vec<SemanticToken> {
    let line_offsets = compute_line_offsets(text);
    let mut data = Vec::new();
    // Running position of the previously emitted token, for delta encoding.
    let mut prev_line = 0u32;
    let mut prev_start = 0u32;

    for token in tokens {
        // Map the token kind to its index in the advertised legend
        // (see `semantic_tokens_legend`); unknown kinds fall back to 0.
        let token_type_index = SEMANTIC_TOKEN_KINDS
            .iter()
            .position(|kind| *kind == token.kind)
            .unwrap_or(0) as u32;
        for (line, start, length) in split_token_on_lines(token, text, &line_offsets) {
            if length == 0 {
                continue;
            }
            let delta_line = line.saturating_sub(prev_line);
            // delta_start is relative to the previous token only when both
            // are on the same line; otherwise it is an absolute column.
            let delta_start = if delta_line == 0 {
                start.saturating_sub(prev_start)
            } else {
                start
            };
            data.push(SemanticToken {
                delta_line,
                delta_start,
                length,
                token_type: token_type_index,
                token_modifiers_bitset: 0,
            });
            prev_line = line;
            prev_start = start;
        }
    }

    data
}
542
/// Byte offset of the start of every line in `text`. The first line starts
/// at 0; each subsequent entry is the byte immediately after a `'\n'`.
fn compute_line_offsets(text: &str) -> Vec<usize> {
    std::iter::once(0)
        .chain(
            text.char_indices()
                .filter(|&(_, ch)| ch == '\n')
                .map(|(idx, _)| idx + 1),
        )
        .collect()
}
552
/// Split a (possibly multi-line) token into per-line `(line, start_col, len)`
/// segments, with columns measured in bytes from the line start. Out-of-range
/// spans yield no segments.
///
/// NOTE(review): the bounds check guards span length only; `&text[span]` will
/// still panic if a span edge falls inside a multi-byte character —
/// presumably the parser only produces char-boundary spans; confirm.
fn split_token_on_lines(
    token: &LexSemanticToken,
    text: &str,
    line_offsets: &[usize],
) -> Vec<(u32, u32, u32)> {
    let span = &token.range.span;
    if span.start > text.len() || span.end > text.len() {
        return Vec::new();
    }
    let slice = &text[span.clone()];
    let mut segments = Vec::new();
    let mut current_line = token.range.start.line as u32;
    // Byte index within `slice` where the current segment begins.
    let mut segment_start = 0;
    let base_offset = token.range.span.start;

    for (idx, ch) in slice.char_indices() {
        if ch == '\n' {
            // Close the segment accumulated on this line (if non-empty).
            if idx > segment_start {
                let length = (idx - segment_start) as u32;
                let absolute_start = base_offset + segment_start;
                let line_offset = line_offsets
                    .get(current_line as usize)
                    .copied()
                    .unwrap_or(0);
                let start_col = (absolute_start.saturating_sub(line_offset)) as u32;
                segments.push((current_line, start_col, length));
            }
            current_line += 1;
            segment_start = idx + ch.len_utf8();
        }
    }

    // Trailing segment after the last newline (or the whole slice if none).
    if slice.len() > segment_start {
        let length = (slice.len() - segment_start) as u32;
        let absolute_start = base_offset + segment_start;
        let line_offset = line_offsets
            .get(current_line as usize)
            .copied()
            .unwrap_or(0);
        let start_col = (absolute_start.saturating_sub(line_offset)) as u32;
        segments.push((current_line, start_col, length));
    }

    segments
}
605
/// Recursively convert a lex symbol tree node into an LSP `DocumentSymbol`.
/// The deprecated `deprecated` field must still be populated (with `None`)
/// because the struct requires it — hence the allow.
#[allow(deprecated)]
fn to_document_symbol(symbol: &LexDocumentSymbol) -> DocumentSymbol {
    DocumentSymbol {
        name: symbol.name.clone(),
        detail: symbol.detail.clone(),
        kind: symbol.kind,
        deprecated: None,
        range: to_lsp_range(&symbol.range),
        selection_range: to_lsp_range(&symbol.selection_range),
        // Empty child lists become `None` rather than `Some(vec![])`.
        children: if symbol.children.is_empty() {
            None
        } else {
            Some(symbol.children.iter().map(to_document_symbol).collect())
        },
        tags: None,
    }
}
623
/// Convert a lex folding range to the LSP type (field-for-field copy).
fn to_lsp_folding_range(range: &LexFoldingRange) -> FoldingRange {
    FoldingRange {
        start_line: range.start_line,
        start_character: range.start_character,
        end_line: range.end_line,
        end_character: range.end_character,
        kind: range.kind.clone(),
        collapsed_text: None,
    }
}
634
/// Convert a completion candidate to an LSP `CompletionItem`; fields the
/// candidate does not carry are left at their defaults.
fn to_lsp_completion_item(candidate: &CompletionCandidate) -> CompletionItem {
    CompletionItem {
        label: candidate.label.clone(),
        kind: Some(candidate.kind),
        detail: candidate.detail.clone(),
        insert_text: candidate.insert_text.clone(),
        ..Default::default()
    }
}
644
/// Build an LSP `DocumentLink` for an AST link, or `None` when the target
/// cannot be resolved to a URI.
fn build_document_link(uri: &Url, link: &AstDocumentLink) -> Option<DocumentLink> {
    let target = link_target_uri(uri, link)?;
    Some(DocumentLink {
        range: to_lsp_range(&link.range),
        target: Some(target),
        tooltip: None,
        data: None,
    })
}
654
/// Resolve a link's raw target to a URI: URL targets are parsed directly;
/// file-like targets are resolved relative to the containing document.
fn link_target_uri(document_uri: &Url, link: &AstDocumentLink) -> Option<Url> {
    match link.link_type {
        LinkType::Url => Url::parse(&link.target).ok(),
        LinkType::File | LinkType::VerbatimSrc => {
            resolve_file_like_target(document_uri, &link.target)
        }
    }
}
663
664fn resolve_file_like_target(document_uri: &Url, target: &str) -> Option<Url> {
665 if target.is_empty() {
666 return None;
667 }
668 let path = Path::new(target);
669 if path.is_absolute() {
670 return Url::from_file_path(path).ok();
671 }
672 if document_uri.scheme() == "file" {
673 let mut base = document_uri.to_file_path().ok()?;
674 base.pop();
675 base.push(target);
676 Url::from_file_path(base).ok()
677 } else {
678 parent_directory_uri(document_uri).join(target).ok()
679 }
680}
681
682fn parent_directory_uri(uri: &Url) -> Url {
683 let mut base = uri.clone();
684 let mut path = base.path().to_string();
685 if let Some(idx) = path.rfind('/') {
686 path.truncate(idx + 1);
687 } else {
688 path.push('/');
689 }
690 base.set_path(&path);
691 base.set_query(None);
692 base.set_fragment(None);
693 base
694}
695
696#[async_trait]
697impl<C, P> tower_lsp::LanguageServer for LexLanguageServer<C, P>
698where
699 C: LspClient,
700 P: FeatureProvider,
701{
    /// LSP `initialize`: record the workspace roots, reload config relative to
    /// the first root, and advertise the server's capabilities (full-text
    /// sync, hover, symbols, folding, definition/references, links, code
    /// actions, completion, formatting, semantic tokens, and commands).
    async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
        self.update_workspace_roots(&params).await;

        // Reload config now that a workspace root may be known.
        {
            let roots = self.workspace_roots.read().await;
            let root = roots.first().map(|p| p.as_path());
            *self.config.write().await = load_config(root);
        }

        let capabilities = ServerCapabilities {
            text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)),
            hover_provider: Some(HoverProviderCapability::Simple(true)),
            document_symbol_provider: Some(OneOf::Left(true)),
            folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
            definition_provider: Some(OneOf::Left(true)),
            references_provider: Some(OneOf::Left(true)),
            document_link_provider: Some(DocumentLinkOptions {
                work_done_progress_options: WorkDoneProgressOptions::default(),
                resolve_provider: Some(false),
            }),
            code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
            completion_provider: Some(CompletionOptions {
                resolve_provider: Some(false),
                trigger_characters: Some(vec![
                    "[".to_string(),
                    ":".to_string(),
                    "=".to_string(),
                    "@".to_string(),
                ]),
                work_done_progress_options: WorkDoneProgressOptions::default(),
                all_commit_characters: None,
                ..Default::default()
            }),
            document_formatting_provider: Some(OneOf::Left(true)),
            document_range_formatting_provider: Some(OneOf::Left(true)),
            semantic_tokens_provider: Some(
                lsp_types::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    SemanticTokensOptions {
                        work_done_progress_options: WorkDoneProgressOptions::default(),
                        legend: semantic_tokens_legend(),
                        range: None,
                        full: Some(SemanticTokensFullOptions::Bool(true)),
                    },
                ),
            ),
            execute_command_provider: Some(ExecuteCommandOptions {
                commands: vec![
                    commands::COMMAND_ECHO.to_string(),
                    commands::COMMAND_IMPORT.to_string(),
                    commands::COMMAND_EXPORT.to_string(),
                    commands::COMMAND_NEXT_ANNOTATION.to_string(),
                    commands::COMMAND_RESOLVE_ANNOTATION.to_string(),
                    commands::COMMAND_TOGGLE_ANNOTATIONS.to_string(),
                    commands::COMMAND_INSERT_ASSET.to_string(),
                    commands::COMMAND_INSERT_VERBATIM.to_string(),
                    commands::COMMAND_FOOTNOTES_REORDER.to_string(),
                    commands::COMMAND_TABLE_FORMAT.to_string(),
                ],
                work_done_progress_options: WorkDoneProgressOptions::default(),
            }),
            ..ServerCapabilities::default()
        };

        Ok(InitializeResult {
            capabilities,
            server_info: Some(ServerInfo {
                name: "lex-lsp".to_string(),
                version: Some(env!("CARGO_PKG_VERSION").to_string()),
            }),
        })
    }
774
    /// LSP `initialized` notification: nothing to do after the handshake.
    async fn initialized(&self, _: InitializedParams) {}
776
    /// LSP `shutdown`: no state to tear down; always succeeds.
    async fn shutdown(&self) -> Result<()> {
        Ok(())
    }
780
    /// Parse a newly opened document and publish its diagnostics.
    async fn did_open(&self, params: lsp_types::DidOpenTextDocumentParams) {
        let TextDocumentItem { uri, text, .. } = params.text_document;
        self.parse_and_store(uri, text).await;
    }
785
    /// On configuration change: reload config from disk (the payload itself is
    /// ignored), then re-parse every open document so diagnostics reflect the
    /// new settings.
    async fn did_change_configuration(&self, _params: DidChangeConfigurationParams) {
        {
            let roots = self.workspace_roots.read().await;
            let root = roots.first().map(|p| p.as_path());
            *self.config.write().await = load_config(root);
        }

        // Snapshot the URIs first so the store lock is not held across awaits.
        let uris: Vec<Url> = self
            .documents
            .entries
            .read()
            .await
            .keys()
            .cloned()
            .collect();

        for uri in uris {
            if let Some(entry) = self.documents.get(&uri).await {
                self.parse_and_store(uri, entry.text.to_string()).await;
            }
        }
    }
    /// Re-parse on change. With FULL sync (advertised in `initialize`) the
    /// last content change carries the complete document text.
    async fn did_change(&self, params: lsp_types::DidChangeTextDocumentParams) {
        if let Some(change) = params.content_changes.into_iter().last() {
            self.parse_and_store(params.text_document.uri, change.text)
                .await;
        }
    }
816
    /// Evict a closed document from the cache.
    async fn did_close(&self, params: lsp_types::DidCloseTextDocumentParams) {
        self.documents.remove(&params.text_document.uri).await;
    }
820
    /// Full-document semantic tokens, delta-encoded per the LSP spec.
    /// Returns `None` for unknown or unparsable documents.
    async fn semantic_tokens_full(
        &self,
        params: SemanticTokensParams,
    ) -> Result<Option<SemanticTokensResult>> {
        if let Some(entry) = self.document_entry(&params.text_document.uri).await {
            let DocumentEntry { document, text } = entry;
            let tokens = self.features.semantic_tokens(&document);
            let data = encode_semantic_tokens(&tokens, text.as_str());
            Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
                result_id: None,
                data,
            })))
        } else {
            Ok(None)
        }
    }
837
    /// Hierarchical (nested) document symbols for the outline view.
    async fn document_symbol(
        &self,
        params: DocumentSymbolParams,
    ) -> Result<Option<DocumentSymbolResponse>> {
        if let Some(document) = self.document(&params.text_document.uri).await {
            let symbols = self.features.document_symbols(&document);
            let converted: Vec<DocumentSymbol> = symbols.iter().map(to_document_symbol).collect();
            Ok(Some(DocumentSymbolResponse::Nested(converted)))
        } else {
            Ok(None)
        }
    }
850
    /// Hover: markdown content plus the range it applies to, or `None` when
    /// there is nothing at the cursor (or the document is unknown).
    async fn hover(&self, params: HoverParams) -> Result<Option<Hover>> {
        if let Some(document) = self
            .document(&params.text_document_position_params.text_document.uri)
            .await
        {
            let position = from_lsp_position(params.text_document_position_params.position);
            if let Some(result) = self.features.hover(&document, position) {
                return Ok(Some(Hover {
                    contents: HoverContents::Markup(MarkupContent {
                        kind: MarkupKind::Markdown,
                        value: result.contents,
                    }),
                    range: Some(to_lsp_range(&result.range)),
                }));
            }
        }
        Ok(None)
    }
869
    /// Folding ranges for the document, or `None` if it is not cached.
    async fn folding_range(&self, params: FoldingRangeParams) -> Result<Option<Vec<FoldingRange>>> {
        if let Some(document) = self.document(&params.text_document.uri).await {
            let ranges = self.features.folding_ranges(&document);
            Ok(Some(ranges.iter().map(to_lsp_folding_range).collect()))
        } else {
            Ok(None)
        }
    }
878
    /// Go-to-definition. All targets are within the same document, so each
    /// result reuses the request URI. An empty result maps to `None`.
    async fn goto_definition(
        &self,
        params: GotoDefinitionParams,
    ) -> Result<Option<GotoDefinitionResponse>> {
        let uri = params.text_document_position_params.text_document.uri;
        if let Some(document) = self.document(&uri).await {
            let position = from_lsp_position(params.text_document_position_params.position);
            let ranges = self.features.goto_definition(&document, position);
            if ranges.is_empty() {
                Ok(None)
            } else {
                let locations: Vec<Location> = ranges
                    .iter()
                    .map(|range| to_lsp_location(&uri, range))
                    .collect();
                Ok(Some(GotoDefinitionResponse::Array(locations)))
            }
        } else {
            Ok(None)
        }
    }
900
    /// Find-references within the same document, honoring the client's
    /// include-declaration flag. An empty result maps to `None`.
    async fn references(&self, params: ReferenceParams) -> Result<Option<Vec<Location>>> {
        let uri = params.text_document_position.text_document.uri;
        if let Some(document) = self.document(&uri).await {
            let position = from_lsp_position(params.text_document_position.position);
            let include_declaration = params.context.include_declaration;
            let ranges = self
                .features
                .references(&document, position, include_declaration);
            if ranges.is_empty() {
                Ok(None)
            } else {
                Ok(Some(
                    ranges
                        .iter()
                        .map(|range| to_lsp_location(&uri, range))
                        .collect(),
                ))
            }
        } else {
            Ok(None)
        }
    }
923
    /// Document links; targets that cannot be resolved to a URI are dropped.
    async fn document_link(&self, params: DocumentLinkParams) -> Result<Option<Vec<DocumentLink>>> {
        let uri = params.text_document.uri;
        if let Some(document) = self.document(&uri).await {
            let links = self.features.document_links(&document);
            let resolved: Vec<DocumentLink> = links
                .iter()
                .filter_map(|link| build_document_link(&uri, link))
                .collect();
            Ok(Some(resolved))
        } else {
            Ok(None)
        }
    }
937
    /// Whole-document formatting using the configured rules overlaid with the
    /// client's per-request options; spans are converted to positional edits.
    async fn formatting(&self, params: DocumentFormattingParams) -> Result<Option<Vec<TextEdit>>> {
        let uri = params.text_document.uri;
        if let Some(entry) = self.document_entry(&uri).await {
            let DocumentEntry { document, text } = entry;
            let rules = self.resolve_formatting_rules(&params.options).await;
            let edits = self
                .features
                .format_document(&document, text.as_str(), Some(rules));
            Ok(Some(spans_to_text_edits(text.as_str(), edits)))
        } else {
            Ok(None)
        }
    }
951
    /// Range formatting: the positional selection is first widened to a
    /// half-open line range (see `to_formatting_line_range`).
    async fn range_formatting(
        &self,
        params: DocumentRangeFormattingParams,
    ) -> Result<Option<Vec<TextEdit>>> {
        let uri = params.text_document.uri;
        if let Some(entry) = self.document_entry(&uri).await {
            let DocumentEntry { document, text } = entry;
            let line_range = to_formatting_line_range(&params.range);
            let rules = self.resolve_formatting_rules(&params.options).await;
            let edits =
                self.features
                    .format_range(&document, text.as_str(), line_range, Some(rules));
            Ok(Some(spans_to_text_edits(text.as_str(), edits)))
        } else {
            Ok(None)
        }
    }
969
    /// Completion: gathers workspace context, the trigger character (if the
    /// request was trigger-based), and the current line's text, then converts
    /// the provider's candidates to LSP items.
    async fn completion(&self, params: CompletionParams) -> Result<Option<CompletionResponse>> {
        let uri = params.text_document_position.text_document.uri;
        if let Some(entry) = self.document_entry(&uri).await {
            let DocumentEntry { document, text } = entry;
            let position = from_lsp_position(params.text_document_position.position);
            let workspace = self.workspace_context_for_uri(&uri).await;

            let trigger_char = params
                .context
                .as_ref()
                .and_then(|ctx| ctx.trigger_character.as_deref());

            // `None` when the cursor line is beyond the document's last line.
            let current_line = text.lines().nth(position.line);

            let candidates = self.features.completion(
                &document,
                position,
                current_line,
                workspace.as_ref(),
                trigger_char,
            );
            let items: Vec<CompletionItem> =
                candidates.iter().map(to_lsp_completion_item).collect();
            Ok(Some(CompletionResponse::Array(items)))
        } else {
            Ok(None)
        }
    }
1000
    /// Code actions computed by `crate::features::available_actions` for the
    /// requested range; an empty set maps to `None`.
    async fn code_action(&self, params: CodeActionParams) -> Result<Option<CodeActionResponse>> {
        let mut actions = Vec::new();

        if let Some(entry) = self.documents.get(&params.text_document.uri).await {
            let lex_actions = crate::features::available_actions::compute_actions(
                &entry.document,
                &entry.text,
                &params,
            );
            for action in lex_actions {
                actions.push(tower_lsp::lsp_types::CodeActionOrCommand::CodeAction(
                    action,
                ));
            }
        }

        if actions.is_empty() {
            Ok(None)
        } else {
            Ok(Some(actions))
        }
    }
1023
1024 async fn execute_command(&self, params: ExecuteCommandParams) -> Result<Option<Value>> {
1025 let command = params.command.as_str();
1026 match command {
1027 commands::COMMAND_NEXT_ANNOTATION | commands::COMMAND_PREVIOUS_ANNOTATION => {
1028 let uri_str = params.arguments.first().and_then(|v| v.as_str());
1029 let pos_val = params.arguments.get(1);
1030
1031 if let (Some(uri_str), Some(pos_val)) = (uri_str, pos_val) {
1032 if let Ok(uri) = Url::parse(uri_str) {
1033 if let Ok(position) = serde_json::from_value::<Position>(pos_val.clone()) {
1034 if let Some(document) = self.document(&uri).await {
1035 let ast_pos = from_lsp_position(position);
1036 let navigation = if command == commands::COMMAND_NEXT_ANNOTATION {
1037 lex_analysis::annotations::next_annotation(&document, ast_pos)
1038 } else {
1039 lex_analysis::annotations::previous_annotation(
1040 &document, ast_pos,
1041 )
1042 };
1043
1044 if let Some(result) = navigation {
1045 let location = to_lsp_location(&uri, &result.header);
1046 return Ok(Some(
1047 serde_json::to_value(location)
1048 .map_err(|_| Error::internal_error())?,
1049 ));
1050 }
1051 }
1052 }
1053 }
1054 }
1055 Ok(None)
1056 }
1057 commands::COMMAND_RESOLVE_ANNOTATION | commands::COMMAND_TOGGLE_ANNOTATIONS => {
1058 let uri_str = params.arguments.first().and_then(|v| v.as_str());
1059 let pos_val = params.arguments.get(1);
1060
1061 if let (Some(uri_str), Some(pos_val)) = (uri_str, pos_val) {
1062 if let Ok(uri) = Url::parse(uri_str) {
1063 if let Ok(position) = serde_json::from_value::<Position>(pos_val.clone()) {
1064 if let Some(document) = self.document(&uri).await {
1065 let ast_pos = from_lsp_position(position);
1066 let _resolved = command == commands::COMMAND_RESOLVE_ANNOTATION;
1067
1068 let target_state =
1079 if command == commands::COMMAND_RESOLVE_ANNOTATION {
1080 true
1081 } else {
1082 if let Some(annotation) =
1084 lex_analysis::utils::find_annotation_at_position(
1085 &document, ast_pos,
1086 )
1087 {
1088 let is_resolved =
1089 annotation.data.parameters.iter().any(|p| {
1090 p.key == "status" && p.value == "resolved"
1091 });
1092 !is_resolved
1093 } else {
1094 return Ok(None);
1095 }
1096 };
1097
1098 if let Some(edit) =
1099 lex_analysis::annotations::toggle_annotation_resolution(
1100 &document,
1101 ast_pos,
1102 target_state,
1103 )
1104 {
1105 let text_edit = TextEdit {
1106 range: to_lsp_range(&edit.range),
1107 new_text: edit.new_text,
1108 };
1109 let mut changes = HashMap::new();
1110 changes.insert(uri, vec![text_edit]);
1111 let workspace_edit = tower_lsp::lsp_types::WorkspaceEdit {
1112 changes: Some(changes),
1113 ..Default::default()
1114 };
1115 return Ok(Some(
1116 serde_json::to_value(workspace_edit)
1117 .map_err(|_| Error::internal_error())?,
1118 ));
1119 }
1120 }
1121 }
1122 }
1123 }
1124 Ok(None)
1125 }
1126 commands::COMMAND_INSERT_ASSET => {
1127 let uri_str = params.arguments.first().and_then(|v| v.as_str());
1128 let pos_val = params.arguments.get(1);
1129 let path_val = params.arguments.get(2).and_then(|v| v.as_str());
1130
1131 if let (Some(uri_str), Some(pos_val), Some(path)) = (uri_str, pos_val, path_val) {
1132 if let Ok(uri) = Url::parse(uri_str) {
1133 if let Ok(position) = serde_json::from_value::<Position>(pos_val.clone()) {
1134 let file_path = PathBuf::from(path);
1135 let rules = FormattingRules::default();
1136 let entry = self.document_entry(&uri).await;
1137 let indent_level = entry
1138 .as_ref()
1139 .map(|entry| indent_level_from_position(entry, &position, &rules))
1140 .unwrap_or(0);
1141 let document_directory = document_directory_from_uri(&uri);
1142 let snippet = {
1143 let request = AssetSnippetRequest {
1144 asset_path: file_path.as_path(),
1145 document_directory: document_directory.as_deref(),
1146 formatting: &rules,
1147 indent_level,
1148 };
1149 build_asset_snippet(&request)
1150 };
1151
1152 return Ok(Some(json!({
1153 "text": snippet.text,
1154 "cursorOffset": snippet.cursor_offset,
1155 })));
1156 }
1157 }
1158 }
1159 Ok(None)
1160 }
1161 commands::COMMAND_INSERT_VERBATIM => {
1162 let uri_str = params.arguments.first().and_then(|v| v.as_str());
1163 let pos_val = params.arguments.get(1);
1164 let path_val = params.arguments.get(2).and_then(|v| v.as_str());
1165
1166 if let (Some(uri_str), Some(pos_val), Some(path)) = (uri_str, pos_val, path_val) {
1167 if let Ok(uri) = Url::parse(uri_str) {
1168 if let Ok(position) = serde_json::from_value::<Position>(pos_val.clone()) {
1169 let file_path = PathBuf::from(path);
1170 let rules = FormattingRules::default();
1171 let entry = self.document_entry(&uri).await;
1172 let indent_level = entry
1173 .as_ref()
1174 .map(|entry| indent_level_from_position(entry, &position, &rules))
1175 .unwrap_or(0);
1176 let document_directory = document_directory_from_uri(&uri);
1177 let snippet_result = {
1178 let mut request =
1179 VerbatimSnippetRequest::new(file_path.as_path(), &rules);
1180 request.document_directory = document_directory.as_deref();
1181 request.indent_level = indent_level;
1182 build_verbatim_snippet(&request)
1183 };
1184
1185 match snippet_result {
1186 Ok(snippet) => {
1187 return Ok(Some(json!({
1188 "text": snippet.text,
1189 "cursorOffset": snippet.cursor_offset,
1190 })));
1191 }
1192 Err(err) => {
1193 return Err(Error::invalid_params(format!(
1194 "Failed to insert verbatim block: {err}"
1195 )));
1196 }
1197 }
1198 }
1199 }
1200 }
1201 Ok(None)
1202 }
1203 _ => self
1204 .features
1205 .execute_command(¶ms.command, ¶ms.arguments),
1206 }
1207 }
1208}
1209
1210fn to_lsp_diagnostic(diag: AnalysisDiagnostic) -> Diagnostic {
1211 let severity = match diag.kind {
1212 DiagnosticKind::MissingFootnoteDefinition => {
1213 tower_lsp::lsp_types::DiagnosticSeverity::ERROR
1214 }
1215 DiagnosticKind::UnusedFootnoteDefinition => {
1216 tower_lsp::lsp_types::DiagnosticSeverity::WARNING
1217 }
1218 DiagnosticKind::TableInconsistentColumns => {
1219 tower_lsp::lsp_types::DiagnosticSeverity::WARNING
1220 }
1221 };
1222
1223 let code = match diag.kind {
1224 DiagnosticKind::MissingFootnoteDefinition => "missing-footnote",
1225 DiagnosticKind::UnusedFootnoteDefinition => "unused-footnote",
1226 DiagnosticKind::TableInconsistentColumns => "table-inconsistent-columns",
1227 };
1228
1229 Diagnostic {
1230 range: to_lsp_range(&diag.range),
1231 severity: Some(severity),
1232 code: Some(tower_lsp::lsp_types::NumberOrString::String(
1233 code.to_string(),
1234 )),
1235 code_description: None,
1236 source: Some("lex".to_string()),
1237 message: diag.message,
1238 related_information: None,
1239 tags: None,
1240 data: None,
1241 }
1242}
1243
// Unit tests for the language server. A no-op LSP client plus a mock feature
// provider (with per-method call counters) let each handler be exercised in
// isolation; fixture content comes from `lex_analysis::test_support`.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::features::semantic_tokens::LexSemanticTokenKind;
    use lex_analysis::test_support::sample_source;
    use serde::Deserialize;
    use std::fs;
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::Mutex;
    use tempfile::tempdir;
    use tower_lsp::lsp_types::{
        CompletionItemKind, DidOpenTextDocumentParams, DocumentFormattingParams,
        DocumentLinkParams, DocumentRangeFormattingParams, DocumentSymbolParams, FoldingRangeKind,
        FoldingRangeParams, FormattingOptions, GotoDefinitionParams, HoverParams, Position, Range,
        ReferenceContext, ReferenceParams, SemanticTokensParams, SymbolKind,
        TextDocumentIdentifier, TextDocumentItem, TextDocumentPositionParams,
    };
    use tower_lsp::LanguageServer;

    /// Client stub that silently discards all outgoing notifications.
    #[derive(Clone, Default)]
    struct NoopClient;
    #[async_trait]
    impl LspClient for NoopClient {
        async fn publish_diagnostics(&self, _: Url, _: Vec<Diagnostic>, _: Option<i32>) {}
        async fn show_message(&self, _: MessageType, _: String) {}
    }

    /// `FeatureProvider` stub: each method bumps its call counter, records
    /// interesting arguments (hover position, references flag), and returns a
    /// small fixed value so handler plumbing can be asserted on.
    #[derive(Default)]
    struct MockFeatureProvider {
        semantic_tokens_called: AtomicUsize,
        document_symbols_called: AtomicUsize,
        hover_called: AtomicUsize,
        folding_called: AtomicUsize,
        last_hover_position: Mutex<Option<AstPosition>>,
        definition_called: AtomicUsize,
        references_called: AtomicUsize,
        document_links_called: AtomicUsize,
        last_references_include: Mutex<Option<bool>>,
        formatting_called: AtomicUsize,
        range_formatting_called: AtomicUsize,
        completion_called: AtomicUsize,
        execute_command_called: AtomicUsize,
    }

    impl FeatureProvider for MockFeatureProvider {
        fn semantic_tokens(&self, _: &Document) -> Vec<LexSemanticToken> {
            self.semantic_tokens_called.fetch_add(1, Ordering::SeqCst);
            vec![LexSemanticToken {
                kind: LexSemanticTokenKind::DocumentTitle,
                range: AstRange::new(0..5, AstPosition::new(0, 0), AstPosition::new(0, 5)),
            }]
        }

        fn document_symbols(&self, _: &Document) -> Vec<LexDocumentSymbol> {
            self.document_symbols_called.fetch_add(1, Ordering::SeqCst);
            vec![LexDocumentSymbol {
                name: "symbol".into(),
                detail: None,
                kind: SymbolKind::FILE,
                range: AstRange::new(0..5, AstPosition::new(0, 0), AstPosition::new(0, 5)),
                selection_range: AstRange::new(
                    0..5,
                    AstPosition::new(0, 0),
                    AstPosition::new(0, 5),
                ),
                children: Vec::new(),
            }]
        }

        fn folding_ranges(&self, _: &Document) -> Vec<LexFoldingRange> {
            self.folding_called.fetch_add(1, Ordering::SeqCst);
            vec![LexFoldingRange {
                start_line: 0,
                start_character: Some(0),
                end_line: 1,
                end_character: Some(0),
                kind: Some(FoldingRangeKind::Region),
            }]
        }

        fn hover(&self, _: &Document, position: AstPosition) -> Option<HoverResult> {
            self.hover_called.fetch_add(1, Ordering::SeqCst);
            // Remember the position so tests can assert it was forwarded.
            *self.last_hover_position.lock().unwrap() = Some(position);
            Some(HoverResult {
                range: AstRange::new(0..5, AstPosition::new(0, 0), AstPosition::new(0, 5)),
                contents: "hover".into(),
            })
        }

        fn goto_definition(&self, _: &Document, _: AstPosition) -> Vec<AstRange> {
            self.definition_called.fetch_add(1, Ordering::SeqCst);
            vec![AstRange::new(
                0..5,
                AstPosition::new(0, 0),
                AstPosition::new(0, 5),
            )]
        }

        fn references(
            &self,
            _: &Document,
            _: AstPosition,
            include_declaration: bool,
        ) -> Vec<AstRange> {
            self.references_called.fetch_add(1, Ordering::SeqCst);
            // Remember the flag so tests can assert it was forwarded.
            *self.last_references_include.lock().unwrap() = Some(include_declaration);
            vec![AstRange::new(
                0..5,
                AstPosition::new(0, 0),
                AstPosition::new(0, 5),
            )]
        }

        fn document_links(&self, _: &Document) -> Vec<AstDocumentLink> {
            self.document_links_called.fetch_add(1, Ordering::SeqCst);
            vec![AstDocumentLink::new(
                AstRange::new(0..5, AstPosition::new(0, 0), AstPosition::new(0, 5)),
                "https://example.com".to_string(),
                LinkType::Url,
            )]
        }

        fn format_document(
            &self,
            _: &Document,
            _: &str,
            _: Option<FormattingRules>,
        ) -> Vec<TextEditSpan> {
            self.formatting_called.fetch_add(1, Ordering::SeqCst);
            vec![TextEditSpan {
                start: 0,
                end: 0,
                new_text: "formatted".into(),
            }]
        }

        fn format_range(
            &self,
            _: &Document,
            _: &str,
            _: FormattingLineRange,
            _: Option<FormattingRules>,
        ) -> Vec<TextEditSpan> {
            self.range_formatting_called.fetch_add(1, Ordering::SeqCst);
            vec![TextEditSpan {
                start: 0,
                end: 0,
                new_text: "range".into(),
            }]
        }

        fn completion(
            &self,
            _: &Document,
            _: AstPosition,
            _: Option<&str>,
            _: Option<&CompletionWorkspace>,
            _: Option<&str>,
        ) -> Vec<CompletionCandidate> {
            self.completion_called.fetch_add(1, Ordering::SeqCst);
            vec![CompletionCandidate {
                label: "completion".into(),
                detail: None,
                kind: CompletionItemKind::TEXT,
                insert_text: None,
            }]
        }

        fn execute_command(&self, command: &str, _: &[Value]) -> Result<Option<Value>> {
            self.execute_command_called.fetch_add(1, Ordering::SeqCst);
            // Only the magic test command produces a value; everything else
            // resolves to None so fallthrough behavior can be tested.
            if command == "test.command" {
                Ok(Some(Value::String("executed".into())))
            } else {
                Ok(None)
            }
        }
    }

    /// URI used for the shared sample document.
    fn sample_uri() -> Url {
        Url::parse("file:///sample.lex").unwrap()
    }

    /// Text of the shared sample document fixture.
    fn sample_text() -> String {
        sample_source().to_string()
    }

    /// Converts a byte offset in `source` to a 0-based line/column position
    /// (column is a byte offset within the line).
    fn offset_to_position(source: &str, offset: usize) -> AstPosition {
        let mut line = 0;
        let mut line_start = 0;
        for (idx, ch) in source.char_indices() {
            if idx >= offset {
                break;
            }
            if ch == '\n' {
                line += 1;
                line_start = idx + ch.len_utf8();
            }
        }
        AstPosition::new(line, offset - line_start)
    }

    /// Locates `snippet` inside the sample document and returns its AST
    /// range; panics if the snippet is absent from the fixture.
    fn range_for_snippet(snippet: &str) -> AstRange {
        let source = sample_source();
        let start = source
            .find(snippet)
            .unwrap_or_else(|| panic!("snippet not found: {snippet}"));
        let end = start + snippet.len();
        let start_pos = offset_to_position(source, start);
        let end_pos = offset_to_position(source, end);
        AstRange::new(start..end, start_pos, end_pos)
    }

    /// Opens the sample document on `server` via `did_open`.
    async fn open_sample_document(server: &LexLanguageServer<NoopClient, MockFeatureProvider>) {
        let uri = sample_uri();
        server
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri,
                    language_id: "lex".into(),
                    version: 1,
                    text: sample_text(),
                },
            })
            .await;
    }

    // A single semantic token spanning several lines must be emitted as one
    // encoded token per line, with deltas that reconstruct the original
    // positions and lengths.
    #[test]
    fn encode_semantic_tokens_splits_multi_line_ranges() {
        let snippet = " CLI Example:\n lex build\n lex serve";
        let range = range_for_snippet(snippet);
        let tokens = vec![LexSemanticToken {
            kind: LexSemanticTokenKind::DocumentTitle,
            range,
        }];
        let source = sample_source();
        let encoded = encode_semantic_tokens(&tokens, source);
        // One encoded token per source line of the snippet.
        assert_eq!(encoded.len(), 3);
        let snippet_offset = source
            .find(snippet)
            .expect("snippet not found in sample document");
        // Compute the expected absolute start position of each line.
        let mut cursor = 0;
        let lines: Vec<&str> = snippet.split('\n').collect();
        let mut expected_positions = Vec::new();
        for (idx, line) in lines.iter().enumerate() {
            let offset = snippet_offset + cursor;
            expected_positions.push(offset_to_position(source, offset));
            cursor += line.len();
            if idx < lines.len() - 1 {
                // Account for the '\n' separator between lines.
                cursor += 1; }
        }
        // Decode the LSP delta encoding back into absolute (line, column).
        let mut absolute_positions = Vec::new();
        let mut line = 0u32;
        let mut column = 0u32;
        for token in &encoded {
            line += token.delta_line;
            let start = if token.delta_line == 0 {
                column + token.delta_start
            } else {
                token.delta_start
            };
            column = start;
            absolute_positions.push((line, start));
        }
        for (actual, expected) in absolute_positions.iter().zip(expected_positions.iter()) {
            assert_eq!(actual.0, expected.line as u32);
            assert_eq!(actual.1, expected.column as u32);
        }
        // Total encoded length must cover exactly the snippet's text.
        let expected_len: usize = snippet.lines().map(|line| line.len()).sum();
        let actual_len: usize = encoded.iter().map(|token| token.length as usize).sum();
        assert_eq!(actual_len, expected_len);
    }

    // The semantic tokens handler must delegate to the feature provider and
    // produce non-empty encoded data.
    #[tokio::test]
    async fn semantic_tokens_call_feature_layer() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let result = server
            .semantic_tokens_full(SemanticTokensParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.semantic_tokens_called.load(Ordering::SeqCst), 1);
        let data_len = match result {
            SemanticTokensResult::Tokens(tokens) => tokens.data.len(),
            SemanticTokensResult::Partial(partial) => partial.data.len(),
        };
        assert!(data_len > 0);
    }

    // The document symbol handler must delegate to the feature provider and
    // respond with the nested symbol variant.
    #[tokio::test]
    async fn document_symbols_call_feature_layer() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let response = server
            .document_symbol(DocumentSymbolParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        match response {
            DocumentSymbolResponse::Nested(symbols) => assert!(!symbols.is_empty()),
            _ => panic!("unexpected symbol response"),
        }
        assert_eq!(provider.document_symbols_called.load(Ordering::SeqCst), 1);
    }

    // Hover must forward the request position to the provider and return
    // markup contents.
    #[tokio::test]
    async fn hover_uses_feature_provider_position() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let hover = server
            .hover(HoverParams {
                text_document_position_params: TextDocumentPositionParams {
                    text_document: TextDocumentIdentifier { uri: sample_uri() },
                    position: Position::new(0, 0),
                },
                work_done_progress_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert!(matches!(hover.contents, HoverContents::Markup(_)));
        assert_eq!(provider.hover_called.load(Ordering::SeqCst), 1);
        let stored = provider.last_hover_position.lock().unwrap().unwrap();
        assert_eq!(stored.line, 0);
        assert_eq!(stored.column, 0);
    }

    #[tokio::test]
    async fn folding_range_uses_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let ranges = server
            .folding_range(FoldingRangeParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.folding_called.load(Ordering::SeqCst), 1);
        assert_eq!(ranges.len(), 1);
    }

    #[tokio::test]
    async fn goto_definition_uses_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let response = server
            .goto_definition(GotoDefinitionParams {
                text_document_position_params: TextDocumentPositionParams {
                    text_document: TextDocumentIdentifier { uri: sample_uri() },
                    position: Position::new(0, 0),
                },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.definition_called.load(Ordering::SeqCst), 1);
        match response {
            GotoDefinitionResponse::Array(locations) => assert_eq!(locations.len(), 1),
            _ => panic!("unexpected goto definition response"),
        }
    }

    /// Shape of the JSON payload returned by the insert commands.
    #[derive(Deserialize)]
    struct SnippetResponse {
        text: String,
        #[serde(rename = "cursorOffset")]
        cursor_offset: usize,
    }

    // Insert-asset and insert-verbatim commands must return snippets that
    // embed the referenced file.
    #[tokio::test]
    async fn execute_insert_commands() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let temp_dir = tempdir().unwrap();
        let asset_file = temp_dir.path().join("diagram.png");
        fs::write(&asset_file, [0u8, 159u8, 146u8, 150u8]).unwrap();

        let params = ExecuteCommandParams {
            command: commands::COMMAND_INSERT_ASSET.to_string(),
            arguments: vec![
                serde_json::to_value(sample_uri().to_string()).unwrap(),
                serde_json::to_value(Position::new(0, 0)).unwrap(),
                serde_json::to_value(asset_file.to_string_lossy()).unwrap(),
            ],
            work_done_progress_params: Default::default(),
        };
        let result = server.execute_command(params).await.unwrap();
        let snippet: SnippetResponse = serde_json::from_value(result.unwrap()).unwrap();
        assert!(snippet.text.contains(":: doc.image"));
        assert!(snippet.text.contains(asset_file.to_string_lossy().as_ref()));

        let verbatim_file = temp_dir.path().join("example.py");
        fs::write(&verbatim_file, "print('hi')\n").unwrap();

        let params = ExecuteCommandParams {
            command: commands::COMMAND_INSERT_VERBATIM.to_string(),
            arguments: vec![
                serde_json::to_value(sample_uri().to_string()).unwrap(),
                serde_json::to_value(Position::new(0, 0)).unwrap(),
                serde_json::to_value(verbatim_file.to_string_lossy()).unwrap(),
            ],
            work_done_progress_params: Default::default(),
        };
        let result = server.execute_command(params).await.unwrap();
        let snippet: SnippetResponse = serde_json::from_value(result.unwrap()).unwrap();
        assert!(snippet.text.contains(":: python"));
        assert!(snippet.text.contains("print('hi')"));
        assert_eq!(snippet.cursor_offset, 0);
    }

    // Next/previous navigation and resolve must work against a document with
    // two annotations.
    #[tokio::test]
    async fn execute_annotation_navigation_commands() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        let uri = Url::parse("file:///annotations.lex").unwrap();
        let text = ":: note ::\n First\n::\n\n:: note ::\n Second\n::\n";
        server
            .did_open(DidOpenTextDocumentParams {
                text_document: TextDocumentItem {
                    uri: uri.clone(),
                    language_id: "lex".into(),
                    version: 1,
                    text: text.to_string(),
                },
            })
            .await;

        let next_params = ExecuteCommandParams {
            command: commands::COMMAND_NEXT_ANNOTATION.to_string(),
            arguments: vec![
                serde_json::to_value(uri.to_string()).unwrap(),
                serde_json::to_value(Position::new(0, 0)).unwrap(),
            ],
            work_done_progress_params: Default::default(),
        };
        let next_location: Location =
            serde_json::from_value(server.execute_command(next_params).await.unwrap().unwrap())
                .unwrap();
        assert_eq!(next_location.range.start.line, 0);

        let previous_params = ExecuteCommandParams {
            command: commands::COMMAND_PREVIOUS_ANNOTATION.to_string(),
            arguments: vec![
                serde_json::to_value(uri.to_string()).unwrap(),
                serde_json::to_value(Position::new(0, 0)).unwrap(),
            ],
            work_done_progress_params: Default::default(),
        };
        let previous_location: Location = serde_json::from_value(
            server
                .execute_command(previous_params)
                .await
                .unwrap()
                .unwrap(),
        )
        .unwrap();
        // "Previous" from the top wraps to the second annotation's header
        // (line 4) in this fixture.
        assert_eq!(previous_location.range.start.line, 4);

        let resolve_params = ExecuteCommandParams {
            command: commands::COMMAND_RESOLVE_ANNOTATION.to_string(),
            arguments: vec![
                serde_json::to_value(uri.to_string()).unwrap(),
                serde_json::to_value(Position::new(0, 0)).unwrap(),
            ],
            work_done_progress_params: Default::default(),
        };
        let edit_value = server
            .execute_command(resolve_params)
            .await
            .unwrap()
            .unwrap();
        let workspace_edit: tower_lsp::lsp_types::WorkspaceEdit =
            serde_json::from_value(edit_value).unwrap();
        let changes = workspace_edit.changes.expect("workspace edit changes");
        let edits = changes.get(&uri).expect("edits for document");
        // Resolving rewrites the header with a status=resolved parameter.
        assert_eq!(edits[0].new_text, ":: note status=resolved ::");
    }

    // References must forward the include_declaration flag to the provider.
    #[tokio::test]
    async fn references_use_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let result = server
            .references(ReferenceParams {
                text_document_position: TextDocumentPositionParams {
                    text_document: TextDocumentIdentifier { uri: sample_uri() },
                    position: Position::new(0, 0),
                },
                context: ReferenceContext {
                    include_declaration: true,
                },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.references_called.load(Ordering::SeqCst), 1);
        assert_eq!(result.len(), 1);
        assert_eq!(
            *provider.last_references_include.lock().unwrap(),
            Some(true)
        );
    }

    #[tokio::test]
    async fn document_links_use_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let links = server
            .document_link(DocumentLinkParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.document_links_called.load(Ordering::SeqCst), 1);
        assert_eq!(links.len(), 1);
        // Note: Url normalizes the bare host target to a trailing-slash form.
        assert_eq!(
            links[0].target.as_ref().map(|url| url.as_str()),
            Some("https://example.com/")
        );
    }

    #[tokio::test]
    async fn formatting_uses_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let edits = server
            .formatting(DocumentFormattingParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                options: FormattingOptions::default(),
                work_done_progress_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.formatting_called.load(Ordering::SeqCst), 1);
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].new_text, "formatted");
    }

    #[tokio::test]
    async fn range_formatting_uses_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());
        open_sample_document(&server).await;

        let edits = server
            .range_formatting(DocumentRangeFormattingParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                range: Range {
                    start: Position::new(0, 0),
                    end: Position::new(0, 0),
                },
                options: FormattingOptions::default(),
                work_done_progress_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.range_formatting_called.load(Ordering::SeqCst), 1);
        assert_eq!(edits.len(), 1);
        assert_eq!(edits[0].new_text, "range");
    }

    // Requests against unopened documents must resolve to None, not error.
    #[tokio::test]
    async fn semantic_tokens_returns_none_when_document_missing() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider);

        let result = server
            .semantic_tokens_full(SemanticTokensParams {
                text_document: TextDocumentIdentifier { uri: sample_uri() },
                work_done_progress_params: Default::default(),
                partial_result_params: Default::default(),
            })
            .await
            .unwrap();

        assert!(result.is_none());
    }

    // Commands the server does not handle itself must fall through to the
    // feature provider.
    #[tokio::test]
    async fn execute_command_uses_feature_provider() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider.clone());

        let result = server
            .execute_command(ExecuteCommandParams {
                command: "test.command".into(),
                arguments: vec![],
                work_done_progress_params: Default::default(),
            })
            .await
            .unwrap()
            .unwrap();

        assert_eq!(provider.execute_command_called.load(Ordering::SeqCst), 1);
        assert_eq!(result, Value::String("executed".into()));
    }

    #[tokio::test]
    async fn hover_returns_none_without_document_entry() {
        let provider = Arc::new(MockFeatureProvider::default());
        let server = LexLanguageServer::with_features(NoopClient, provider);

        let hover = server
            .hover(HoverParams {
                text_document_position_params: TextDocumentPositionParams {
                    text_document: TextDocumentIdentifier { uri: sample_uri() },
                    position: Position::new(0, 0),
                },
                work_done_progress_params: Default::default(),
            })
            .await
            .unwrap();

        assert!(hover.is_none());
    }

    // Without any "lex."-prefixed properties, overrides leave the defaults
    // untouched.
    #[test]
    fn apply_formatting_overrides_noop_without_lex_properties() {
        let options = FormattingOptions {
            tab_size: 4,
            insert_spaces: true,
            properties: Default::default(),
            trim_trailing_whitespace: None,
            insert_final_newline: None,
            trim_final_newlines: None,
        };
        let mut rules = FormattingRules::default();
        let original = rules.clone();
        apply_formatting_overrides(&mut rules, &options);
        assert_eq!(rules.indent_string, original.indent_string);
        assert_eq!(rules.max_blank_lines, original.max_blank_lines);
    }

    // "lex."-prefixed string/number/bool properties must map onto the
    // corresponding FormattingRules fields.
    #[test]
    fn apply_formatting_overrides_applies_lex_properties() {
        use std::collections::HashMap;

        let mut properties = HashMap::new();
        properties.insert(
            "lex.indent_string".to_string(),
            FormattingProperty::String(" ".to_string()),
        );
        properties.insert(
            "lex.max_blank_lines".to_string(),
            FormattingProperty::Number(3),
        );
        properties.insert(
            "lex.normalize_seq_markers".to_string(),
            FormattingProperty::Bool(false),
        );
        properties.insert(
            "lex.unordered_seq_marker".to_string(),
            FormattingProperty::String("*".to_string()),
        );

        let options = FormattingOptions {
            tab_size: 4,
            insert_spaces: true,
            properties,
            trim_trailing_whitespace: None,
            insert_final_newline: None,
            trim_final_newlines: None,
        };

        let mut rules = FormattingRules::default();
        apply_formatting_overrides(&mut rules, &options);
        assert_eq!(rules.indent_string, " ");
        assert_eq!(rules.max_blank_lines, 3);
        assert!(!rules.normalize_seq_markers);
        assert_eq!(rules.unordered_seq_marker, '*');
    }
}