1use eure::query::{
4 DiagnosticMessage, DiagnosticSeverity, GetFileDiagnostics, GetSemanticTokens, SemanticToken,
5 TextFile,
6};
7use lsp_types::{
8 Diagnostic, DiagnosticSeverity as LspSeverity, Position, Range,
9 SemanticToken as LspSemanticToken, SemanticTokens,
10};
11use query_flow::{Db, QueryError, query};
12
13#[query]
17pub fn lsp_semantic_tokens(
18 db: &impl Db,
19 file: TextFile,
20 source: String,
21) -> Result<SemanticTokens, QueryError> {
22 let tokens = db.query(GetSemanticTokens::new(file.clone()))?;
23 Ok(convert_tokens(&tokens, &source))
24}
25
26#[query]
32pub fn lsp_diagnostics(
33 db: &impl Db,
34 file: TextFile,
35) -> Result<Vec<(TextFile, Vec<Diagnostic>)>, QueryError> {
36 let diagnostics = db.query(GetFileDiagnostics::new(file.clone()))?;
37
38 let mut by_file: std::collections::HashMap<TextFile, Vec<DiagnosticMessage>> =
40 std::collections::HashMap::new();
41
42 by_file.insert(file, vec![]);
44
45 for d in diagnostics.iter() {
46 by_file.entry(d.file.clone()).or_default().push(d.clone());
47 }
48
49 let mut result = Vec::new();
51 for (diag_file, file_diagnostics) in by_file {
52 let source: std::sync::Arc<eure::query::TextFileContent> = db.asset(diag_file.clone())?;
53 let line_offsets = compute_line_offsets(source.get());
54 let lsp_diagnostics: Vec<Diagnostic> = file_diagnostics
55 .iter()
56 .map(|d| convert_diagnostic(d, source.get(), &line_offsets))
57 .collect();
58 result.push((diag_file, lsp_diagnostics));
59 }
60
61 Ok(result)
62}
63
64#[query]
69pub fn lsp_file_diagnostics(db: &impl Db, file: TextFile) -> Result<Vec<Diagnostic>, QueryError> {
70 let diagnostics = db.query(GetFileDiagnostics::new(file.clone()))?;
71
72 let source: std::sync::Arc<eure::query::TextFileContent> = db.asset(file.clone())?;
74 let line_offsets = compute_line_offsets(source.get());
75
76 let lsp_diagnostics: Vec<Diagnostic> = diagnostics
78 .iter()
79 .filter(|d| d.file == file) .map(|d| convert_diagnostic(d, source.get(), &line_offsets))
81 .collect();
82
83 Ok(lsp_diagnostics)
84}
85
86fn convert_tokens(tokens: &[SemanticToken], source: &str) -> SemanticTokens {
94 let line_offsets = compute_line_offsets(source);
95
96 let mut data = Vec::new();
97 let mut prev_line = 0u32;
98 let mut prev_start = 0u32;
99
100 for token in tokens {
101 let start = token.start as usize;
102 let end = start + token.length as usize;
103 let (line, char) = offset_to_position(start, source, &line_offsets);
104 let length = byte_len_to_utf16_len(source, start, end);
105
106 let delta_line = line - prev_line;
107 let delta_start = if delta_line == 0 {
108 char - prev_start
109 } else {
110 char
111 };
112
113 data.push(LspSemanticToken {
114 delta_line,
115 delta_start,
116 length,
117 token_type: token.token_type as u32,
118 token_modifiers_bitset: token.modifiers,
119 });
120
121 prev_line = line;
122 prev_start = char;
123 }
124
125 SemanticTokens {
126 result_id: None,
127 data,
128 }
129}
130
131fn convert_diagnostic(msg: &DiagnosticMessage, source: &str, line_offsets: &[usize]) -> Diagnostic {
133 let start = offset_to_lsp_position(msg.start, source, line_offsets);
134 let end = offset_to_lsp_position(msg.end, source, line_offsets);
135
136 Diagnostic {
137 range: Range { start, end },
138 severity: Some(convert_severity(msg.severity)),
139 code: None,
140 code_description: None,
141 source: Some("eure".to_string()),
142 message: msg.message.clone(),
143 related_information: None,
144 tags: None,
145 data: None,
146 }
147}
148
/// Maps an eure diagnostic severity onto its LSP counterpart.
///
/// The match is one-to-one and exhaustive, so adding a variant to
/// `DiagnosticSeverity` surfaces here as a compile error.
fn convert_severity(severity: DiagnosticSeverity) -> LspSeverity {
    match severity {
        DiagnosticSeverity::Error => LspSeverity::ERROR,
        DiagnosticSeverity::Warning => LspSeverity::WARNING,
        DiagnosticSeverity::Info => LspSeverity::INFORMATION,
        DiagnosticSeverity::Hint => LspSeverity::HINT,
    }
}
158
/// Returns the byte offset of the start of every line in `source`.
///
/// Line 0 always starts at offset 0; every `'\n'` opens a new line at the
/// byte immediately after it (so a trailing newline yields a final empty
/// line entry).
fn compute_line_offsets(source: &str) -> Vec<usize> {
    std::iter::once(0)
        .chain(source.match_indices('\n').map(|(idx, _)| idx + 1))
        .collect()
}
171
/// Translates a byte offset into a 0-based (line, UTF-16 column) pair.
///
/// `line_offsets` must be the sorted line-start table for `source` (as built
/// by `compute_line_offsets`, whose first entry is always 0), so a binary
/// search replaces the original O(lines) reverse scan. An offset past the
/// end of `source`, or one landing in the middle of a multi-byte codepoint,
/// is clamped down to the nearest char boundary instead of panicking on the
/// slice below.
fn offset_to_position(offset: usize, source: &str, line_offsets: &[usize]) -> (u32, u32) {
    // Number of line starts <= offset, minus one, is the containing line.
    let line = line_offsets.partition_point(|&o| o <= offset).saturating_sub(1);
    let line_start = line_offsets[line];

    let mut end = offset.min(source.len());
    while !source.is_char_boundary(end) {
        end -= 1;
    }

    // Column is counted in UTF-16 code units, per the LSP position encoding.
    let utf16_offset: usize = source[line_start..end].chars().map(char::len_utf16).sum();
    (line as u32, utf16_offset as u32)
}
184
185fn offset_to_lsp_position(offset: usize, source: &str, line_offsets: &[usize]) -> Position {
187 let (line, character) = offset_to_position(offset, source, line_offsets);
188 Position { line, character }
189}
190
/// Measures the UTF-16 length of the byte span `start..end` of `source`.
///
/// Both bounds are clamped into the string and, if they land in the middle
/// of a multi-byte codepoint, backed off to the nearest preceding char
/// boundary — the original slice would panic on such input.
fn byte_len_to_utf16_len(source: &str, start: usize, end: usize) -> u32 {
    let mut end = end.min(source.len());
    while !source.is_char_boundary(end) {
        end -= 1;
    }
    let mut start = start.min(end);
    while !source.is_char_boundary(start) {
        start -= 1;
    }
    source[start..end].chars().map(char::len_utf16).sum::<usize>() as u32
}
200
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_compute_line_offsets() {
        // Newlines at bytes 5 and 11 open lines at bytes 6 and 12; line 0
        // always starts at 0.
        let source = "hello\nworld\n";
        let offsets = compute_line_offsets(source);
        assert_eq!(offsets, vec![0, 6, 12]);
    }

    #[test]
    fn test_offset_to_position_ascii() {
        let source = "hello\nworld\n";
        let offsets = compute_line_offsets(source);
        // For ASCII, byte offsets and UTF-16 columns coincide.
        assert_eq!(offset_to_position(0, source, &offsets), (0, 0));
        assert_eq!(offset_to_position(5, source, &offsets), (0, 5));
        assert_eq!(offset_to_position(6, source, &offsets), (1, 0));
        assert_eq!(offset_to_position(11, source, &offsets), (1, 5));
    }

    #[test]
    fn test_offset_to_position_utf8() {
        // Each kanji is 3 bytes in UTF-8 but a single UTF-16 code unit, so
        // columns advance by 1 for every 3 bytes; the '\n' sits at byte 9.
        let source = "日本語\ntest";
        let offsets = compute_line_offsets(source);
        assert_eq!(offset_to_position(0, source, &offsets), (0, 0));
        assert_eq!(offset_to_position(3, source, &offsets), (0, 1));
        assert_eq!(offset_to_position(6, source, &offsets), (0, 2));
        assert_eq!(offset_to_position(9, source, &offsets), (0, 3));
        assert_eq!(offset_to_position(10, source, &offsets), (1, 0));
    }

    #[test]
    fn test_offset_to_position_emoji() {
        // The emoji is 4 bytes in UTF-8 and a surrogate pair (2 code units)
        // in UTF-16, so the column after it is 2, not 1.
        let source = "😀a";
        let offsets = compute_line_offsets(source);
        assert_eq!(offset_to_position(0, source, &offsets), (0, 0));
        assert_eq!(offset_to_position(4, source, &offsets), (0, 2));
        assert_eq!(offset_to_position(5, source, &offsets), (0, 3));
    }

    #[test]
    fn test_byte_len_to_utf16_len() {
        // ASCII: 1 byte per code unit; kanji: 3 bytes per code unit;
        // emoji: 4 bytes but 2 code units (surrogate pair).
        assert_eq!(byte_len_to_utf16_len("hello", 0, 5), 5);
        assert_eq!(byte_len_to_utf16_len("日本語", 0, 9), 3);
        assert_eq!(byte_len_to_utf16_len("😀", 0, 4), 2);
    }
}
261}