dbml_language_server/
server.rs

1// src/server.rs
2use dashmap::DashMap;
3use tower_lsp::jsonrpc::Result;
4use tower_lsp::lsp_types::*;
5use tower_lsp::{Client, LanguageServer};
6
7use crate::state::DocumentState;
8
/// Shared state for the DBML language server.
pub struct Backend {
    // Handle used to push notifications back to the editor
    // (diagnostics, log messages).
    client: Client,
    // Per-document analysis state, keyed by document URI.
    // DashMap gives lock-free-ish concurrent access across async handlers.
    document_map: DashMap<Url, DocumentState>,
}
13
14impl Backend {
15    pub fn new(client: Client) -> Self {
16        Self {
17            client,
18            document_map: DashMap::new(),
19        }
20    }
21
22    async fn on_change(&self, params: TextDocumentItem) {
23        let uri = params.uri;
24        let content = params.text;
25        let version = params.version;
26
27        let mut state = DocumentState::new(uri.clone(), content, version);
28        state.analyze();
29
30        let diagnostics = state.diagnostics.clone();
31        self.document_map.insert(uri.clone(), state);
32
33        self.client
34            .publish_diagnostics(uri, diagnostics, None)
35            .await;
36    }
37}
38
39#[tower_lsp::async_trait]
40impl LanguageServer for Backend {
41    async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
42        Ok(InitializeResult {
43            server_info: Some(ServerInfo {
44                name: "dbml-lsp".to_string(),
45                version: Some("0.1.0".to_string()),
46            }),
47            capabilities: ServerCapabilities {
48                text_document_sync: Some(TextDocumentSyncCapability::Kind(
49                    TextDocumentSyncKind::FULL,
50                )),
51                definition_provider: Some(OneOf::Left(true)),
52                rename_provider: Some(OneOf::Left(true)),
53                semantic_tokens_provider: Some(
54                    SemanticTokensServerCapabilities::SemanticTokensOptions(
55                        SemanticTokensOptions {
56                            work_done_progress_options: WorkDoneProgressOptions::default(),
57                            legend: SemanticTokensLegend {
58                                token_types: vec![
59                                    SemanticTokenType::KEYWORD,
60                                    SemanticTokenType::CLASS,
61                                    SemanticTokenType::PROPERTY,
62                                    SemanticTokenType::ENUM,
63                                    SemanticTokenType::ENUM_MEMBER,
64                                    SemanticTokenType::TYPE,
65                                    SemanticTokenType::STRING,
66                                    SemanticTokenType::COMMENT,
67                                    SemanticTokenType::OPERATOR,
68                                ],
69                                token_modifiers: vec![],
70                            },
71                            range: Some(false),
72                            full: Some(SemanticTokensFullOptions::Bool(true)),
73                        },
74                    ),
75                ),
76                ..ServerCapabilities::default()
77            },
78        })
79    }
80
81    async fn initialized(&self, _: InitializedParams) {
82        self.client
83            .log_message(MessageType::INFO, "DBML language server initialized")
84            .await;
85    }
86
87    async fn shutdown(&self) -> Result<()> {
88        Ok(())
89    }
90
91    async fn did_open(&self, params: DidOpenTextDocumentParams) {
92        self.on_change(TextDocumentItem {
93            uri: params.text_document.uri,
94            text: params.text_document.text,
95            version: params.text_document.version,
96            language_id: params.text_document.language_id,
97        })
98        .await;
99    }
100
101    async fn did_change(&self, params: DidChangeTextDocumentParams) {
102        let uri = params.text_document.uri;
103        let version = params.text_document.version;
104
105        if let Some(change) = params.content_changes.first() {
106            let mut state = self
107                .document_map
108                .get_mut(&uri)
109                .expect("Document should exist");
110
111            state.update_content(change.text.clone(), version);
112            state.analyze();
113
114            let diagnostics = state.diagnostics.clone();
115            drop(state);
116
117            self.client
118                .publish_diagnostics(uri, diagnostics, None)
119                .await;
120        }
121    }
122
123    async fn did_close(&self, params: DidCloseTextDocumentParams) {
124        self.document_map.remove(&params.text_document.uri);
125    }
126
127    async fn goto_definition(
128        &self,
129        params: GotoDefinitionParams,
130    ) -> Result<Option<GotoDefinitionResponse>> {
131        let uri = params.text_document_position_params.text_document.uri;
132        let position = params.text_document_position_params.position;
133
134        if let Some(state) = self.document_map.get(&uri) {
135            if let Some(semantic_model) = &state.semantic_model {
136                let offset = position_to_offset(&position, &state.content);
137
138                if let Some(symbol) = semantic_model.find_symbol_at_position(offset) {
139                    let range = span_to_range(&symbol.span, &state.content);
140                    return Ok(Some(GotoDefinitionResponse::Scalar(Location {
141                        uri: uri.clone(),
142                        range,
143                    })));
144                }
145            }
146        }
147
148        Ok(None)
149    }
150
151    async fn rename(&self, params: RenameParams) -> Result<Option<WorkspaceEdit>> {
152        let uri = params.text_document_position.text_document.uri;
153        let position = params.text_document_position.position;
154        let new_name = params.new_name;
155
156        if let Some(state) = self.document_map.get(&uri) {
157            if let Some(semantic_model) = &state.semantic_model {
158                let offset = position_to_offset(&position, &state.content);
159
160                if let Some(symbol) = semantic_model.find_symbol_at_position(offset) {
161                    let range = span_to_range(&symbol.span, &state.content);
162
163                    let mut changes = std::collections::HashMap::new();
164                    changes.insert(
165                        uri.clone(),
166                        vec![TextEdit {
167                            range,
168                            new_text: new_name,
169                        }],
170                    );
171
172                    return Ok(Some(WorkspaceEdit {
173                        changes: Some(changes),
174                        document_changes: None,
175                        change_annotations: None,
176                    }));
177                }
178            }
179        }
180
181        Ok(None)
182    }
183
184    async fn semantic_tokens_full(
185        &self,
186        params: SemanticTokensParams,
187    ) -> Result<Option<SemanticTokensResult>> {
188        let uri = params.text_document.uri;
189
190        if let Some(state) = self.document_map.get(&uri) {
191            if let Some(ast) = &state.ast {
192                let tokens = generate_semantic_tokens(ast, &state.content);
193                return Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
194                    result_id: None,
195                    data: tokens,
196                })));
197            }
198        }
199
200        Ok(None)
201    }
202}
203
204fn position_to_offset(position: &Position, content: &str) -> usize {
205    let mut offset = 0;
206    let mut current_line = 0;
207    let mut current_col = 0;
208
209    for ch in content.chars() {
210        if current_line == position.line && current_col == position.character {
211            return offset;
212        }
213
214        if ch == '\n' {
215            current_line += 1;
216            current_col = 0;
217        } else {
218            current_col += 1;
219        }
220        offset += ch.len_utf8();
221    }
222
223    offset
224}
225
226fn span_to_range(span: &std::ops::Range<usize>, content: &str) -> Range {
227    let start_pos = offset_to_position(span.start, content);
228    let end_pos = offset_to_position(span.end, content);
229    Range::new(start_pos, end_pos)
230}
231
232fn offset_to_position(offset: usize, content: &str) -> Position {
233    let mut line = 0;
234    let mut col = 0;
235
236    for (i, ch) in content.chars().enumerate() {
237        if i >= offset {
238            break;
239        }
240        if ch == '\n' {
241            line += 1;
242            col = 0;
243        } else {
244            col += 1;
245        }
246    }
247
248    Position::new(line, col)
249}
250
251fn generate_semantic_tokens(ast: &crate::ast::Document, content: &str) -> Vec<SemanticToken> {
252    use crate::ast::*;
253
254    let mut tokens = Vec::new();
255    let mut prev_line = 0;
256    let mut prev_start = 0;
257
258    for item in &ast.items {
259        match item {
260            DocumentItem::Table(table) => {
261                add_token(
262                    &mut tokens,
263                    &table.name.span,
264                    1,
265                    content,
266                    &mut prev_line,
267                    &mut prev_start,
268                );
269
270                for item in &table.items {
271                    if let TableItem::Column(col) = item {
272                        add_token(
273                            &mut tokens,
274                            &col.name.span,
275                            2,
276                            content,
277                            &mut prev_line,
278                            &mut prev_start,
279                        );
280                    }
281                }
282            }
283            DocumentItem::Enum(enum_def) => {
284                add_token(
285                    &mut tokens,
286                    &enum_def.name.span,
287                    3,
288                    content,
289                    &mut prev_line,
290                    &mut prev_start,
291                );
292
293                for member in &enum_def.members {
294                    add_token(
295                        &mut tokens,
296                        &member.name.span,
297                        4,
298                        content,
299                        &mut prev_line,
300                        &mut prev_start,
301                    );
302                }
303            }
304            _ => {}
305        }
306    }
307
308    tokens
309}
310
311fn add_token(
312    tokens: &mut Vec<SemanticToken>,
313    span: &std::ops::Range<usize>,
314    token_type: u32,
315    content: &str,
316    prev_line: &mut u32,
317    prev_start: &mut u32,
318) {
319    let start_pos = offset_to_position(span.start, content);
320    let length = span.end - span.start;
321
322    let delta_line = start_pos.line - *prev_line;
323    let delta_start = if delta_line == 0 {
324        start_pos.character - *prev_start
325    } else {
326        start_pos.character
327    };
328
329    tokens.push(SemanticToken {
330        delta_line,
331        delta_start,
332        length: length as u32,
333        token_type,
334        token_modifiers_bitset: 0,
335    });
336
337    *prev_line = start_pos.line;
338    *prev_start = start_pos.character;
339}