1use std::borrow::Cow;
2
3use serde::ser::SerializeSeq;
4use serde::{Deserialize, Serialize};
5
6use crate::{
7 PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
8 TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
9};
/// A type of semantic token, e.g. `"function"` or `"variable"`.
///
/// Wraps a `Cow<'static, str>` so the predefined variants in the `impl` block
/// are zero-allocation constants, while custom token types can carry an owned
/// `String`. Serializes as a bare JSON string.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);
17
impl SemanticTokenType {
    // Token types predefined by the LSP specification.
    pub const NAMESPACE: Self = Self::new("namespace");
    pub const TYPE: Self = Self::new("type");
    pub const CLASS: Self = Self::new("class");
    pub const ENUM: Self = Self::new("enum");
    pub const INTERFACE: Self = Self::new("interface");
    pub const STRUCT: Self = Self::new("struct");
    pub const TYPE_PARAMETER: Self = Self::new("typeParameter");
    pub const PARAMETER: Self = Self::new("parameter");
    pub const VARIABLE: Self = Self::new("variable");
    pub const PROPERTY: Self = Self::new("property");
    pub const ENUM_MEMBER: Self = Self::new("enumMember");
    pub const EVENT: Self = Self::new("event");
    pub const FUNCTION: Self = Self::new("function");
    pub const METHOD: Self = Self::new("method");
    pub const MACRO: Self = Self::new("macro");
    pub const KEYWORD: Self = Self::new("keyword");
    pub const MODIFIER: Self = Self::new("modifier");
    pub const COMMENT: Self = Self::new("comment");
    pub const STRING: Self = Self::new("string");
    pub const NUMBER: Self = Self::new("number");
    pub const REGEXP: Self = Self::new("regexp");
    pub const OPERATOR: Self = Self::new("operator");

    // NOTE(review): set apart from the list above — presumably a later spec
    // addition (LSP 3.17 added "decorator"); confirm against the spec version
    // this crate targets.
    pub const DECORATOR: Self = Self::new("decorator");

    /// Creates a token type from a static string without allocating.
    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    /// Returns the token type's string form, e.g. `"function"`.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}
55
56impl From<String> for SemanticTokenType {
57 fn from(from: String) -> Self {
58 Self(Cow::from(from))
59 }
60}
61
62impl From<&'static str> for SemanticTokenType {
63 fn from(from: &'static str) -> Self {
64 Self::new(from)
65 }
66}
67
/// A modifier applied to a semantic token, e.g. `"readonly"` or `"static"`.
///
/// Like [`SemanticTokenType`], wraps a `Cow<'static, str>` so the predefined
/// modifiers are allocation-free constants while custom ones can be owned.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);
75
impl SemanticTokenModifier {
    // Token modifiers predefined by the LSP specification.
    pub const DECLARATION: Self = Self::new("declaration");
    pub const DEFINITION: Self = Self::new("definition");
    pub const READONLY: Self = Self::new("readonly");
    pub const STATIC: Self = Self::new("static");
    pub const DEPRECATED: Self = Self::new("deprecated");
    pub const ABSTRACT: Self = Self::new("abstract");
    pub const ASYNC: Self = Self::new("async");
    pub const MODIFICATION: Self = Self::new("modification");
    pub const DOCUMENTATION: Self = Self::new("documentation");
    pub const DEFAULT_LIBRARY: Self = Self::new("defaultLibrary");

    /// Creates a token modifier from a static string without allocating.
    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    /// Returns the modifier's string form, e.g. `"readonly"`.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}
98
99impl From<String> for SemanticTokenModifier {
100 fn from(from: String) -> Self {
101 Self(Cow::from(from))
102 }
103}
104
105impl From<&'static str> for SemanticTokenModifier {
106 fn from(from: &'static str) -> Self {
107 Self::new(from)
108 }
109}
110
/// The encoding format for semantic token positions.
///
/// Only [`TokenFormat::RELATIVE`] is defined here; positions are encoded as
/// deltas relative to the previous token (see [`SemanticToken`]).
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);
113
impl TokenFormat {
    /// The only format currently defined: token positions are expressed as
    /// deltas to the previous token.
    pub const RELATIVE: Self = Self::new("relative");

    /// Creates a token format from a static string without allocating.
    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    /// Returns the format's string form, e.g. `"relative"`.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}
127
128impl From<String> for TokenFormat {
129 fn from(from: String) -> Self {
130 Self(Cow::from(from))
131 }
132}
133
134impl From<&'static str> for TokenFormat {
135 fn from(from: &'static str) -> Self {
136 Self::new(from)
137 }
138}
139
/// The legend a server announces for semantic tokens.
///
/// The numeric `token_type` and `token_modifiers_bitset` values in
/// [`SemanticToken`] are indices (respectively bit positions) into these lists.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    /// The token types a server uses.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers a server uses.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}
150
/// A single decoded semantic token.
///
/// On the wire each token is flattened into five consecutive `u32`s
/// (see `SemanticToken::serialize_tokens` / `deserialize_tokens`).
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    /// Line delta relative to the previous token.
    pub delta_line: u32,
    /// Start-character delta relative to the previous token (relative to the
    /// line start when `delta_line` is non-zero).
    pub delta_start: u32,
    /// Length of the token in characters.
    pub length: u32,
    /// Index into [`SemanticTokensLegend::token_types`].
    pub token_type: u32,
    /// Bitset over [`SemanticTokensLegend::token_modifiers`] positions.
    pub token_modifiers_bitset: u32,
}
160
161impl SemanticToken {
162 fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<Self>, D::Error>
163 where
164 D: serde::Deserializer<'de>,
165 {
166 let data = Vec::<u32>::deserialize(deserializer)?;
167 let chunks = data.chunks_exact(5);
168
169 if !chunks.remainder().is_empty() {
170 return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
171 }
172
173 Result::Ok(
174 chunks
175 .map(|chunk| Self {
176 delta_line: chunk[0],
177 delta_start: chunk[1],
178 length: chunk[2],
179 token_type: chunk[3],
180 token_modifiers_bitset: chunk[4],
181 })
182 .collect(),
183 )
184 }
185
186 fn serialize_tokens<S>(tokens: &[Self], serializer: S) -> Result<S::Ok, S::Error>
187 where
188 S: serde::Serializer,
189 {
190 let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
191 for token in tokens {
192 seq.serialize_element(&token.delta_line)?;
193 seq.serialize_element(&token.delta_start)?;
194 seq.serialize_element(&token.length)?;
195 seq.serialize_element(&token.token_type)?;
196 seq.serialize_element(&token.token_modifiers_bitset)?;
197 }
198 seq.end()
199 }
200
201 fn deserialize_tokens_opt<'de, D>(deserializer: D) -> Result<Option<Vec<Self>>, D::Error>
202 where
203 D: serde::Deserializer<'de>,
204 {
205 #[derive(Deserialize)]
206 #[serde(transparent)]
207 struct Wrapper {
208 #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
209 tokens: Vec<SemanticToken>,
210 }
211
212 Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
213 }
214
215 fn serialize_tokens_opt<S>(data: &Option<Vec<Self>>, serializer: S) -> Result<S::Ok, S::Error>
216 where
217 S: serde::Serializer,
218 {
219 #[derive(Serialize)]
220 #[serde(transparent)]
221 struct Wrapper {
222 #[serde(serialize_with = "SemanticToken::serialize_tokens")]
223 tokens: Vec<SemanticToken>,
224 }
225
226 let opt = data.as_ref().map(|t| Wrapper { tokens: t.clone() });
227
228 opt.serialize(serializer)
229 }
230}
231
/// A full set of semantic tokens for a document.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id. If provided, a client can use it in a
    /// subsequent delta request to have the server compute edits against
    /// this result.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,

    /// The tokens, encoded on the wire as a flat `u32` array with five
    /// integers per token (see [`SemanticToken`]).
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
252
/// A streamed (partial-result) chunk of semantic tokens; like
/// [`SemanticTokens`] but without a `result_id`.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    /// Token data in the same flat `u32` wire encoding as [`SemanticTokens`].
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
263
/// Response payload for a full semantic-tokens request: either the complete
/// token set or a partial-result chunk. `untagged` means the JSON shape alone
/// decides which variant is deserialized.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}
271
272impl From<SemanticTokens> for SemanticTokensResult {
273 fn from(from: SemanticTokens) -> Self {
274 Self::Tokens(from)
275 }
276}
277
278impl From<SemanticTokensPartialResult> for SemanticTokensResult {
279 fn from(from: SemanticTokensPartialResult) -> Self {
280 Self::Partial(from)
281 }
282}
283
/// A single edit transforming a previous token result into a new one.
/// Offsets refer to positions in the flat `u32` wire array, not to tokens.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    /// Start offset of the edit in the flat integer array.
    pub start: u32,
    /// Number of integers to delete at `start`.
    pub delete_count: u32,

    /// Optional replacement data, in the same flat encoding; omitted entirely
    /// from JSON when `None` (both `default` and `skip_serializing_if`).
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}
299
/// Response payload for a `full/delta` request: a full token set, a delta
/// against a previous result, or a partial delta chunk. Deserialized
/// `untagged`, i.e. by JSON shape.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}
308
309impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
310 fn from(from: SemanticTokens) -> Self {
311 Self::Tokens(from)
312 }
313}
314
315impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
316 fn from(from: SemanticTokensDelta) -> Self {
317 Self::TokensDelta(from)
318 }
319}
320
/// A set of edits transforming a previous token result into a new one.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    /// Id of this result, usable for a subsequent delta request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// The edits to apply to the previous result.
    pub edits: Vec<SemanticTokensEdit>,
}
331
/// Client capabilities specific to the `textDocument/semanticTokens/*` requests.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the client supports dynamic registration for these requests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    /// Which semantic-token requests the client supports (range and/or full,
    /// with or without delta).
    pub requests: SemanticTokensClientCapabilitiesRequests,

    /// The token types the client supports.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,

    /// The position-encoding formats the client supports
    /// (currently only [`TokenFormat::RELATIVE`] is defined).
    pub formats: Vec<TokenFormat>,

    /// Whether the client supports tokens that overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,

    /// Whether the client supports tokens that span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,

    /// Whether the client allows the server to actively cancel a semantic
    /// token request (e.g. because the document changed mid-request).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,

    /// Whether the client uses semantic tokens to augment existing syntax
    /// highlighting rather than replace it.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}
393
/// The kinds of semantic-token requests a client is willing to issue.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// The client will send `textDocument/semanticTokens/range` requests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// The client will send `textDocument/semanticTokens/full` requests,
    /// optionally with delta support.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
405
/// Support for full-document semantic tokens: either a plain boolean or an
/// object carrying an optional `delta` flag. `untagged` so both JSON shapes
/// (`true` / `{"delta": true}`) round-trip.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        /// Whether deltas against a previous full result are supported.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}
418
/// Server-side options announced for semantic tokens.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    // Flattened so `workDoneProgress` appears at this object's top level.
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,

    /// The legend mapping numeric token data back to types and modifiers.
    pub legend: SemanticTokensLegend,

    /// Whether the server provides tokens for a sub-range of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// Whether the server provides tokens for a full document, and whether
    /// it supports deltas.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
438
/// Registration options for semantic tokens: document selector, token
/// options, and static-registration id, all flattened into one JSON object.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,

    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,

    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}
451
/// What a server announces in its capabilities: plain options or full
/// registration options. `untagged`, so the JSON shape picks the variant.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}
459
460impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
461 fn from(from: SemanticTokensOptions) -> Self {
462 Self::SemanticTokensOptions(from)
463 }
464}
465
466impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
467 fn from(from: SemanticTokensRegistrationOptions) -> Self {
468 Self::SemanticTokensRegistrationOptions(from)
469 }
470}
471
472#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
473#[serde(rename_all = "camelCase")]
474pub struct SemanticTokensWorkspaceClientCapabilities {
475 pub refresh_support: Option<bool>,
483}
484
/// Parameters for `textDocument/semanticTokens/full`.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,
}
497
/// Parameters for `textDocument/semanticTokens/full/delta`.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,

    /// The `result_id` of a previous response; the delta is computed
    /// against that result.
    pub previous_result_id: String,
}
514
/// Parameters for `textDocument/semanticTokens/range`.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,

    /// The range within the document to compute tokens for.
    pub range: Range,
}
530
/// Response payload for a range request: the token set or a partial chunk.
/// `untagged`, so the JSON shape picks the variant.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}
538
539impl From<SemanticTokens> for SemanticTokensRangeResult {
540 fn from(tokens: SemanticTokens) -> Self {
541 Self::Tokens(tokens)
542 }
543}
544
545impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
546 fn from(partial: SemanticTokensPartialResult) -> Self {
547 Self::Partial(partial)
548 }
549}
550
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    /// Tokens serialize to the flat `u32` wire encoding: five integers per
    /// token, concatenated in order.
    #[test]
    fn test_semantic_tokens_support_serialization() {
        // Empty token list -> empty data array.
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );

        // One token -> exactly five integers.
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );

        // Two tokens -> ten integers, in token order.
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    /// The inverse of the serialization test: flat integer arrays decode back
    /// into structured tokens.
    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    /// A data array whose length is not a multiple of 5 must be rejected with
    /// the custom error from `deserialize_tokens`.
    #[test]
    #[should_panic = "Length is not divisible by 5"]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(r#"{"data":[1]}"#, &SemanticTokens::default());
    }

    /// Edits round-trip, including the optional `data` field: present as a
    /// flat array, or entirely absent when `None`.
    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );

        // Missing "data" key deserializes to None (serde `default`).
        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    /// Serialization counterpart of the edit test above.
    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );

        // None data is skipped entirely (skip_serializing_if).
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}