1use std::borrow::Cow;
2
3use serde::ser::SerializeSeq;
4use serde::{Deserialize, Serialize};
5
6use crate::{
7 PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
8 TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
9};
10#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
16pub struct SemanticTokenType(Cow<'static, str>);
17
18impl SemanticTokenType {
19 pub const NAMESPACE: Self = Self::new("namespace");
20 pub const TYPE: Self = Self::new("type");
21 pub const CLASS: Self = Self::new("class");
22 pub const ENUM: Self = Self::new("enum");
23 pub const INTERFACE: Self = Self::new("interface");
24 pub const STRUCT: Self = Self::new("struct");
25 pub const TYPE_PARAMETER: Self = Self::new("typeParameter");
26 pub const PARAMETER: Self = Self::new("parameter");
27 pub const VARIABLE: Self = Self::new("variable");
28 pub const PROPERTY: Self = Self::new("property");
29 pub const ENUM_MEMBER: Self = Self::new("enumMember");
30 pub const EVENT: Self = Self::new("event");
31 pub const FUNCTION: Self = Self::new("function");
32 pub const METHOD: Self = Self::new("method");
33 pub const MACRO: Self = Self::new("macro");
34 pub const KEYWORD: Self = Self::new("keyword");
35 pub const MODIFIER: Self = Self::new("modifier");
36 pub const COMMENT: Self = Self::new("comment");
37 pub const STRING: Self = Self::new("string");
38 pub const NUMBER: Self = Self::new("number");
39 pub const REGEXP: Self = Self::new("regexp");
40 pub const OPERATOR: Self = Self::new("operator");
41
42 pub const DECORATOR: Self = Self::new("decorator");
44
45 #[must_use]
46 pub const fn new(tag: &'static str) -> Self {
47 Self(Cow::Borrowed(tag))
48 }
49
50 #[must_use]
51 pub fn as_str(&self) -> &str {
52 &self.0
53 }
54}
55
56impl From<String> for SemanticTokenType {
57 fn from(from: String) -> Self {
58 Self(Cow::from(from))
59 }
60}
61
62impl From<&'static str> for SemanticTokenType {
63 fn from(from: &'static str) -> Self {
64 Self::new(from)
65 }
66}
67
68#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
74pub struct SemanticTokenModifier(Cow<'static, str>);
75
76impl SemanticTokenModifier {
77 pub const DECLARATION: Self = Self::new("declaration");
78 pub const DEFINITION: Self = Self::new("definition");
79 pub const READONLY: Self = Self::new("readonly");
80 pub const STATIC: Self = Self::new("static");
81 pub const DEPRECATED: Self = Self::new("deprecated");
82 pub const ABSTRACT: Self = Self::new("abstract");
83 pub const ASYNC: Self = Self::new("async");
84 pub const MODIFICATION: Self = Self::new("modification");
85 pub const DOCUMENTATION: Self = Self::new("documentation");
86 pub const DEFAULT_LIBRARY: Self = Self::new("defaultLibrary");
87
88 #[must_use]
89 pub const fn new(tag: &'static str) -> Self {
90 Self(Cow::Borrowed(tag))
91 }
92
93 #[must_use]
94 pub fn as_str(&self) -> &str {
95 &self.0
96 }
97}
98
99impl From<String> for SemanticTokenModifier {
100 fn from(from: String) -> Self {
101 Self(Cow::from(from))
102 }
103}
104
105impl From<&'static str> for SemanticTokenModifier {
106 fn from(from: &'static str) -> Self {
107 Self::new(from)
108 }
109}
110
111#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
112pub struct TokenFormat(Cow<'static, str>);
113
114impl TokenFormat {
115 pub const RELATIVE: Self = Self::new("relative");
116
117 #[must_use]
118 pub const fn new(tag: &'static str) -> Self {
119 Self(Cow::Borrowed(tag))
120 }
121
122 #[must_use]
123 pub fn as_str(&self) -> &str {
124 &self.0
125 }
126}
127
128impl From<String> for TokenFormat {
129 fn from(from: String) -> Self {
130 Self(Cow::from(from))
131 }
132}
133
134impl From<&'static str> for TokenFormat {
135 fn from(from: &'static str) -> Self {
136 Self::new(from)
137 }
138}
139
/// The legend that maps the integer encoding used by [`SemanticToken`] back to
/// names: a token's `token_type` indexes into `token_types`, and each set bit
/// of `token_modifiers_bitset` selects an entry of `token_modifiers` —
/// NOTE(review): mapping inferred from the field names; confirm against the
/// LSP specification.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    // The token types a server uses.
    pub token_types: Vec<SemanticTokenType>,

    // The token modifiers a server uses.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}
150
/// A single decoded semantic token.
///
/// On the wire this is five consecutive `u32`s (see
/// `SemanticToken::serialize_tokens`); positions are deltas relative to the
/// previous token (`delta_line` / `delta_start`).
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    pub delta_line: u32,
    pub delta_start: u32,
    pub length: u32,
    // Index into the legend's `token_types`.
    pub token_type: u32,
    // Bit set over the legend's `token_modifiers`.
    pub token_modifiers_bitset: u32,
}
160
161impl SemanticToken {
162 fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<Self>, D::Error>
163 where
164 D: serde::Deserializer<'de>,
165 {
166 let data = Vec::<u32>::deserialize(deserializer)?;
167 let chunks = data.chunks_exact(5);
168
169 if !chunks.remainder().is_empty() {
170 return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
171 }
172
173 Result::Ok(
174 chunks
175 .map(|chunk| Self {
176 delta_line: chunk[0],
177 delta_start: chunk[1],
178 length: chunk[2],
179 token_type: chunk[3],
180 token_modifiers_bitset: chunk[4],
181 })
182 .collect(),
183 )
184 }
185
186 fn serialize_tokens<S>(tokens: &[Self], serializer: S) -> Result<S::Ok, S::Error>
187 where
188 S: serde::Serializer,
189 {
190 let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
191 for token in tokens {
192 seq.serialize_element(&token.delta_line)?;
193 seq.serialize_element(&token.delta_start)?;
194 seq.serialize_element(&token.length)?;
195 seq.serialize_element(&token.token_type)?;
196 seq.serialize_element(&token.token_modifiers_bitset)?;
197 }
198 seq.end()
199 }
200
201 fn deserialize_tokens_opt<'de, D>(deserializer: D) -> Result<Option<Vec<Self>>, D::Error>
202 where
203 D: serde::Deserializer<'de>,
204 {
205 #[derive(Deserialize)]
206 #[serde(transparent)]
207 struct Wrapper {
208 #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
209 tokens: Vec<SemanticToken>,
210 }
211
212 Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
213 }
214
215 #[expect(clippy::ref_option)]
216 fn serialize_tokens_opt<S>(data: &Option<Vec<Self>>, serializer: S) -> Result<S::Ok, S::Error>
217 where
218 S: serde::Serializer,
219 {
220 #[derive(Serialize)]
221 #[serde(transparent)]
222 struct Wrapper {
223 #[serde(serialize_with = "SemanticToken::serialize_tokens")]
224 tokens: Vec<SemanticToken>,
225 }
226
227 let opt = data.as_ref().map(|t| Wrapper { tokens: t.clone() });
228
229 opt.serialize(serializer)
230 }
231}
232
/// The full result of a semantic tokens request.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    // An optional result id; omitted from the wire when `None`.
    // NOTE(review): presumably echoed back by clients as
    // `previous_result_id` in `SemanticTokensDeltaParams` — confirm.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,

    // The tokens, (de)serialized as a flat `[u32]` array with five entries
    // per token (see `SemanticToken::serialize_tokens`).
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
253
/// A partial (streamed) semantic tokens result: token data only, no result id.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    // Same packed `[u32]` encoding as `SemanticTokens::data`.
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
264
/// Response payload for a full semantic tokens request: either a complete
/// result or a partial one. `untagged` means the JSON shape alone decides
/// which variant deserializes.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensResult {
    fn from(from: SemanticTokensPartialResult) -> Self {
        Self::Partial(from)
    }
}
284
/// A single edit in a semantic tokens delta: replace `delete_count` integers
/// of the previous result's packed data, starting at index `start`, with the
/// integers encoded by `data`.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    // Offset into the packed `u32` array, not a token index.
    pub start: u32,
    pub delete_count: u32,

    // Optional replacement tokens; absent on the wire when `None`.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}
300
/// Response payload for a semantic tokens delta request: a full result, a
/// delta against the previous result, or a partial delta (untagged — the JSON
/// shape selects the variant).
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}

impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokensDelta) -> Self {
        Self::TokensDelta(from)
    }
}
321
/// A delta between two semantic tokens results, expressed as edits over the
/// previous result's packed data.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    // Edits to transform a previous result into a new result.
    pub edits: Vec<SemanticTokensEdit>,
}
332
/// Client capabilities for semantic tokens.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    // Whether the client supports dynamic registration for this feature.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    // Which request flavors (range / full / full-delta) the client supports.
    pub requests: SemanticTokensClientCapabilitiesRequests,

    // The token types the client supports.
    pub token_types: Vec<SemanticTokenType>,

    // The token modifiers the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,

    // The formats the client supports (currently only `TokenFormat::RELATIVE`
    // is defined in this file).
    pub formats: Vec<TokenFormat>,

    // Whether the client supports tokens that can overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,

    // Whether the client supports tokens that can span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,

    // NOTE(review): presumably whether the client allows the server to
    // cancel a request and have the client re-issue it — confirm against the
    // LSP specification.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,

    // NOTE(review): presumably whether the client uses semantic tokens to
    // augment its syntax-based highlighting — confirm against the LSP
    // specification.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}
394
/// Which semantic tokens requests a client is willing to send.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    // `true` if the client will send the range request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    // Set if the client will send the full-document request; the variant
    // carries whether deltas are supported too.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
406
/// Support for full-document semantic tokens: either a plain boolean or an
/// object form that additionally advertises delta support (untagged — the
/// JSON shape selects the variant).
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        // `true` if deltas on top of a full result are supported.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}
419
/// Server options for the semantic tokens feature.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    // Flattened: `workDoneProgress` appears at this object's top level.
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,

    // The legend used by the server to encode tokens.
    pub legend: SemanticTokensLegend,

    // Server supports providing tokens for a specific range of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    // Server supports providing tokens for a full document (optionally with
    // deltas — see `SemanticTokensFullOptions`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
439
/// Registration options for semantic tokens: document selector + feature
/// options + static registration id, all flattened into one JSON object.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,

    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,

    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}
452
/// What a server announces for semantic tokens: plain options or full
/// registration options (untagged — the JSON shape selects the variant;
/// note both variants flatten to overlapping shapes, so ordering matters).
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}

impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensOptions) -> Self {
        Self::SemanticTokensOptions(from)
    }
}

impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensRegistrationOptions) -> Self {
        Self::SemanticTokensRegistrationOptions(from)
    }
}
472
473#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
474#[serde(rename_all = "camelCase")]
475pub struct SemanticTokensWorkspaceClientCapabilities {
476 pub refresh_support: Option<bool>,
484}
485
/// Parameters for a full-document semantic tokens request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    // The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,
}
498
/// Parameters for a semantic tokens delta request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    // The document to compute the delta for.
    pub text_document: TextDocumentIdentifier,

    // The result id of a previous response; the delta is computed against
    // that result (see `SemanticTokens::result_id`).
    pub previous_result_id: String,
}
515
/// Parameters for a ranged semantic tokens request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    // The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,

    // The range the tokens are requested for.
    pub range: Range,
}
531
/// Response payload for a ranged semantic tokens request: full or partial
/// (untagged — the JSON shape selects the variant).
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensRangeResult {
    fn from(tokens: SemanticTokens) -> Self {
        Self::Tokens(tokens)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
    fn from(partial: SemanticTokensPartialResult) -> Self {
        Self::Partial(partial)
    }
}
551
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    // Round-trip coverage of the packed 5-integers-per-token wire encoding:
    // empty, single-token, and multi-token cases.
    #[test]
    fn test_semantic_tokens_support_serialization() {
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    // Mirror of the serialization cases above, going JSON -> struct.
    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    // A data array whose length is not a multiple of 5 must be rejected by
    // `SemanticToken::deserialize_tokens`.
    #[test]
    #[should_panic = "Length is not divisible by 5"]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(r#"{"data":[1]}"#, &SemanticTokens::default());
    }

    // `SemanticTokensEdit.data` is optional: both present and absent forms
    // must deserialize (exercises `deserialize_tokens_opt`).
    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );

        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    // Serialization side of the optional `data` field: `None` is omitted from
    // the output entirely (exercises `serialize_tokens_opt`).
    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );

        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}