
ls_types/semantic_tokens.rs

use std::borrow::Cow;

use serde::ser::SerializeSeq;
use serde::{Deserialize, Serialize};

use crate::{
    PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
    TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
};
/// A set of predefined token types. This set is not fixed
/// and clients can specify additional token types via the
/// corresponding client capabilities.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);

impl SemanticTokenType {
    pub const NAMESPACE: Self = Self::new("namespace");
    pub const TYPE: Self = Self::new("type");
    pub const CLASS: Self = Self::new("class");
    pub const ENUM: Self = Self::new("enum");
    pub const INTERFACE: Self = Self::new("interface");
    pub const STRUCT: Self = Self::new("struct");
    pub const TYPE_PARAMETER: Self = Self::new("typeParameter");
    pub const PARAMETER: Self = Self::new("parameter");
    pub const VARIABLE: Self = Self::new("variable");
    pub const PROPERTY: Self = Self::new("property");
    pub const ENUM_MEMBER: Self = Self::new("enumMember");
    pub const EVENT: Self = Self::new("event");
    pub const FUNCTION: Self = Self::new("function");
    pub const METHOD: Self = Self::new("method");
    pub const MACRO: Self = Self::new("macro");
    pub const KEYWORD: Self = Self::new("keyword");
    pub const MODIFIER: Self = Self::new("modifier");
    pub const COMMENT: Self = Self::new("comment");
    pub const STRING: Self = Self::new("string");
    pub const NUMBER: Self = Self::new("number");
    pub const REGEXP: Self = Self::new("regexp");
    pub const OPERATOR: Self = Self::new("operator");

    /// @since 3.17.0
    pub const DECORATOR: Self = Self::new("decorator");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenType {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenType {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

/// A set of predefined token modifiers. This set is not fixed
/// and clients can specify additional token modifiers via the
/// corresponding client capabilities.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);

impl SemanticTokenModifier {
    pub const DECLARATION: Self = Self::new("declaration");
    pub const DEFINITION: Self = Self::new("definition");
    pub const READONLY: Self = Self::new("readonly");
    pub const STATIC: Self = Self::new("static");
    pub const DEPRECATED: Self = Self::new("deprecated");
    pub const ABSTRACT: Self = Self::new("abstract");
    pub const ASYNC: Self = Self::new("async");
    pub const MODIFICATION: Self = Self::new("modification");
    pub const DOCUMENTATION: Self = Self::new("documentation");
    pub const DEFAULT_LIBRARY: Self = Self::new("defaultLibrary");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenModifier {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenModifier {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);

impl TokenFormat {
    pub const RELATIVE: Self = Self::new("relative");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for TokenFormat {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for TokenFormat {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    /// The token types a server uses.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers a server uses.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}
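
// A minimal sketch of how a server might assemble a legend; the custom
// "label" token type is hypothetical and only meaningful to clients that
// advertised it. Positions in `token_types` are what servers later emit as
// `SemanticToken::token_type`, and each entry in `token_modifiers` maps to
// one bit of `SemanticToken::token_modifiers_bitset`.
#[allow(dead_code)]
fn example_legend() -> SemanticTokensLegend {
    SemanticTokensLegend {
        token_types: vec![
            SemanticTokenType::KEYWORD,      // token_type == 0
            SemanticTokenType::FUNCTION,     // token_type == 1
            SemanticTokenType::new("label"), // token_type == 2 (custom)
        ],
        token_modifiers: vec![
            SemanticTokenModifier::DECLARATION,     // bit 0 (mask 0b01)
            SemanticTokenModifier::DEFAULT_LIBRARY, // bit 1 (mask 0b10)
        ],
    }
}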

/// The actual tokens.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    pub delta_line: u32,
    pub delta_start: u32,
    pub length: u32,
    pub token_type: u32,
    pub token_modifiers_bitset: u32,
}

impl SemanticToken {
    fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<Self>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::<u32>::deserialize(deserializer)?;
        let chunks = data.chunks_exact(5);

        if !chunks.remainder().is_empty() {
            return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
        }

        Result::Ok(
            chunks
                .map(|chunk| Self {
                    delta_line: chunk[0],
                    delta_start: chunk[1],
                    length: chunk[2],
                    token_type: chunk[3],
                    token_modifiers_bitset: chunk[4],
                })
                .collect(),
        )
    }

    fn serialize_tokens<S>(tokens: &[Self], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
        for token in tokens {
            seq.serialize_element(&token.delta_line)?;
            seq.serialize_element(&token.delta_start)?;
            seq.serialize_element(&token.length)?;
            seq.serialize_element(&token.token_type)?;
            seq.serialize_element(&token.token_modifiers_bitset)?;
        }
        seq.end()
    }

    fn deserialize_tokens_opt<'de, D>(deserializer: D) -> Result<Option<Vec<Self>>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
    }

    #[expect(clippy::ref_option)]
    fn serialize_tokens_opt<S>(data: &Option<Vec<Self>>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        #[derive(Serialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(serialize_with = "SemanticToken::serialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        let opt = data.as_ref().map(|t| Wrapper { tokens: t.clone() });

        opt.serialize(serializer)
    }
}
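
// A minimal sketch (illustrative only) of producing the relative encoding
// used by `SemanticToken`: each token's `delta_line` is measured from the
// previous token's line, and `delta_start` from the previous token's start
// character when both tokens sit on the same line, otherwise from column 0.
// The input tuples `(line, start, length, token_type, modifiers)` are assumed
// to be sorted by position.
#[allow(dead_code)]
fn example_delta_encode(absolute: &[(u32, u32, u32, u32, u32)]) -> Vec<SemanticToken> {
    let mut prev_line = 0;
    let mut prev_start = 0;
    let mut tokens = Vec::with_capacity(absolute.len());
    for &(line, start, length, token_type, token_modifiers_bitset) in absolute {
        tokens.push(SemanticToken {
            delta_line: line - prev_line,
            delta_start: if line == prev_line { start - prev_start } else { start },
            length,
            token_type,
            token_modifiers_bitset,
        });
        prev_line = line;
        prev_start = start;
    }
    tokens
}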

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id. If provided and clients support delta updating,
    /// the client will include the result id in the next semantic token request.
    /// A server can then, instead of computing all semantic tokens again, simply
    /// send a delta.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,

    /// The actual tokens. For a detailed description of how the data is
    /// structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensResult {
    fn from(from: SemanticTokensPartialResult) -> Self {
        Self::Partial(from)
    }
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    pub start: u32,
    pub delete_count: u32,

    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}

impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokensDelta) -> Self {
        Self::TokensDelta(from)
    }
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// For a detailed description of how these edits are structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L131>
    pub edits: Vec<SemanticTokensEdit>,
}
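
// A minimal sketch (illustrative only) of a delta response built from these
// types. Per the specification, `start` and `delete_count` address positions
// in the flat integer array of the previously sent result (five integers per
// token), and the new `result_id` is what the client echoes back in its next
// delta request.
#[allow(dead_code)]
fn example_delta_response() -> SemanticTokensFullDeltaResult {
    SemanticTokensFullDeltaResult::TokensDelta(SemanticTokensDelta {
        result_id: Some("2".to_string()),
        edits: vec![SemanticTokensEdit {
            start: 5,        // keep the first token (integers 0..5) unchanged
            delete_count: 5, // drop the second token (integers 5..10)
            data: Some(vec![SemanticToken {
                delta_line: 0,
                delta_start: 7,
                length: 4,
                token_type: 1,
                token_modifiers_bitset: 0,
            }]),
        }],
    })
}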

/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the implementation supports dynamic registration. If this is set to `true`,
    /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
    /// return value for the corresponding server capability as well.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    /// Which requests the client supports and might send to the server
    /// depending on the server's capability. Please note that clients might not
    /// show semantic tokens or degrade some of the user experience if a range
    /// or full request is advertised by the client but not provided by the
    /// server. If, for example, the client capabilities `requests.full` and
    /// `requests.range` are both set to `true` but the server only provides a
    /// range provider, the client might not render a minimap correctly or might
    /// even decide to not show any semantic tokens at all.
    pub requests: SemanticTokensClientCapabilitiesRequests,

    /// The token types that the client supports.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers that the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,

    /// The token formats the client supports.
    pub formats: Vec<TokenFormat>,

    /// Whether the client supports tokens that can overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,

    /// Whether the client supports tokens that can span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,

    /// Whether the client allows the server to actively cancel a
    /// semantic token request, e.g. supports returning
    /// ErrorCodes.ServerCancelled. If a server does, the client
    /// needs to retrigger the request.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,

    /// Whether the client uses semantic tokens to augment existing
    /// syntax tokens. If set to `true`, client-side created syntax
    /// tokens and semantic tokens are both used for colorization. If
    /// set to `false`, the client only uses the returned semantic tokens
    /// for colorization.
    ///
    /// If the value is `undefined` then the client behavior is not
    /// specified.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler.
        /// The server supports deltas for full documents.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}
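
// A minimal sketch (illustrative only) of the `requests` capability a client
// might advertise: range requests plus full requests with delta support. The
// untagged `SemanticTokensFullOptions::Bool(true)` form is the shorthand used
// when no delta support is declared.
#[allow(dead_code)]
fn example_client_requests() -> SemanticTokensClientCapabilitiesRequests {
    SemanticTokensClientCapabilitiesRequests {
        range: Some(true),
        full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
    }
}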

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,

    /// The legend used by the server
    pub legend: SemanticTokensLegend,

    /// Server supports providing semantic tokens for a specific range
    /// of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// Server supports providing semantic tokens for a full document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
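
// A minimal sketch (illustrative only) of the options a server might announce,
// typically wrapped in `SemanticTokensServerCapabilities` or
// `SemanticTokensRegistrationOptions` further below.
#[allow(dead_code)]
fn example_server_options() -> SemanticTokensOptions {
    SemanticTokensOptions {
        work_done_progress_options: WorkDoneProgressOptions::default(),
        legend: SemanticTokensLegend {
            token_types: vec![SemanticTokenType::KEYWORD, SemanticTokenType::FUNCTION],
            token_modifiers: vec![SemanticTokenModifier::DECLARATION],
        },
        range: Some(true),
        full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
    }
}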

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,

    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,

    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}

impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensOptions) -> Self {
        Self::SemanticTokensOptions(from)
    }
}

impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensRegistrationOptions) -> Self {
        Self::SemanticTokensRegistrationOptions(from)
    }
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensWorkspaceClientCapabilities {
    /// Whether the client implementation supports a refresh request sent from
    /// the server to the client.
    ///
    /// Note that this event is global and will force the client to refresh all
    /// semantic tokens currently shown. It should be used with absolute care
    /// and is useful for situations where a server, for example, detects a
    /// project-wide change that requires such a calculation.
    pub refresh_support: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The result id of a previous response. The result id can either point to a full response
    /// or a delta response, depending on what was received last.
    pub previous_result_id: String,
}
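
// A minimal sketch (illustrative only) of a follow-up delta request. The
// `previous_result_id` echoes the `result_id` of the last full or delta
// response the client received for this document. This assumes that
// `WorkDoneProgressParams` and `PartialResultParams` implement `Default`,
// as they do in `lsp-types`.
#[allow(dead_code)]
fn example_delta_request(text_document: TextDocumentIdentifier) -> SemanticTokensDeltaParams {
    SemanticTokensDeltaParams {
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
        text_document,
        previous_result_id: "1".to_string(),
    }
}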

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The range the semantic tokens are requested for.
    pub range: Range,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensRangeResult {
    fn from(tokens: SemanticTokens) -> Self {
        Self::Tokens(tokens)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
    fn from(partial: SemanticTokensPartialResult) -> Self {
        Self::Partial(partial)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    #[test]
    fn test_semantic_tokens_support_serialization() {
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    #[test]
    #[should_panic = "Length is not divisible by 5"]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(r#"{"data":[1]}"#, &SemanticTokens::default());
    }

    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );

        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );

        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}