// ls_types/semantic_tokens.rs

use std::borrow::Cow;

use serde::ser::SerializeSeq;
use serde::{Deserialize, Serialize};

use crate::{
    PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
    TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
};

/// A set of predefined token types. This set is not fixed
/// and clients can specify additional token types via the
/// corresponding client capabilities.
///
/// @since 3.16.0
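///
/// Since the set is open-ended, additional token types can be defined as plain
/// strings, provided client and server agree on them. A minimal sketch
/// (illustrative values; assumes the type is in scope):
///
/// ```rust,ignore
/// // A compile-time constant custom token type.
/// const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime");
/// // A token type built from a runtime string, e.g. one announced by the client.
/// let custom = SemanticTokenType::from(String::from("customType"));
/// ```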
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);

impl SemanticTokenType {
    pub const NAMESPACE: Self = Self::new("namespace");
    pub const TYPE: Self = Self::new("type");
    pub const CLASS: Self = Self::new("class");
    pub const ENUM: Self = Self::new("enum");
    pub const INTERFACE: Self = Self::new("interface");
    pub const STRUCT: Self = Self::new("struct");
    pub const TYPE_PARAMETER: Self = Self::new("typeParameter");
    pub const PARAMETER: Self = Self::new("parameter");
    pub const VARIABLE: Self = Self::new("variable");
    pub const PROPERTY: Self = Self::new("property");
    pub const ENUM_MEMBER: Self = Self::new("enumMember");
    pub const EVENT: Self = Self::new("event");
    pub const FUNCTION: Self = Self::new("function");
    pub const METHOD: Self = Self::new("method");
    pub const MACRO: Self = Self::new("macro");
    pub const KEYWORD: Self = Self::new("keyword");
    pub const MODIFIER: Self = Self::new("modifier");
    pub const COMMENT: Self = Self::new("comment");
    pub const STRING: Self = Self::new("string");
    pub const NUMBER: Self = Self::new("number");
    pub const REGEXP: Self = Self::new("regexp");
    pub const OPERATOR: Self = Self::new("operator");

    /// @since 3.17.0
    pub const DECORATOR: Self = Self::new("decorator");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenType {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenType {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

/// A set of predefined token modifiers. This set is not fixed
/// and clients can specify additional token modifiers via the
/// corresponding client capabilities.
///
/// @since 3.16.0
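///
/// A token refers to modifiers by their indices in the legend's
/// `token_modifiers` list: bit `i` of `SemanticToken::token_modifiers_bitset`
/// is set when modifier `i` applies. A minimal sketch (assuming a legend of
/// `[DECLARATION, STATIC, READONLY]`):
///
/// ```rust,ignore
/// // "declaration" (bit 0) and "readonly" (bit 2) => 0b101 == 5.
/// let bitset: u32 = (1 << 0) | (1 << 2);
/// ```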
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);

impl SemanticTokenModifier {
    pub const DECLARATION: Self = Self::new("declaration");
    pub const DEFINITION: Self = Self::new("definition");
    pub const READONLY: Self = Self::new("readonly");
    pub const STATIC: Self = Self::new("static");
    pub const DEPRECATED: Self = Self::new("deprecated");
    pub const ABSTRACT: Self = Self::new("abstract");
    pub const ASYNC: Self = Self::new("async");
    pub const MODIFICATION: Self = Self::new("modification");
    pub const DOCUMENTATION: Self = Self::new("documentation");
    pub const DEFAULT_LIBRARY: Self = Self::new("defaultLibrary");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for SemanticTokenModifier {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for SemanticTokenModifier {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);

impl TokenFormat {
    pub const RELATIVE: Self = Self::new("relative");

    #[must_use]
    pub const fn new(tag: &'static str) -> Self {
        Self(Cow::Borrowed(tag))
    }

    #[must_use]
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for TokenFormat {
    fn from(from: String) -> Self {
        Self(Cow::from(from))
    }
}

impl From<&'static str> for TokenFormat {
    fn from(from: &'static str) -> Self {
        Self::new(from)
    }
}

/// @since 3.16.0
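///
/// The legend is sent once (typically as part of the server capabilities) and
/// every token afterwards refers to it by index. A minimal sketch, assuming the
/// types are in scope:
///
/// ```rust,ignore
/// let legend = SemanticTokensLegend {
///     token_types: vec![SemanticTokenType::FUNCTION, SemanticTokenType::VARIABLE],
///     token_modifiers: vec![SemanticTokenModifier::STATIC],
/// };
/// // A token with `token_type == 1` is a "variable"; modifier bit 0 means "static".
/// ```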
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    /// The token types a server uses.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers a server uses.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}

/// The actual tokens.
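///
/// Positions are encoded relative to the preceding token in the stream:
/// `delta_line` is relative to the previous token's line, and `delta_start` is
/// relative to the previous token's start character when both tokens are on the
/// same line (otherwise it is relative to the start of the line). `token_type`
/// is an index into `SemanticTokensLegend::token_types`, and
/// `token_modifiers_bitset` has bit `i` set for each applicable entry of
/// `SemanticTokensLegend::token_modifiers`.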
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    pub delta_line: u32,
    pub delta_start: u32,
    pub length: u32,
    pub token_type: u32,
    pub token_modifiers_bitset: u32,
}

impl SemanticToken {
    fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<Self>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let data = Vec::<u32>::deserialize(deserializer)?;
        let chunks = data.chunks_exact(5);

        if !chunks.remainder().is_empty() {
            return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
        }

        Result::Ok(
            chunks
                .map(|chunk| Self {
                    delta_line: chunk[0],
                    delta_start: chunk[1],
                    length: chunk[2],
                    token_type: chunk[3],
                    token_modifiers_bitset: chunk[4],
                })
                .collect(),
        )
    }

    fn serialize_tokens<S>(tokens: &[Self], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
        for token in tokens {
            seq.serialize_element(&token.delta_line)?;
            seq.serialize_element(&token.delta_start)?;
            seq.serialize_element(&token.length)?;
            seq.serialize_element(&token.token_type)?;
            seq.serialize_element(&token.token_modifiers_bitset)?;
        }
        seq.end()
    }

    fn deserialize_tokens_opt<'de, D>(deserializer: D) -> Result<Option<Vec<Self>>, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        #[derive(Deserialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
    }

    fn serialize_tokens_opt<S>(data: &Option<Vec<Self>>, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        #[derive(Serialize)]
        #[serde(transparent)]
        struct Wrapper {
            #[serde(serialize_with = "SemanticToken::serialize_tokens")]
            tokens: Vec<SemanticToken>,
        }

        let opt = data.as_ref().map(|t| Wrapper { tokens: t.clone() });

        opt.serialize(serializer)
    }
}

/// @since 3.16.0
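///
/// On the wire, `data` is a flat array of unsigned integers, five per token
/// (see the round-trip tests at the bottom of this file). For example, a single
/// token at delta line 2, delta start 5, length 3, token type 0 and modifier
/// bits 3 is serialized as:
///
/// ```text
/// {"data":[2,5,3,0,3]}
/// ```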
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id. If provided and clients support delta updating,
    /// the client will include the result id in the next semantic token request.
    /// A server can then, instead of computing all semantic tokens again, simply
    /// send a delta.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,

    /// The actual tokens. For a detailed description of how the data is
    /// structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71>
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensResult {
    fn from(from: SemanticTokensPartialResult) -> Self {
        Self::Partial(from)
    }
}

/// @since 3.16.0
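///
/// `start` and `delete_count` address the flat `u32` array of the previous
/// result (five integers per token), not the decoded token list. A minimal
/// sketch, assuming the types are in scope:
///
/// ```rust,ignore
/// // Replace the second encoded token (integers 5..10 of the previous result)
/// // with a single new token.
/// let edit = SemanticTokensEdit {
///     start: 5,
///     delete_count: 5,
///     data: Some(vec![SemanticToken {
///         delta_line: 0,
///         delta_start: 5,
///         length: 4,
///         token_type: 1,
///         token_modifiers_bitset: 0,
///     }]),
/// };
/// ```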
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    pub start: u32,
    pub delete_count: u32,

    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}

impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokens) -> Self {
        Self::Tokens(from)
    }
}

impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
    fn from(from: SemanticTokensDelta) -> Self {
        Self::TokensDelta(from)
    }
}

/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// For a detailed description of how these edits are structured, please see
    /// <https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L131>
    pub edits: Vec<SemanticTokensEdit>,
}

/// Capabilities specific to the `textDocument/semanticTokens/*` requests.
///
/// @since 3.16.0
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the implementation supports dynamic registration. If this is set to `true`,
    /// the client supports the new `(TextDocumentRegistrationOptions & StaticRegistrationOptions)`
    /// return value for the corresponding server capability as well.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,

    /// Which requests the client supports and might send to the server
    /// depending on the server's capability. Please note that clients might not
    /// show semantic tokens or degrade some of the user experience if a range
    /// or full request is advertised by the client but not provided by the
    /// server. If, for example, the client capabilities `requests.full` and
    /// `requests.range` are both set to true but the server only provides a
    /// range provider, the client might not render a minimap correctly or might
    /// even decide to not show any semantic tokens at all.
    pub requests: SemanticTokensClientCapabilitiesRequests,

    /// The token types that the client supports.
    pub token_types: Vec<SemanticTokenType>,

    /// The token modifiers that the client supports.
    pub token_modifiers: Vec<SemanticTokenModifier>,

    /// The token formats the client supports.
    pub formats: Vec<TokenFormat>,

    /// Whether the client supports tokens that can overlap each other.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,

    /// Whether the client supports tokens that can span multiple lines.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,

    /// Whether the client allows the server to actively cancel a
    /// semantic token request, e.g. supports returning
    /// ErrorCodes.ServerCancelled. If a server does, the client
    /// needs to retrigger the request.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,

    /// Whether the client uses semantic tokens to augment existing
    /// syntax tokens. If set to `true`, client-side created syntax
    /// tokens and semantic tokens are both used for colorization. If
    /// set to `false`, the client only uses the returned semantic tokens
    /// for colorization.
    ///
    /// If the value is `undefined` then the client behavior is not
    /// specified.
    ///
    /// @since 3.17.0
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// The client will send the `textDocument/semanticTokens/range` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// The client will send the `textDocument/semanticTokens/full` request if the server provides a corresponding handler.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}

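/// How support for the `textDocument/semanticTokens/full` request is advertised.
/// Because the enum is untagged, it accepts either a plain JSON boolean
/// (`"full": true`) or an object form (`"full": {"delta": true}`).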
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        /// The client will send the `textDocument/semanticTokens/full/delta` request if the server provides a corresponding handler.
        /// The server supports deltas for full documents.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}

/// @since 3.16.0
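///
/// A minimal sketch of a server advertising semantic tokens for both ranges and
/// full documents with delta support (illustrative; assumes the types are in
/// scope):
///
/// ```rust,ignore
/// let options = SemanticTokensOptions {
///     legend: SemanticTokensLegend {
///         token_types: vec![SemanticTokenType::FUNCTION, SemanticTokenType::VARIABLE],
///         token_modifiers: vec![SemanticTokenModifier::STATIC],
///     },
///     range: Some(true),
///     full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
///     ..Default::default()
/// };
/// ```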
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,

    /// The legend used by the server.
    pub legend: SemanticTokensLegend,

    /// Server supports providing semantic tokens for a specific range
    /// of a document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,

    /// Server supports providing semantic tokens for a full document.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,

    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,

    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}

impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensOptions) -> Self {
        Self::SemanticTokensOptions(from)
    }
}

impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
    fn from(from: SemanticTokensRegistrationOptions) -> Self {
        Self::SemanticTokensRegistrationOptions(from)
    }
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensWorkspaceClientCapabilities {
    /// Whether the client implementation supports a refresh request sent from
    /// the server to the client.
    ///
    /// Note that this event is global and will force the client to refresh all
    /// semantic tokens currently shown. It should be used with absolute care
    /// and is useful for situations where a server, for example, detects a
    /// project-wide change that requires such a recalculation.
    pub refresh_support: Option<bool>,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The result id of a previous response. The result id can either point to a full response
    /// or a delta response, depending on what was received last.
    pub previous_result_id: String,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,

    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,

    /// The text document.
    pub text_document: TextDocumentIdentifier,

    /// The range the semantic tokens are requested for.
    pub range: Range,
}

#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}

impl From<SemanticTokens> for SemanticTokensRangeResult {
    fn from(tokens: SemanticTokens) -> Self {
        Self::Tokens(tokens)
    }
}

impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
    fn from(partial: SemanticTokensPartialResult) -> Self {
        Self::Partial(partial)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    #[test]
    fn test_semantic_tokens_support_serialization() {
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );

        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );

        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    #[test]
    #[should_panic = "Length is not divisible by 5"]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(r#"{"data":[1]}"#, &SemanticTokens::default());
    }

    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );

        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );

        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}