use std::borrow::Cow;
use serde::ser::SerializeSeq;
use serde::{Deserialize, Serialize};
use crate::{
PartialResultParams, Range, StaticRegistrationOptions, TextDocumentIdentifier,
TextDocumentRegistrationOptions, WorkDoneProgressOptions, WorkDoneProgressParams,
};
/// A semantic token type: one of the predefined string identifiers from the
/// LSP specification (see the associated constants) or a custom,
/// server-defined value.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenType(Cow<'static, str>);
impl SemanticTokenType {
    pub const NAMESPACE: Self = Self::new("namespace");
    pub const TYPE: Self = Self::new("type");
    pub const CLASS: Self = Self::new("class");
    pub const ENUM: Self = Self::new("enum");
    pub const INTERFACE: Self = Self::new("interface");
    pub const STRUCT: Self = Self::new("struct");
    pub const TYPE_PARAMETER: Self = Self::new("typeParameter");
    pub const PARAMETER: Self = Self::new("parameter");
    pub const VARIABLE: Self = Self::new("variable");
    pub const PROPERTY: Self = Self::new("property");
    pub const ENUM_MEMBER: Self = Self::new("enumMember");
    pub const EVENT: Self = Self::new("event");
    pub const FUNCTION: Self = Self::new("function");
    pub const METHOD: Self = Self::new("method");
    pub const MACRO: Self = Self::new("macro");
    pub const KEYWORD: Self = Self::new("keyword");
    pub const MODIFIER: Self = Self::new("modifier");
    pub const COMMENT: Self = Self::new("comment");
    pub const STRING: Self = Self::new("string");
    pub const NUMBER: Self = Self::new("number");
    pub const REGEXP: Self = Self::new("regexp");
    pub const OPERATOR: Self = Self::new("operator");
    pub const DECORATOR: Self = Self::new("decorator");

    /// Builds a token type from a static identifier without allocating.
    #[must_use]
    pub const fn new(name: &'static str) -> Self {
        Self(Cow::Borrowed(name))
    }

    /// Returns the token type's string identifier.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &*self.0
    }
}
impl From<String> for SemanticTokenType {
    /// Wraps an owned, runtime-provided identifier.
    fn from(value: String) -> Self {
        Self(Cow::Owned(value))
    }
}

impl From<&'static str> for SemanticTokenType {
    /// Wraps a static identifier without allocating.
    fn from(value: &'static str) -> Self {
        Self(Cow::Borrowed(value))
    }
}
/// A semantic token modifier: one of the predefined string identifiers from
/// the LSP specification (see the associated constants) or a custom,
/// server-defined value.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct SemanticTokenModifier(Cow<'static, str>);
impl SemanticTokenModifier {
    pub const DECLARATION: Self = Self::new("declaration");
    pub const DEFINITION: Self = Self::new("definition");
    pub const READONLY: Self = Self::new("readonly");
    pub const STATIC: Self = Self::new("static");
    pub const DEPRECATED: Self = Self::new("deprecated");
    pub const ABSTRACT: Self = Self::new("abstract");
    pub const ASYNC: Self = Self::new("async");
    pub const MODIFICATION: Self = Self::new("modification");
    pub const DOCUMENTATION: Self = Self::new("documentation");
    pub const DEFAULT_LIBRARY: Self = Self::new("defaultLibrary");

    /// Builds a token modifier from a static identifier without allocating.
    #[must_use]
    pub const fn new(name: &'static str) -> Self {
        Self(Cow::Borrowed(name))
    }

    /// Returns the token modifier's string identifier.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &*self.0
    }
}
impl From<String> for SemanticTokenModifier {
    /// Wraps an owned, runtime-provided identifier.
    fn from(value: String) -> Self {
        Self(Cow::Owned(value))
    }
}

impl From<&'static str> for SemanticTokenModifier {
    /// Wraps a static identifier without allocating.
    fn from(value: &'static str) -> Self {
        Self(Cow::Borrowed(value))
    }
}
/// The format of the semantic token position encoding. The LSP currently
/// defines only [`TokenFormat::RELATIVE`].
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd, Clone, Deserialize, Serialize)]
pub struct TokenFormat(Cow<'static, str>);
impl TokenFormat {
    pub const RELATIVE: Self = Self::new("relative");

    /// Builds a token format from a static identifier without allocating.
    #[must_use]
    pub const fn new(name: &'static str) -> Self {
        Self(Cow::Borrowed(name))
    }

    /// Returns the token format's string identifier.
    #[must_use]
    pub fn as_str(&self) -> &str {
        &*self.0
    }
}
impl From<String> for TokenFormat {
    /// Wraps an owned, runtime-provided identifier.
    fn from(value: String) -> Self {
        Self(Cow::Owned(value))
    }
}

impl From<&'static str> for TokenFormat {
    /// Wraps a static identifier without allocating.
    fn from(value: &'static str) -> Self {
        Self(Cow::Borrowed(value))
    }
}
/// The legend that maps the numeric values in [`SemanticToken`] back to
/// meaning: `token_type` indexes into `token_types`, and each set bit of
/// `token_modifiers_bitset` indexes into `token_modifiers`.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensLegend {
    /// The token types the server uses, in index order.
    pub token_types: Vec<SemanticTokenType>,
    /// The token modifiers the server uses, in bit-position order.
    pub token_modifiers: Vec<SemanticTokenModifier>,
}
/// A single semantic token. On the wire each token is encoded as five `u32`s
/// (see [`SemanticToken::serialize_tokens`]), with positions expressed
/// relative to the previous token in the same result.
#[derive(Debug, Eq, PartialEq, Copy, Clone, Default)]
pub struct SemanticToken {
    /// Line delta relative to the previous token (`0` = same line).
    pub delta_line: u32,
    /// Start-character delta: relative to the previous token's start when on
    /// the same line, otherwise relative to the start of this token's line.
    pub delta_start: u32,
    /// Length of the token in characters.
    pub length: u32,
    /// Index into [`SemanticTokensLegend::token_types`].
    pub token_type: u32,
    /// Bit set whose set bits index into [`SemanticTokensLegend::token_modifiers`].
    pub token_modifiers_bitset: u32,
}
impl SemanticToken {
fn deserialize_tokens<'de, D>(deserializer: D) -> Result<Vec<Self>, D::Error>
where
D: serde::Deserializer<'de>,
{
let data = Vec::<u32>::deserialize(deserializer)?;
let chunks = data.chunks_exact(5);
if !chunks.remainder().is_empty() {
return Result::Err(serde::de::Error::custom("Length is not divisible by 5"));
}
Result::Ok(
chunks
.map(|chunk| Self {
delta_line: chunk[0],
delta_start: chunk[1],
length: chunk[2],
token_type: chunk[3],
token_modifiers_bitset: chunk[4],
})
.collect(),
)
}
fn serialize_tokens<S>(tokens: &[Self], serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut seq = serializer.serialize_seq(Some(tokens.len() * 5))?;
for token in tokens {
seq.serialize_element(&token.delta_line)?;
seq.serialize_element(&token.delta_start)?;
seq.serialize_element(&token.length)?;
seq.serialize_element(&token.token_type)?;
seq.serialize_element(&token.token_modifiers_bitset)?;
}
seq.end()
}
fn deserialize_tokens_opt<'de, D>(deserializer: D) -> Result<Option<Vec<Self>>, D::Error>
where
D: serde::Deserializer<'de>,
{
#[derive(Deserialize)]
#[serde(transparent)]
struct Wrapper {
#[serde(deserialize_with = "SemanticToken::deserialize_tokens")]
tokens: Vec<SemanticToken>,
}
Ok(Option::<Wrapper>::deserialize(deserializer)?.map(|wrapper| wrapper.tokens))
}
#[expect(clippy::ref_option)]
fn serialize_tokens_opt<S>(data: &Option<Vec<Self>>, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
#[derive(Serialize)]
#[serde(transparent)]
struct Wrapper {
#[serde(serialize_with = "SemanticToken::serialize_tokens")]
tokens: Vec<SemanticToken>,
}
let opt = data.as_ref().map(|t| Wrapper { tokens: t.clone() });
opt.serialize(serializer)
}
}
/// A full semantic tokens response for a document or range.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokens {
    /// An optional result id; a server that supports deltas can use it in a
    /// subsequent `semanticTokens/full/delta` request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// The tokens, transported as a flat `u32` array (5 values per token).
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
/// A partial (streamed) semantic tokens result, delivered via `$/progress`.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensPartialResult {
    /// The tokens, transported as a flat `u32` array (5 values per token).
    #[serde(
        deserialize_with = "SemanticToken::deserialize_tokens",
        serialize_with = "SemanticToken::serialize_tokens"
    )]
    pub data: Vec<SemanticToken>,
}
/// Response to a `textDocument/semanticTokens/full` request: either the full
/// token set or a partial result.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}
impl From<SemanticTokens> for SemanticTokensResult {
fn from(from: SemanticTokens) -> Self {
Self::Tokens(from)
}
}
impl From<SemanticTokensPartialResult> for SemanticTokensResult {
fn from(from: SemanticTokensPartialResult) -> Self {
Self::Partial(from)
}
}
/// A single edit in a semantic tokens delta: splice `data` into the previous
/// result's flat `u32` array, replacing `delete_count` values at `start`.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensEdit {
    /// Start offset into the previous flat `u32` array.
    pub start: u32,
    /// Number of `u32` values to delete.
    pub delete_count: u32,
    /// Optional replacement tokens; omitted from the wire when `None`.
    #[serde(
        default,
        skip_serializing_if = "Option::is_none",
        deserialize_with = "SemanticToken::deserialize_tokens_opt",
        serialize_with = "SemanticToken::serialize_tokens_opt"
    )]
    pub data: Option<Vec<SemanticToken>>,
}
/// Response to a `textDocument/semanticTokens/full/delta` request: the server
/// may answer with a full token set, a delta, or a partial delta.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullDeltaResult {
    Tokens(SemanticTokens),
    TokensDelta(SemanticTokensDelta),
    PartialTokensDelta { edits: Vec<SemanticTokensEdit> },
}
impl From<SemanticTokens> for SemanticTokensFullDeltaResult {
fn from(from: SemanticTokens) -> Self {
Self::Tokens(from)
}
}
impl From<SemanticTokensDelta> for SemanticTokensFullDeltaResult {
fn from(from: SemanticTokensDelta) -> Self {
Self::TokensDelta(from)
}
}
/// A set of edits transforming a previous semantic tokens result into the
/// current one.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDelta {
    /// Result id usable in a subsequent delta request.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result_id: Option<String>,
    /// The edits to apply to the previous result's flat `u32` array.
    pub edits: Vec<SemanticTokensEdit>,
}
/// Client capabilities for the semantic tokens feature
/// (`textDocument.semanticTokens`).
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilities {
    /// Whether the client supports dynamic registration.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub dynamic_registration: Option<bool>,
    /// Which semantic token requests the client supports.
    pub requests: SemanticTokensClientCapabilitiesRequests,
    /// The token types the client recognizes.
    pub token_types: Vec<SemanticTokenType>,
    /// The token modifiers the client recognizes.
    pub token_modifiers: Vec<SemanticTokenModifier>,
    /// The position-encoding formats the client supports.
    pub formats: Vec<TokenFormat>,
    /// Whether the client tolerates overlapping tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub overlapping_token_support: Option<bool>,
    /// Whether the client tolerates multi-line tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub multiline_token_support: Option<bool>,
    /// Whether the client handles servers canceling a request and asking the
    /// client to retry.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_cancel_support: Option<bool>,
    /// Whether the client uses semantic tokens to augment (rather than
    /// replace) its syntax highlighting.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub augments_syntax_tokens: Option<bool>,
}
/// The semantic token request kinds a client supports.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensClientCapabilitiesRequests {
    /// Support for `textDocument/semanticTokens/range` requests.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,
    /// Support for `textDocument/semanticTokens/full` requests, optionally
    /// with delta support.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
/// "Full document" semantic tokens support: either a plain boolean, or an
/// object form that can additionally advertise delta support.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensFullOptions {
    Bool(bool),
    Delta {
        /// Whether `textDocument/semanticTokens/full/delta` is supported.
        #[serde(skip_serializing_if = "Option::is_none")]
        delta: Option<bool>,
    },
}
/// Server options for the semantic tokens feature.
#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensOptions {
    #[serde(flatten)]
    pub work_done_progress_options: WorkDoneProgressOptions,
    /// The legend the server uses to encode token types and modifiers.
    pub legend: SemanticTokensLegend,
    /// Whether the server provides tokens for a document range.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub range: Option<bool>,
    /// Whether the server provides tokens for a full document, and whether
    /// deltas are supported.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full: Option<SemanticTokensFullOptions>,
}
/// Registration options for dynamically registering semantic tokens support;
/// flattens document-selector, semantic-tokens and static-registration options
/// into one JSON object.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRegistrationOptions {
    #[serde(flatten)]
    pub text_document_registration_options: TextDocumentRegistrationOptions,
    #[serde(flatten)]
    pub semantic_tokens_options: SemanticTokensOptions,
    #[serde(flatten)]
    pub static_registration_options: StaticRegistrationOptions,
}
/// The server's advertised semantic tokens capability: plain options or full
/// registration options.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensServerCapabilities {
    SemanticTokensOptions(SemanticTokensOptions),
    SemanticTokensRegistrationOptions(SemanticTokensRegistrationOptions),
}
impl From<SemanticTokensOptions> for SemanticTokensServerCapabilities {
fn from(from: SemanticTokensOptions) -> Self {
Self::SemanticTokensOptions(from)
}
}
impl From<SemanticTokensRegistrationOptions> for SemanticTokensServerCapabilities {
fn from(from: SemanticTokensRegistrationOptions) -> Self {
Self::SemanticTokensRegistrationOptions(from)
}
}
/// Workspace-level client capabilities for semantic tokens
/// (`workspace.semanticTokens`).
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensWorkspaceClientCapabilities {
    /// Whether the client supports the `workspace/semanticTokens/refresh`
    /// request sent from the server to the client.
    // Consistency fix: every other optional capability field in this module
    // omits the key when `None` instead of emitting `"refreshSupport":null`.
    // serde still deserializes a missing `Option` field as `None`, so
    // round-tripping is unaffected.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub refresh_support: Option<bool>,
}
/// Parameters for a `textDocument/semanticTokens/full` request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,
    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,
    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,
}
/// Parameters for a `textDocument/semanticTokens/full/delta` request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensDeltaParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,
    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,
    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,
    /// The result id from a previous full (or delta) response that the delta
    /// should be computed against.
    pub previous_result_id: String,
}
/// Parameters for a `textDocument/semanticTokens/range` request.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SemanticTokensRangeParams {
    #[serde(flatten)]
    pub work_done_progress_params: WorkDoneProgressParams,
    #[serde(flatten)]
    pub partial_result_params: PartialResultParams,
    /// The document to compute tokens for.
    pub text_document: TextDocumentIdentifier,
    /// The range to restrict the tokens to.
    pub range: Range,
}
/// Response to a `textDocument/semanticTokens/range` request: either the
/// tokens for the range or a partial result.
#[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum SemanticTokensRangeResult {
    Tokens(SemanticTokens),
    Partial(SemanticTokensPartialResult),
}
impl From<SemanticTokens> for SemanticTokensRangeResult {
fn from(tokens: SemanticTokens) -> Self {
Self::Tokens(tokens)
}
}
impl From<SemanticTokensPartialResult> for SemanticTokensRangeResult {
fn from(partial: SemanticTokensPartialResult) -> Self {
Self::Partial(partial)
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{test_deserialization, test_serialization};

    /// Serializing 0, 1 and 2 tokens produces the flat 5-integers-per-token
    /// wire array.
    #[test]
    fn test_semantic_tokens_support_serialization() {
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
            r#"{"data":[]}"#,
        );
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
            r#"{"data":[2,5,3,0,3]}"#,
        );
        test_serialization(
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
    }

    /// Deserializing flat arrays of length 0, 5 and 10 yields the structured
    /// tokens (inverse of the serialization test above).
    #[test]
    fn test_semantic_tokens_support_deserialization() {
        test_deserialization(
            r#"{"data":[]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![],
            },
        );
        test_deserialization(
            r#"{"data":[2,5,3,0,3]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![SemanticToken {
                    delta_line: 2,
                    delta_start: 5,
                    length: 3,
                    token_type: 0,
                    token_modifiers_bitset: 3,
                }],
            },
        );
        test_deserialization(
            r#"{"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokens {
                result_id: None,
                data: vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ],
            },
        );
    }

    /// A data array whose length is not a multiple of 5 must be rejected.
    #[test]
    #[should_panic = "Length is not divisible by 5"]
    fn test_semantic_tokens_support_deserialization_err() {
        test_deserialization(r#"{"data":[1]}"#, &SemanticTokens::default());
    }

    /// Edits deserialize with `data` both present (flat array) and absent
    /// (`None`).
    #[test]
    fn test_semantic_tokens_edit_support_deserialization() {
        test_deserialization(
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
        );
        test_deserialization(
            r#"{"start":0,"deleteCount":1}"#,
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
        );
    }

    /// Edits serialize with `data` as a flat array when `Some`, and omit the
    /// key entirely when `None`.
    #[test]
    fn test_semantic_tokens_edit_support_serialization() {
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: Some(vec![
                    SemanticToken {
                        delta_line: 2,
                        delta_start: 5,
                        length: 3,
                        token_type: 0,
                        token_modifiers_bitset: 3,
                    },
                    SemanticToken {
                        delta_line: 0,
                        delta_start: 5,
                        length: 4,
                        token_type: 1,
                        token_modifiers_bitset: 0,
                    },
                ]),
            },
            r#"{"start":0,"deleteCount":1,"data":[2,5,3,0,3,0,5,4,1,0]}"#,
        );
        test_serialization(
            &SemanticTokensEdit {
                start: 0,
                delete_count: 1,
                data: None,
            },
            r#"{"start":0,"deleteCount":1}"#,
        );
    }
}