//! oak_clojure/parser/element_type.rs
use oak_core::{ElementType, Parser, UniversalElementRole};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

/// Kinds of syntax-tree elements produced by the Clojure parser.
///
/// Variants mirror `crate::lexer::token_type::ClojureTokenType` where a
/// one-to-one mapping exists (see the `From` impl below); everything that is
/// purely lexical collapses into `Token`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum ClojureElementType {
    /// A leaf token (delimiters, literals, symbols, whitespace, comments, …).
    Token,
    /// A `(...)` list form.
    List,
    /// A `[...]` vector form.
    Vector,
    /// A `{...}` map form.
    Map,
    /// A `#{...}` set form.
    Set,
    /// A `#(...)` anonymous-function form.
    AnonFn,
    /// A quoted/unquoted form (quote, unquote, unquote-splice).
    Quotation,
    /// A `^meta` metadata form.
    Meta,
    /// The root of the parse tree.
    Root,
    /// A whole source file (also treated as a root — see `role()`).
    SourceFile,
    /// An element produced from unparseable input.
    Error,
}
32
33impl ElementType for ClojureElementType {
34 type Role = UniversalElementRole;
35
36 fn role(&self) -> Self::Role {
37 match self {
38 Self::Root => UniversalElementRole::Root,
39 Self::SourceFile => UniversalElementRole::Root,
40 Self::Error => UniversalElementRole::Error,
41 Self::List | Self::Vector | Self::Map | Self::Set | Self::AnonFn | Self::Quotation | Self::Meta => UniversalElementRole::Expression,
42 _ => UniversalElementRole::None,
43 }
44 }
45}
46
47impl From<crate::lexer::token_type::ClojureTokenType> for ClojureElementType {
48 fn from(token: crate::lexer::token_type::ClojureTokenType) -> Self {
49 use crate::lexer::token_type::ClojureTokenType as T;
50 match token {
51 T::Token => Self::Token,
52 T::List => Self::List,
53 T::Vector => Self::Vector,
54 T::Map => Self::Map,
55 T::Set => Self::Set,
56 T::AnonFn => Self::AnonFn,
57 T::Root => Self::Root,
58 T::SourceFile => Self::SourceFile,
59 T::Error => Self::Error,
60 T::ListStart => Self::Token,
61 T::ListEnd => Self::Token,
62 T::VectorStart => Self::Token,
63 T::VectorEnd => Self::Token,
64 T::MapStart => Self::Token,
65 T::MapEnd => Self::Token,
66 T::SetStart => Self::Token,
67 T::AnonFnStart => Self::Token,
68 T::Quote => Self::Token,
69 T::Unquote => Self::Token,
70 T::UnquoteSplice => Self::Token,
71 T::Meta => Self::Token,
72 T::Whitespace => Self::Token,
73 T::Comment => Self::Token,
74 T::StringLiteral => Self::Token,
75 T::CharacterLiteral => Self::Token,
76 T::NumberLiteral => Self::Token,
77 T::KeywordLiteral => Self::Token,
78 T::Dispatch => Self::Token,
79 T::RegexLiteral => Self::Token,
80 T::Symbol => Self::Token,
81 }
82 }
83}