oak_csv/lexer/token_type.rs

use oak_core::{TokenType, UniversalTokenRole};
use serde::{Deserialize, Serialize};

/// Token kinds produced by the CSV lexer.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub enum CsvTokenType {
    Whitespace,
    Newline,

    Field,
    QuotedField,
    UnquotedField,
    Comma,
    Quote,
    EscapedQuote,

    Eof,
    Error,
}

impl TokenType for CsvTokenType {
    const END_OF_STREAM: Self = Self::Eof;
    type Role = UniversalTokenRole;

    fn is_ignored(&self) -> bool {
        // Only plain whitespace is skippable; newlines stay significant
        // because they separate CSV records.
        matches!(self, Self::Whitespace)
    }

    fn is_comment(&self) -> bool {
        // No token kind is treated as a comment.
        false
    }

    fn is_whitespace(&self) -> bool {
        matches!(self, Self::Whitespace | Self::Newline)
    }

    fn role(&self) -> Self::Role {
        // Map each CSV-specific token kind onto the language-agnostic role set.
        match self {
            Self::Whitespace | Self::Newline => UniversalTokenRole::Whitespace,
            Self::Field | Self::QuotedField | Self::UnquotedField => UniversalTokenRole::Literal,
            Self::Comma | Self::Quote | Self::EscapedQuote => UniversalTokenRole::Punctuation,
            Self::Eof => UniversalTokenRole::Eof,
            Self::Error => UniversalTokenRole::Error,
        }
    }
}
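
// Illustrative sketch: a small test module exercising the role mapping.
// The module itself is an assumption added for demonstration; it relies only
// on the items defined above and the oak_core types imported in this file.
#[cfg(test)]
mod tests {
    use super::*;
    use oak_core::{TokenType, UniversalTokenRole};

    #[test]
    fn newlines_are_whitespace_but_not_ignored() {
        // Newlines count as whitespace for classification purposes,
        // yet are not ignorable, since they delimit records.
        assert!(CsvTokenType::Newline.is_whitespace());
        assert!(!CsvTokenType::Newline.is_ignored());
        assert!(matches!(
            CsvTokenType::Newline.role(),
            UniversalTokenRole::Whitespace
        ));
    }

    #[test]
    fn roles_distinguish_content_from_punctuation() {
        // Field content maps to Literal, structural characters to Punctuation.
        assert!(matches!(
            CsvTokenType::QuotedField.role(),
            UniversalTokenRole::Literal
        ));
        assert!(matches!(
            CsvTokenType::Comma.role(),
            UniversalTokenRole::Punctuation
        ));
    }
}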