// oak_csv/lexer/token_type.rs
use oak_core::{TokenType, UniversalTokenRole};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
4
/// Token categories produced by the CSV lexer.
///
/// Variant order matters: the derived `PartialOrd`/`Ord` follow
/// declaration order, so do not reorder variants.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum CsvTokenType {
    /// Run of spacing between tokens; ignored by the lexer (see `is_ignored`).
    Whitespace,
    /// Line break — kept (not ignored) because it separates CSV records.
    Newline,

    /// A field token (relation to the quoted/unquoted variants below is
    /// decided by the lexer — TODO confirm against lexer rules).
    Field,
    /// A field enclosed in quotes.
    QuotedField,
    /// A field with no surrounding quotes.
    UnquotedField,
    /// The `,` field separator.
    Comma,
    /// A quote character.
    Quote,
    /// An escaped quote inside a quoted field (presumably `""` per
    /// RFC 4180 — verify against the lexer).
    EscapedQuote,

    /// End of the input stream (`TokenType::END_OF_STREAM`).
    Eof,
    /// A token the lexer could not classify.
    Error,
}
32
33impl TokenType for CsvTokenType {
34 const END_OF_STREAM: Self = Self::Eof;
35 type Role = UniversalTokenRole;
36
37 fn is_ignored(&self) -> bool {
38 matches!(self, Self::Whitespace)
39 }
40
41 fn is_comment(&self) -> bool {
42 false
43 }
44
45 fn is_whitespace(&self) -> bool {
46 matches!(self, Self::Whitespace | Self::Newline)
47 }
48
49 fn role(&self) -> Self::Role {
50 match self {
51 Self::Whitespace | Self::Newline => UniversalTokenRole::Whitespace,
52 Self::Field | Self::QuotedField | Self::UnquotedField => UniversalTokenRole::Literal,
53 Self::Comma | Self::Quote | Self::EscapedQuote => UniversalTokenRole::Punctuation,
54 Self::Eof => UniversalTokenRole::Eof,
55 Self::Error => UniversalTokenRole::Error,
56 }
57 }
58}