
oak_csv/lexer/token_type.rs

use oak_core::{TokenType, UniversalTokenRole};
use serde::{Deserialize, Serialize};

/// CSV token type
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub enum CsvTokenType {
    /// Whitespace
    Whitespace,
    /// Newline
    Newline,

    /// Field value (quoted or unquoted)
    Field,
    /// Quoted field value
    QuotedField,
    /// Unquoted field value
    UnquotedField,
    /// Field separator `,`
    Comma,
    /// Quote character `"`
    Quote,
    /// Escaped quote `""`
    EscapedQuote,

    /// End of file
    Eof,
    /// Error
    Error,
}

impl TokenType for CsvTokenType {
    const END_OF_STREAM: Self = Self::Eof;
    type Role = UniversalTokenRole;

    fn is_ignored(&self) -> bool {
        matches!(self, Self::Whitespace)
    }

    fn is_comment(&self) -> bool {
        false
    }

    fn is_whitespace(&self) -> bool {
        matches!(self, Self::Whitespace | Self::Newline)
    }

    fn role(&self) -> Self::Role {
        match self {
            Self::Whitespace | Self::Newline => UniversalTokenRole::Whitespace,
            Self::Field | Self::QuotedField | Self::UnquotedField => UniversalTokenRole::Literal,
            Self::Comma | Self::Quote | Self::EscapedQuote => UniversalTokenRole::Punctuation,
            Self::Eof => UniversalTokenRole::Eof,
            Self::Error => UniversalTokenRole::Error,
        }
    }
}
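
Below is a minimal usage sketch, not part of the source file: a hypothetical test module exercising the trait methods defined above. It relies only on the `CsvTokenType` variants and the `UniversalTokenRole` variants referenced in `role()`, and uses `matches!` for role checks so no extra trait bounds are assumed on `UniversalTokenRole`.

// Hypothetical test module (not in the original file); it only exercises
// the items defined above and the `UniversalTokenRole` variants they reference.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn only_plain_whitespace_is_ignored() {
        // Both count as whitespace, but only `Whitespace` is skipped.
        assert!(CsvTokenType::Whitespace.is_ignored());
        assert!(!CsvTokenType::Newline.is_ignored());
        assert!(CsvTokenType::Whitespace.is_whitespace());
        assert!(CsvTokenType::Newline.is_whitespace());
    }

    #[test]
    fn roles_map_to_universal_categories() {
        // Field-like tokens are literals; separators and quotes are punctuation.
        assert!(matches!(CsvTokenType::Field.role(), UniversalTokenRole::Literal));
        assert!(matches!(CsvTokenType::Comma.role(), UniversalTokenRole::Punctuation));
        assert!(matches!(CsvTokenType::Eof.role(), UniversalTokenRole::Eof));
    }

    #[test]
    fn eof_is_the_end_of_stream_marker() {
        // `CsvTokenType` derives `PartialEq` and `Debug`, so a direct comparison works.
        assert_eq!(CsvTokenType::END_OF_STREAM, CsvTokenType::Eof);
    }
}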