
oak_csv/lexer/token_type.rs

use oak_core::{TokenType, UniversalTokenRole};
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

/// CSV token type
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum CsvTokenType {
    /// Whitespace
    Whitespace,
    /// Newline
    Newline,

    /// Field value (quoted or unquoted)
    Field,
    /// Quoted field value
    QuotedField,
    /// Unquoted field value
    UnquotedField,
    /// Field separator `,`
    Comma,
    /// Quote character `"`
    Quote,
    /// Escaped quote `""`
    EscapedQuote,

    /// End of file
    Eof,
    /// Error
    Error,
}

impl TokenType for CsvTokenType {
    const END_OF_STREAM: Self = Self::Eof;
    type Role = UniversalTokenRole;

    fn is_ignored(&self) -> bool {
        matches!(self, Self::Whitespace)
    }

    fn is_comment(&self) -> bool {
        false
    }

    fn is_whitespace(&self) -> bool {
        matches!(self, Self::Whitespace | Self::Newline)
    }

    fn role(&self) -> Self::Role {
        match self {
            Self::Whitespace | Self::Newline => UniversalTokenRole::Whitespace,
            Self::Field | Self::QuotedField | Self::UnquotedField => UniversalTokenRole::Literal,
            Self::Comma | Self::Quote | Self::EscapedQuote => UniversalTokenRole::Punctuation,
            Self::Eof => UniversalTokenRole::Eof,
            Self::Error => UniversalTokenRole::Error,
        }
    }
}
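
// A minimal usage sketch, not part of the original file: a test module that
// exercises the `TokenType` methods defined above. It assumes the
// `UniversalTokenRole` variants from `oak_core` are unit variants usable in
// `matches!` patterns, as suggested by the `role()` implementation.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn role_and_whitespace_classification() {
        // Structural tokens map to the punctuation role.
        assert!(matches!(
            CsvTokenType::Comma.role(),
            UniversalTokenRole::Punctuation
        ));
        // Field variants are classified as literals.
        assert!(matches!(
            CsvTokenType::QuotedField.role(),
            UniversalTokenRole::Literal
        ));
        // Newlines count as whitespace but are not ignored,
        // since they delimit records.
        assert!(CsvTokenType::Newline.is_whitespace());
        assert!(!CsvTokenType::Newline.is_ignored());
        // Plain whitespace is the only ignored token type.
        assert!(CsvTokenType::Whitespace.is_ignored());
        // The end-of-stream sentinel is the `Eof` variant.
        assert!(matches!(
            <CsvTokenType as TokenType>::END_OF_STREAM,
            CsvTokenType::Eof
        ));
    }
}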