sequoia_openpgp/message/
lexer.rs

1use std::fmt;
2
// The type of the parser's input.
//
// The parser iterates over tuples consisting of the token's starting
// position, the token itself, and the token's ending position.
pub(crate) type LexerItem<Tok, Loc, Error>
    = ::std::result::Result<(Loc, Tok, Loc), Error>;
9
/// The components of an OpenPGP Message.
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Token {
    /// A Literal data packet.
    Literal,
    /// A Compressed Data packet.
    CompressedData,

    /// An SK-ESK packet.
    SKESK,
    /// A PK-ESK packet.
    PKESK,
    /// A version 1 SEIP packet.
    SEIPv1,
    /// A version 2 SEIP packet.
    SEIPv2,
    /// An MDC packet.
    MDC,
    /// An AED packet.
    AED,

    /// A OnePassSig packet.
    OPS,
    /// A Signature packet.
    SIG,

    /// The end of a container (either a Compressed Data packet or a
    /// SEIP packet).
    Pop,

    /// A container's unparsed content.
    OpaqueContent,
}
assert_send_and_sync!(Token);
45
46impl fmt::Display for Token {
47    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
48        write!(f, "{:?}", self)
49    }
50}
51
// The lexer's error type.
//
// This enum is uninhabited — tokenization cannot fail — so no value
// of this type can ever be constructed.  It exists only to satisfy
// the `Result`-shaped `LexerItem` interface.
#[derive(Debug, Clone)]
pub enum LexicalError {
    // There are no lexing errors.
}
assert_send_and_sync!(LexicalError);
57
58impl fmt::Display for LexicalError {
59    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
60        write!(f, "{:?}", self)
61    }
62}
63
/// A lexer feeding a pre-tokenized sequence of [`Token`]s to the
/// parser (see [`Lexer::from_tokens`]).
pub(crate) struct Lexer<'input> {
    // Boxed trait object so `Lexer` need not be generic over the
    // concrete iterator type.
    iter: Box<dyn Iterator<Item=(usize, &'input Token)> + 'input>,
}
67
68impl<'input> Iterator for Lexer<'input> {
69    type Item = LexerItem<Token, usize, LexicalError>;
70
71    fn next(&mut self) -> Option<Self::Item> {
72        let n = self.iter.next().map(|(pos, tok)| (pos, *tok));
73        if let Some((pos, tok)) = n {
74            Some(Ok((pos, tok, pos)))
75        } else {
76            None
77        }
78    }
79}
80
81impl<'input> Lexer<'input> {
82    /// Uses a raw sequence of tokens as input to the parser.
83    pub(crate) fn from_tokens(raw: &'input [Token]) -> Self {
84        Lexer {
85            iter: Box::new(raw.iter().enumerate())
86        }
87    }
88}