sequoia_openpgp/message/
lexer.rs

use std::fmt;

// The type of the parser's input.
//
// The parser iterates over tuples consisting of the token's starting
// position, the token itself, and the token's ending position.
pub(crate) type LexerItem<Tok, Loc, Error>
    = ::std::result::Result<(Loc, Tok, Loc), Error>;
9
/// The components of an OpenPGP Message.
///
/// Note: This enum cannot be exhaustively matched to allow future
/// extensions.
#[non_exhaustive]
#[derive(Debug, Clone, Copy, PartialEq)]
#[deprecated(since = "1.9.0",
             note = "Not covered by SemVer guarantees, DO NOT match on it.")]
pub enum Token {
    /// A Literal data packet.
    Literal,
    /// A Compressed Data packet.
    CompressedData,

    /// An SK-ESK packet.
    SKESK,
    /// A PK-ESK packet.
    PKESK,
    /// A version 1 SEIP packet.
    SEIPv1,
    /// An MDC packet.
    MDC,
    /// An AED packet.
    AED,

    /// A OnePassSig packet.
    OPS,
    /// A Signature packet.
    SIG,

    /// The end of a container (either a Compressed Data packet or a
    /// SEIP packet).
    Pop,

    /// A container's unparsed content.
    OpaqueContent,
}
assert_send_and_sync!(Token);
48
49impl fmt::Display for Token {
50    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
51        write!(f, "{:?}", self)
52    }
53}
54
/// Lexer errors.
///
/// This enum is uninhabited: tokenizing a pre-lexed token sequence
/// cannot fail, so no variant is ever constructed.
#[derive(Debug, Clone)]
pub enum LexicalError {
    // There are no lexing errors.
}
assert_send_and_sync!(LexicalError);
60
61impl fmt::Display for LexicalError {
62    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
63        write!(f, "{:?}", self)
64    }
65}
66
/// Feeds a pre-lexed sequence of tokens to the parser.
pub(crate) struct Lexer<'input> {
    // The underlying stream of (position, token) pairs; built by
    // enumerating a token slice (see `from_tokens`), so a token's
    // position is its index in that slice.
    iter: Box<dyn Iterator<Item=(usize, &'input Token)> + 'input>,
}
70
71impl<'input> Iterator for Lexer<'input> {
72    type Item = LexerItem<Token, usize, LexicalError>;
73
74    fn next(&mut self) -> Option<Self::Item> {
75        let n = self.iter.next().map(|(pos, tok)| (pos, *tok));
76        if let Some((pos, tok)) = n {
77            Some(Ok((pos, tok, pos)))
78        } else {
79            None
80        }
81    }
82}
83
84impl<'input> Lexer<'input> {
85    /// Uses a raw sequence of tokens as input to the parser.
86    pub(crate) fn from_tokens(raw: &'input [Token]) -> Self {
87        Lexer {
88            iter: Box::new(raw.iter().enumerate())
89        }
90    }
91}