// libreda_lefdef/stream_parser.rs

use itertools::{Itertools, PeekingNext};
use std::fmt;
use std::iter::Peekable;
use std::num::ParseIntError;
use std::str::FromStr;

use libreda_stream_parser::{Lexer, ParserError};

/// Error type produced while parsing LEF/DEF files.
#[derive(Clone, Debug)]
pub enum LefDefParseError {
    /// Error propagated from the underlying stream parser.
    ParserError(ParserError<char>),
    /// Encountered a character that is not allowed at this position.
    InvalidCharacter,
    /// The input ended before the parsed structure was complete.
    UnexpectedEndOfFile,
    /// Found an unexpected token: `(actual, expected)`.
    UnexpectedToken(String, String),
    /// The token is not known in the current context.
    UnknownToken(String),
    /// A literal could not be parsed.
    InvalidLiteral(String),
    /// The pair of bus-bit characters is not legal.
    IllegalBusBitChars(char, char),
    /// The feature is not implemented by this parser.
    NotImplemented(&'static str),
    /// A property was referenced but never defined.
    UndefinedProperty(String),
    /// An integer could not be parsed.
    ParseIntError(ParseIntError),
    /// Any other error.
    Other(&'static str),
}

impl From<ParserError<char>> for LefDefParseError {
    fn from(e: ParserError<char>) -> Self {
        Self::ParserError(e)
    }
}

impl fmt::Display for LefDefParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            LefDefParseError::InvalidCharacter => write!(f, "Invalid character."),
            LefDefParseError::UnexpectedEndOfFile => write!(f, "Unexpected end of file."),
            LefDefParseError::UnexpectedToken(actual, exp) => {
                write!(f, "Unexpected token. '{}' instead of '{}'", actual, exp)
            }
            LefDefParseError::UnknownToken(t) => write!(f, "Unknown token: '{}'.", t),
            LefDefParseError::InvalidLiteral(n) => write!(f, "Invalid literal: '{}'.", n),
            LefDefParseError::IllegalBusBitChars(a, b) => {
                write!(f, "Illegal bus bit chars: '{} {}'.", a, b)
            }
            LefDefParseError::NotImplemented(n) => write!(f, "Not implemented: '{}'.", n),
            LefDefParseError::UndefinedProperty(p) => write!(f, "Undefined property: '{}'.", p),
            LefDefParseError::Other(msg) => write!(f, "'{}'.", msg),
            LefDefParseError::ParseIntError(e) => write!(f, "Illegal integer: '{}'", e),
            LefDefParseError::ParserError(e) => write!(f, "{}", e),
        }
    }
}

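// Illustrative sketch, not part of the original file: exercises the `Display`
// implementation above for one representative variant.
#[test]
fn test_display_unexpected_token() {
    let e = LefDefParseError::UnexpectedToken("END".to_string(), "MACRO".to_string());
    assert_eq!(e.to_string(), "Unexpected token. 'END' instead of 'MACRO'");
}
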
impl From<ParseIntError> for LefDefParseError {
    fn from(e: ParseIntError) -> Self {
        Self::ParseIntError(e)
    }
}

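// Illustrative sketch, not part of the original file: the `From<ParseIntError>`
// conversion above lets parsing helpers propagate integer-parse failures with `?`.
#[test]
fn test_parse_int_error_conversion() {
    // Hypothetical helper, only used here to demonstrate the error conversion.
    fn parse_number(token: &str) -> Result<i32, LefDefParseError> {
        Ok(token.parse()?)
    }

    assert_eq!(parse_number("42").unwrap(), 42);
    assert!(matches!(
        parse_number("not-a-number"),
        Err(LefDefParseError::ParseIntError(_))
    ));
}
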
/// Lexer for LEF and DEF files.
pub struct LefDefLexer {}

impl Lexer for LefDefLexer {
    type Char = char;

    fn consume_next_token(
        &mut self,
        iter: &mut (impl Iterator<Item = char> + PeekingNext),
        mut output: impl FnMut(char),
    ) -> Result<(), ParserError<char>> {
        loop {
            // Skip whitespace before the token.
            let _n = iter.peeking_take_while(|c| c.is_whitespace()).count();

            if let Some(c) = iter.peeking_next(|_| true) {
                debug_assert!(!c.is_whitespace());

                match c {
                    '#' => {
                        // Comment: skip the rest of the line and look for the next token.
                        iter.peeking_take_while(|&c| c != '\n' && c != '\r').count();
                    }
                    '"' | '\'' => {
                        // Quoted string: everything up to the matching unescaped quote is one token.
                        let quote_char = c;

                        let mut prev = None;
                        while let Some(c) = iter.next() {
                            if prev != Some('\\') && c == quote_char {
                                // An unescaped closing quote ends the string token.
                                break;
                            }
                            output(c);
                            prev = Some(c);
                        }
                        return Ok(());
                    }
                    _ => {
                        // Plain token: read characters until unescaped whitespace.
                        let mut prev = Some(c);
                        output(c);

                        while let Some(c) = iter.next() {
                            if prev != Some('\\') && c.is_whitespace() {
                                break;
                            }

                            output(c);
                            prev = Some(c);
                        }
                        return Ok(());
                    }
                }
            } else {
                // End of input: nothing more to tokenize.
                return Ok(());
            }
        }
    }
}

#[test]
fn test_read_token() {
    let data = r#"
    # Comment 1

    # Comment 2

    token1

    # Comment 3

    token2 token3

    "quoted token"

    token4
    "#;

    // Echo consumed characters to ease debugging when the test fails.
    let iter = data.chars().inspect(|c| print!("{}", c)).peekable();

    let mut tk = libreda_stream_parser::tokenize(iter, LefDefLexer {});

    tk.advance().unwrap();

    tk.expect_str("token1").unwrap();
    tk.expect_str("token2").unwrap();
    tk.expect_str("token3").unwrap();
    tk.expect_str("quoted token").unwrap();
    tk.expect_str("token4").unwrap();
    assert!(tk.current_token_ref().is_none());
}
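
// Additional sketch, not part of the original tests: checks that escaped quotes
// inside a quoted token are kept verbatim (including the backslash) and that
// `#` comments are skipped, using the same API as `test_read_token` above.
#[test]
fn test_read_escaped_quote() {
    let data = r#"
    # A comment before the tokens
    "hello \" world" end
    "#;

    let mut tk = libreda_stream_parser::tokenize(data.chars().peekable(), LefDefLexer {});

    tk.advance().unwrap();

    tk.expect_str(r#"hello \" world"#).unwrap();
    tk.expect_str("end").unwrap();
    assert!(tk.current_token_ref().is_none());
}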