cmake_parser/doc/token.rs

use std::fmt::{self, Display};

/// A single parsed token: a borrowed byte slice together with a flag
/// recording whether the token was quoted in the source.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token<'b> {
    bytes: &'b [u8],
    quoted: bool,
}

impl<'tn> Token<'tn> {
    pub fn text_node(bytes: &'tn [u8], quoted: bool) -> Self {
        Token { bytes, quoted }
    }

    pub fn as_bytes(&self) -> &[u8] {
        self.bytes
    }

    pub fn is_quoted(&self) -> bool {
        self.quoted
    }
}

impl<'b> Display for Token<'b> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", String::from_utf8_lossy(self.bytes))
    }
}

impl<'b> fmt::Debug for Token<'b> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.quoted {
            write!(f, "Token(\"{}\")", String::from_utf8_lossy(self.bytes))
        } else {
            write!(f, "Token({})", String::from_utf8_lossy(self.bytes))
        }
    }
}

impl<'b> AsRef<[u8]> for Token<'b> {
    fn as_ref(&self) -> &[u8] {
        self.bytes
    }
}

// Byte-string literals convert to unquoted tokens.
impl<'b, const N: usize> From<&'b [u8; N]> for Token<'b> {
    fn from(bytes: &'b [u8; N]) -> Self {
        Self {
            bytes,
            quoted: false,
        }
    }
}
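
/// Iterator returned by [`declarations_by_keywords`]: yields one
/// [`TextNodeDeclaration`] for each keyword-delimited group of tokens.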
pub struct TokenDeclarations<'kv, 'k, 'tnv, 'tn> {
    tokens: &'tnv [Token<'tn>],
    keywords: &'kv [&'k [u8]],
    finished: bool,
}
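
/// Splits `tokens` into declarations: each declaration starts at a token
/// whose raw bytes match one of `keywords` and runs up to (but not
/// including) the next keyword. If the token list does not begin with a
/// keyword, or contains no keyword at all, the whole remaining list is
/// yielded as a single declaration.
///
/// A minimal usage sketch (illustrative only; the token values are made up):
///
/// ```ignore
/// let tokens = [Token::text_node(b"HELLO", false), Token::text_node(b"world", false)];
/// let mut decls = declarations_by_keywords(&tokens, &[b"HELLO"]);
/// let decl = decls.next().unwrap();
/// assert_eq!(decl.option().as_bytes(), b"HELLO");
/// assert_eq!(decl.args().len(), 1);
/// ```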
pub fn declarations_by_keywords<'kv, 'k, 'tnv, 'tn>(
    tokens: &'tnv [Token<'tn>],
    keywords: &'kv [&'k [u8]],
) -> TokenDeclarations<'kv, 'k, 'tnv, 'tn> {
    TokenDeclarations {
        tokens,
        keywords,
        finished: false,
    }
}
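
/// One keyword-delimited declaration: the leading `option` token followed by
/// its argument tokens.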
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TextNodeDeclaration<'tnv, 'tn> {
    option: &'tnv Token<'tn>,
    args: &'tnv [Token<'tn>],
}

impl<'tnv, 'tn> TextNodeDeclaration<'tnv, 'tn> {
    pub fn from_text_nodes(value: &'tnv [Token<'tn>]) -> Option<Self> {
        value
            .split_first()
            .map(|(option, args)| Self { option, args })
    }

    pub fn option(&self) -> &Token<'tn> {
        self.option
    }

    pub fn args(&self) -> &[Token<'tn>] {
        self.args
    }
}

impl<'kv, 'k, 'tnv, 'tn> Iterator for TokenDeclarations<'kv, 'k, 'tnv, 'tn> {
    type Item = TextNodeDeclaration<'tnv, 'tn>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.finished {
            return None;
        }

        let mut text_nodes = self.tokens.iter();

        // Find the first token that matches one of the keywords.
        let start = text_nodes.position(|tn| self.keywords.iter().any(|&k| tn.as_bytes() == k));

        let Some(start) = start else {
            // No keyword at all: yield whatever is left as a single
            // declaration and stop.
            self.finished = true;
            return TextNodeDeclaration::from_text_nodes(self.tokens);
        };

        if start != 0 {
            // The slice does not begin with a keyword: yield it whole and stop.
            self.finished = true;
            return TextNodeDeclaration::from_text_nodes(self.tokens);
        }

        let len = self.tokens.len();

        // The declaration runs up to (but not including) the next keyword,
        // or to the end of the token list if there is none.
        let end = text_nodes
            .position(|tn| self.keywords.iter().any(|&k| tn.as_bytes() == k))
            .map(|end| start + end + 1)
            .unwrap_or(len);

        if end >= len {
            self.finished = true;
        }

        let ret = TextNodeDeclaration::from_text_nodes(&self.tokens[start..end]);
        // Advance past the tokens just consumed.
        self.tokens = &self.tokens[end..];
        ret
    }
}

#[cfg(test)]
mod tests {

    use super::{declarations_by_keywords, TextNodeDeclaration, Token};

    fn to_text_nodes<'tn>(tns: &[&'tn [u8]]) -> Vec<Token<'tn>> {
        tns.iter().map(|&x| Token::text_node(x, false)).collect()
    }

    #[test]
    fn check_split_by_keywords() {
        let tns: &[&[u8]] = &[b"HELLO", b"world", b"FLAG", b"FLAG", b"COMMAND", b"command"];
        let text_nodes: Vec<_> = to_text_nodes(tns);
        let mut iter = declarations_by_keywords(&text_nodes, &[b"FLAG", b"HELLO", b"COMMAND"]);
        assert_eq!(
            TextNodeDeclaration::from_text_nodes(&to_text_nodes(&[b"HELLO", b"world"])),
            iter.next()
        );
        assert_eq!(
            TextNodeDeclaration::from_text_nodes(&to_text_nodes(&[b"FLAG"])),
            iter.next()
        );
        assert_eq!(
            TextNodeDeclaration::from_text_nodes(&to_text_nodes(&[b"FLAG"])),
            iter.next()
        );
        assert_eq!(
            TextNodeDeclaration::from_text_nodes(&to_text_nodes(&[b"COMMAND", b"command"])),
            iter.next()
        );
        assert_eq!(None, iter.next());
    }
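
    // Illustrative extra check: if the token list does not begin with a
    // keyword, the iterator yields the whole slice as a single declaration
    // and then finishes.
    #[test]
    fn check_leading_non_keyword_yields_whole_slice() {
        let tns: &[&[u8]] = &[b"world", b"HELLO"];
        let text_nodes: Vec<_> = to_text_nodes(tns);
        let mut iter = declarations_by_keywords(&text_nodes, &[b"HELLO"]);
        assert_eq!(
            TextNodeDeclaration::from_text_nodes(&to_text_nodes(&[b"world", b"HELLO"])),
            iter.next()
        );
        assert_eq!(None, iter.next());
    }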
}