kconfig_parser/lex/source_lexer.rs

/*
 Cargo KConfig - KConfig parser
 Copyright (C) 2022  Sjoerd van Leent

--------------------------------------------------------------------------------

Copyright Notice: Apache

Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at

   https://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

--------------------------------------------------------------------------------

Copyright Notice: GPLv2

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <https://www.gnu.org/licenses/>.

--------------------------------------------------------------------------------

Copyright Notice: MIT

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the “Software”), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/

//! This file contains the source lexer, which behaves like a regular lexer,
//! but inserts the contents of source files indicated by the "source ..."
//! directive.
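//!
//! For illustration only (the path below is hypothetical, not taken from any
//! real tree), a Kconfig fragment using the directive could look like:
//!
//! ```text
//! mainmenu "Demo"
//!
//! # The tokens of drivers/Kconfig are emitted in place of this directive.
//! source "drivers/Kconfig"
//! ```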

use std::fmt::Display;

use super::{
    structs::{Keyword, Lexicon, Token},
    LexerBase,
};
use crate::parse_string;

/// The source lexer allows the `source` directive to be used to load
/// additional sources as part of the currently loaded Kconfig file.
pub struct SourceLexer<LB>
where
    LB: LexerBase,
{
    /// Each entry on this stack pairs the name of a stream with the lexer
    /// belonging to that stream. When a lexer has been depleted, its entry
    /// is removed from the stack.
    stack: Vec<Table<LB>>,

    /// The source function takes the name of the source and returns a new
    /// lexer.
    source_function: fn(&str) -> Result<LB, String>,

    /// The canonical_name_function resolves the name of the source to a
    /// canonical name understood by the system. This is primarily used to
    /// detect source recursion (which would otherwise never terminate).
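    ///
    /// For instance (an illustrative choice, not something this crate
    /// prescribes), a caller could canonicalize by stripping a leading `./`:
    ///
    /// ```
    /// // A purely illustrative canonicalization.
    /// let canonical = |name: &str| name.trim_start_matches("./").to_string();
    /// assert_eq!(canonical("./Kconfig"), "Kconfig");
    /// ```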
    canonical_name_function: fn(&str) -> String,
}

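/// A stack entry pairing the name of a stream with the lexer that produces
/// its tokens.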
struct Table<LB>
where
    LB: LexerBase,
{
    stream_name: String,
    lexer: LB,
}

/// Provides a basic error message for when the source function fails for one
/// reason or another.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Error {
    msg: String,
}

impl Error {
    /// Creates a new error from a source function's error message, allowing
    /// it to be passed on as part of the source lexer's error conditions.
    pub fn new(s: &str) -> Error {
        Error { msg: s.to_string() }
    }
}

impl Display for Error {
    /// Formats the error
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.msg)
    }
}

impl<LB> SourceLexer<LB>
where
    LB: LexerBase,
{
    /// Creates a new source lexer, given the stream's name. This performs the
    /// first call to source_function, to actually load the stream.
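    ///
    /// # Example
    ///
    /// A minimal sketch, mirroring the in-memory `Lexer<&[u8]>` setup used by
    /// the tests below (marked `ignore` because it relies on that test setup):
    ///
    /// ```ignore
    /// // Mirrors the in-memory setup used in this file's tests.
    /// let mut lexer = SourceLexer::<Lexer<&[u8]>>::new(
    ///     "foo",
    ///     |stream_name| {
    ///         if stream_name == "foo" {
    ///             let s = &mut "mainmenu \"Example\"".as_bytes();
    ///             Ok(Lexer::create(s))
    ///         } else {
    ///             Err("Error".to_owned())
    ///         }
    ///     },
    ///     |stream_name| stream_name.to_string(),
    /// ).unwrap();
    /// assert_eq!(Lexicon::Keyword(Keyword::Mainmenu), lexer.next_token().term());
    /// ```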
    pub fn new(
        stream_name: &str,
        source_function: fn(&str) -> Result<LB, String>,
        canonical_name_function: fn(&str) -> String,
    ) -> Result<Self, Error> {
        match (source_function)(stream_name) {
            Ok(lexer) => Ok(Self {
                stack: vec![Table {
                    stream_name: stream_name.to_string(),
                    lexer,
                }],
                source_function,
                canonical_name_function,
            }),
            Err(s) => Err(Error::new(&s)),
        }
    }
}

impl<LB> LexerBase for SourceLexer<LB>
where
    LB: LexerBase,
{
    fn next_token(&mut self) -> super::structs::Token {
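        // Tokens come from the lexer on top of the stack: a `source` directive
        // pushes a lexer for the referenced stream, and a depleted lexer is
        // popped again, so sourced streams appear inline in the token stream.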
        let last = self.stack.last_mut();
        match last {
            Some(table) => {
                let token = table.lexer.next_token();
                match token.term() {
                    Lexicon::EOT => {
                        // The current stream is depleted: pop its lexer. If it
                        // was pushed by a `source` directive, continue with the
                        // including stream instead of leaking its EOT.
                        self.stack.pop();
                        if self.stack.is_empty() {
                            token
                        } else {
                            self.next_token()
                        }
                    }
                    Lexicon::Keyword(k) => match k {
                        Keyword::Source => {
                            // read the next token, which should be a string,
                            // pointing to the stream to read.
                            let token = table.lexer.next_token();
                            match token.term() {
                                Lexicon::String(stream_name) => {
                                    let stream_name = match parse_string(&stream_name) {
                                        Ok(s) => s,
                                        Err(e) => {
                                            return Token::create(
                                                Lexicon::Error(e),
                                                token.column(),
                                                token.line(),
                                                &token.raw(),
                                            )
                                        }
                                    };
                                    let canonical_name =
                                        (self.canonical_name_function)(&stream_name);
                                    // Verify that the stream_name does not
                                    // already exist on the stack, which would
                                    // create a circular load.
                                    for needle in &self.stack {
                                        if needle.stream_name.eq(&canonical_name) {
                                            return Token::create(
                                                Lexicon::Error(
                                                    "Circular source detected, abort load"
                                                        .to_string(),
                                                ),
                                                token.column(),
                                                token.line(),
                                                &token.raw(),
                                            );
                                        }
                                    }

                                    match (self.source_function)(&stream_name) {
                                        Ok(lb) => {
                                            let t = Table {
                                                stream_name: canonical_name,
                                                lexer: lb,
                                            };

                                            self.stack.push(t);
                                            self.next_token()
                                        }
                                        Err(s) => Token::create(
                                            Lexicon::Error(s),
                                            token.column(),
                                            token.line(),
                                            &token.raw(),
                                        ),
                                    }
                                }
                                _ => Token::create(
                                    Lexicon::Error(
                                        "Expected string indicating source file or stream"
                                            .to_string(),
                                    ),
                                    token.column(),
                                    token.line(),
                                    &token.raw(),
                                ),
                            }
                        }
                        _ => token,
                    },
                    _ => token,
                }
            }
            None => Token::create(Lexicon::EOT, 0, 0, &""),
        }
    }

    fn current_stream(&self) -> Option<String> {
        self.stack.last().map(|table| table.stream_name.to_string())
    }
}

#[cfg(test)]
mod tests {
    use super::super::Lexer;
    use super::super::LexerBase;
    use super::*;

    #[test]
    fn test_keywords() {
        let mut l = SourceLexer::<Lexer<&[u8]>>::new(
            "foo",
            |stream_name| {
                if stream_name == "foo" {
                    let s = &mut "mainmenu config menuconfig choice endchoice menu endmenu if endif bool def_bool \
                                  tristate def_tristate string hex int default depends on select imply visible range prompt comment".as_bytes();
                    Ok(Lexer::create(s))
                } else {
                    Err("Error".to_owned())
                }
            },
            |stream_name| stream_name.to_string(),
        ).unwrap();
        assert_eq!(Lexicon::Keyword(Keyword::Mainmenu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Config), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Menuconfig), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Choice), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endchoice), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Menu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endmenu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::If), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endif), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Bool), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::DefBool), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Tristate), l.next_token().term());
        assert_eq!(
            Lexicon::Keyword(Keyword::DefTristate),
            l.next_token().term()
        );
        assert_eq!(Lexicon::Keyword(Keyword::String), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Hex), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Int), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Default), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Depends), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::On), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Select), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Imply), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Visible), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Range), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Prompt), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Comment), l.next_token().term());
        assert_eq!(Lexicon::EOT, l.next_token().term());
    }

    #[test]
    fn test_keywords_sub_lexer() {
        let mut l = SourceLexer::<Lexer<&[u8]>>::new(
            "foo",
            |stream_name| {
                if stream_name == "foo" {
                    let s = &mut "source \"bar\"".as_bytes();
                    Ok(Lexer::create(s))
                } else if stream_name == "bar" {
                    let s = &mut "mainmenu config menuconfig choice endchoice menu endmenu if endif bool def_bool \
                                  tristate def_tristate string hex int default depends on select imply visible range prompt comment".as_bytes();
                    Ok(Lexer::create(s))
                } else {
                    Err("Error".to_owned())
                }
            },
            |stream_name| stream_name.to_string(),
        ).unwrap();
        assert_eq!(Lexicon::Keyword(Keyword::Mainmenu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Config), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Menuconfig), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Choice), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endchoice), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Menu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endmenu), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::If), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Endif), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Bool), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::DefBool), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Tristate), l.next_token().term());
        assert_eq!(
            Lexicon::Keyword(Keyword::DefTristate),
            l.next_token().term()
        );
        assert_eq!(Lexicon::Keyword(Keyword::String), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Hex), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Int), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Default), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Depends), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::On), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Select), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Imply), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Visible), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Range), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Prompt), l.next_token().term());
        assert_eq!(Lexicon::Keyword(Keyword::Comment), l.next_token().term());
        assert_eq!(Lexicon::EOT, l.next_token().term());
    }

    #[test]
    fn test_error_wrong_syntax() {
        let mut l = SourceLexer::<Lexer<&[u8]>>::new(
            "foo",
            |stream_name| {
                if stream_name == "foo" {
                    let s = &mut "source bla".as_bytes();
                    Ok(Lexer::create(s))
                } else {
                    Err("Error".to_owned())
                }
            },
            |stream_name| stream_name.to_string(),
        ).unwrap();

        assert!(matches!(l.next_token().term(), Lexicon::Error(_)));
    }

    #[test]
    fn test_error_circular_syntax() {
        let mut l = SourceLexer::<Lexer<&[u8]>>::new(
            "foo",
            |stream_name| {
                if stream_name == "foo" {
                    let s = &mut "source \"bar\"".as_bytes();
                    Ok(Lexer::create(s))
                } else if stream_name == "bar" {
                    let s = &mut "source \"foo\"".as_bytes();
                    Ok(Lexer::create(s))
                } else {
                    Err("Error".to_owned())
                }
            },
            |stream_name| stream_name.to_string(),
        ).unwrap();

        assert!(matches!(l.next_token().term(), Lexicon::Error(_)));
    }
}