// miden_assembly/parser/mod.rs

/// Simple macro used in the grammar definition for constructing spans
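///
/// Illustrative expansions, derived directly from the rules below: `span!(id, l, r)` produces
/// `crate::SourceSpan::new(id, l..r)`, while `span!(id, i)` produces `crate::SourceSpan::at(id, i)`.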
macro_rules! span {
    ($id:expr, $l:expr, $r:expr) => {
        crate::SourceSpan::new($id, $l..$r)
    };
    ($id:expr, $i:expr) => {
        crate::SourceSpan::at($id, $i)
    };
}

lalrpop_util::lalrpop_mod!(
    #[allow(clippy::all)]
    grammar,
    "/parser/grammar.rs"
);

mod error;
mod lexer;
mod scanner;
mod token;

use alloc::{boxed::Box, collections::BTreeSet, string::ToString, sync::Arc, vec::Vec};

pub use self::{
    error::{BinErrorKind, HexErrorKind, LiteralErrorKind, ParsingError},
    lexer::Lexer,
    scanner::Scanner,
    token::{BinEncodedValue, DocumentationType, HexEncodedValue, Token},
};
use crate::{
    ast,
    diagnostics::{Report, SourceFile, SourceSpan, Span, Spanned},
    sema, LibraryPath, SourceManager,
};

// TYPE ALIASES
// ================================================================================================

type ParseError<'a> = lalrpop_util::ParseError<u32, Token<'a>, ParsingError>;

// MODULE PARSER
// ================================================================================================

/// This is a wrapper around the lower-level parser infrastructure, orchestrating all of the
/// pieces needed to parse an [ast::Module] from source and run semantic analysis on it.
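///
/// A minimal usage sketch (illustrative; assumes a [SourceManager] implementation and a valid
/// [LibraryPath] are already in hand):
///
/// ```ignore
/// let mut parser = ModuleParser::new(ast::ModuleKind::Library);
/// // `name` is a LibraryPath; `source` is any type implementing ToString.
/// let module = parser.parse_str(name, source, &source_manager)?;
/// ```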
#[derive(Default)]
pub struct ModuleParser {
    /// The kind of module we're parsing.
    ///
    /// This is used when performing semantic analysis to detect when various invalid constructions
    /// are encountered, such as use of the `syscall` instruction in a kernel module.
    kind: ast::ModuleKind,
    /// A set of interned strings allocated during parsing/semantic analysis.
    ///
    /// This is a very primitive and imprecise way of interning strings, but was the least invasive
    /// at the time the new parser was implemented. In essence, we avoid duplicating allocations
    /// for frequently occurring strings, by tracking which strings we've seen before, and
    /// sharing a reference counted pointer instead.
    ///
    /// We may want to replace this eventually with a proper interner, so that we can also gain the
    /// benefits commonly provided by interned string handles (e.g. cheap equality comparisons, no
    /// ref-counting overhead, copyable and of smaller size).
    ///
    /// Note that [Ident], [ProcedureName], [LibraryPath] and others are all implemented in terms
    /// of either the actual reference-counted string, e.g. `Arc<str>`, or in terms of [Ident],
    /// which is essentially the former wrapped in a [SourceSpan]. If we ever replace this with
    /// a better interner, we will also want to update those types to be in terms of whatever
    /// the handle type of the interner is.
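    ///
    /// Conceptually, interning against this set is the usual lookup-or-insert pattern (a sketch
    /// of the idea only; the actual call sites live in the grammar and semantic analysis code):
    ///
    /// ```ignore
    /// let interned: Arc<str> = match set.get(name) {
    ///     Some(existing) => Arc::clone(existing),
    ///     None => {
    ///         let new = Arc::<str>::from(name);
    ///         set.insert(Arc::clone(&new));
    ///         new
    ///     },
    /// };
    /// ```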
    interned: BTreeSet<Arc<str>>,
    /// When true, all warning diagnostics are promoted to error severity
    warnings_as_errors: bool,
}

impl ModuleParser {
    /// Construct a new parser for the given `kind` of [ast::Module].
    pub fn new(kind: ast::ModuleKind) -> Self {
        Self {
            kind,
            interned: Default::default(),
            warnings_as_errors: false,
        }
    }

    /// Configure this parser so that any warning diagnostics are promoted to errors.
    pub fn set_warnings_as_errors(&mut self, yes: bool) {
        self.warnings_as_errors = yes;
    }

    /// Parse an [ast::Module] from `source`, and give it the provided `path`.
    pub fn parse(
        &mut self,
        path: LibraryPath,
        source: Arc<SourceFile>,
    ) -> Result<Box<ast::Module>, Report> {
        let forms = parse_forms_internal(source.clone(), &mut self.interned)
            .map_err(|err| Report::new(err).with_source_code(source.clone()))?;
        sema::analyze(source, self.kind, path, forms, self.warnings_as_errors).map_err(Report::new)
    }

    /// Parse an [ast::Module] named `name` from the file at `path`.
    #[cfg(feature = "std")]
    pub fn parse_file<P>(
        &mut self,
        name: LibraryPath,
        path: P,
        source_manager: &dyn SourceManager,
    ) -> Result<Box<ast::Module>, Report>
    where
        P: AsRef<std::path::Path>,
    {
        use vm_core::debuginfo::SourceManagerExt;

        use crate::diagnostics::{IntoDiagnostic, WrapErr};

        let path = path.as_ref();
        let source_file = source_manager
            .load_file(path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to load source file from '{}'", path.display()))?;
        self.parse(name, source_file)
    }

    /// Parse an [ast::Module] named `name` from the given `source` string.
    pub fn parse_str(
        &mut self,
        name: LibraryPath,
        source: impl ToString,
        source_manager: &dyn SourceManager,
    ) -> Result<Box<ast::Module>, Report> {
        use vm_core::debuginfo::SourceContent;

        let path = Arc::from(name.path().into_owned().into_boxed_str());
        let content = SourceContent::new(Arc::clone(&path), source.to_string().into_boxed_str());
        let source_file = source_manager.load_from_raw_parts(path, content);
        self.parse(name, source_file)
    }
}

/// This is used in tests to parse `source` as a set of raw [ast::Form]s rather than as an
/// [ast::Module].
///
/// NOTE: This does _not_ run semantic analysis.
#[cfg(any(test, feature = "testing"))]
pub fn parse_forms(source: Arc<SourceFile>) -> Result<Vec<ast::Form>, ParsingError> {
    let mut interned = BTreeSet::default();
    parse_forms_internal(source, &mut interned)
}

/// Parse `source` as a set of [ast::Form]s.
///
/// Aside from catching syntax errors, this does little validation of the resulting forms; that is
/// handled by semantic analysis, which the caller is expected to perform next.
fn parse_forms_internal(
    source: Arc<SourceFile>,
    interned: &mut BTreeSet<Arc<str>>,
) -> Result<Vec<ast::Form>, ParsingError> {
    let source_id = source.id();
    let scanner = Scanner::new(source.as_str());
    let lexer = Lexer::new(source_id, scanner);
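    // Drive the LALRPOP-generated parser over the token stream, threading the interner through,
    // and convert any low-level parse error into a `ParsingError` tied to this source id.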
    grammar::FormsParser::new()
        .parse(&source, interned, core::marker::PhantomData, lexer)
        .map_err(|err| ParsingError::from_parse_error(source_id, err))
}

// DIRECTORY PARSER
// ================================================================================================

/// Read the contents (modules) of this library from `dir`, returning any errors that occur
/// while traversing the file system.
///
/// Errors may also be returned if traversal discovers issues with the modules themselves, such
/// as invalid module names.
///
/// Returns an iterator over all parsed modules.
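///
/// For example (illustrative), given the namespace `std`, a file `collections/stack.masm` under
/// `dir` yields the module `std::collections::stack`, while `collections/mod.masm` yields
/// `std::collections`, since the [ast::Module::ROOT] component is popped off the path.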
#[cfg(feature = "std")]
pub fn read_modules_from_dir(
    namespace: crate::LibraryNamespace,
    dir: &std::path::Path,
    source_manager: &dyn SourceManager,
) -> Result<impl Iterator<Item = Box<ast::Module>>, Report> {
    use std::collections::{btree_map::Entry, BTreeMap};

    use miette::miette;
    use module_walker::{ModuleEntry, WalkModules};

    use crate::diagnostics::{IntoDiagnostic, WrapErr};

    if !dir.is_dir() {
        return Err(miette!("the provided path '{}' is not a valid directory", dir.display()));
    }

    // mod.masm is not allowed in the root directory
    if dir.join(ast::Module::ROOT_FILENAME).exists() {
        return Err(miette!("{} is not allowed in the root directory", ast::Module::ROOT_FILENAME));
    }

    let mut modules = BTreeMap::default();

    let walker = WalkModules::new(namespace.clone(), dir)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to load modules from '{}'", dir.display()))?;
    for entry in walker {
        let ModuleEntry { mut name, source_path } = entry?;
        if name.last() == ast::Module::ROOT {
            name.pop();
        }

        // Parse module at the given path
        let mut parser = ModuleParser::new(ast::ModuleKind::Library);
        let ast = parser.parse_file(name.clone(), &source_path, source_manager)?;
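        // Two files may map to the same fully-qualified name (e.g. `foo.masm` alongside
        // `foo/mod.masm`), so duplicate names must be rejected here.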
        match modules.entry(name) {
            Entry::Occupied(ref entry) => {
                return Err(miette!("duplicate module '{0}'", entry.key().clone()));
            },
            Entry::Vacant(entry) => {
                entry.insert(ast);
            },
        }
    }

    Ok(modules.into_values())
}

#[cfg(feature = "std")]
mod module_walker {

    use std::{
        ffi::OsStr,
        fs::{self, DirEntry, FileType},
        io,
        path::{Path, PathBuf},
    };

    use miette::miette;

    use crate::{
        ast::Module,
        diagnostics::{IntoDiagnostic, Report},
        LibraryNamespace, LibraryPath,
    };

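    /// A module discovered during directory traversal: its fully-qualified [LibraryPath] name,
    /// and the path of the source file it will be parsed from.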
    pub struct ModuleEntry {
        pub name: LibraryPath,
        pub source_path: PathBuf,
    }

    pub struct WalkModules<'a> {
        namespace: LibraryNamespace,
        root: &'a Path,
        stack: alloc::collections::VecDeque<io::Result<DirEntry>>,
    }

    impl<'a> WalkModules<'a> {
        pub fn new(namespace: LibraryNamespace, path: &'a Path) -> io::Result<Self> {
            use alloc::collections::VecDeque;

            let stack = VecDeque::from_iter(fs::read_dir(path)?);

            Ok(Self { namespace, root: path, stack })
        }

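        /// Process a single directory entry: subdirectories are queued for traversal, files
        /// without the module file extension are skipped, and module files are converted into
        /// namespace-relative [LibraryPath]s. Returns `Ok(None)` for entries that yield no module.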
        fn next_entry(
            &mut self,
            entry: &DirEntry,
            ty: &FileType,
        ) -> Result<Option<ModuleEntry>, Report> {
            if ty.is_dir() {
                let dir = entry.path();
                self.stack.extend(fs::read_dir(dir).into_diagnostic()?);
                return Ok(None);
            }

            let mut file_path = entry.path();
            let is_module = file_path
                .extension()
                .map(|ext| ext == AsRef::<OsStr>::as_ref(Module::FILE_EXTENSION))
                .unwrap_or(false);
            if !is_module {
                return Ok(None);
            }

            // Remove the file extension and the root prefix, leaving a namespace-relative path
            file_path.set_extension("");
            if file_path.is_dir() {
                return Err(miette!(
                    "file and directory with same name are not allowed: {}",
                    file_path.display()
                ));
            }
            let relative_path = file_path
                .strip_prefix(self.root)
                .expect("expected path to be a child of the root directory");

            // Construct a [LibraryPath] from the path components, after validating them
            let mut libpath = LibraryPath::from(self.namespace.clone());
            for component in relative_path.iter() {
                let component = component.to_str().ok_or_else(|| {
                    let p = entry.path();
                    miette!("{} is an invalid directory entry", p.display())
                })?;
                libpath.push(component).into_diagnostic()?;
            }
            Ok(Some(ModuleEntry { name: libpath, source_path: entry.path() }))
        }
    }

    impl Iterator for WalkModules<'_> {
        type Item = Result<ModuleEntry, Report>;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
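                // Pop the next pending directory entry and pair it with its file type; any I/O
                // error is surfaced to the caller as a diagnostic Report.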
                let entry = self
                    .stack
                    .pop_front()?
                    .and_then(|entry| entry.file_type().map(|ft| (entry, ft)))
                    .into_diagnostic();

                match entry {
                    Ok((ref entry, ref file_type)) => {
                        match self.next_entry(entry, file_type).transpose() {
                            None => continue,
                            result => break result,
                        }
                    },
                    Err(err) => break Some(Err(err)),
                }
            }
        }
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {
    use vm_core::assert_matches;

    use super::*;
    use crate::SourceId;

    // This test checks the lexer behavior with regard to tokenizing `exp(.u?[\d]+)?`
    #[test]
    fn lex_exp() {
        let source_id = SourceId::default();
        let scanner = Scanner::new("begin exp.u9 end");
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::ExpU)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(n))) if n == 9);
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
    }

    #[test]
    fn lex_block() {
        let source_id = SourceId::default();
        let scanner = Scanner::new(
            "\
const.ERR1=1

begin
    u32assertw
    u32assertw.err=ERR1
    u32assertw.err=2
end
",
        );
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Const)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::ConstantIdent("ERR1"))));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(1))));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Err)));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::ConstantIdent("ERR1"))));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Err)));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(2))));
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
        assert_matches!(lexer.next(), Some(Ok(Token::Eof)));
    }

    #[test]
    fn lex_emit() {
        let source_id = SourceId::default();
        let scanner = Scanner::new(
            "\
begin
    push.1
    emit.1
end
",
        );
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::Push)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(1))));
        assert_matches!(lexer.next(), Some(Ok(Token::Emit)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(1))));
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
        assert_matches!(lexer.next(), Some(Ok(Token::Eof)));
    }
}