miden_assembly_syntax/parser/mod.rs

/// Simple macro used in the grammar definition for constructing spans
macro_rules! span {
    ($id:expr, $l:expr, $r:expr) => {
        ::miden_debug_types::SourceSpan::new($id, $l..$r)
    };
    ($id:expr, $i:expr) => {
        ::miden_debug_types::SourceSpan::at($id, $i)
    };
}
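
// For example (illustrative only), a grammar production might construct spans as:
//
//     span!(source_id, l, r)  // expands to SourceSpan::new(source_id, l..r)
//     span!(source_id, i)     // expands to SourceSpan::at(source_id, i)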

lalrpop_util::lalrpop_mod!(
    #[expect(clippy::all)]
    grammar,
    "/parser/grammar.rs"
);

mod error;
mod lexer;
mod scanner;
mod token;

use alloc::{boxed::Box, collections::BTreeSet, string::ToString, sync::Arc, vec::Vec};

use miden_debug_types::{SourceFile, SourceLanguage, SourceManager, Uri};
use miden_utils_diagnostics::Report;

pub use self::{
    error::{BinErrorKind, HexErrorKind, LiteralErrorKind, ParsingError},
    lexer::Lexer,
    scanner::Scanner,
    token::{BinEncodedValue, DocumentationType, IntValue, PushValue, Token, WordValue},
};
use crate::{Path, ast, sema};

// TYPE ALIASES
// ================================================================================================

type ParseError<'a> = lalrpop_util::ParseError<u32, Token<'a>, ParsingError>;

// MODULE PARSER
// ================================================================================================

/// A wrapper around the lower-level parser infrastructure that orchestrates all of the pieces
/// needed to parse an [ast::Module] from source and run semantic analysis on it.
#[derive(Default)]
pub struct ModuleParser {
    /// The kind of module we're parsing.
    ///
    /// This is used during semantic analysis to detect invalid constructions, such as use of the
    /// `syscall` instruction in a kernel module.
    kind: ast::ModuleKind,
    /// A set of interned strings allocated during parsing/semantic analysis.
    ///
    /// This is a very primitive and imprecise way of interning strings, but was the least
    /// invasive at the time the new parser was implemented. In essence, we avoid duplicating
    /// allocations for frequently occurring strings by tracking which strings we've seen before,
    /// and sharing a reference-counted pointer instead.
    ///
    /// We may want to replace this eventually with a proper interner, so that we can also gain
    /// the benefits commonly provided by interned string handles (e.g. cheap equality
    /// comparisons, no ref-counting overhead, copyable and of smaller size).
    ///
    /// Note that [Ident], [ProcedureName], [LibraryPath] and others are all implemented in terms
    /// of either the actual reference-counted string, e.g. `Arc<str>`, or in terms of [Ident],
    /// which is essentially the former wrapped in a [SourceSpan]. If we ever replace this with
    /// a better interner, we will also want to update those types to be in terms of whatever
    /// the handle type of the interner is.
    interned: BTreeSet<Arc<str>>,
    /// When true, all warning diagnostics are promoted to error severity.
    warnings_as_errors: bool,
}
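
// A minimal sketch of the interning pattern described above (illustrative; not the actual
// helper used by the parser): the set is consulted first, so repeated strings share a single
// reference-counted allocation.
//
//     fn intern(set: &mut BTreeSet<Arc<str>>, s: &str) -> Arc<str> {
//         match set.get(s) {
//             // Already seen: hand out another pointer to the same allocation
//             Some(interned) => Arc::clone(interned),
//             // First occurrence: allocate once and remember it
//             None => {
//                 let interned = Arc::<str>::from(s);
//                 set.insert(Arc::clone(&interned));
//                 interned
//             },
//         }
//     }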

impl ModuleParser {
    /// Construct a new parser for the given `kind` of [ast::Module].
    pub fn new(kind: ast::ModuleKind) -> Self {
        Self {
            kind,
            interned: Default::default(),
            warnings_as_errors: false,
        }
    }

    /// Configure this parser so that any warning diagnostics are promoted to errors.
    pub fn set_warnings_as_errors(&mut self, yes: bool) {
        self.warnings_as_errors = yes;
    }

    /// Parse an [ast::Module] from `source`, and give it the provided `path`.
    pub fn parse(
        &mut self,
        path: impl AsRef<Path>,
        source: Arc<SourceFile>,
        source_manager: Arc<dyn SourceManager>,
    ) -> Result<Box<ast::Module>, Report> {
        let path = path.as_ref();
        let forms = parse_forms_internal(source.clone(), &mut self.interned)
            .map_err(|err| Report::new(err).with_source_code(source.clone()))?;
        sema::analyze(source, self.kind, path, forms, self.warnings_as_errors, source_manager)
            .map_err(Report::new)
    }

    /// Parse an [ast::Module] named `name` from the file at `path`.
    #[cfg(feature = "std")]
    pub fn parse_file<N, P>(
        &mut self,
        name: N,
        path: P,
        source_manager: Arc<dyn SourceManager>,
    ) -> Result<Box<ast::Module>, Report>
    where
        N: AsRef<Path>,
        P: AsRef<std::path::Path>,
    {
        use miden_debug_types::SourceManagerExt;
        use miden_utils_diagnostics::{IntoDiagnostic, WrapErr};

        let path = path.as_ref();
        let source_file = source_manager
            .load_file(path)
            .into_diagnostic()
            .wrap_err_with(|| format!("failed to load source file from '{}'", path.display()))?;
        self.parse(name, source_file, source_manager)
    }

    /// Parse an [ast::Module] named `name` from the given `source` string.
    pub fn parse_str(
        &mut self,
        name: impl AsRef<Path>,
        source: impl ToString,
        source_manager: Arc<dyn SourceManager>,
    ) -> Result<Box<ast::Module>, Report> {
        use miden_debug_types::SourceContent;

        let name = name.as_ref();
        let uri = Uri::from(name.as_str().to_string().into_boxed_str());
        let content = SourceContent::new(
            SourceLanguage::Masm,
            uri.clone(),
            source.to_string().into_boxed_str(),
        );
        let source_file = source_manager.load_from_raw_parts(uri, content);
        self.parse(name, source_file, source_manager)
    }
}
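
// A minimal usage sketch (illustrative): `DefaultSourceManager` is assumed to be the in-memory
// source manager provided by `miden_debug_types`, and the module path and source text are
// hypothetical.
//
//     let source_manager = Arc::new(miden_debug_types::DefaultSourceManager::default());
//     let mut parser = ModuleParser::new(ast::ModuleKind::Library);
//     let module = parser.parse_str("my_lib::my_module", "export.foo add end", source_manager)?;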

/// This is used in tests to parse `source` as a set of raw [ast::Form]s, rather than as an
/// [ast::Module].
///
/// NOTE: This does _not_ run semantic analysis.
#[cfg(any(test, feature = "testing"))]
pub fn parse_forms(source: Arc<SourceFile>) -> Result<Vec<ast::Form>, ParsingError> {
    let mut interned = BTreeSet::default();
    parse_forms_internal(source, &mut interned)
}

/// Parse `source` as a set of [ast::Form]s.
///
/// Aside from catching syntax errors, this does little validation of the resulting forms; that
/// is handled by semantic analysis, which the caller is expected to perform next.
fn parse_forms_internal(
    source: Arc<SourceFile>,
    interned: &mut BTreeSet<Arc<str>>,
) -> Result<Vec<ast::Form>, ParsingError> {
    let source_id = source.id();
    let scanner = Scanner::new(source.as_str());
    let lexer = Lexer::new(source_id, scanner);
    let felt_type = Arc::new(ast::types::ArrayType::new(ast::types::Type::Felt, 4));
    grammar::FormsParser::new()
        .parse(source_id, interned, &felt_type, core::marker::PhantomData, lexer)
        .map_err(|err| ParsingError::from_parse_error(source_id, err))
}

// DIRECTORY PARSER
// ================================================================================================

/// Read the contents (modules) of this library from `dir`, returning any errors that occur
/// while traversing the file system.
///
/// Errors may also be returned if traversal discovers issues with the modules, such as
/// invalid names.
///
/// Returns an iterator over all parsed modules.
#[cfg(feature = "std")]
pub fn read_modules_from_dir(
    dir: impl AsRef<std::path::Path>,
    namespace: impl AsRef<Path>,
    source_manager: Arc<dyn SourceManager>,
) -> Result<impl Iterator<Item = Box<ast::Module>>, Report> {
    use std::collections::{BTreeMap, btree_map::Entry};

    use miden_utils_diagnostics::{IntoDiagnostic, WrapErr, report};
    use module_walker::{ModuleEntry, WalkModules};

    let dir = dir.as_ref();
    if !dir.is_dir() {
        return Err(report!("the provided path '{}' is not a valid directory", dir.display()));
    }

    // mod.masm is not allowed in the root directory
    if dir.join(ast::Module::ROOT_FILENAME).exists() {
        return Err(report!("{} is not allowed in the root directory", ast::Module::ROOT_FILENAME));
    }

    let mut modules = BTreeMap::default();

    let walker = WalkModules::new(namespace.as_ref().to_path_buf(), dir)
        .into_diagnostic()
        .wrap_err_with(|| format!("failed to load modules from '{}'", dir.display()))?;
    for entry in walker {
        let ModuleEntry { mut name, source_path } = entry?;
        if name.last().unwrap() == ast::Module::ROOT {
            name.pop();
        }

        // Parse the module at the given path
        let mut parser = ModuleParser::new(ast::ModuleKind::Library);
        let ast = parser.parse_file(&name, &source_path, source_manager.clone())?;
        match modules.entry(name) {
            Entry::Occupied(ref entry) => {
                return Err(report!("duplicate module '{0}'", entry.key().clone()));
            },
            Entry::Vacant(entry) => {
                entry.insert(ast);
            },
        }
    }

    Ok(modules.into_values())
}
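
// Illustrative usage (the `asm/` directory, `my_lib` namespace, and source manager setup are
// hypothetical):
//
//     let source_manager = Arc::new(miden_debug_types::DefaultSourceManager::default());
//     for module in read_modules_from_dir("asm", "my_lib", source_manager)? {
//         // each item is a parsed, semantically-analyzed Box<ast::Module>
//     }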

#[cfg(feature = "std")]
mod module_walker {
    use std::{
        ffi::OsStr,
        fs::{self, DirEntry, FileType},
        io,
        path::{Path, PathBuf},
    };

    use miden_utils_diagnostics::{IntoDiagnostic, Report, report};

    use crate::{Path as LibraryPath, PathBuf as LibraryPathBuf, ast::Module};

    pub struct ModuleEntry {
        pub name: LibraryPathBuf,
        pub source_path: PathBuf,
    }
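
    // For example (illustrative): given namespace `std` and root directory `asm/`, the file
    // `asm/math/u64.masm` produces `name = "std::math::u64"` and
    // `source_path = "asm/math/u64.masm"`.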

    pub struct WalkModules<'a> {
        namespace: LibraryPathBuf,
        root: &'a Path,
        stack: alloc::collections::VecDeque<io::Result<DirEntry>>,
    }

    impl<'a> WalkModules<'a> {
        pub fn new(namespace: LibraryPathBuf, path: &'a Path) -> io::Result<Self> {
            use alloc::collections::VecDeque;

            let stack = VecDeque::from_iter(fs::read_dir(path)?);

            Ok(Self { namespace, root: path, stack })
        }

        fn next_entry(
            &mut self,
            entry: &DirEntry,
            ty: &FileType,
        ) -> Result<Option<ModuleEntry>, Report> {
            if ty.is_dir() {
                let dir = entry.path();
                self.stack.extend(fs::read_dir(dir).into_diagnostic()?);
                return Ok(None);
            }

            let mut file_path = entry.path();
            let is_module = file_path
                .extension()
                .map(|ext| ext == AsRef::<OsStr>::as_ref(Module::FILE_EXTENSION))
                .unwrap_or(false);
            if !is_module {
                return Ok(None);
            }

            // Remove the file extension and the root prefix, leaving a namespace-relative path
            file_path.set_extension("");
            if file_path.is_dir() {
                return Err(report!(
                    "file and directory with same name are not allowed: {}",
                    file_path.display()
                ));
            }
            let relative_path = file_path
                .strip_prefix(self.root)
                .expect("expected path to be a child of the root directory");

            // Construct a [LibraryPath] from the path components, after validating them
            let mut libpath = self.namespace.clone();
            for component in relative_path.iter() {
                let component = component.to_str().ok_or_else(|| {
                    let p = entry.path();
                    report!("{} is an invalid directory entry", p.display())
                })?;
                LibraryPath::validate(component).into_diagnostic()?;
                libpath.push(component);
            }
            Ok(Some(ModuleEntry { name: libpath, source_path: entry.path() }))
        }
    }

    impl Iterator for WalkModules<'_> {
        type Item = Result<ModuleEntry, Report>;

        fn next(&mut self) -> Option<Self::Item> {
            loop {
                let entry = self
                    .stack
                    .pop_front()?
                    .and_then(|entry| entry.file_type().map(|ft| (entry, ft)))
                    .into_diagnostic();

                match entry {
                    Ok((ref entry, ref file_type)) => {
                        match self.next_entry(entry, file_type).transpose() {
                            None => continue,
                            result => break result,
                        }
                    },
                    Err(err) => break Some(Err(err)),
                }
            }
        }
    }
}

// TESTS
// ================================================================================================

#[cfg(test)]
mod tests {
    use miden_core::assert_matches;
    use miden_debug_types::SourceId;

    use super::*;

    // This test checks the lexer behavior with regard to tokenizing `exp(.u?[\d]+)?`
    #[test]
    fn lex_exp() {
        let source_id = SourceId::default();
        let scanner = Scanner::new("begin exp.u9 end");
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::ExpU)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(n))) if n == 9);
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
    }

    #[test]
    fn lex_block() {
        let source_id = SourceId::default();
        let scanner = Scanner::new(
            "\
const ERR1 = 1

begin
    u32assertw
    u32assertw.err=ERR1
    u32assertw.err=2
end
",
        );
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Const)));
        assert_matches!(lexer.next(), Some(Ok(Token::ConstantIdent("ERR1"))));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(1))));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Err)));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::ConstantIdent("ERR1"))));
        assert_matches!(lexer.next(), Some(Ok(Token::U32Assertw)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Err)));
        assert_matches!(lexer.next(), Some(Ok(Token::Equal)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(2))));
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
        assert_matches!(lexer.next(), Some(Ok(Token::Eof)));
    }

    #[test]
    fn lex_emit() {
        let source_id = SourceId::default();
        let scanner = Scanner::new(
            "\
begin
    push.1
    emit.event(\"abc\")
end
",
        );
        let mut lexer = Lexer::new(source_id, scanner).map(|result| result.map(|(_, t, _)| t));
        assert_matches!(lexer.next(), Some(Ok(Token::Begin)));
        assert_matches!(lexer.next(), Some(Ok(Token::Push)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Int(1))));
        assert_matches!(lexer.next(), Some(Ok(Token::Emit)));
        assert_matches!(lexer.next(), Some(Ok(Token::Dot)));
        assert_matches!(lexer.next(), Some(Ok(Token::Event)));
        assert_matches!(lexer.next(), Some(Ok(Token::Lparen)));
        assert_matches!(lexer.next(), Some(Ok(Token::QuotedIdent("abc"))));
        assert_matches!(lexer.next(), Some(Ok(Token::Rparen)));
        assert_matches!(lexer.next(), Some(Ok(Token::End)));
        assert_matches!(lexer.next(), Some(Ok(Token::Eof)));
    }
}
417}