// rusty_lr_buildscript/lib.rs

1//! Build script for rusty_lr
2//!
3//! This crate is private and not intended to be used directly.
4//! Please use the [`rusty_lr`](https://crates.io/crates/rusty_lr) crate instead.
5//!
6//! ```ignore
7//! fn main() {
8//!     println!("cargo::rerun-if-changed=src/parser/parser.rs");
9//!
10//!     let output_dir = std::env::var("OUT_DIR").unwrap();
11//!     let output = format!("{}/parser.rs", output_dir);
12//!     Builder::new()
13//!        .file("src/parser/parser.rs")
14//!        .build(&output);
15//! }
//! ```
17
18pub mod output;
19mod split;
20mod utils;
21
22use codespan_reporting::diagnostic::Diagnostic;
23use codespan_reporting::diagnostic::Label;
24use codespan_reporting::files::SimpleFiles;
25use codespan_reporting::term;
26use codespan_reporting::term::termcolor::ColorChoice;
27use codespan_reporting::term::termcolor::StandardStream;
28
29use proc_macro2::Ident;
30use proc_macro2::Span;
31use proc_macro2::TokenStream;
32
33use quote::quote;
34use rusty_lr_core::ShiftedRule;
35use rusty_lr_parser::error::ArgError;
36use rusty_lr_parser::error::EmitError;
37use rusty_lr_parser::error::ParseArgError;
38use rusty_lr_parser::error::ParseError;
39
40use std::collections::BTreeMap;
41use std::collections::BTreeSet;
42use std::fs::read;
43use std::fs::write;
44
/// Main entry for the build script.
///
/// Set the grammar file with [`Builder::file`], optionally enable the
/// `verbose_*` switches, then call `build()` to emit the generated parser.
pub struct Builder {
    /// path of the grammar file to read; must be set before building
    input_file: Option<String>,

    /// when `verbose` is on, print debug information about
    /// any shift/reduce, reduce/reduce conflicts.
    /// This is for GLR parser, to show where the conflicts occurred.
    verbose_conflicts: bool,

    /// when `verbose` is on, print debug information about
    /// conflicts resolving process by `%left` or `%right` for any shift/reduce, reduce/reduce conflicts.
    verbose_conflicts_resolving: bool,

    /// print verbose information to stderr instead of stdout
    verbose_on_stderr: bool,
}
62
63impl Builder {
64    pub fn new() -> Self {
65        Self {
66            input_file: None,
67            verbose_conflicts: false,
68            verbose_conflicts_resolving: false,
69            verbose_on_stderr: false,
70        }
71    }
72
73    /// set input file
74    pub fn file(&mut self, filename: &str) -> &mut Self {
75        self.input_file = Some(filename.to_string());
76        self
77    }
78
79    /// turns on all verbose options
80    pub fn verbose(&mut self) -> &mut Self {
81        self.verbose_conflicts();
82        self.verbose_conflicts_resolving();
83        self
84    }
85
    /// When verbose is on, print debug information about
    /// any shift/reduce, reduce/reduce conflicts.
    pub fn verbose_conflicts(&mut self) -> &mut Self {
        self.verbose_conflicts = true;
        self
    }
92
    /// When verbose is on, print debug information about the
    /// conflict-resolving process by `%left` or `%right` for any shift/reduce, reduce/reduce conflicts.
    pub fn verbose_conflicts_resolving(&mut self) -> &mut Self {
        self.verbose_conflicts_resolving = true;
        self
    }
99
    /// Print debug information to stderr instead of stdout (the default).
    pub fn verbose_on_stderr(&mut self) -> &mut Self {
        self.verbose_on_stderr = true;
        self
    }
105
106    fn verbose_stream(&self) -> StandardStream {
107        if self.verbose_on_stderr {
108            StandardStream::stderr(ColorChoice::Auto)
109        } else {
110            StandardStream::stdout(ColorChoice::Auto)
111        }
112    }
113
114    /// build and emit code to output file
115    pub fn build(&self, output_file: &str) {
116        let output = match self.build_impl() {
117            Ok(output) => {
118                let stream1 = output.user_stream;
119                let stream2 = output.generated_stream;
120                quote! {
121                    #stream1
122                    #stream2
123                }
124            }
125            Err(_) => {
126                panic!("build failed");
127            }
128        };
129
130        write(output_file, output.to_string()).expect("Failed to write to file");
131    }
132
    /// Extend `labels` with messages about the source of the rule.
    /// If `ruleid` refers to an auto-generated rule, a "{} was generated here" label is added;
    /// if it refers to a user-written rule, a "{} was defined here" primary label is added
    /// plus a secondary "in this line" label covering the rule body.
    ///
    /// `prefix_str` / `prefix_in_this_line` are prepended to the respective messages
    /// (e.g. "(Reduce) ", "(Shift) ") so callers can distinguish multiple rules
    /// in one diagnostic.
    fn extend_rule_source_label(
        labels: &mut Vec<codespan_reporting::diagnostic::Label<usize>>,
        fileid: usize,
        ruleid: usize,
        grammar: &rusty_lr_parser::grammar::Grammar,
        prefix_str: &str,
        prefix_in_this_line: &str,
    ) {
        // Map the global rule id back to its non-terminal and the rule index
        // local to that non-terminal.
        let (nonterm, local_rule) = grammar.get_rule_by_id(ruleid).expect("Rule not found");
        if let Some(origin_span) = &nonterm.regex_span {
            // A present `regex_span` marks this non-terminal as auto-generated;
            // point at the span it originated from.
            // NOTE(review): `prefix_in_this_line` is intentionally unused on this branch —
            // generated rules get a single label.
            let origin_range = origin_span.0.byte_range().start..origin_span.1.byte_range().end;
            labels.push(Label::primary(fileid, origin_range).with_message(format!(
                "{}{} was generated here",
                prefix_str, nonterm.pretty_name,
            )));
        } else {
            // User-written rule: primary label on the rule's name,
            // secondary label spanning the specific rule line.
            let (rule_begin, rule_end) = nonterm.rules[local_rule].span_pair();
            let rule_range = rule_begin.byte_range().start..rule_end.byte_range().end;

            labels.push(
                Label::primary(fileid, nonterm.name.span().byte_range()).with_message(format!(
                    "{}{} was defined here",
                    prefix_str, nonterm.pretty_name
                )),
            );
            labels.push(
                Label::secondary(fileid, rule_range)
                    .with_message(format!("{}in this line", prefix_in_this_line)),
            );
        }
    }
167
168    /// for internal use
169    pub fn build_impl(&self) -> Result<output::Output, String> {
170        if self.input_file.is_none() {
171            eprintln!("Input file not set");
172            return Err("Input file not set".to_string());
173        }
174
175        let input_file = self.input_file.as_ref().unwrap();
176        // read file
177        let input_bytes = match read(input_file) {
178            Ok(bytes) => bytes,
179            Err(e) => {
180                let message = format!("Error reading file: {}", e);
181                eprintln!("{}", message);
182                return Err(message);
183            }
184        };
185
186        let str = match String::from_utf8(input_bytes) {
187            Ok(str) => str,
188            Err(e) => {
189                let message = format!("Error reading utf-8: {}", e);
190                eprintln!("{}", message);
191                return Err(message);
192            }
193        };
194
195        let mut files = SimpleFiles::new();
196        let file_id = files.add(input_file, str.clone());
197
198        // lex with proc-macro2
199        let token_stream: TokenStream = match str.parse() {
200            Ok(token_stream) => token_stream,
201            Err(e) => {
202                let range = e.span().byte_range();
203                let diag = Diagnostic::error()
204                    .with_message("Lexing error")
205                    .with_labels(vec![
206                        Label::primary(file_id, range).with_message(e.to_string())
207                    ]);
208                let writer = StandardStream::stderr(ColorChoice::Auto);
209                let config = codespan_reporting::term::Config::default();
210                term::emit(&mut writer.lock(), &config, &files, &diag)
211                    .expect("Failed to write to stderr");
212                return Err("Lexing error".to_string());
213            }
214        };
215
216        // split stream by '%%'
217        let (output_stream, macro_stream) = match split::split_stream(token_stream) {
218            Ok((output_stream, macro_stream)) => (output_stream, macro_stream),
219            Err(_) => {
220                let diag = Diagnostic::error()
221                    .with_message("Cannot find `%%`")
222                    .with_notes(vec![
223                    "Please put `%%` to separate the code part and the context-free grammar part"
224                        .to_string(),
225                ]);
226                let writer = StandardStream::stderr(ColorChoice::Auto);
227                let config = codespan_reporting::term::Config::default();
228                term::emit(&mut writer.lock(), &config, &files, &diag)
229                    .expect("Failed to write to stderr");
230                return Err(diag.message);
231            }
232        };
233
234        let grammar_args = match rusty_lr_parser::grammar::Grammar::parse_args(macro_stream) {
235            Ok(grammar_args) => grammar_args,
236            Err(e) => {
237                let diag =
238                    match e {
239                        ParseArgError::MacroLineParse { span, message } => {
240                            let range = span.byte_range();
241
242                            Diagnostic::error()
243                                .with_message("Parse Failed")
244                                .with_labels(vec![
245                                    Label::primary(file_id, range).with_message("Error here")
246                                ])
247                                .with_notes(vec![message])
248                        }
249                        ParseArgError::MacroLineParseEnd { message } => Diagnostic::error()
250                            .with_message("Parse Failed")
251                            .with_notes(vec![message]),
252
253                        _ => {
254                            let message = e.short_message();
255                            let span = e.span().byte_range();
256                            Diagnostic::error().with_message(message).with_labels(vec![
257                                Label::primary(file_id, span).with_message("occured here"),
258                            ])
259                        }
260                    };
261
262                let writer = StandardStream::stderr(ColorChoice::Auto);
263                let config = codespan_reporting::term::Config::default();
264                term::emit(&mut writer.lock(), &config, &files, &diag)
265                    .expect("Failed to write to stderr");
266                return Err(diag.message);
267            }
268        };
269        match rusty_lr_parser::grammar::Grammar::arg_check_error(&grammar_args) {
270            Ok(_) => {}
271            Err(e) => {
272                let diag = match e {
273                    ArgError::MultipleModulePrefixDefinition(
274                        (span1, tokenstream1),
275                        (span2, tokenstream2),
276                    ) => {
277                        let range1 = utils::span_stream_range(span1, tokenstream1);
278                        let range2 = utils::span_stream_range(span2, tokenstream2);
279
280                        Diagnostic::error()
281                            .with_message("Multiple %moduleprefix definition")
282                            .with_labels(vec![
283                                Label::primary(file_id, range1).with_message("First definition"),
284                                Label::primary(file_id, range2).with_message("Other definition"),
285                            ])
286                            .with_notes(vec![
287                                "Only one %moduleprefix definition is allowed".to_string()
288                            ])
289                    }
290                    ArgError::MultipleUserDataDefinition(
291                        (span1, tokenstream1),
292                        (span2, tokenstream2),
293                    ) => {
294                        let range1 = utils::span_stream_range(span1, tokenstream1);
295                        let range2 = utils::span_stream_range(span2, tokenstream2);
296
297                        Diagnostic::error()
298                            .with_message("Multiple %userdata definition")
299                            .with_labels(vec![
300                                Label::primary(file_id, range1).with_message("First definition"),
301                                Label::primary(file_id, range2).with_message("Other definition"),
302                            ])
303                            .with_notes(
304                                vec!["Only one %userdata definition is allowed".to_string()],
305                            )
306                    }
307                    ArgError::MultipleErrorDefinition(
308                        (span1, tokenstream1),
309                        (span2, tokenstream2),
310                    ) => {
311                        let range1 = utils::span_stream_range(span1, tokenstream1);
312                        let range2 = utils::span_stream_range(span2, tokenstream2);
313
314                        Diagnostic::error()
315                            .with_message("Multiple %error definition")
316                            .with_labels(vec![
317                                Label::primary(file_id, range1).with_message("First definition"),
318                                Label::primary(file_id, range2).with_message("Other definition"),
319                            ])
320                            .with_notes(vec!["Only one %error definition is allowed".to_string()])
321                    }
322                    ArgError::MultipleTokenTypeDefinition(
323                        (span1, tokenstream1),
324                        (span2, tokenstream2),
325                    ) => {
326                        let range1 = utils::span_stream_range(span1, tokenstream1);
327                        let range2 = utils::span_stream_range(span2, tokenstream2);
328
329                        Diagnostic::error()
330                            .with_message("Multiple %tokentype definition")
331                            .with_labels(vec![
332                                Label::primary(file_id, range1).with_message("First definition"),
333                                Label::primary(file_id, range2).with_message("Other definition"),
334                            ])
335                            .with_notes(vec![
336                                "Only one %tokentype definition is allowed".to_string()
337                            ])
338                    }
339                    ArgError::MultipleEofDefinition(
340                        (span1, tokenstream1),
341                        (span2, tokenstream2),
342                    ) => {
343                        let range1 = utils::span_stream_range(span1, tokenstream1);
344                        let range2 = utils::span_stream_range(span2, tokenstream2);
345
346                        Diagnostic::error()
347                            .with_message("Multiple %eof definition")
348                            .with_labels(vec![
349                                Label::primary(file_id, range1).with_message("First definition"),
350                                Label::primary(file_id, range2).with_message("Other definition"),
351                            ])
352                            .with_notes(vec!["Only one %eof definition is allowed".to_string()])
353                    }
354                    ArgError::MultipleStartDefinition(ident1, ident2) => {
355                        let range1 = ident1.span().byte_range();
356                        let range2 = ident2.span().byte_range();
357
358                        Diagnostic::error()
359                            .with_message("Multiple %start definition")
360                            .with_labels(vec![
361                                Label::primary(file_id, range1).with_message("First definition"),
362                                Label::primary(file_id, range2).with_message("Other definition"),
363                            ])
364                            .with_notes(vec!["Only one %start definition is allowed".to_string()])
365                    }
366
367                    ArgError::StartNotDefined => Diagnostic::error()
368                        .with_message("%start not defined")
369                        .with_labels(vec![])
370                        .with_notes(vec![
371                            "%start must be defined".to_string(),
372                            ">>> %start <non-terminal>".to_string(),
373                        ]),
374                    ArgError::EofNotDefined => Diagnostic::error()
375                        .with_message("%eof not defined")
376                        .with_labels(vec![])
377                        .with_notes(vec![
378                            "%eof must be defined".to_string(),
379                            ">>> %eof <terminal>".to_string(),
380                        ]),
381                    ArgError::TokenTypeNotDefined => Diagnostic::error()
382                        .with_message("%tokentype not defined")
383                        .with_labels(vec![])
384                        .with_notes(vec![
385                            "%tokentype must be defined".to_string(),
386                            ">>> %tokentype <TokenType>".to_string(),
387                        ]),
388                    _ => {
389                        let message = e.short_message();
390                        let span = e.span().byte_range();
391                        Diagnostic::error()
392                            .with_message(message)
393                            .with_labels(vec![
394                                Label::primary(file_id, span).with_message("occured here")
395                            ])
396                    }
397                };
398
399                let writer = StandardStream::stderr(ColorChoice::Auto);
400                let config = codespan_reporting::term::Config::default();
401                term::emit(&mut writer.lock(), &config, &files, &diag)
402                    .expect("Failed to write to stderr");
403                return Err(diag.message);
404            }
405        }
406
407        // parse lines
408        let grammar = match rusty_lr_parser::grammar::Grammar::from_grammar_args(grammar_args) {
409            Ok(grammar) => grammar,
410            Err(e) => {
411                let diag = match e {
412                    ParseError::MultipleRuleDefinition(ident1, ident2) => {
413                        let range1 = ident1.span().byte_range();
414                        let range2 = ident2.span().byte_range();
415
416                        Diagnostic::error()
417                            .with_message("Multiple rule definition")
418                            .with_labels(vec![
419                                Label::primary(file_id, range1).with_message("First definition"),
420                                Label::primary(file_id, range2).with_message("Other definition"),
421                            ])
422                            .with_notes(vec!["Rule name must be unique".to_string()])
423                    }
424
425                    ParseError::MultipleReduceDefinition { terminal, old, new } => {
426                        let old_range = old.0.byte_range().start..old.1.byte_range().end;
427                        let old_string = match old.2 {
428                            rusty_lr_core::ReduceType::Left => "%left",
429                            rusty_lr_core::ReduceType::Right => "%right",
430                        };
431                        let new_range = new.0.byte_range().start..new.1.byte_range().end;
432                        let new_string = match new.2 {
433                            rusty_lr_core::ReduceType::Left => "%left",
434                            rusty_lr_core::ReduceType::Right => "%right",
435                        };
436
437                        Diagnostic::error()
438                            .with_message("Multiple reduce definition")
439                            .with_labels(vec![
440                                Label::primary(file_id, terminal.span().byte_range()).with_message(
441                                    "This terminal symbol is defined as both of %left and %right",
442                                ),
443                                Label::secondary(file_id, old_range)
444                                    .with_message(format!("was set as {} here", old_string)),
445                                Label::secondary(file_id, new_range)
446                                    .with_message(format!("was set as {} here", new_string)),
447                            ])
448                            .with_notes(vec![
449                                "Reduce type must be unique, either %left or %right".to_string()
450                            ])
451                    }
452
453                    ParseError::TermNonTermConflict {
454                        name,
455                        terminal,
456                        non_terminal,
457                    } => {
458                        let range = name.span().byte_range();
459
460                        Diagnostic::error()
461                            .with_message("Ambiguous token name")
462                            .with_labels(vec![
463                                Label::primary(file_id, range).with_message(
464                                    "This name is used for both terminal and non-terminal",
465                                ),
466                                Label::secondary(file_id, terminal.span().byte_range())
467                                    .with_message("Terminal definition here"),
468                                Label::secondary(file_id, non_terminal.span().byte_range())
469                                    .with_message("Non-terminal definition here"),
470                            ])
471                            .with_notes(vec![
472                                "Terminal and non-terminal name must be unique".to_string()
473                            ])
474                    }
475
476                    ParseError::InvalidTerminalRange(
477                        (first, first_index, first_stream),
478                        (last, last_index, last_stream),
479                    ) => {
480                        let range1 = first.span().byte_range();
481                        let range2 = last.span().byte_range();
482                        let range = range1.start..range2.end;
483                        let range1 = utils::tokenstream_range(first_stream);
484                        let range2 = utils::tokenstream_range(last_stream);
485
486                        Diagnostic::error()
487                        .with_message("Invalid terminal range")
488                        .with_labels(vec![
489                            Label::primary(file_id, range).with_message("Invalid range here"),
490                            Label::secondary(file_id, range1).with_message(format!("First terminal symbol (index {})", first_index)),
491                            Label::secondary(file_id, range2).with_message(format!("Last terminal symbol (index {})", last_index)),
492                        ]).with_notes(vec![
493                            "First terminal symbol has to be less than or equal to the last terminal symbol".to_string()
494                        ])
495                    }
496
497                    ParseError::StartNonTerminalNotDefined(ident) => {
498                        let range = ident.span().byte_range();
499
500                        Diagnostic::error()
501                            .with_message("Start non-terminal not defined")
502                            .with_labels(vec![Label::primary(file_id, range)
503                                .with_message("This name is given to %start")])
504                            .with_notes(vec!["Non-terminal name must be defined".to_string()])
505                    }
506
507                    ParseError::TerminalNotDefined(ident) => {
508                        let range = ident.span().byte_range();
509
510                        Diagnostic::error()
511                            .with_message("Terminal symbol not defined")
512                            .with_labels(vec![Label::primary(file_id, range)
513                                .with_message("This terminal symbol is not defined")])
514                            .with_notes(vec!["Terminal symbol must be defined".to_string()])
515                    }
516
517                    ParseError::MultipleTokenDefinition(ident1, ident2) => {
518                        let range1 = ident1.span().byte_range();
519                        let range2 = ident2.span().byte_range();
520
521                        Diagnostic::error()
522                            .with_message("Multiple %token definition")
523                            .with_labels(vec![
524                                Label::primary(file_id, range1).with_message("First definition"),
525                                Label::primary(file_id, range2).with_message("Other definition"),
526                            ])
527                            .with_notes(vec!["Token name must be unique".to_string()])
528                    }
529
530                    ParseError::EofDefined(ident) => {
531                        let range = ident.span().byte_range();
532
533                        Diagnostic::error()
534                            .with_message("'eof' is reserved name")
535                            .with_labels(vec![Label::primary(file_id, range)
536                                .with_message("This name is reserved")])
537                    }
538                    ParseError::AugmentedDefined(ident) => {
539                        let range = ident.span().byte_range();
540
541                        Diagnostic::error()
542                            .with_message("'Augmented' is reserved name")
543                            .with_labels(vec![Label::primary(file_id, range)
544                                .with_message("This name is reserved")])
545                    }
546                    _ => {
547                        let message = e.short_message();
548                        let span = e.span().byte_range();
549                        Diagnostic::error()
550                            .with_message(message)
551                            .with_labels(vec![
552                                Label::primary(file_id, span).with_message("occured here")
553                            ])
554                    }
555                };
556
557                let writer = StandardStream::stderr(ColorChoice::Auto);
558                let config = codespan_reporting::term::Config::default();
559                term::emit(&mut writer.lock(), &config, &files, &diag)
560                    .expect("Failed to write to stderr");
561
562                return Err(diag.message);
563            }
564        };
565
566        // expand macro
567        let expanded_stream = match grammar.emit_compiletime() {
568            Ok(expanded_stream) => expanded_stream,
569            Err(e) => {
570                let diag = match e.as_ref() {
571                    EmitError::RuleTypeDefinedButActionNotDefined {
572                        name,
573                        rule_local_id,
574                    } => {
575                        // `name` must not be generated rule,
576                        // since it is programmically generated, it must have a proper reduce action
577
578                        let rule_id = *grammar.nonterminals_index.get(name).unwrap();
579                        let rule_line = &grammar.nonterminals[rule_id].rules[*rule_local_id];
580                        let rule_line_range = if rule_line.tokens.is_empty() {
581                            rule_line.separator_span.byte_range()
582                        } else {
583                            let first = rule_line.separator_span.byte_range().start;
584                            let last = rule_line.tokens.last().unwrap().end_span.byte_range().end;
585                            first..last
586                        };
587                        Diagnostic::error()
588                            .with_message("Reduce action not defined")
589                            .with_labels(vec![
590                                Label::secondary(file_id, name.span().byte_range())
591                                    .with_message("This rule has a type definition"),
592                                Label::primary(file_id, rule_line_range)
593                                    .with_message("This rule line has no reduce action"),
594                            ])
595                            .with_notes(vec!["".to_string()])
596                    }
597
598                    EmitError::ShiftReduceConflict {
599                        term,
600                        reduce_rule: (reduceid, reduce_production_rule),
601                        shift_rules,
602                    } => {
603                        let mut message = format!(
604                            "Shift/Reduce conflict:\nReduce rule:\n\t>>> {}\nShift rules:",
605                            reduce_production_rule
606                        );
607                        for (_, shifted_rule) in shift_rules.iter() {
608                            message.push_str(format!("\n\t>>> {}", shifted_rule).as_str());
609                        }
610                        let mut labels = Vec::new();
611
612                        Self::extend_rule_source_label(
613                            &mut labels,
614                            file_id,
615                            *reduceid,
616                            &grammar,
617                            "(Reduce) ",
618                            "error ",
619                        );
620
621                        for (shiftid, _) in shift_rules.iter() {
622                            Self::extend_rule_source_label(
623                                &mut labels,
624                                file_id,
625                                *shiftid,
626                                &grammar,
627                                "(Shift) ",
628                                "error ",
629                            );
630                        }
631                        Diagnostic::error()
632                            .with_message(message)
633                            .with_labels(labels)
634                            .with_notes(vec![
635                                format!("conflict terminal: {}", term),
636                                format!(
637                                "Try to rearrange the rules or resolve conflict by set reduce type"
638                            ),
639                                format!(">>> %left {}", term),
640                                format!(">>> %right {}", term),
641                            ])
642                    }
643                    EmitError::ReduceReduceConflict {
644                        lookahead,
645                        rule1: (ruleid1, production_rule1),
646                        rule2: (ruleid2, production_rule2),
647                    } => {
648                        let mut labels = Vec::new();
649
650                        Self::extend_rule_source_label(
651                            &mut labels,
652                            file_id,
653                            *ruleid1,
654                            &grammar,
655                            "(Rule1) ",
656                            "error ",
657                        );
658                        Self::extend_rule_source_label(
659                            &mut labels,
660                            file_id,
661                            *ruleid2,
662                            &grammar,
663                            "(Rule2) ",
664                            "error ",
665                        );
666
667                        Diagnostic::error()
668                            .with_message(format!(
669                                "Reduce/Reduce conflict:\n>>> {}\n>>> {}",
670                                production_rule1, production_rule2
671                            ))
672                            .with_labels(labels)
673                            .with_notes(vec![format!("with lookahead {}", lookahead)])
674                    }
675
676                    _ => {
677                        let message = e.short_message();
678                        let span = e.span().byte_range();
679                        Diagnostic::error()
680                            .with_message(message)
681                            .with_labels(vec![
682                                Label::primary(file_id, span).with_message("occured here")
683                            ])
684                    }
685                };
686
687                let writer = StandardStream::stderr(ColorChoice::Auto);
688                let config = codespan_reporting::term::Config::default();
689                term::emit(&mut writer.lock(), &config, &files, &diag)
690                    .expect("Failed to write to stderr");
691
692                return Err(diag.message);
693            }
694        };
695
696        // this comments will be printed to the output file
697        // build again here whether it was built before
698        // since many informations are removed in the rusty_lr_parser output
699        let mut debug_comments = String::new();
700        {
701            // to map production rule to its pretty name abbreviation
702            let term_mapper = |term_idx: usize| grammar.terminals[term_idx].name.to_string();
703            let nonterm_mapper = |nonterm: usize| grammar.nonterminals[nonterm].pretty_name.clone();
704
705            let mut builder = grammar.create_grammar();
706            debug_comments.push_str(format!("{:=^80}\n", "Grammar").as_str());
707            for (rule, _) in builder.rules.iter() {
708                debug_comments.push_str(
709                    format!("{}\n", rule.clone().map(term_mapper, nonterm_mapper)).as_str(),
710                );
711            }
712            let augmented_rule_id = *grammar
713                .nonterminals_index
714                .get(&Ident::new(
715                    rusty_lr_parser::utils::AUGMENTED_NAME,
716                    Span::call_site(),
717                ))
718                .unwrap();
719            let parser = if grammar.lalr {
720                match builder.build_lalr(augmented_rule_id) {
721                    Ok(parser) => parser,
722                    Err(_) => unreachable!("Grammar building failed"),
723                }
724            } else {
725                match builder.build(augmented_rule_id) {
726                    Ok(parser) => parser,
727                    Err(_) => unreachable!("Grammar building failed"),
728                }
729            };
730
731            // print note about shift/reduce conflict resolved with `%left` or `%right`
732            if self.verbose_conflicts_resolving {
733                for state in parser.states.iter() {
734                    let mut reduce_rules = BTreeMap::new();
735                    let mut shift_rules = BTreeMap::new();
736
737                    for (shifted_rule_ref, lookaheads) in state.ruleset.rules.iter() {
738                        // is end of rule, add to reduce
739                        if shifted_rule_ref.shifted
740                            == builder.rules[shifted_rule_ref.rule].0.rule.len()
741                        {
742                            for token in lookaheads.iter() {
743                                reduce_rules.insert(token, shifted_rule_ref.rule);
744                            }
745                        }
746
747                        // if it is not end, and next token is terminal, add to shift
748                        if let Some(rusty_lr_core::Token::Term(token)) = builder.rules
749                            [shifted_rule_ref.rule]
750                            .0
751                            .rule
752                            .get(shifted_rule_ref.shifted)
753                        {
754                            shift_rules
755                                .entry(token)
756                                .or_insert_with(BTreeSet::new)
757                                .insert(*shifted_rule_ref);
758                        }
759                    }
760
761                    // check shift/reduce conflict
762                    for (term, shift_rules) in shift_rules.into_iter() {
763                        if let Some(reduce_rule) = reduce_rules.get(term) {
764                            // shift/reduce conflict here
765                            // since there were not error reaching here, 'term' must be set reduce_type
766
767                            let mut message = format!(
768                                "Shift/Reduce conflict with token {} was resolved:\nReduce rule:\n\t>>> {}\nShift rules:",
769                                grammar.terminals[*term].name,
770                                builder.rules[*reduce_rule].0.clone().map(term_mapper, nonterm_mapper)
771                            );
772                            for shifted_rule in shift_rules.iter() {
773                                let shifted_rule = ShiftedRule {
774                                    rule: builder.rules[shifted_rule.rule]
775                                        .0
776                                        .clone()
777                                        .map(term_mapper, nonterm_mapper),
778                                    shifted: shifted_rule.shifted,
779                                };
780                                message.push_str(format!("\n\t>>> {}", shifted_rule).as_str());
781                            }
782
783                            let mut labels = Vec::new();
784
785                            Self::extend_rule_source_label(
786                                &mut labels,
787                                file_id,
788                                *reduce_rule,
789                                &grammar,
790                                "(Reduce) ",
791                                "error ",
792                            );
793
794                            let mut shift_source_inserted = BTreeSet::new();
795                            for shift_rule in shift_rules.iter() {
796                                let name = &builder.rules[shift_rule.rule].0.name;
797                                if !shift_source_inserted.contains(name) {
798                                    shift_source_inserted.insert(name);
799                                    Self::extend_rule_source_label(
800                                        &mut labels,
801                                        file_id,
802                                        shift_rule.rule,
803                                        &grammar,
804                                        "(Shift) ",
805                                        "error ",
806                                    );
807                                }
808                            }
809
810                            let term_info = &grammar.terminals[*term];
811                            if let Some(reduce_type_origin) = &term_info.reduce_type {
812                                let reduce_type = reduce_type_origin.reduce_type;
813                                let type_string = match reduce_type {
814                                    rusty_lr_core::ReduceType::Left => "%left",
815                                    rusty_lr_core::ReduceType::Right => "%right",
816                                };
817                                for (first, last) in reduce_type_origin.sources.iter() {
818                                    let range = first.byte_range().start..last.byte_range().end;
819                                    labels.push(Label::primary(file_id, range).with_message(
820                                        format!("Reduce type was set as {} here", type_string),
821                                    ));
822                                }
823
824                                let diag =
825                                    Diagnostic::note().with_message(message).with_labels(labels);
826
827                                let writer = self.verbose_stream();
828                                let config = codespan_reporting::term::Config::default();
829                                term::emit(&mut writer.lock(), &config, &files, &diag)
830                                    .expect("Failed to write to stderr");
831                            }
832                        }
833                    }
834                }
835            }
836
837            // print note about reduce/reduce conflict and shift/reduce conflict not resolved
838            if self.verbose_conflicts {
839                // to prevent duplicated messages, collect them into BTreeMap first
840                let mut reduce_rules_set = BTreeMap::new();
841                for state in parser.states.iter() {
842                    for (term, reduce_rules) in state.reduce_map.iter() {
843                        if reduce_rules.len() > 1 {
844                            reduce_rules_set
845                                .entry(reduce_rules)
846                                .or_insert_with(BTreeSet::new)
847                                .insert(term);
848                        }
849                    }
850                }
851
852                for (reduce_rules, terms) in reduce_rules_set.into_iter() {
853                    let mut message = "Reduce/Reduce conflict:".to_string();
854                    let mut labels = Vec::new();
855                    let mut note = "with lookaheads: ".to_string();
856                    let len = terms.len();
857                    for (idx, term) in terms.into_iter().enumerate() {
858                        let term = &grammar.terminals[*term].name;
859                        if idx < len - 1 {
860                            note.push_str(format!("{}, ", term).as_str());
861                        } else {
862                            note.push_str(format!("{}", term).as_str());
863                        }
864                    }
865
866                    let mut reduce_source_inserted = BTreeSet::new();
867                    for reduce_rule in reduce_rules.iter() {
868                        message.push_str(
869                            format!(
870                                "\n\t>>> {}",
871                                builder.rules[*reduce_rule]
872                                    .0
873                                    .clone()
874                                    .map(term_mapper, nonterm_mapper)
875                            )
876                            .as_str(),
877                        );
878                        let name = &builder.rules[*reduce_rule].0.name;
879                        if !reduce_source_inserted.contains(name) {
880                            reduce_source_inserted.insert(name);
881                            Self::extend_rule_source_label(
882                                &mut labels,
883                                file_id,
884                                *reduce_rule,
885                                &grammar,
886                                "",
887                                "error ",
888                            );
889                        }
890                    }
891
892                    let diag = Diagnostic::note()
893                        .with_message(message)
894                        .with_labels(labels)
895                        .with_notes(vec![note]);
896
897                    let writer = self.verbose_stream();
898                    let config = codespan_reporting::term::Config::default();
899                    term::emit(&mut writer.lock(), &config, &files, &diag)
900                        .expect("Failed to write to stderr");
901                }
902
903                let mut shift_map = BTreeMap::new();
904                for state in parser.states.iter() {
905                    for (term, reduce_rules) in state.reduce_map.iter() {
906                        if let Some(next_state) = state.shift_goto_map_term.get(term) {
907                            shift_map
908                                .entry(
909                                    parser.states[*next_state]
910                                        .ruleset
911                                        .rules
912                                        .keys()
913                                        .copied()
914                                        .collect::<BTreeSet<_>>(),
915                                )
916                                .or_insert_with(BTreeSet::new)
917                                .append(&mut reduce_rules.clone());
918                        }
919                    }
920                }
921
922                for (shift_ruleset, reduce_rules) in shift_map.into_iter() {
923                    let mut message = "Shift/Reduce conflict:".to_string();
924                    let mut labels = Vec::new();
925
926                    let mut reduce_source_inserted = BTreeSet::new();
927                    message.push_str("\nReduce rules:");
928                    for reduce_rule in reduce_rules.iter() {
929                        message.push_str(
930                            format!(
931                                "\n\t>>> {}",
932                                builder.rules[*reduce_rule]
933                                    .0
934                                    .clone()
935                                    .map(term_mapper, nonterm_mapper)
936                            )
937                            .as_str(),
938                        );
939                        let name = &builder.rules[*reduce_rule].0.name;
940                        if !reduce_source_inserted.contains(name) {
941                            reduce_source_inserted.insert(name);
942                            Self::extend_rule_source_label(
943                                &mut labels,
944                                file_id,
945                                *reduce_rule,
946                                &grammar,
947                                "(Reduce) ",
948                                "error ",
949                            );
950                        }
951                    }
952
953                    let mut shift_source_inserted = BTreeSet::new();
954                    message.push_str("\nShift rules:");
955                    for shifted_rule in shift_ruleset.into_iter() {
956                        let shifted_rule_ = ShiftedRule {
957                            rule: builder.rules[shifted_rule.rule]
958                                .0
959                                .clone()
960                                .map(term_mapper, nonterm_mapper),
961                            shifted: shifted_rule.shifted,
962                        };
963                        message.push_str(format!("\n\t>>> {}", shifted_rule_).as_str());
964
965                        let name = &builder.rules[shifted_rule.rule].0.name;
966
967                        if !shift_source_inserted.contains(name) {
968                            shift_source_inserted.insert(name);
969                            Self::extend_rule_source_label(
970                                &mut labels,
971                                file_id,
972                                shifted_rule.rule,
973                                &grammar,
974                                "(Shift) ",
975                                "error ",
976                            );
977                        }
978                    }
979
980                    let diag = Diagnostic::note().with_message(message).with_labels(labels);
981
982                    let writer = self.verbose_stream();
983                    let config = codespan_reporting::term::Config::default();
984                    term::emit(&mut writer.lock(), &config, &files, &diag)
985                        .expect("Failed to write to stderr");
986                }
987            }
988        }
989
990        Ok(output::Output {
991            user_stream: output_stream,
992            generated_stream: expanded_stream,
993            debug_comments,
994        })
995    }
996}
997
998impl Default for Builder {
999    fn default() -> Self {
1000        Self::new()
1001    }
1002}