1use crate::LRParseTable;
2use crate::analysis::LookaheadDFA;
3use crate::analysis::compiled_la_dfa::CompiledDFA;
4use crate::analysis::lookahead_dfa::CompiledProductionIndex;
5use crate::config::{CommonGeneratorConfig, ParserGeneratorConfig};
6use crate::generators::parser_model::{
7 LookaheadAutomatonModel as LookaheadAutomatonIR, ProductionModel as ProductionIR,
8 ProductionSymbolModel as ProductionSymbolIR, build_export_model_for_lalr,
9 build_export_model_for_llk, build_lookahead_automata_model, build_production_model,
10 find_start_symbol_index as parser_model_find_start_symbol_index,
11};
12use crate::generators::parser_render_ir::{
13 LalrProductionRenderIR, build_lalr_production_render_ir, build_non_terminal_metadata_ir,
14 build_rust_lalr_parse_table_render_ir, build_terminal_label_map,
15};
16use crate::generators::{GrammarConfig, NamingHelper};
17use crate::parser::GrammarType;
18use crate::parser::parol_grammar::LookaheadExpression;
19use anyhow::Result;
20use std::collections::BTreeMap;
21
22use crate::StrVec;
23use std::fmt::Debug;
24
/// Render model for the lookahead DFA of a single non-terminal.
/// Its `Display` impl emits one `LookaheadDFA { ... }` entry of the
/// generated `LOOKAHEAD_AUTOMATA` constant.
#[derive(Debug, Default)]
pub(crate) struct Dfa {
    // Value emitted as the `prod0` field of the generated `LookaheadDFA`.
    prod0: CompiledProductionIndex,
    // Pre-rendered `Trans(...)` source lines of the automaton's transitions.
    transitions: StrVec,
    // Lookahead size emitted as the `k` field of the generated `LookaheadDFA`.
    k: usize,
    // Index of the associated non-terminal (used in the generated comment).
    nt_index: usize,
    // Name of the associated non-terminal (used in the generated comment).
    nt_name: String,
}
33
34impl Dfa {
35 #[allow(dead_code)]
36 pub(crate) fn from_compiled_dfa(
37 compiled_dfa: CompiledDFA,
38 nt_index: usize,
39 nt_name: String,
40 ) -> Dfa {
41 let prod0 = compiled_dfa.prod0;
42 let transitions = compiled_dfa.transitions.iter().fold(
43 StrVec::new(4).first_line_no_indent(),
44 |mut acc, t| {
45 acc.push(format!(
46 "Trans({}, {}, {}, {}),",
47 t.from_state, t.term, t.to_state, t.prod_num
48 ));
49 acc
50 },
51 );
52 let k = compiled_dfa.k;
53
54 Self {
55 prod0,
56 transitions,
57 k,
58 nt_index,
59 nt_name,
60 }
61 }
62
63 fn from_ir(automaton_ir: &LookaheadAutomatonIR) -> Self {
64 let prod0 = automaton_ir.prod0;
65 let transitions = automaton_ir.transitions.iter().fold(
66 StrVec::new(4).first_line_no_indent(),
67 |mut acc, t| {
68 acc.push(format!(
69 "Trans({}, {}, {}, {}),",
70 t.from_state, t.term, t.to_state, t.prod_num
71 ));
72 acc
73 },
74 );
75 let k = automaton_ir.k;
76
77 Self {
78 prod0,
79 transitions,
80 k,
81 nt_index: automaton_ir.non_terminal_index,
82 nt_name: automaton_ir.non_terminal_name.clone(),
83 }
84 }
85}
86
impl std::fmt::Display for Dfa {
    /// Emits one `LookaheadDFA { ... }` source entry, preceded by a comment
    /// naming the non-terminal it decides, e.g. `/* 7 - "Expression" */`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Dfa {
            prod0,
            transitions,
            k,
            nt_index,
            nt_name,
        } = self;
        writeln!(f, r#"/* {nt_index} - "{nt_name}" */"#)?;
        // `ume!` quasi-quotes the generated Rust code; `#ident` interpolates.
        f.write_fmt(ume::ume! {
            LookaheadDFA {
                prod0: #prod0,
                transitions: &[#transitions],
                k: #k,
            },
        })
    }
}
106
/// Render model for the generated `LOOKAHEAD_AUTOMATA` constant.
#[derive(Debug, Default)]
struct Dfas {
    // Number of automata; becomes the array length in the generated constant.
    dfa_count: usize,
    // Concatenated source of all `LookaheadDFA` entries.
    lookahead_dfa_s: String,
}
112
impl std::fmt::Display for Dfas {
    /// Emits `pub const LOOKAHEAD_AUTOMATA: &[LookaheadDFA; N] = &[...];`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Dfas {
            dfa_count,
            lookahead_dfa_s,
        } = self;
        f.write_fmt(ume::ume! {
            pub const LOOKAHEAD_AUTOMATA: &[LookaheadDFA; #dfa_count] = &[
            #lookahead_dfa_s];
        })
    }
}
125
/// Render model for one entry of the generated LL(k) `PRODUCTIONS` array.
#[derive(Debug, Default)]
struct Production {
    // Index of the left-hand-side non-terminal.
    lhs: usize,
    // Pre-rendered `ParseType::N(..)`/`ParseType::T(..)` lines of the RHS.
    production: StrVec,
    // Production number (used in the generated comment).
    prod_num: usize,
    // Human-readable production text (used in the generated comment).
    prod_string: String,
}
133
134impl Production {
135 fn from_ir(production_ir: &ProductionIR) -> Self {
136 let lhs = production_ir.lhs_index;
137 let production = production_ir.rhs.iter().rev().fold(
138 StrVec::new(4).first_line_no_indent(),
139 |mut acc, s| {
140 match s {
141 ProductionSymbolIR::NonTerminal(index) => {
142 acc.push(format!("ParseType::N({index}),"))
143 }
144 ProductionSymbolIR::Terminal { index, .. } => {
145 acc.push(format!("ParseType::T({index}),"))
146 }
147 }
148 acc
149 },
150 );
151 Self {
152 lhs,
153 production,
154 prod_num: production_ir.production_index,
155 prod_string: production_ir.text.clone(),
156 }
157 }
158}
159
impl std::fmt::Display for Production {
    /// Emits one `Production { ... }` entry preceded by a comment holding the
    /// production number and its text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Production {
            lhs,
            production,
            prod_num,
            prod_string,
        } = self;
        writeln!(f, "// {prod_num} - {prod_string}")?;
        f.write_fmt(ume::ume! {
            Production {
                lhs: #lhs,
                production: &[#production],
            },
        })?;
        writeln!(f)
    }
}
178
/// Render model for one entry of the generated LALR(1) `PRODUCTIONS` array.
/// Unlike the LL(k) variant, only the RHS length is needed, not its symbols.
#[derive(Debug, Default)]
struct LRProduction {
    // Index of the left-hand-side non-terminal.
    lhs: usize,
    // Number of symbols on the right-hand side.
    len: usize,
    // Production number (used in the generated comment).
    prod_num: usize,
    // Human-readable production text (used in the generated comment).
    prod_string: String,
}
186
187impl LRProduction {
188 fn from_render_ir(production_render_ir: &LalrProductionRenderIR) -> Self {
189 let lhs = production_render_ir.lhs_index;
190 let len = production_render_ir.rhs_len;
191 Self {
192 lhs,
193 len,
194 prod_num: production_render_ir.production_index,
195 prod_string: production_render_ir.text.clone(),
196 }
197 }
198}
199
impl std::fmt::Display for LRProduction {
    /// Emits one `LRProduction { ... }` entry preceded by a comment holding
    /// the production number and its text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let LRProduction {
            lhs,
            len,
            prod_num,
            prod_string,
        } = self;
        writeln!(f, "// {prod_num} - {prod_string}")?;
        f.write_fmt(ume::ume! {
            LRProduction {
                lhs: #lhs,
                len: #len,
            },
        })?;
        writeln!(f)
    }
}
218
/// Render model for the generated LL(k) `PRODUCTIONS` constant.
#[derive(Debug, Default)]
struct Productions {
    // Number of productions; becomes the array length in the generated constant.
    production_count: usize,
    // Concatenated source of all `Production` entries.
    productions: String,
}
224
impl std::fmt::Display for Productions {
    /// Emits `pub const PRODUCTIONS: &[Production; N] = &[...];`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Productions {
            production_count,
            productions,
        } = self;
        f.write_fmt(ume::ume! {
            pub const PRODUCTIONS: &[Production; #production_count] = &[
            #productions];
        })
    }
}
237
/// Render model for the generated LALR(1) `PRODUCTIONS` constant.
#[derive(Debug, Default)]
struct LRProductions {
    // Number of productions; becomes the array length in the generated constant.
    production_count: usize,
    // Concatenated source of all `LRProduction` entries.
    productions: String,
}
243
impl std::fmt::Display for LRProductions {
    /// Emits `pub const PRODUCTIONS: &[LRProduction; N] = &[...];`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let LRProductions {
            production_count,
            productions,
        } = self;
        f.write_fmt(ume::ume! {
            pub const PRODUCTIONS: &[LRProduction; #production_count] = &[
            #productions];
        })
    }
}
256
/// All data needed to render the complete source of a generated LL(k) parser
/// file; the rendering happens in the `Display` impl.
#[derive(Debug, Default)]
struct ParserData<'a> {
    // Index of the grammar's start symbol within `NON_TERMINALS`.
    start_symbol_index: usize,
    // Pre-generated scanner source, embedded verbatim into the output.
    lexer_source: &'a str,
    // Pre-rendered rows of the `NON_TERMINALS` array.
    non_terminals: StrVec,
    non_terminal_count: usize,
    // Source of the `LOOKAHEAD_AUTOMATA` constant.
    dfa_source: String,
    // Source of the `PRODUCTIONS` constant.
    productions: String,
    // Emitted as `const MAX_K` in the generated file.
    max_k: usize,
    // Name of the user's semantic actions type.
    user_type_name: &'a str,
    // Either "<'t>" or "", depending on whether the AST type has a lifetime.
    user_type_life_time: &'static str,
    scanner_type_name: String,
    scanner_module_name: String,
    // Module containing the user's semantic actions type.
    module_name: &'a str,
    // If set, the generated parser calls `trim_parse_tree()`.
    trim_parse_tree: bool,
    // If set, the generated parser calls `disable_recovery()`.
    disable_recovery: bool,
}
274
impl std::fmt::Display for ParserData<'_> {
    /// Renders the complete source of the generated LL(k) parser file:
    /// banner, imports, embedded scanner, parse tables, and the public
    /// `parse`/`parse_into` entry points.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let ParserData {
            start_symbol_index,
            lexer_source,
            non_terminals,
            non_terminal_count,
            dfa_source,
            productions,
            max_k,
            user_type_name,
            scanner_type_name,
            scanner_module_name,
            user_type_life_time,
            module_name,
            trim_parse_tree,
            disable_recovery,
        } = self;

        // "Do not edit" banner at the top of the generated file.
        writeln!(
            f,
            "
            // ---------------------------------------------------------
            // This file was generated by parol.
            // Do not edit this file manually.
            // Changes will be overwritten on the next build.
            // ---------------------------------------------------------
            "
        )?;

        // Runtime imports an LL(k) parser needs.
        f.write_fmt(ume::ume! {
            use parol_runtime::{
                parser::{
                    parse_tree_type::TreeConstruct, LLKParser, LookaheadDFA, ParseType, Production, Trans,
                },
                ParolError, ParseTree, TokenStream,
            };
            use scnr2::scanner;
            use std::path::Path;
        })?;

        writeln!(f, "\n")?;
        // Imports of the user's semantic actions type and its generated adapter.
        let auto_name = format!("{user_type_name}Auto");
        let trait_module_name = format!("{module_name}_trait");
        f.write_fmt(ume::ume! {
            use crate::#module_name::#user_type_name;
            use crate::#trait_module_name::#auto_name;
        })?;
        writeln!(f, "\n")?;

        // Embed the pre-generated scanner source verbatim.
        writeln!(f, "{lexer_source}\n")?;

        f.write_fmt(ume::ume! {
            const MAX_K: usize = #max_k;
        })?;
        writeln!(f, "\n\n")?;
        f.write_fmt(ume::ume! {
            pub const NON_TERMINALS: &[&str; #non_terminal_count] = &[#non_terminals];
        })?;

        // Lookahead automata and production tables rendered earlier.
        writeln!(f, "\n\n{dfa_source}")?;
        writeln!(f, "\n{productions}\n")?;

        writeln!(f, "\n")?;

        // Text fragments that depend on the user type's lifetime and on the
        // generator configuration flags; interpolated into the entry points.
        let user_actions = ume::ume!(&mut #user_type_name #user_type_life_time).to_string();
        let lifetime_on_parse = if *user_type_life_time == "<'t>" {
            "'t,"
        } else {
            ""
        };
        let lifetime_on_input = if *user_type_life_time == "<'t>" {
            "'t"
        } else {
            ""
        };
        let use_scanner_type = ume::ume! {
            use #scanner_module_name::#scanner_type_name;
        }
        .to_string();
        let scanner_instance = ume::ume! {
            let scanner = #scanner_type_name::new();
        }
        .to_string();
        let auto_wrapper = format!(
            "\n// Initialize wrapper\n{}",
            ume::ume! {
                let mut user_actions = #auto_name::new(user_actions);
            }
        );
        let mut_ref_user_actions = ume::ume!(&mut user_actions);
        let enable_trimming = if *trim_parse_tree {
            "llk_parser.trim_parse_tree();\n"
        } else {
            ""
        };
        let recovery = if *disable_recovery {
            "llk_parser.disable_recovery();\n"
        } else {
            ""
        };
        // Convenience `parse` entry point building a syntree-based parse tree.
        f.write_fmt(ume::ume! {
            pub fn parse<#lifetime_on_parse T>(
                input: &#lifetime_on_input str,
                file_name: T,
                user_actions: #user_actions,
            ) -> Result<ParseTree, ParolError> where T: AsRef<Path> {
                use parol_runtime::{
                    parser::{parse_tree_type::SynTree, parser_types::SynTreeFlavor},
                    syntree::Builder,
                };
                let mut builder = Builder::<SynTree, SynTreeFlavor>::new_with();
                parse_into(input, &mut builder, file_name, user_actions)?;
                Ok(builder.build()?)
            }
        })?;
        // Generic `parse_into` entry point feeding a caller-provided tree builder.
        f.write_fmt(ume::ume! {
            #[allow(dead_code)]
            pub fn parse_into<'t, T: TreeConstruct<'t>>(
                input: &'t str,
                tree_builder: &mut T,
                file_name: impl AsRef<Path>,
                user_actions: #user_actions,
            ) -> Result<(), ParolError> where ParolError: From<T::Error> {
                #use_scanner_type
                let mut llk_parser = LLKParser::new(
                    #start_symbol_index,
                    LOOKAHEAD_AUTOMATA,
                    PRODUCTIONS,
                    TERMINAL_NAMES,
                    NON_TERMINALS,
                );
                #enable_trimming
                #recovery
                #scanner_instance
                #auto_wrapper

                llk_parser.parse_into(
                    tree_builder,
                    TokenStream::new(
                        input,
                        file_name,
                        scanner.scanner_impl.clone(),
                        &#scanner_type_name::match_function,
                        MAX_K,
                    )
                    .unwrap(),
                    #mut_ref_user_actions
                )
            }
        })
    }
}
428
/// All data needed to render the complete source of a generated LALR(1)
/// parser file; the rendering happens in the `Display` impl.
#[derive(Debug, Default)]
struct LRParserData<'a> {
    // Index of the grammar's start symbol within `NON_TERMINALS`.
    start_symbol_index: usize,
    // Pre-generated scanner source, embedded verbatim into the output.
    lexer_source: &'a str,
    // Pre-rendered rows of the `NON_TERMINALS` array.
    non_terminals: StrVec,
    non_terminal_count: usize,
    // Source of the `PRODUCTIONS` constant.
    productions: String,
    // Name of the user's semantic actions type.
    user_type_name: &'a str,
    // Either "<'t>" or "", depending on whether the AST type has a lifetime.
    user_type_life_time: &'static str,
    scanner_type_name: String,
    scanner_module_name: String,
    // Module containing the user's semantic actions type.
    module_name: &'a str,
    // If set, the generated parser calls `trim_parse_tree()`.
    trim_parse_tree: bool,
    // Source of the `PARSE_TABLE` static.
    parse_table_source: String,
}
444
impl std::fmt::Display for LRParserData<'_> {
    /// Renders the complete source of the generated LALR(1) parser file:
    /// banner, imports, embedded scanner, parse table, and the public
    /// `parse`/`parse_into` entry points.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let LRParserData {
            start_symbol_index,
            lexer_source,
            non_terminals,
            non_terminal_count,
            productions,
            user_type_name,
            user_type_life_time,
            scanner_type_name,
            scanner_module_name,
            module_name,
            trim_parse_tree,
            parse_table_source,
        } = self;

        // "Do not edit" banner at the top of the generated file.
        writeln!(
            f,
            "
            // ---------------------------------------------------------
            // This file was generated by parol.
            // Do not edit this file manually.
            // Changes will be overwritten on the next build.
            // ---------------------------------------------------------
            "
        )?;

        // Runtime imports an LR parser needs.
        f.write_fmt(ume::ume! {
            use parol_runtime::{
                ParolError, ParseTree, TokenStream,
                lr_parser::{LR1State, LRAction, LRParseTable, LRParser, LRProduction},
                parser::parse_tree_type::TreeConstruct,
            };
            use scnr2::scanner;
            use std::path::Path;
        })?;

        writeln!(f, "\n")?;
        // Imports of the user's semantic actions type and its generated adapter.
        let auto_name = format!("{user_type_name}Auto");
        let trait_module_name = format!("{module_name}_trait");
        f.write_fmt(ume::ume! {
            use crate::#module_name::#user_type_name;
            use crate::#trait_module_name::#auto_name;
        })?;
        writeln!(f, "\n")?;

        // Embed the pre-generated scanner source verbatim.
        writeln!(f, "{lexer_source}\n")?;

        writeln!(f, "\n\n")?;
        f.write_fmt(ume::ume! {
            pub const NON_TERMINALS: &[&str; #non_terminal_count] = &[#non_terminals];
        })?;

        // The pre-rendered LALR(1) parse table and the production list.
        writeln!(
            f,
            "\n\nstatic PARSE_TABLE: LRParseTable = {parse_table_source};\n"
        )?;
        writeln!(f, "\n{productions}\n")?;

        writeln!(f, "\n")?;

        // Text fragments that depend on the user type's lifetime and on the
        // generator configuration flags; interpolated into the entry points.
        let user_actions = ume::ume!(&mut #user_type_name #user_type_life_time).to_string();
        let lifetime_on_parse = if *user_type_life_time == "<'t>" {
            "'t,"
        } else {
            ""
        };
        let lifetime_on_input = if *user_type_life_time == "<'t>" {
            "'t"
        } else {
            ""
        };
        let auto_wrapper = format!(
            "\n// Initialize wrapper\n{}",
            ume::ume! {
                let mut user_actions = #auto_name::new(user_actions);
            }
        );
        let mut_ref_user_actions = ume::ume!(&mut user_actions);
        let enable_trimming = if *trim_parse_tree {
            "lr_parser.trim_parse_tree();\n"
        } else {
            ""
        };
        let use_scanner_type = ume::ume! {
            use #scanner_module_name::#scanner_type_name;
        }
        .to_string();
        let scanner_instance = ume::ume! {
            let scanner = #scanner_type_name::new();
        }
        .to_string();

        // Convenience `parse` entry point building a syntree-based parse tree.
        f.write_fmt(ume::ume! {
            pub fn parse<#lifetime_on_parse T>(
                input: &#lifetime_on_input str,
                file_name: T,
                user_actions: #user_actions,
            ) -> Result<ParseTree, ParolError> where T: AsRef<Path> {
                use parol_runtime::{
                    parser::{parse_tree_type::SynTree, parser_types::SynTreeFlavor},
                    syntree::Builder,
                };
                let mut builder = Builder::<SynTree, SynTreeFlavor>::new_with();
                parse_into(input, &mut builder, file_name, user_actions)?;
                Ok(builder.build()?)
            }
        })?;
        // Generic `parse_into` entry point feeding a caller-provided tree builder.
        f.write_fmt(ume::ume! {
            #[allow(dead_code)]
            pub fn parse_into<'t, T: TreeConstruct<'t>>(
                input: &'t str,
                tree_builder: &mut T,
                file_name: impl AsRef<Path>,
                user_actions: #user_actions,
            ) -> Result<(), ParolError> where ParolError: From<T::Error> {
                #use_scanner_type
                let mut lr_parser = LRParser::new(
                    #start_symbol_index,
                    &PARSE_TABLE,
                    PRODUCTIONS,
                    TERMINAL_NAMES,
                    NON_TERMINALS,
                );
                #enable_trimming
                #auto_wrapper
                #scanner_instance
                lr_parser.parse_into(
                    tree_builder,
                    TokenStream::new(
                        input,
                        file_name,
                        scanner.scanner_impl.clone(),
                        &#scanner_type_name::match_function,
                        1,
                    )
                    .unwrap(),
                    #mut_ref_user_actions
                )
            }
        })
    }
}
589
/// Generates the complete source of an LL(k) parser file for the given grammar.
///
/// * `grammar_config` - the analyzed grammar configuration
/// * `lexer_source` - pre-generated scanner source that is embedded verbatim
/// * `config` - generator configuration (user type name, module name, flags)
/// * `la_dfa` - lookahead DFA per non-terminal name
/// * `ast_type_has_lifetime` - whether the user's AST type carries a lifetime
pub fn generate_parser_source<C: CommonGeneratorConfig + ParserGeneratorConfig>(
    grammar_config: &GrammarConfig,
    lexer_source: &str,
    config: &C,
    la_dfa: &BTreeMap<String, LookaheadDFA>,
    ast_type_has_lifetime: bool,
) -> Result<String> {
    generate_parser_source_internal(
        grammar_config,
        lexer_source,
        config,
        la_dfa,
        ast_type_has_lifetime,
    )
}
612
/// Builds the language-agnostic parser export model for an LL(k) grammar from
/// the grammar configuration and the precomputed lookahead DFAs.
pub fn generate_parser_export_model(
    grammar_config: &GrammarConfig,
    la_dfa: &BTreeMap<String, LookaheadDFA>,
) -> Result<crate::generators::ParserExportModel> {
    build_export_model_for_llk(grammar_config, la_dfa)
}
626
627fn generate_parser_source_internal<C: CommonGeneratorConfig + ParserGeneratorConfig>(
628 grammar_config: &GrammarConfig,
629 lexer_source: &str,
630 config: &C,
631 la_dfa: &BTreeMap<String, LookaheadDFA>,
632 ast_type_has_lifetime: bool,
633) -> Result<String> {
634 let non_terminal_metadata = build_non_terminal_metadata_ir(grammar_config);
635 let non_terminal_names = non_terminal_metadata.names;
636 let non_terminal_count = non_terminal_names.len();
637 let start_symbol_index: usize =
638 parser_model_find_start_symbol_index(&non_terminal_names, grammar_config)?;
639
640 let non_terminals =
641 non_terminal_metadata
642 .indexed_rows
643 .into_iter()
644 .fold(StrVec::new(4), |mut acc, row| {
645 acc.push(row);
646 acc
647 });
648
649 let lookahead_automata_ir = build_lookahead_automata_model(la_dfa, &non_terminal_names);
650 let dfa_source = generate_dfa_source(&lookahead_automata_ir);
651
652 let production_ir = build_production_model(grammar_config, &non_terminal_names)?;
653 let productions = generate_productions(&production_ir);
654
655 let max_k = grammar_config.lookahead_size;
656
657 let user_type_life_time = if ast_type_has_lifetime { "<'t>" } else { "" };
658
659 let parser_data = ParserData {
660 start_symbol_index,
661 lexer_source,
662 non_terminals,
663 non_terminal_count,
664 dfa_source,
665 productions,
666 max_k,
667 user_type_name: config.user_type_name(),
668 user_type_life_time,
669 scanner_type_name: get_scanner_type_name(config),
670 scanner_module_name: get_scanner_module_name(config),
671 module_name: config.module_name(),
672 trim_parse_tree: config.trim_parse_tree(),
673 disable_recovery: config.recovery_disabled(),
674 };
675
676 Ok(format!("{parser_data}"))
677}
678
679fn get_terminals(grammar_config: &GrammarConfig) -> Vec<(&str, Option<LookaheadExpression>)> {
680 grammar_config
681 .cfg
682 .get_ordered_terminals()
683 .iter()
684 .map(|(t, _, l, _)| (*t, l.clone()))
685 .collect::<Vec<(&str, Option<LookaheadExpression>)>>()
686}
687
688fn get_scanner_module_name<C: CommonGeneratorConfig>(config: &C) -> String {
689 let scanner_module_name = NamingHelper::to_lower_snake_case(config.user_type_name());
690 scanner_module_name + "_scanner"
691}
692
693fn get_scanner_type_name<C: CommonGeneratorConfig>(config: &C) -> String {
694 let scanner_type_name = NamingHelper::to_upper_camel_case(config.user_type_name());
695 scanner_type_name + "Scanner"
696}
697
/// Generates the complete source of an LALR(1) parser file for the given grammar.
///
/// * `grammar_config` - the analyzed grammar configuration
/// * `lexer_source` - pre-generated scanner source that is embedded verbatim
/// * `config` - generator configuration (user type name, module name, flags)
/// * `parse_table` - the precomputed LALR(1) parse table
/// * `ast_type_has_lifetime` - whether the user's AST type carries a lifetime
pub fn generate_lalr1_parser_source<C: CommonGeneratorConfig + ParserGeneratorConfig>(
    grammar_config: &GrammarConfig,
    lexer_source: &str,
    config: &C,
    parse_table: &LRParseTable,
    ast_type_has_lifetime: bool,
) -> Result<String> {
    generate_lalr1_parser_source_internal(
        grammar_config,
        lexer_source,
        config,
        parse_table,
        ast_type_has_lifetime,
    )
}
720
/// Builds the language-agnostic parser export model for an LALR(1) grammar
/// from the grammar configuration and the precomputed parse table.
pub fn generate_lalr1_parser_export_model(
    grammar_config: &GrammarConfig,
    parse_table: &LRParseTable,
) -> Result<crate::generators::ParserExportModel> {
    build_export_model_for_lalr(grammar_config, parse_table)
}
734
/// Builds the parser export model straight from a grammar configuration,
/// first computing whichever tables the configured grammar type requires.
///
/// * `max_lookahead` - upper bound for the lookahead size (LL(k) only).
pub fn generate_parser_export_model_from_grammar(
    grammar_config: &GrammarConfig,
    max_lookahead: usize,
) -> Result<crate::generators::ParserExportModel> {
    match grammar_config.grammar_type {
        GrammarType::LLK => {
            // LL(k): the export model is derived from the lookahead DFAs.
            let lookahead_dfas = crate::calculate_lookahead_dfas(grammar_config, max_lookahead)?;
            generate_parser_export_model(grammar_config, &lookahead_dfas)
        }
        GrammarType::LALR1 => {
            // LALR(1): the export model is derived from the parse table.
            let parse_table = crate::calculate_lalr1_parse_table(grammar_config)?.0;
            generate_lalr1_parser_export_model(grammar_config, &parse_table)
        }
    }
}
775
776fn generate_lalr1_parser_source_internal<C: CommonGeneratorConfig + ParserGeneratorConfig>(
777 grammar_config: &GrammarConfig,
778 lexer_source: &str,
779 config: &C,
780 parse_table: &LRParseTable,
781 ast_type_has_lifetime: bool,
782) -> Result<String> {
783 let terminals = get_terminals(grammar_config);
784 let non_terminal_metadata = build_non_terminal_metadata_ir(grammar_config);
785 let non_terminal_names = non_terminal_metadata.names;
786 let non_terminal_count = non_terminal_names.len();
787 let start_symbol_index: usize =
788 parser_model_find_start_symbol_index(&non_terminal_names, grammar_config)?;
789
790 let non_terminals = non_terminal_names.iter().collect::<Vec<_>>();
791
792 let non_terminals_with_index_comment =
793 non_terminal_metadata
794 .indexed_rows
795 .into_iter()
796 .fold(StrVec::new(4), |mut acc, row| {
797 acc.push(row);
798 acc
799 });
800 let production_ir = build_production_model(grammar_config, &non_terminal_names)?;
801 let productions = generate_lr_productions(&production_ir);
802
803 let user_type_life_time = if ast_type_has_lifetime { "<'t>" } else { "" };
804
805 let parse_table_source = generate_parse_table_source(parse_table, &terminals, &non_terminals);
806
807 let parser_data = LRParserData {
808 start_symbol_index,
809 lexer_source,
810 non_terminals: non_terminals_with_index_comment,
811 non_terminal_count,
812 productions,
813 user_type_name: config.user_type_name(),
814 user_type_life_time,
815 scanner_type_name: get_scanner_type_name(config),
816 scanner_module_name: get_scanner_module_name(config),
817 module_name: config.module_name(),
818 trim_parse_tree: config.trim_parse_tree(),
819 parse_table_source,
820 };
821
822 Ok(format!("{parser_data}"))
823}
824
/// Renders the `LRParseTable { ... }` initializer for the generated
/// `PARSE_TABLE` static from the precomputed parse table.
///
/// Kept byte-stable: the raw-string literals below define the exact layout of
/// the generated table source.
fn generate_parse_table_source(
    parse_table: &LRParseTable,
    terminals: &[(&str, Option<LookaheadExpression>)],
    non_terminals: &[&String],
) -> String {
    let terminal_labels = build_terminal_label_map(terminals);
    let non_terminal_names = non_terminals
        .iter()
        .map(|n| (*n).clone())
        .collect::<Vec<_>>();
    let render_ir =
        build_rust_lalr_parse_table_render_ir(parse_table, &terminal_labels, &non_terminal_names);

    // Flat action list; each entry is prefixed with its index as a comment.
    let actions =
        render_ir
            .actions
            .iter()
            .enumerate()
            .fold(String::new(), |mut acc, (i, action_source)| {
                acc.push_str(format!("/* {} */ {}, ", i, action_source).as_str());
                acc
            });

    // One `LR1State { actions, gotos }` entry per state.
    let states = render_ir
        .states
        .iter()
        .fold(String::new(), |mut acc, state| {
            // `(terminal, action_index)` pairs with a readable comment each.
            let state_actions = format!(
                "&[{}]",
                state
                    .actions
                    .iter()
                    .map(|a| {
                        format!(
                            r#"
    ({}, {}) /* '{}' => {} */"#,
                            a.terminal, a.action_index, a.terminal_label, a.action_comment
                        )
                    })
                    .collect::<Vec<String>>()
                    .join(", ")
            );

            // `(non_terminal, goto_state)` pairs; empty slice when none exist.
            let state_gotos = if state.gotos.is_empty() {
                "&[]".to_string()
            } else {
                format!(
                    "&[{}]",
                    state
                        .gotos
                        .iter()
                        .map(|g| {
                            format!(
                                r#"
    ({}, {}) /* {} => {} */"#,
                                g.non_terminal, g.goto_state, g.non_terminal_name, g.goto_state,
                            )
                        })
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            };

            acc.push_str(
                format!(
                    r#"
    // State {}
    LR1State {{
        actions: {},
        gotos: {} }}"#,
                    state.state_index, state_actions, state_gotos
                )
                .as_str(),
            );
            acc.push(',');
            acc
        });

    format!("LRParseTable {{ actions: &[{actions}], states: &[{states}] }}",)
}
905
906fn generate_dfa_source(lookahead_automata_ir: &[LookaheadAutomatonIR]) -> String {
907 let lookahead_dfa_s =
908 lookahead_automata_ir
909 .iter()
910 .fold(StrVec::new(0), |mut acc, automaton_ir| {
911 let dfa = Dfa::from_ir(automaton_ir);
912 acc.push(format!("{dfa}"));
913 acc
914 });
915 let dfa_count = lookahead_automata_ir.len();
916
917 let dfas = Dfas {
918 dfa_count,
919 lookahead_dfa_s: format!("{lookahead_dfa_s}"),
920 };
921
922 format!("{dfas}")
923}
924
925fn generate_productions(production_ir: &[ProductionIR]) -> String {
926 let production_count = production_ir.len();
927 let productions = production_ir.iter().fold(String::new(), |mut acc, p| {
928 let production = Production::from_ir(p);
929 acc.push_str(format!("{production}").as_str());
930 acc
931 });
932
933 let productions = Productions {
934 production_count,
935 productions,
936 };
937
938 format!("{productions}")
939}
940
941fn generate_lr_productions(production_ir: &[ProductionIR]) -> String {
942 let production_render_ir = build_lalr_production_render_ir(production_ir);
943 let production_count = production_render_ir.len();
944 let productions = production_render_ir
945 .iter()
946 .fold(String::new(), |mut acc, p| {
947 let production = LRProduction::from_render_ir(p);
948 acc.push_str(format!("{production}").as_str());
949 acc
950 });
951
952 let productions = LRProductions {
953 production_count,
954 productions,
955 };
956
957 format!("{productions}")
958}