use parol_runtime::once_cell::sync::Lazy;
#[allow(unused_imports)]
use parol_runtime::parser::{LLKParser, LookaheadDFA, ParseTreeType, ParseType, Production, Trans};
use parol_runtime::{ParolError, ParseTree, TerminalIndex};
use parol_runtime::{TokenStream, Tokenizer};
use std::path::Path;
use crate::parser::sdc_grammar::SdcGrammar;
use crate::parser::sdc_grammar_trait::SdcGrammarAuto;
use parol_runtime::lexer::tokenizer::{ERROR_TOKEN, UNMATCHABLE_TOKEN, WHITESPACE_TOKEN};
/// Regex pattern for every terminal, indexed by `TerminalIndex`.
/// Slots 0..=4 are the fixed scanner-primitive slots (see the first five
/// entries of `TERMINAL_NAMES`); slots 5..=15 are the grammar's own
/// terminals and slot 16 is the error token. Generated table — do not
/// hand-edit the patterns; they must stay in sync with `TERMINAL_NAMES`.
pub const TERMINALS: &[&str; 17] = &[
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
r"\[",
r"\]",
r"\{",
r"\}",
r"\u{0022}(?:\\[\u{0022}\\/bfnrt]|u[0-9a-fA-F]{4}|[^\u{0022}\\]|\\\n)*\u{0022}",
r"#.*(\r\n|\r|\n|$)",
r";",
r"\\(\r\n|\r|\n)",
r"(\r\n|\r|\n|$)",
r"[^\s\[\]\\;]+",
r"[^}]*",
ERROR_TOKEN,
];
/// Human-readable name for each terminal, parallel to `TERMINALS`
/// (same index = same terminal). Used for diagnostics by the parser.
pub const TERMINAL_NAMES: &[&str; 17] = &[
"EndOfInput",
"Newline",
"Whitespace",
"LineComment",
"BlockComment",
"TermLBracket",
"TermRBracket",
"TermLBrace",
"TermRBrace",
"TermStringGroup",
"TermComment",
"TermSemiColon",
"TermBackslashLineBreak",
"TermLineBreak",
"TermWord",
"TermBraceGroupContent",
"Error",
];
/// Scanner state 0 ("INITIAL"): patterns for the five primitive slots
/// (only plain whitespace is skippable here) plus the `TerminalIndex`
/// list of grammar terminals (5..=14 in `TERMINALS`) active in this mode.
const SCANNER_0: (&[&str; 5], &[TerminalIndex; 10]) = (
&[
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
WHITESPACE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
],
&[
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
],
);
/// Scanner state 1 ("BraceGroup"): no skippable primitives; only the
/// brace terminals (7, 8) and the raw brace-group content terminal (15)
/// are recognized while inside a `{ ... }` group.
const SCANNER_1: (&[&str; 5], &[TerminalIndex; 3]) = (
&[
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
UNMATCHABLE_TOKEN,
],
&[
7,
8,
15,
],
);
// Maximum lookahead depth of the grammar (LL(1)); passed to `TokenStream::new`.
const MAX_K: usize = 1;
/// Non-terminal names, parallel to `LOOKAHEAD_AUTOMATA` (one DFA per
/// non-terminal) and referenced by index from `Production::lhs` and
/// `ParseType::N(..)` entries in `PRODUCTIONS`.
pub const NON_TERMINALS: &[&str; 32] = &[
"Argument",
"Command",
"CommandLine",
"CommandList",
"CommandReplacement",
"Source",
"SourceList",
"SourceListGroup",
"TermBackslashLineBreak",
"TermBraceGroup",
"TermBraceGroupContent",
"TermBraceGroupGroup",
"TermComment",
"TermLBrace",
"TermLBracket",
"TermLineBreak",
"TermRBrace",
"TermRBracket",
"TermSemiColon",
"TermStringGroup",
"TermWord",
"TokenBraceGroup",
"TokenBraceGroupOpt",
"TokenEnd",
"TokenLBracket",
"TokenLBracketOpt",
"TokenRBracket",
"TokenRBracketOpt",
"TokenStringGroup",
"TokenStringGroupOpt",
"TokenWord",
"TokenWordOpt",
];
/// One lookahead DFA per non-terminal (index-parallel to `NON_TERMINALS`),
/// used by the LL(k) parser to pick the production to expand.
/// `prod0 >= 0` with `k: 0` means the choice is unconditional; `prod0: -1`
/// means the decision is made via `transitions`. Each `Trans` tuple is
/// presumably (from-state, terminal index, to-state, production index) —
/// field semantics are defined by `parol_runtime`; verify there before
/// editing. Generated table — values encode the grammar.
pub const LOOKAHEAD_AUTOMATA: &[LookaheadDFA; 32] = &[
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 4, 34),
Trans(0, 7, 3, 33),
Trans(0, 9, 2, 32),
Trans(0, 14, 1, 31),
],
k: 1,
},
LookaheadDFA {
prod0: 36,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 39,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 1, 37),
Trans(0, 6, 2, 38),
Trans(0, 7, 1, 37),
Trans(0, 9, 1, 37),
Trans(0, 11, 2, 38),
Trans(0, 13, 2, 38),
Trans(0, 14, 1, 37),
],
k: 1,
},
LookaheadDFA {
prod0: 35,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 40,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 0, 2, 45),
Trans(0, 10, 1, 41),
Trans(0, 11, 1, 41),
Trans(0, 13, 1, 41),
Trans(0, 14, 1, 41),
],
k: 1,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 10, 3, 44),
Trans(0, 11, 2, 43),
Trans(0, 13, 2, 43),
Trans(0, 14, 1, 42),
],
k: 1,
},
LookaheadDFA {
prod0: 7,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 11,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 10,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[Trans(0, 7, 1, 12), Trans(0, 15, 2, 13)],
k: 1,
},
LookaheadDFA {
prod0: 5,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 2,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 0,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 8,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 3,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 1,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 6,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 4,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 9,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: 16,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 2, 18),
Trans(0, 6, 2, 18),
Trans(0, 7, 2, 18),
Trans(0, 9, 2, 18),
Trans(0, 11, 2, 18),
Trans(0, 12, 1, 17),
Trans(0, 13, 2, 18),
Trans(0, 14, 2, 18),
],
k: 1,
},
LookaheadDFA {
prod0: -1,
transitions: &[Trans(0, 11, 2, 15), Trans(0, 13, 1, 14)],
k: 1,
},
LookaheadDFA {
prod0: 22,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[Trans(0, 12, 1, 23), Trans(0, 14, 2, 24)],
k: 1,
},
LookaheadDFA {
prod0: 25,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 2, 27),
Trans(0, 6, 2, 27),
Trans(0, 7, 2, 27),
Trans(0, 9, 2, 27),
Trans(0, 11, 2, 27),
Trans(0, 12, 1, 26),
Trans(0, 13, 2, 27),
Trans(0, 14, 2, 27),
],
k: 1,
},
LookaheadDFA {
prod0: 19,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 2, 21),
Trans(0, 6, 2, 21),
Trans(0, 7, 2, 21),
Trans(0, 9, 2, 21),
Trans(0, 11, 2, 21),
Trans(0, 12, 1, 20),
Trans(0, 13, 2, 21),
Trans(0, 14, 2, 21),
],
k: 1,
},
LookaheadDFA {
prod0: 28,
transitions: &[],
k: 0,
},
LookaheadDFA {
prod0: -1,
transitions: &[
Trans(0, 5, 2, 30),
Trans(0, 6, 2, 30),
Trans(0, 7, 2, 30),
Trans(0, 9, 2, 30),
Trans(0, 11, 2, 30),
Trans(0, 12, 1, 29),
Trans(0, 13, 2, 30),
Trans(0, 14, 2, 30),
],
k: 1,
},
];
/// Grammar productions, indexed by production number (the numbers the
/// lookahead DFAs select). `lhs` indexes `NON_TERMINALS`; `production`
/// is the right-hand side as `ParseType` items (`T` = terminal index,
/// `N` = non-terminal index). NOTE(review): `Push(1)`/`Pop` appear to
/// switch between scanner states (1 = "BraceGroup" in `TOKENIZERS`),
/// and the RHS ordering follows the `parol_runtime` stack convention —
/// confirm against that crate before editing. Generated table.
pub const PRODUCTIONS: &[Production; 46] = &[
Production {
lhs: 14,
production: &[ParseType::T(5)],
},
Production {
lhs: 17,
production: &[ParseType::T(6)],
},
Production {
lhs: 13,
production: &[ParseType::T(7)],
},
Production {
lhs: 16,
production: &[ParseType::T(8)],
},
Production {
lhs: 19,
production: &[ParseType::T(9)],
},
Production {
lhs: 12,
production: &[ParseType::T(10)],
},
Production {
lhs: 18,
production: &[ParseType::T(11)],
},
Production {
lhs: 8,
production: &[ParseType::T(12)],
},
Production {
lhs: 15,
production: &[ParseType::T(13)],
},
Production {
lhs: 20,
production: &[ParseType::T(14)],
},
Production {
lhs: 10,
production: &[ParseType::T(15)],
},
Production {
lhs: 9,
production: &[
ParseType::Pop,
ParseType::N(16),
ParseType::N(11),
ParseType::Push(1),
ParseType::N(13),
],
},
Production {
lhs: 11,
production: &[ParseType::N(9)],
},
Production {
lhs: 11,
production: &[ParseType::N(10)],
},
Production {
lhs: 23,
production: &[ParseType::N(15)],
},
Production {
lhs: 23,
production: &[ParseType::N(18)],
},
Production {
lhs: 21,
production: &[ParseType::N(22), ParseType::N(9)],
},
Production {
lhs: 22,
production: &[ParseType::N(8)],
},
Production {
lhs: 22,
production: &[],
},
Production {
lhs: 28,
production: &[ParseType::N(29), ParseType::N(19)],
},
Production {
lhs: 29,
production: &[ParseType::N(8)],
},
Production {
lhs: 29,
production: &[],
},
Production {
lhs: 24,
production: &[ParseType::N(25), ParseType::N(14)],
},
Production {
lhs: 25,
production: &[ParseType::N(8)],
},
Production {
lhs: 25,
production: &[],
},
Production {
lhs: 26,
production: &[ParseType::N(27), ParseType::N(17)],
},
Production {
lhs: 27,
production: &[ParseType::N(8)],
},
Production {
lhs: 27,
production: &[],
},
Production {
lhs: 30,
production: &[ParseType::N(31), ParseType::N(20)],
},
Production {
lhs: 31,
production: &[ParseType::N(8)],
},
Production {
lhs: 31,
production: &[],
},
Production {
lhs: 0,
production: &[ParseType::N(30)],
},
Production {
lhs: 0,
production: &[ParseType::N(28)],
},
Production {
lhs: 0,
production: &[ParseType::N(21)],
},
Production {
lhs: 0,
production: &[ParseType::N(4)],
},
Production {
lhs: 4,
production: &[ParseType::N(26), ParseType::N(1), ParseType::N(24)],
},
Production {
lhs: 1,
production: &[ParseType::N(3), ParseType::N(30)],
},
Production {
lhs: 3,
production: &[ParseType::N(3), ParseType::N(0)],
},
Production {
lhs: 3,
production: &[],
},
Production {
lhs: 2,
production: &[ParseType::N(23), ParseType::N(1)],
},
Production {
lhs: 5,
production: &[ParseType::N(6)],
},
Production {
lhs: 6,
production: &[ParseType::N(6), ParseType::N(7)],
},
Production {
lhs: 7,
production: &[ParseType::N(2)],
},
Production {
lhs: 7,
production: &[ParseType::N(23)],
},
Production {
lhs: 7,
production: &[ParseType::N(12)],
},
Production {
lhs: 6,
production: &[],
},
];
/// Lazily compiled tokenizer per scanner state; the vector index is the
/// scanner state number used by `ParseType::Push(..)`. Compilation happens
/// on first access and panics (unwrap) if a generated pattern is invalid.
static TOKENIZERS: Lazy<Vec<(&'static str, Tokenizer)>> = Lazy::new(|| {
    // State 0: the default scanner mode.
    let initial = (
        "INITIAL",
        Tokenizer::build(TERMINALS, SCANNER_0.0, SCANNER_0.1).unwrap(),
    );
    // State 1: active inside `{ ... }` brace groups.
    let brace_group = (
        "BraceGroup",
        Tokenizer::build(TERMINALS, SCANNER_1.0, SCANNER_1.1).unwrap(),
    );
    vec![initial, brace_group]
});
/// Parses `input` as an SDC source, driving `user_actions` through the
/// generated `SdcGrammarAuto` adapter.
///
/// `file_name` is used only for error reporting/locations.
///
/// # Errors
///
/// Returns a [`ParolError`] on lexing or parsing failure, including
/// failure to construct the token stream (previously this panicked via
/// `unwrap`; it is now propagated since the signature already returns
/// `Result<_, ParolError>`).
pub fn parse<'t, T>(
input: &'t str,
file_name: T,
user_actions: &mut SdcGrammar<'t>,
) -> Result<ParseTree<'t>, ParolError>
where
T: AsRef<Path>,
{
let mut llk_parser = LLKParser::new(
5,
LOOKAHEAD_AUTOMATA,
PRODUCTIONS,
TERMINAL_NAMES,
NON_TERMINALS,
);
// Keep the parse tree minimal; only the nodes needed by the actions survive.
llk_parser.trim_parse_tree();
let mut user_actions = SdcGrammarAuto::new(user_actions);
llk_parser.parse(
// Propagate token-stream construction errors instead of panicking.
TokenStream::new(input, file_name, &TOKENIZERS, MAX_K)?,
&mut user_actions,
)
}