#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
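// Parser and lexer generated by rustlr for the "evod" grammar (the name is
// inferred from the generated `evodlexer` type below). Every grammar symbol
// carries a u32 value, and the shared external state type is ().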
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
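// Names of all grammar symbols; the packed state table below refers to
// symbols by their index in this array.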
static SYMBOLS:[&'static str;10] = ["_WILDCARD_TOKEN_","A","B","E","D","S","a","b","START","EOF"];
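// Packed LR state machine, one u64 per table entry: the state number lives in
// bits 48..64, the symbol index in bits 32..48, and the action in the low
// bits (decoded by rustlr's decode_action).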
static TABLE:[u64;22] = [25769869312,8590327809,4295229441,30065033218,21475033089,12885032961,17180196865,281505041874946,281500746973184,562980018716672,844463584837635,1125938561548290,1407404948914176,1688888515035138,1970337722531841,1970350606843904,1970342017564673,1970354902007810,2251838468521986,2533313445298178,2814779832205314,3096254809047042,];
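// Builds the generated parser: registers one ZCRProduction per grammar rule
// (each Ruleaction pops the values of the rule's right-hand side off the
// parse stack and computes the rule's own value), then loads the packed
// state table into the runtime state machine.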
pub fn make_parser() -> ZCParser<u32,()>
{
let mut parser1:ZCParser<u32,()> = ZCParser::new(9,12); // 9 rules, 12 states
let mut rule;
rule = ZCRProduction::<u32,()>::new_skeleton("S");
rule.Ruleaction = |parser|{ let mut n = parser.popstack(); n.value};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("S");
rule.Ruleaction = |parser|{ let mut n = parser.popstack(); n.value};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("A");
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut n = parser.popstack(); n.value};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("B");
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let ((n),)=(_item0_.value,) { n} else {parser.bad_pattern("((n),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("E");
rule.Ruleaction = |parser|{ 0};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut n = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); print!("E "); n.value+2};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("D");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); 1};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("D");
rule.Ruleaction = |parser|{ let mut n = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); print!("D "); n.value+2};
parser1.Rules.push(rule);
rule = ZCRProduction::<u32,()>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <u32>::default()};
parser1.Rules.push(rule);
parser1.Errsym = ""; // no designated error-recovery terminal
// Load the packed LR table into the runtime state machine (RSM).
for i in 0..TABLE.len() {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize; // symbol index, bits 32..48
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize; // state number, bits 48..64
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
}
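// Convenience wrapper: runs the parser to completion over `lexer` and
// returns Ok(value) on a clean parse, or Err(value) if error recovery was
// triggered along the way.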
pub fn parse_with<'t>(parser:&mut ZCParser<u32,()>, lexer:&mut evodlexer<'t>) -> Result<u32,u32>
{
let _xres_ = parser.parse(lexer);
if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}
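// Training-mode variant: parse_train additionally runs rustlr's interactive
// error-training against the parser source file at `parserpath`.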
pub fn parse_train_with<'t>(parser:&mut ZCParser<u32,()>, lexer:&mut evodlexer<'t>, parserpath:&str) -> Result<u32,u32>
{
let _xres_ = parser.parse_train(lexer,parserpath);
if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}
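// Generated lexer: wraps rustlr's StrTokenizer and adds the grammar's
// keyword set plus a map from lexical forms to terminal names.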
pub struct evodlexer<'t> {
stk: StrTokenizer<'t>,
keywords: HashSet<&'static str>,
lexnames: HashMap<&'static str,&'static str>,
}
impl<'t> evodlexer<'t>
{
pub fn from_str(s:&'t str) -> evodlexer<'t> {
Self::new(StrTokenizer::from_str(s))
}
pub fn from_source(s:&'t LexSource<'t>) -> evodlexer<'t> {
Self::new(StrTokenizer::from_source(s))
}
pub fn new(mut stk:StrTokenizer<'t>) -> evodlexer<'t> {
let mut lexnames = HashMap::with_capacity(64);
let mut keywords = HashSet::with_capacity(64);
for kw in ["b","_WILDCARD_TOKEN_","a",] {keywords.insert(kw);}
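// This grammar declares no extra single-, double-, or triple-character
// symbols and no remapped lexical names, so the loops below are no-ops.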
for c in [] {stk.add_single(c);}
for d in [] {stk.add_double(d);}
for d in [] {stk.add_triple(d);}
for (k,v) in [] {lexnames.insert(k,v);}
evodlexer {stk,keywords,lexnames}
}
}
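// Adapts StrTokenizer's raw tokens to the TerminalTokens the parser expects,
// mapping each raw token to the terminal name used in the grammar.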
impl<'t> Tokenizer<'t,u32> for evodlexer<'t>
{
fn nextsym(&mut self) -> Option<TerminalToken<'t,u32>> {
// Pull the next raw token, if any, and translate it to a terminal.
let token = self.stk.next_token()?;
match token.0 {
// declared keywords lex as their own terminals (remapped via lexnames if present)
RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
let truesym = self.lexnames.get(sym).unwrap_or(&sym);
Some(TerminalToken::from_raw(token,truesym,<u32>::default()))
},
// punctuation with a declared lexical name maps to that terminal
RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
let tname = self.lexnames.get(s).unwrap();
Some(TerminalToken::from_raw(token,tname,<u32>::default()))
},
// other symbols and identifiers pass through under their own text
RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<u32>::default())),
RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<u32>::default())),
// anything else is surfaced to the parser as a lexical error
_ => Some(TerminalToken::from_raw(token,"<LexicalError>",<u32>::default())),
}
}
fn linenum(&self) -> usize {self.stk.line()}
fn column(&self) -> usize {self.stk.column()}
fn position(&self) -> usize {self.stk.current_position()}
fn current_line(&self) -> &str {self.stk.current_line()}
fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
}
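// Hook for additional generated initialization; nothing extra for this grammar.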
fn load_extras(parser:&mut ZCParser<u32,()>)
{
}
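// A minimal usage sketch (not part of the generated output): build the
// parser, lex a sample input, and parse it. The input string "a a b b" is a
// hypothetical example; substitute any sentence over the grammar's terminals.
#[cfg(test)]
mod usage_sketch {
use super::*;
#[test]
fn parse_sample_input() {
let mut parser = make_parser();
let mut lexer = evodlexer::from_str("a a b b");
// Ok(v) on a clean parse; Err(v) if error recovery was triggered.
match parse_with(&mut parser, &mut lexer) {
Ok(v) => println!("parsed, value = {}", v),
Err(v) => println!("parse errors, value = {}", v),
}
}
}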