//Parser generated by rustlr for grammar nuttycalc
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
static SYMBOLS:[&'static str;17] = ["_WILDCARD_TOKEN_","E","E2","E3","num","float","(",")","mark1","mark2","PLUS","MINUS","TIMES","DIV","START","EOF","NEWDELAYNT_E_4"];
static TABLE:[u64;41] = [17180065792,25769869312,4295098369,281543700905985,281479276331009,281500749070336,281492161363968,563014377930755,844489354641410,7036891599339520,7036904482734082,7318400934543362,10414629976866816,10414625678229506,10696079181414400,10977528389173249,10977541274206208,10977549864075264,10977592813879297,11259029133197314,11259016250720256,11540525587496960,11821979089436672,12103454063919106,12103441181310976,12384950515531778,16044125212508162,17170003648184320,17451465739665408,17451478620962818,18858887864320002,19984753415618560,20266249867821056,20547690484596736,20547703364648962,20829204116078592,20829199816523778,21110653323051008,21392162654650370,21673603276603392,21955112608530434,];
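// Each u64 in TABLE packs one LR table entry: bits 48..63 hold the state
// number, bits 32..47 the index of the lookahead symbol in SYMBOLS, and the
// remaining low bits the action itself, decoded by rustlr's decode_action
// (see the loop in make_parser below).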
pub fn make_parser() -> ZCParser<(),()>
{
let mut parser1:ZCParser<(),()> = ZCParser::new(11,79); // 11 productions, 79 parser states
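// Each production is built from a skeleton ZCRProduction plus a Ruleaction
// closure. A Ruleaction pops the right-hand-side items off the parse stack in
// right-to-left order and returns the semantic value of the left-hand side;
// since this grammar declares no abstract-syntax type, every value is ().
// The "start" skeleton below is only a placeholder that is reassigned for
// each real production.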
let mut rule = ZCRProduction::<(),()>::new_skeleton("start");
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item7_ = parser.popstack(); let mut _item6_ = parser.popstack(); let mut _item5_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del1_5_0_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()}; // value type is (), so the delayed item carries no data and the action just returns ()
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_6_2_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del3_7_3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_8_1_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _delayitem1_8 = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_8_1_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _delayitem1_18 = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
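// All 11 productions are now registered, matching the rule count given to ZCParser::new above.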
parser1.Errsym = "";
for i in 0..TABLE.len() {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
} //make_parser
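// Hand-written usage sketch (not generated by rustlr): shows how a driver can
// wire the generated parser and lexer together. The input string is
// hypothetical; whether it is accepted depends on the nuttycalc grammar,
// which is not shown in this file, so the result is deliberately ignored.
#[cfg(test)]
mod usage_sketch {
use super::*;
#[test]
fn drive_parser() {
let mut parser = make_parser(); // build the LR parser from TABLE
let mut lexer = nuttycalclexer::from_str("(3 + 4) * 2"); // hypothetical input
let _result = parse_with(&mut parser, &mut lexer); // Ok(()) if accepted, Err(()) otherwise
}
}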
pub fn parse_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut nuttycalclexer<'t>) -> Result<(),()>
{
let _xres_ = parser.parse(lexer); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_with public function
pub fn parse_train_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut nuttycalclexer<'t>, parserpath:&str) -> Result<(),()>
{
let _xres_ = parser.parse_train(lexer,parserpath); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_train_with public function
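// parse_train_with is intended for rustlr's interactive training mode: on a
// syntax error it prompts for a diagnostic message and augments the generated
// parser file at parserpath (via load_extras, below) so the message is
// replayed on later errors at the same parser state.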
// Lexical Scanner using RawToken and StrTokenizer
pub struct nuttycalclexer<'t> {
stk: StrTokenizer<'t>, // underlying string tokenizer
keywords: HashSet<&'static str>, // alphanumeric lexemes recognized as grammar terminals
lexnames: HashMap<&'static str,&'static str>, // maps raw lexemes to terminal names, e.g. "+" to "PLUS"
shared_state: Rc<RefCell<()>>, // externally shared state (unit type for this grammar)
}
impl<'t> nuttycalclexer<'t>
{
pub fn from_str(s:&'t str) -> nuttycalclexer<'t> {
Self::new(StrTokenizer::from_str(s))
}
pub fn from_source(s:&'t LexSource<'t>) -> nuttycalclexer<'t> {
Self::new(StrTokenizer::from_source(s))
}
pub fn new(mut stk:StrTokenizer<'t>) -> nuttycalclexer<'t> {
let mut lexnames = HashMap::with_capacity(64);
let mut keywords = HashSet::with_capacity(64);
let shared_state = Rc::new(RefCell::new(<()>::default()));
for kw in ["mark1","mark2","float","_WILDCARD_TOKEN_","num",] {keywords.insert(kw);}
for c in ['(',')','-','*','+','/',] {stk.add_single(c);}
for d in [] {stk.add_double(d);}
for d in [] {stk.add_triple(d);}
for (k,v) in [(r"-","MINUS"),(r"*","TIMES"),(r"+","PLUS"),(r"/","DIV"),] {lexnames.insert(k,v);}
nuttycalclexer {stk,keywords,lexnames,shared_state}
}
}
impl<'t> Tokenizer<'t,()> for nuttycalclexer<'t>
{
fn nextsym(&mut self) -> Option<TerminalToken<'t,()>> {
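// Classify each raw token: alphanumeric keywords and registered operator
// symbols are renamed to their grammar terminal names; all other lexemes pass
// through under their own text, and unrecognized tokens become "<LexicalError>".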
let token = self.stk.next_token()?; // None propagates end-of-input
match token.0 {
RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
let truesym = self.lexnames.get(sym).unwrap_or(&sym);
Some(TerminalToken::from_raw(token,truesym,<()>::default()))
},
RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
let tname = self.lexnames.get(s).unwrap();
Some(TerminalToken::from_raw(token,tname,<()>::default()))
},
RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
_ => Some(TerminalToken::from_raw(token,"<LexicalError>",<()>::default())),
}
}
fn linenum(&self) -> usize {self.stk.line()}
fn column(&self) -> usize {self.stk.column()}
fn position(&self) -> usize {self.stk.current_position()}
fn current_line(&self) -> &str {self.stk.current_line()}
fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer
fn load_extras(parser:&mut ZCParser<(),()>)
{
}//end of load_extras: don't change this line as it affects augmentation