//Parser generated by rustlr for grammar nuttycalc
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
static SYMBOLS:[&'static str;17] = ["_WILDCARD_TOKEN_","E","E2","E3","num","float","(",")","mark1","mark2","PLUS","MINUS","TIMES","DIV","START","EOF","NEWDELAYNT_E_4"];
static TABLE:[u64;111] = [21475098624,17180065792,25769934848,4295032833,281539401220099,563018673815553,562975723880448,562971428978688,562967134142464,562954249175041,844489354641410,1125964331417602,2814775539466240,2814754064695297,2814766949466112,2814818490974209,2814771242663936,3096241925586944,3096254808653826,3377729786281984,3659191878746112,3659204762009602,3940701220962304,4222189075431426,5066579650805760,5066566765510656,5910991690858498,5911004575760386,7036925957963778,7036930254045184,7318379463442432,8162825864609794,9851654249644034,9851641364742146,10133155000287232,10133150701780994,10414625680719872,10696066297233408,10696079179776002,10977541274075136,10977545567272960,10977592813617153,10977528391794689,10977549866565632,11259016254455808,11259029138440192,12103454070472704,13792325399019522,13792329697525760,14355275354669056,15199700282769410,16044090852638722,16044103737540610,16325600193609728,17732953604161536,18014462934188034,18858874984660992,19984774886522882,20266228388069378,20266215507689472,20547703369367552,20829199820980224,21673641928622081,21673590389080064,21673598982291456,21673577504309249,21673594682277888,21955065368608768,21955078253445120,22518015322357760,22518028201885698,22799524658544640,23080978160615424,23362453137260544,23643915227103232,23925390204928000,24206899537379330,24769815132897280,24769823727026176,24769866672439297,24769819426095104,24769802248126465,26458699356635136,26740152858574848,27021627829125122,27584564901314560,27584577786150912,28147549211197442,28428989834199040,28429002712743938,28710516346388481,28710464806846464,28710473401499648,28710451924566017,28710469100044288,29273427642679298,29273414757777410,30680789644869632,30680802526953474,30962316160073729,30962268914384896,30962273215250432,30962251734319105,30962264620335104,31243743891095552,31243739601567744,31243726716272641,31243748191961088,31243791137701889,31806702440546304,32088211770245122,];
pub fn make_parser() -> ZCParser<(),()>
{
let mut parser1:ZCParser<(),()> = ZCParser::new(13,118);
let mut rule = ZCRProduction::<(),()>::new_skeleton("start");
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del1_1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_2_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_3_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del1_0_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del3_4_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("E");
rule.Ruleaction = |parser|{ let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _delayeditem1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_E_4");
rule.Ruleaction = |parser|{ let mut _item_del5_2_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _delayeditem1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
parser1.Errsym = "";
for i in 0..111 {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
} //make_parser
pub fn parse_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut nuttycalclexer<'t>) -> Result<(),()>
{
let _xres_ = parser.parse(lexer); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_with public function
pub fn parse_train_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut nuttycalclexer<'t>, parserpath:&str) -> Result<(),()>
{
let _xres_ = parser.parse_train(lexer,parserpath); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_train_with public function
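// Example driver (a minimal sketch, not part of the rustlr-generated output):
// it shows how the items defined in this file are typically wired together.
// The input string must follow the nuttycalc grammar; since all semantic
// values are the unit type, the Ok/Err result only signals whether parsing
// succeeded.
pub fn example_parse(input:&str) -> Result<(),()>
{
  let mut lexer = nuttycalclexer::from_str(input);
  let mut parser = make_parser();
  parse_with(&mut parser, &mut lexer)
}//example_parse (illustrative only)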
// Lexical Scanner using RawToken and StrTokenizer
pub struct nuttycalclexer<'t> {
stk: StrTokenizer<'t>,
keywords: HashSet<&'static str>,
lexnames: HashMap<&'static str,&'static str>,
shared_state: Rc<RefCell<()>>,
}
impl<'t> nuttycalclexer<'t>
{
pub fn from_str(s:&'t str) -> nuttycalclexer<'t> {
Self::new(StrTokenizer::from_str(s))
}
pub fn from_source(s:&'t LexSource<'t>) -> nuttycalclexer<'t> {
Self::new(StrTokenizer::from_source(s))
}
pub fn new(mut stk:StrTokenizer<'t>) -> nuttycalclexer<'t> {
let mut lexnames = HashMap::with_capacity(64);
let mut keywords = HashSet::with_capacity(64);
let shared_state = Rc::new(RefCell::new(<()>::default()));
for kw in ["_WILDCARD_TOKEN_","mark1","num","mark2","float",] {keywords.insert(kw);}
for c in ['(',')','*','-','+','/',] {stk.add_single(c);}
for d in [] {stk.add_double(d);}
for d in [] {stk.add_triple(d);}
for (k,v) in [(r"*","TIMES"),(r"-","MINUS"),(r"+","PLUS"),(r"/","DIV"),] {lexnames.insert(k,v);}
nuttycalclexer {stk,keywords,lexnames,shared_state}
}
}
impl<'t> Tokenizer<'t,()> for nuttycalclexer<'t>
{
fn nextsym(&mut self) -> Option<TerminalToken<'t,()>> {
let token = self.stk.next_token()?;
match token.0 {
RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
let truesym = self.lexnames.get(sym).unwrap_or(&sym);
Some(TerminalToken::from_raw(token,truesym,<()>::default()))
},
RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
let tname = self.lexnames.get(s).unwrap();
Some(TerminalToken::from_raw(token,tname,<()>::default()))
},
RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
_ => Some(TerminalToken::from_raw(token,"<LexicalError>",<()>::default())),
}
}
fn linenum(&self) -> usize {self.stk.line()}
fn column(&self) -> usize {self.stk.column()}
fn position(&self) -> usize {self.stk.current_position()}
fn current_line(&self) -> &str {self.stk.current_line()}
fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer
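// Debugging aid (a sketch, not emitted by rustlr): prints the terminal name
// and source position of every token the lexer produces, which helps check
// what the parser will actually see.  It assumes TerminalToken exposes the
// public sym, line and column fields of the rustlr crate.
pub fn dump_tokens(input:&str)
{
  let mut lexer = nuttycalclexer::from_str(input);
  while let Some(tok) = lexer.nextsym() {
    println!("terminal {} at line {}, column {}", tok.sym, tok.line, tok.column);
  }
}//dump_tokens (illustrative only)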
fn load_extras(parser:&mut ZCParser<(),()>)
{
}//end of load_extras: don't change this line as it affects augmentation