//Parser generated by rustlr for grammar test
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
// Grammar symbol names (terminals and nonterminals), indexed by the 16-bit
// symbol number packed into each TABLE entry below.
static SYMBOLS:[&'static str;16] = ["_WILDCARD_TOKEN_","TERMINAL0","TERMINAL1","TERMINAL2","identifier","COLONCOLON","class_name","id_expression","nested_name_specifier","qualified_id","template_argument","template_id","unqualified_id","START","EOF","NEWDELAYNT_nested_name_specifier_12"];
// Packed LALR action table, one u64 per (state, symbol) pair:
// bits 48..63 = state number, bits 32..47 = SYMBOLS index (see the unpacking
// loop in make_parser); the remaining bits encode the parser action and are
// decoded by rustlr::decode_action.
static TABLE:[u64;69] = [38654836737,51539673089,25772556289,64427327489,30065229825,47244902401,17180065792,281535106252802,563010083028994,844437815492610,844485059805186,844433523998720,1125960036581378,1125912792268802,1970384966516739,3659178992205826,3940653968982018,4222133243150336,4222137536020482,4222128945758210,4503603922534402,4503612512796674,5629503829835778,5910978806808578,7036878713520130,9570153504112642,9851637072134144,10133146407272449,10133116342435840,10133150702829569,10133124933681153,10133163588321281,10977554157797377,10977571337404417,10977575632175105,10977567044009985,10977588517601281,10977562747338753,10977549863813121,10977541272567808,11259003366080512,11540486930628610,11540478340694018,11821961909633024,12103484129017858,12384946220171265,12384963403317249,12384916155334656,12384924748546049,12384950517825537,12666434082308098,14918186654171136,15199708872900610,15481170963988481,15481175261642753,15481149492363265,15481188147134465,15481140899151872,16888563028525057,16888541556310017,16888550143098881,16888537258262529,16888528668721153,16888515783491584,16888545848328193,16888524374736897,17169977878380544,17451461441552386,17451508686192642,];
/// Builds the generated parser: registers one ZCRProduction (with its
/// semantic-action closure) per grammar rule, then unpacks the packed
/// action TABLE into the parser's state map.  Both the AT (abstract
/// syntax) and ET (external state) type parameters are `()` here, so all
/// semantic actions just pop the right number of stack items and return
/// the unit default.
pub fn make_parser() -> ZCParser<(),()>
{
// 16 grammar symbols, 63 parser states.
let mut parser1:ZCParser<(),()> = ZCParser::new(16,63);
// Placeholder skeleton; immediately overwritten by the first real rule below
// (an artifact of the code generator).
let mut rule = ZCRProduction::<(),()>::new_skeleton("start");
rule = ZCRProduction::<(),()>::new_skeleton("id_expression");
// Rule 0: id_expression -> (1 symbol); pop one item, yield unit.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("id_expression");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("unqualified_id");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("unqualified_id");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("qualified_id");
// Two-symbol right-hand side: items are popped in reverse order.
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("nested_name_specifier");
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("nested_name_specifier");
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("class_name");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("class_name");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("template_id");
rule.Ruleaction = |parser|{ let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("template_argument");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item6_ = parser.popstack(); let mut _item5_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
parser1.Rules.push(rule);
// The following rules belong to rustlr's "delayed reduction" transformation
// (NEWDELAYNT_* nonterminals).  Their actions call _rrsemaction_5_ /
// _rrsemaction_6_ / _rrsemaction_4_ / _rrsemaction_13_, which are NOT defined
// in this chunk — presumably generated elsewhere in the full file.
// NOTE(review): these closures also appear to return tuples while the
// parser's value type is `()`; looks like a generator artifact — TODO confirm
// this file actually compiles as emitted.
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
rule.Ruleaction = |parser|{ let mut _item_del2_12_6_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); let _delvar_15_0_ = _rrsemaction_6_(parser,_item0_,_item1_); (_delvar_15_0_,_item_del2_12_6_,) };
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
rule.Ruleaction = |parser|{ let mut _item_del3_13_5_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); let _delvar_15_0_ = _rrsemaction_5_(parser,_item0_,_item1_,_item2_); (_delvar_15_0_,_item_del3_13_5_,) };
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("qualified_id");
rule.Ruleaction = |parser|{ let mut _delayitem0_4_13 = parser.popstack(); _rrsemaction_4_(parser,_delayitem0_4_13.0,_delayitem0_4_13.1) };
parser1.Rules.push(rule);
rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
rule.Ruleaction = |parser|{ let mut _delayitem2_13_19 = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); _rrsemaction_13_(parser,_item0_,_item1_,_delayitem2_13_19.0,_delayitem2_13_19.1) };
parser1.Rules.push(rule);
// No designated error-recovery symbol for this grammar.
parser1.Errsym = "";
// Unpack the action table: high 16 bits = state, next 16 = symbol index,
// remainder decoded into a Stateaction by decode_action.
for i in 0..69 {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
} //make_parser
/// Run the parser over `lexer`, returning Ok(()) on a clean parse and
/// Err(()) when any parse error was recorded during the run.
pub fn parse_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut dyn Tokenizer<'t,()>) -> Result<(),()>
{
  let result = parser.parse(lexer);
  if parser.error_occurred() { Err(result) } else { Ok(result) }
}//parse_with public function
/// Like parse_with, but runs in interactive "training" mode: on errors the
/// parser may rewrite its own source at `parserpath` to record corrections.
pub fn parse_train_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut dyn Tokenizer<'t,()>, parserpath:&str) -> Result<(),()>
{
  let result = parser.parse_train(lexer,parserpath);
  if parser.error_occurred() { Err(result) } else { Ok(result) }
}//parse_train_with public function
// Lexical Scanner using RawToken and StrTokenizer
// Lexical Scanner using RawToken and StrTokenizer
pub struct testlexer<'lt> {
stk: StrTokenizer<'lt>,                       // underlying raw tokenizer over the input text
keywords: HashSet<&'static str>,              // alphanumeric tokens treated as grammar keywords
lexnames: HashMap<&'static str,&'static str>, // raw lexeme -> grammar terminal name (e.g. "::" -> "COLONCOLON")
shared_state: Rc<RefCell<()>>,                // external state shared with the parser (unit for this grammar)
}
impl<'lt> testlexer<'lt>
{
  /// Build a lexer directly from a string slice.
  pub fn from_str(s:&'lt str) -> testlexer<'lt> {
    Self::new(StrTokenizer::from_str(s))
  }
  /// Build a lexer from a previously loaded LexSource.
  pub fn from_source(s:&'lt LexSource<'lt>) -> testlexer<'lt> {
    Self::new(StrTokenizer::from_source(s))
  }
  /// Wrap a raw StrTokenizer, registering this grammar's keywords,
  /// operator symbols and lexeme-to-terminal-name translations.
  pub fn new(mut stk:StrTokenizer<'lt>) -> testlexer<'lt> {
    let shared_state = Rc::new(RefCell::new(<()>::default()));
    // Single- and double-character operator symbols of the grammar.
    stk.add_single('<');
    stk.add_single('>');
    stk.add_double("::");
    let mut keywords = HashSet::with_capacity(64);
    keywords.extend(["_WILDCARD_TOKEN_","identifier"]);
    // Map raw lexemes to the terminal names used in the parse table.
    let mut lexnames = HashMap::with_capacity(64);
    lexnames.extend([("<","TERMINAL1"),("::","COLONCOLON"),(">","TERMINAL0")]);
    testlexer {stk,keywords,lexnames,shared_state}
  }
}
impl<'lt> Tokenizer<'lt,()> for testlexer<'lt>
{
   /// Return the next terminal token, or None at end of input.
   /// Alphanumeric tokens that are registered keywords, and symbols with a
   /// lexname translation, are renamed to their grammar terminal names;
   /// everything else passes through unchanged, and unrecognized raw tokens
   /// become the "<LexicalError>" terminal.
   fn nextsym(&mut self) -> Option<TerminalToken<'lt,()>> {
     // `?` replaces the original `if let None ... unwrap()` sequence.
     let token = self.stk.next_token()?;
     match token.0 {
       RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
         // Translate the keyword if a mapping exists, else keep its own text.
         let truesym = self.lexnames.get(sym).copied().unwrap_or(sym);
         Some(TerminalToken::from_raw(token,truesym,<()>::default()))
       },
       RawToken::Symbol(s) => {
         // Single lookup replaces the original contains_key + get().unwrap()
         // double lookup; untranslated symbols keep their own text.
         let tname = self.lexnames.get(s).copied().unwrap_or(s);
         Some(TerminalToken::from_raw(token,tname,<()>::default()))
       },
       RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
       _ => Some(TerminalToken::from_raw(token,"<LexicalError>",<()>::default())),
     }
   }
   fn linenum(&self) -> usize {self.stk.line()}
   fn column(&self) -> usize {self.stk.column()}
   fn position(&self) -> usize {self.stk.current_position()}
   fn current_line(&self) -> &str {self.stk.current_line()}
   fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
   fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer
// Augmentation hook emitted by the rustlr generator; intentionally empty for
// this grammar (training mode may later insert learned entries here).
fn load_extras(parser:&mut ZCParser<(),()>)
{
}//end of load_extras: don't change this line as it affects augmentation