rustlr 0.3.4

rustlr is an LR parser generator for Rust that can automatically create abstract syntax trees (ASTs); see the crate's documentation for details. The listing below is a sample of its output: the parser and lexer generated for a grammar named isocpp, a fragment of the C++ grammar for qualified identifiers and template-ids.
//Parser generated by rustlr for grammar isocpp

#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};

static SYMBOLS:[&'static str;16] = ["_WILDCARD_TOKEN_","TERMINAL0","TERMINAL1","TERMINAL2","identifier","COLONCOLON","class_name","id_expression","nested_name_specifier","qualified_id","template_argument","template_id","unqualified_id","START","EOF","NEWDELAYNT_nested_name_specifier_12"];

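// The LR state machine: each u64 in TABLE packs one (state, symbol, action)
// entry; the decoding loop in make_parser below shows the exact layout.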
static TABLE:[u64;69] = [38654836737,51539673089,17180065792,47244902401,30065229825,25772556289,64427327489,281535106252802,563010083028994,844429229031424,844485059805186,844433520525314,1125908497301506,1125960036581378,1970384966516739,3659187582140418,3940662558916610,4222133241053186,4222128948183040,4222137535692802,4503608217829378,4503612512468994,5629512419770370,5910987396743170,7036887303520258,9570162094047234,9851632777166848,10133146407272449,10133150702829569,10133116342435840,10133163588321281,10133124933681153,10977549863813121,10977575632175105,10977541272567808,10977554157797377,10977588517601281,10977562747338753,10977567044009985,10977571337404417,11259011956015104,11540482635661314,11540486930628610,11821957614665728,12103484129017858,12384946220171265,12384963403317249,12384916155334656,12384924748546049,12384950517891073,12947909059084290,14918182359203840,15199708872900610,15481175261708289,15481140899151872,15481170963988481,15481149492363265,15481188147134465,16888545848328193,16888563028525057,16888541556310017,16888528668721153,16888515783491584,16888550143098881,16888537258262529,16888524374736897,17169986468315136,17451508686192642,17451457146585090,];

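// make_parser installs one ZCRProduction per grammar rule (the initial
// "start" skeleton is a placeholder that is immediately overwritten, hence
// the unused_assignments allow), then loads the LR action table from TABLE.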
pub fn make_parser() -> ZCParser<(),()>
{
 let mut parser1:ZCParser<(),()> = ZCParser::new(16,63);
 let mut rule = ZCRProduction::<(),()>::new_skeleton("start");
 rule = ZCRProduction::<(),()>::new_skeleton("id_expression");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("id_expression");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("unqualified_id");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("unqualified_id");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("qualified_id");
 rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("nested_name_specifier");
 rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("nested_name_specifier");
 rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("class_name");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("class_name");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("template_id");
 rule.Ruleaction = |parser|{ let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("template_argument");
 rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("START");
 rule.Ruleaction = |parser|{ let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); <()>::default()};
 parser1.Rules.push(rule);
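 // The NEWDELAYNT_* rules below come from rustlr's delayed-reduction
 // transformation; the _rrsemaction_N_ helpers they call are presumably
 // defined elsewhere in the full generated file and are not part of this
 // excerpt.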
 rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
 rule.Ruleaction = |parser|{ let mut _item_del3_12_5_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();  let _delvar_15_0_ = _rrsemaction_5_(parser,_item0_,_item1_,_item2_); (_delvar_15_0_,_item_del3_12_5_,) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
 rule.Ruleaction = |parser|{ let mut _item_del2_13_6_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();  let _delvar_15_0_ = _rrsemaction_6_(parser,_item0_,_item1_); (_delvar_15_0_,_item_del2_13_6_,) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("qualified_id");
 rule.Ruleaction = |parser|{ let mut _delayitem0_4_13 = parser.popstack();  _rrsemaction_4_(parser,_delayitem0_4_13.0,_delayitem0_4_13.1) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<(),()>::new_skeleton("NEWDELAYNT_nested_name_specifier_12");
 rule.Ruleaction = |parser|{ let mut _delayitem2_12_19 = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();  _rrsemaction_12_(parser,_item0_,_item1_,_delayitem2_12_19.0,_delayitem2_12_19.1) };
 parser1.Rules.push(rule);
 parser1.Errsym = "";

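 // Each TABLE entry packs a (state, symbol, action) triple into a u64:
 // bits 48..=63 hold the state number, bits 32..=47 the index of the symbol
 // in SYMBOLS, and the remaining bits encode the parser action, which
 // decode_action unpacks.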
 for &entry in TABLE.iter() {
   let symi = ((entry & 0x0000ffff00000000) >> 32) as usize;
   let sti = ((entry & 0xffff000000000000) >> 48) as usize;
   parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(entry));
 }

 for s in SYMBOLS { parser1.Symset.insert(s); }

 load_extras(&mut parser1);
 return parser1;
} //make_parser

pub fn parse_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut dyn Tokenizer<'t,()>) -> Result<(),()>
{
  let _xres_ = parser.parse(lexer);
  if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_with public function

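// parse_train_with runs the parse in rustlr's interactive training mode,
// which collects custom error messages and writes them back into this
// generated file at parserpath (see load_extras at the end of the file).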
pub fn parse_train_with<'t>(parser:&mut ZCParser<(),()>, lexer:&mut dyn Tokenizer<'t,()>, parserpath:&str) -> Result<(),()>
{
  let _xres_ = parser.parse_train(lexer,parserpath);
  if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_train_with public function

// Lexical Scanner using RawToken and StrTokenizer
pub struct isocpplexer<'t> {
   stk: StrTokenizer<'t>,
   keywords: HashSet<&'static str>,
   lexnames: HashMap<&'static str,&'static str>,
   shared_state: Rc<RefCell<()>>,
}
impl<'t> isocpplexer<'t> 
{
  pub fn from_str(s:&'t str) -> isocpplexer<'t>  {
    Self::new(StrTokenizer::from_str(s))
  }
  pub fn from_source(s:&'t LexSource<'t>) -> isocpplexer<'t>  {
    Self::new(StrTokenizer::from_source(s))
  }
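  // new() configures the underlying StrTokenizer with this grammar's
  // keywords and one- and two-character symbols, and maps raw lexemes to
  // the terminal names that appear in SYMBOLS.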
  pub fn new(mut stk:StrTokenizer<'t>) -> isocpplexer<'t> {
    let mut lexnames = HashMap::with_capacity(64);
    let mut keywords = HashSet::with_capacity(64);
    let shared_state = Rc::new(RefCell::new(<()>::default()));
    for kw in ["identifier","_WILDCARD_TOKEN_",] {keywords.insert(kw);}
    for c in ['>','<',] {stk.add_single(c);}
    for d in ["::",] {stk.add_double(d);}
    for d in [] {stk.add_triple(d);}
    for (k,v) in [(r">","TERMINAL2"),(r"<","TERMINAL0"),(r"::","COLONCOLON"),] {lexnames.insert(k,v);}
    isocpplexer {stk,keywords,lexnames,shared_state}
  }
}
impl<'t> Tokenizer<'t,()> for isocpplexer<'t>
{
   fn nextsym(&mut self) -> Option<TerminalToken<'t,()>> {
    let token = self.stk.next_token()?;
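    // Map raw tokens to grammar terminals: declared keywords first, then
    // operator symbols via lexnames, then alphanumerics and symbols passed
    // through under their own text; anything else is a lexical error.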
    match token.0 {
      RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
        let truesym = self.lexnames.get(sym).unwrap_or(&sym);
        Some(TerminalToken::from_raw(token,truesym,<()>::default()))
      },
      RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
        let tname = self.lexnames.get(s).unwrap();
        Some(TerminalToken::from_raw(token,tname,<()>::default()))
      },
      RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
      RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<()>::default())),
      _ => Some(TerminalToken::from_raw(token,"<LexicalError>",<()>::default())),
    }
  }
   fn linenum(&self) -> usize {self.stk.line()}
   fn column(&self) -> usize {self.stk.column()}
   fn position(&self) -> usize {self.stk.current_position()}
   fn current_line(&self) -> &str {self.stk.current_line()}
   fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
   fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer

fn load_extras(parser:&mut ZCParser<(),()>)
{
}//end of load_extras: don't change this line as it affects augmentation
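
As a usage sketch (not part of the generated output): a client could drive this parser as below. The input string is only an illustrative guess at what the isocpp fragment accepts, so the example reports the outcome rather than assuming success.

fn main() {
  let mut parser = make_parser();
  // This lexer maps only the literal word "identifier" to the grammar's
  // identifier terminal, so the sample input spells it out.
  let mut lexer = isocpplexer::from_str("identifier::identifier");
  match parse_with(&mut parser, &mut lexer) {
    Ok(()) => println!("input accepted"),
    Err(()) => println!("input rejected; parser reported errors"),
  }
}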