rustlr 0.3.0

An LR/LALR parser generator that can automatically create abstract syntax trees.

The listing below is the parser that rustlr generates for the grammar cppid.
//Parser generated by rustlr for grammar cppid
    
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(irrefutable_let_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
use rustlr::LBox;
use crate::cppid_ast;
use crate::cppid_ast::*;

static SYMBOLS:[&'static str;13] = ["_WILDCARD_TOKEN_","i","DC","Langle","Rangle","I","U","Q","T","N","START","EOF","NEWDELAYNT_U_2"];

static TABLE:[u64;50] = [4295032832,38655164417,51539738625,34360066049,25770065921,21475229697,30064967681,281492156710914,281483567169536,281522221481986,281487862202368,562988608847873,562984313946113,563001493159937,562954249043968,844442110066690,844472174837762,1125947151482882,1125917086711810,1407422128390146,1407392063619074,1407383474339840,1688897104904195,1970350607761409,1970359197761537,1970329132859392,2251804109242370,2533296266346497,2533300560461825,2533309150461953,2533279085428736,2533304855363585,2533326330134529,2533313445560321,2814758357565440,2814762652598272,3096229039112194,3377708311314432,3659178992861186,3940666853949442,3940662559440896,3940696918720514,4222141830791170,4222171895562242,4503616807436290,4503646872207362,4785091785129984,5066558171185154,5066596825890818,5066566761119746,];


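// Semantic action functions: one per grammar rule. Each action pops the values
// for the rule's right-hand side off the parse stack (rightmost symbol first)
// and assembles the corresponding AST value defined in cppid_ast.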
fn _semaction_rule_0_(parser:&mut ZCParser<RetTypeEnum,()>) -> I {
let mut _item0_ = if let RetTypeEnum::Enumvariant_9(_x_9)=parser.popstack().value { _x_9 } else {<U>::default()};  I::I_0(parser.lbx(0,_item0_)) }

fn _semaction_rule_1_(parser:&mut ZCParser<RetTypeEnum,()>) -> I {
let mut _item0_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Q>::default()};  I::I_1(parser.lbx(0,_item0_)) }

fn _semaction_rule_2_(parser:&mut ZCParser<RetTypeEnum,()>) -> U {
let mut _item0_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()};  U::i }

fn _semaction_rule_3_(parser:&mut ZCParser<RetTypeEnum,()>) -> U {
let mut _item0_ = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<T>::default()};  U::U_3(parser.lbx(0,_item0_)) }

fn _semaction_rule_4_(parser:&mut ZCParser<RetTypeEnum,()>) -> Q {
let mut _item1_ = if let RetTypeEnum::Enumvariant_9(_x_9)=parser.popstack().value { _x_9 } else {<U>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<N>::default()};  Q(parser.lbx(0,_item0_), parser.lbx(1,_item1_), ) }

fn _semaction_rule_5_(parser:&mut ZCParser<RetTypeEnum,()>) -> N {
let mut _item2_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<N>::default()}; let mut _delayeditem0_ = if let RetTypeEnum::Enumvariant_15(_x_15)=parser.popstack().value { _x_15 } else {<(U,(),)>::default()};  let mut _item0_ = _delayeditem0_.0; let mut _item1_ = _delayeditem0_.1;  N::N_5(parser.lbx(0,_item0_),parser.lbx(2,_item2_)) }

fn _semaction_rule_6_(parser:&mut ZCParser<RetTypeEnum,()>) -> N {
let mut _delayeditem0_ = if let RetTypeEnum::Enumvariant_15(_x_15)=parser.popstack().value { _x_15 } else {<(U,(),)>::default()};  let mut _item0_ = _delayeditem0_.0; let mut _item1_ = _delayeditem0_.1;  N::N_6(parser.lbx(0,_item0_)) }

fn _semaction_rule_7_(parser:&mut ZCParser<RetTypeEnum,()>) -> T {
let mut _item3_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()}; let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<I>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()};  T(parser.lbx(2,_item2_), ) }

fn _semaction_rule_8_(parser:&mut ZCParser<RetTypeEnum,()>) -> () {
let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<I>::default()}; <()>::default()}

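// Rules 9 and 10 build the tuple value carried by the internal nonterminal
// NEWDELAYNT_U_2 (a rustlr-generated symbol, see SYMBOLS above); rules 5 and 6
// later unpack that tuple through _delayeditem0_.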
fn _semaction_rule_9_(parser:&mut ZCParser<RetTypeEnum,()>) -> (U,(),) {
let mut _item1_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()};  let _del_12_0_ = {  U::i }; (_del_12_0_,_item1_,) }

fn _semaction_rule_10_(parser:&mut ZCParser<RetTypeEnum,()>) -> (U,(),) {
let mut _item1_ = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<()>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<T>::default()};  let _del_12_0_ = {  U::U_3(parser.lbx(0,_item0_)) }; (_del_12_0_,_item1_,) }

pub fn make_parser() -> ZCParser<RetTypeEnum,()>
{
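 // Build the parser: register one ZCRProduction per grammar rule, install its
 // semantic action, then populate the state machine (RSM) from TABLE.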
 let mut parser1:ZCParser<RetTypeEnum,()> = ZCParser::new(11,19);
 let mut rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("start");
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("I");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_0(_semaction_rule_0_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("I");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_0(_semaction_rule_1_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("U");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_9(_semaction_rule_2_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("U");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_9(_semaction_rule_3_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("Q");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_13(_semaction_rule_4_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("N");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_11(_semaction_rule_5_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("N");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_11(_semaction_rule_6_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("T");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_12(_semaction_rule_7_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("START");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_1(_semaction_rule_8_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("NEWDELAYNT_U_2");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_15(_semaction_rule_9_(parser)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<RetTypeEnum,()>::new_skeleton("NEWDELAYNT_U_2");
 rule.Ruleaction = |parser|{  RetTypeEnum::Enumvariant_15(_semaction_rule_10_(parser)) };
 parser1.Rules.push(rule);
 parser1.Errsym = "";

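 // Each TABLE entry packs one parse-table transition into a u64: bits 48..64
 // hold the state, bits 32..48 the index into SYMBOLS, and the remaining bits
 // encode the action itself, which decode_action extracts.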
 for i in 0..50 {
   let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
   let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
   parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
 }

 for s in SYMBOLS { parser1.Symset.insert(s); }

 load_extras(&mut parser1);
 return parser1;
} //make_parser

pub fn parse_with<'t>(parser:&mut ZCParser<RetTypeEnum,()>, lexer:&mut cppidlexer<'t>) -> Result<I,I>
{
  lexer.shared_state = Rc::clone(&parser.shared_state);
  if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse(lexer) {
     if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
  } else { Err(<I>::default())}
}//parse_with public function

pub fn parse_train_with<'t>(parser:&mut ZCParser<RetTypeEnum,()>, lexer:&mut cppidlexer<'t>, parserpath:&str) -> Result<I,I>
{
  lexer.shared_state = Rc::clone(&parser.shared_state);
  if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse_train(lexer,parserpath) {
     if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
  } else { Err(<I>::default())}
}//parse_train_with public function

//Enum wrapping the semantic value types of all grammar symbols so they can share a single parse stack
pub enum RetTypeEnum {
  Enumvariant_15((U,(),)),
  Enumvariant_0(I),
  Enumvariant_2((usize,usize)),
  Enumvariant_9(U),
  Enumvariant_1(()),
  Enumvariant_13(Q),
  Enumvariant_11(N),
  Enumvariant_12(T),
}
impl Default for RetTypeEnum { fn default()->Self {RetTypeEnum::Enumvariant_0(<I>::default())} }


// Lexical Scanner using RawToken and StrTokenizer
pub struct cppidlexer<'t> {
   stk: StrTokenizer<'t>,
   keywords: HashSet<&'static str>,
   lexnames: HashMap<&'static str,&'static str>,
   shared_state: Rc<RefCell<()>>,
}
impl<'t> cppidlexer<'t> 
{
  pub fn from_str(s:&'t str) -> cppidlexer<'t>  {
    Self::new(StrTokenizer::from_str(s))
  }
  pub fn from_source(s:&'t LexSource<'t>) -> cppidlexer<'t>  {
    Self::new(StrTokenizer::from_source(s))
  }
  pub fn new(mut stk:StrTokenizer<'t>) -> cppidlexer<'t> {
    let mut lexnames = HashMap::with_capacity(64);
    let mut keywords = HashSet::with_capacity(64);
    let shared_state = Rc::new(RefCell::new(<()>::default()));
    for kw in ["i","_WILDCARD_TOKEN_",] {keywords.insert(kw);}
    for c in ['<','>',] {stk.add_single(c);}
    for d in ["::",] {stk.add_double(d);}
    for d in [] {stk.add_triple(d);}
    for (k,v) in [(r"<","Langle"),(r"::","DC"),(r">","Rangle"),] {lexnames.insert(k,v);}
    cppidlexer {stk,keywords,lexnames,shared_state}
  }
}
impl<'t> Tokenizer<'t,RetTypeEnum> for cppidlexer<'t>
{
   fn nextsym(&mut self) -> Option<TerminalToken<'t,RetTypeEnum>> {
    let tokopt = self.stk.next_token();
    if let None = tokopt {return None;}
    let token = tokopt.unwrap();
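    // Map the raw token to a terminal name: symbols are translated through
    // lexnames ("<" -> "Langle", "::" -> "DC", ">" -> "Rangle"), keywords and
    // other alphanumeric tokens keep their own text, and any other token kind
    // is reported as "<LexicalError>".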
    match token.0 {
      RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
        let truesym = self.lexnames.get(sym).unwrap_or(&sym);
        Some(TerminalToken::from_raw(token,truesym,<RetTypeEnum>::default()))
      },
      RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
        let tname = self.lexnames.get(s).unwrap();
        Some(TerminalToken::from_raw(token,tname,<RetTypeEnum>::default()))
      },
      RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<RetTypeEnum>::default())),
      RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<RetTypeEnum>::default())),
      _ => Some(TerminalToken::from_raw(token,"<LexicalError>",<RetTypeEnum>::default())),
    }
  }
   fn linenum(&self) -> usize {self.stk.line()}
   fn column(&self) -> usize {self.stk.column()}
   fn position(&self) -> usize {self.stk.current_position()}
   fn current_line(&self) -> &str {self.stk.current_line()}
   fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
   fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
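   // For wildcard matches, replace the dummy semantic value with the token's
   // source span (previous position, current position).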
   fn transform_wildcard(&self,t:TerminalToken<'t,RetTypeEnum>) -> TerminalToken<'t,RetTypeEnum> { TerminalToken::new(t.sym,RetTypeEnum::Enumvariant_2((self.stk.previous_position(),self.stk.current_position())),t.line,t.column) }
}//impl Tokenizer

fn load_extras(parser:&mut ZCParser<RetTypeEnum,()>)
{
}//end of load_extras: don't change this line as it affects augmentation
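
For reference, a minimal driver for this generated parser might look like the sketch below. The module layout (cppid_ast.rs alongside a file containing the code above, here assumed to be named cppidparser.rs) and the sample input string are assumptions for illustration; make_parser, cppidlexer::from_str and parse_with are the functions defined above.

// Hypothetical driver (main.rs); file/module names and input are illustrative only.
mod cppid_ast;        // AST types used by the generated actions (assumed file name)
mod cppidparser;      // the generated parser shown above (assumed file name)
use cppidparser::*;

fn main() {
  let input = "i<i>::i";                        // sample input for the cppid grammar (assumed)
  let mut lexer = cppidlexer::from_str(input);  // lexer defined in the generated file
  let mut parser = make_parser();               // builds the parser with its rules and state table
  match parse_with(&mut parser, &mut lexer) {
    Ok(_ast) => println!("parse succeeded"),    // _ast has the AST type I from cppid_ast
    Err(_) => println!("parse failed or recovered from errors"),
  }
}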