// Generated by rustlr 0.3.7 — an LR parser generator that can
// automatically create ASTs.  See the rustlr crate documentation.
//Parser generated by rustlr for grammar lbacalc
    
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
use std::any::Any;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action,LBox,lbdown,lbup,lbget,unbox};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
use crate::exprtrees::*;
use crate::exprtrees::Expr::*;

// Names of all grammar symbols (terminals and non-terminals).  The symbol
// index packed into each TABLE entry (decoded in make_parser) indexes here.
static SYMBOLS:[&'static str;12] = ["E","ES","+","-","*","/","(",")",";","int","START","EOF"];

// Compressed LR parse table.  Per the decode loop in make_parser, each u64
// packs the state number in bits 48-63 and the SYMBOLS index in bits 32-47;
// the remaining bits encode the parser action (see rustlr::decode_action).
static TABLE:[u64;99] = [65537,4295098369,25770131456,12885098496,38654967808,281483567235072,281496452005888,281509337104384,281492157104128,281487862005760,562997198061571,562975723552768,562962838519808,562949954142209,562988608389120,844437815230464,844463585099776,844424930918401,844450700263424,1125908496777218,1125912791744514,1125917086711810,1125934266580994,1125929971613698,1125921381679106,1407413538521088,1407374884405249,1407387768651776,1407400653684736,1688849861181441,1688862745362432,1688888515231744,1688875630395392,1970350607106048,1970324837957633,1970337722073088,1970363491942400,2251825583816704,2251812698783744,2251838468653056,2251799814733825,2533313445363712,2533300560527360,2533274791510017,2533287675494400,2814775537369090,2814762652467202,2814788422270978,2814797012205570,3096259104735232,3096233334341632,3096237629112320,3096246219112448,3096241924210688,3377712605757442,3377729785626626,3377734080593922,3377716900921344,3377708310790146,3377721195823104,3659196172533760,3659204763254784,3659183287762944,3659187582533632,3659191877632000,3940671149244416,3940679738851330,3940666854342656,3940662558982146,3940684033818626,3940658264014850,4222159010594818,4222154715627522,4222137535758338,4222141830725634,4222146125692930,4222133240791042,4503621102469122,4503629692403714,4503612512534530,4503616807501826,4503633987371010,4503608217567234,4785108963885058,4785083194081282,4785091784474624,4785104668917762,4785096079376384,4785087489048578,5066596825956354,5066575351119874,5066562466217986,5066588236021762,5348041737764866,5348054622666754,5348046032732162,5348037442797570,5348058917634050,5348033147830274,];

// Rule 0 action: pops the single semantic value off the parse stack,
// downcasts it to Expr (lbdown!), and unwraps it from its LBox (unbox!).
fn _semaction_for_0_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut m = lbdown!(parser.popstack().value,Expr);  unbox!(m) }
// Rule 1 action: pops the right operand, the operator token (discarded as
// _item1_), then the left operand — stack pops are in reverse rule order —
// and builds a Plus node from the two still-boxed operands.
fn _semaction_for_1_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut e2 = lbdown!(parser.popstack().value,Expr); let mut _item1_ = parser.popstack().value; let mut e1 = lbdown!(parser.popstack().value,Expr);  Plus(e1,e2) }
// Rule 2 action: same pop pattern as rule 1 (right, operator, left),
// building a Minus node.
fn _semaction_for_2_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut e2 = lbdown!(parser.popstack().value,Expr); let mut _item1_ = parser.popstack().value; let mut e1 = lbdown!(parser.popstack().value,Expr);  Minus(e1,e2) }
// Rule 3 action: binary operator rule building a Divide node.
// NOTE(review): rule 3 is Divide and rule 4 is Times — this follows the
// production order in the lbacalc grammar; confirm against the .grammar file.
fn _semaction_for_3_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut e2 = lbdown!(parser.popstack().value,Expr); let mut _item1_ = parser.popstack().value; let mut e1 = lbdown!(parser.popstack().value,Expr);  Divide(e1,e2) }
// Rule 4 action: binary operator rule building a Times node.
fn _semaction_for_4_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut e2 = lbdown!(parser.popstack().value,Expr); let mut _item1_ = parser.popstack().value; let mut e1 = lbdown!(parser.popstack().value,Expr);  Times(e1,e2) }
// Rule 5 action: pops the operand, then the prefix operator token
// (discarded), and builds a unary Negative node around the boxed operand.
fn _semaction_for_5_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut e = lbdown!(parser.popstack().value,Expr); let mut _item0_ = parser.popstack().value;  Negative(e) }
// Rule 6 action: pops three items — presumably ')' , E , '(' for a
// parenthesized expression — discards the delimiter tokens and moves the
// inner Expr out of its LBox via its `exp` Box (*e.exp).
fn _semaction_for_6_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Expr {
let mut _item2_ = parser.popstack().value; let mut e = lbdown!(parser.popstack().value,Expr); let mut _item0_ = parser.popstack().value;  *e.exp }
// Rule 7 action (ES base case): pops a trailing token (discarded) and one
// expression, which stays boxed, and starts the expression list.
fn _semaction_for_7_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Vec<LBox<Expr>> {
let mut _item1_ = parser.popstack().value; let mut n = lbdown!(parser.popstack().value,Expr);  vec![n] }
// Rule 8 action (ES recursive case): pops a trailing token (discarded), the
// new expression, and the accumulated list; appends the boxed expression and
// returns the grown list unboxed.
fn _semaction_for_8_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> Vec<LBox<Expr>> {
let mut _item2_ = parser.popstack().value; let mut e = lbdown!(parser.popstack().value,Expr); let mut v = lbdown!(parser.popstack().value,Vec<LBox<Expr>>); 
   v.push(e);
   unbox!(v)
   }
// Rule 9 action (augmented START rule): pops the finished expression list and
// returns a default placeholder value.  NOTE(review): the popped list is
// dropped here — presumably the final result is delivered through the
// augmentation hook (load_extras) rather than this return value; confirm.
fn _semaction_for_9_(parser:&mut ZCParser<LBox<dyn Any>,i64>) -> LBox<dyn Any> {
let mut _item0_ = lbdown!(parser.popstack().value,Vec<LBox<Expr>>); <LBox<dyn Any>>::default()}

/// Constructs the LALR parser for the lbacalc grammar: registers one
/// production (with its boxed semantic action) per grammar rule, in rule
/// order, then decodes the packed TABLE into the runtime state machine.
pub fn make_parser() -> ZCParser<LBox<dyn Any>,i64>
{
 // 10 productions, 20 parser states.
 let mut parser1:ZCParser<LBox<dyn Any>,i64> = ZCParser::new(10,20);
 // Skeleton for the start symbol; immediately overwritten below
 // (generated-code idiom — hence the allow(unused_assignments) at file top).
 let mut rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("start");
 // Rules 0-6: productions for non-terminal E.  Each Ruleaction wraps the
 // typed semantic action's Expr result into an LBox<dyn Any> (lbup!),
 // tagged with the current source position.
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_0_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_1_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_2_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_3_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_4_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_5_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("E");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_6_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 // Rules 7-8: productions for the expression list ES.
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("ES");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_7_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("ES");
 rule.Ruleaction = |parser|{  lbup!( LBox::new(_semaction_for_8_(parser),parser.linenum,parser.column)) };
 parser1.Rules.push(rule);
 // Rule 9: augmented START rule; its action already returns LBox<dyn Any>,
 // so no lbup! wrapping is needed.
 rule = ZCRProduction::<LBox<dyn Any>,i64>::new_skeleton("START");
 rule.Ruleaction = |parser|{  _semaction_for_9_(parser) };
 parser1.Rules.push(rule);
 // No dedicated error-recovery symbol; resynchronize on ';' after errors.
 parser1.Errsym = "";
 parser1.resynch.insert(";");

 // Decode each packed table entry: bits 48-63 = state number, bits 32-47 =
 // SYMBOLS index, low bits = action (interpreted by rustlr::decode_action).
 for i in 0..99 {
   let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
   let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
   parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
 }

 for s in SYMBOLS { parser1.Symset.insert(s); }

 // Apply user augmentation (empty for this grammar — see load_extras).
 load_extras(&mut parser1);
 return parser1;
} //make_parser


//Enum for return values 
// Tagged union covering every semantic-value type the grammar produces:
// Expr for E, Vec<LBox<Expr>> for ES, and a boxed-Any placeholder used as
// the default/untyped payload (variant 0).
enum RetTypeEnum {
  Enumvariant_1(Expr),
  Enumvariant_2(Vec<LBox<Expr>>),
  Enumvariant_0(LBox<dyn Any>),
}
// The default semantic value is the untyped variant wrapping an empty
// LBox<dyn Any> payload.
impl Default for RetTypeEnum {
  fn default() -> Self {
    RetTypeEnum::Enumvariant_0(<LBox<dyn Any>>::default())
  }
}


// Lexical Scanner using RawToken and StrTokenizer
// Lexer state for the lbacalc grammar.
pub struct lbacalclexer<'t> {
   // underlying string tokenizer supplying raw tokens
   stk: StrTokenizer<'t>,
   // reserved-word set (left empty by new() for this grammar)
   keywords: HashSet<&'static str>,
}
impl<'t> lbacalclexer<'t> 
{
  pub fn from_str(s:&'t str) -> lbacalclexer<'t>  {
    Self::new(StrTokenizer::from_str(s))
  }
  pub fn from_source(s:&'t LexSource<'t>) -> lbacalclexer<'t>  {
    Self::new(StrTokenizer::from_source(s))
  }
  pub fn new(mut stk:StrTokenizer<'t>) -> lbacalclexer<'t> {
    let mut keywords = HashSet::with_capacity(16);
    for kw in [] {keywords.insert(kw);}
    for c in ['+','-','*','/','(',')',';',] {stk.add_single(c);}
    for d in [] {stk.add_double(d);}
    lbacalclexer {stk,keywords}
  }
}
// Adapter exposing lbacalclexer as a rustlr Tokenizer that yields
// LBox<dyn Any> semantic values.
impl<'t> Tokenizer<'t,LBox<dyn Any>> for lbacalclexer<'t>
{
  fn nextsym(&mut self) -> Option<TerminalToken<'t,LBox<dyn Any>>> {
    // `?` returns None at end of input (same as the original if-let check).
    let token = self.stk.next_token()?;
    // NOTE(review): integer tokens carry a RetTypeEnum payload while the
    // semantic actions downcast to Expr via lbdown! — confirm raw_to_lba
    // and lbdown! agree on the stored payload type.
    let term = match token.0 {
      RawToken::Num(n) => TerminalToken::raw_to_lba(token,"int",RetTypeEnum::Enumvariant_1(Val(n))),
      RawToken::Symbol(s) => TerminalToken::raw_to_lba(token,s,<LBox<dyn Any>>::default()),
      RawToken::Alphanum(s) => TerminalToken::raw_to_lba(token,s,<LBox<dyn Any>>::default()),
      _ => TerminalToken::raw_to_lba(token,"<LexicalError>",<LBox<dyn Any>>::default()),
    };
    Some(term)
  }
  fn linenum(&self) -> usize { self.stk.line() }
  fn column(&self) -> usize { self.stk.column() }
  fn position(&self) -> usize { self.stk.current_position() }
}//impl Tokenizer

// User-augmentation hook invoked at the end of make_parser; intentionally
// empty for this grammar.  The closing comment below is load-bearing for
// rustlr's augmentation mechanism and must not be edited.
fn load_extras(parser:&mut ZCParser<LBox<dyn Any>,i64>)
{
}//end of load_extras: don't change this line as it affects augmentation