//Parser generated by rustlr for grammar untyped
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
use rustlr::{LBox,unbox};
use crate::untyped::*;
use crate::untyped::Term::*;
use fixedstr::str16;
static SYMBOLS:[&'static str;29] = ["_WILDCARD_TOKEN_","lambda","lam","Lam","(",")","[","]","DOT","let","=","in","define","lazy","weak","CBV","Liang",";","INTEGER","ID","T","F","Fs","TOP","Vars","LAMSYM","Ts","START","EOF"];
static TABLE:[u64;214] = [90194444289,8590917632,77309739008,64425164800,12885622784,81604640768,60130328576,17180917760,98784772097,94489870337,85899739137,107375099905,51540066304,38654902272,111669215233,4295819264,281552286449664,281535107039232,281569466580993,281565171154945,281595235794947,281560876449793,281479272529920,281492157628416,281582351810561,281526516776960,281483567628288,281539401875456,281573762072577,281487862333440,281556581351424,281513631612928,562971428388866,563027262963714,563031557931010,562997198192642,563022967996418,562967133421570,844506535690240,1125972921548802,1125977216516098,1125981511483394,1125921381941250,1125917086973954,1125947151745026,1407422128521218,1407447898324994,1407396358717442,1407392063750146,1407452193292290,1407456488259586,1688922875756546,1970406442663936,1970380672794624,2251872829505536,2533291971313664,2533352100134912,2533356395036672,2533296265625602,2533347805233154,2533364986150913,2533322035429378,2814831371747328,2814766948024320,2814839962927105,2814827076845568,3096306349441026,3377789916413953,3377781325168640,3377777030266880,3377716901445632,3659256302731266,3940731280031744,3940752754802689,4222206256218114,4503676937109504,4503664052535296,4503638282272768,4503608218288128,4503681232011264,4503603923189760,4503685528485889,4503707002470401,4503689821814785,4503616808288256,4503612512993280,4503694117240833,4503659757699072,4785147620360192,5066592532365312,5348106163847168,5629542485917696,5910983100858370,5910987395825666,5910991690792962,5911094770008066,5911056115302402,5911051820335106,5911013165629442,5910978805891074,5911026050531330,5911034640465922,5911038935433218,6192522502275074,6192531092209666,6192466667700226,6192470962667522,6192496732471298,6192526797242370,6473997479313410,6473945939705858,6473971709509634,6755446686285826,6755472456089602,6755420916482050,7036908779667456,7036956024242176,7318430999576578,7318383754936322,7599845848252416,7881338002669570,7881376657375234,7881359477506050,7881312232865794,7881419607048194,7881316527833090,7881307937898498,7881363772473346,7881350887571458,7881303642931202,7881380952342530,8162864519053313,8162782915526656,8162834454937600,8162778620428288,8162860226248705,8162881699708929,8162791505526784,8162787210231808,8162868814479361,8162855929249792,8162812979511296,8162851634348032,8162838749773824,8444292253351936,8725732868947968,8725784408358912,8725728573849600,8725741458948096,8725788703195136,8725762932932608,8725810179801089,8725805882671104,8725818767900673,8725737163653120,8725801587769344,8725814472474625,8725831653130241,9007280859906050,9007233615265794,9288734361780224,9288781606551553,9288682822369280,9288764425895937,9288712886353920,9288768721321985,9288678527270912,9288738656616448,9288755836092416,9288751541190656,9288687117074432,9288760133287937,9288691412369408,9570226518032386,9570166388490242,9570222223065090,9570170683457538,9570196453261314,9570230812999682,9851671432069120,10133180766224384,10133176471322624,10133116342501376,10133189356027905,10133193651453953,10133206536683521,10133159291912192,10133103457402880,10133112047206400,10133185063550977,10133163586748416,10133107752501248,10133137816485888,10414647153655810,10696070590496770,10696122130104322,10696096360300546,10977528387534848,10977536977338368,10977609993748481,10977601401454592,10977541272633344,10977605696356352,10977614286159873,10977562746617856,10977631466815489,10977584222044160,10977532682633216,10977618581585921,10977588516880384,11259072083853314,11540547060432898,11540521290629122,11540495520825346,];
pub fn make_parser() -> ZCParser<Term,Vec<LBox<Term>>>
{
let mut parser1:ZCParser<Term,Vec<LBox<Term>>> = ZCParser::new(21,42);
let mut rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("start");
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Ts");
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut x = parser.popstack(); parser.exstate.push(x.lbox()); Nothing };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Ts");
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); parser.exstate.push(x.lbox()); Nothing };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Fs");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let (a,)=(_item0_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Fs");
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut a = parser.popstack(); App(a.lbox(), b.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } /* var */ else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } /* const*/ else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let (a,)=(_item0_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (a,)=(_item1_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); CBV(x.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); Weak(x.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Seq(mut vs),)=(_item1_.value,) {
let mut t = b.value;
while vs.len()>0 {
t = Abs(getvar(&unbox!(vs.pop().unwrap())),parser.lbx(0,t));
}
return t; } else {parser.bad_pattern("(Seq(mut vs),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Vars");
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); Seq(vec![x.lbox()]) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Vars");
rule.Ruleaction = |parser|{ let mut y = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Seq(mut vs),)=(_item0_.value,) { vs.push(y.lbox()); Seq(vs) } else {parser.bad_pattern("(Seq(mut vs),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut _item4_ = parser.popstack(); let mut v = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item1_.value,) { App(parser.lbx(0,Abs(x,b.lbox())), v.lbox()) } else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
rule.Ruleaction = |parser|{ let mut v = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item1_.value,) {
let nv = Def(true,x,v.lbox());
//parser.exstate.push(parser.lbx(0,nv));
nv
} else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
rule.Ruleaction = |parser|{ let mut v = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item2_.value,) {
let nv = Def(false,x,v.lbox());
nv
} else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
parser1.Errsym = "";
parser1.resynch.insert(";");
for i in 0..214 {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
} //make_parser
pub fn parse_with<'t>(parser:&mut ZCParser<Term,Vec<LBox<Term>>>, lexer:&mut dyn Tokenizer<'t,Term>) -> Result<Term,Term>
{
let _xres_ = parser.parse(lexer); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_with public function
pub fn parse_train_with<'t>(parser:&mut ZCParser<Term,Vec<LBox<Term>>>, lexer:&mut dyn Tokenizer<'t,Term>, parserpath:&str) -> Result<Term,Term>
{
let _xres_ = parser.parse_train(lexer,parserpath); if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}
}//parse_train_with public function
// Lexical Scanner using RawToken and StrTokenizer
pub struct untypedlexer<'t> {
stk: StrTokenizer<'t>,
keywords: HashSet<&'static str>,
lexnames: HashMap<&'static str,&'static str>,
shared_state: Rc<RefCell<Vec<LBox<Term>>>>,
}
impl<'t> untypedlexer<'t>
{
pub fn from_str(s:&'t str) -> untypedlexer<'t> {
Self::new(StrTokenizer::from_str(s))
}
pub fn from_source(s:&'t LexSource<'t>) -> untypedlexer<'t> {
Self::new(StrTokenizer::from_source(s))
}
pub fn new(mut stk:StrTokenizer<'t>) -> untypedlexer<'t> {
let mut lexnames = HashMap::with_capacity(64);
let mut keywords = HashSet::with_capacity(64);
let shared_state = Rc::new(RefCell::new(<Vec<LBox<Term>>>::default()));
for kw in ["CBV","let","define","lambda","Lam","lam","_WILDCARD_TOKEN_","lazy","weak","in",] {keywords.insert(kw);}
for c in ['(',')','[',']','=',';','.',] {stk.add_single(c);}
for d in [] {stk.add_double(d);}
for d in [] {stk.add_triple(d);}
for (k,v) in [(r#"."#,"DOT"),] {lexnames.insert(k,v);}
untypedlexer {stk,keywords,lexnames,shared_state,}
}
}
impl<'t> Tokenizer<'t,Term> for untypedlexer<'t>
{
fn nextsym(&mut self) -> Option<TerminalToken<'t,Term>> {
let tokopt = self.stk.next_token();
if let None = tokopt {return None;}
let token = tokopt.unwrap();
match token.0 {
RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
let truesym = self.lexnames.get(sym).unwrap_or(&sym);
Some(TerminalToken::from_raw(token,truesym,<Term>::default()))
},
RawToken::Num(n) => Some(TerminalToken::from_raw(token,"INTEGER",Const(n))),
RawToken::Alphanum("liang") => Some(TerminalToken::from_raw(token,"Liang",Nothing)),
RawToken::Alphanum(a) => Some(TerminalToken::from_raw(token,"ID",Var(str16::from(a)))),
RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
let tname = self.lexnames.get(s).unwrap();
Some(TerminalToken::from_raw(token,tname,<Term>::default()))
},
RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<Term>::default())),
RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<Term>::default())),
_ => { let _rrodb=token.0.to_staticstr(); Some(TerminalToken::from_raw(token,_rrodb,<Term>::default())) },
}
}
fn linenum(&self) -> usize {self.stk.line()}
fn column(&self) -> usize {self.stk.column()}
fn position(&self) -> usize {self.stk.current_position()}
fn current_line(&self) -> &str {self.stk.current_line()}
fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
fn add_priority_symbol(&mut self, s:&'static str) {self.stk.add_priority_symbol(s);}
fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer
fn load_extras(parser:&mut ZCParser<Term,Vec<LBox<Term>>>)
{
}//end of load_extras: don't change this line as it affects augmentation