//Parser generated by rustlr for grammar untyped
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(irrefutable_let_patterns)]
#![allow(unreachable_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action};
use rustlr::{StrTokenizer,RawToken,LexSource};
use std::collections::{HashMap,HashSet};
use rustlr::{LBox,unbox};
use crate::untyped::*;
use crate::untyped::Term::*;
use fixedstr::str16;
// Grammar symbol names (terminals and nonterminals), indexed by the symbol
// number encoded in TABLE entries below.  Index 0 is the wildcard token and
// the last entry is the EOF marker.
static SYMBOLS:[&'static str;29] = ["_WILDCARD_TOKEN_","lambda","lam","Lam","(",")","[","]","DOT","let","=","in","define","lazy","weak","CBV","Liang",";","INTEGER","ID","T","F","Fs","TOP","Vars","LAMSYM","Ts","START","EOF"];
// Packed LALR state-machine table, machine-generated by rustlr: each u64
// encodes (state, symbol, action); see the decode loop in make_parser, which
// extracts the symbol index from bits 32-47 and the state from bits 48-63,
// and passes the whole word to rustlr's decode_action for the action part.
// Do not edit by hand.
static TABLE:[u64;214] = [51540197376,107374379009,90194444289,38655688704,98785165313,4295426048,81604902912,64425230336,17180655616,60129607680,94489542657,77310459904,85900197889,8590327808,111669805057,12885229568,281492157366272,281565172137985,281556581613568,281552287170560,562967133421570,562997198192642,563027262963714,562971428388866,563031557931010,563022967996418,844528010592257,844506535690240,1125990102466561,1125947151876098,1125977217302528,1125921382072322,1125917087498240,1125981511745536,1125972921679874,1407456489177090,1688931465822210,1970406442467330,2251881418326018,2251877123358722,2251821288783874,2251816993816578,2251872828391426,2251847058587650,2533330626347008,2533356396216320,2814844256649217,2814848552861697,2814831372009472,2814839961550849,2814762652336128,2814766947762176,2814857141485569,2814758357434368,2814870026190851,2814809896714240,2814754062532608,2814835667304449,2814801307303936,2814788422795264,2814814192336896,2814827077566464,3096306348720128,3096314939703297,3096302054277120,3096241924472832,3377781325430784,3377704015953920,3377807094906881,3377712605757440,3377716901183488,3377764145758208,3377759850135552,3377708310855680,3377738376216576,3377777030987776,3377794210070529,3377785621512193,3377789914972161,3659247712731138,3940722690097152,4222206256807936,4503681232076802,4503646872338434,4503621102534658,4503676937109506,4503672642142210,4503616807567362,4785121849311234,4785147619115010,4785096079507458,5066631185891330,5066583941251074,5348058919075840,5348106163781632,5629576843821058,5629546779049986,5629521009246210,5629516714278914,5629572548853762,5629581138788354,5911056117268480,6192492439339008,6473997480886272,6755446686220290,6755420916416514,6755472456024066,7036895894765568,7318469653561346,7318388049182722,7318357984411650,7318366574346242,7318413818986498,7318430998855682,7318409524019202,7318400934084610,7318426703888386,7318353689444354,7318362279378946,7599867323088896,7881376658358272,788130364
3324416,7881406722277377,7881363773128704,7881359477506048,7881385249538049,7881338003587072,7881307938226176,7881380952801280,7881393837441025,7881312233127936,7881316528553984,7881389542342657,8162855929774082,8162808685133826,8444292253351936,8725737163259904,8725728573456384,8725762933719040,8725788703260672,8725810179801089,8725732868358144,8725801588490240,8725814472474625,8725784407638016,8725818767572993,8725805882933248,8725741458685952,8725831652409345,9007276564217858,9007250794414082,9007280859185154,9007203549773826,9007237909512194,9007212139708418,9007263679315970,9007259384348674,9007216434675714,9007207844741122,9007319513890818,9288751541321730,9288747246354434,9288755836289026,9288691411779586,9288695706746882,9288721476550658,9570239402606593,9570209337769984,9570235109998593,9570226518622208,9570230813065216,9570162093391872,9570166388817920,9570256582541313,9570157798490112,9570187863851008,9570213633392640,9570153503588352,9570243697704961,9851671430168578,9851697199972354,9851645660364802,10133189356027905,10133163586813952,10133193651126273,10133112046813184,10133116342239232,10133137817272320,10133107751911424,10133159291191296,10133180766486528,10133103457009664,10133185063485441,10133176472043520,10133206535962625,10414647153655810,10696096362266624,10977597107142658,11259007658754048,11259059198033920,11259063493656576,11259076378886144,11259093557968897,11259003363852288,11259016249081856,11259089262870529,11259080673329152,11259037724114944,11259084970459137,11259106442805249,11259011953655808,11540521290629122,11540495520825346,11540547060432898,];
/// Constructs the LALR parser for the "untyped" grammar: 21 productions and
/// 42 states (the two arguments to ZCParser::new; exactly 21 rules are pushed
/// below).  Semantic actions build `Term` values; the parser's external state
/// (`exstate`) is a Vec<LBox<Term>> into which the Ts productions push each
/// completed top-level term.
///
/// NOTE: every Ruleaction closure pops its right-hand-side values in reverse
/// order (rightmost symbol first), mirroring the parse stack — the pop order
/// in each closure is load-bearing and must not be changed.
pub fn make_parser() -> ZCParser<Term,Vec<LBox<Term>>>
{
let mut parser1:ZCParser<Term,Vec<LBox<Term>>> = ZCParser::new(21,42);
// The "start" skeleton is a generated placeholder; it is overwritten on the
// next line before ever being pushed onto parser1.Rules.
let mut rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("start");
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Ts");
// Ts (2 symbols): pushes the term's LBox into exstate; the rule itself
// reduces to Nothing.
rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut x = parser.popstack(); parser.exstate.push(x.lbox()); Nothing };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Ts");
// Ts (3 symbols, left-recursive form): same exstate push as above.
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); parser.exstate.push(x.lbox()); Nothing };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Fs");
// Fs -> single item: pass the value through unchanged.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let (a,)=(_item0_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Fs");
// Fs -> Fs F: left-associative application.
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut a = parser.popstack(); App(a.lbox(), b.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
// F -> variable: pass-through.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } /* var */ else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
// F -> constant: pass-through.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } /* const*/ else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
// T -> single item: pass-through.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let (a,)=(_item0_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("F");
// F (3 symbols): middle value extracted — presumably a parenthesized term;
// TODO confirm against the grammar file.
rule.Ruleaction = |parser|{ let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (a,)=(_item1_.value,) { a } else {parser.bad_pattern("(a,)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
// T -> CBV-marked term.
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); CBV(x.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
// T -> weak-marked term.
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); let mut _item0_ = parser.popstack(); Weak(x.lbox()) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
// T -> lambda abstraction over a Vars list: folds the variable list into
// nested Abs terms, innermost binder last (vs.pop takes from the right).
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Seq(mut vs),)=(_item1_.value,) {
let mut t = b.value;
while vs.len()>0 {
t = Abs(getvar(&unbox!(vs.pop().unwrap())),parser.lbx(0,t));
}
return t; } else {parser.bad_pattern("(Seq(mut vs),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Vars");
// Vars -> first variable: start a Seq list.
rule.Ruleaction = |parser|{ let mut x = parser.popstack(); Seq(vec![x.lbox()]) };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("Vars");
// Vars -> Vars var: append to the existing Seq.
rule.Ruleaction = |parser|{ let mut y = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Seq(mut vs),)=(_item0_.value,) { vs.push(y.lbox()); Seq(vs) } else {parser.bad_pattern("(Seq(mut vs),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("T");
// T -> let-binding (6 symbols): desugars `let x = v in b` to the
// application App(Abs(x,b), v).
rule.Ruleaction = |parser|{ let mut b = parser.popstack(); let mut _item4_ = parser.popstack(); let mut v = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item1_.value,) { App(parser.lbx(0,Abs(x,b.lbox())), v.lbox()) } else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
// TOP -> 4-symbol definition form: builds Def(true, name, value).
rule.Ruleaction = |parser|{ let mut v = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item1_.value,) {
let nv = Def(true,x,v.lbox());
//parser.exstate.push(parser.lbx(0,nv));
nv
} else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
// TOP -> 5-symbol definition form: builds Def(false, name, value) —
// presumably the lazy/strict variant of the rule above; TODO confirm
// against the grammar file.
rule.Ruleaction = |parser|{ let mut v = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack();
if let (Var(x),)=(_item2_.value,) {
let nv = Def(false,x,v.lbox());
nv
} else {parser.bad_pattern("(Var(x),)")} };
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("TOP");
// TOP -> plain term: pass-through.
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack();
if let ((x),)=(_item0_.value,) { x } else {parser.bad_pattern("((x),)")} };
parser1.Rules.push(rule);
// The three LAMSYM rules accept the alternative lambda spellings; their
// semantic value is irrelevant, so each yields Term::default().
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("LAMSYM");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
// Augmented start rule; its value is discarded.
rule = ZCRProduction::<Term,Vec<LBox<Term>>>::new_skeleton("START");
rule.Ruleaction = |parser|{ let mut _item0_ = parser.popstack(); <Term>::default()};
parser1.Rules.push(rule);
// No dedicated error symbol; error recovery resynchronizes on ";".
parser1.Errsym = "";
parser1.resynch.insert(";");
// Unpack the packed parse table: bits 32-47 hold the symbol index,
// bits 48-63 the state; decode_action interprets the remaining bits.
for i in 0..214 {
let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;
let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;
parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));
}
for s in SYMBOLS { parser1.Symset.insert(s); }
load_extras(&mut parser1);
return parser1;
} //make_parser
/// Runs the parser over `lexer` to completion.  The resulting `Term` is
/// returned either way: wrapped in `Ok` when no parse error was recorded,
/// and in `Err` when `error_occurred()` reports one.
pub fn parse_with<'t>(parser:&mut ZCParser<Term,Vec<LBox<Term>>>, lexer:&mut dyn Tokenizer<'t,Term>) -> Result<Term,Term>
{
  let outcome = parser.parse(lexer);
  match parser.error_occurred() {
    false => Ok(outcome),
    true => Err(outcome),
  }
}//parse_with public function
/// Training-mode variant of `parse_with`: runs `parse_train`, which uses
/// `parserpath` for rustlr's interactive error-training facility.  Returns
/// the parsed `Term` in `Ok` on a clean parse, in `Err` otherwise.
pub fn parse_train_with<'t>(parser:&mut ZCParser<Term,Vec<LBox<Term>>>, lexer:&mut dyn Tokenizer<'t,Term>, parserpath:&str) -> Result<Term,Term>
{
  let outcome = parser.parse_train(lexer,parserpath);
  if parser.error_occurred() { Err(outcome) } else { Ok(outcome) }
}//parse_train_with public function
// Lexical Scanner using RawToken and StrTokenizer
// Lexical scanner for the "untyped" grammar, adapting rustlr's raw
// StrTokenizer to the grammar's terminal names.
pub struct untypedlexer<'t> {
stk: StrTokenizer<'t>, // underlying raw tokenizer over the source text
keywords: HashSet<&'static str>, // reserved words recognized as their own terminals
lexnames: HashMap<&'static str,&'static str>, // symbol-to-terminal renames (e.g. "." -> "DOT")
shared_state: Rc<RefCell<Vec<LBox<Term>>>>, // state shareable with the parser's exstate
}
impl<'t> untypedlexer<'t>
{
  /// Builds a lexer directly over a string slice.
  pub fn from_str(s:&'t str) -> untypedlexer<'t> {
    Self::new(StrTokenizer::from_str(s))
  }
  /// Builds a lexer over a prepared `LexSource` (e.g. a loaded file).
  pub fn from_source(s:&'t LexSource<'t>) -> untypedlexer<'t> {
    Self::new(StrTokenizer::from_source(s))
  }
  /// Wraps a `StrTokenizer`, registering this grammar's reserved words, its
  /// single-character symbols, and the "." -> "DOT" terminal alias.
  pub fn new(mut stk:StrTokenizer<'t>) -> untypedlexer<'t> {
    let keywords = HashSet::from(
      ["let","_WILDCARD_TOKEN_","Lam","in","weak","lazy","lam","define","lambda","CBV"]);
    let lexnames = HashMap::from([(".","DOT")]);
    // Register the grammar's one-character operator/punctuation symbols.
    // This grammar uses no two- or three-character symbols.
    for c in "()[]=;.".chars() { stk.add_single(c); }
    let shared_state = Rc::new(RefCell::new(Vec::new()));
    untypedlexer {stk, keywords, lexnames, shared_state}
  }
}
// Maps raw lexical tokens onto the grammar's terminal names and semantic
// values.  NOTE: match-arm order below is significant — the keyword guard
// must run before the generic Alphanum arm.
impl<'t> Tokenizer<'t,Term> for untypedlexer<'t>
{
// Produces the next terminal token, or None at end of input.
fn nextsym(&mut self) -> Option<TerminalToken<'t,Term>> {
let tokopt = self.stk.next_token();
if let None = tokopt {return None;}
let token = tokopt.unwrap();
match token.0 {
// Reserved word: emit it under its own name (via lexnames if a rename
// exists; the only lexnames entry here is "." -> "DOT", so keywords
// pass through unchanged).
RawToken::Alphanum(sym) if self.keywords.contains(sym) => {
let truesym = self.lexnames.get(sym).unwrap_or(&sym);
Some(TerminalToken::from_raw(token,truesym,<Term>::default()))
},
// Numeric literal: INTEGER terminal carrying a Const value.
RawToken::Num(n) => Some(TerminalToken::from_raw(token,"INTEGER",Const(n))),
// Special identifier "liang": its own Liang terminal.
RawToken::Alphanum("liang") => Some(TerminalToken::from_raw(token,"Liang",Nothing)),
// Any other identifier: ID terminal carrying a Var value.
RawToken::Alphanum(a) => Some(TerminalToken::from_raw(token,"ID",Var(str16::from(a)))),
// Renamed symbol (here only "." -> "DOT").
RawToken::Symbol(s) if self.lexnames.contains_key(s) => {
let tname = self.lexnames.get(s).unwrap();
Some(TerminalToken::from_raw(token,tname,<Term>::default()))
},
// Other symbols pass through under their own spelling.
RawToken::Symbol(s) => Some(TerminalToken::from_raw(token,s,<Term>::default())),
// Generated fallback arm; unreachable (the Alphanum arm above catches
// all identifiers) and permitted by allow(unreachable_patterns).
RawToken::Alphanum(s) => Some(TerminalToken::from_raw(token,s,<Term>::default())),
// Anything else (whitespace classes, errors, etc.): emit under the raw
// token's static name with a default value.
_ => { let _rrodb=token.0.to_staticstr(); Some(TerminalToken::from_raw(token,_rrodb,<Term>::default())) },
}
}
// Position/line accessors simply delegate to the underlying StrTokenizer.
fn linenum(&self) -> usize {self.stk.line()}
fn column(&self) -> usize {self.stk.column()}
fn position(&self) -> usize {self.stk.current_position()}
fn current_line(&self) -> &str {self.stk.current_line()}
fn get_line(&self,i:usize) -> Option<&str> {self.stk.get_line(i)}
fn get_slice(&self,s:usize,l:usize) -> &str {self.stk.get_slice(s,l)}
}//impl Tokenizer
// Generated hook for grammar-level parser augmentation; intentionally empty
// for this grammar.  The closing-brace comment is a rustlr marker — leave it
// intact.
fn load_extras(parser:&mut ZCParser<Term,Vec<LBox<Term>>>)
{
}//end of load_extras: don't change this line as it affects augmentation