#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_assignments)]
#![allow(unused_doc_comments)]
#![allow(unused_imports)]
use std::fmt::Display;
use std::default::Default;
use std::collections::{HashMap,HashSet,BTreeSet};
use std::io::{self,Read,Write,BufReader,BufRead};
use std::cell::{RefCell,Ref,RefMut};
use std::hash::{Hash,Hasher};
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use std::mem;
use crate::Stateaction::*;
/// Default operator precedence assigned to every newly created grammar symbol.
pub const DEFAULTPRECEDENCE:i32 = 20;
/// Global trace level for diagnostics; higher values print more (0 = silent).
pub const TRACE:usize = 0;
/// A grammar symbol, either a terminal or a non-terminal.
#[derive(Clone)]
pub struct Gsym {
pub sym : String, // symbol name as written in the grammar
pub rusttype : String, pub terminal : bool, // Rust type of the semantic value; terminal flag
pub label : String, pub precedence : i32, } // optional RHS label; operator precedence (negative = right-assoc, see parse_grammar)
impl Gsym
{
    /// Creates a symbol named `s`; `isterminal` marks it as a terminal.
    /// New symbols start with no label, semantic type "String", and the
    /// default precedence level.
    pub fn new(s:&str,isterminal:bool) -> Gsym {
        Gsym {
            precedence : DEFAULTPRECEDENCE,
            rusttype : "String".to_owned(),
            label : String::new(),
            terminal : isterminal,
            sym : s.to_owned(),
        }
    }
    /// Attaches an RHS label to this symbol occurrence.
    pub fn setlabel(&mut self, la:&str)
    {
        self.label = la.to_owned();
    }
    /// Sets the Rust type carried by this symbol's semantic value.
    pub fn settype(&mut self, rt:&str)
    {
        self.rusttype = rt.to_owned();
    }
    /// Sets the operator precedence level.
    pub fn setprecedence(&mut self, p:i32)
    {
        self.precedence = p;
    }
}
/// A production rule: `lhs --> rhs`, with a semantic action (Rust code kept
/// as a string) and the rule's precedence level.
pub struct Grule {
pub lhs : Gsym, pub rhs : Vec<Gsym>, pub action : String, pub precedence : i32, }
impl Grule
{
    /// Builds an empty rule for non-terminal `lh`: no RHS symbols, no
    /// semantic action, precedence level 0.
    pub fn new_skeleton(lh:&str) -> Grule
    {
        let lhs = Gsym::new(lh,false);
        Grule {
            lhs,
            rhs : Vec::new(),
            action : String::new(),
            precedence : 0,
        }
    }
}
/// Prints a production in readable form: lhs, labeled RHS symbols, then the
/// semantic action and precedence level.
pub fn printrule(rule:&Grule) {
    print!("PRODUCTION: {} --> ",rule.lhs.sym);
    for s in rule.rhs.iter() {
        match s.label.len() {
            0 => print!("{}",s.sym),
            _ => print!("{}:{}",s.sym,s.label),
        }
        print!(" ");
    }
    println!("{{ {}, preclevel {}",rule.action,rule.precedence);
}
/// A complete grammar: symbol table, production rules, and the computed
/// Nullable and First sets used by the LR state generator.
pub struct Grammar
{
pub name : String,
pub Symbols : Vec<Gsym>, // all symbols, indexed via Symhash
pub Symhash : HashMap<String,usize>, // symbol name -> index into Symbols
pub Rules: Vec<Grule>,
pub topsym : String, // the start (top) non-terminal
pub Nullable : HashSet<String>, // non-terminals that can derive the empty string
pub First : HashMap<String,HashSet<String>>, // First set per non-terminal
pub Rulesfor: HashMap<String,HashSet<usize>>, pub Absyntype : String, pub Externtype : String, pub Extras : String, } // rule indices per lhs; abstract-syntax value type; external type; verbatim extra code for the generated parser
impl Grammar
{
/// Creates an empty grammar; the abstract-syntax type defaults to "i64".
pub fn new() -> Grammar
{
Grammar {
name : String::from(""), Symbols: Vec::new(), Symhash: HashMap::new(),
Rules: Vec::new(), topsym : String::default(), Nullable : HashSet::new(),
First : HashMap::new(),
Rulesfor: HashMap::new(),
Absyntype:String::from("i64"), Externtype:String::from(""), Extras: String::new(),
}
}
/// True iff `s` is a declared non-terminal symbol of this grammar.
pub fn nonterminal(&self,s:&str) -> bool
{
match self.Symhash.get(s) {
Some(symi) => !self.Symbols[*symi].terminal,
_ => false,
}
}
/// True iff `s` is a declared terminal symbol of this grammar.
pub fn terminal(&self,s:&str) -> bool
{
match self.Symhash.get(s) {
Some(symi) => self.Symbols[*symi].terminal,
_ => false,
}
}
/// Parses a grammar specification from `filename` (falling back to stdin
/// if the file cannot be opened), filling in symbols, precedences, rules
/// and pass-through extras. Panics on malformed input. Declarations are
/// staged: symbol/type declarations (stage 0), left/right precedences
/// (stage 1), then productions (stage 2). Finally adds the reserved
/// START/EOF symbols and the augmenting rule START --> topsym.
pub fn parse_grammar(&mut self, filename:&str)
{
let mut reader = match File::open(filename) {
Ok(f) => { Some(BufReader::new(f)) },
_ => { println!("cannot open file, reading from stdin..."); None},
};
let mut line=String::from("");
let mut atEOF = false;
let mut linenum = 0;
let mut linelen = 0;
let mut stage = 0;
// multiline: currently inside a "==>" production continued over several
// physical lines; foundeol: the terminating "<==" has been seen, so the
// accumulated line is processed on the next iteration without reading.
let mut multiline = false; let mut foundeol = false;
while !atEOF
{
if !multiline {line = String::new();}
if foundeol { multiline=false;} else {
let result = if let Some(br)=&mut reader {br.read_line(&mut line)}
else {std::io::stdin().read_line(&mut line)};
match result {
// end of input (or read error) is represented as a synthetic "EOF" line
Ok(0) | Err(_) => { line = String::from("EOF"); },
Ok(n) => {linenum+=1;},
} }
linelen = line.len();
// continuation of a multi-line production: accumulate until "<==" found
if multiline && linelen>1 && &line[0..1]!="#" {
if linelen==3 && &line[0..3]=="EOF" {
panic!("MULTI-LINE GRAMMAR PRODUCTION DID NOT END WITH <==");
}
match line.rfind("<==") {
None => {}, Some(eoli) => {
line.truncate(eoli);
foundeol = true;
}
} }
// lines starting with '!' are copied verbatim into Extras
else if linelen>1 && &line[0..1]=="!" {
self.Extras.push_str(&line[1..]);
}
// ordinary directive or production; '#' lines are comments
else if linelen>1 && &line[0..1]!="#" {
let toksplit = line.split_whitespace();
let stokens:Vec<&str> = toksplit.collect();
match stokens[0] {
"use" => {
self.Extras.push_str("use ");
self.Extras.push_str(stokens[1]);
self.Extras.push_str("\n");
},
"extern" if stokens.len()>2 && stokens[1]=="crate" => {
self.Extras.push_str("extern crate ");
self.Extras.push_str(stokens[2]);
self.Extras.push_str("\n");
},
"!" => {
let pbi = line.find('!').unwrap();
self.Extras.push_str(&line[pbi+1..]);
self.Extras.push_str("\n");
},
"gramname" | "grammarname" | "grammar" => {
self.name = String::from(stokens[1]);
},
"EOF" => {atEOF=true},
// terminal declarations: one symbol per whitespace-separated token
("terminal" | "terminals") if stage==0 => {
for i in 1..stokens.len() {
let newterm = Gsym::new(stokens[i],true);
self.Symhash.insert(stokens[i].to_owned(),self.Symbols.len());
self.Symbols.push(newterm);
if TRACE>2 {println!("terminal {}",stokens[i]);}
}
}, "typedterminal" if stage==0 => {
let mut newterm = Gsym::new(stokens[1],true);
if stokens.len()>2 {newterm.settype(stokens[2]);}
self.Symhash.insert(stokens[1].to_owned(),self.Symbols.len());
self.Symbols.push(newterm);
}, "nonterminal" if stage==0 => {
let mut newterm = Gsym::new(stokens[1],false);
if stokens.len()>2 {newterm.settype(stokens[2]);}
self.Symhash.insert(stokens[1].to_owned(),self.Symbols.len());
self.Symbols.push(newterm);
}, "nonterminals" if stage==0 => {
for i in 1..stokens.len() {
let newterm = Gsym::new(stokens[i],false);
self.Symhash.insert(stokens[i].to_owned(),self.Symbols.len());
self.Symbols.push(newterm);
if TRACE>2 {println!("nonterminal {}",stokens[i]);}
}
},
// the start symbol must be an already-declared non-terminal
"topsym" | "startsymbol" if stage==0 => {
match self.Symhash.get(stokens[1]) {
Some(tsi) if *tsi<self.Symbols.len() && !self.Symbols[*tsi].terminal => {
self.topsym = String::from(stokens[1]);
},
_ => { panic!("top symbol {} not found in declared non-terminals; check ordering of declarations, line {}",stokens[1],linenum);
},
} }, "absyntype" | "valuetype" if stage==0 => {
self.Absyntype = String::from(stokens[1]);
if TRACE>2 {println!("abstract syntax type is {}",stokens[1]);}
},
"externtype" | "externaltype" if stage==0 => {
self.Externtype = String::from(stokens[1]);
if TRACE>2 {println!("external structure type is {}",stokens[1]);}
},
// associativity/precedence: "left"/"right" symbol level; right-assoc
// is encoded as a negative precedence value
"left" | "right" if stage<2 => {
if stage==0 {stage=1;}
let mut preclevel:i32 = 0;
if let Ok(n)=stokens[2].parse::<i32>() {preclevel = n;}
else {panic!("did not read precedence level on line {}",linenum);}
if stokens[0]=="right" && preclevel>0 {preclevel = -1 * preclevel;}
if let Some(index) = self.Symhash.get(stokens[1]) {
self.Symbols[*index].precedence = preclevel;
}
}, "recover" | "flexname" | "resync" => {}, LHS if (stokens[1]=="-->" || stokens[1]=="::=" || stokens[1]=="==>") => {
// a production; "==>" begins a multi-line rule ended by "<=="
if !foundeol && stokens[1]=="==>" {multiline=true; continue;}
else if foundeol {foundeol=false;}
if stage<2 {stage=2;}
let symindex = match self.Symhash.get(LHS) {
Some(smi) if *smi<self.Symbols.len() && !self.Symbols[*smi].terminal => smi,
_ => {panic!("unrecognized non-terminal symbol {}, line {}",LHS,linenum);},
};
let lhsym = self.Symbols[*symindex].clone();
let mut rhsyms:Vec<Gsym> = Vec::new();
let mut semaction = "}";
let mut i:usize = 2;
// the rule's precedence is that of its RHS symbol with the largest
// precedence magnitude
let mut maxprec:i32 = 0;
while i<stokens.len() {
let strtok = stokens[i];
i+=1;
// '{' starts the semantic action: take the rest of the raw line
if strtok == "{" {
let position = line.find('{').unwrap();
semaction = line.split_at(position+1).1;
break;
}
// a RHS token may carry a label: sym:label
let toks:Vec<&str> = strtok.split(':').collect();
if TRACE>2&&toks.len()>1 {println!("see labeled token {}",strtok);}
match self.Symhash.get(toks[0]) {
None => {panic!("unrecognized grammar symbol {}, line {}",toks[0],linenum); },
Some(symi) => {
let sym = &self.Symbols[*symi];
let mut newsym = sym.clone();
if toks.len()>1 { newsym.setlabel(toks[1]); }
if maxprec.abs() < newsym.precedence.abs() {
maxprec=newsym.precedence;
}
rhsyms.push(newsym);
}
} } let rule = Grule {
lhs : lhsym,
rhs : rhsyms,
action: semaction.to_owned(),
precedence : maxprec,
};
if TRACE>2 {printrule(&rule);}
self.Rules.push(rule);
}, _ => {panic!("error parsing grammar on line {}, grammar stage {}",linenum,stage);},
} } } if self.Symhash.contains_key("START") || self.Symhash.contains_key("EOF")
{
panic!("Error in grammar: START and EOF are reserved symbols");
}
// augment the grammar: START --> topsym, with EOF as terminal sentinel
let startnt = Gsym::new("START",false);
let eofterm = Gsym::new("EOF",true);
self.Symhash.insert(String::from("START"),self.Symbols.len());
self.Symhash.insert(String::from("EOF"),self.Symbols.len()+1);
self.Symbols.push(startnt.clone());
self.Symbols.push(eofterm.clone());
let topgsym = &self.Symbols[*self.Symhash.get(&self.topsym).unwrap()];
let startrule = Grule { lhs:startnt,
rhs:vec![topgsym.clone()], action: String::default(),
precedence : 0,
};
self.Rules.push(startrule); if TRACE>0 {println!("{} rules in grammar",self.Rules.len());}
// the external type defaults to the abstract-syntax type
if self.Externtype.len()<1 {self.Externtype = self.Absyntype.clone();} }}
impl Grammar
{
/// Computes the Nullable set by fixpoint iteration: a non-terminal is
/// Nullable if some rule for it has an all-Nullable (or empty) RHS.
/// As a side effect, also populates Rulesfor (rule indices per lhs).
pub fn compute_NullableRf(&mut self)
{
let mut changed = true;
let mut rulei:usize = 0;
while changed
{
changed = false;
rulei = 0;
for rule in &self.Rules
{
// addornot stays true only if every RHS symbol is a Nullable non-terminal
let mut addornot = true;
for gs in &rule.rhs
{
if gs.terminal || !self.Nullable.contains(&gs.sym) {addornot=false;}
} if (addornot) {
changed = self.Nullable.insert(rule.lhs.sym.clone()) || changed;
if TRACE>3 {println!("{} added to Nullable",rule.lhs.sym);}
}
// record rule index under its lhs (idempotent across iterations)
if let None = self.Rulesfor.get(&rule.lhs.sym) {
self.Rulesfor.insert(rule.lhs.sym.clone(),HashSet::new());
}
let ruleset = self.Rulesfor.get_mut(&rule.lhs.sym).unwrap();
ruleset.insert(rulei);
rulei += 1;
} } }
/// Computes First sets by fixpoint iteration. Uses RefCell-wrapped sets
/// so one non-terminal's set can be read while another's is mutably
/// borrowed; results are moved into self.First at the end.
/// Requires compute_NullableRf to have run first.
pub fn compute_FirstIM(&mut self)
{
let mut FIRST:HashMap<String,RefCell<HashSet<String>>> = HashMap::new();
let mut changed = true;
while changed
{
changed = false;
for rule in &self.Rules
{
let ref nt = rule.lhs.sym; if !FIRST.contains_key(nt) {
changed = true;
FIRST.insert(String::from(nt),RefCell::new(HashSet::new()));
} let mut Firstnt = FIRST.get(nt).unwrap().borrow_mut();
let mut i = 0;
let mut isnullable = true;
// scan the RHS while the prefix so far is nullable
while i< rule.rhs.len() && isnullable
{
let gs = &rule.rhs[i];
if gs.terminal {
changed=Firstnt.insert(gs.sym.clone()) || changed;
isnullable = false;
}
// skip self-reference: nt's own set is already mutably borrowed
else if &gs.sym!=nt { if let Some(firstgs) = FIRST.get(&gs.sym) {
let firstgsb = firstgs.borrow();
for sym in firstgsb.iter() {
changed=Firstnt.insert(sym.clone())||changed;
}
} } if gs.terminal || !self.Nullable.contains(&gs.sym) {isnullable=false;}
i += 1;
} } } for nt in FIRST.keys() {
if let Some(rcell) = FIRST.get(nt) {
self.First.insert(nt.to_owned(),rcell.take());
}
}
}
/// Computes First of the symbol sequence `Gs` followed by lookahead `la`:
/// First of each symbol while the prefix is nullable, plus `la` itself
/// if the whole sequence is nullable. Used for LR(1) closure lookaheads.
pub fn Firstseq(&self, Gs:&[Gsym], la:&str) -> HashSet<String>
{
let mut Fseq = HashSet::new();
let mut i = 0;
let mut nullable = true;
while nullable && i<Gs.len()
{
if (Gs[i].terminal) {Fseq.insert(Gs[i].sym.clone()); nullable=false; }
else {
let firstgsym = self.First.get(&Gs[i].sym).unwrap();
for s in firstgsym { Fseq.insert(s.to_owned()); }
if !self.Nullable.contains(&Gs[i].sym) {nullable=false;}
}
i += 1;
} if nullable {Fseq.insert(la.to_owned());}
Fseq
}
}
/// An LR(1) item: rule index `ri`, dot position `pi` within that rule's RHS,
/// and lookahead symbol `la`.
#[derive(Clone,PartialEq,Eq,Hash,Debug)]
pub struct LRitem
{
ri: usize, pi: usize, la: String, }
/// Prints rule `ri` of the grammar followed by lookahead `la`; prints a
/// warning and returns if `ri` is out of range.
pub fn printrulela(ri:usize, Gmr:&Grammar, la:&str)
{
    if ri>=Gmr.Rules.len() {
        println!("printing invalid rule number {}",ri);
        return;
    }
    let rule = &Gmr.Rules[ri];
    print!(" (Rule {}) {} --> ",ri,&rule.lhs.sym);
    for gsym in rule.rhs.iter() {
        print!("{} ",gsym.sym);
    }
    println!(" , lookahead {}",la);
}
/// Prints an LR(1) item, marking the dot position with '.' inside (or after)
/// the rule's RHS, followed by the item's lookahead.
pub fn printitem(item:&LRitem, Gmr:&Grammar)
{
    let rule = &Gmr.Rules[item.ri];
    print!(" ({}) {} --> ",item.ri,&rule.lhs.sym);
    for (position,gsym) in rule.rhs.iter().enumerate() {
        if position==item.pi {print!(".");}
        print!("{} ",gsym.sym);
    }
    // dot at the very end: the item is a completed (reduce) item
    if rule.rhs.len()==item.pi {print!(". ");}
    println!(", {}",&item.la);
}
/// A set of LR(1) items; each LR1State holds one as its (closed) item set.
pub type Itemset = HashSet<LRitem>;
/// True iff `s1` and `s2` contain exactly the same items.
pub fn stateeq(s1:&Itemset, s2:&Itemset) -> bool
{
    s1.len()==s2.len() && s1.iter().all(|item| s2.contains(item))
}
fn extract_core(items:&Itemset) -> HashSet<(usize,usize)> {
let mut core0 = HashSet::new();
for LRitem{ri:r, pi:p, la} in items { core0.insert((*r,*p)); }
core0
}
fn sub_core(s1:&Itemset, s2:&Itemset) -> bool {
for LRitem{ri:r1,pi:p1,la:la1} in s1
{
let mut bx = false;
for LRitem{ri:r2,pi:p2,la} in s2
{
if r1==r2 && p1==p2 {bx=true; break;}
}
if !bx {return false;}
}
return true;
}
/// True iff `s1` and `s2` have identical cores (same (rule,position) pairs,
/// lookaheads ignored).
fn eq_core(s1:&Itemset, s2:&Itemset) -> bool
{
    // HashSet equality is exactly same-length plus mutual containment
    extract_core(s1) == extract_core(s2)
}
/// An LR(1) parser state: its index in the state table, its item set, and
/// the set of lhs non-terminals occurring in its items (used to build a
/// cheap lookup key for finding candidate equal/mergeable states).
#[derive(Clone,Debug)]
pub struct LR1State
{
index: usize, items:Itemset,
lhss: BTreeSet<String>, }
impl LR1State
{
pub fn new() -> LR1State
{
LR1State {
index : 0, items : HashSet::new(),
lhss: BTreeSet::new(),
}
}
pub fn insert(&mut self, item:LRitem, lhs:&str) -> bool
{
let inserted = self.items.insert(item);
self.lhss.insert(String::from(lhs));
inserted
}
pub fn hashval(&self) -> String {
let mut key=self.items.len().to_string(); for s in &self.lhss {key.push_str(s);}
key
}
pub fn hashval_lalr(&self) -> String {
let mut key = extract_core(&self.items).len().to_string(); for s in &self.lhss {key.push_str(s);}
key
}
pub fn contains(&self, x:&LRitem) -> bool {self.items.contains(x)}
fn core_eq(&self, state2:&LR1State) -> bool { eq_core(&self.items,&state2.items) }
fn merge_states(&mut self, state2:&LR1State) {
for item in &state2.items {self.items.insert(item.clone());}
}
}
/// State equality compares item sets only; `index` and `lhss` are derived
/// bookkeeping and deliberately ignored.
impl PartialEq for LR1State
{
fn eq(&self, other:&LR1State) -> bool
{stateeq(&self.items,&other.items)}
// `ne` is intentionally left to its default implementation (the negation
// of `eq`); hand-writing it was redundant (clippy::partialeq_ne_impl).
}
impl Eq for LR1State {}
/// Prints a state's index followed by each of its items.
pub fn printstate(state:&LR1State,Gmr:&Grammar)
{
    println!("state {}:",state.index);
    state.items.iter().for_each(|item| printitem(item,Gmr));
}
/// Computes the LR(1) closure of `state`. `state` itself is consumed and
/// used as the worklist: items are taken from it one at a time and moved
/// into `closed`; whenever the dot precedes a non-terminal N, a fresh item
/// (dot at 0) is queued for every rule of N, with lookaheads drawn from
/// Firstseq of the symbols after N followed by the item's own lookahead.
pub fn stateclosure(mut state:LR1State, Gmr:&Grammar)
-> LR1State {
let mut closed =LR1State::new(); closed.index = state.index;
while state.items.len()>0
{
// take an arbitrary unprocessed item out of the worklist
let nextitem = state.items.iter().next().unwrap().clone();
let item = state.items.take(&nextitem).unwrap();
let (ri,pi,la) = (item.ri,item.pi,&item.la);
let rulei = &Gmr.Rules[ri]; let lhs = &rulei.lhs.sym;
closed.insert(nextitem,lhs); if pi<rulei.rhs.len() && !rulei.rhs[pi].terminal {
let nti = &rulei.rhs[pi]; let lookaheads=&Gmr.Firstseq(&rulei.rhs[pi+1..],la);
for rulent in Gmr.Rulesfor.get(&nti.sym).unwrap() {
for lafollow in lookaheads
{
let newitem = LRitem {
ri: *rulent,
pi: 0,
la: lafollow.clone(),
};
// only queue items not already present in the closure
if !closed.items.contains(&newitem) {
state.insert(newitem,&nti.sym); }
} } } } closed
}
/// One parser-table action: Shift/Gotonext carry a target state index,
/// Reduce carries a rule index, Accept ends the parse successfully, and
/// Error carries a message.
#[derive(Clone,PartialEq,Eq,Debug)]
pub enum Stateaction {
Shift(usize), Reduce(usize), Gotonext(usize), Accept,
Error(String),
}
/// LR(1)/LALR state-machine generator: owns the grammar, the generated
/// states, a hash-keyed state lookup, and the finite-state action table.
pub struct Statemachine {
pub Gmr: Grammar,
pub States: Vec<LR1State>,
pub Statelookup: HashMap<String,BTreeSet<usize>>, // state hash key -> candidate state indices
pub FSM: Vec<HashMap<String,Stateaction>>, // per-state action row: symbol -> action
pub lalr: bool, // true = LALR (merge states with equal cores)
pub Open: Vec<usize>, } // LALR worklist of state indices to (re)process
impl Statemachine
{
/// Creates a generator for `gram` with pre-allocated state/FSM vectors.
/// Defaults to full LR(1); set `lalr` before calling generatefsm.
pub fn new(gram:Grammar) -> Statemachine
{
Statemachine {
Gmr: gram,
States: Vec::with_capacity(8*1024), Statelookup: HashMap::with_capacity(1024),
FSM: Vec::with_capacity(8*1024),
lalr: false,
Open: Vec::new(), }
}
/// Registers the (closed) state reached from state `psi` on `nextsym`.
/// First searches Statelookup for an existing equivalent state — LALR:
/// same core, merging lookaheads and reopening the state if it grew;
/// LR(1): exact item-set equality — otherwise appends a new state. Then
/// records the Shift (terminal) or Gotonext (non-terminal) transition in
/// FSM[psi], unless an existing Reduce wins by precedence/associativity.
fn addstate(&mut self, mut state:LR1State, psi:usize, nextsym:String)
{
let newstateindex = self.States.len(); state.index = newstateindex;
let lookupkey = if self.lalr {state.hashval_lalr()} else {state.hashval()};
if let None=self.Statelookup.get(&lookupkey) {
self.Statelookup.insert(lookupkey.clone(),BTreeSet::new());
}
let indices = self.Statelookup.get_mut(&lookupkey).unwrap();
let mut toadd = newstateindex; if self.lalr {
// LALR: merge into an existing state with the same core
for i in indices.iter()
{
if state.core_eq(&self.States[*i]) {
toadd=*i;
let mut stateclone = LR1State {
index : toadd,
items : state.items.clone(),
lhss: BTreeSet::new(),
};
stateclone.merge_states(&self.States[toadd]);
// merging added lookaheads: re-close and queue for reprocessing
if stateclone.items.len() > self.States[toadd].items.len() {
self.States[toadd] = stateclosure(stateclone,&self.Gmr);
self.Open.push(toadd);
if TRACE>3 { print!("===> MERGED STATE: ");
printstate(&self.States[toadd],&self.Gmr);
}
} break;
} } } else { for i in indices.iter()
{
if &state==&self.States[*i] {toadd=*i; break; } }
}
if TRACE==2 {println!("transition to state {} from state {}, symbol {}..",toadd,psi,&nextsym);}
// genuinely new state: record it, give it an empty FSM row, and (LALR)
// queue it on the Open worklist
if toadd==newstateindex { if TRACE>2 {printstate(&state,&self.Gmr);}
indices.insert(newstateindex); self.States.push(state);
self.FSM.push(HashMap::new()); if self.lalr {self.Open.push(newstateindex)}
}
let gsymbol = &self.Gmr.Symbols[*self.Gmr.Symhash.get(&nextsym).unwrap()];
let mut newaction = Stateaction::Gotonext(toadd);
if gsymbol.terminal {newaction=Stateaction::Shift(toadd);}
let currentaction = self.FSM[psi].get(&nextsym);
let mut changefsm = true;
// shift-reduce conflict: keep the existing Reduce when precedences are
// equal and positive (left-assoc) or the rule's magnitude is larger.
// NOTE(review): the TRACE>0 message prints whenever any Reduce entry
// already exists, even when nothing is overridden — confirm intended.
match currentaction { Some(Reduce(ri2)) => {
let prec2 = self.Gmr.Rules[*ri2].precedence;
let prec1 = gsymbol.precedence;
if prec1==prec2 && prec1>0 {changefsm=false;} else if prec2.abs()>prec1.abs() {changefsm=false;} if TRACE>0 {println!("shift-reduce conflict resolved by operator precedence/associativity:"); printrulela(*ri2,&self.Gmr,&nextsym); }
},
Some(Accept) => {changefsm=false;},
_ => {},
} if changefsm {self.FSM[psi].insert(nextsym,newaction);}
}
/// Installs the Reduce action (or Accept, for the augmenting rule on EOF)
/// for a completed item into FSM[si]. Reduce-reduce conflicts resolve in
/// favor of the lower-numbered rule; shift-reduce conflicts resolve by
/// precedence/associativity (equal negative precedence keeps the Shift,
/// i.e. right-assoc). Written as an associated function taking FSM
/// explicitly so makegotos can split its borrows of self.
fn addreduce(FSM: &mut Vec<HashMap<String,Stateaction>>, Gmr:&Grammar, item:&LRitem, si:usize)
{
let currentaction = FSM[si].get(&item.la);
let mut changefsm = true;
let ri1 = &item.ri;
match currentaction {
Some(Reduce(ri2)) if ri2<ri1 => {
changefsm=false;
println!("Reduce-Reduce Conflict conflicted detected between rules {} and {}, resolved in favor of {}",ri2,ri1,ri2);
printrulela(*ri1,Gmr,&item.la); printrulela(*ri2,Gmr,&item.la);
},
Some(Reduce(ri2)) if ri2>ri1 => {
println!("Reduce-Reduce Conflict conflicted detected between rules {} and {}, resolved in favor of {}",ri2,ri1,ri1);
printrulela(*ri1,Gmr,&item.la); printrulela(*ri2,Gmr,&item.la);
},
Some(Reduce(ri2)) if ri2==ri1 => {changefsm=false;},
Some(Accept) => {changefsm = false;},
Some(Shift(_)) => { let prec1 = Gmr.Rules[item.ri].precedence;
let prec2 = Gmr.Symbols[*Gmr.Symhash.get(&item.la).unwrap()].precedence;
if prec1==prec2 && prec1<0 {changefsm=false;} else if prec2.abs()>prec1.abs() {changefsm=false;} if TRACE>0 {println!("Shift-Reduce conflict resolved by operator precedence/associativity:"); printrulela(*ri1,Gmr,&item.la); }
},
_ => {},
} if changefsm { if item.ri==Gmr.Rules.len()-1 && item.la=="EOF" { FSM[si].insert(item.la.clone(),Stateaction::Accept);
}
else {
if TRACE>1 {println!("++adding Reduce({}) at state {}, lookahead {}",item.ri,si,&item.la);}
FSM[si].insert(item.la.clone(),Stateaction::Reduce(item.ri));
}
} }
/// Processes state `si`: items with the dot before a symbol are grouped by
/// that symbol into kernel goto-states (closed and registered through
/// addstate); completed items produce Reduce/Accept entries via addreduce.
fn makegotos(&mut self, si:usize)
{
let ref state = self.States[si];
let mut newstates:HashMap<String,LR1State> = HashMap::new();
let mut keyvec:Vec<String> = Vec::new(); for item in &state.items
{
let rule = self.Gmr.Rules.get(item.ri).unwrap();
if item.pi<rule.rhs.len() { let ref nextsym = rule.rhs[item.pi].sym;
if let None = newstates.get(nextsym) {
newstates.insert(nextsym.to_owned(),LR1State::new());
keyvec.push(nextsym.clone());
}
let symstate = newstates.get_mut(nextsym).unwrap();
// same item with the dot advanced past nextsym
let newitem = LRitem {
ri : item.ri,
pi : item.pi+1,
la : item.la.clone(),
};
let lhssym = &self.Gmr.Rules[item.ri].lhs.sym;
symstate.insert(newitem,lhssym);
} else {
Statemachine::addreduce(&mut self.FSM,&self.Gmr,item,si);
} } for key in keyvec
{
let kernel = newstates.remove(&key).unwrap();
let fullstate = stateclosure(kernel,&self.Gmr);
self.addstate(fullstate,si,key);
}
}
/// Builds the whole machine from the start item (the augmenting rule —
/// always the last rule — with lookahead EOF). LR(1) sweeps States in
/// order; LALR drives off the Open worklist so merged states get
/// reprocessed.
pub fn generatefsm(&mut self)
{
let mut startstate=LR1State::new();
startstate.insert( LRitem {
ri : self.Gmr.Rules.len()-1, pi : 0,
la : "EOF".to_owned(), },"START");
startstate = stateclosure(startstate,&self.Gmr);
self.States.push(startstate); self.FSM.push(HashMap::new()); let mut closed:usize = 0;
if !self.lalr {
while closed<self.States.len()
{
self.makegotos(closed);
closed += 1;
} } else { self.Open.push(0);
while closed<self.Open.len()
{
let si = self.Open[closed]; self.makegotos(si);
closed += 1;
}
} }
/// Writes a source-form parser to `filename`: a make_parser() function
/// containing one RGrule (with a generated Ruleaction closure that pops
/// the RHS values) per grammar rule, then explicit RSM insertions — split
/// into chained make_parserN helper functions every 512 table lines to
/// keep generated functions a manageable size.
pub fn writefsm(&self, filename:&str)->Result<(),std::io::Error>
{
let mut fd = File::create(filename)?;
write!(fd,"//Parser generated by RustLr\n
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_assignments)]
extern crate RustLr;
use RustLr::{{Parser,RGrule,Stateaction}};\n")?;
write!(fd,"{}\n",&self.Gmr.Extras)?; let ref absyn = self.Gmr.Absyntype;
write!(fd,"pub fn make_parser() -> Parser<{}>",absyn)?;
write!(fd,"\n{{\n")?;
write!(fd," let mut parser1:Parser<{}> = Parser::new({},{});\n",absyn,self.Gmr.Rules.len(),self.States.len())?;
write!(fd," let mut rule = RGrule::<{}>::new_skeleton(\"{}\");\n",absyn,"start")?;
for i in 0..self.Gmr.Rules.len()
{
write!(fd," rule = RGrule::<{}>::new_skeleton(\"{}\");\n",absyn,self.Gmr.Rules[i].lhs.sym)?;
write!(fd," rule.Ruleaction = |pstack|{{ ")?;
// pop RHS values right-to-left, binding labeled symbols to let-variables
let mut k = self.Gmr.Rules[i].rhs.len();
while k>0
{
let gsym = &self.Gmr.Rules[i].rhs[k-1];
if gsym.label.len()>0 && &gsym.rusttype[0..3]=="mut"
{ write!(fd," let mut {}:{}=",gsym.label,absyn)?; }
else if gsym.label.len()>0
{ write!(fd," let {}:{}=",gsym.label,absyn)?; }
write!(fd,"pstack.pop()")?; if gsym.label.len()>0 { write!(fd,".unwrap().value; ")?;}
else {write!(fd,"; ")?;}
k -= 1;
} let mut semaction = &self.Gmr.Rules[i].action; if semaction.len()>1 {write!(fd,"{};\n",semaction)?;}
else {write!(fd," return {}::default();}};\n",absyn)?;}
write!(fd," parser1.Rules.push(rule);\n")?;
}
// emit RSM entries, chaining to a new make_parserN every cxmax lines
let mut linecx = 0; let cxmax = 512; for i in 0..self.FSM.len()
{
let row = &self.FSM[i];
for key in row.keys()
{
write!(fd," parser1.RSM[{}].insert(\"{}\",Stateaction::{:?});\n",i,key,row.get(key).unwrap())?;
linecx += 1;
if linecx%cxmax==0 {
write!(fd," return make_parser{}(parser1);\n}}\n\n",(linecx/cxmax))?;
write!(fd,"fn make_parser{}(mut parser1:Parser<{}>) -> Parser<{}>\n{{\n",(linecx/cxmax),absyn,absyn)?;
}
} }
write!(fd," return parser1;\n")?;
write!(fd,"}} //make_parser\n")?;
Ok(())
}
/// Writes a compact parser to `filename`: same rule section as writefsm,
/// but the action table is emitted as a const TABLE of u64 entries
/// (bits 48-63 state, 32-47 symbol index, 16-31 value, 0-15 action kind)
/// that the generated make_parser decodes at startup via decode_action.
pub fn writefsm_bin(&self, filename:&str)->Result<(),std::io::Error>
{
let mut fd = File::create(filename)?;
write!(fd,"//Parser generated by RustLr\n
#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_assignments)]
extern crate RustLr;
use RustLr::{{Parser,RGrule,Stateaction,decode_action}};\n")?;
write!(fd,"{}\n",&self.Gmr.Extras)?;
write!(fd,"const SYMBOLS:[&'static str;{}] = [",self.Gmr.Symbols.len())?;
for i in 0..self.Gmr.Symbols.len()-1
{
write!(fd,"\"{}\",",&self.Gmr.Symbols[i].sym)?;
}
write!(fd,"\"{}\"];\n\n",&self.Gmr.Symbols[self.Gmr.Symbols.len()-1].sym)?;
let mut totalsize = 0;
for i in 0..self.FSM.len() { totalsize+=self.FSM[i].len(); }
write!(fd,"const TABLE:[u64;{}] = [",totalsize)?;
// pack each table entry: state<<48 | symbol<<32 | value<<16 | kind
let mut encode:u64 = 0;
for i in 0..self.FSM.len() {
let row = &self.FSM[i];
for key in row.keys()
{ let k = *self.Gmr.Symhash.get(key).unwrap(); encode = ((i as u64) << 48) + ((k as u64) << 32);
match row.get(key) {
Some(Shift(statei)) => { encode += (*statei as u64) << 16; },
Some(Gotonext(statei)) => { encode += ((*statei as u64) << 16)+1; },
Some(Reduce(rulei)) => { encode += ((*rulei as u64) << 16)+2; },
Some(Accept) => {encode += 3; },
_ => {encode += 4; }, } write!(fd,"{},",encode)?;
} } write!(fd,"];\n\n")?;
let ref absyn = self.Gmr.Absyntype;
write!(fd,"pub fn make_parser() -> Parser<{}>",absyn)?;
write!(fd,"\n{{\n")?;
write!(fd," let mut parser1:Parser<{}> = Parser::new({},{});\n",absyn,self.Gmr.Rules.len(),self.States.len())?;
write!(fd," let mut rule = RGrule::<{}>::new_skeleton(\"{}\");\n",absyn,"start")?;
for i in 0..self.Gmr.Rules.len()
{
write!(fd," rule = RGrule::<{}>::new_skeleton(\"{}\");\n",absyn,self.Gmr.Rules[i].lhs.sym)?;
write!(fd," rule.Ruleaction = |pstack|{{ ")?;
let mut k = self.Gmr.Rules[i].rhs.len();
while k>0
{
let gsym = &self.Gmr.Rules[i].rhs[k-1];
if gsym.label.len()>0 && &gsym.rusttype[0..3]=="mut"
{ write!(fd," let mut {}:{}=",gsym.label,absyn)?; }
else if gsym.label.len()>0
{ write!(fd," let {}:{}=",gsym.label,absyn)?; }
write!(fd,"pstack.pop()")?; if gsym.label.len()>0 { write!(fd,".unwrap().value; ")?;}
else {write!(fd,"; ")?;}
k -= 1;
} let mut semaction = &self.Gmr.Rules[i].action; if semaction.len()>1 {write!(fd,"{};\n",semaction)?;}
else {write!(fd," return {}::default();}};\n",absyn)?;}
write!(fd," parser1.Rules.push(rule);\n")?;
}
// generated startup loop: decode TABLE entries into parser1.RSM
write!(fd,"\n for i in 0..{} {{\n",totalsize)?;
write!(fd," let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;\n")?;
write!(fd," let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;\n")?;
write!(fd," parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));\n }}\n\n")?;
write!(fd," return parser1;\n")?;
write!(fd,"}} //make_parser\n")?;
Ok(())
}
}
pub fn decode_action(code:u64) -> Stateaction
{
let actiontype = code & 0x000000000000ffff;
let actionvalue = (code & 0x00000000ffff0000) >> 16;
match (actiontype,actionvalue) {
(0,si) => Shift(si as usize),
(1,si) => Gotonext(si as usize),
(2,ri) => Reduce(ri as usize),
(3,_) => Accept,
(4,x) => Error(x.to_string()),
_ => Error("unrecognized action in TABLE".to_owned()),
}
}
/// One parse-stack entry: the state index `si` and the semantic value
/// accumulated at that position.
pub struct Stackelement<AT:Default>
{
pub si : usize, pub value : AT, }
/// A lexical token: terminal symbol name plus its semantic value.
pub struct Lextoken<AT:Default> {
pub sym: String, pub value: AT, }
impl<AT:Default> Lextoken<AT>
{
    /// Creates a token with terminal-symbol name `name` and value `val`.
    pub fn new(name:String, val:AT) -> Lextoken<AT> {
        Lextoken { sym:name, value:val }
    }
}
/// Tokenizer interface expected by the generated parser: `nextsym` yields
/// the next token (None at end of input); `linenum` reports the current
/// source line for error messages.
pub trait Lexer<AT:Default>
{
fn nextsym(&mut self) -> Option<Lextoken<AT>>; fn linenum(&self) -> usize; }
/// Runtime form of a grammar rule: lhs name plus the semantic-action
/// function, which pops RHS values off the parse stack and returns the
/// value for the lhs.
pub struct RGrule<AT:Default> {
pub lhs: &'static str,
pub Ruleaction : fn(&mut Vec<Stackelement<AT>>) -> AT, }
impl<AT:Default> RGrule<AT>
{
    /// Builds a rule with lhs `lh` whose action simply yields AT::default().
    pub fn new_skeleton(lh:&'static str) -> RGrule<AT>
    {
        RGrule {
            lhs : lh,
            Ruleaction : |_p| AT::default(),
        }
    }
}
/// The runtime LR parser: per-state action rows (RSM) and the rules with
/// their semantic actions.
pub struct Parser<AT:Default>
{
pub RSM : Vec<HashMap<&'static str,Stateaction>>, pub Rules : Vec<RGrule<AT>>, }
impl<AT:Default> Parser<AT>
{
/// Creates a parser sized for `rlen` rules and `slen` states; RSM is
/// pre-filled with one empty action row per state.
pub fn new(rlen:usize, slen:usize) -> Parser<AT>
{ let mut p = Parser {
RSM : Vec::with_capacity(slen),
Rules : Vec::with_capacity(rlen),
};
for _ in 0..slen {p.RSM.push(HashMap::new());}
p
}
/// Runs the LR parse loop against `tokenizer` and returns the semantic
/// value left by the accepting reduction (AT::default() if the parse
/// stops without accepting). Panics when no table action exists for the
/// current state/lookahead, or when the loop ends on an Error action.
pub fn parse(&self, tokenizer:&mut dyn Lexer<AT>) -> AT
{
let mut result = AT::default();
let mut stack:Vec<Stackelement<AT>> = Vec::with_capacity(1024);
// state 0 with a dummy value seeds the stack
stack.push(Stackelement {si:0, value:AT::default()});
let unexpected = Stateaction::Error(String::from("unexpected end of input"));
let mut action = &unexpected; let mut stopparsing = false;
let mut lookahead = Lextoken{sym:"EOF".to_owned(),value:AT::default()};
if let Some(tok) = tokenizer.nextsym() {lookahead=tok;}
else {stopparsing=true;}
while !stopparsing
{
let currentstate = stack[stack.len()-1].si;
// BUGFIX: this trace line previously contained a mis-encoded
// "&curren" (rendered as a currency sign), which did not compile;
// restored to `&currentstate`.
if TRACE>1 {print!(" current state={}, lookahead={}, ",&currentstate,&lookahead.sym);}
let actionopt = self.RSM[currentstate].get(lookahead.sym.as_str()); if TRACE>1 {println!("RSM action : {:?}",actionopt);}
if let None = actionopt {
panic!("!!PARSE ERROR: no action at state {}, lookahead {}, line {}",currentstate,&lookahead.sym,tokenizer.linenum());
}
action = actionopt.unwrap();
match action {
// Shift: push the lookahead's value and advance (EOF when exhausted)
Stateaction::Shift(i) => { stack.push(Stackelement{si:*i,value:mem::replace(&mut lookahead.value,AT::default())});
if let Some(tok) = tokenizer.nextsym() {lookahead=tok;}
else {
lookahead=Lextoken{sym:"EOF".to_owned(), value:AT::default()};
}
}, // Reduce: run the rule action (it pops the RHS values), then
// follow the Gotonext entry for the rule's lhs from the new top state
Stateaction::Reduce(ri) => { let rulei = &self.Rules[*ri];
let val = (rulei.Ruleaction)(&mut stack); let newtop = stack[stack.len()-1].si;
let goton = self.RSM[newtop].get(rulei.lhs).unwrap();
if TRACE>1 {println!(" ..performing Reduce({}), new state {}, action on {}: {:?}..",ri,newtop,&rulei.lhs,goton);}
if let Stateaction::Gotonext(nsi) = goton {
stack.push(Stackelement{si:*nsi,value:val});
} else { stopparsing=true; }
},
Stateaction::Accept => {
result = stack.pop().unwrap().value;
stopparsing = true;
},
Stateaction::Error(msg) => {
stopparsing = true;
},
// a bare Gotonext on a terminal lookahead is a table error; stop
Stateaction::Gotonext(_) => { stopparsing = true;
},
} } if let Stateaction::Error(msg) = action {
panic!("!!!Parsing failed on line {}, next symbol {}: {}",tokenizer.linenum(),&lookahead.sym,msg);
}
return result;
}}
/// Top-level driver: reads `<grammarname>.grammar`, computes the Nullable
/// and First sets, generates an LR1 or LALR state machine, and writes the
/// parser to `<name>parser.rs`. `option` is "lalr"/"LALR" or "lr1"/"LR1"
/// (anything else defaults to LR1 with a warning).
pub fn rustler(grammarname:&str, option:&str) {
let mut gram1 = Grammar::new();
let grammarfile = format!("{}.grammar",&grammarname);
let lalr = match option {
"lalr" | "LALR" => true,
"lr1" | "LR1" => false,
_ => {println!("Option {} not supported, defaulting to full LR1 generation",option); false},
};
if TRACE>1 {println!("parsing grammar from {}",grammarfile);}
gram1.parse_grammar(&grammarfile);
if TRACE>2 {println!("computing Nullable set");}
gram1.compute_NullableRf();
if TRACE>2 {println!("computing First sets");}
gram1.compute_FirstIM();
// default the grammar name to the file name if none was declared
if gram1.name.len()<2 {gram1.name = grammarname.to_owned(); }
let gramname = gram1.name.clone();
let mut fsm0 = Statemachine::new(gram1);
fsm0.lalr = lalr;
if lalr {fsm0.Open = Vec::with_capacity(1024); }
println!("Generating {} state machine for grammar...",if lalr {"LALR"} else {"LR1"});
fsm0.generatefsm();
if TRACE>1 { for state in &fsm0.States {printstate(state,&fsm0.Gmr);} }
else if TRACE>0 { printstate(&fsm0.States[0],&fsm0.Gmr); } let parserfile = format!("{}parser.rs",&gramname);
// pick writer: source form for small machines, binary-table form for
// larger ones; 16-bit packing caps the machine at 65536 states.
// NOTE(review): write_verbose/writeparser are not defined in this chunk
// — presumably elsewhere in the crate; confirm they exist.
let write_result =
if fsm0.Gmr.Externtype.len()>0 {
if fsm0.States.len()<=16 {fsm0.write_verbose(&parserfile)}
else if fsm0.States.len()<=65536 {fsm0.writeparser(&parserfile)}
else {panic!("too many states: {}",fsm0.States.len())}
}
else if fsm0.States.len()<=16 { fsm0.writefsm(&parserfile) }
else if fsm0.States.len()<=65536 { fsm0.writefsm_bin(&parserfile) }
else {panic!("too many states: {}",fsm0.States.len());};
println!("{} total states",fsm0.States.len());
if let Ok(_) = write_result {println!("written parser to {}",&parserfile);}
}