#![allow(dead_code)]
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_assignments)]
#![allow(unused_doc_comments)]
#![allow(unused_imports)]
use std::io::{self,Read,Write,BufReader,BufRead};
use std::collections::HashSet;
use std::fs::File;
use std::io::prelude::*;
use std::path::Path;
use crate::{Statemachine,Stateaction,checkboxexp};
use crate::{StandardReporter};
use crate::zc_parser::*;
use crate::lexer_interface::*;
use crate::generic_absyn::*;
use crate::Stateaction::*;
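// Heuristic check for whether the grammar's abstract-syntax type is an
// LBox<dyn Any>-style type, in which case a different writer (writelbaparser) is used.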
fn is_lba(t:&str) -> bool {
t.trim().starts_with("LBox") && t.contains("Any") && t.contains('<') && t.contains('>')
}
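// Strips every occurrence of the lifetime `lt` from the type string `s` (as "'lt ",
// "<'lt>" or bare), then rewrites any leftover angle brackets as underscores,
// presumably so the result can be embedded in a generated identifier.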
fn remove_lt(s:&str, lt:&str) -> String
{
let mut ax = String::from(s);
if lt.len()==0 {return ax;}
let mut ltform = format!("{} ",lt);
let mut ln = ltform.len();
while let Some(p) = ax.find(&ltform) {ax.replace_range(p..(p+ln),"");}
ltform = format!("<{}>",lt); ln = ltform.len();
while let Some(p) = ax.find(&ltform) {ax.replace_range(p..(p+ln),"");}
ln = lt.len();
while let Some(p) = ax.find(lt) {ax.replace_range(p..(p+ln),"");}
while let Some(p) = ax.find("<") {ax.replace_range(p..(p+1),"_");}
while let Some(p) = ax.find(">") {ax.replace_range(p..(p+1),"_");}
ax
}
impl Statemachine
{
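// Writes a zero-copy (ZCParser-based) parser for a grammar whose nonterminals may
// carry different value types: every semantic value is wrapped in a generated
// RetTypeEnum, and one semantic-action function is emitted per production rule.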
pub fn writeenumparser(&self, filename:&str)->Result<(),std::io::Error>
{
let ref absyn = self.Gmr.Absyntype;
if self.Gmr.sametype || is_lba(absyn){
return self.writelbaparser(filename);
}
let ref extype = self.Gmr.Externtype;
let ref lifetime = self.Gmr.lifetime;
let has_lt = lifetime.len()>0 ;
let ltopt = if has_lt {format!("<{}>",lifetime)} else {String::from("")};
let lbc = if self.Gmr.bumpast {"lc"} else {"lbx"};
let rlen = self.Gmr.Rules.len();
let mut actions:Vec<String> = Vec::with_capacity(rlen);
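// Generate one free-standing semantic-action function per rule. Each function pops
// the rule's right-hand-side values from the parse stack, unwraps their RetTypeEnum
// variants, and then runs the user-written action (or a default).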
for ri in 0..rlen
{
let lhs = &self.Gmr.Rules[ri].lhs.sym;
let lhsi = self.Gmr.Rules[ri].lhs.index;
let rettype = &self.Gmr.Symbols[lhsi].rusttype; let ltoptr = if has_lt || (lifetime.len()>0 && rettype.contains(lifetime))
{format!("<{}>",lifetime)} else {String::from("")};
let mut fndef = format!("\nfn _semaction_rule_{}_{}(parser:&mut ZCParser<RetTypeEnum{},{}>) -> {} {{\n",ri,&ltoptr,&ltopt,extype,rettype);
let mut k = self.Gmr.Rules[ri].rhs.len(); let mut labels = String::from("(");
let mut patterns = String::from("(");
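// `labels` collects the variables bound for labeled rhs symbols and `patterns`
// collects the patterns that follow '@' in labels of the form `x@pattern`; when any
// such pattern is present the user action is wrapped in an `if let (patterns) = (labels)`
// that reports an error and returns a default value on mismatch.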
while k>0 {
let mut boxedlabel = false; let gsym = &self.Gmr.Rules[ri].rhs[k-1]; let findat = gsym.label.find('@');
let mut plab = format!("_item{}_",k-1);
match &findat {
None if gsym.label.len()>0 => {
let truelabel = checkboxexp(&gsym.label,&plab);
boxedlabel = gsym.label.starts_with('[') && (truelabel != &gsym.label);
plab = String::from(truelabel);
},
Some(ati) if *ati>0 => {
let rawlabel = gsym.label[0..*ati].trim();
let truelabel = checkboxexp(rawlabel,&plab);
boxedlabel = gsym.label.starts_with('[') && (truelabel != rawlabel);
plab = String::from(truelabel);
},
_ => {},
} let poppedlab = plab.as_str();
let symtype=&self.Gmr.Symbols[gsym.index].rusttype;
let emsg = format!("FATAL ERROR: '{}' IS NOT A TYPE IN THIS GRAMMAR. DID YOU INTEND TO USE THE -auto OPTION TO GENERATE TYPES?",&symtype);
let eindex = self.Gmr.enumhash.get(symtype).expect(&emsg);
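// Build the statement that pops the next stack item and unwraps its enum variant;
// bracketed labels such as [x] are additionally rewrapped via parser.lbx (an LBox)
// or, when bumpast is set, via parser.exstate.make(parser.lc(..)).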
let stat;
if self.Gmr.bumpast && boxedlabel { stat = format!("let mut _{0}_ = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; let mut {0} = parser.exstate.make(parser.lc({3},_{0}_)); ",poppedlab,&eindex,symtype,k-1);
} else {
if self.Gmr.Rules[ri].autogenerated || !boxedlabel { stat = format!("let mut {0} = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; ",poppedlab,&eindex,symtype);
}
else { stat = format!("let mut _{0}_ = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; let mut {0} = parser.lbx({3},_{0}_); ",poppedlab,&eindex,symtype,k-1);
}
}
fndef.push_str(&stat);
if gsym.label.len()>1 && findat.is_some() { let atindex = findat.unwrap();
if atindex>0 { labels.push_str("&mut "); if boxedlabel {labels.push('*');} labels.push_str(poppedlab); labels.push(',');
}
else { labels.push_str(poppedlab); labels.push(',');
}
patterns.push_str(&gsym.label[atindex+1..]); patterns.push(',');
}
k -= 1;
} let defaultaction = format!("<{}>::default()}}",rettype);
let mut semaction = &self.Gmr.Rules[ri].action; if semaction.len()<=1 {semaction = &defaultaction;}
if labels.len()<2 {
fndef.push_str(semaction.trim_end()); fndef.push_str("\n");
} else { labels.push(')'); patterns.push(')');
let pat2= format!("\n if let {}={} {{ {} else {{parser.report(\"{}\"); <{}>::default()}} }}\n",&patterns,&labels,semaction.trim_end(),&patterns,rettype);
fndef.push_str(&pat2);
} actions.push(fndef);
}
let mut fd = File::create(filename)?;
write!(fd,"//Parser generated by rustlr for grammar {}",&self.Gmr.name)?;
write!(fd,"\n
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(irrefutable_let_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{{Tokenizer,TerminalToken,ZCParser,ZCRProduction,Stateaction,decode_action}};\n")?;
if self.Gmr.genlex {
write!(fd,"use rustlr::{{StrTokenizer,RawToken,LexSource}};
use std::collections::{{HashMap,HashSet}};\n")?;
}
write!(fd,"{}\n",&self.Gmr.Extras)?;
write!(fd,"static SYMBOLS:[&'static str;{}] = [",self.Gmr.Symbols.len())?;
for i in 0..self.Gmr.Symbols.len()-1
{
write!(fd,"\"{}\",",&self.Gmr.Symbols[i].sym)?;
}
write!(fd,"\"{}\"];\n\n",&self.Gmr.Symbols[self.Gmr.Symbols.len()-1].sym)?;
let mut totalsize = 0;
for i in 0..self.FSM.len() { totalsize+=self.FSM[i].len(); }
if self.Gmr.tracelev>1 {println!("{} total state table entries",totalsize);}
write!(fd,"static TABLE:[u64;{}] = [",totalsize)?;
let mut encode:u64 = 0;
for i in 0..self.FSM.len() {
let row = &self.FSM[i];
for key in row.keys()
{ let k = *key; encode = ((i as u64) << 48) + ((k as u64) << 32);
match row.get(key) {
Some(Shift(statei)) => { encode += (*statei as u64) << 16; },
Some(Gotonext(statei)) => { encode += ((*statei as u64) << 16)+1; },
Some(Reduce(rulei)) => { encode += ((*rulei as u64) << 16)+2; },
Some(Accept) => {encode += 3; },
_ => {encode += 4; }, } write!(fd,"{},",encode)?;
} } write!(fd,"];\n\n")?;
for deffn in &actions { write!(fd,"{}",deffn)?; }
write!(fd,"\npub fn make_parser{}() -> ZCParser<RetTypeEnum{},{}>",<opt,<opt,extype)?;
write!(fd,"\n{{\n")?;
write!(fd," let mut parser1:ZCParser<RetTypeEnum{},{}> = ZCParser::new({},{});\n",<opt,extype,self.Gmr.Rules.len(),self.FSM.len())?;
write!(fd," let mut rule = ZCRProduction::<RetTypeEnum{},{}>::new_skeleton(\"{}\");\n",<opt,extype,"start")?; for i in 0..self.Gmr.Rules.len()
{
write!(fd," rule = ZCRProduction::<RetTypeEnum{},{}>::new_skeleton(\"{}\");\n",<opt,extype,self.Gmr.Rules[i].lhs.sym)?;
write!(fd," rule.Ruleaction = |parser|{{ ")?;
let lhsi = self.Gmr.Symhash.get(&self.Gmr.Rules[i].lhs.sym).expect("GRAMMAR REPRESENTATION CORRUPTED");
let fnname = format!("_semaction_rule_{}_",i);
let typei = &self.Gmr.Symbols[*lhsi].rusttype;
let enumindex = self.Gmr.enumhash.get(typei).expect(&format!("FATAL ERROR: TYPE {} NOT USED IN GRAMMAR",typei));
write!(fd," RetTypeEnum::Enumvariant_{}({}(parser)) }};\n",enumindex,&fnname)?;
write!(fd," parser1.Rules.push(rule);\n")?;
}
write!(fd," parser1.Errsym = \"{}\";\n",&self.Gmr.Errsym)?;
for s in &self.Gmr.Resynch {write!(fd," parser1.resynch.insert(\"{}\");\n",s)?;}
write!(fd,"\n for i in 0..{} {{\n",totalsize)?;
write!(fd," let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;\n")?;
write!(fd," let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;\n")?;
write!(fd," parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));\n }}\n\n")?;
write!(fd," for s in SYMBOLS {{ parser1.Symset.insert(s); }}\n\n")?;
write!(fd," load_extras(&mut parser1);\n")?;
write!(fd," return parser1;\n")?;
write!(fd,"}} //make_parser\n\n")?;
let lexerlt = if has_lt {ltopt.as_str()} else {"<'t>"};
let lexername = format!("{}lexer{}",&self.Gmr.name,lexerlt);
let abindex = *self.Gmr.enumhash.get(absyn).unwrap();
write!(fd,"pub fn parse_with{}(parser:&mut ZCParser<RetTypeEnum{},{}>, lexer:&mut {}) -> Result<{},{}>\n{{\n",lexerlt,lexerlt,extype,&lexername,absyn,absyn)?;
if self.Gmr.bumpast {
write!(fd," if lexer.bump.is_some() {{parser.exstate.set(lexer.bump.unwrap());}}\n")?;
}
write!(fd," lexer.shared_state = Rc::clone(&parser.shared_state);\n")?;
write!(fd," if let RetTypeEnum::Enumvariant_{}(_xres_) = parser.parse(lexer) {{\n",abindex)?;
write!(fd," if !parser.error_occurred() {{Ok(_xres_)}} else {{Err(_xres_)}}\n }} ")?;
write!(fd,"else {{ Err(<{}>::default())}}\n}}//parse_with public function\n",absyn)?;
write!(fd,"\npub fn parse_train_with{}(parser:&mut ZCParser<RetTypeEnum{},{}>, lexer:&mut {}, parserpath:&str) -> Result<{},{}>\n{{\n",lexerlt,<opt,extype,&lexername,absyn,absyn)?;
if self.Gmr.bumpast {
write!(fd," if lexer.bump.is_some() {{parser.exstate.set(lexer.bump.unwrap());}}\n")?;
} write!(fd," lexer.shared_state = Rc::clone(&parser.shared_state);\n")?;
write!(fd," if let RetTypeEnum::Enumvariant_{}(_xres_) = parser.parse_train(lexer,parserpath) {{\n",abindex)?;
write!(fd," if !parser.error_occurred() {{Ok(_xres_)}} else {{Err(_xres_)}}\n }} ")?;
write!(fd,"else {{ Err(<{}>::default())}}\n}}//parse_train_with public function\n",absyn)?;
self.Gmr.gen_enum(&mut fd)?;
if self.Gmr.genlex { self.Gmr.genlexer(&mut fd,"from_raw")?; }
write!(fd,"fn load_extras{}(parser:&mut ZCParser<RetTypeEnum{},{}>)\n{{\n",<opt,<opt,extype)?;
write!(fd,"}}//end of load_extras: don't change this line as it affects augmentation\n")?;
Ok(())
}
}
impl Statemachine
{
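// Same generation strategy as writeenumparser, but targets rustlr's BaseParser: the
// emitted action functions and make_parser are generic over the tokenizer type TT, and
// the parse table may optionally be written to a separate binary file instead of being
// embedded in the generated source.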
pub fn writebaseenumparser(&self, filename:&str)->Result<(),std::io::Error>
{
let ref absyn = self.Gmr.Absyntype;
let ref extype = self.Gmr.Externtype;
let ref lifetime = self.Gmr.lifetime;
let has_lt = lifetime.len()>0 ;
let ltopt = if has_lt {format!("<{}>",lifetime)} else {String::from("")};
let lbc = if self.Gmr.bumpast {"lc"} else {"lbx"};
let lexerlt = if has_lt {ltopt.as_str()} else {"<'t>"};
let lexerlife = if has_lt {&lifetime[..]} else {"'t"};
let lexername = format!("{}lexer{}",&self.Gmr.name,lexerlt);
let abindex = *self.Gmr.enumhash.get(absyn).unwrap();
let rlen = self.Gmr.Rules.len();
let mut actions:Vec<String> = Vec::with_capacity(rlen);
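// Per-rule semantic-action generation, as in writeenumparser, but each function is
// generic over the lexer lifetime and the tokenizer type TT required by BaseParser.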
for ri in 0..rlen
{
let lhs = &self.Gmr.Rules[ri].lhs.sym;
let lhsi = self.Gmr.Rules[ri].lhs.index;
let rettype = &self.Gmr.Symbols[lhsi].rusttype; let ltoptr = if has_lt || (lifetime.len()>0 && rettype.contains(lifetime))
{format!("<{}>",lifetime)} else {String::from("")};
let mut fndef = format!("\nfn _semaction_rule_{}_<{},TT:Tokenizer<{},RetTypeEnum{}>>(parser:&mut BaseParser<{},RetTypeEnum{},{},TT>) -> {} {{\n",ri,lexerlife,lexerlife,&ltopt,lexerlife,&ltopt,extype,rettype);
let mut k = self.Gmr.Rules[ri].rhs.len(); let mut labels = String::from("(");
let mut patterns = String::from("(");
while k>0 {
let mut boxedlabel = false; let gsym = &self.Gmr.Rules[ri].rhs[k-1]; let findat = gsym.label.find('@');
let mut plab = format!("_item{}_",k-1);
match &findat {
None if gsym.label.len()>0 => {
let truelabel = checkboxexp(&gsym.label,&plab);
boxedlabel = gsym.label.starts_with('[') && (truelabel != &gsym.label);
plab = String::from(truelabel);
},
Some(ati) if *ati>0 => {
let rawlabel = gsym.label[0..*ati].trim();
let truelabel = checkboxexp(rawlabel,&plab);
boxedlabel = gsym.label.starts_with('[') && (truelabel != rawlabel);
plab = String::from(truelabel);
},
_ => {},
} let poppedlab = plab.as_str();
let symtype=&self.Gmr.Symbols[gsym.index].rusttype;
let emsg = format!("FATAL ERROR: '{}' IS NOT A TYPE IN THIS GRAMMAR. DID YOU INTEND TO USE THE -auto OPTION TO GENERATE TYPES?",&symtype);
let eindex = self.Gmr.enumhash.get(symtype).expect(&emsg);
let stat;
if self.Gmr.bumpast && boxedlabel { stat = format!("let mut _{0}_ = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; let mut {0} = parser.exstate.make(parser.lc({3},_{0}_)); ",poppedlab,&eindex,symtype,k-1);
} else {
if self.Gmr.Rules[ri].autogenerated || !boxedlabel { stat = format!("let mut {0} = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; ",poppedlab,&eindex,symtype);
}
else { stat = format!("let mut _{0}_ = if let RetTypeEnum::Enumvariant_{1}(_x_{1})=parser.popstack().value {{ _x_{1} }} else {{<{2}>::default()}}; let mut {0} = parser.lbx({3},_{0}_); ",poppedlab,&eindex,symtype,k-1);
}
}
fndef.push_str(&stat);
if gsym.label.len()>1 && findat.is_some() { let atindex = findat.unwrap();
if atindex>0 { labels.push_str("&mut "); if boxedlabel {labels.push('*');} labels.push_str(poppedlab); labels.push(',');
}
else { labels.push_str(poppedlab); labels.push(',');
}
patterns.push_str(&gsym.label[atindex+1..]); patterns.push(',');
}
k -= 1;
} let defaultaction = format!("<{}>::default()}}",rettype);
let mut semaction = &self.Gmr.Rules[ri].action; if semaction.len()<=1 {semaction = &defaultaction;}
if labels.len()<2 {
fndef.push_str(semaction.trim_end()); fndef.push_str("\n");
} else { labels.push(')'); patterns.push(')');
let pat2= format!("\n if let {}={} {{ {} else {{parser.report(\"{}\"); <{}>::default()}} }}\n",&patterns,&labels,semaction.trim_end(),&patterns,rettype);
fndef.push_str(&pat2);
} actions.push(fndef);
}
let mut fd = File::create(filename)?;
write!(fd,"//Parser generated by rustlr for grammar {}",&self.Gmr.name)?;
write!(fd,"\n
#![allow(unused_variables)]
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
#![allow(unused_parens)]
#![allow(unused_mut)]
#![allow(unused_imports)]
#![allow(unused_assignments)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(irrefutable_let_patterns)]
use std::rc::Rc;
use std::cell::RefCell;
extern crate rustlr;
use rustlr::{{Tokenizer,TerminalToken,BaseParser,BaseProduction,Stateaction,decode_action}};\n")?;
if self.Gmr.genlex {
write!(fd,"use rustlr::{{StrTokenizer,RawToken,LexSource}};
use std::collections::{{HashMap,HashSet}};\n")?;
}
write!(fd,"{}\n",&self.Gmr.Extras)?;
write!(fd,"static SYMBOLS:[&'static str;{}] = [",self.Gmr.Symbols.len())?;
for i in 0..self.Gmr.Symbols.len()-1
{
write!(fd,"\"{}\",",&self.Gmr.Symbols[i].sym)?;
}
write!(fd,"\"{}\"];\n\n",&self.Gmr.Symbols[self.Gmr.Symbols.len()-1].sym)?;
let mut totalsize = 0;
for i in 0..self.FSM.len() { totalsize+=self.FSM[i].len(); }
if self.Gmr.tracelev>1 {println!("{} total state table entries",totalsize);}
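// When a table file name was given, the parse table is written to that file as raw
// big-endian u64 entries and the generated parser reads it back at startup; otherwise
// the table is embedded in the source as a static array, as in writeenumparser.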
let mut tfdopt = None;
if self.Gmr.tablefile.len()>0 {
write!(fd,"use std::fs::File;\n")?;
write!(fd,"use std::io::prelude::*;\n")?;
write!(fd,"use std::path::Path;\n")?;
write!(fd,"use std::io::Read;\n")?;
let mut tfd1 = File::create(&self.Gmr.tablefile)?;
tfdopt = Some(tfd1);
}
else { write!(fd,"static TABLE:[u64;{}] = [",totalsize)?;
}
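// Same u64 packing as in writeenumparser; each entry goes either inline into the
// generated source or, when a table file was requested, to that file as big-endian bytes.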
let mut encode:u64 = 0;
for i in 0..self.FSM.len() {
let row = &self.FSM[i];
for key in row.keys()
{ let k = *key; encode = ((i as u64) << 48) + ((k as u64) << 32);
match row.get(key) {
Some(Shift(statei)) => { encode += (*statei as u64) << 16; },
Some(Gotonext(statei)) => { encode += ((*statei as u64) << 16)+1; },
Some(Reduce(rulei)) => { encode += ((*rulei as u64) << 16)+2; },
Some(Accept) => {encode += 3; },
_ => {encode += 4; }, } tfdopt.as_mut().map_or_else(||{write!(fd,"{},",encode)},
|tfd|{tfd.write_all(&encode.to_be_bytes())})?;
} } if self.Gmr.tablefile.len()==0 { write!(fd,"];\n\n")?; }
for deffn in &actions { write!(fd,"{}",deffn)?; }
write!(fd,"\npub fn make_parser<{},TT:Tokenizer<{},RetTypeEnum{}>>(tk:TT) -> BaseParser<{},RetTypeEnum{},{},TT>",lexerlife,lexerlife,<opt,lexerlife,<opt,extype)?;
write!(fd,"\n{{\n")?;
write!(fd," let mut parser1:BaseParser<{},RetTypeEnum{},{},TT> = BaseParser::new({},{},tk);\n",lexerlife,<opt,extype,self.Gmr.Rules.len(),self.FSM.len())?;
write!(fd," let mut rule = BaseProduction::<{},RetTypeEnum{},{},TT>::new_skeleton(\"{}\");\n",lexerlife,<opt,extype,"start")?; for i in 0..self.Gmr.Rules.len()
{
write!(fd," rule = BaseProduction::<{},RetTypeEnum{},{},TT>::new_skeleton(\"{}\");\n",lexerlife,<opt,extype,self.Gmr.Rules[i].lhs.sym)?;
write!(fd," rule.Ruleaction = |parser|{{ ")?;
let lhsi = self.Gmr.Symhash.get(&self.Gmr.Rules[i].lhs.sym).expect("GRAMMAR REPRESENTATION CORRUPTED");
let fnname = format!("_semaction_rule_{}_",i);
let typei = &self.Gmr.Symbols[*lhsi].rusttype;
let enumindex = self.Gmr.enumhash.get(typei).expect(&format!("FATAL ERROR: TYPE {} NOT USED IN GRAMMAR",typei));
write!(fd," RetTypeEnum::Enumvariant_{}({}(parser)) }};\n",enumindex,&fnname)?;
write!(fd," parser1.Rules.push(rule);\n")?;
}
write!(fd," parser1.Errsym = \"{}\";\n",&self.Gmr.Errsym)?;
for s in &self.Gmr.Resynch {write!(fd," parser1.resynch.insert(\"{}\");\n",s)?;}
if self.Gmr.tablefile.len()==0 {
write!(fd,"\n for i in 0..{} {{\n",totalsize)?;
write!(fd," let symi = ((TABLE[i] & 0x0000ffff00000000) >> 32) as usize;\n")?;
write!(fd," let sti = ((TABLE[i] & 0xffff000000000000) >> 48) as usize;\n")?;
write!(fd," parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(TABLE[i]));\n }}\n\n")?;
} else { let mut fsmfile = &self.Gmr.tablefile[..];
if let Some(pos) = self.Gmr.tablefile.rfind('/')
.or(self.Gmr.tablefile.rfind('\\')) {
fsmfile = &self.Gmr.tablefile[pos+1..];
}
write!(fd,"let mut tfd = File::open(r\"./src/{}\").or(File::open(r\".\\src\\{}\")).or(File::open(\"{}\")).expect(r\"Parse Table File {} Not Found\");\n",fsmfile, fsmfile, fsmfile, fsmfile)?;
write!(fd,"\n let mut tbuf = [0u8;8];")?;
write!(fd,"\n for i in 0..{} {{\n",totalsize)?;
write!(fd," tfd.read_exact(&mut tbuf).expect(\"File Read Failed\");\n")?;
write!(fd," let tabi = u64::from_be_bytes(tbuf);\n")?;
write!(fd," let symi = ((tabi & 0x0000ffff00000000) >> 32) as usize;\n")?;
write!(fd," let sti = ((tabi & 0xffff000000000000) >> 48) as usize;\n")?;
write!(fd," parser1.RSM[sti].insert(SYMBOLS[symi],decode_action(tabi));\n }}\n\n")?;
}
write!(fd," for s in SYMBOLS {{ parser1.Symset.insert(s); }}\n\n")?;
write!(fd," load_extras(&mut parser1);\n")?;
write!(fd," return parser1;\n")?;
write!(fd,"}} //make_parser\n\n")?;
write!(fd,"pub fn parse_with{}(parser:&mut BaseParser<{},RetTypeEnum{},{},{}>) -> Result<{},{}>\n{{\n",lexerlt,lexerlife,<opt,extype,&lexername,absyn,absyn)?;
if self.Gmr.bumpast {
write!(fd," if parser.tokenizer.bump.is_some() {{let bb = parser.tokenizer.bump.unwrap(); parser.exstate.set(bb);}}\n")?;
}
write!(fd," parser.tokenizer.shared_state = Rc::clone(&parser.shared_state);\n")?;
write!(fd," if let RetTypeEnum::Enumvariant_{}(_xres_) = parser.parse() {{\n",abindex)?;
write!(fd," if !parser.error_occurred() {{Ok(_xres_)}} else {{Err(_xres_)}}\n }} ")?;
write!(fd,"else {{ Err(<{}>::default())}}\n}}//parse_with public function\n",absyn)?;
write!(fd,"\npub fn parse_train_with{}(parser:&mut BaseParser<{},RetTypeEnum{},{},{}>, parserpath:&str) -> Result<{},{}>\n{{\n",lexerlt,lexerlife,<opt,extype,&lexername,absyn,absyn)?;
if self.Gmr.bumpast {
write!(fd," if parser.tokenizer.bump.is_some() {{let bb = parser.tokenizer.bump.unwrap(); parser.exstate.set(bb);}}\n")?;
} write!(fd," parser.tokenizer.shared_state = Rc::clone(&parser.shared_state);\n")?;
write!(fd," if let RetTypeEnum::Enumvariant_{}(_xres_) = parser.parse_train(parserpath) {{\n",abindex)?;
write!(fd," if !parser.error_occurred() {{Ok(_xres_)}} else {{Err(_xres_)}}\n }} ")?;
write!(fd,"else {{ Err(<{}>::default())}}\n}}//parse_train_with public function\n",absyn)?;
self.Gmr.gen_enum(&mut fd)?;
if self.Gmr.genlex { self.Gmr.genlexer(&mut fd,"from_raw")?; }
write!(fd,"fn load_extras<{},TT:Tokenizer<{},RetTypeEnum{}>>(parser:&mut BaseParser<{},RetTypeEnum{},{},TT>)\n{{\n",lexerlife,lexerlife,<opt,lexerlife,<opt,extype)?;
write!(fd,"}}//end of load_extras: don't change this line as it affects augmentation\n")?;
Ok(())
}
}