// lemon_mint/lib.rs
1//! # lemon-mint
2//! Famous Lemon Parser Generator implemented in rust as library with API.
3//!
4//! ## Example
5//!
6//! ```
7//! extern crate lemon_mint;
8//!
9//! use std::fs::File;
10//! use std::sync::Arc;
11//! use lemon_mint::LemonMintBuilder;
12//!
13//! fn main()
14//! {	let builder = LemonMintBuilder::new().load_y
15//! 	(	&Arc::new("source.y".to_string()), // fake source name, that will appear in error messages
16//! 		"	%trace {>> }
17//! 			%extra_argument {()}
18//! 			%left PLUS MINUS.
19//! 			%left TIMES DIVIDE.
20//! 			%token_type {f64}
21//! 			%type Expr {super::Expr}
22//! 			%type Exprs {Vec<super::Expr>}
23//! 			%type Program {Vec<super::Expr>}
24//!
25//! 			Program ::= Exprs(exprs). exprs
26//! 			Program ::= Exprs(exprs) NEW_LINE. exprs
27//!
28//! 			Exprs ::= Expr(item).                       vec![item]
29//! 			Exprs ::= Exprs(items) NEW_LINE Expr(item). let mut items = items; items.push(item); items
30//!
31//! 			Expr ::= NUM(value). super::Expr {value}
32//! 			Expr ::= PAR_OPEN Expr(a) PAR_CLOSE. a
33//! 			Expr ::= PLUS Expr(a). a
34//! 			Expr ::= MINUS Expr(a). let mut a = a; a.value = -a.value; a
35//! 			Expr ::= Expr(a) PLUS Expr(b). super::Expr{value: a.value + b.value}
36//! 			Expr ::= Expr(a) MINUS Expr(b). super::Expr{value: a.value - b.value}
37//! 			Expr ::= Expr(a) TIMES Expr(b). super::Expr{value: a.value * b.value}
38//! 			Expr ::= Expr(a) DIVIDE Expr(b). super::Expr{value: a.value / b.value}
39//!
40//! 			%code {
41//! 				use code::{Parser, Token};
42//!
43//!                 #[derive(Debug, PartialEq)]
44//! 				pub struct Expr {value: f64}
45//!
46//! 				fn main()
47//! 				{	let mut parser = Parser::new(()); // () is our extra argument, that will be accessible in actions, and also through parser.extra
48//!
49//! 					parser.add_token(Token::NUM, 15.0).unwrap();
50//! 					parser.add_token(Token::DIVIDE, 0.0).unwrap();
51//! 					parser.add_token(Token::NUM, 5.0).unwrap();
52//! 					parser.add_token(Token::NEW_LINE, 0.0).unwrap();
53//!
54//! 					let result = parser.end().unwrap(); // if Program
55//!                     assert_eq!(result, vec![Expr {value: 3.0}]);
56//!                     println!(\"Result: {:?}\", result);
57//! 				}
58//! 			}
59//! 		".as_bytes()
60//! 	).unwrap();
61//!
62//! 	let lemon = builder.try_into_lemon().unwrap();
63//! 	let mut out_rust = File::create("/tmp/main.rs").unwrap();
64//! 	let mut out_y = File::create("/tmp/main.y").unwrap();
65//! 	lemon.gen_rust(&mut out_rust).unwrap();
66//! 	lemon.gen_log(&mut out_y, false, false).unwrap();
67//! }
68//! ```
69//!
//! The first step is to create `LemonMintBuilder` object, and use its methods to describe the desired parser. It's possible to load "y"-grammar from a file with `load_y_file()`, or from a string with `load_y()`, as shown in the example above, or you can set each parser rule with individual methods, like `set_start_symbol()`, `set_token_type()`, `add_type()`, `add_rule()`, and others.
71//!
72//! Then the builder object can be converted to `LemonMint` object that represents the parser transition tables. This step is done with the `try_into_lemon()` method, as shown above.
73//!
//! And the last step is to generate a rust file with parser tables and its driver code. This is done with `gen_rust()` method. Optionally you can generate log file with `gen_log()`, as classic Lemon program does.
75//!
76//! ## Y-Grammar
77//!
78//! Lemon-mint uses y-grammar similar to what [classic Lemon parser](https://www.hwaci.com/sw/lemon/) uses. There are a few distinctions:
79//!
80//! * If a symbol name contains at least one lowercase letter, it's considered nonterminal. In classic Lemon only the first letter matters.
//! * To enable trace, use `%trace` directive. In Lemon you need to call the ParseTrace() function.
82//! * No need for `%name` directive, because in rust each file is separate module.
83//! * No need for destructors.
84//!
//! The generated parser will be separated to 2 submodules: `code` and `rules`. There are only 2 interesting things in `code` module: `Parser` and `Token`. See in above example how they're used. The `rules` module wraps actions, and you don't need to use it directly. If your actions use types, constants or functions from global space, they can be accessed like `super::Expr`, or `crate::Expr`, or so.
86//!
87//! The following directives are supported:
88//!
89//! * %token_type
90//! * %type
91//! * %default_type
92//! * %start_symbol
93//! * %trace
//! * %extra_argument - only its type. The name is always `extra`. Default type is `()`.
95//! * %left
96//! * %right
97//! * %nonassoc
98//! * %fallback
99//! * %code or %include - are the same
100//!
101//! The syntax is free and permissive:
102//!
103//! ```ignore
104//! %start_symbol {Unit}
105//! /* or */
106//! %start_symbol Unit.
107//! /* or */
108//! %start_symbol Unit
109//! ```
110//!
111//! Braces allow to specify multiline value, till matching closing brace. Supported line and multiline C-style comments.
112
113
114mod lempar;
115
116use lempar::LemparReader;
117
118use std::mem;
119use std::cmp;
120use std::cmp::Ordering;
121use std::collections::HashMap;
122use std::cell::RefCell;
123use std::io::{self, BufRead, BufReader};
124use std::fmt;
125use std::hash::{Hash, Hasher};
126use std::collections::hash_map::DefaultHasher;
127use std::error::Error;
128use std::iter::Take;
129use std::slice::Iter;
130use std::convert::TryFrom;
131use std::rc::Rc;
132use std::sync::Arc;
133use std::fs::File;
134use std::borrow::Cow;
135
/// Normalizes a type-name string: trims surrounding whitespace and maps
/// "empty" types (blank, `()`, `( )` — the unit type) to the empty string.
/// Returns the original `String` unchanged (reusing its allocation) when no
/// trimming is needed.
fn typename_to_string(value: String) -> String
{	let value_trimmed = value.trim();
	// Check the TRIMMED form for the `()` unit type, so that e.g. " () " is
	// treated the same as "()" (previously the untrimmed string was checked,
	// so " () " slipped through as a nonempty type).
	let is_unit = value_trimmed.starts_with('(')
		&& value_trimmed.ends_with(')')
		&& value_trimmed[1 .. value_trimmed.len()-1].trim().is_empty();
	if value_trimmed.is_empty() || is_unit
	{	String::new()
	}
	else if value.len() == value_trimmed.len()
	{	value // already trimmed — reuse the allocation
	}
	else
	{	value_trimmed.to_string()
	}
}
145
/// A symbol name denotes a terminal when it contains no ASCII lowercase
/// letter (unlike classic Lemon, where only the first letter matters —
/// see the module docs).
fn is_terminal_name(s: &str) -> bool
{	!s.chars().any(|c| c.is_ascii_lowercase())
}
149
/// An error produced while parsing a grammar or building the parser tables.
/// Carries the grammar source name and line number for diagnostics.
#[derive(Debug)]
pub struct LemonMintError
{	pub filename: Arc<String>,
	pub n_line: usize,
	pub message: String,
}
impl LemonMintError
{	/// Creates an error pointing at `filename`:`n_line` with the given message.
	pub fn new(filename: &Arc<String>, n_line: usize, message: String) -> Self
	{	Self {filename: Arc::clone(filename), n_line, message}
	}
}
impl fmt::Display for LemonMintError
{	/// Formats as `filename(line): message`, omitting the location when no
	/// filename is set, and substituting a placeholder for an empty message.
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
	{	let message = if self.message.is_empty() {"Unspecified error"} else {self.message.as_str()};
		match self.filename.is_empty()
		{	true => write!(f, "{}", message),
			false => write!(f, "{}({}): {}", self.filename, self.n_line, message),
		}
	}
}
impl Error for LemonMintError {}
173
/// Convenience alias: every grammar-parsing / table-building step reports `LemonMintError`.
type ParserResult<T> = Result<T, LemonMintError>;
175
/// Kind of statement read from the y-grammar: either a production rule line
/// or one of the supported %directives (listed in the module docs).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum Directive
{	Rule,          // a production rule line, not a %directive
	TokenType,     // %token_type
	Type,          // %type
	DefaultType,   // %default_type
	StartSymbol,   // %start_symbol
	Trace,         // %trace
	ExtraArgument, // %extra_argument
	Left,          // %left
	Right,         // %right
	Nonassoc,      // %nonassoc
	Fallback,      // %fallback
	Code,          // %code or %include (synonyms)
}
191
/// Whether a grammar symbol is a terminal (token) or a nonterminal.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum SymbolType
{	TERMINAL,
	NONTERMINAL
}

/// Operator associativity attached to a terminal by %left / %right / %nonassoc.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
enum Associativity
{	LEFT,
	RIGHT,
	NONASSOC,
	UNKNOWN // no precedence declaration was given for the symbol
}
205
/// A fixed-capacity bit set over symbol indices, used for first-sets and
/// follow-sets during table construction.
#[derive(Debug)]
struct Set
{	data: Vec<bool>,
}
impl Set
{	/// Creates a set that can hold elements `0 .. size`.
	pub fn new(size: usize) -> Self
	{	let mut this = Self {data: Vec::new()};
		if size > 0
		{	this.set_size(size);
		}
		this
	}

	/// Capacity of the set (number of representable elements), NOT the population count.
	pub fn len(&self) -> usize
	{	self.data.len()
	}

	/// Clears the set and resizes it to hold elements `0 .. size`.
	pub fn set_size(&mut self, size: usize)
	{	self.data.clear();
		self.data.resize(size, false);
	}

	/// True if `element` is in the set. Out-of-range elements are reported as absent.
	pub fn contains(&self, element: usize) -> bool
	{	*self.data.get(element).unwrap_or(&false)
	}

	/// Inserts `element`, returning true if the set changed (the element was new).
	/// Out-of-range elements are ignored and reported as unchanged.
	pub fn add(&mut self, element: usize) -> bool
	{	match self.data.get_mut(element)
		{	Some(v) =>
			{	let is_new = !*v;
				*v = true;
				is_new
			}
			// Fixed: previously an out-of-range add reported "changed" (true)
			// without actually adding anything, which could mislead the
			// change-driven propagation loops that call this.
			None => false,
		}
	}

	/// Merges `other` into `self` — a set UNION despite the name — returning
	/// true if `self` gained at least one element.
	pub fn intersect(&mut self, other: &Set) -> bool
	{	let mut changed = false;
		for (i, v) in self.data.iter_mut().enumerate()
		{	if !*v && other.contains(i)
			{	changed = true;
				*v = true;
			}
		}
		changed
	}
}
251
/// Terminal or nonterminal symbol from the grammar.
#[derive(Debug)]
struct Symbol
{	name: Arc<String>,             // Name of the symbol (left empty by new(); filled in by SymbolsBuilder::into_symbols)
	index: usize,                  // Index number for this symbol
	typ: SymbolType,               // Symbols are all either TERMINALS or NTs
	sym_rules_arr: Vec<usize>,     // Array of rules of this (if an NT) (indices in rules array)
	fallback_index: usize,         // fallback token in case this token doesn't parse (usize::MAX = no fallback)
	prec: i32,                     // Precedence if defined (-1 otherwise)
	assoc: Associativity,          // Associativity if precedence is defined
	firstset: Set,                 // First-set for all rules of this symbol
	lambda: bool,                  // True if NT and can generate an empty string
	data_type: String,             // The data type of information held by this object. Only used if type==NONTERMINAL
	dtnum: usize,                  // The data type number. In the parser, the value stack is a union. The .Symbol%d element of this union is the correct data type for this object
}
impl Symbol
{	/// Creates a blank symbol record. `name` is only used here to classify the
	/// symbol as TERMINAL or NONTERMINAL; the stored name starts empty and is
	/// assigned later (see SymbolsBuilder::into_symbols).
	fn new(name: &str, index: usize) -> Self
	{	// "$" (end-of-input) contains no lowercase letters, so the extra
		// name=="$" check is redundant but harmless.
		let typ = if is_terminal_name(name) || name=="$" {SymbolType::TERMINAL} else {SymbolType::NONTERMINAL};
		Self
		{	name: Arc::new(String::new()),
			index,
			typ,
			sym_rules_arr: Vec::new(),
			fallback_index: std::usize::MAX,
			prec: -1,
			assoc: Associativity::UNKNOWN,
			firstset: Set::new(0),
			lambda: false,
			data_type: String::new(),
			dtnum: 0,
		}
	}
}
impl PartialEq for Symbol
{	// Symbols are identified by index alone; all other fields are ignored.
	fn eq(&self, other: &Self) -> bool
	{	self.index == other.index
	}
}
290
/// One symbol on the right-hand side of a rule.
#[derive(Debug)]
struct Rhs
{	name: Arc<String>, // symbol name
	index: usize,      // symbol index (nonterminal indices get shifted by n_terminals in into_symbols)
	alias: String,     // the value-binding alias, e.g. "a" in `Expr(a)` — presumably empty when absent (set by grammar parsing code not shown here)
}
297
/// Each production rule in the grammar is stored in this structure.
#[derive(Debug)]
struct Rule
{	rule_filename: Arc<String>,    // Grammar source name the rule came from (for error messages)
	rule_n_line: usize,            // Line number for the rule
	lhs: Arc<String>,              // Left-hand side of the rule
	lhs_index: usize,              // Left-hand side of the rule (index in symbols)
	lhs_start: bool,               // True if left-hand side is the start symbol
	rhs: Vec<Rhs>,                 // The RHS symbols
	code: String,                  // The code executed when this rule is reduced
	precsym_index: usize,          // Precedence symbol for this rule (index in symbols; usize::MAX = none)
	index: usize,                  // An index number for this rule
	can_reduce: bool,              // True if this rule is ever reduced
	does_reduce: bool,             // Reduce actions occur after optimization
}
impl Rule
{	/// Creates a rule with the given left-hand side and action code.
	/// The RHS starts empty (see `rhs`).
	fn new(rule_filename: &Arc<String>, rule_n_line: usize, lhs: &Arc<String>, lhs_index: usize, index: usize, code: String) -> Self
	{	Self
		{	rule_filename: Arc::clone(rule_filename),
			rule_n_line,
			lhs: Arc::clone(lhs),
			lhs_index,
			lhs_start: false,
			rhs: Vec::new(),
			code,
			precsym_index: std::usize::MAX,
			index,
			can_reduce: false,
			does_reduce: false,
		}
	}
}
impl fmt::Display for Rule
{	/// Write text on "out" that describes the rule, e.g. `Expr ::= Expr PLUS Expr`.
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result
	{	write!(f, "{} ::=", self.lhs)?;
		for sp in &self.rhs
		{	write!(f, " {}", sp.name)?;
		}
		Ok(())
	}
}
340
341#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
342struct ConfigKey
343{	n_rule: usize,
344	dot: usize,
345}
346impl ConfigKey
347{	fn new(config: &Config) -> Self
348	{	Self
349		{	n_rule: config.n_rule,
350			dot: config.dot,
351		}
352	}
353}
354impl cmp::Ord for ConfigKey
355{	fn cmp(&self, other: &Self) -> cmp::Ordering
356	{	let mut res = self.n_rule.cmp(&other.n_rule);
357		if res == Ordering::Equal
358		{	res = self.dot.cmp(&other.dot);
359		}
360		res
361	}
362}
363impl cmp::PartialOrd for ConfigKey
364{	fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering>
365	{	Some(self.cmp(other))
366	}
367}
368
/// A configuration is a production rule of the grammar together with
/// a mark (dot) showing how much of that rule has been processed so far.
/// Configurations also contain a follow-set which is a list of terminal
/// symbols which are allowed to immediately follow the end of the rule.
/// Every configuration is recorded as an instance of this structure.
#[derive(Debug)]
struct Config
{	n_rule: usize,                  // The rule upon which the configuration is based
	dot: usize,                     // The parse point
	fws: Set,                       // Follow-set for this configuration only
	fplp: Vec<Rc<RefCell<Config>>>, // Follow-set forward propagation links
	bplp: Vec<Rc<RefCell<Config>>>, // Follow-set backwards propagation links
	n_state: usize,                 // Pointer to state which contains this (usize::MAX until assigned)
	status_is_complete: bool,       // used during followset and shift computations
}
impl Config
{	/// Creates a configuration for rule `n_rule` with the dot at `dot` and an
	/// empty follow-set of `fws_size` bits; no links, not yet in any state.
	pub fn new(n_rule: usize, dot: usize, fws_size: usize) -> Self
	{	Self
		{	n_rule,
			dot,
			fws: Set::new(fws_size),
			fplp: Vec::new(),
			bplp: Vec::new(),
			n_state: std::usize::MAX,
			status_is_complete: false,
		}
	}
}
// Ordering and equality consider only the (rule, dot) identity, via ConfigKey;
// follow-sets and propagation links are deliberately ignored.
impl cmp::Ord for Config
{	fn cmp(&self, other: &Self) -> cmp::Ordering
	{	ConfigKey::new(self).cmp(&ConfigKey::new(other))
	}
}
impl cmp::PartialOrd for Config
{	fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering>
	{	Some(self.cmp(other))
	}
}
impl cmp::PartialEq for Config
{	fn eq(&self, other: &Self) -> bool
	{	ConfigKey::new(self).eq(&ConfigKey::new(other))
	}
}
impl Eq for Config {}
413
/// The kind of a parser action. Variant order matters: the derived `Ord` is
/// used as a tie-breaker when sorting actions (see `impl cmp::Ord for Action`).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)]
enum ActionType
{	Shift,      // shift: the Action's x is StateOrRule::State
	Accept,
	Reduce,     // reduce: the Action's x is StateOrRule::Rule
	Error,
	SsConflict,  // A shift/shift conflict
	SrConflict,  // Was a reduce, but part of a conflict
	RrConflict,  // Was a reduce, but part of a conflict
	ShResolved, // Was a shift.  Precedence resolved conflict
	RdResolved, // Was reduce.  Precedence resolved conflict
	NotUsed,    // Deleted by compression
	ShiftReduce // Shift first, then reduce
}
428
/// The target of an action: the destination state of a shift, or the rule of a reduce.
#[derive(Debug, PartialEq, Copy, Clone)]
enum StateOrRule
{	State(usize), // n_state: usize - The new state, if a shift
	Rule(usize),  // The rule, if a reduce
	EmptyRule     // Empty reduce rule for "accepting" token
}
435
436/// Every shift or reduce operation is stored as one of this structure.
437#[derive(Debug)]
438struct Action
439{	id: i32,
440	symbol_index: usize,     // The look-ahead symbol
441	typ: ActionType,
442	x: StateOrRule,
443	symbol_index_opt: usize, // ActionType::ShiftReduce optimization to this symbol
444}
445impl Action
446{	fn new_state(id: i32, symbol_index: usize, n_state: usize) -> Action
447	{	Action
448		{	id,
449			symbol_index,
450			typ: ActionType::Shift,
451			x: StateOrRule::State(n_state),
452			symbol_index_opt: std::usize::MAX,
453		}
454	}
455
456	fn new_rule(id: i32, symbol_index: usize, typ: ActionType, n_rule: usize) -> Action
457	{	Action
458		{	id,
459			symbol_index,
460			typ,
461			x: StateOrRule::Rule(n_rule),
462			symbol_index_opt: std::usize::MAX,
463		}
464	}
465
466	fn new_empty_rule(id: i32, symbol_index: usize, typ: ActionType) -> Action
467	{	Action
468		{	id,
469			symbol_index,
470			typ,
471			x: StateOrRule::EmptyRule,
472			symbol_index_opt: std::usize::MAX,
473		}
474	}
475}
476impl cmp::Ord for Action
477{	fn cmp(&self, other: &Self) -> cmp::Ordering
478	{	let index = self.symbol_index;
479		let other_index = other.symbol_index;
480		let mut res = index.cmp(&other_index);
481		if res == Ordering::Equal
482		{	res = self.typ.cmp(&other.typ);
483			if res == Ordering::Equal
484			{	if self.typ==ActionType::Reduce || self.typ==ActionType::ShiftReduce
485				{	if let StateOrRule::Rule(n_rule) = self.x
486					{	if let StateOrRule::Rule(other_n_rule) = other.x
487						{	if n_rule < other_n_rule
488							{	return Ordering::Less;
489							}
490							if n_rule > other_n_rule
491							{	return Ordering::Greater;
492							}
493						}
494					}
495				}
496				res = self.id.cmp(&other.id);
497			}
498		}
499		res
500	}
501}
502impl cmp::PartialOrd for Action
503{	fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering>
504	{	Some(self.cmp(other))
505	}
506}
507impl cmp::PartialEq for Action
508{	fn eq(&self, other: &Self) -> bool
509	{	self.cmp(other) == Ordering::Equal
510	}
511}
512impl Eq for Action {}
513
/// Each state of the generated parser's finite state machine is encoded as an instance of this structure.
#[derive(Debug)]
struct State
{	basis: Vec<Rc<RefCell<Config>>>,          // The basis configurations for this state
	configurations: Vec<Rc<RefCell<Config>>>, // All configurations in this set
	n_state: usize,                    // Sequential number for this state
	actions: Vec<Rc<RefCell<Action>>>, // Array of actions for this state
	n_token_act: usize,
	n_nt_act: usize,                   // Number of actions on terminals and nonterminals
	token_offset: isize,
	nt_offset: isize,                  // yy_action[] offset for terminals and nonterms
	default_reduce_action: usize,      // Default action is to REDUCE by this rule
	default_reduce_rule: usize,        // The default REDUCE rule (usize::MAX = none)
	auto_reduce: bool,
}
impl State
{	/// Creates state number `n_state` from its basis and full configuration sets.
	/// Actions, counts and offsets start empty/zeroed here and are filled in later.
	pub fn new(n_state: usize, basis: Vec<Rc<RefCell<Config>>>, configurations: Vec<Rc<RefCell<Config>>>) -> Self
	{	Self
		{	basis,
			configurations,
			n_state,
			actions: Vec::new(),
			n_token_act: 0,
			n_nt_act: 0,
			token_offset: 0,
			nt_offset: 0,
			default_reduce_action: 0,
			default_reduce_rule: std::usize::MAX,
			auto_reduce: false,
		}
	}
}
546
/// Each state contains a set of token transaction and a set of
/// nonterminal transactions.  Each of these sets makes an instance
/// of the following structure.  An array of these structures is used
/// to order the creation of entries in the `yy_action[]` table.
#[derive(Debug)]
struct AxSet
{	n_state: usize, // Index in sorted states array
	is_tkn: bool,    // True to use tokens.  False for non-terminals
	n_action: usize,   // Number of actions
	i_order: usize,    // Original order of action sets
}
impl AxSet
{	/// Records that state `n_state` has `n_action` actions of the given class
	/// (tokens vs nonterminals); `i_order` preserves insertion order for sorting.
	fn new(n_state: usize, is_tkn: bool, n_action: usize, i_order: usize) -> Self
	{	Self
		{	n_state,
			is_tkn,
			n_action,
			i_order,
		}
	}
}
568impl cmp::Ord for AxSet
569{	fn cmp(&self, other: &Self) -> cmp::Ordering
570	{	let mut res = other.n_action.cmp(&self.n_action); // descending order
571		if res == Ordering::Equal
572		{	res = self.i_order.cmp(&other.i_order);
573		}
574		res
575	}
576}
577impl cmp::PartialOrd for AxSet
578{	fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering>
579	{	Some(self.cmp(other))
580	}
581}
582impl cmp::PartialEq for AxSet
583{	fn eq(&self, other: &Self) -> bool
584	{	self.cmp(other) == Ordering::Equal
585	}
586}
587impl Eq for AxSet {}
588
589/// The state of the yy_action table under construction is an instance of
590/// the following structure.
591///
592/// The yy_action table maps the pair (state_number, lookahead) into an
593/// action_number.  The table is an array of integers pairs.  The state_number
594/// determines an initial offset into the yy_action array.  The lookahead
595/// value is then added to this initial offset to get an index X into the
/// yy_action array. If the `aAction[X]`.lookahead equals the value of
/// the lookahead input, then the value of the action_number output is
598/// `aAction[X].action`.  If the lookaheads do not match then the
599/// default action for the state_number is returned.
600///
601/// All actions associated with a single state_number are first entered
602/// into `aLookahead[]` using multiple calls to acttab_action().  Then the
603/// actions for that single state_number are placed into the `aAction[]`
604/// array with a single call to acttab_insert().  The acttab_insert() call
605/// also resets the `aLookahead[]` array in preparation for the next
606/// state number.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
struct LookaheadAction
{	lookahead: usize, // Value of the lookahead token
	action: usize,    // Action to take on the given lookahead
}
impl LookaheadAction
{	/// Pairs a lookahead token value with the action to take on it.
	fn new(lookahead: usize, action: usize) -> Self
	{	Self {lookahead, action}
	}
}
620
#[derive(Debug)]
struct ActTab
{	n_action: usize,                   // Number of used slots in aAction[]
	a_action: Vec<LookaheadAction>,    // The yy_action[] table under construction; lookahead == usize::MAX marks an empty slot
	a_lookahead: Vec<LookaheadAction>, // A single new transaction set
	min_lookahead: usize,              // Minimum aLookahead[].lookahead
	max_lookahead: usize,              // Maximum aLookahead[].lookahead
	min_action: usize,                 // Action associated with mnLookahead

	n_terminals: usize,                // Number of terminal symbols
	n_symbols: usize,                  // Total number of symbols

	min_token_offset: isize,           // NOTE(review): offset bounds appear to be maintained by emission code not shown here — confirm
	max_token_offset: isize,
	min_nt_offset: isize,
	max_nt_offset: isize,

	n_fallbacks: usize,
	n_shift_offset: usize,
	reduce_count: usize,
}
impl ActTab
{	/// Creates an empty action-table builder for a grammar with `n_terminals`
	/// terminal symbols out of `n_symbols` total symbols.
	fn new(n_terminals: usize, n_symbols: usize) -> Self
	{	Self
		{	n_action: 0,
			a_action: Vec::new(),
			a_lookahead: Vec::new(),
			min_lookahead: 0,
			max_lookahead: 0,
			min_action: 0,

			n_terminals,
			n_symbols,

			min_token_offset: 0,
			max_token_offset: 0,
			min_nt_offset: 0,
			max_nt_offset: 0,

			n_fallbacks: 0,
			n_shift_offset: 0,
			reduce_count: 0,
		}
	}

	/// Number of used slots in aAction[], trailing empty slots included.
	fn get_n_lookahead(&self) -> usize
	{	self.n_action
	}

	/// Number of used slots in aAction[] with trailing empty slots trimmed off.
	fn get_n_actions(&self) -> usize
	{	let mut n_action = self.n_action;
		// Walk backwards over trailing holes (lookahead == usize::MAX marks an empty slot).
		for rec in  self.a_action.iter().take(n_action).rev()
		{	if rec.lookahead == std::usize::MAX
			{	n_action -= 1;
			}
			else
			{	break;
			}
		}
		n_action
	}

	/// Iterates over the used portion of aAction[] (holes included).
	fn iter(&self) -> Take<Iter<LookaheadAction>>
	{	self.a_action.iter().take(self.n_action)
	}

	/// Add a new action to the current transaction set.
	/// This routine is called once for each lookahead for a particular state.
	fn acttab_action(&mut self, lookahead: usize, action: usize)
	{	if self.a_lookahead.len() == 0
		{	// First entry of the transaction set: initialize the min/max trackers.
			self.max_lookahead = lookahead;
			self.min_lookahead = lookahead;
			self.min_action = action;
		}
		else
		{	if self.max_lookahead < lookahead
			{	self.max_lookahead = lookahead;
			}
			if self.min_lookahead > lookahead
			{	self.min_lookahead = lookahead;
				self.min_action = action; // min_action tracks the action at the minimum lookahead
			}
		}
		self.a_lookahead.push(LookaheadAction::new(lookahead, action));
	}

	/// Add the transaction set built up with prior calls to acttab_action() into the current action table.
	/// Then reset the transaction set back to an empty set in preparation for a new round of acttab_action() calls.
	///
	/// Return the offset into the action table of the new transaction.
	fn acttab_insert(&mut self, make_it_safe: bool) -> isize
	{	assert!(self.a_lookahead.len() > 0);

		// Make sure we have enough space to hold the expanded action table in the worst case.  The worst case occurs if the transaction set
		// must be appended to the current action table
		if self.n_action + self.n_symbols + 1 >= self.a_action.len()
		{	self.a_action.resize(self.n_action + self.n_symbols + self.a_action.len() + 21, LookaheadAction::new(std::usize::MAX, std::usize::MAX));
		}

		// Scan the existing action table looking for an offset that is a duplicate of the current transaction set.  Fall out of the loop
		// if and when the duplicate is found.
		//
		// i is the index in aAction[] where mnLookahead is inserted.
		let mut found_offset = std::usize::MAX;
		let end = if make_it_safe {self.min_lookahead} else {0};
'l:		for i in (end .. self.n_action).rev()
		{	if self.a_action[i].lookahead == self.min_lookahead
			{	// All lookaheads and actions in the aLookahead[] transaction must match against the candidate aAction[i] entry.
				if self.a_action[i].action == self.min_action
				{	for j in 0..self.a_lookahead.len()
					{	let k = self.a_lookahead[j].lookahead + i - self.min_lookahead;
						if k>=self.n_action || self.a_lookahead[j].lookahead != self.a_action[k].lookahead || self.a_lookahead[j].action != self.a_action[k].action
						{	continue 'l;
						}
					}
					// No possible lookahead value that is not in the aLookahead[] transaction is allowed to match aAction[i]
					let mut n = 0;
					for j in 0..self.n_action
					{	if self.a_action[j].lookahead != std::usize::MAX
						{	// wrapping_sub: when i > j + min_lookahead the subtraction
							// wraps to a huge value that can never equal a lookahead,
							// i.e. "no match" instead of an underflow panic.
							if self.a_action[j].lookahead == (j + self.min_lookahead).wrapping_sub(i)
							{	n += 1;
							}
						}
					}
					if n == self.a_lookahead.len()
					{	found_offset = i;  // An exact match is found at offset i
						break;
					}
				}
			}
		}

		// If no existing offsets exactly match the current transaction, find an an empty offset in the aAction[] table in which we can add the aLookahead[] transaction.
		if found_offset == std::usize::MAX
		{	// Look for holes in the aAction[] table that fit the current aLookahead[] transaction.  Leave found_offset set to the offset of the hole.
			// If no holes are found, found_offset is left at nAction, which means the transaction will be appended.
			found_offset = if make_it_safe {self.min_lookahead} else {0};
'm:			for i in found_offset .. self.a_action.len() - self.max_lookahead
			{	if self.a_action[i].lookahead == std::usize::MAX
				{	for j in 0 .. self.a_lookahead.len()
					{	let k = self.a_lookahead[j].lookahead + i - self.min_lookahead;
						if k >= self.a_action.len() || self.a_action[k].lookahead != std::usize::MAX
						{	continue 'm;
						}
					}
					for j in 0 .. self.n_action
					{	if self.a_action[j].lookahead == (j + self.min_lookahead).wrapping_sub(i)
						{	continue 'm;
						}
					}
					found_offset = i; // Fits in empty slots
					break;
				}
			}
		}

		// Insert transaction set at index found_offset
		for j in 0 .. self.a_lookahead.len()
		{	let k = self.a_lookahead[j].lookahead - self.min_lookahead + found_offset;
			self.a_action[k] = self.a_lookahead[j];
			if k >= self.n_action
			{	self.n_action = k + 1;
			}
		}
		if make_it_safe && found_offset+self.n_terminals >= self.n_action
		{	self.n_action = found_offset+self.n_terminals+1;
		}
		self.a_lookahead.clear();

		// Return the offset that is added to the lookahead in order to get the index into yy_action of the action
		return found_offset as isize - self.min_lookahead as isize;
	}
}
794
/// Accumulates symbols while the grammar is being read; converted into the
/// final, index-sorted `Symbols` table by `into_symbols()`.
#[derive(Debug)]
struct SymbolsBuilder
{	symbols_map: HashMap<Arc<String>, Symbol>, // symbols discovered so far, keyed by name
	n_terminals: usize,    // terminals counted so far (each new terminal takes the next terminal-local index)
	n_nonterminals: usize, // nonterminals counted so far (their indices are shifted by n_terminals in into_symbols)
}
impl SymbolsBuilder
{	/// Creates a builder. Unless `empty`, the "$" end-of-input terminal is
	/// pre-registered, so it always gets terminal index 0.
	pub fn new(empty: bool) -> Self
	{	let mut this = Self
		{	symbols_map: HashMap::new(),
			n_terminals: 0,
			n_nonterminals: 0,
		};
		if !empty
		{	this.add(&Arc::new("$".to_string()));
		}
		this
	}

	/// Return the index of the (terminal or nonterminal) symbol `symbol_name` within its
	/// class — terminals and nonterminals are numbered independently here; nonterminal
	/// indices are shifted by n_terminals later, in into_symbols().
	/// Creates a new symbol record if this is the first time the name has been seen.
	pub fn add(&mut self, symbol_name: &Arc<String>) -> usize
	{	if let Some(symbol) = self.symbols_map.get(symbol_name)
		{	symbol.index
		}
		else
		{	let index = if is_terminal_name(symbol_name.as_ref()) || symbol_name.as_ref()=="$"
			{	self.n_terminals += 1;
				self.n_terminals - 1
			}
			else
			{	self.n_nonterminals += 1;
				self.n_nonterminals - 1
			};
			let symbol = Symbol::new(symbol_name, index); // need to set the name later in into_symbols()
			self.symbols_map.insert(Arc::clone(symbol_name), symbol);
			index
		}
	}

	/// Looks up a symbol by name. Panics if the symbol was never added.
	pub fn get(&self, symbol_name: &Arc<String>) -> &Symbol
	{	&self.symbols_map[symbol_name]
	}

	/// Mutable lookup by name. Panics if the symbol was never added.
	pub fn get_mut(&mut self, symbol_name: &Arc<String>) -> &mut Symbol
	{	self.symbols_map.get_mut(symbol_name).unwrap()
	}

	/// Consumes the builder and produces the final `Symbols` table:
	/// applies %type declarations and precedences, shifts all nonterminal
	/// indices up by n_terminals (so terminals occupy 0..n_terminals),
	/// adds the "{default}" pseudo-symbol, and sorts symbols by index.
	pub fn into_symbols(mut self, start_name: &String, nonterminal_types: HashMap<String, StringInFile>, precedence: HashMap<String, PrecedenceInFile>, rules: &mut Vec<Rule>) -> ParserResult<Symbols>
	{	// nonterminal_types
		for (symbol_name, symbol_type) in nonterminal_types
		{	if let Some(symbol) = self.symbols_map.get_mut(&symbol_name)
			{	symbol.data_type = symbol_type.string;
			}
			else
			{	return Err(LemonMintError::new(&symbol_type.filename, symbol_type.n_line, format!("No such nonterminal symbol when defining symbol type: \"{}\"", symbol_name)));
			}
		}
		// precedence
		for (symbol_name, precedence) in precedence
		{	if let Some(symbol) = self.symbols_map.get_mut(&symbol_name)
			{	symbol.prec = precedence.precedence;
				symbol.assoc = precedence.assoc;
			}
			else
			{	return Err(LemonMintError::new(&precedence.filename, precedence.n_line, format!("No such terminal symbol \"{}\" when defining precedence", symbol_name)));
			}
		}
		// convert rules: rule LHS is always a nonterminal, so shift it; shift RHS entries only when they are nonterminals
		for rule in rules
		{	rule.lhs_index += self.n_terminals;
			for rhs in rule.rhs.iter_mut()
			{	if self.symbols_map[&rhs.name].typ == SymbolType::NONTERMINAL
				{	rhs.index += self.n_terminals;
				}
			}
		}
		// into Symbols
		// n_symbols is captured BEFORE "{default}" is inserted, so the pseudo-symbol
		// is excluded from the count — appears intentional; confirm against users of n_symbols.
		let n_symbols = self.symbols_map.len();
		let default_symbol = self.add(&Arc::new("{default}".to_string()));
		let default_symbol_index = default_symbol + self.n_terminals;
		let mut start_symbol_index = std::usize::MAX;
		let mut error_symbol_index = std::usize::MAX;
		let mut array = Vec::with_capacity(self.symbols_map.len());
		for (symbol_name, mut symbol) in self.symbols_map
		{	if symbol.typ == SymbolType::NONTERMINAL
			{	symbol.index += self.n_terminals;
				if symbol_name.as_ref() == start_name
				{	start_symbol_index = symbol.index;
				}
				else if symbol_name.as_ref() == "error"
				{	error_symbol_index = symbol.index;
				}
			}
			else if symbol.index == 0
			{	symbol.typ = SymbolType::NONTERMINAL; // like in lemon
			}
			symbol.name = Arc::clone(&symbol_name); // finally assign the real name (left empty by Symbol::new)
			array.push(symbol);
		}
		array.sort_by(|a, b| a.index.cmp(&b.index));
		Ok(Symbols {array, n_symbols, n_terminals: self.n_terminals, default_symbol_index, start_symbol_index, error_symbol_index})
	}
}
898
/// The finalized symbol table: terminals first (indices 0..n_terminals), then nonterminals.
#[derive(Debug)]
struct Symbols
{	pub array: Vec<Symbol>, // Sorted array of all symbols
	pub n_symbols: usize,                // Number of terminal and nonterminal symbols (excludes the "{default}" pseudo-symbol; see into_symbols)
	pub n_terminals: usize,              // Number of terminal symbols
	pub start_symbol_index: usize,
	pub default_symbol_index: usize,
	pub error_symbol_index: usize,       // usize::MAX when the grammar declares no "error" nonterminal
}
impl Symbols
{	/// The start nonterminal. Panics if no start symbol was resolved.
	pub fn get_start_symbol(&self) -> &Symbol
	{	&self.array[self.start_symbol_index]
	}

	/// The special "error" nonterminal, or None when absent (the index is
	/// usize::MAX then, so the bounds-checked `get` returns None).
	pub fn get_error_symbol(&self) -> Option<&Symbol>
	{	self.array.get(self.error_symbol_index)
	}
}
917
/// The complete set of parser states, sorted by state number.
#[derive(Debug)]
struct States
{	pub array: Vec<State>,
	pub n_no_tail: usize, // Number of states with tail degenerate states removed
}
923
/// Stateless namespace for the LR state-construction routines.
#[derive(Debug)]
struct StatesBuilder;
926impl StatesBuilder
927{	/// Compute all LR(0) states for the grammar. Links are added to between some states so that the LR(1) follow sets can be computed later.
928	pub fn build(symbols: &Symbols, rules: &mut Vec<Rule>, start: &Symbol, n_terminals: usize, actions_enum: &mut i32) -> ParserResult<States>
929	{	let mut configs_basis_keys_arr = Vec::new();
930		let mut configs_basis_arr = Vec::new();
931		let mut configs_arr = Vec::new();
932		let mut configs_map = HashMap::new();
933		let mut states_map = HashMap::new();
934
935		// The basis configuration set for the first state is all rules which have the start symbol as their left-hand side
936		for n_rule in start.sym_rules_arr.iter()
937		{	rules[*n_rule].lhs_start = true;
938			Self::configlist_add(rules, &mut configs_basis_keys_arr, &mut configs_basis_arr, &mut configs_arr, &mut configs_map, n_terminals, *n_rule, 0, true).borrow_mut().fws.add(0);
939		}
940
941		// Compute the first state. All other states will be computed automatically during the computation of the first one.
942		// The returned pointer to the first state is not used.
943		Self::getstate(symbols, rules, &mut configs_basis_keys_arr, &mut configs_basis_arr, &mut configs_arr, &mut configs_map, &mut states_map, n_terminals, actions_enum)?;
944
945		let mut states = States {array: Vec::with_capacity(states_map.len()), n_no_tail: 0}; // Table of states sorted by state number
946		for (_, elem) in states_map
947		{	states.array.push(Rc::try_unwrap(elem).unwrap().into_inner());
948		}
949		states.array.sort_by(|a, b| a.n_state.cmp(&b.n_state));
950		states.n_no_tail = states.array.len();
951		Ok(states)
952	}
953
954	/// Return a pointer to a state which is described by the configuration list which has been built from calls to configlist_add.
955	fn getstate
956	(	symbols: &Symbols,
957		rules: &Vec<Rule>,
958		configs_basis_keys_arr: &mut Vec<ConfigKey>,
959		configs_basis_arr: &mut Vec<Rc<RefCell<Config>>>,
960		configs_arr: &mut Vec<Rc<RefCell<Config>>>,
961		configs_map: &mut HashMap<ConfigKey, Rc<RefCell<Config>>>,
962		states_map: &mut HashMap<Vec<ConfigKey>, Rc<RefCell<State>>>,
963		n_terminals: usize,
964		actions_enum: &mut i32,
965	) -> ParserResult<usize>
966	{	// Extract the sorted basis of the new state.  The basis was constructed by prior calls to "configlist_add(*, *, *, true)".
967		let mut basis_keys = mem::replace(configs_basis_keys_arr, Vec::new());
968		let mut basis = mem::replace(configs_basis_arr, Vec::new());
969		basis_keys.sort();
970		basis.sort();
971
972		// Find a state with the same basis
973		if let Some(found) = states_map.get(&basis_keys)
974		{	let found = found.borrow();
975			let n_state = found.n_state;
976			// A state with the same basis already exists!  Copy all the follow-set propagation links from the state under construction into the
977			// preexisting state, then return a pointer to the preexisting state
978			for (i, x) in basis.iter().enumerate()
979			{	let mut y = found.basis[i].borrow_mut();
980				if let Ok(mut x) = x.try_borrow_mut()
981				{	mem::swap(&mut x.bplp, &mut y.bplp);
982					y.bplp.reserve(x.bplp.len());
983					for config in x.bplp.iter()
984					{	y.bplp.push(Rc::clone(config));
985					}
986				}
987				else
988				{	// assume x and y are the same object
989					let len = y.bplp.len();
990					y.bplp.reserve(len);
991					for i in 0 .. len
992					{	let o = Rc::clone(&y.bplp[i]);
993						y.bplp.push(o);
994					}
995				}
996			}
997			configs_arr.clear();
998			Ok(n_state)
999		}
1000		else
1001		{	// This really is a new state.  Construct all the details
1002			Self::configlist_closure(symbols, rules, configs_basis_keys_arr, configs_basis_arr, configs_arr, configs_map, n_terminals)?; // Compute the configuration closure
1003			let n_state = states_map.len();
1004			let mut configurations = mem::replace(configs_arr, Vec::new());
1005			configurations.sort(); // Sort the configuration closure
1006			let stp = State::new // A new state structure
1007			(	n_state, // Every state gets a sequence number
1008				basis, // Remember the configuration basis
1009				configurations // Remember the configuration closure
1010			);
1011			let stp = Rc::new(RefCell::new(stp));
1012			states_map.insert(basis_keys, Rc::clone(&stp)); // Add to the state table
1013			let actions = Self::buildshifts(symbols, rules, configs_basis_keys_arr, configs_basis_arr, configs_arr, configs_map, states_map, n_terminals, actions_enum, &stp)?; // Recursively compute successor states
1014			stp.borrow_mut().actions = actions;
1015			Ok(n_state)
1016		}
1017	}
1018
1019	/// Construct all successor states to the given state.  A "successor" state is any state which can be reached by a shift action.
1020	fn buildshifts
1021	(	symbols: &Symbols,
1022		rules: &Vec<Rule>,
1023		configs_basis_keys_arr: &mut Vec<ConfigKey>,
1024		configs_basis_arr: &mut Vec<Rc<RefCell<Config>>>,
1025		configs_arr: &mut Vec<Rc<RefCell<Config>>>,
1026		configs_map: &mut HashMap<ConfigKey, Rc<RefCell<Config>>>,
1027		states_map: &mut HashMap<Vec<ConfigKey>, Rc<RefCell<State>>>,
1028		n_terminals: usize,
1029		actions_enum: &mut i32,
1030		stp: &Rc<RefCell<State>>
1031	) -> ParserResult<Vec<Rc<RefCell<Action>>>>
1032	{	let mut actions = Vec::new();
1033
1034		// Each configuration becomes complete after it contibutes to a successor state.  Initially, all configurations are incomplete
1035		for cfp in stp.borrow().configurations.iter()
1036		{	cfp.borrow_mut().status_is_complete = false;
1037		}
1038
1039		// Loop through all configurations of the state "stp"
1040		for (i, cfp) in stp.borrow().configurations.iter().enumerate()
1041		{	let (status_is_complete, dot, n_rule) =
1042			{	let cfp = cfp.borrow();
1043				(cfp.status_is_complete, cfp.dot, cfp.n_rule)
1044			};
1045			if !status_is_complete && dot<rules[n_rule].rhs.len() // !Already used by inner loop && !Can't shift this config
1046			{	configs_basis_keys_arr.clear();
1047				configs_basis_arr.clear();
1048				configs_arr.clear();
1049				configs_map.clear();
1050				let sp = rules[n_rule].rhs[dot].index; // Symbol following the dot in configuration "cfp"
1051
1052				// For every configuration in the state "stp" which has the symbol "sp" following its dot, add the same configuration to the basis set under
1053				// construction but with the dot shifted one symbol to the right.
1054				for bcfp in stp.borrow().configurations.iter().skip(i)
1055				{	if !bcfp.borrow().status_is_complete && bcfp.borrow().dot<rules[bcfp.borrow().n_rule].rhs.len() // !Already used && !Can't shift this one
1056					{	let bsp = rules[bcfp.borrow().n_rule].rhs[bcfp.borrow().dot].index; // Symbol following the dot in configuration "bcfp"
1057						if bsp == sp // Must be same as for "cfp"
1058						{	bcfp.borrow_mut().status_is_complete = true; // Mark this config as used
1059							let newcfg = Self::configlist_add(rules, configs_basis_keys_arr, configs_basis_arr, configs_arr, configs_map, n_terminals, bcfp.borrow().n_rule, bcfp.borrow().dot+1, true);
1060							newcfg.borrow_mut().bplp.push(Rc::clone(bcfp));
1061						}
1062					}
1063				}
1064
1065				// Get a pointer to the state described by the basis configuration set constructed in the preceding loop
1066				let n_state = Self::getstate(symbols, rules, configs_basis_keys_arr, configs_basis_arr, configs_arr, configs_map, states_map, n_terminals, actions_enum)?; // A pointer to a successor state
1067
1068				// The state "newstp" is reached from the state "stp" by a shift action on the symbol "sp"
1069				actions.insert(0, Rc::new(RefCell::new(Action::new_state(*actions_enum, sp, n_state)))); // TODO: maybe push(), not insert()
1070				*actions_enum += 1;
1071			}
1072		}
1073		Ok(actions)
1074	}
1075
1076	/// Add another configuration to the configuration list.
1077	/// dot - Index into the RHS of the rule where the dot goes
1078	fn configlist_add
1079	(	rules: &Vec<Rule>,
1080		configs_basis_keys_arr: &mut Vec<ConfigKey>,
1081		configs_basis_arr: &mut Vec<Rc<RefCell<Config>>>,
1082		configs_arr: &mut Vec<Rc<RefCell<Config>>>,
1083		configs_map: &mut HashMap<ConfigKey, Rc<RefCell<Config>>>,
1084		n_terminals: usize,
1085		n_rule: usize,
1086		dot: usize,
1087		is_basis: bool
1088	) -> Rc<RefCell<Config>>
1089	{	let key = ConfigKey {n_rule: rules[n_rule].index, dot};
1090
1091		if let Some(found) = configs_map.get(&key)
1092		{	Rc::clone(found)
1093		}
1094		else
1095		{	let cfp =  Rc::new(RefCell::new(Config::new(n_rule, dot, n_terminals+1)));
1096			configs_arr.push(Rc::clone(&cfp));
1097			let key = ConfigKey::new(&cfp.borrow());
1098			if is_basis
1099			{	configs_basis_keys_arr.push(key);
1100				configs_basis_arr.push(Rc::clone(&cfp));
1101			}
1102			configs_map.insert(key, Rc::clone(&cfp));
1103			cfp
1104		}
1105	}
1106
1107	/// Compute the closure of the configuration list
1108	fn configlist_closure
1109	(	symbols: &Symbols,
1110		rules: &Vec<Rule>,
1111		configs_basis_keys_arr: &mut Vec<ConfigKey>,
1112		configs_basis_arr: &mut Vec<Rc<RefCell<Config>>>,
1113		configs_arr: &mut Vec<Rc<RefCell<Config>>>,
1114		configs_map: &mut HashMap<ConfigKey, Rc<RefCell<Config>>>,
1115		n_terminals: usize
1116	) -> ParserResult<()>
1117	{	let mut c = 0;
1118		while c < configs_arr.len() // size of configs_arr increases during this loop
1119		{	let (rule, dot) =
1120			{	let config = configs_arr[c].borrow();
1121				(&rules[config.n_rule], config.dot)
1122			};
1123			if dot < rule.rhs.len()
1124			{	let sp = &symbols.array[rule.rhs[dot].index];
1125				if sp.typ == SymbolType::NONTERMINAL
1126				{	if sp.index!=symbols.error_symbol_index && sp.sym_rules_arr.is_empty()
1127					{	return Err(LemonMintError::new(&rule.rule_filename, rule.rule_n_line, format!("Nonterminal \"{}\" has no rules.", sp.name)));
1128					}
1129					for new_n_rule in sp.sym_rules_arr.iter()
1130					{	let newcfp = Self::configlist_add(rules, configs_basis_keys_arr, configs_basis_arr, configs_arr, configs_map, n_terminals, *new_n_rule, 0, false);
1131						let mut found = false;
1132						for i in dot+1 .. rule.rhs.len()
1133						{	let xsp = &symbols.array[rule.rhs[i].index];
1134							if xsp.typ == SymbolType::TERMINAL
1135							{	newcfp.borrow_mut().fws.add(xsp.index);
1136								found = true;
1137								break;
1138							}
1139							newcfp.borrow_mut().fws.intersect(&xsp.firstset);
1140							if !xsp.lambda
1141							{	found = true;
1142								break;
1143							}
1144						}
1145						if !found
1146						{	configs_arr[c].borrow_mut().fplp.insert(0, newcfp); // TODO: maybe push(), not insert()
1147						}
1148					}
1149				}
1150			}
1151			c += 1;
1152		}
1153		Ok(())
1154	}
1155}
1156
#[derive(Debug)]
/// A string together with the grammar-source location (filename and line) where it appeared,
/// so later error messages can point back at the source.
struct StringInFile
{	filename: Arc<String>, // Grammar source filename, shared with error reporting
	n_line: usize,         // Line number within `filename`
	string: String,        // The payload text itself
}
1163
#[derive(Debug)]
/// An operator-precedence declaration plus the grammar-source location where it appeared.
struct PrecedenceInFile
{	filename: Arc<String>, // Grammar source filename, for error messages
	n_line: usize,         // Line number within `filename`
	assoc: Associativity,  // LEFT / RIGHT / NONASSOC associativity
	precedence: i32,       // Numeric precedence level; a higher value wins shift/reduce conflicts
}
1171
/// Incrementally pretty-prints a sequence of values as a Rust array literal
/// (`[ ... ];`), placing a fixed number of values per line and prefixing each
/// line with an index comment such as `/*    12 */`.
///
/// Usage: call `separ` before writing each value, then `end` to close the literal.
struct ArrayFormatter
{	i: usize,              // How many values have been written so far
	values_on_line: usize, // Number of values to place on each output line
	endl: &'static str,    // Text emitted before the next line's index comment
}
impl ArrayFormatter
{	/// Create a formatter that puts `values_on_line` values on each line.
	pub fn new(values_on_line: usize) -> Self
	{	Self {i: 0, values_on_line, endl: "\n[\t"}
	}

	/// Write the separator that must precede the next value: either the start
	/// of a fresh line (with an index comment), or `", "` within the current line.
	pub fn separ<W>(&mut self, out: &mut W) -> io::Result<()> where W: io::Write
	{	match self.i % self.values_on_line
		{	0 =>
			{	// First value on a line: open (or continue) the literal and print the index.
				write!(out, "{}/* {:5} */  ", self.endl, self.i)?;
				self.endl = ",\n\t";
			}
			_ =>
			{	write!(out, ", ")?;
			}
		}
		self.i += 1;
		Ok(())
	}

	/// Terminate the array literal. An empty array prints as ` [];`.
	pub fn end<W>(self, out: &mut W) -> io::Result<()> where W: io::Write
	{	let before_bracket = if self.i==0 {" ["} else {"\n"};
		writeln!(out, "{}];", before_bracket)
	}
}
1202
1203/// Builder class that will finally generate `LemonMint`. Call builder methods to supply parser rules and options - everything that you would normally put to Lemon's Y-grammar file.
/// Or you can feed the Y-file itself (its syntax is similar to Lemon's).
1205///
1206/// # Example
1207///
1208/// ```
1209/// use lemon_mint::LemonMintBuilder;
1210/// use std::sync::Arc;
1211///
1212/// let fake_filename = Arc::new("source.y".to_string()); // will appear in error messages
1213/// let builder = LemonMintBuilder::new().load_y(&fake_filename, "%token_type {f64}\nUnit ::= NEW_LINE.".as_bytes()).unwrap();
1214/// let lemon = builder.try_into_lemon().unwrap();
1215/// ```
1216///
1217/// Or:
1218///
1219/// ```
1220/// use lemon_mint::LemonMintBuilder;
1221/// use std::sync::Arc;
1222///
1223/// let fake_filename = Arc::new("source.y".to_string()); // will appear in error messages
1224/// let builder = LemonMintBuilder::new()
1225/// 	.set_token_type("f64".to_string()).unwrap()
1226/// 	.add_rule(&fake_filename, 1, "Unit".to_string(), "NEW_LINE", "".to_string()).unwrap();
1227/// let lemon = builder.try_into_lemon().unwrap();
1228/// ```
#[derive(Debug)]
pub struct LemonMintBuilder
{	rules: Vec<Rule>,                  // All rules
	token_type: String,                // Type of terminal symbols in the parser stack
	vartype: String,                   // The default type of non-terminal symbols
	start_name: String,                // Name of the start symbol for the grammar
	with_trace: bool,                  // True to generate trace output (set by %trace — TODO confirm)
	trace_prompt: String,              // Prompt to preface each trace message, if with_trace is true
	extracode: String,                 // Code appended to the generated file
	n_conflicts: usize,                // Number of parsing conflicts
	n_action_table_entries: usize,     // Number of entries in the yy_action[] table
	tables_size: usize,                // Total table size of all tables in bytes
	has_fallback: bool,                // True if any %fallback is seen in the grammar
	symbols_builder: SymbolsBuilder,   // Accumulates terminals/nonterminals as rules and directives are added
	prec_counter: i32,                 // Next precedence value to assign (starts at 1)
	actions_enum: i32,                 // Monotonic id generator for Action objects
	nonterminal_types: HashMap<String, StringInFile>,  // Per-nonterminal %type declarations, keyed by symbol name
	precedence: HashMap<String, PrecedenceInFile>,     // Per-terminal precedence declarations, keyed by symbol name
	extra_argument_type: String,       // Type of the %extra_argument threaded through the parser
	// The following action codes delimit regions of the generated action table
	// (filled in when the tables are laid out; all start at 0):
	min_shift_reduce: usize,           // First SHIFTREDUCE action code
	error_action: usize,               // The "syntax error" action code
	accept_action: usize,              // The "accept" action code
	no_action: usize,                  // The "no action" code
	min_reduce: usize,                 // First REDUCE action code
	max_action: usize,                 // Largest action code in use
	no_compress: bool,                 // Don't compress the action table
	no_resort: bool,                   // Do not sort or renumber states
}
1257impl LemonMintBuilder
1258{	/// Creates new builder
1259	pub fn new() -> Self
1260	{	Self
1261		{	rules: Vec::with_capacity(64),
1262			token_type: String::new(),
1263			vartype: String::new(),
1264			start_name: String::new(),
1265			with_trace: false,
1266			trace_prompt: String::new(),
1267			extracode: String::new(),
1268			n_conflicts: 0,
1269			n_action_table_entries: 0,
1270			tables_size: 0,
1271			has_fallback: false,
1272			symbols_builder: SymbolsBuilder::new(false),
1273			prec_counter: 1,
1274			actions_enum: 0,
1275			nonterminal_types: HashMap::new(),
1276			precedence: HashMap::new(),
1277			extra_argument_type: String::new(),
1278			min_shift_reduce: 0,
1279			error_action: 0,
1280			accept_action: 0,
1281			no_action: 0,
1282			min_reduce: 0,
1283			max_action: 0,
1284			no_compress: false,
1285			no_resort: false,
1286		}
1287	}
1288
1289	/// Find a precedence symbol of every rule in the grammar.
1290	///
1291	/// Those rules which have a precedence symbol coded in the input grammar using the `"[symbol]"` construct will already have the
1292	/// rule->precsym field filled.  Other rules take as their precedence symbol the first RHS symbol with a defined precedence.  If there
1293	/// are not RHS symbols with a defined precedence, the precedence symbol field is left blank.
1294	fn find_rule_precedences(symbols: &mut Symbols, rules: &mut Vec<Rule>)
1295	{	for rule in rules.iter_mut()
1296    	{	if rule.precsym_index == std::usize::MAX
1297			{	for sp in &rule.rhs
1298				{	if symbols.array[sp.index].prec >= 0
1299					{	rule.precsym_index = sp.index;
1300						break;
1301					}
1302				}
1303			}
1304		}
1305	}
1306
1307	/// Find all nonterminals which will generate the empty string.  Then go back and compute the first sets of every nonterminal.
1308	/// The first set is the set of all terminal symbols which can begin a string generated by that nonterminal.
1309	fn find_first_sets(&self, symbols: &mut Symbols, rules: &mut Vec<Rule>)
1310	{	for (i, symbol) in symbols.array[0 .. symbols.n_symbols].iter_mut().enumerate()
1311		{	symbol.lambda = false;
1312			if i >= symbols.n_terminals
1313			{	symbol.firstset.set_size(symbols.n_terminals + 1);
1314			}
1315		}
1316
1317		// First compute all lambdas
1318		let mut progress = true;
1319		while progress
1320		{	progress = false;
1321'l:			for rule in rules.iter()
1322			{	let rule = rule;
1323				if !symbols.array[rule.lhs_index].lambda
1324				{	for sp in &rule.rhs
1325					{	let sp = &symbols.array[sp.index];
1326						assert!(sp.typ==SymbolType::NONTERMINAL || !sp.lambda);
1327						if !sp.lambda
1328						{	continue 'l;
1329						}
1330					}
1331					symbols.array[rule.lhs_index].lambda = true;
1332					progress = true;
1333				}
1334			}
1335		}
1336
1337		// Now compute all first sets
1338		progress = true;
1339		while progress
1340		{	progress = false;
1341			for rule in rules.iter()
1342			{	for rhs in &rule.rhs
1343				{	if symbols.array[rhs.index].typ == SymbolType::TERMINAL
1344					{	if symbols.array[rule.lhs_index].firstset.add(rhs.index)
1345						{	progress = true;
1346						}
1347						break;
1348					}
1349					else if rule.lhs_index == rhs.index
1350					{	if !symbols.array[rule.lhs_index].lambda
1351						{	break;
1352						}
1353					}
1354					else
1355					{	let set = mem::replace(&mut symbols.array[rhs.index].firstset, Set::new(0));
1356						if symbols.array[rule.lhs_index].firstset.intersect(&set)
1357						{	progress = true;
1358						}
1359						mem::replace(&mut symbols.array[rhs.index].firstset, set);
1360						if !symbols.array[rhs.index].lambda
1361						{	break;
1362						}
1363					}
1364				}
1365			}
1366		}
1367	}
1368
1369	/// Construct the propagation links
1370	fn find_links(states: &States)
1371    {	// Housekeeping detail:
1372		// Add to every propagate link a pointer back to the state to which the link is attached.
1373		for stp in states.array.iter()
1374		{	for cfp in stp.configurations.iter()
1375			{	cfp.borrow_mut().n_state = stp.n_state;
1376			}
1377		}
1378
1379		// Convert all backlinks into forward links.
1380    	// Only the forward links are used in the follow-set computation.
1381    	for stp in states.array.iter()
1382    	{	for cfp in stp.configurations.iter()
1383    		{	for plp in cfp.borrow().bplp.iter()
1384    			{	plp.borrow_mut().fplp.insert(0, Rc::clone(&cfp)); // TODO: maybe push(), not insert()
1385    			}
1386    		}
1387    	}
1388    }
1389
	/// Compute all followsets.
	///
	/// A followset is the set of all symbols which can come immediately after a configuration.
	fn find_follow_sets(states: &States)
	{	// Mark every configuration "incomplete" (its follow set still needs propagating).
		for stp in states.array.iter()
		{	for cfp in stp.configurations.iter()
			{	cfp.borrow_mut().status_is_complete = false;
			}
		}

		// Propagate follow sets along the forward links until nothing changes.
		let mut progress = true;
		while progress
		{	progress = false;
			for stp in states.array.iter()
			{	for cfp in stp.configurations.iter()
				{	if !cfp.borrow().status_is_complete
					{	for plp in cfp.borrow().fplp.iter()
						{	let mut plp = plp.borrow_mut();
							if plp.fws.intersect(&cfp.borrow().fws) // if changed
							{	plp.status_is_complete = false; // the target must propagate onward again
								progress = true;
							}
						}
						cfp.borrow_mut().status_is_complete = true;
					}
				}
			}
		}
	}
1419
	/// Resolve a conflict between the two given actions.  If the conflict can't be resolved, return non-zero.
	///
	/// NO LONGER TRUE:
	///   To resolve a conflict, first look to see if either action is on an error rule.  In that case, take the action which
	///   is not associated with the error rule.  If neither or both actions are associated with an error rule, then try to
	///   use precedence to resolve the conflict.
	///
	/// If either action is a SHIFT, then it must be apx.  This function won't work if apx->type==REDUCE and apy->type==SHIFT.
	fn resolve_conflict(symbols: &Symbols, rules: &mut Vec<Rule>, apx: &Rc<RefCell<Action>>, apy: &Rc<RefCell<Action>>) -> usize
	{	let mut errcnt = 0; // number of unresolvable conflicts found (0 or 1)
		let mut apx = apx.borrow_mut();
		let mut apy = apy.borrow_mut();
		assert_eq!(apx.symbol_index, apy.symbol_index);  // Otherwise there would be no conflict
		let mut apx_typ = apx.typ;
		let mut apy_typ = apy.typ;
		if apx_typ==ActionType::Shift && apy_typ==ActionType::Shift
		{	// Shift/shift: never resolvable.
			apy_typ = ActionType::SsConflict;
			errcnt += 1;
		}
		if apx_typ==ActionType::Shift && apy_typ==ActionType::Reduce
		{	// Shift/reduce: try to resolve with precedence and associativity.
			let spx = &symbols.array[apx.symbol_index];
			if let StateOrRule::Rule(n_rule) = apy.x
			{	let spy = rules[n_rule].precsym_index;
				if spy != std::usize::MAX
				{	let spy = &symbols.array[spy];
					if spx.prec<0 || spy.prec<0
					{	// Not enough precedence information.
						apy_typ = ActionType::SrConflict;
						errcnt += 1;
					}
					else if spx.prec > spy.prec    // higher precedence wins
					{	apy_typ = ActionType::RdResolved;
					}
					else if spx.prec < spy.prec
					{	apx_typ = ActionType::ShResolved;
					}
					else if spx.prec==spy.prec && spx.assoc== Associativity::RIGHT // Use operator
					{	apy_typ = ActionType::RdResolved;                          // associativity
					}
					else if spx.prec==spy.prec && spx.assoc== Associativity::LEFT  // to break tie
					{	apx_typ = ActionType::ShResolved;
					}
					else
					{	assert!(spx.prec==spy.prec && spx.assoc== Associativity::NONASSOC);
						apy_typ = ActionType::Error; // equal precedence, nonassociative: a syntax error
					}
				}
				else
				{	// Not enough precedence information.
					apy_typ = ActionType::SrConflict;
					errcnt += 1;
				}
			}
		}
		else if apx_typ==ActionType::Reduce && apy_typ==ActionType::Reduce
		{	// Reduce/reduce: resolvable only when both rules have distinct, defined precedences.
			if let StateOrRule::Rule(n_rule_x) = apx.x
			{	if let StateOrRule::Rule(n_rule_y) = apy.x
				{	let spx = rules[n_rule_x].precsym_index;
					let spy = rules[n_rule_y].precsym_index;
					if spx != std::usize::MAX
					{	if spy != std::usize::MAX
						{	let spx = &symbols.array[spx];
							let spy = &symbols.array[spy];
							if spx.prec<0 || spy.prec<0 || spx.prec==spy.prec
							{	apy_typ = ActionType::RrConflict;
								errcnt += 1;
							}
							else if spx.prec > spy.prec
							{	apy_typ = ActionType::RdResolved;
							}
							else if spx.prec < spy.prec
							{	apx_typ = ActionType::RdResolved;
							}
						}
						else
						{	apy_typ = ActionType::RrConflict;
							errcnt += 1;
						}
					}
					else
					{	apy_typ = ActionType::RrConflict;
						errcnt += 1;
					}
				}
			}
		}
		else
		{	assert!
			(	apx_typ == ActionType::ShResolved ||
				apx_typ == ActionType::RdResolved ||
				apx_typ == ActionType::SsConflict ||
				apx_typ == ActionType::SrConflict ||
				apx_typ == ActionType::RrConflict ||
				apy_typ == ActionType::ShResolved ||
				apy_typ == ActionType::RdResolved ||
				apy_typ == ActionType::SsConflict ||
				apy_typ == ActionType::SrConflict ||
				apy_typ == ActionType::RrConflict
			);
			// The REDUCE/SHIFT case cannot happen because SHIFTs come before REDUCEs on the list.  If we reach this point it must be because
			// the parser conflict had already been resolved.
		}
		apx.typ = apx_typ;
		apy.typ = apy_typ;
		errcnt
	}
1526
	/// Compute the reduce actions, and resolve conflicts.
	fn find_actions(&mut self, symbols: &Symbols, rules: &mut Vec<Rule>, states: &mut States) -> ParserResult<()>
	{	// Add all of the reduce actions
		// A reduce action is added for each element of the followset of a configuration which has its dot at the extreme right.
		for stp in states.array.iter_mut()   // Loop over all states
		{	for cfp in stp.configurations.iter()  // Loop over all configurations
			{	if cfp.borrow().dot == rules[cfp.borrow().n_rule].rhs.len()  // Is dot at extreme right?
				{	for (j, symbol) in symbols.array.iter().take(symbols.n_terminals).enumerate()
					{	if cfp.borrow().fws.contains(j)
						{	// Add a reduce action to the state "stp" which will reduce by the rule "cfp->rule" if the lookahead symbol is "symbols[j]"
							stp.actions.push(Rc::new(RefCell::new(Action::new_rule(self.actions_enum, symbol.index, ActionType::Reduce, cfp.borrow().n_rule))));
							self.actions_enum += 1;
						}
					}
				}
			}
		}

		// Add the accepting token
		// Add to the first state (which is always the starting state of the finite state machine) an action to ACCEPT if the lookahead is the start nonterminal.
		states.array[0].actions.push(Rc::new(RefCell::new(Action::new_empty_rule(self.actions_enum, symbols.start_symbol_index, ActionType::Accept))));
		self.actions_enum += 1;

		// Resolve conflicts
		for stp in states.array.iter_mut()
		{	if stp.actions.len() != 0
			{	stp.actions.sort();
				// After sorting, actions on the same lookahead are adjacent: inspect each neighboring pair.
				for action in stp.actions.windows(2)
				{	if action[0].borrow().symbol_index == action[1].borrow().symbol_index
					{	// The two actions "action[0]" and "action[1]" have the same lookahead.
						// Figure out which one should be used
						self.n_conflicts += Self::resolve_conflict(symbols, rules, &action[0], &action[1]);
					}
				}
			}
		}

		// Report an error for each rule that can never be reduced.
		for stp in states.array.iter_mut()
		{	for action in stp.actions.iter()
			{	if action.borrow().typ == ActionType::Reduce
				{	if let StateOrRule::Rule(n_rule) = action.borrow().x
					{	rules[n_rule].can_reduce = true;
					}
				}
			}
		}
		for rule in rules.iter()
		{	if !rule.can_reduce
			{	return Err(LemonMintError::new(&rule.rule_filename, rule.rule_n_line, "This rule can not be reduced.\n".to_string()));
			}
		}
		Ok(())
	}
1581
1582	fn get_filename_of_first_rule(rules: &Vec<Rule>) -> Arc<String>
1583	{	if let Some(rule) = rules.iter().next()
1584		{	return Arc::clone(&rule.rule_filename);
1585		}
1586		Arc::new("".to_string())
1587	}
1588
	/// Reduce the size of the action tables, if possible, by making use of defaults.
	///
	/// In this version, we take the most frequent REDUCE action and make it the default.
	fn compress_tables(&self, symbols: &Symbols, rules: &Vec<Rule>, states: &mut States, default_symbol_index: usize)
	{	for state in states.array.iter_mut()
		{	let mut n_best = 0;                  // Occurrence count of the most frequent REDUCE in this state
			let mut r_best = std::usize::MAX;    // Rule number of that REDUCE; usize::MAX = none yet

			// Find the REDUCE action that occurs most often in this state (quadratic scan over the state's actions).
			let mut it = state.actions.iter();
			while let Some(action) = it.next()
			{	if action.borrow().typ == ActionType::Reduce
				{	if let StateOrRule::Rule(n_rule) = action.borrow().x
					{	let rule = &rules[n_rule];
						// Rules with the start symbol on the left are never defaulted.
						if !rule.lhs_start && n_rule != r_best
						{	let mut n = 1;
							let mut it2 = it.clone(); // count further occurrences of the same rule
							while let Some(ap2) = it2.next()
							{	if ap2.borrow().typ == ActionType::Reduce
								{	if let StateOrRule::Rule(n_rule_2) = ap2.borrow().x
									{	if n_rule_2 != r_best && n_rule_2 == n_rule
										{	n += 1;
										}
									}
								}
							}
							if n > n_best
							{	n_best = n;
								r_best = n_rule;
							}
						}
					}
				}
			}

			// Do not make a default if the number of rules to default is not at least 1
			if n_best < 1
			{	continue;
			}

			// Combine matching REDUCE actions into a single default
			let mut found = false;
			for action in state.actions.iter()
			{	let mut action = action.borrow_mut();
				if action.typ == ActionType::Reduce
				{	if let StateOrRule::Rule(n_rule) = action.x
					{	if rules[n_rule].index == r_best
						{	if !found
							{	// The first matching REDUCE becomes the default action.
								action.symbol_index = default_symbol_index;
								found = true;
							}
							else
							{	// Any further copies are redundant once the default exists.
								action.typ = ActionType::NotUsed;
							}
						}
					}
				}
			}
			assert!(found);
			state.actions.sort();

			// If the state now contains no SHIFT and no REDUCE other than the default, it can auto-reduce.
			found = false;
			for action in state.actions.iter()
			{	let action = action.borrow();
				if action.typ==ActionType::Shift || action.typ==ActionType::Reduce && match action.x {StateOrRule::Rule(n_rule) => rules[n_rule].index != r_best, _ => false}
				{	found = true;
					break;
				}
			}
			if !found
			{	state.auto_reduce = true;
				state.default_reduce_rule = r_best;
			}
		}

		// Make a second pass over all states and actions.  Convert every action that is a SHIFT to an autoReduce state into a SHIFTREDUCE action.
		for stp in states.array.iter()
		{	for action in stp.actions.iter()
			{	let mut action = action.borrow_mut();
				if action.typ == ActionType::Shift
				{	if let StateOrRule::State(next_n_state) = action.x
					{	let next_state = &states.array[next_n_state];
						if next_state.auto_reduce && next_state.default_reduce_rule!=std::usize::MAX
						{	action.typ = ActionType::ShiftReduce;
							action.x = StateOrRule::Rule(next_state.default_reduce_rule);
						}
					}
				}
			}
		}

		// If a SHIFTREDUCE action specifies a rule that has a single RHS term (meaning that the SHIFTREDUCE will land back in the state where it
		// started) and if there is no C-code associated with the reduce action, then we can go ahead and convert the action to be the same as the
		// action for the RHS of the rule.
		for stp in states.array.iter()
		{	for action in stp.actions.iter()
			{	let mut done = false;
				while !done // repeat in case the replacement is itself such a SHIFTREDUCE
				{	done = true;
					let mut ap2 = None;
					{	let action = action.borrow();
						if action.typ == ActionType::ShiftReduce
						{	if let StateOrRule::Rule(n_rule) = action.x
							{	let rp = &rules[n_rule];
								if rp.code.is_empty() && rp.rhs.len()==1
								{	// Only apply this optimization to non-terminals.  It would be OK to apply it to terminal symbols too, but that makes the parser tables larger.
									if action.symbol_index >= symbols.n_terminals
									{	// If we reach this point, it means the optimization can be applied
										ap2 = stp.actions.iter().find
										(	|a|
											{	let a = a.borrow();
												a.id!= action.id && a.symbol_index==rp.lhs_index
											}
										);
										assert!(ap2.is_some());
										done = false;
									}
								}
							}
						}
					}
					if let Some(ap2) = ap2
					{	// Copy the found action's disposition over this one.
						let mut action = action.borrow_mut();
						let ap2 = ap2.borrow();
						action.symbol_index_opt = ap2.symbol_index;
						action.typ = ap2.typ;
						action.x = ap2.x;
					}
				}
			}
		}
	}
1720
/// Given an action, compute the integer value for that action which is to be put in the action table of the generated machine.
/// The return type is unsigned, so "no action should be generated" is signalled by `std::usize::MAX` rather than by a negative value.
fn compute_action(&self, symbols: &Symbols, action: &Rc<RefCell<Action>>) -> usize
{	let action = action.borrow();
	match action.typ
	{	ActionType::Shift =>
		{	// A plain shift encodes the destination state number directly.
			match action.x
			{	StateOrRule::State(n_state) => n_state,
				_ => std::usize::MAX
			}
		}
		ActionType::ShiftReduce =>
		{	// Since a SHIFT is inherent after a prior REDUCE, convert any SHIFTREDUCE action with a nonterminal on the LHS into a simple REDUCE action:
			match action.x
			{	StateOrRule::Rule(n_rule) =>
				{	if action.symbol_index >= symbols.n_terminals
					{	self.min_reduce + n_rule
					}
					else
					{	self.min_shift_reduce + n_rule
					}
				},
				_ => std::usize::MAX
			}
		}
		ActionType::Reduce =>
		{	match action.x
			{	StateOrRule::Rule(n_rule) => self.min_reduce + n_rule,
				_ => std::usize::MAX
			}
		}
		ActionType::Error =>
		{	self.error_action
		}
		ActionType::Accept =>
		{	self.accept_action
		}
		// All other action types (conflicts, unused actions) produce no table entry.
		_ => std::usize::MAX
	}
}
1761
/// Renumber and resort states so that states with fewer choices occur at the end.  Except, keep state 0 as the first state.
/// After sorting, every action/configuration that refers to a state by number is remapped, and
/// `states.n_no_tail` is shrunk past the trailing auto-reduce states.
fn resort_states(&mut self, symbols: &Symbols, states: &mut States)
{	let sorted_len = states.array.len();
	// Pass 1: for every state, count its terminal (token) and nonterminal actions,
	// record its default reduce action, and reset both table offsets to "not placed yet".
	for stp in states.array.iter_mut()
	{	stp.n_token_act = 0;
		stp.n_nt_act = 0;
		stp.default_reduce_action = std::usize::MAX; // Init dflt action to "syntax error"
		stp.token_offset = std::isize::MIN;
		stp.nt_offset = std::isize::MIN;
		for action in stp.actions.iter()
		{	let n_action = self.compute_action(symbols, action);
			if n_action != std::usize::MAX
			{	let action = action.borrow();
				if action.symbol_index < symbols.n_terminals
				{	stp.n_token_act += 1;
				}
				else if action.symbol_index < symbols.n_symbols
				{	stp.n_nt_act += 1;
				}
				else
				{	// A symbol_index past n_symbols marks this state's default action.
					assert!(!stp.auto_reduce || action.x == StateOrRule::Rule(stp.default_reduce_rule));
					stp.default_reduce_action = n_action;
				}
			}
		}
	}

	// Compare two states for sorting purposes.  The smaller state is the one with the most non-terminal actions.
	// If they have the same number of non-terminal actions, then the smaller is the one with the most token actions.
	// The slice starts at index 1, so state 0 keeps its position.
	if sorted_len > 2
	{	states.array[1 ..].sort_by
		(	|a, b|
			{	let mut res = b.n_nt_act.cmp(&a.n_nt_act);
				if res == Ordering::Equal
				{	res = b.n_token_act.cmp(&a.n_token_act);
					if res == Ordering::Equal
					{	res = b.n_state.cmp(&a.n_state);
					}
				}
				res
			}
		);
	}

	// update also in actions and configurations
	// `map[old_state_number] == new_position` after the sort.
	let mut map = Vec::new();
	map.resize(states.array.len(), 0);
	for (i, stp) in states.array.iter().enumerate()
	{	map[stp.n_state] = i;
	}
	for stp in states.array.iter()
	{	for action in stp.actions.iter()
		{	if let StateOrRule::State(ref mut state) = action.borrow_mut().x
			{	*state = map[*state];
			}
		}
		for configuration in stp.configurations.iter()
		{	let mut configuration = configuration.borrow_mut();
			configuration.n_state = map[configuration.n_state];
		}
	}

	// update self: each state's number now equals its position in the array
	for (new_n_state, stp) in states.array.iter_mut().enumerate()
	{	stp.n_state = new_n_state;
	}

	// The sort pushed the low-choice states to the end; drop trailing auto-reduce states from the "no tail" count.
	while states.n_no_tail>1 && states.array[states.n_no_tail-1].auto_reduce
	{	states.n_no_tail -= 1;
	}
}
1833
/// Pick the smallest integer type able to represent every value between `lower` and
/// `upper`, inclusive, and return it as a size code: the magnitude is the byte size
/// (1, 2 or 4) and the sign tells signedness (positive = unsigned, negative = signed).
/// (The old comment mentioned "a C datatype name" — this Rust port returns a size code instead.)
/// The thresholds are deliberately conservative (e.g. -127 rather than -128), matching
/// the original lemon generator.
fn minimum_size_type(lower: isize, upper: isize) -> isize {
    if lower < 0 {
        // Negative values present: a signed type is required.
        if lower >= -127 && upper <= 127 {
            -1
        } else if lower >= -32767 && upper < 32767 {
            -2
        } else {
            -4
        }
    } else if upper <= 255 {
        // Whole range is non-negative: an unsigned type suffices.
        1
    } else if upper < 65535 {
        2
    } else {
        4
    }
}
1859
/// Scan the action code of a rule (`rule.code`) for identifiers, and verify that every
/// RHS symbol alias (the "a" in "Expr(a)") is referenced by that code; return an error
/// naming the first unused alias.
/// NOTE(review): unlike the C lemon function this was ported from (which rewrote "zCode"
/// to refer to parser-stack elements), this version only validates — `rule.code` is not modified.
fn translate_code(&self, rule: &mut Rule) -> ParserResult<()>
{	let mut used = Vec::new();   // True for each RHS element which is used
	used.resize(rule.rhs.len(), false);

	let mut it = rule.code.chars();
	let mut it_prev = it.clone(); // clone taken before each next(), so during the loop body it points at `cp` itself

	while let Some(cp) = it.next()
	{	if cp.is_ascii_alphabetic()
		{	// Collect the full identifier starting at `cp` (ASCII alphanumerics and '_').
			let ident: String = it_prev.take_while(|c| c.is_ascii_alphanumeric() || *c=='_').collect();
			if ident.len() > 1
			{	// Advance `it` past the rest of the identifier; `cp` is already consumed,
				// so nth(len-2) consumes the remaining len-1 chars (safe: they exist in the string).
				it.nth(ident.chars().count() - 2).unwrap();
			}
			// Mark the RHS element (if any) whose alias matches this identifier.
			for (i, rhs) in rule.rhs.iter().enumerate()
			{	if rhs.alias == ident
				{	used[i] = true;
					break;
				}
			}
		}
		it_prev = it.clone();
	}

	// Report an error for the first RHS symbol whose alias is not used in the reduce code.
	for (i, rhs) in rule.rhs.iter().enumerate()
	{	if !rhs.alias.is_empty() && !used[i]
		{	return Err
			(	LemonMintError::new
				(	&rule.rule_filename,
					rule.rule_n_line,
					format!("Label {} for \"{}({})\" is never used.", rhs.alias, rhs.name, rhs.alias)
				)
			);
		}
	}

	Ok(())
}
1899
/*
** Compute the variant list for the generated parser's "minor type" union — the
** per-symbol value stored on the parser's data stack.  This union contains fields
** for every possible data type for tokens and nonterminals.  In the process of
** computing this list, also set the ".dtnum" field of every terminal and
** nonterminal symbol.
*/
fn get_minor_type(&self, symbols: &mut Symbols) -> Vec<(String, String)>
{	// Allocate and initialize types[] and allocate stddt[]
	let arraysize = symbols.n_symbols * 2; // Size of the "types" array
	let mut types = Vec::new(); // A hash table of datatypes
	types.resize(arraysize, String::new());

	// Build a hash table of datatypes. The ".dtnum" field of each symbol is filled in with the hash index plus 1.  A ".dtnum" value of 0 is
	// used for terminal symbols.  If there is no %default_type defined then 0 is also used as the .dtnum value for nonterminals which do not specify
	// a datatype using the %type directive.
'l:	for (i, sp) in symbols.array[0 .. symbols.n_symbols].iter_mut().enumerate()
	{	if i == symbols.error_symbol_index
		{	// The error symbol gets a dtnum one past the hash table; its "i32" variant is pushed at the end below.
			sp.dtnum = arraysize+1;
			continue;
		}
		if sp.typ==SymbolType::TERMINAL || (sp.data_type.is_empty() && self.vartype.is_empty())
		{	sp.dtnum = 0;
			continue;
		}
		// Effective datatype: the symbol's own %type, falling back to %default_type.
		let stddt = if !sp.data_type.is_empty()
		{	&sp.data_type
		}
		else
		{	&self.vartype
		};
		if !self.token_type.is_empty() && &self.token_type == stddt
		{	// Same type as terminals — reuse variant 0 (TokenValue).
			sp.dtnum = 0;
			continue;
		}
		// Open-addressing insert with linear probing; the table holds 2*n_symbols slots
		// for at most n_symbols distinct types, so the probe always finds a free slot.
		let mut hash = DefaultHasher::new();
		stddt.hash(&mut hash);
		let mut hash = hash.finish() as usize % arraysize;
		while !types[hash].is_empty()
		{	if &types[hash] == stddt
			{	sp.dtnum = hash + 1;
				continue 'l;
			}
			hash += 1;
			if hash >= arraysize
			{	hash = 0;
			}
		}
		sp.dtnum = hash + 1;
		types[hash] = stddt.to_string(); // TODO: no to_string()
	}

	// Build the variant list for TokenValue and MinorType.
	// Variant "Symbol{i}" corresponds to dtnum == slot + 1, so names stay in sync with the dtnums assigned above.
	let mut minor_type = Vec::with_capacity(arraysize+3);
	minor_type.push(("None".to_string(), "".to_string()));
	minor_type.push(("Symbol0".to_string(), "TokenValue".to_string()));
	let mut i = 1;
	for name in types
	{	if !name.is_empty()
		{	minor_type.push((format!("Symbol{}", i), name));
		}
		i += 1;
	}
	if let Some(symbol) = symbols.get_error_symbol()
	{	minor_type.push((format!("Symbol{}", symbol.dtnum), "i32".to_string()));
	}

	// Done
	minor_type
}
1970
/// Build the compressed action table: place every state's terminal and nonterminal
/// action sets into the shared table, recording each state's token/nonterminal offset
/// and the table-wide min/max offsets.  Also marks which rules are actually used for
/// reduce actions, finalizes `self.has_fallback`, and computes the lengths of the
/// shift-offset and reduce-offset tables.
fn init_acttab(&mut self, symbols: &Symbols, rules: &mut Vec<Rule>, states: &mut States) -> ActTab
{	let mut acttab = ActTab::new(symbols.n_terminals, symbols.n_symbols);

	// Compute the actions on all states and count them up
	// Two entries per state: one for its token actions, one for its nonterminal actions.
	let mut ax = Vec::with_capacity(states.n_no_tail*2);
	for stp in states.array[0 .. states.n_no_tail].iter()
	{	ax.push(AxSet::new(stp.n_state, true, stp.n_token_act, ax.len()));
		ax.push(AxSet::new(stp.n_state, false, stp.n_nt_act, ax.len()));
	}
	ax.sort();

	let mut min_token_offset = 0;
	let mut max_token_offset = 0;
	let mut min_nt_offset = 0;
	let mut max_nt_offset = 0;

	// Compute the action table.  In order to try to keep the size of the action table to a minimum, the heuristic of placing the largest action sets first is used.
	for ax_i in ax.iter()
	{	if ax_i.n_action == 0
		{	// presumably AxSet sorts largest-first, so everything after an empty set is empty too — TODO confirm AxSet's Ord impl
			break;
		}
		let stp = &mut states.array[ax_i.n_state];
		if ax_i.is_tkn
		{	// Insert this state's actions over terminal symbols.
			for action in stp.actions.iter()
			{	if action.borrow().symbol_index < symbols.n_terminals
				{	let n_action = self.compute_action(symbols, action);
					if n_action != std::usize::MAX
					{	acttab.acttab_action(action.borrow().symbol_index, n_action);
					}
				}
			}
			stp.token_offset = acttab.acttab_insert(true);
			if stp.token_offset < min_token_offset
			{	min_token_offset = stp.token_offset;
			}
			if stp.token_offset > max_token_offset
			{	max_token_offset = stp.token_offset;
			}
		}
		else
		{	// Insert this state's actions over nonterminal symbols (excluding the n_symbols sentinel).
			for action in stp.actions.iter()
			{	if action.borrow().symbol_index >= symbols.n_terminals && action.borrow().symbol_index != symbols.n_symbols
				{	let n_action = self.compute_action(symbols, action);
					if n_action != std::usize::MAX
					{	acttab.acttab_action(action.borrow().symbol_index, n_action);
					}
				}
			}
			stp.nt_offset = acttab.acttab_insert(false);
			if stp.nt_offset < min_nt_offset
			{	min_nt_offset = stp.nt_offset;
			}
			if stp.nt_offset > max_nt_offset
			{	max_nt_offset = stp.nt_offset;
			}
		}
	}

	// Mark rules that are actually used for reduce actions after all optimizations have been applied
	for rule in rules.iter_mut()
	{	rule.does_reduce = false;
	}
	for state in states.array[0 .. states.n_no_tail].iter()
	{	for action in state.actions.iter()
		{	let action = action.borrow_mut();
			if action.typ==ActionType::Reduce || action.typ==ActionType::ShiftReduce
			{	if let StateOrRule::Rule(n_rule) = action.x
			{	rules[n_rule].does_reduce = true;
				}
			}
		}
	}

	// Trim the fallback table: find the highest terminal that actually has a fallback.
	let mut n_fallbacks = 0;
	if self.has_fallback
	{	n_fallbacks = symbols.n_terminals - 1;
		while n_fallbacks>0 && symbols.array[n_fallbacks].fallback_index==std::usize::MAX
		{	n_fallbacks -= 1;
		}
		n_fallbacks += 1;
	}
	self.has_fallback = n_fallbacks != 0;

	// Trim trailing states that never got a token offset / nonterminal offset.
	let mut n_shift_offset = states.n_no_tail;
	while n_shift_offset >0 && states.array[n_shift_offset -1].token_offset == std::isize::MIN
	{	n_shift_offset -= 1;
	}

	let mut reduce_count = states.n_no_tail;
	while reduce_count>0 && states.array[reduce_count-1].nt_offset == std::isize::MIN
	{	reduce_count -= 1;
	}

	// Publish the computed bounds and counts on the returned table.
	acttab.min_token_offset = min_token_offset;
	acttab.max_token_offset = max_token_offset;
	acttab.min_nt_offset = min_nt_offset;
	acttab.max_nt_offset = max_nt_offset;
	acttab.n_fallbacks = n_fallbacks;
	acttab.n_shift_offset = n_shift_offset;
	acttab.reduce_count = reduce_count;

	acttab
}
2074
/// Generate source code for the parser.
/// Assembles the final `LemonMint` value: computes integer size codes for the generated
/// tables, builds the lookahead/action, shift-offset, reduce-offset, default-action,
/// fallback and symbol-name tables, validates every rule's action code, and moves all
/// accumulated data into the result.
fn get_lemon(mut self, mut symbols: Symbols, mut rules: Vec<Rule>, states: States, acttab: ActTab) -> ParserResult<LemonMint>
{	// Generate types (size codes — see minimum_size_type)
	let sz_code_type = Self::minimum_size_type(0, symbols.n_symbols as isize+1);
	let sz_action_type =  Self::minimum_size_type(0, (states.array.len()+rules.len()+5) as isize);
	let sz_shift_offset_type =  Self::minimum_size_type(acttab.min_token_offset-1, acttab.max_token_offset);
	let sz_reduce_offset_type =  Self::minimum_size_type(acttab.min_nt_offset-1, acttab.max_nt_offset);

	// Generate token constants (index 0 is skipped — presumably the end-of-input symbol; TODO confirm)
	let mut token = Vec::with_capacity(symbols.n_terminals);
	for symbol in symbols.array[1 .. symbols.n_terminals].iter()
	{	token.push(Arc::clone(&symbol.name));
	}

	// Generate the table of rule information
	// Note: This code depends on the fact that rules are numbered sequentially beginning with 0.
	self.tables_size += rules.len() * sz_code_type.abs() as usize;

	// Generate the action table and its associates:
	//
	//  yy_action[]        A single table containing all actions.
	//  yy_lookahead[]     A table containing the lookahead for each entry in
	//                     yy_action.  Used to detect hash collisions.
	//  SHIFT_OFFSET[]    For each state, the offset into yy_action for
	//                     shifting terminals.
	//  REDUCE_OFFSET[]   For each state, the offset into yy_action for
	//                     shifting non-terminals after a reduce.
	//  DEFAULT[]       Default action for each state.

	// Output the LOOKAHEAD_AND_ACTION table
	self.n_action_table_entries = acttab.get_n_actions();
	let mut n = acttab.get_n_lookahead();
	let n_lookahead = std::cmp::max(n, symbols.n_terminals + self.n_action_table_entries);
	let mut lookahead_and_action = Vec::with_capacity(n_lookahead);
	for (i, LookaheadAction {lookahead, action}) in acttab.iter().enumerate()
	{	// usize::MAX slots are empty: map their lookahead to the n_symbols sentinel and their action to no_action.
		let lookahead = if *lookahead == std::usize::MAX {symbols.n_symbols} else {*lookahead};
		let action = if i >= self.n_action_table_entries {0} else if *action == std::usize::MAX {self.no_action} else {*action};
		lookahead_and_action.push((lookahead, action));
	}
	// Add extra entries to the end of the yy_lookahead[] table so that yy_shift_ofst[]+iToken will always be a valid index into the array,
	// even for the largest possible value of yy_shift_ofst[] and iToken.
	while n < n_lookahead
	{	lookahead_and_action.push((symbols.n_terminals, 0));
		n += 1;
	}
	self.tables_size += lookahead_and_action.len() * (sz_code_type + sz_action_type).abs() as usize;

	// Output the SHIFT_OFFSET[] table (isize::MIN means "no token actions": emit min-1 as the out-of-range marker)
	let mut shift_offset = Vec::with_capacity(acttab.n_shift_offset);
	for stp in states.array.iter().take(acttab.n_shift_offset)
	{	shift_offset.push(if stp.token_offset==std::isize::MIN {acttab.min_token_offset - 1} else {stp.token_offset});
	}
	self.tables_size += shift_offset.len() * sz_shift_offset_type.abs() as usize;

	// Output the REDUCE_OFFSET[] table (same out-of-range convention as SHIFT_OFFSET)
	let mut reduce_offset = Vec::with_capacity(acttab.reduce_count);
	for stp in states.array.iter().take(acttab.reduce_count)
	{	reduce_offset.push(if stp.nt_offset==std::isize::MIN {acttab.min_nt_offset - 1} else {stp.nt_offset});
	}
	self.tables_size += reduce_offset.len() * sz_reduce_offset_type.abs() as usize;

	// Output the default action table: the default reduce when a state has one, otherwise the error action.
	let mut default = Vec::with_capacity(states.n_no_tail);
	for stp in states.array.iter().take(states.n_no_tail)
	{	default.push(if stp.default_reduce_action!=std::usize::MAX {stp.default_reduce_action + self.min_reduce} else {self.error_action});
	}
	self.tables_size += default.len() * sz_action_type.abs() as usize;

	// Generate the table of fallback tokens
	let mut fallback = Vec::new();
	if self.has_fallback
	{	fallback.reserve(acttab.n_fallbacks);
		for symbol in symbols.array.iter().take(acttab.n_fallbacks)
		{	if symbol.fallback_index != std::usize::MAX
			{	let fb = &symbols.array[symbol.fallback_index];
				fallback.push((fb.index, Arc::clone(&symbol.name), Arc::clone(&fb.name)));
			}
			else
			{	// No fallback for this token: index 0 with an empty name.
				fallback.push((0, Arc::clone(&symbol.name), Arc::new(String::new())));
			}
		}
	}
	self.tables_size += fallback.len() * sz_code_type.abs() as usize;

	// Generate a table containing the symbolic name of every symbol (only needed when tracing is enabled)
	let mut token_names = Vec::new();
	if self.with_trace && symbols.n_symbols!=0
	{	token_names.reserve(symbols.n_symbols);
		for symbol in symbols.array.iter().take(symbols.n_symbols)
		{	token_names.push(Arc::clone(&symbol.name));
		}
	}

	// Validate the reduce-action code of every rule (translate_code errors on unused RHS aliases)
	for sym in symbols.array.iter()
	{	for n_rule in sym.sym_rules_arr.iter()
		{	self.translate_code(&mut rules[*n_rule])?;
		}
	}

	let start_type = symbols.get_start_symbol().data_type.clone();
	let minor_type = self.get_minor_type(&mut symbols);
	let n_symbols = symbols.n_symbols;
	let n_states = states.array.len();
	let n_no_tail = states.n_no_tail;
	let error_symbol = if let Some(symbol) = symbols.get_error_symbol() {symbol.index} else {0};
	let n_terminals = symbols.n_terminals;
	let n_nonterminals = symbols.n_symbols - symbols.n_terminals;

	Ok
	(	LemonMint
		{	symbols,
			rules,
			states,
			token_type: self.token_type,
			vartype: self.vartype,
			start_name: self.start_name,
			start_type,
			extra_argument_type: self.extra_argument_type,
			extracode: self.extracode,
			minor_type,
			token,

			sz_code_type,
			sz_action_type,
			sz_shift_offset_type,
			sz_reduce_offset_type,

			n_symbols,
			n_states,
			n_no_tail,
			n_fallbacks: acttab.n_fallbacks,
			error_symbol,
			with_fallback: self.has_fallback,
			shift_count: acttab.n_shift_offset-1,
			reduce_count: acttab.reduce_count-1,
			with_trace: self.with_trace,
			trace_prompt: self.trace_prompt,
			shift_min: acttab.min_token_offset,
			shift_max: acttab.max_token_offset,
			reduce_min: acttab.min_nt_offset,
			reduce_max: acttab.max_nt_offset,

			n_terminals,
			n_nonterminals,
			tables_size: self.tables_size,
			n_action_table_entries: self.n_action_table_entries,

			lookahead_and_action,
			shift_offset,
			reduce_offset,
			default,
			fallback,
			token_names,
		}
	)
}
2232
2233	/// Load a Y-grammar file. You can call this function several times to load grammar by parts, and you can call other builder methods to add/override settings.
2234	pub fn load_y_file(self, filename: &Arc<String>) -> ParserResult<Self>
2235	{	self.load_y(filename, File::open(filename.as_ref()).map_err(|e| LemonMintError::new(filename, 1, e.to_string()))?)
2236	}
2237
2238	/// Like load_y_file(), but you give file contents as io::Read object.
2239	///
2240	/// # Example
2241	///
2242	/// ```
2243	/// use lemon_mint::LemonMintBuilder;
2244	/// use std::sync::Arc;
2245	///
2246	/// let fake_filename = Arc::new("source.y".to_string()); // will appear in error messages
2247	/// let builder = LemonMintBuilder::new().load_y(&fake_filename, "%token_type {f64}\nUnit ::= NEW_LINE.".as_bytes()).unwrap();
2248	/// ```
2249	pub fn load_y<R>(mut self, filename: &Arc<String>, input: R) -> ParserResult<Self> where R: io::Read
2250	{	let input = BufReader::new(input);
2251		let mut n_line = 0;
2252		let mut lines = input.lines();
2253		let mut part_till_comment: Option<String> = None; // if a multiline comment opened on a line, this line is not complete, and it will be stored here, and we will wait to closing token
2254'l:		while let Some(line) = lines.next()
2255		{	n_line += 1;
2256			let mut line = line.map_err(|e| LemonMintError::new(filename, n_line, e.to_string()))?;
2257			// comments?
2258			if let Some(part) = part_till_comment.take()
2259			{	if let Some(pos) = line.find("*/")
2260				{	line.replace_range(.. pos+2, &part);
2261				}
2262				else
2263				{	part_till_comment = Some(part);
2264					continue;
2265				}
2266			}
2267			loop
2268			{	if let Some(pos) = line.find('/')
2269				{	match line.bytes().skip(pos+1).next()
2270					{	Some(b'/') => // line comment
2271						{	line.truncate(pos);
2272						}
2273						Some(b'*') => // multiline comment
2274						{	if let Some(len_minus_2) = line[pos+2 ..].find("*/")
2275							{	line.replace_range(pos .. pos+len_minus_2+2, "");
2276								continue;
2277							}
2278							else
2279							{	line.truncate(pos);
2280								part_till_comment = Some(line);
2281								continue 'l;
2282							}
2283						}
2284						_ => {}
2285					}
2286				}
2287				break;
2288			}
2289			// comments cut from line
2290			let mut line = line.trim();
2291			if !line.is_empty()
2292			{	let mut directive = Directive::Rule;
2293				let mut symbol_name: Cow<str> = "".into();
2294				let mut rule_rhs: Cow<str> = "".into();
2295				if line.as_bytes()[0] == b'%'
2296				{	let pos = line.bytes().position(|c| c.is_ascii_whitespace()).unwrap_or(line.len());
2297					let token = &line[1 .. pos];
2298					line = &line[pos ..].trim_start();
2299					directive = match token
2300					{	"token_type" => Directive::TokenType,
2301						"type" => Directive::Type,
2302						"default_type" => Directive::DefaultType,
2303						"start_symbol" => Directive::StartSymbol,
2304						"trace" => Directive::Trace,
2305						"extra_argument" => Directive::ExtraArgument,
2306						"left" => Directive::Left,
2307						"right" => Directive::Right,
2308						"nonassoc" => Directive::Nonassoc,
2309						"fallback" => Directive::Fallback,
2310						"code" | "include" => Directive::Code,
2311						_ =>
2312						{	return Err(LemonMintError::new(filename, n_line, format!("Directive not supported: %{}", token)));
2313						}
2314					};
2315				}
2316				match directive
2317				{	Directive::Type | Directive::Fallback =>
2318					{	// read symbol name
2319						let pos = line.bytes().position(|c| !c.is_ascii_alphanumeric() && c!=b'_').unwrap_or(line.len());
2320						if pos == 0
2321						{	return Err(LemonMintError::new(filename, n_line, "Expected symbol name after %type".to_string()));
2322						}
2323						symbol_name = line[.. pos].to_string().into();
2324						line = &line[pos ..].trim_start();
2325					}
2326					Directive::Rule =>
2327					{	let pos = line.find("::=").ok_or_else(|| LemonMintError::new(filename, n_line, "Couldn't interpret this line".to_string()))?;
2328						symbol_name = line[.. pos].trim_end().to_string().into();
2329						line = &line[pos+3 ..].trim_start();
2330						let pos = line.find('.').ok_or_else(|| LemonMintError::new(filename, n_line, "Rules must be terminated with a dot".to_string()))?;
2331						rule_rhs = line[.. pos].trim_end().to_string().into();
2332						line = &line[pos+1 ..].trim_start();
2333					}
2334					_ => {}
2335				}
2336				let mut value: Cow<str> = line.into();
2337				if line.bytes().next() == Some(b'{')
2338				{	line = &line[1 ..].trim_start(); // skip '{' at the beginning
2339					let (mut balance, mut last_close) = (1, usize::MAX);
2340					Self::balanced_braces(filename, n_line, line, &mut balance, &mut last_close)?;
2341					if balance == 0
2342					{	value = line[.. last_close].into(); // cut '}' at the end
2343					}
2344					else
2345					{	// read lines untill all braces closed
2346						let mut buffer = String::with_capacity(128);
2347						buffer.push_str(line);
2348						buffer.push('\n');
2349						let from_n_line = n_line;
2350						while let Some(line) = lines.next()
2351						{	n_line += 1;
2352							let line = line.map_err(|e| LemonMintError::new(filename, n_line, e.to_string()))?;
2353							Self::balanced_braces(filename, n_line, &line, &mut balance, &mut last_close)?;
2354							if balance == 0
2355							{	buffer.push_str(&line[.. last_close]); // cut '}' at the end
2356								break;
2357							}
2358							buffer.push_str(&line);
2359							buffer.push('\n');
2360						}
2361						if balance > 0
2362						{	return Err(LemonMintError::new(filename, n_line, format!("Braces not closed at the end of file since line {}", from_n_line)));
2363						}
2364						value = buffer.into();
2365					}
2366				}
2367				else if !line.is_empty() && line.as_bytes()[line.len()-1]==b'.'
2368				{	value = line[.. line.len()-1].trim_end().into(); // cut '.' at end
2369				}
2370				self = match directive
2371				{	Directive::Rule => self.add_rule(filename, n_line, symbol_name.into(), rule_rhs.as_ref(), value.into()),
2372					Directive::TokenType => self.set_token_type(value.into()),
2373					Directive::Type => self.add_type(filename, n_line, symbol_name.into(), value.into()),
2374					Directive::DefaultType => self.set_default_type(value.into()),
2375					Directive::StartSymbol => self.set_start_symbol(filename, n_line, value.into()),
2376					Directive::Trace => self.set_trace_prompt(value.into()),
2377					Directive::ExtraArgument => self.set_extra_argument_type(value.into()),
2378					Directive::Left => self.set_left(filename, n_line, value.as_ref()),
2379					Directive::Right => self.set_right(filename, n_line, value.as_ref()),
2380					Directive::Nonassoc => self.set_nonassoc(filename, n_line, value.as_ref()),
2381					Directive::Fallback => self.add_fallback(filename, n_line, symbol_name.into(), value.as_ref()),
2382					Directive::Code => self.add_code(value.into()),
2383				}?;
2384			}
2385		}
2386		Ok(self)
2387	}
2388
2389	fn balanced_braces(filename: &Arc<String>, n_line: usize, line: &str, balance: &mut i32, last_close: &mut usize) -> ParserResult<()>
2390	{	for (i, c) in line.bytes().enumerate()
2391		{	match c
2392			{	b'{' => *balance += 1,
2393				b'}' => {*balance -= 1; *last_close = i}
2394				_ => {}
2395			}
2396		}
2397		if *balance < 0
2398		{	return Err(LemonMintError::new(filename, n_line, "Braces on line not balanced".to_string()));
2399		}
2400		if *balance == 0
2401		{	if !line[*last_close+1 ..].trim_end().is_empty()
2402			{	return Err(LemonMintError::new(filename, n_line, "Junk after closing brace".to_string()));
2403			}
2404		}
2405		Ok(())
2406	}
2407
2408	///	Set the parser "%start_symbol".
2409	pub fn set_start_symbol(mut self, filename: &Arc<String>, n_line: usize, mut value: String) -> ParserResult<Self>
2410	{	let trimmed = value.trim();
2411		if trimmed.len() != value.len()
2412		{	value = trimmed.to_string();
2413		}
2414		if value.find(|c: char| !c.is_ascii_alphanumeric() && c!='_').is_some()
2415		{	return Err(LemonMintError::new(filename, n_line, format!("Invalid start symbol name: {}", value)));
2416		}
2417		if is_terminal_name(&value)
2418		{	return Err(LemonMintError::new(filename, n_line, "Start symbol must be nonterminal".to_string()));
2419		}
2420		self.start_name = value;
2421		Ok(self)
2422	}
2423
2424	///	Set the parser "%token_type".
2425	pub fn set_token_type(mut self, value: String) -> ParserResult<Self>
2426	{	self.token_type = typename_to_string(value);
2427		Ok(self)
2428	}
2429
2430	///	Set the parser "%default_type".
2431	pub fn set_default_type(mut self, value: String) -> ParserResult<Self>
2432	{	self.vartype = typename_to_string(value);
2433		Ok(self)
2434	}
2435
2436	///	Enable trace, and set prompt that will be printed before each message.
2437	/// The prompt can be empty string.
2438	pub fn set_trace_prompt(mut self, value: String) -> ParserResult<Self>
2439	{	self.with_trace = true;
2440		self.trace_prompt = value;
2441		Ok(self)
2442	}
2443
2444	/// Set the parser %extra_argument.
2445	/// Only it's type, like:
2446	///
2447	/// ```
2448	/// # use lemon_mint::LemonMintBuilder;
2449	///
2450	/// let builder = LemonMintBuilder::new().set_extra_argument_type("String".to_string()).unwrap();
2451	/// ```
2452	///
2453	/// In your rust code that uses the generated parser, you initialize the extra argument when you create a Parser object:
2454	///
2455	/// ```ignore
2456	/// %code {
2457	/// 	fn main()
2458	/// 	{	let mut parser = Parser::new("Initial value".to_string()); // Initial value of the extra argument
2459	/// 		assert_eq!(parser.extra, "Initial value".to_string());
2460	/// 	}
2461	/// }
2462	/// ```
2463	///
2464	/// In actions code, the extra argument is available as variable `&mut extra`:
2465	///
2466	/// ```ignore
2467	/// %token_type {String}
2468	/// Unit ::= NEW_LINE(tok). extra.push_str(&tok);
2469	/// ```
2470	pub fn set_extra_argument_type(mut self, value: String) -> ParserResult<Self>
2471	{	self.extra_argument_type = typename_to_string(value);
2472		Ok(self)
2473	}
2474
2475	///	Add the parser "%left A B C".
2476	///
2477	/// # Example
2478	///
2479	/// ```
2480	/// # use lemon_mint::LemonMintBuilder;
2481	/// # use std::sync::Arc;
2482	///
2483	/// let fake_filename = Arc::new("source.y".to_string()); // will appear in error messages
2484	/// let builder = LemonMintBuilder::new().set_left(&fake_filename, 1, "PLUS MINUS").unwrap().set_left(&fake_filename, 2, "TIMES DIVIDE").unwrap();
2485	/// ```
2486	///
2487	/// * symbol_names - Terminal symbol names, separated by whitespace characters. All the symbols will have the same precedence.
2488	/// Further calls to `set_left()`, `set_right()` or `set_nonassoc()` will set higher precedence.
2489	pub fn set_left(self, filename: &Arc<String>, n_line: usize, symbol_names: &str) -> ParserResult<Self>
2490	{	self.add_associativity(filename, n_line, symbol_names, Associativity::LEFT)
2491	}
2492
2493	///	Add the parser "%right A B C".
2494	pub fn set_right(self, filename: &Arc<String>, n_line: usize, symbol_names: &str) -> ParserResult<Self>
2495	{	self.add_associativity(filename, n_line, symbol_names, Associativity::RIGHT)
2496	}
2497
2498	///	Add the parser "%nonassoc A B C".
2499	pub fn set_nonassoc(self, filename: &Arc<String>, n_line: usize, symbol_names: &str) -> ParserResult<Self>
2500	{	self.add_associativity(filename, n_line, symbol_names, Associativity::NONASSOC)
2501	}
2502
2503	fn add_associativity(mut self, filename: &Arc<String>, n_line: usize, symbol_names: &str, assoc: Associativity) -> ParserResult<Self>
2504	{	for symbol_name in symbol_names.trim().split(|c: char| c.is_ascii_whitespace())
2505		{	if !symbol_name.is_empty()
2506			{	if symbol_name.find(|c: char| !c.is_ascii_alphanumeric() && c!='_').is_some()
2507				{	return Err(LemonMintError::new(filename, n_line, format!("Invalid token name: {}", symbol_name)));
2508				}
2509				if !is_terminal_name(symbol_name)
2510				{	return Err(LemonMintError::new(filename, n_line, format!("Can only set precedence for terminal symbols, not \"{}\"", symbol_name)));
2511				}
2512				if let Some(prev) = self.precedence.get(symbol_name)
2513				{	return Err(LemonMintError::new(filename, n_line, format!("Symbol \"{}\" has already be given a precedence at {}:{}", symbol_name, prev.filename, prev.n_line)));
2514				}
2515				self.precedence.insert
2516				(	symbol_name.to_string(),
2517					PrecedenceInFile
2518					{	filename: Arc::clone(filename),
2519						n_line,
2520						assoc,
2521						precedence: self.prec_counter,
2522					}
2523				);
2524			}
2525		}
2526		self.prec_counter += 1;
2527		Ok(self)
2528	}
2529
2530	///	Add the parser "%type symbol_name {Type}".
2531	pub fn add_type(mut self, filename: &Arc<String>, n_line: usize, mut symbol_name: String, type_name: String) -> ParserResult<Self>
2532	{	let trimmed = symbol_name.trim();
2533		if trimmed.len() != symbol_name.len()
2534		{	symbol_name = trimmed.to_string();
2535		}
2536		if symbol_name.find(|c: char| !c.is_ascii_alphanumeric() && c!='_').is_some()
2537		{	return Err(LemonMintError::new(filename, n_line, format!("Invalid symbol name: {}", symbol_name)));
2538		}
2539		if is_terminal_name(&symbol_name)
2540		{	return Err(LemonMintError::new(filename, n_line, format!("Can only set type for nonterminal symbols, not \"{}\"", symbol_name)));
2541		}
2542		if let Some(prev) = self.nonterminal_types.get(&symbol_name)
2543		{	return Err(LemonMintError::new(filename, n_line, format!("Type for symbol \"{}\" already defined at {}:{}", symbol_name, prev.filename, prev.n_line)));
2544		}
2545		self.nonterminal_types.insert
2546		(	symbol_name,
2547			StringInFile
2548			{	filename: Arc::clone(filename),
2549				n_line,
2550				string: typename_to_string(type_name),
2551			}
2552		);
2553		Ok(self)
2554	}
2555
2556	/// Return `Some((filename, n_line))` if given symbol was added through `add_type()`.
2557	pub fn get_type(&self, mut symbol_name: &str) -> Option<(Arc<String>, usize)>
2558	{	symbol_name = symbol_name.trim();
2559		self.nonterminal_types.get(symbol_name).map(|v| (Arc::clone(&v.filename), v.n_line))
2560	}
2561
2562	///	Add the parser "%fallback FB A B C".
2563	pub fn add_fallback(mut self, filename: &Arc<String>, n_line: usize, mut fallback_to_symbol_name: String, symbol_names: &str) -> ParserResult<Self>
2564	{	let trimmed = fallback_to_symbol_name.trim();
2565		if trimmed.len() != fallback_to_symbol_name.len()
2566		{	fallback_to_symbol_name = trimmed.to_string();
2567		}
2568		if fallback_to_symbol_name.find(|c: char| !c.is_ascii_alphanumeric() && c!='_').is_some()
2569		{	return Err(LemonMintError::new(filename, n_line, format!("Invalid token name: {}", fallback_to_symbol_name)));
2570		}
2571		let fallback_to_symbol_name = Arc::new(fallback_to_symbol_name);
2572		for symbol_name in symbol_names.trim().split(|c: char| c.is_ascii_whitespace())
2573		{	if !symbol_name.is_empty()
2574			{	if symbol_name.find(|c: char| !c.is_ascii_alphanumeric() && c!='_').is_some()
2575				{	return Err(LemonMintError::new(filename, n_line, format!("Invalid token name: {}", symbol_name)));
2576				}
2577				if !is_terminal_name(symbol_name)
2578				{	return Err(LemonMintError::new(filename, n_line, format!("Can only set fallback to terminal symbols, not \"{}\"", symbol_name)));
2579				}
2580				let symbol_name = Arc::new(symbol_name.to_string());
2581				self.symbols_builder.add(&symbol_name);
2582				self.symbols_builder.add(&fallback_to_symbol_name);
2583				let sp = self.symbols_builder.get(&symbol_name);
2584				if sp.fallback_index != std::usize::MAX
2585				{	return Err(LemonMintError::new(filename, n_line, format!("More than one fallback assigned to token \"{}\"", symbol_name)));
2586				}
2587				let fsp = self.symbols_builder.get(&fallback_to_symbol_name);
2588				if *fsp == *sp
2589				{	return Err(LemonMintError::new(filename, n_line, format!("Symbol \"{}\" is asked fallback to self // {}, {}", symbol_name, fallback_to_symbol_name, symbol_name)));
2590				}
2591				self.symbols_builder.get_mut(&symbol_name).fallback_index = fsp.index;
2592			}
2593		}
2594		Ok(self)
2595	}
2596
	///	Add parser rule like "a", "b(v_b) COMMA c(v_c)", "vec![v_b, v_c]"
	///
	/// `lhs_name` is the nonterminal on the left of "::=", `rhs_names_aliases` is the
	/// right-hand side as written in the grammar (symbol names, each optionally followed by
	/// an alias in parentheses), and `code` is the action to run when the rule reduces.
	/// The first rule added determines the default start symbol.
	pub fn add_rule(mut self, filename: &Arc<String>, n_line: usize, mut lhs_name: String, rhs_names_aliases: &str, code: String) -> ParserResult<Self>
	{	// Trim the LHS name, reallocating only when something was actually trimmed.
		let trimmed = lhs_name.trim();
		if trimmed.len() != lhs_name.len()
		{	lhs_name = trimmed.to_string();
		}
		// The LHS of a rule must be a nonterminal.
		if is_terminal_name(&lhs_name)
		{	return Err(LemonMintError::new(filename, n_line, format!("Invalid LHS symbol: \"{}\"", lhs_name)));
		}
		// Normalize whitespace-only action code to the empty string.
		let mut code = code;
		if code.trim().is_empty()
		{	code = String::new();
		}
		// The first rule's LHS becomes the start symbol, unless one was set explicitly.
		if self.start_name.is_empty()
		{	self.start_name = lhs_name.clone();
		}
		let lhs_name = Arc::new(lhs_name);
		let lhs_index = self.symbols_builder.add(&lhs_name);
		let mut rule = Rule::new(filename, n_line, &lhs_name, lhs_index, self.rules.len(), code);
		// parse rhs_names_aliases
		// `s` is advanced through the string: a run of [A-Za-z0-9_] is a symbol name,
		// optionally followed by "(alias)"; whitespace between tokens is skipped.
		let mut s = rhs_names_aliases.trim_start();
		while s.len() != 0
		{	// Length of the leading identifier; 0 means the next char is not a valid name char.
			let name_len = s.chars().position(|c| !c.is_ascii_alphanumeric() && c!='_').unwrap_or(s.len());
			if name_len == 0
			{	return Err(LemonMintError::new(filename, n_line, format!("Invalid RHS expression: \"{}\"", rhs_names_aliases)));
			}
			let rhs_name = Arc::new(s[.. name_len].to_string());
			let rhs_index = self.symbols_builder.add(&rhs_name);
			s = s[name_len ..].trim_start();
			let mut alias = String::new();
			// Optional "(alias)" right after the symbol name.
			if s.bytes().next() == Some(b'(')
			{	s = s[1 ..].trim_start();
				let alias_len = s.chars().position(|c| !c.is_ascii_alphanumeric() && c!='_').unwrap_or(s.len());
				alias = s[.. alias_len].to_string();
				s = s[alias_len ..].trim_start();
				// The alias must be nonempty and followed by a closing parenthesis.
				if alias_len==0 || s.bytes().next() != Some(b')')
				{	return Err(LemonMintError::new(filename, n_line, format!("Invalid alias in RHS expression: \"{}\"", rhs_names_aliases)));
				}
				s = s[1 ..].trim_start();
			}
			rule.rhs.push(Rhs{name: rhs_name, index: rhs_index, alias});
		}
		// Record this rule's number on its LHS symbol (prepended, preserving existing behavior).
		self.symbols_builder.get_mut(&lhs_name).sym_rules_arr.insert(0, self.rules.len()); // TODO: maybe push(), not insert()
		self.rules.push(rule);
		Ok(self)
	}
2643
2644	///	Add the parser %code or %include
2645	pub fn add_code(mut self, code: String) -> ParserResult<Self>
2646	{	if self.extracode.is_empty()
2647		{	self.extracode = code;
2648		}
2649		else
2650		{	self.extracode.push_str("\n\n");
2651			self.extracode.push_str(&code);
2652		}
2653		Ok(self)
2654	}
2655
2656	/// Allows to disable finite-state machine tables compression. Don't do this.
2657	pub fn set_no_compress(mut self, new_no_compress: bool) -> ParserResult<Self>
2658	{	self.no_compress = new_no_compress;
2659		Ok(self)
2660	}
2661
2662	/// Disable resorting states. You don't need this functionality.
2663	pub fn set_no_resort(mut self, new_no_resort: bool) -> ParserResult<Self>
2664	{	self.no_resort = new_no_resort;
2665		Ok(self)
2666	}
2667
	/// When you feed all the parser rules and options, call this method to finally build the parser finite-state machine transition tables.
	/// If there are problems with your grammar or if there are parser conflicts, this will return Err.
	/// If parser build succeeds this returns the parser representation as `LemonMint` object.
	///
	/// The steps below run in a fixed order; each dump_* call is a trace point after a phase.
	pub fn try_into_lemon(mut self) -> ParserResult<LemonMint>
	{	// rules
		// Take the rules out of self; an empty grammar is an error.
		let mut rules = mem::replace(&mut self.rules, Vec::new());
		if rules.len() == 0
		{	return Err(LemonMintError::new(&Arc::new(String::new()), 1, "Empty grammar".to_string()));
		}

		// Count and index the symbols of the grammar
		let mut symbols =
		{	let symbols_builder = mem::replace(&mut self.symbols_builder, SymbolsBuilder::new(true));
			let nonterminal_types = mem::replace(&mut self.nonterminal_types, HashMap::new());
			let precedence = mem::replace(&mut self.precedence, HashMap::new());

			symbols_builder.into_symbols(&self.start_name, nonterminal_types, precedence, &mut rules)? // symbols - Sorted array of all symbols
		};

		// Put rules that have no reduce action C-code associated with them last, so that the switch() statement that selects reduction actions will have a smaller jump table.
		// The sort is stable with respect to the original rule order (ties compare by index).
		rules.sort_by
		(	|a, b|
			{	if a.code.is_empty() == b.code.is_empty()
				{	a.index.cmp(&b.index)
				}
				else if a.code.is_empty()
				{	Ordering::Greater
				}
				else
				{	Ordering::Less
				}
			}
		);
		// Renumber the rules to match their new positions.
		for (i, rule) in rules.iter_mut().enumerate()
		{	rule.index = i;
		}

		// Find the precedence for every production rule (that has one)
		Self::find_rule_precedences(&mut symbols, &mut rules);

		// Compute the lambda-nonterminals and the first-sets for every nonterminal
		self.find_first_sets(&mut symbols, &mut rules);

		// Find the start symbol
		if !self.start_name.is_empty()
		{	if symbols.start_symbol_index == std::usize::MAX
			{	return Err(LemonMintError::new(&Self::get_filename_of_first_rule(&rules), 1, format!("The specified start symbol \"{}\" is not in a nonterminal of the grammar", self.start_name)));
			}
		}
		else
		{	return Err(LemonMintError::new(&Self::get_filename_of_first_rule(&rules), 1, "Please, specify a start symbol".to_string()));
		}

		// Make sure the start symbol doesn't occur on the right-hand side of any rule.  Report an error if it does.
		// (YACC would generate a new start symbol in this case.)
		for rule in rules.iter()
		{	for rhs in &rule.rhs
			{	if rhs.index == symbols.start_symbol_index
				{	return Err(LemonMintError::new(&Self::get_filename_of_first_rule(&rules), 1, format!("The start symbol \"{}\" occurs on the right-hand side of a rule", rhs.name)));
				}
			}
		}

		dump_symbols(&symbols, &rules);

		// Compute all LR(0) states.  Also record follow-set propagation links so that the follow-set can be computed later
		let mut states = StatesBuilder::build(&symbols, &mut rules, symbols.get_start_symbol(), symbols.n_terminals, &mut self.actions_enum)?;

		dump_states(&states, &rules, &symbols, 1);

		// Tie up loose ends on the propagation links
		Self::find_links(&states);

		dump_states(&states, &rules, &symbols, 2);

		// Compute the follow set of every reducible configuration
		Self::find_follow_sets(&states);

		dump_states(&states, &rules, &symbols, 3);

		// Compute the action tables
		self.find_actions(&symbols, &mut rules, &mut states)?;

		dump_states(&states, &rules, &symbols, 4);

		// Compress the action tables
		if !self.no_compress
		{	self.compress_tables(&symbols, &rules, &mut states, symbols.default_symbol_index);
		}

		dump_states(&states, &rules, &symbols, 5);

		// Reorder and renumber the states so that states with fewer choices occur at the end.  This is an optimization that helps make the
		// generated parser tables smaller.
		if !self.no_resort
		{	self.resort_states(&symbols, &mut states);
		}

		dump_states(&states, &rules, &symbols, 6);

		// Compute the action table
		// Action numbers are laid out in consecutive ranges: plain shifts first (one per state),
		// then shift-reduces (one per rule), then the single error / accept / no-action codes,
		// and finally the reduces. The generated constants in gen_rust() mirror this layout.
		self.min_shift_reduce = states.array.len();
		self.error_action = self.min_shift_reduce + rules.len();
		self.accept_action = self.error_action + 1;
		self.no_action = self.accept_action + 1;
		self.min_reduce = self.no_action + 1;
		self.max_action = self.min_reduce + rules.len();
		let acttab = self.init_acttab(&symbols, &mut rules, &mut states);

		dump_states(&states, &rules, &symbols, 7);

		// Generate the source code for the parser
		if self.n_conflicts > 0
		{	Err(LemonMintError::new(&Self::get_filename_of_first_rule(&rules), 1, format!("{} parsing conflicts", self.n_conflicts)))
		}
		else
		{	self.get_lemon(symbols, rules, states, acttab)
		}
	}
2787}
2788
/// Target language of the generated code. Only Rust is produced at the moment,
/// but keeping the enum leaves room for further backends.
#[derive(Debug, Copy, Clone, PartialEq)]
enum Lang
{	Rust
}

/// Map a signed byte-width to the name of the corresponding integer type:
/// negative widths select signed types, positive widths unsigned ones, and
/// any other width falls back to the pointer-sized signed type.
fn size_type_to_str(lang: Lang, n_bytes: isize) -> &'static str
{	match lang
	{	Lang::Rust =>
			match n_bytes
			{	-1 => "i8",
				-2 => "i16",
				-4 => "i32",
				1 => "u8",
				2 => "u16",
				4 => "u32",
				_ => "isize",
			}
	}
}
2805
/// The compiled parser that can be saved to a rust file. Use `LemonMintBuilder` to build it.
#[derive(Debug)]
pub struct LemonMint
{	symbols: Symbols,                  // All symbols of the grammar, sorted and indexed
	rules: Vec<Rule>,                  // All rules
	states: States,                    // The finite-state machine states
	token_type: String,                // Type of terminal symbols in the parser stack
	vartype: String,                   // The default type of non-terminal symbols
	start_name: String,                // Name of the start symbol for the grammar
	start_type: String,                // Type of the start symbol; emitted as "StartType" ("()" if empty)
	extra_argument_type: String,       // Emitted as "ExtraArgumentType" ("()" if empty)
	extracode: String,                 // Accumulated %code/%include text, appended after the generated module
	minor_type: Vec<(String, String)>, // (variant name, payload type) pairs for the generated MinorType enum; empty type = no payload
	token: Vec<Arc<String>>,           // Terminal names in order; emitted as the generated "Token" enum

	// Signed byte-widths of the generated integer type aliases; fed to size_type_to_str()
	// (negative = signed type, positive = unsigned).
	sz_code_type: isize,
	sz_action_type: isize,
	sz_shift_offset_type: isize,
	sz_reduce_offset_type: isize,

	n_symbols: usize,                  // Emitted as NO_CODE (YYNOCODE)
	n_states: usize,                   // Emitted as MIN_SHIFTREDUCE (YY_MIN_SHIFTREDUCE)
	n_no_tail: usize,                  // Emitted as N_STATES (YYNSTATE)
	n_fallbacks: usize,                // Number of fallback entries; WITH_FALLBACK is emitted as n_fallbacks>0
	error_symbol: usize,               // Emitted as ERROR_SYMBOL (YYERRORSYMBOL)
	with_fallback: bool,               // NOTE(review): not read by the generators visible here — verify its use
	shift_count: usize,                // Emitted as SHIFT_COUNT (YY_SHIFT_COUNT)
	reduce_count: usize,               // Emitted as REDUCE_COUNT (YY_REDUCE_COUNT)
	with_trace: bool,                  // Emitted as TRACE: enables tracing in the generated parser
	trace_prompt: String,              // Prompt to preface each trace message, if with_trace is true
	// Offset ranges; emitted only as commented-out SHIFT_MIN/SHIFT_MAX/REDUCE_MIN/REDUCE_MAX constants.
	shift_min: isize,
	shift_max: isize,
	reduce_min: isize,
	reduce_max: isize,

	n_terminals: usize,                // Emitted as N_TERMINALS (YYNTOKEN)
	n_nonterminals: usize,             // presumably the nonterminal count — not read in the visible generators
	tables_size: usize,                // Total table size of all tables in bytes
	n_action_table_entries: usize,     // Number of entries in the yy_action[] table

	// The compressed action tables, ready to be formatted into the generated source.
	lookahead_and_action: Vec<(usize, usize)>, // Emitted as the LOOKAHEAD_AND_ACTION table
	shift_offset: Vec<isize>,          // SHIFT_OFFSET: per-state offset into the action table for terminals
	reduce_offset: Vec<isize>,         // REDUCE_OFFSET: per-state offset for nonterminals after a reduce
	default: Vec<usize>,               // DEFAULT: default action for each state
	fallback: Vec<(usize, Arc<String>, Arc<String>)>, // FALLBACK table: (fallback code, token name, falls-back-to name)
	token_names: Vec<Arc<String>>,     // TOKEN_NAMES: symbolic name of every symbol
}
2853impl LemonMint
{	/// Generate Rust source code for the parser.
	///
	/// Writes a complete `pub mod code {...}` module to `out`, interleaving fixed chunks of
	/// the "lempar" template (via `LemparReader::copy_part`, each call copying up to the next
	/// `%%` marker) with the generated type aliases, token enum, constants, compressed action
	/// tables, per-rule reduce code, and finally a `mod rules` with one function per action.
	pub fn gen_rust<W>(&self, out: &mut W) -> io::Result<()> where W: io::Write
	{	writeln!(out, "pub mod code {{")?;

		let mut lempar_reader = LemparReader::new();
		lempar_reader.copy_part(out)?; // to %% no. 1

		// Generate types
		// Empty user types default to "String" for tokens and "()" for the rest.
		writeln!(out, "pub type TokenValue = {};", if !self.token_type.is_empty() {&self.token_type} else {"String"})?;
		writeln!(out, "pub type ExtraArgumentType = {};", if !self.extra_argument_type.is_empty() {&self.extra_argument_type} else {"()"})?;
		writeln!(out, "pub type StartType = {};", if !self.start_type.is_empty() {&self.start_type} else {"()"})?;
		// The sz_* fields are signed byte-widths; size_type_to_str() turns them into "i8"/"u16"/etc.
		writeln!(out, "type CodeType = {};", size_type_to_str(Lang::Rust, self.sz_code_type))?;
		writeln!(out, "type ActionType = {};", size_type_to_str(Lang::Rust, self.sz_action_type))?;
		writeln!(out, "type ShiftOffsetType = {};", size_type_to_str(Lang::Rust, self.sz_shift_offset_type))?;
		writeln!(out, "type ReduceOffsetType = {};", size_type_to_str(Lang::Rust, self.sz_reduce_offset_type))?;

		// MinorType: one enum variant per distinct semantic-value type; an empty type string
		// produces a payloadless variant.
		write!(out, "enum MinorType\n{{")?;
		for (variant, typ) in self.minor_type.iter()
		{	if typ.is_empty()
			{	writeln!(out, "\t{},", variant)?;
			}
			else
			{	writeln!(out, "\t{}({}),", variant, typ)?;
			}
		}
		writeln!(out, "}}")?;

		// Generate token constants
		// Token discriminants start at 1 (0 is reserved).
		writeln!(out, "#[repr(u32)]\n#[derive(Copy, Clone, PartialEq)]\n#[allow(non_camel_case_types)]")?;
		write!(out, "pub enum Token\n{{")?;
		for (i, name) in self.token.iter().enumerate()
		{	// NOTE(review): both branches below emit identical text; the condition on
			// token_type currently makes no difference.
			if self.token_type.is_empty()
			{	writeln!(out, "\t{:<30} = {:2},", name, i+1)?;
			}
			else
			{	writeln!(out, "\t{:<30} = {:2},", name, i+1)?;
			}
		}
		writeln!(out, "}}\n")?;

		// Generate constants
		// Each constant mirrors the classic Lemon YY* macro noted in its trailing comment.
		writeln!(out, "const NO_CODE: CodeType = {}; // YYNOCODE", self.n_symbols)?;
		writeln!(out, "const N_STATES: usize = {}; // YYNSTATE", self.n_no_tail)?;
		writeln!(out, "const N_RULES: usize = {}; // YYNRULE", self.rules.len())?;
		writeln!(out, "const N_TERMINALS: usize = {}; // YYNTOKEN", self.n_terminals)?;
		writeln!(out, "const MAX_SHIFT: usize = N_STATES-1; // YY_MAX_SHIFT")?;
		writeln!(out, "const MIN_SHIFTREDUCE: usize = {}; // YY_MIN_SHIFTREDUCE", self.n_states)?;
		writeln!(out, "const MAX_SHIFTREDUCE: usize = MIN_SHIFTREDUCE + N_RULES - 1; // YY_MAX_SHIFTREDUCE")?;
		writeln!(out, "const ERROR_ACTION: usize = MAX_SHIFTREDUCE + 1; // YY_ERROR_ACTION")?;
		writeln!(out, "const ACCEPT_ACTION: usize = ERROR_ACTION + 1; // YY_ACCEPT_ACTION")?;
		writeln!(out, "const NO_ACTION: usize = ACCEPT_ACTION + 1; // YY_NO_ACTION")?;
		writeln!(out, "const MIN_REDUCE: usize = NO_ACTION + 1; // YY_MIN_REDUCE")?;
		writeln!(out, "//const MAX_REDUCE: usize = MIN_REDUCE + N_RULES - 1; // YY_MAX_REDUCE")?;
		writeln!(out, "const ERROR_SYMBOL: CodeType = {}; // YYERRORSYMBOL", self.error_symbol)?;
		writeln!(out, "const WITH_FALLBACK: bool = {}; // YYFALLBACK", if self.n_fallbacks>0 {"true"} else {"false"})?;
		writeln!(out, "const SHIFT_COUNT: usize = {}; // YY_SHIFT_COUNT", self.shift_count)?;
		writeln!(out, "const REDUCE_COUNT: usize = {}; // YY_REDUCE_COUNT", self.reduce_count)?;
		writeln!(out, "//const SHIFT_MIN: i32 = {}; // YY_SHIFT_MIN", self.shift_min)?;
		writeln!(out, "//const SHIFT_MAX: i32 = {}; // YY_SHIFT_MAX", self.shift_max)?;
		writeln!(out, "//const REDUCE_MIN: i32 = {}; // YY_REDUCE_MIN", self.reduce_min)?;
		writeln!(out, "//const REDUCE_MAX: i32 = {}; // YY_REDUCE_MAX", self.reduce_max)?;
		writeln!(out, "const ACTTAB_COUNT: usize = {}; // YY_ACTTAB_COUNT", self.n_action_table_entries)?;
		writeln!(out, "const TRACE: bool = {};", if self.with_trace {"true"} else {"false"})?;

		// trace
		// If the prompt needs no escaping, emit it as a plain string literal; otherwise emit
		// a raw string, growing the '#' fence until it does not occur inside the prompt.
		if self.trace_prompt.chars().position(|c| c=='\\' || c=='"').is_none()
		{	writeln!(out, "const TRACE_PROMPT: &str = \"{}\";", self.trace_prompt)?;
		}
		else
		{	let mut tag = String::with_capacity(self.trace_prompt.len()+1);
			for _i in 0 .. self.trace_prompt.len()+1
			{	tag.push('#');
				if !self.trace_prompt.contains(&tag)
				{	break;
				}
			}
			writeln!(out, "const TRACE_PROMPT: &str = r{tag}\"{}\"{tag};", self.trace_prompt, tag=tag)?;
		}

		// Generate the action table and its associates:
		//
		//  yy_action[]        A single table containing all actions.
		//  yy_lookahead[]     A table containing the lookahead for each entry in
		//                     yy_action.  Used to detect hash collisions.
		//  SHIFT_OFFSET[]    For each state, the offset into yy_action for
		//                     shifting terminals.
		//  REDUCE_OFFSET[]   For each state, the offset into yy_action for
		//                     shifting non-terminals after a reduce.
		//  DEFAULT[]       Default action for each state.

		// Output the LOOKAHEAD_AND_ACTION table
		write!(out, "const LOOKAHEAD_AND_ACTION: [LookaheadAndAction; {}] =", self.lookahead_and_action.len())?;
		let mut formatter = ArrayFormatter::new(2);
		for (lookahead, action) in self.lookahead_and_action.iter()
		{	formatter.separ(out)?;
			write!(out, "LookaheadAndAction {{lookahead: {:4}, action: {:4}}}", lookahead, action)?;
		}
		formatter.end(out)?;

		// Output the SHIFT_OFFSET[] table
		write!(out, "const SHIFT_OFFSET: [ShiftOffsetType; {}] =", self.shift_offset.len())?;
		let mut formatter = ArrayFormatter::new(16);
		for offset in self.shift_offset.iter()
		{	formatter.separ(out)?;
			write!(out, "{:4}", offset)?;
		}
		formatter.end(out)?;

		// Output the REDUCE_OFFSET[] table
		write!(out, "const REDUCE_OFFSET: [ReduceOffsetType; {}] =", self.reduce_offset.len())?;
		let mut formatter = ArrayFormatter::new(16);
		for offset in self.reduce_offset.iter()
		{	formatter.separ(out)?;
			write!(out, "{:4}", offset)?;
		}
		formatter.end(out)?;

		// Output the default action table
		write!(out, "const DEFAULT: [ActionType; {}] =", self.default.len())?;
		let mut formatter = ArrayFormatter::new(16);
		for action in self.default.iter()
		{	formatter.separ(out)?;
			write!(out, "{:4}", action)?;
		}
		formatter.end(out)?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 2

		// Generate the table of fallback tokens
		write!(out, "const FALLBACK: [CodeType; {}] =", self.fallback.len())?;
		let mut formatter = ArrayFormatter::new(1);
		for (index, name, to_name) in self.fallback.iter()
		{	formatter.separ(out)?;
			write!(out, "{:3} /* {:10} => {} */", index, name, to_name)?;
		}
		formatter.end(out)?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 3

		// Generate a table containing the symbolic name of every symbol
		write!(out, "const TOKEN_NAMES: [&str; {}] =", self.token_names.len())?;
		let mut formatter = ArrayFormatter::new(1);
		for name in self.token_names.iter()
		{	formatter.separ(out)?;
			write!(out, "{:<15}", format!("\"{}\"", name))?;
		}
		formatter.end(out)?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 4

		// Generate a table containing a text string that describes every rule in the rule set of the grammar.
		// This information is used when tracing REDUCE actions.
		write!(out, "const RULE_NAMES: [&str; {}] =", self.rules.len())?;
		let mut formatter = ArrayFormatter::new(1);
		for rule in self.rules.iter()
		{	formatter.separ(out)?;
			write!(out, "\"{}\"", rule)?;
		}
		formatter.end(out)?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 5

		// Generate the table of rule information
		// Note: This code depends on the fact that rules are number sequentually beginning with 0.
		write!(out, "const RULES_INFO: [CodeType; {}] =", self.rules.len())?;
		let mut formatter = ArrayFormatter::new(16);
		for rule in &self.rules
		{	formatter.separ(out)?;
			write!(out, "{:4}", rule.lhs_index)?;
		}
		formatter.end(out)?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 6

		// First output rules other than the default: rule
		// Each emitted match arm pops this rule's RHS values off the generated parser's stack
		// (binding aliased ones as argN), then calls super::rules::do_rule_N with them.
		for rule in self.rules.iter()
		{	if !rule.code.is_empty() // Will be default:
			{	write!(out, "\t\t\t{} =>", rule.index)?;
				let has_data_type = !self.symbols.array[rule.lhs_index].data_type.is_empty();
				let mut next_indent = "\t\t\t\t";
				// If the LHS carries a type (and is not the start symbol), the result is wrapped
				// in its MinorType::SymbolN variant.
				if has_data_type && !rule.lhs_start
				{	write!(out, " MinorType::Symbol{}\n\t\t\t(\t{{", self.symbols.array[rule.lhs_index].dtnum)?;
					next_indent = "\t\t\t\t\t";
				}
				else
				{	write!(out, "\n\t\t\t{{")?;
				}
				let mut indent = "\t";
				let mut n_aliases = 0;
				let n_args = rule.rhs.len();
				// Pop the RHS symbols in reverse; only aliased ones are kept as argN.
				for i in (0 .. n_args).rev()
				{	let has_alias = !rule.rhs[i].alias.is_empty();
					if has_alias
					{	n_aliases += 1;
						writeln!(out, "{}let arg{} = self.stack.pop().unwrap();", indent, i+1)?;
					}
					else
					{	writeln!(out, "{}self.stack.pop();", indent)?;
					}
					indent = next_indent;
				}
				if n_aliases > 0
				{	// Destructure the popped MinorType values (a tuple when there are several),
					// then forward them to the rule's function.
					write!(out, "{}{}match {}", indent, if rule.lhs_start {"let result = "} else {""}, if n_aliases>1 {"("} else {""})?;
					let mut comma = "";
					for i in 0 .. n_args
					{	if !rule.rhs[i].alias.is_empty()
						{	write!(out, "{}arg{}.minor", comma, i+1)?;
							comma = ", ";
						}
					}
					write!(out, "{}\n{}{{\t{}", if n_aliases>1 {")"} else {""}, indent, if n_aliases>1 {"("} else {""})?;
					comma = "";
					for (i, rhs) in rule.rhs.iter().enumerate()
					{	if !rhs.alias.is_empty()
						{	write!(out, "{}MinorType::Symbol{}(arg{})", comma, self.symbols.array[rhs.index].dtnum, i+1)?;
							comma = ", ";
						}
					}
					write!(out, "{} => super::rules::do_rule_{}(&mut self.extra", if n_aliases>1 {")"} else {""}, rule.index)?;
					for i in 0 .. n_args
					{	if !rule.rhs[i].alias.is_empty()
						{	write!(out, ", arg{}", i+1)?;
						}
					}
					// Close the match: the start rule stores the result, a typed LHS wraps it,
					// an untyped LHS discards it — in all cases a MinorType value is produced.
					if rule.lhs_start
					{	writeln!(out, "),\n{}\t_ => unreachable!()\n{}}};\n\t\t\t\tself.result = Some(result);\n\t\t\t\tMinorType::None\n\t\t\t}},", indent, indent)?;
					}
					else if has_data_type
					{	writeln!(out, "),\n{}\t_ => unreachable!()\n{}}}\n\t\t\t\t}}\n\t\t\t),", indent, indent)?;
					}
					else
					{	writeln!(out, "),\n{}\t_ => unreachable!()\n{}}};\n\t\t\t\tMinorType::None\n\t\t\t}},", indent, indent)?;
					}
				}
				else
				{	// No aliases: the rule function is called with the extra argument only.
					if rule.lhs_start
					{	writeln!(out, "{}self.result = Some(super::rules::do_rule_{}(&mut self.extra));\n\t\t\t\tMinorType::None\n\t\t\t}},", indent, rule.index)?;
					}
					else if has_data_type
					{	writeln!(out, "{}super::rules::do_rule_{}(&mut self.extra)\n\t\t\t}}),", indent, rule.index)?;
					}
					else
					{	writeln!(out, "{}super::rules::do_rule_{}(&mut self.extra);\n\t\t\t\tMinorType::None\n\t\t\t}},", indent, rule.index)?;
					}
				}
			}
		}
		// Finally, output the default: rule.
		writeln!(out, "\t\t\t_ =>\n\t\t\t{{\tself.stack.pop(); MinorType::None\n\t\t\t}}")?;

		// Next
		lempar_reader.copy_part(out)?; // to %% no. 7

		// Emit "mod rules": one function per rule that has action code, taking the extra
		// argument plus every aliased RHS value, returning the LHS type when it has one.
		writeln!(out, "\n}}\n\n\nmod rules\n{{\tuse super::code::{{TokenValue, ExtraArgumentType}};")?;
		for rule in self.rules.iter()
		{	// NOTE(review): no-op rebinding, kept as-is.
			let rule = rule;
			if !rule.code.is_empty()
			{	write!(out, "\n\t/// {}\n", rule)?;
				write!(out, "\t#[inline]\n\t#[allow(non_snake_case, unused_variables)]\n")?;
				write!(out, "\tpub fn do_rule_{}(extra: &mut ExtraArgumentType", rule.index)?;
				for rhs in &rule.rhs
				{	if !rhs.alias.is_empty()
					{	// Terminals use the TokenValue alias; nonterminals use their declared type.
						let data_type = if rhs.index < self.n_terminals
						{	"TokenValue"
						}
						else
						{	&self.symbols.array[rhs.index].data_type
						};
						write!(out, ", {}: {}", rhs.alias, if data_type.is_empty() {"()"} else {data_type})?;
					}
				}
				write!(out, ")")?;
				let lhs = &self.symbols.array[rule.lhs_index];
				if !lhs.data_type.is_empty()
				{	write!(out, " -> {}", lhs.data_type)?;
				}
				write!(out, "\n\t{{\t")?;
				// Re-indent the user's action code: each newline restarts indentation at two
				// tabs, skipping the line's original leading whitespace; 'l exits when the
				// code ends inside that whitespace skip.
				let mut it = rule.code.chars();
'l:				while let Some(mut c) = it.next()
				{	'm:while c == '\n'
					{	write!(out, "\n\t\t")?;
						while let Some(c2) = it.next()
						{	if c2=='\n' || !c2.is_ascii_whitespace()
							{	c = c2;
								continue 'm;
							}
						}
						break 'l;
					}
					write!(out, "{}", c)?;
				}
				writeln!(out, "\n\t}}")?;
			}
		}
		writeln!(out, "}}\n")?;

		// End
		lempar_reader.copy_part(out)?;
		if !self.extracode.is_empty()
		{	writeln!(out, "\n\n{}", self.extracode)?;
		}

		Ok(())
	}
3163
	/// Generate a report of the parser generated.  (the "y.output" file)
	///
	/// * `basisflag` - Print only the basis in report
	/// * `show_precedence_conflict` - Show conflicts resolved by precedence rules
	pub fn gen_log<W>(&self, out: &mut W, basisflag: bool, show_precedence_conflict: bool) -> io::Result<()> where W: io::Write
	{	// One section per state: its configurations, then its actions.
		for stp in self.states.array[0 .. self.states.n_no_tail].iter()
		{	writeln!(out, "State {}:", stp.n_state)?;

			for cfp in stp.configurations.iter()
			{	// With basisflag, skip configurations that are not in the state's basis set.
				if basisflag
				{	let mut is_basis = false;
					for bp in stp.basis.iter()
					{	if *cfp.borrow() == *bp.borrow()
						{	is_basis = true;
							break;
						}
					}
					if !is_basis
					{	continue;
					}
				}
				// A completed configuration (dot at the end) is prefixed with its rule number.
				if cfp.borrow().dot == self.rules[cfp.borrow().n_rule].rhs.len()
				{	let buf = format!("({})", self.rules[cfp.borrow().n_rule].index);
					write!(out, "    {:>5} ", buf)?;
				}
				else
				{	write!(out, "          ")?;
				}
				Self::config_print(&self.rules, out, cfp)?;
				writeln!(out, "")?;
			}

			writeln!(out, "")?;
			for action in stp.actions.iter()
			{	// print_action returns false when it printed nothing (then no newline either).
				if self.print_action(action, out, 30, show_precedence_conflict)?
				{	writeln!(out, "")?;
				}
			}
			writeln!(out, "")?;
		}
		// Trailing summary: every symbol, and for nonterminals their lambda flag and first-set.
		writeln!(out, "----------------------------------------------------")?;
		writeln!(out, "Symbols:")?;
		for i in 0 .. self.symbols.n_symbols
		{	let sp = &self.symbols.array[i];
			write!(out, " {:>3}: {}", i, sp.name)?;
			if sp.typ == SymbolType::NONTERMINAL
			{	write!(out, ":")?;
				if sp.lambda
				{	write!(out, " <lambda>")?;
				}
				for j in 0 .. self.symbols.n_terminals
				{	if sp.firstset.len()>0 && sp.firstset.contains(j)
					{	write!(out, " {}", self.symbols.array[j].name)?;
					}
				}
			}
			writeln!(out, "")?;
		}
		Ok(())
	}
3224
3225	fn config_print<W>(rules: &Vec<Rule>, out: &mut W, cfp: &Rc<RefCell<Config>>) -> io::Result<()> where W: io::Write
3226	{	write!(out, "{} ::=", rules[cfp.borrow().n_rule].lhs)?;
3227		for i in 0 ..= rules[cfp.borrow().n_rule].rhs.len()
3228		{	if i == cfp.borrow().dot
3229			{	write!(out, " *")?;
3230			}
3231			if i == rules[cfp.borrow().n_rule].rhs.len()
3232			{	break;
3233			}
3234			write!(out, " {}", rules[cfp.borrow().n_rule].rhs[i].name)?;
3235		}
3236		Ok(())
3237	}
3238
3239	/// Print an action to the given file descriptor.  Return FALSE if nothing was actually printed.
3240	fn print_action<W>(&self, action: &Rc<RefCell<Action>>, out: &mut W, indent: usize, show_precedence_conflict: bool) -> io::Result<bool> where W: io::Write
3241	{	let mut result = false;
3242		match action.borrow().typ
3243		{	ActionType::Shift =>
3244			{	if let StateOrRule::State(n_state) = action.borrow().x
3245				{	write!(out, "{:>w$} shift        {:<7}", self.symbols.array[action.borrow().symbol_index].name, n_state, w=indent)?;
3246					result = true;
3247				}
3248			}
3249			ActionType::Reduce =>
3250			{	if let StateOrRule::Rule(n_rule) = action.borrow().x
3251				{	write!(out, "{:>w$} reduce       {:<7}", self.symbols.array[action.borrow().symbol_index].name, self.rules[n_rule].index, w=indent)?;
3252					Self::rule_print(out, &self.rules[n_rule], std::usize::MAX)?;
3253					result = true;
3254				}
3255			}
3256			ActionType::ShiftReduce =>
3257			{	if let StateOrRule::Rule(n_rule) = action.borrow().x
3258				{	write!(out, "{:>w$} shift-reduce {:<7}", self.symbols.array[action.borrow().symbol_index].name, self.rules[n_rule].index, w=indent)?;
3259					Self::rule_print(out, &self.rules[n_rule], std::usize::MAX)?;
3260					result = true;
3261				}
3262			}
3263			ActionType::Accept =>
3264			{	write!(out, "{:>w$} accept", self.symbols.array[action.borrow().symbol_index].name, w=indent)?;
3265				result = true;
3266			}
3267			ActionType::Error =>
3268			{	write!(out, "{:>w$} error", self.symbols.array[action.borrow().symbol_index].name, w=indent)?;
3269				result = true;
3270			}
3271			ActionType::SrConflict | ActionType::RrConflict =>
3272			{	if let StateOrRule::Rule(n_rule) = action.borrow().x
3273				{	write!(out, "{:>w$} reduce       {:<7} ** Parsing conflict **", self.symbols.array[action.borrow().symbol_index].name, self.rules[n_rule].index, w=indent)?;
3274					result = true;
3275				}
3276			}
3277			ActionType::SsConflict =>
3278			{	if let StateOrRule::State(n_state) = action.borrow().x
3279				{	write!(out, "{:>w$} shift        {:<7} ** Parsing conflict **", self.symbols.array[action.borrow().symbol_index].name, n_state, w=indent)?;
3280					result = true;
3281				}
3282			}
3283			ActionType::ShResolved =>
3284			{	if show_precedence_conflict
3285				{	if let StateOrRule::State(n_state) = action.borrow().x
3286					{	write!(out, "{:>w$} shift        {:<7} -- dropped by precedence", self.symbols.array[action.borrow().symbol_index].name, n_state, w=indent)?;
3287						result = true;
3288					}
3289				}
3290			}
3291			ActionType::RdResolved =>
3292			{	if show_precedence_conflict
3293				{	if let StateOrRule::Rule(n_rule) = action.borrow().x
3294					{	write!(out, "{:>w$} reduce {:<7} -- dropped by precedence", self.symbols.array[action.borrow().symbol_index].name, self.rules[n_rule].index, w=indent)?;
3295						result = true;
3296					}
3297				}
3298			}
3299			ActionType::NotUsed => {}
3300		}
3301		Ok(result)
3302	}
3303
3304	/// Print a single rule.
3305	fn rule_print<W>(out: &mut W, rule: &Rule, cursor: usize) -> io::Result<()> where W: io::Write
3306	{	write!(out, "{} ::=", rule.lhs)?;
3307		for (i, symbol) in rule.rhs.iter().enumerate()
3308		{	if i == cursor
3309			{	write!(out, " *")?;
3310			}
3311			write!(out, " {}", symbol.name)?;
3312		}
3313		if cursor == rule.rhs.len()
3314		{	write!(out, " *")?;
3315		}
3316		Ok(())
3317	}
3318
3319	// Duplicate the input file without comments and without actions on rules
3320	pub fn gen_y<W>(&self, out: &mut W) -> io::Result<()> where W: io::Write
3321	{	writeln!(out, "// Symbols:")?;
3322		let mut maxlen = 10;
3323		for sp in self.symbols.array[0 .. self.symbols.n_symbols].iter()
3324		{	if sp.name.len() > maxlen
3325			{	maxlen = sp.name.len();
3326			}
3327		}
3328		let mut ncolumns = 76 / (maxlen+5);
3329		if ncolumns < 1
3330		{	ncolumns = 1;
3331		}
3332		let skip = (self.symbols.n_symbols+ncolumns-1) / ncolumns;
3333		for i in 0 .. skip
3334		{	write!(out, "//")?;
3335			for j in (i .. self.symbols.n_symbols).step_by(skip)
3336			{	let sp = &self.symbols.array[j];
3337				assert_eq!(sp.index, j);
3338				write!(out, " {:3} {:<w$}", maxlen, sp.name, w=maxlen)?;
3339			}
3340			writeln!(out, "")?;
3341		}
3342		writeln!(out, "")?;
3343		// %start_symbol
3344		if !self.start_name.is_empty()
3345		{	writeln!(out, "%start_symbol {{{}}}", self.start_name)?;
3346		}
3347		// %token_type
3348		if !self.token_type.is_empty()
3349		{	writeln!(out, "%token_type {{{}}}", self.token_type)?;
3350		}
3351		// %default_type
3352		if !self.vartype.is_empty()
3353		{	writeln!(out, "%default_type {{{}}}", self.vartype)?;
3354		}
3355		// %extra_argument
3356		if !self.extra_argument_type.is_empty()
3357		{	writeln!(out, "%extra_argument {{{}}}", self.extra_argument_type)?;
3358		}
3359		// %left, %right, %nonassoc
3360		let mut prec = 0;
3361		loop
3362		{	let mut has_greater_prec = false;
3363			let mut started = false;
3364			for symbol in self.symbols.array[1 .. self.symbols.n_terminals].iter()
3365			{	if symbol.prec >= prec
3366				{	if symbol.prec > prec
3367					{	has_greater_prec = true;
3368					}
3369					else
3370					{	if !started
3371						{	write!(out, "{}", if symbol.assoc== Associativity::LEFT {"%left"} else if symbol.assoc== Associativity::RIGHT {"%right"} else {"%nonassoc"})?;
3372							started = true;
3373						}
3374						write!(out, " {}", symbol.name)?;
3375					}
3376				}
3377			}
3378			if started
3379			{	writeln!(out, ".")?;
3380			}
3381			if !has_greater_prec
3382			{	break;
3383			}
3384			prec += 1;
3385		}
3386		// %type
3387		for symbol in self.symbols.array[self.symbols.n_terminals+1 .. self.symbols.n_symbols].iter()
3388		{	let symbol = symbol;
3389			writeln!(out, "%type {} {{{}}}", symbol.name, symbol.data_type)?;
3390		}
3391		// rules
3392		for rule in self.rules.iter()
3393		{	write!(out, "{}", rule.lhs)?;
3394			if self.symbols.array[rule.lhs_index].dtnum != 0
3395			{	write!(out, "(RESULT)")?;
3396			}
3397			write!(out, " ::=")?;
3398			for sp in &rule.rhs
3399			{	write!(out, " {}", sp.name)?;
3400				if !sp.alias.is_empty()
3401				{	write!(out, "({})", sp.alias)?;
3402				}
3403			}
3404			write!(out, ".")?;
3405			if rule.precsym_index != std::usize::MAX
3406			{	write!(out, " [{}]", self.symbols.array[rule.precsym_index].name)?;
3407			}
3408			if !rule.code.is_empty()
3409			{	writeln!(out, " {{\n{}\n}}", rule.code)?;
3410			}
3411			writeln!(out, "")?;
3412		}
3413		Ok(())
3414	}
3415}
3416impl TryFrom<LemonMintBuilder> for LemonMint
3417{	type Error = LemonMintError;
3418
3419	fn try_from(builder: LemonMintBuilder) -> ParserResult<Self>
3420	{	builder.try_into_lemon()
3421	}
3422}
3423
/// No-op stand-in for the `with-debug` symbol dump, so call sites can stay
/// unconditional when the feature is disabled.
#[cfg(not(feature = "with-debug"))]
fn dump_symbols(_symbols: &Symbols, _rules: &Vec<Rule>)
{
}
3428
/// No-op stand-in for the `with-debug` state dump, so call sites can stay
/// unconditional when the feature is disabled.
#[cfg(not(feature = "with-debug"))]
fn dump_states(_states: &States, _rules: &Vec<Rule>, _symbols: &Symbols, _n: i32)
{
}
3433
/// (`with-debug` builds only) Dump the symbol table to "/tmp/symbols-rust.js":
/// each symbol's header line, its first-set bitmap, and the rules whose LHS it
/// is. The format matches the lemon.c version quoted at the bottom, so the two
/// files can be diffed while porting.
#[cfg(feature = "with-debug")]
fn dump_symbols(symbols: &Symbols, rules: &Vec<Rule>)
{	use std::io::prelude::*;
	let mut file = File::create("/tmp/symbols-rust.js").unwrap();
	for sym in symbols.array.iter()
	{	// "name (index) type #dtnum assoc prec LAM:" — "!LAM" when no lambda.
		let lam_prefix = if sym.lambda {""} else {"!"};
		writeln!(file, "{} ({}) {:?} #{} {:?} {} {}LAM:", sym.name, sym.index, sym.typ, sym.dtnum, sym.assoc, sym.prec, lam_prefix).unwrap();
		// First-set bitmap as y/n characters, grouped in blocks of four.
		if sym.firstset.len() > 0
		{	write!(file, "  ").unwrap();
			for (pos, bit) in sym.firstset.data.iter().enumerate()
			{	let gap = if pos%4==0 && pos>0 {" "} else {""};
				let mark = if *bit {'y'} else {'n'};
				write!(file, "{}{}", gap, mark).unwrap();
			}
			writeln!(file).unwrap();
		}
		// Rules having this symbol on the left-hand side: "index = action code".
		for &n_rule in sym.sym_rules_arr.iter()
		{	let rule = &rules[n_rule];
			writeln!(file, "   {} = {}", rule.index, rule.code).unwrap();
		}
	}
	/*	The same in lemon.c:

		void dump_symbols(struct lemon *lemp)
		{	FILE *out = fopen("/tmp/symbols-c.js", "w");
			for (int i=0; i<lemp->nsymbol+1; i++)
			{	struct symbol *sym = lemp->symbols[i];
				fprintf(out, "%s (%d) %s #%d %s %d %s:\n", sym->name, sym->index, sym->type==TERMINAL?"TERMINAL":"NONTERMINAL", sym->dtnum, sym->assoc==LEFT?"LEFT":sym->assoc==RIGHT?"RIGHT":sym->assoc==NONE?"NONE":"UNKNOWN", sym->prec, sym->lambda?"LAM":"!LAM");
				if (sym->firstset)
				{	fprintf(out, "  ");
					for (int j=0; j<lemp->nterminal+1; j++)
					{	fprintf(out, "%s%c", j%4==0&&j>0 ? " " : "", sym->firstset[j]?'y':'n');
					}
					fprintf(out, "\n");
				}
				for (struct rule *rp=sym->rule; rp; rp=rp->nextlhs)
				{	const char *code = rp->code;
					if (code == nullptr) code = "";
					while (ISSPACE(code[0])) code++;
					int len = strlen(code);
					while (len>0 && ISSPACE(code[len-1])) len--;
					fprintf(out, "  %d = %.*s\n", rp->index, len, code);
				}
			}
			fclose(out);
		}
	*/
}
3479
/// (`with-debug` builds only) Dump every state to "/tmp/states-{n}-rust.js":
/// the per-state counters, basis and closure configurations with their
/// follow-sets and forward/backward propagation links, and the actions.
/// `n` tags the dump so snapshots from different phases of the run don't
/// overwrite each other. The format mirrors the lemon.c version quoted at
/// the bottom, so the two files can be diffed while porting.
#[cfg(feature = "with-debug")]
fn dump_states(states: &States, rules: &Vec<Rule>, symbols: &Symbols, n: i32)
{	use std::io::prelude::*;
	let mut out = File::create(format!("/tmp/states-{}-rust.js", n)).unwrap();
	for s in states.array.iter()
	{	// State header. Sentinel values are rendered like the C dump does:
		// isize::MIN offsets as -1000, usize::MAX "none" markers as -1.
		writeln!
		(	out,
			"{}. nTknAct={}, nNtAct={}, iTknOfst={}, iNtOfst={}, iDfltReduce={}, {} dfR{}[{}]",
			s.n_state,
			s.n_token_act,
			s.n_nt_act,
			if s.token_offset==std::isize::MIN {-1000} else {s.token_offset},
			if s.nt_offset==std::isize::MIN {-1000} else {s.nt_offset},
			if s.default_reduce_action==std::usize::MAX {-1} else {s.default_reduce_action as isize},
			if s.auto_reduce {"A"} else {"!A"},
			if s.default_reduce_rule==std::usize::MAX {-1} else {s.default_reduce_rule as isize},
			if s.default_reduce_rule==std::usize::MAX {"".to_string()} else {rules[s.default_reduce_rule].lhs.to_string()}
		).unwrap();
		// Basis configurations: target state, rule index, LHS name, dot position.
		for cfp in s.basis.iter()
		{	let cfp = cfp.borrow();
			writeln!(out, "  bs{} R{}[{}] .{}", cfp.n_state as isize, cfp.n_rule, rules[cfp.n_rule].lhs, cfp.dot).unwrap();
		}
		// Closure configurations, each followed by follow-set and propagation links.
		for cfp in s.configurations.iter()
		{	let cfp = cfp.borrow();
			// rule
			writeln!(out, "  cf{} R{}[{}] .{}", cfp.n_state as isize, cfp.n_rule, rules[cfp.n_rule].lhs, cfp.dot).unwrap();
			// fws: follow-set, printed as the names of symbols whose bit is set
			let mut delim = "";
			write!(out, "{:<12}[", "").unwrap();
			for (j, v) in cfp.fws.data.iter().enumerate()
			{	if *v
				{	write!(out, "{}{}", delim, symbols.array[j].name).unwrap();
					delim = " ";
				}
			}
			writeln!(out, "]").unwrap();
			// fplp: forward propagation links ("To")
			for cfp in cfp.fplp.iter()
			{	write!(out, "{:<12}{} (state {:2}) ", "", "To  ", cfp.borrow().n_state as isize).unwrap();
				LemonMint::config_print(rules, &mut out, cfp).unwrap();
				writeln!(out).unwrap();
			}
			// bplp: backward propagation links ("From")
			for cfp in cfp.bplp.iter()
			{	write!(out, "{:<12}{} (state {:2}) ", "", "From", cfp.borrow().n_state as isize).unwrap();
				LemonMint::config_print(rules, &mut out, cfp).unwrap();
				writeln!(out).unwrap();
			}
		}
		// Actions; the `true` flag also shows precedence-resolved conflicts.
		for action in s.actions.iter()
		{	if LemonMint::print_action(rules, action, &mut out, 30, true).unwrap()
			{	writeln!(out).unwrap();
			}
		}
	}
	/*	The same in lemon.c:

		void dump_states(struct lemon *lemp, int n)
		{	char filename[1024];
			sprintf(filename, "/tmp/states-%d-c.js", n);
			FILE *out = fopen(filename, "w");
			showPrecedenceConflict = 1;
			for (int i=0; i<lemp->nstate; i++)
			{	struct state *stp = lemp->sorted[i];
				fprintf
				(	out,
					"%d. nTknAct=%d, nNtAct=%d, iTknOfst=%d, iNtOfst=%d, iDfltReduce=%d, %s dfR%d[%s]\n",
					stp->statenum,
					stp->nTknAct,
					stp->nNtAct,
					stp->iTknOfst==NO_OFFSET ? -1000 : stp->iTknOfst,
					stp->iNtOfst==NO_OFFSET ? -1000 : stp->iNtOfst,
					stp->iDfltReduce,
					stp->autoReduce?"A":"!A",
					stp->pDfltReduce ? stp->pDfltReduce->index : -1,
					stp->pDfltReduce ? stp->pDfltReduce->lhs->name : ""
				);
				for (struct config *cfp=stp->bp; cfp; cfp=cfp->bp)
				{	fprintf(out, "  bs%d R%d[%s] .%d\n", cfp->stp ? cfp->stp->statenum : -1, cfp->rp->index, cfp->rp->lhs->name, cfp->dot);
				}
				for (struct config *cfp=stp->cfp; cfp; cfp=cfp->next)
				{	fprintf(out, "  cf%d R%d[%s] .%d\n", cfp->stp ? cfp->stp->statenum : -1, cfp->rp->index, cfp->rp->lhs->name, cfp->dot);
					SetPrint(out, cfp->fws,lemp);
					PlinkPrint(out, cfp->fplp,"To  ");
					PlinkPrint(out, cfp->bplp,"From");
				}
				for (struct action *ap=stp->ap; ap; ap=ap->next)
				{	if (PrintAction(ap, out, 30)) fprintf(out, "\n");
				}
			}
			fclose(out);
		}
	*/
}