use crate::ast::{Alternatives, BaseFactor, Factor, IxmlGrammar, Mark, Repetition, Rule, Sequence};
use std::collections::{HashMap, HashSet};
// Recursion-depth cap shared by the legacy recursive analysis helpers further
// below (the iterative implementations no longer need it).
#[allow(dead_code)]
const MAX_ANALYSIS_DEPTH: usize = 20;
/// Result of a one-shot static analysis of an iXML grammar (see `analyze`).
#[derive(Debug, Clone)]
pub struct GrammarAnalysis {
    /// Rules involved in (or reaching) a reference cycle.
    pub recursive_rules: HashSet<String>,
    /// Recursive rules whose cycle occurs in leftmost position.
    pub left_recursive_rules: HashSet<String>,
    /// Rules carrying the `Mark::Hidden` mark.
    pub hidden_rules: HashSet<String>,
    /// Rules carrying the `Mark::Promoted` mark.
    pub promoted_rules: HashSet<String>,
    /// Rules carrying the `Mark::Attribute` mark.
    pub attribute_rules: HashSet<String>,
    /// Per-rule structural size score (see `calculate_complexity`).
    pub complexity_scores: HashMap<String, usize>,
    /// Result of the heuristic ambiguity scan (false positives possible).
    pub is_potentially_ambiguous: bool,
}
impl GrammarAnalysis {
    /// Runs the full static analysis over `grammar`: recursion and
    /// left-recursion detection, mark classification, per-rule complexity
    /// scoring, and a heuristic ambiguity scan.
    pub fn analyze(grammar: &IxmlGrammar) -> Self {
        // Name -> rule lookup table used by all traversals below.
        let rule_map: HashMap<String, &Rule> =
            grammar.rules.iter().map(|r| (r.name.clone(), r)).collect();
        let recursive_rules = find_recursive_rules(grammar, &rule_map);
        let left_recursive_rules = find_left_recursive_rules(grammar, &rule_map, &recursive_rules);
        // Bucket every rule by its serialization mark.
        let mut hidden_rules = HashSet::new();
        let mut promoted_rules = HashSet::new();
        let mut attribute_rules = HashSet::new();
        for rule in &grammar.rules {
            match rule.mark {
                Mark::Hidden => {
                    hidden_rules.insert(rule.name.clone());
                }
                Mark::Promoted => {
                    promoted_rules.insert(rule.name.clone());
                }
                Mark::Attribute => {
                    attribute_rules.insert(rule.name.clone());
                }
                Mark::None => {}
            }
        }
        let complexity_scores = grammar
            .rules
            .iter()
            .map(|rule| {
                let score = calculate_complexity(&rule.alternatives);
                (rule.name.clone(), score)
            })
            .collect();
        // Ambiguity detection runs over a normalized copy of the grammar in
        // which hidden/promoted rules have been inlined (see
        // `normalize_grammar`), so marks don't mask overlapping alternatives.
        let normalized = normalize_grammar(grammar);
        let normalized_map: HashMap<String, &Rule> = normalized
            .rules
            .iter()
            .map(|r| (r.name.clone(), r))
            .collect();
        let is_potentially_ambiguous =
            detect_ambiguity_patterns(&normalized, &normalized_map, &recursive_rules);
        GrammarAnalysis {
            recursive_rules,
            left_recursive_rules,
            hidden_rules,
            promoted_rules,
            attribute_rules,
            complexity_scores,
            is_potentially_ambiguous,
        }
    }
    /// True if `rule_name` was flagged by the recursion scan.
    pub fn is_recursive(&self, rule_name: &str) -> bool {
        self.recursive_rules.contains(rule_name)
    }
    /// True if `rule_name` was flagged as left-recursive.
    pub fn is_left_recursive(&self, rule_name: &str) -> bool {
        self.left_recursive_rules.contains(rule_name)
    }
    /// Complexity score for `rule_name`; 0 for unknown rules.
    pub fn complexity(&self, rule_name: &str) -> usize {
        self.complexity_scores.get(rule_name).copied().unwrap_or(0)
    }
    /// Renders a human-readable summary of the findings.
    ///
    /// NOTE(review): rules are listed in `HashSet`/`HashMap` iteration order,
    /// so the ordering within each section is not deterministic — confirm
    /// whether stable output matters to callers.
    pub fn report(&self) -> String {
        let mut report = String::new();
        if self.is_potentially_ambiguous {
            report.push_str("⚠️ Grammar may be ambiguous (multiple parse trees possible)\n");
            report.push_str("  Parse output will be marked with ixml:state=\"ambiguous\"\n");
            report.push('\n');
        }
        if !self.left_recursive_rules.is_empty() {
            report.push_str("⚠️ Left-recursive rules (may cause infinite loops):\n");
            for rule in &self.left_recursive_rules {
                report.push_str(&format!("  - {}\n", rule));
            }
            report.push('\n');
        }
        if !self.recursive_rules.is_empty() {
            report.push_str("ℹ️ Recursive rules (normal, but watch for performance):\n");
            // Left-recursive rules were already listed above; skip them here.
            for rule in &self.recursive_rules {
                if !self.left_recursive_rules.contains(rule) {
                    report.push_str(&format!("  - {}\n", rule));
                }
            }
            report.push('\n');
        }
        // Threshold 10 is an arbitrary "worth mentioning" cutoff.
        let high_complexity: Vec<_> = self
            .complexity_scores
            .iter()
            .filter(|(_, &score)| score > 10)
            .collect();
        if !high_complexity.is_empty() {
            report.push_str("ℹ️ High complexity rules (may be slow to parse):\n");
            for (rule, score) in high_complexity {
                report.push_str(&format!("  - {} (complexity: {})\n", rule, score));
            }
            report.push('\n');
        }
        if report.is_empty() {
            report.push_str("✅ No issues detected\n");
        }
        report
    }
}
/// Collects the names of all rules for which `is_recursive` reports a cycle.
fn find_recursive_rules(
    grammar: &IxmlGrammar,
    rule_map: &HashMap<String, &Rule>,
) -> HashSet<String> {
    grammar
        .rules
        .iter()
        .filter(|rule| {
            // Fresh visited set per starting rule, as in the original scan.
            let mut seen = HashSet::new();
            is_recursive(&rule.name, rule_map, &mut seen, 0)
        })
        .map(|rule| rule.name.clone())
        .collect()
}
/// Heuristic ambiguity detection over the (normalized) grammar.
///
/// Returns true when any of three overlap patterns is found:
/// 1. a rule with more than one alternative that can match the empty string;
/// 2. a rule where several alternatives start with the same nullable
///    nonterminal;
/// 3. a nullable nonterminal immediately followed by a factor that can itself
///    start with that nonterminal (the empty match can be attributed to
///    either side).
///
/// This flags *potential* ambiguity only; false positives are possible.
fn detect_ambiguity_patterns(
    grammar: &IxmlGrammar,
    rule_map: &HashMap<String, &Rule>,
    _recursive_rules: &HashSet<String>,
) -> bool {
    let nullable_set = compute_nullable_set(rule_map);
    for rule in &grammar.rules {
        // Pattern 1: two or more fully-nullable alternatives.
        if rule.alternatives.alts.len() > 1 {
            let mut nullable_alts = 0;
            for alt in &rule.alternatives.alts {
                let mut all_nullable = true;
                for factor in &alt.factors {
                    if !is_factor_nullable_simple(factor, &nullable_set) {
                        all_nullable = false;
                        break;
                    }
                }
                if all_nullable {
                    nullable_alts += 1;
                }
            }
            if nullable_alts > 1 {
                return true;
            }
        }
        // Pattern 2: several alternatives sharing the same nullable first
        // nonterminal.
        if rule.alternatives.alts.len() > 1 {
            let mut first_nullable_nonterminals: HashMap<String, usize> = HashMap::new();
            for alt in &rule.alternatives.alts {
                if alt.factors.is_empty() {
                    continue;
                }
                if let BaseFactor::Nonterminal { name, .. } = &alt.factors[0].base {
                    if nullable_set.contains(name) {
                        *first_nullable_nonterminals.entry(name.clone()).or_insert(0) += 1;
                    }
                }
            }
            for count in first_nullable_nonterminals.values() {
                if *count > 1 {
                    return true;
                }
            }
        }
        // Pattern 3: nullable nonterminal adjacent to a factor that can start
        // with it.
        for alt in &rule.alternatives.alts {
            for (i, factor) in alt.factors.iter().enumerate() {
                if i >= alt.factors.len() - 1 {
                    continue; // the last factor has no right-hand neighbour
                }
                if let BaseFactor::Nonterminal { name, .. } = &factor.base {
                    if nullable_set.contains(name) {
                        let next_factor = &alt.factors[i + 1];
                        if could_start_with_nullable(next_factor, name, rule_map, &nullable_set) {
                            return true;
                        }
                    }
                }
            }
        }
    }
    false
}
/// True if `factor` is (or directly derives, one level deep) the nullable
/// nonterminal `nullable_name` in first position.
fn could_start_with_nullable(
    factor: &Factor,
    nullable_name: &str,
    rule_map: &HashMap<String, &Rule>,
    nullable_set: &HashSet<String>,
) -> bool {
    match &factor.base {
        BaseFactor::Nonterminal { name, .. } => {
            // Immediate match: the factor is the nullable nonterminal itself.
            if name == nullable_name && nullable_set.contains(name) {
                return true;
            }
            // One level of indirection: some alternative of the referenced
            // rule starts with the nullable nonterminal.
            rule_map.get(name.as_str()).map_or(false, |rule| {
                rule.alternatives.alts.iter().any(|alt| match alt.factors.first() {
                    Some(first) => match &first.base {
                        BaseFactor::Nonterminal {
                            name: first_name, ..
                        } => first_name == nullable_name && nullable_set.contains(nullable_name),
                        _ => false,
                    },
                    None => false,
                })
            })
        }
        // Literals, character classes and groups are not considered here.
        _ => false,
    }
}
/// Iterative cycle detection seeded at `rule_name`.
///
/// Walks nonterminal references (including those one level inside groups)
/// with an explicit work stack; `(name, true)` entries are post-order markers
/// that remove `name` from the active path again.
///
/// NOTE(review): the caller-supplied `visited` set is only read here, never
/// written — confirm callers intend it as an extra "already on path" filter.
/// Also, hitting ANY cycle reachable from `rule_name` (not only cycles that
/// pass through `rule_name` itself) returns true, so rules that merely reach
/// a recursive rule are flagged too — confirm that is the desired semantics.
fn is_recursive(
    rule_name: &str,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    _depth: usize,
) -> bool {
    let mut work_stack: Vec<(String, bool)> = vec![(rule_name.to_string(), false)];
    let mut local_visited = HashSet::new();
    while let Some((current_rule, returning)) = work_stack.pop() {
        if returning {
            // Post-order marker: this rule's subtree is done, leave the path.
            local_visited.remove(&current_rule);
            continue;
        }
        if local_visited.contains(&current_rule) || visited.contains(&current_rule) {
            // Already on the active path: a cycle exists.
            return true;
        }
        let rule = match rule_map.get(current_rule.as_str()) {
            Some(r) => r,
            None => continue, // undefined nonterminal: nothing to expand
        };
        local_visited.insert(current_rule.clone());
        work_stack.push((current_rule.clone(), true));
        if current_rule != rule_name {
            // Inside the expansion: a direct reference back to the root rule
            // proves recursion immediately.
            for alt in &rule.alternatives.alts {
                for factor in &alt.factors {
                    if let BaseFactor::Nonterminal { name, .. } = &factor.base {
                        if name == rule_name {
                            return true; }
                        work_stack.push((name.clone(), false));
                    } else if let BaseFactor::Group { alternatives } = &factor.base {
                        // NOTE(review): groups are unwrapped only one level
                        // deep; nonterminals nested in a group-within-a-group
                        // are not traversed — confirm that is acceptable.
                        for group_alt in &alternatives.alts {
                            for group_factor in &group_alt.factors {
                                if let BaseFactor::Nonterminal { name, .. } = &group_factor.base {
                                    if name == rule_name {
                                        return true;
                                    }
                                    work_stack.push((name.clone(), false));
                                }
                            }
                        }
                    }
                }
            }
        } else {
            // Expanding the root rule itself: just enqueue its references;
            // self-references are caught by the path check on a later pop.
            for alt in &rule.alternatives.alts {
                for factor in &alt.factors {
                    if let BaseFactor::Nonterminal { name, .. } = &factor.base {
                        work_stack.push((name.clone(), false));
                    } else if let BaseFactor::Group { alternatives } = &factor.base {
                        for group_alt in &alternatives.alts {
                            for group_factor in &group_alt.factors {
                                if let BaseFactor::Nonterminal { name, .. } = &group_factor.base {
                                    work_stack.push((name.clone(), false));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    false
}
fn find_left_recursive_rules(
grammar: &IxmlGrammar,
rule_map: &HashMap<String, &Rule>,
recursive_rules: &HashSet<String>,
) -> HashSet<String> {
let mut left_recursive = HashSet::new();
for rule in &grammar.rules {
if !recursive_rules.contains(&rule.name) {
continue;
}
if is_left_recursive(&rule.name, &rule.alternatives, rule_map) {
left_recursive.insert(rule.name.clone());
}
}
left_recursive
}
/// True when `rule_name` can derive itself in leftmost position.
fn is_left_recursive(
    rule_name: &str,
    alternatives: &Alternatives,
    rule_map: &HashMap<String, &Rule>,
) -> bool {
    let nullables = compute_nullable_set(rule_map);
    compute_left_reachable(rule_name, alternatives, rule_map, &nullables).contains(rule_name)
}
/// Computes the set of nonterminals reachable in leftmost position from
/// `alternatives`, taking nullability into account.
///
/// BUG FIX: the previous implementation only expanded one level of
/// indirection (the `while changed` loop re-scanned the same alternatives
/// without ever expanding newly discovered names), so left-recursive cycles
/// running through three or more rules were never detected. This version
/// computes the full transitive closure with a worklist.
fn compute_left_reachable(
    _rule_name: &str,
    alternatives: &Alternatives,
    rule_map: &HashMap<String, &Rule>,
    nullable_set: &HashSet<String>,
) -> HashSet<String> {
    // Seed with the names directly visible in leftmost position.
    let mut reachable = compute_left_reachable_direct(alternatives, nullable_set);
    let mut pending: Vec<String> = reachable.iter().cloned().collect();
    // Worklist fixpoint: expand each newly discovered rule exactly once.
    while let Some(name) = pending.pop() {
        if let Some(rule) = rule_map.get(name.as_str()) {
            for next in compute_left_reachable_direct(&rule.alternatives, nullable_set) {
                if reachable.insert(next.clone()) {
                    pending.push(next);
                }
            }
        }
    }
    reachable
}
/// Nonterminals that can appear in leftmost position in any single
/// alternative of `alternatives` (no expansion of referenced rules).
///
/// Scanning an alternative stops at the first factor that cannot match the
/// empty string; every factor up to and including that one contributes its
/// leftmost nonterminals.
///
/// BUG FIX: previously the repetition check sat *after* an unconditional
/// `break` in the base-factor match and was therefore dead code, so an
/// optional/starred factor (`a?`, `a*`, `a**sep`) wrongly blocked the factors
/// following it. Repetition-induced nullability is now considered before
/// deciding whether to stop.
fn compute_left_reachable_direct(
    alternatives: &Alternatives,
    nullable_set: &HashSet<String>,
) -> HashSet<String> {
    let mut reachable = HashSet::new();
    for alt in &alternatives.alts {
        for factor in &alt.factors {
            // `?`, `*` and `**sep` repetitions can always match empty.
            let rep_nullable = matches!(
                factor.repetition,
                Repetition::ZeroOrMore | Repetition::Optional | Repetition::SeparatedZeroOrMore(_)
            );
            // Record this factor's leftmost names and determine whether the
            // base itself can match empty.
            let base_nullable = match &factor.base {
                BaseFactor::Nonterminal { name, .. } => {
                    reachable.insert(name.clone());
                    nullable_set.contains(name)
                }
                BaseFactor::Literal { value, .. } => value.is_empty(),
                BaseFactor::CharClass { .. } => false,
                BaseFactor::Group {
                    alternatives: group_alts,
                } => {
                    reachable.extend(compute_left_reachable_direct(group_alts, nullable_set));
                    is_alternatives_nullable(group_alts, nullable_set)
                }
            };
            // A non-nullable factor ends the leftmost region of this alternative.
            if !(rep_nullable || base_nullable) {
                break;
            }
        }
    }
    reachable
}
/// An alternation is nullable when at least one branch consists solely of
/// nullable factors (an empty branch is trivially nullable).
fn is_alternatives_nullable(alternatives: &Alternatives, nullable_set: &HashSet<String>) -> bool {
    for alt in &alternatives.alts {
        let branch_nullable = alt
            .factors
            .iter()
            .all(|factor| is_factor_nullable_simple(factor, nullable_set));
        if branch_nullable {
            return true;
        }
    }
    false
}
/// Fixpoint computation of the set of rules that can derive the empty string.
///
/// Repeatedly sweeps the grammar, adding any rule with at least one fully
/// nullable alternative, until a sweep adds nothing new.
fn compute_nullable_set(rule_map: &HashMap<String, &Rule>) -> HashSet<String> {
    let mut nullable: HashSet<String> = HashSet::new();
    loop {
        let mut grew = false;
        for (rule_name, rule) in rule_map {
            if nullable.contains(rule_name.as_str()) {
                continue; // already proven nullable
            }
            let rule_nullable = rule.alternatives.alts.iter().any(|alt| {
                alt.factors
                    .iter()
                    .all(|factor| is_factor_nullable_simple(factor, &nullable))
            });
            if rule_nullable {
                nullable.insert(rule_name.clone());
                grew = true;
            }
        }
        if !grew {
            break;
        }
    }
    nullable
}
/// Non-recursive nullability test for a single factor against a precomputed
/// nullable-rule set.
///
/// Groups are examined one level deep only: a group nested inside a group is
/// conservatively treated as non-nullable (same behaviour as before — the
/// original's stack machinery also never descended past one group level).
fn is_factor_nullable_simple(factor: &Factor, nullable_rules: &HashSet<String>) -> bool {
    // Nullability of a factor appearing inside a group (nested groups => false).
    fn shallow_nullable(factor: &Factor, nullable_rules: &HashSet<String>) -> bool {
        if matches!(
            factor.repetition,
            Repetition::ZeroOrMore | Repetition::Optional | Repetition::SeparatedZeroOrMore(_)
        ) {
            return true;
        }
        match &factor.base {
            BaseFactor::Literal { value, .. } => value.is_empty(),
            BaseFactor::CharClass { .. } => false,
            BaseFactor::Nonterminal { name, .. } => nullable_rules.contains(name),
            BaseFactor::Group { .. } => false,
        }
    }
    // A `?`/`*`-style repetition always admits the empty match.
    if matches!(
        factor.repetition,
        Repetition::ZeroOrMore | Repetition::Optional | Repetition::SeparatedZeroOrMore(_)
    ) {
        return true;
    }
    match &factor.base {
        BaseFactor::Literal { value, .. } => value.is_empty(),
        BaseFactor::CharClass { .. } => false,
        BaseFactor::Nonterminal { name, .. } => nullable_rules.contains(name),
        // A group is nullable if any branch is made only of nullable factors.
        BaseFactor::Group { alternatives } => alternatives.alts.iter().any(|alt| {
            alt.factors
                .iter()
                .all(|f| shallow_nullable(f, nullable_rules))
        }),
    }
}
/// Legacy entry point: nullability of an alternation, delegating to the
/// precomputed-set implementation (the `_visited`/`_depth` parameters are
/// kept for signature compatibility and are unused).
#[allow(dead_code)]
fn is_nullable(
    alternatives: &Alternatives,
    rule_map: &HashMap<String, &Rule>,
    _visited: &mut HashSet<String>,
    _depth: usize,
) -> bool {
    let nullables = compute_nullable_set(rule_map);
    alternatives.alts.iter().any(|alt| {
        alt.factors
            .iter()
            .all(|factor| is_factor_nullable_simple(factor, &nullables))
    })
}
/// Cached nullability of an alternation: true if any branch is nullable.
/// Gives up (returns false) past the fixed depth cutoff.
#[allow(dead_code)]
fn is_nullable_with_cache(
    alternatives: &Alternatives,
    rule_map: &HashMap<String, &Rule>,
    visited: &HashSet<String>,
    cache: &mut HashMap<String, bool>,
    depth: usize,
) -> bool {
    if depth > 5 {
        return false;
    }
    alternatives
        .alts
        .iter()
        .any(|seq| is_sequence_nullable_with_cache(seq, rule_map, visited, cache, depth + 1))
}
/// Cached nullability of a sequence: every factor must be nullable.
/// Gives up (returns false) past the fixed depth cutoff.
#[allow(dead_code)]
fn is_sequence_nullable_with_cache(
    seq: &Sequence,
    rule_map: &HashMap<String, &Rule>,
    visited: &HashSet<String>,
    cache: &mut HashMap<String, bool>,
    depth: usize,
) -> bool {
    if depth > 5 {
        return false;
    }
    seq.factors
        .iter()
        .all(|factor| is_factor_nullable_with_cache(factor, rule_map, visited, cache, depth + 1))
}
/// Nullability of a single factor, memoized per nonterminal in `cache`.
///
/// `visited` guards against infinite regress through cyclic rules (a cycle is
/// conservatively treated as non-nullable) and the `depth > 5` cutoff bounds
/// the search further.
///
/// NOTE(review): results computed under a non-empty `visited` set (i.e. with
/// some expansion paths cut off) are still stored in `cache`, so a
/// conservative `false` may be reused in contexts where the full answer would
/// be `true` — confirm that this is acceptable for the callers.
#[allow(dead_code)]
fn is_factor_nullable_with_cache(
    factor: &Factor,
    rule_map: &HashMap<String, &Rule>,
    visited: &HashSet<String>,
    cache: &mut HashMap<String, bool>,
    depth: usize,
) -> bool {
    if depth > 5 {
        return false;
    }
    // `?`, `*` and `**sep` repetitions can always match the empty string.
    match factor.repetition {
        Repetition::ZeroOrMore | Repetition::Optional | Repetition::SeparatedZeroOrMore(_) => {
            return true
        }
        _ => {}
    }
    match &factor.base {
        BaseFactor::Literal { value, .. } => value.is_empty(),
        BaseFactor::CharClass { .. } => false,
        BaseFactor::Nonterminal { name, .. } => {
            if let Some(&result) = cache.get(name) {
                return result;
            }
            if visited.contains(name) {
                // Already on the current expansion path: break the cycle.
                return false;
            }
            let mut local_visited = visited.clone();
            local_visited.insert(name.clone());
            let result = if let Some(rule) = rule_map.get(name.as_str()) {
                is_nullable_with_cache(
                    &rule.alternatives,
                    rule_map,
                    &local_visited,
                    cache,
                    depth + 1,
                )
            } else {
                // Undefined nonterminal: treated as non-nullable.
                false
            };
            cache.insert(name.clone(), result);
            result
        }
        BaseFactor::Group { alternatives } => {
            is_nullable_with_cache(alternatives, rule_map, visited, cache, depth + 1)
        }
    }
}
/// Convenience wrapper around the cached sequence-nullability check with a
/// fresh, function-local memo table.
#[allow(dead_code)]
fn is_sequence_nullable_iterative(
    seq: &Sequence,
    rule_map: &HashMap<String, &Rule>,
    visited: &HashSet<String>,
) -> bool {
    let mut memo: HashMap<String, bool> = HashMap::new();
    is_sequence_nullable_with_cache(seq, rule_map, visited, &mut memo, 0)
}
/// Convenience wrapper: factor nullability against a freshly computed
/// nullable set (the `_visited` parameter is kept for compatibility).
#[allow(dead_code)]
fn is_factor_nullable_iterative(
    factor: &Factor,
    rule_map: &HashMap<String, &Rule>,
    _visited: &HashSet<String>,
) -> bool {
    let nullables = compute_nullable_set(rule_map);
    is_factor_nullable_simple(factor, &nullables)
}
/// Recursive sequence-nullability check: every factor must be nullable.
/// Gives up (returns false) past `MAX_ANALYSIS_DEPTH`.
#[allow(dead_code)]
fn is_sequence_nullable(
    seq: &Sequence,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    depth: usize,
) -> bool {
    if depth > MAX_ANALYSIS_DEPTH {
        return false;
    }
    for factor in &seq.factors {
        if !is_factor_nullable(factor, rule_map, visited, depth + 1) {
            return false;
        }
    }
    true
}
/// Recursive factor-nullability check with cycle protection via `visited`.
/// Gives up (returns false) past `MAX_ANALYSIS_DEPTH`.
#[allow(dead_code)]
fn is_factor_nullable(
    factor: &Factor,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    depth: usize,
) -> bool {
    if depth > MAX_ANALYSIS_DEPTH {
        return false;
    }
    // `?`, `*` and `**sep` repetitions always admit the empty match.
    if matches!(
        factor.repetition,
        Repetition::ZeroOrMore | Repetition::Optional | Repetition::SeparatedZeroOrMore(_)
    ) {
        return true;
    }
    match &factor.base {
        BaseFactor::Literal { value, .. } => value.is_empty(),
        BaseFactor::CharClass { .. } => false,
        BaseFactor::Nonterminal { name, .. } => {
            if visited.contains(name) {
                // Cycle: conservatively non-nullable.
                return false;
            }
            visited.insert(name.clone());
            let result = match rule_map.get(name) {
                Some(rule) => is_nullable(&rule.alternatives, rule_map, visited, depth + 1),
                None => false,
            };
            visited.remove(name);
            result
        }
        BaseFactor::Group { alternatives } => {
            is_nullable(alternatives, rule_map, visited, depth + 1)
        }
    }
}
/// True if any alternative of `alternatives` can reach `target_rule`.
/// Gives up (returns false) past `MAX_ANALYSIS_DEPTH`.
#[allow(dead_code)]
fn check_alternatives_for_recursion(
    alternatives: &Alternatives,
    target_rule: &str,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    depth: usize,
) -> bool {
    if depth > MAX_ANALYSIS_DEPTH {
        return false;
    }
    alternatives
        .alts
        .iter()
        .any(|seq| check_sequence_for_recursion(seq, target_rule, rule_map, visited, depth + 1))
}
/// True if any factor of `seq` can reach `target_rule`.
/// Gives up (returns false) past `MAX_ANALYSIS_DEPTH`.
#[allow(dead_code)]
fn check_sequence_for_recursion(
    seq: &Sequence,
    target_rule: &str,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    depth: usize,
) -> bool {
    if depth > MAX_ANALYSIS_DEPTH {
        return false;
    }
    seq.factors
        .iter()
        .any(|factor| check_factor_for_recursion(factor, target_rule, rule_map, visited, depth + 1))
}
/// True if `factor` references `target_rule` directly, references a recursive
/// rule, or contains a group that does either.
/// Gives up (returns false) past `MAX_ANALYSIS_DEPTH`.
#[allow(dead_code)]
fn check_factor_for_recursion(
    factor: &Factor,
    target_rule: &str,
    rule_map: &HashMap<String, &Rule>,
    visited: &mut HashSet<String>,
    depth: usize,
) -> bool {
    if depth > MAX_ANALYSIS_DEPTH {
        return false;
    }
    match &factor.base {
        BaseFactor::Nonterminal { name, .. } => {
            name == target_rule || is_recursive(name, rule_map, visited, depth + 1)
        }
        BaseFactor::Group { alternatives } => check_alternatives_for_recursion(
            alternatives,
            target_rule,
            rule_map,
            visited,
            depth + 1,
        ),
        // Literals and character classes cannot reference rules.
        _ => false,
    }
}
/// Structural size score for an alternation: one point per alternative, one
/// per factor position, plus one per simple factor (groups contribute their
/// own recursive score instead).
fn calculate_complexity(alternatives: &Alternatives) -> usize {
    alternatives
        .alts
        .iter()
        .fold(alternatives.alts.len(), |acc, seq| {
            let factor_cost: usize = seq
                .factors
                .iter()
                .map(|factor| match &factor.base {
                    BaseFactor::Group {
                        alternatives: inner,
                    } => calculate_complexity(inner),
                    _ => 1,
                })
                .sum();
            acc + seq.factors.len() + factor_cost
        })
}
/// Produces a copy of `grammar` with hidden and promoted rules removed and
/// their references inlined (used only for the ambiguity heuristics).
fn normalize_grammar(grammar: &IxmlGrammar) -> IxmlGrammar {
    let rule_map: HashMap<String, &Rule> =
        grammar.rules.iter().map(|r| (r.name.clone(), r)).collect();
    // Rules whose mark makes them candidates for inlining.
    let inline_rules: HashSet<String> = grammar
        .rules
        .iter()
        .filter(|rule| matches!(rule.mark, Mark::Hidden | Mark::Promoted))
        .map(|rule| rule.name.clone())
        .collect();
    // Keep every non-inlined rule, normalizing its right-hand side.
    let normalized_rules: Vec<Rule> = grammar
        .rules
        .iter()
        .filter(|rule| !inline_rules.contains(&rule.name))
        .map(|rule| {
            let alts = normalize_alternatives(&rule.alternatives, &rule_map, &inline_rules, 0);
            Rule::new(rule.name.clone(), rule.mark, alts)
        })
        .collect();
    IxmlGrammar::new(normalized_rules)
}
/// Normalizes every branch of an alternation, flattening each branch's
/// possible expansions into sibling alternatives. Past depth 10 the input is
/// returned unchanged to bound the expansion.
fn normalize_alternatives(
    alternatives: &Alternatives,
    rule_map: &HashMap<String, &Rule>,
    inline_rules: &HashSet<String>,
    depth: usize,
) -> Alternatives {
    if depth > 10 {
        return alternatives.clone();
    }
    let mut expanded: Vec<Sequence> = Vec::new();
    for seq in &alternatives.alts {
        expanded.extend(normalize_sequence(seq, rule_map, inline_rules, depth + 1));
    }
    Alternatives::new(expanded)
}
fn normalize_sequence(
sequence: &Sequence,
rule_map: &HashMap<String, &Rule>,
inline_rules: &HashSet<String>,
depth: usize,
) -> Vec<Sequence> {
if depth > 10 {
return vec![sequence.clone()];
}
let mut result_sequences = vec![Vec::new()];
for factor in &sequence.factors {
let normalized_factors = normalize_factor(factor, rule_map, inline_rules, depth + 1);
if normalized_factors.len() == 1 {
for seq in &mut result_sequences {
seq.push(normalized_factors[0].clone());
}
} else {
let mut new_sequences = Vec::new();
for existing_seq in &result_sequences {
for new_factor in &normalized_factors {
let mut combined = existing_seq.clone();
combined.push(new_factor.clone());
new_sequences.push(combined);
}
}
result_sequences = new_sequences;
}
}
result_sequences.into_iter().map(Sequence::new).collect()
}
/// Expands one factor during normalization.
///
/// The returned `Vec` is interpreted by `normalize_sequence` as *alternative*
/// single factors when it has more than one element, and as "factor vanished"
/// when empty. Only unrepeated (`Repetition::None`) references to
/// hidden/promoted rules are inlined; repeated references and all other
/// factor kinds are kept (groups get their interior normalized).
///
/// NOTE(review): the inlining concatenates the factors of ALL alternatives of
/// the referenced rule into one flat list. For rules whose alternatives each
/// hold a single factor this matches the "alternatives" interpretation above,
/// but a multi-factor alternative (e.g. `-h: a, b.`) would be split into
/// separate alternatives here — confirm whether such rules can reach this
/// path.
fn normalize_factor(
    factor: &Factor,
    rule_map: &HashMap<String, &Rule>,
    inline_rules: &HashSet<String>,
    depth: usize,
) -> Vec<Factor> {
    if depth > 10 {
        // Expansion bound reached: keep the factor untouched.
        return vec![factor.clone()];
    }
    match &factor.base {
        BaseFactor::Nonterminal { name, .. } => {
            if inline_rules.contains(name) {
                if let Some(rule) = rule_map.get(name.as_str()) {
                    match factor.repetition {
                        Repetition::None => {
                            let mut inlined_factors = Vec::new();
                            for alt in &rule.alternatives.alts {
                                for alt_factor in &alt.factors {
                                    inlined_factors.push(alt_factor.clone());
                                }
                            }
                            return if inlined_factors.is_empty() {
                                // Empty rule body: the factor vanishes.
                                vec![]
                            } else {
                                inlined_factors
                            };
                        }
                        _ => {
                            // Repeated references are not inlined.
                            return vec![factor.clone()];
                        }
                    }
                }
            }
            vec![factor.clone()]
        }
        BaseFactor::Group { alternatives } => {
            // Normalize the group's interior but keep the group wrapper.
            let normalized_alts =
                normalize_alternatives(alternatives, rule_map, inline_rules, depth + 1);
            vec![Factor::new(
                BaseFactor::Group {
                    alternatives: Box::new(normalized_alts),
                },
                factor.repetition.clone(),
            )]
        }
        _ => {
            // Literals and character classes pass through unchanged.
            vec![factor.clone()]
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ast::Repetition;
    /// Direct left recursion (`expr: expr, "+", term | term`) must be
    /// flagged, while the non-recursive `term` must not.
    #[test]
    fn test_detect_left_recursion() {
        let grammar = IxmlGrammar::new(vec![
            Rule::new(
                "expr".to_string(),
                Mark::None,
                Alternatives::new(vec![
                    Sequence::new(vec![
                        Factor::simple(BaseFactor::nonterminal("expr".to_string())),
                        Factor::simple(BaseFactor::literal("+".to_string())),
                        Factor::simple(BaseFactor::nonterminal("term".to_string())),
                    ]),
                    Sequence::new(vec![Factor::simple(BaseFactor::nonterminal(
                        "term".to_string(),
                    ))]),
                ]),
            ),
            Rule::new(
                "term".to_string(),
                Mark::None,
                Alternatives::single(Sequence::new(vec![Factor::simple(BaseFactor::literal(
                    "x".to_string(),
                ))])),
            ),
        ]);
        let analysis = GrammarAnalysis::analyze(&grammar);
        assert!(analysis.is_left_recursive("expr"));
        assert!(!analysis.is_left_recursive("term"));
    }
    /// Repetition (`digit+`) is recursion-free and must not be reported as
    /// left recursion for either rule.
    #[test]
    fn test_no_left_recursion() {
        let grammar = IxmlGrammar::new(vec![
            Rule::new(
                "number".to_string(),
                Mark::None,
                Alternatives::single(Sequence::new(vec![Factor::new(
                    BaseFactor::nonterminal("digit".to_string()),
                    Repetition::OneOrMore,
                )])),
            ),
            Rule::new(
                "digit".to_string(),
                Mark::None,
                Alternatives::single(Sequence::new(vec![Factor::simple(BaseFactor::charclass(
                    "\"0\"-\"9\"".to_string(),
                ))])),
            ),
        ]);
        let analysis = GrammarAnalysis::analyze(&grammar);
        assert!(!analysis.is_left_recursive("number"));
        assert!(!analysis.is_left_recursive("digit"));
    }
    /// Score = #alternatives + per-alt factor count + 1 per simple factor:
    /// 2 + (1 + 1) + (1 + 1) = 6.
    #[test]
    fn test_complexity_calculation() {
        let grammar = IxmlGrammar::new(vec![Rule::new(
            "simple".to_string(),
            Mark::None,
            Alternatives::new(vec![
                Sequence::new(vec![Factor::simple(BaseFactor::literal("a".to_string()))]),
                Sequence::new(vec![Factor::simple(BaseFactor::literal("b".to_string()))]),
            ]),
        )]);
        let analysis = GrammarAnalysis::analyze(&grammar);
        assert_eq!(analysis.complexity("simple"), 6);
    }
}