1use crate::rule::{Rule, RuleBuilder};
2use crate::state::State;
3use crate::string_cache::Atom;
4use crate::token::*;
5use crate::update::SideInput;
6
7use std::fmt;
8
/// Upper bound on the number of state permutations a single rule may cause
/// to be checked before matching aborts with `ExcessivePermutationError`.
const EXCESSIVE_PERMUTATION_LIMIT: usize = 2000;
10
/// Error returned when matching a rule would require checking more than
/// `EXCESSIVE_PERMUTATION_LIMIT` state permutations.
#[derive(Debug)]
pub struct ExcessivePermutationError {
    // Identifier of the offending rule, echoed in the Display message.
    pub rule_id: i32,
}
15
// Marker impl: Debug + Display above satisfy the default Error methods.
impl std::error::Error for ExcessivePermutationError {}
17
18impl fmt::Display for ExcessivePermutationError {
19 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
20 write!(f,
21 "Rule {} caused > {} state permutations to be checked. Review the complexity of the rule.",
22 self.rule_id, EXCESSIVE_PERMUTATION_LIMIT
23 )
24 }
25}
26
27pub fn test_match_without_variables(input_tokens: &Phrase, pred_tokens: &Phrase) -> Option<bool> {
28 let mut matcher = NoVariablesMatcher { has_var: false };
29
30 if base_match(input_tokens, pred_tokens, &mut matcher) {
31 Some(matcher.has_var)
32 } else {
33 None
34 }
35}
36
37pub fn match_variables_twoway(
38 tokens1: &Phrase,
39 tokens2: &Phrase,
40 existing_matches_and_result: &mut Vec<Match>,
41) -> bool {
42 let initial_matches_len = existing_matches_and_result.len();
43
44 let mut matcher = TwoWayMatcher {
45 existing_matches_and_result,
46 initial_matches_len,
47 };
48
49 base_match(tokens1, tokens2, &mut matcher)
50}
51
/// Strategy object for `base_match`: decides what happens when a variable
/// token is paired with a sub-phrase on the other side.
trait BaseMatcher {
    // True when variables in *both* phrases may bind (two-way unification).
    fn is_twoway_matcher(&self) -> bool;
    // Called with a variable `token` and the sub-`phrase` it would capture;
    // returning false aborts the overall match.
    fn do_match(&mut self, token: &Token, phrase: &Phrase) -> bool;
}
56
/// Matcher that only detects whether any variable token was encountered;
/// it records no bindings.
struct NoVariablesMatcher {
    // Set to true by `do_match` the first time a variable token is seen.
    has_var: bool,
}
60
impl BaseMatcher for NoVariablesMatcher {
    fn is_twoway_matcher(&self) -> bool {
        // Only variables in the first phrase act as wildcards.
        false
    }

    fn do_match(&mut self, _token: &Token, _phrase: &Phrase) -> bool {
        // Note that a variable exists; a variable matches any sub-phrase.
        self.has_var = true;
        true
    }
}
71
/// Matcher that records variable bindings for both phrases, rolling back to
/// `initial_matches_len` when a binding conflicts with an earlier one.
struct TwoWayMatcher<'a> {
    // Accumulated bindings; entries past `initial_matches_len` were added by
    // the current `base_match` call and are removed on conflict.
    existing_matches_and_result: &'a mut Vec<Match>,
    initial_matches_len: usize,
}
76
impl BaseMatcher for TwoWayMatcher<'_> {
    fn is_twoway_matcher(&self) -> bool {
        true
    }

    /// Binds `token`'s variable to `phrase`, or verifies `phrase` against a
    /// previously recorded binding for the same variable. On a conflicting
    /// rebinding, truncates the result back to its initial length and fails.
    fn do_match(&mut self, token: &Token, phrase: &Phrase) -> bool {
        let variable_already_matched = if let Some(ref existing_match) = self
            .existing_matches_and_result
            .iter()
            .find(|m| m.atom == token.atom)
        {
            if !phrase_equal(
                &existing_match.phrase[..],
                phrase,
                existing_match.depths,
                (token.open_depth, token.close_depth),
            ) {
                // Conflict: the same variable would bind two different
                // phrases. Roll back everything added during this match.
                self.existing_matches_and_result
                    .drain(self.initial_matches_len..);
                return false;
            }

            true
        } else {
            false
        };

        if !variable_already_matched {
            self.existing_matches_and_result
                .push(Match::new(token, phrase.to_vec()));
        }

        true
    }
}
113
/// Structurally matches two token phrases, delegating variable handling to
/// `matcher`. Tokens carry `open_depth`/`close_depth` counts describing
/// nesting, so the walk tracks a running depth per phrase; a variable token
/// captures the whole sub-phrase at its depth on the opposite side.
/// Returns false as soon as the phrases are shown to be incompatible.
fn base_match(tokens1: &Phrase, tokens2: &Phrase, matcher: &mut impl BaseMatcher) -> bool {
    let mut token1_iter = tokens1.iter();
    let mut token2_iter = tokens2.iter();

    // Running nesting depth within each phrase.
    let mut depth1 = 0;
    let mut depth2 = 0;

    // Index of the current token in each phrase; used to slice out the
    // sub-phrase captured by a variable.
    let mut token1_i = 0;
    let mut token2_i = 0;

    loop {
        let token1 = token1_iter.next();
        let token2 = token2_iter.next();

        match (token1, token2) {
            (None, None) => break,
            // One phrase ended before the other: no match.
            (Some(_), None) => return false,
            (None, Some(_)) => return false,
            (Some(token1), Some(token2)) => {
                depth1 += token1.open_depth;
                depth2 += token2.open_depth;

                let is_var1 = is_var_token(token1);
                // Variables in the second phrase only act as wildcards when
                // the matcher supports two-way matching.
                let is_var2 = is_var_token(token2) && matcher.is_twoway_matcher();

                if !is_var1 && !is_var2 {
                    // Concrete vs concrete: atom and depth must both agree.
                    if token1.atom != token2.atom || depth1 != depth2 {
                        return false;
                    }

                    depth1 -= token1.close_depth;
                    depth2 -= token2.close_depth;
                } else if is_var1 && is_var2 {
                    // Variable vs variable: only the depths must agree.
                    if depth1 != depth2 {
                        return false;
                    }

                    depth1 -= token1.close_depth;
                    depth2 -= token2.close_depth;
                } else if is_var1 {
                    // Variable in phrase 1 captures tokens from phrase 2
                    // until the depths realign.
                    depth2 -= token2.close_depth;

                    let start_i = token2_i;

                    while depth1 < depth2 {
                        if let Some(token2) = token2_iter.next() {
                            depth2 += token2.open_depth;
                            depth2 -= token2.close_depth;

                            token2_i += 1;
                        } else {
                            // Phrase 2 ended mid-capture.
                            return false;
                        }
                    }

                    let end_i = token2_i + 1;

                    if !matcher.do_match(token1, &tokens2[start_i..end_i]) {
                        return false;
                    }

                    depth1 -= token1.close_depth;
                } else if is_var2 {
                    // Mirror case: variable in phrase 2 captures from phrase 1.
                    depth1 -= token1.close_depth;

                    let start_i = token1_i;

                    while depth2 < depth1 {
                        if let Some(token1) = token1_iter.next() {
                            depth1 += token1.open_depth;
                            depth1 -= token1.close_depth;

                            token1_i += 1;
                        } else {
                            return false;
                        }
                    }

                    let end_i = token1_i + 1;

                    if !matcher.do_match(token2, &tokens1[start_i..end_i]) {
                        return false;
                    }

                    depth2 -= token2.close_depth;
                }

                token1_i += 1;
                token2_i += 1;
            }
        }
    }

    true
}
211
/// A lightweight variable binding that references a token range inside the
/// `State` rather than owning a copied phrase (compare `Match`).
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct MatchLite {
    // Variable name this binding is for.
    pub var_atom: Atom,
    // (open_depth, close_depth) of the variable token at its match site.
    pub var_open_close_depth: (u8, u8),
    // Same depths normalized to exclude the input phrase's outermost group
    // when the variable sits at either end of the input phrase.
    pub var_open_close_depth_norm: (u8, u8),
    // Index of the state phrase containing the matched tokens.
    pub state_i: usize,
    // Half-open token range [start, end) within that state phrase.
    pub state_token_range: (usize, usize),
}
220
impl MatchLite {
    /// Borrows the matched token range directly from the state.
    fn as_slice<'a>(&self, state: &'a State) -> &'a [Token] {
        &state[self.state_i][self.state_token_range.0..self.state_token_range.1]
    }

    /// Copies the matched range out of the state as a standalone phrase,
    /// fixing up depth counts so the result is self-consistent.
    pub fn to_phrase(&self, state: &State) -> Vec<Token> {
        let mut phrase = self.as_slice(state).to_vec();

        let subset_len = phrase.len();
        let source_len = state[self.state_i].len();

        if subset_len < source_len {
            // Strict subset of the source phrase: drop the depth contributed
            // by the source's outer group at whichever edge we touch.
            if self.state_token_range.0 == 0 {
                phrase[0].open_depth -= 1;
            }
            if self.state_token_range.1 == source_len {
                phrase[subset_len - 1].close_depth -= 1;
            }
        }

        // Remove the variable token's own (normalized) opening depth...
        phrase[0].open_depth -= self.var_open_close_depth_norm.0;

        // ...then recompute the final close_depth so the phrase's depths
        // balance to zero overall.
        let mut depth = 0;
        for i in 0..subset_len - 1 {
            depth += phrase[i].open_depth;
            depth -= phrase[i].close_depth;
        }
        depth += phrase[subset_len - 1].open_depth;
        phrase[subset_len - 1].close_depth = depth;

        phrase
    }
}
259
/// An owned variable binding: the variable's atom, the depths of the
/// variable token at its match site, and a copy of the matched phrase.
#[derive(Clone, Eq, PartialEq, Debug)]
pub struct Match {
    pub atom: Atom,
    // (open_depth, close_depth) of the variable token when it was bound.
    depths: (u8, u8),
    pub phrase: Vec<Token>,
}
266
267impl Match {
268 fn new(token: &Token, mut phrase: Vec<Token>) -> Match {
269 let len = phrase.len();
270 let depths = (token.open_depth, token.close_depth);
271
272 if len == 1 {
273 phrase[0].open_depth = 0;
274 phrase[0].close_depth = 0;
275 } else {
276 phrase[0].open_depth -= depths.0;
277 phrase[len - 1].close_depth -= depths.1;
278 }
279
280 Match {
281 atom: token.atom,
282 depths,
283 phrase: phrase,
284 }
285 }
286}
287
288pub fn match_state_variables_with_existing(
289 input_tokens: &Phrase,
290 state: &State,
291 s_i: usize,
292 existing_matches_and_result: &mut Vec<MatchLite>,
293) -> bool {
294 if let Some(has_var) = test_match_without_variables(input_tokens, &state[s_i]) {
295 if has_var {
296 match_state_variables_assuming_compatible_structure(
297 input_tokens,
298 state,
299 s_i,
300 existing_matches_and_result,
301 )
302 } else {
303 true
304 }
305 } else {
306 false
307 }
308}
309
/// Records `MatchLite` bindings for every variable in `input_tokens`
/// against the state phrase `state_i`. Structural compatibility must
/// already have been verified (e.g. via `test_match_without_variables`);
/// this function only resolves variable bindings. On a binding conflict the
/// result vector is truncated back to its length on entry and false is
/// returned.
pub fn match_state_variables_assuming_compatible_structure(
    input_tokens: &Phrase,
    state: &State,
    state_i: usize,
    existing_matches_and_result: &mut Vec<MatchLite>,
) -> bool {
    let pred_tokens = &state[state_i];

    debug_assert!(test_match_without_variables(input_tokens, pred_tokens).is_some());

    // Rollback point for conflict handling.
    let existing_matches_len = existing_matches_and_result.len();

    let mut pred_token_i = 0;

    // Running nesting depths in the input and state phrases.
    let mut input_depth = 0;
    let mut pred_depth = 0;

    for (token_i, token) in input_tokens.iter().enumerate() {
        let pred_token = &pred_tokens[pred_token_i];
        pred_token_i += 1;

        input_depth += token.open_depth;
        pred_depth += pred_token.open_depth;

        let is_var = is_var_token(token);

        if is_var {
            pred_depth -= pred_token.close_depth;

            // Consume state tokens until the depths realign; that span is
            // what the variable binds to.
            let start_i = pred_token_i - 1;

            while input_depth < pred_depth {
                let pred_token = &pred_tokens[pred_token_i];
                pred_token_i += 1;

                pred_depth += pred_token.open_depth;
                pred_depth -= pred_token.close_depth;
            }

            let end_i = pred_token_i;

            let variable_already_matched = if let Some(ref existing_match) =
                existing_matches_and_result
                    .iter()
                    .find(|m| m.var_atom == token.atom)
            {
                if !phrase_equal(
                    &existing_match.as_slice(state),
                    &pred_tokens[start_i..end_i],
                    existing_match.var_open_close_depth,
                    (token.open_depth, token.close_depth),
                ) {
                    // Conflicting rebinding: roll back and fail.
                    existing_matches_and_result.drain(existing_matches_len..);
                    return false;
                }

                true
            } else {
                false
            };

            if !variable_already_matched {
                // Depths normalized to exclude the input phrase's outermost
                // group when the variable sits at either end of the phrase.
                let normalized_depths = (
                    if token_i == 0 {
                        token.open_depth - 1
                    } else {
                        token.open_depth
                    },
                    if token_i == input_tokens.len() - 1 {
                        token.close_depth - 1
                    } else {
                        token.close_depth
                    },
                );
                let m = MatchLite {
                    var_atom: token.atom,
                    var_open_close_depth: (token.open_depth, token.close_depth),
                    var_open_close_depth_norm: normalized_depths,
                    state_i,
                    state_token_range: (start_i, end_i),
                };

                existing_matches_and_result.push(m);
            }
        } else {
            pred_depth -= pred_token.close_depth;
        }

        input_depth -= token.close_depth;
    }

    true
}
406
/// Owned-phrase counterpart of
/// `match_state_variables_assuming_compatible_structure`: records `Match`
/// bindings for every variable in `input_tokens` against `pred_tokens`.
/// Structural compatibility must already hold; on a binding conflict the
/// result vector is truncated back to its entry length and false is
/// returned.
pub fn match_variables_assuming_compatible_structure(
    input_tokens: &Phrase,
    pred_tokens: &Phrase,
    existing_matches_and_result: &mut Vec<Match>,
) -> bool {
    assert!(test_match_without_variables(input_tokens, pred_tokens).is_some());

    // Rollback point for conflict handling.
    let existing_matches_len = existing_matches_and_result.len();

    let mut pred_token_i = 0;

    // Running nesting depths in the input and predicate phrases.
    let mut input_depth = 0;
    let mut pred_depth = 0;

    for token in input_tokens {
        let pred_token = &pred_tokens[pred_token_i];
        pred_token_i += 1;

        input_depth += token.open_depth;
        pred_depth += pred_token.open_depth;

        let is_var = is_var_token(token);

        if is_var {
            pred_depth -= pred_token.close_depth;

            // Consume predicate tokens until the depths realign; that span
            // is what the variable binds to.
            let start_i = pred_token_i - 1;

            while input_depth < pred_depth {
                let pred_token = &pred_tokens[pred_token_i];
                pred_token_i += 1;

                pred_depth += pred_token.open_depth;
                pred_depth -= pred_token.close_depth;
            }

            let end_i = pred_token_i;

            let variable_already_matched = if let Some(ref existing_match) =
                existing_matches_and_result
                    .iter()
                    .find(|m| m.atom == token.atom)
            {
                if !phrase_equal(
                    &existing_match.phrase[..],
                    &pred_tokens[start_i..end_i],
                    existing_match.depths,
                    (token.open_depth, token.close_depth),
                ) {
                    // Conflicting rebinding: roll back and fail.
                    existing_matches_and_result.drain(existing_matches_len..);
                    return false;
                }

                true
            } else {
                false
            };

            if !variable_already_matched {
                let phrase = pred_tokens[start_i..end_i].to_vec();
                existing_matches_and_result.push(Match::new(token, phrase));
            }
        } else {
            pred_depth -= pred_token.close_depth;
        }

        input_depth -= token.close_depth;
    }

    true
}
480
/// Attempts to match rule `r` against the current `state`, enumerating
/// candidate-state permutations for inputs with multiple possible matches.
/// On success returns a concrete (variable-substituted) instantiation of
/// the rule; `Ok(None)` when no permutation matches; an error when the
/// permutation count exceeds `EXCESSIVE_PERMUTATION_LIMIT`.
pub(crate) fn rule_matches_state<F>(
    r: &Rule,
    state: &mut State,
    side_input: &mut F,
) -> Result<Option<RuleMatchesStateResult>, ExcessivePermutationError>
where
    F: SideInput,
{
    state.update_cache();

    let inputs = &r.inputs;
    let outputs = &r.outputs;

    let input_state_matches = if let Some(matches) =
        gather_potential_input_state_matches(inputs, &r.input_phrase_group_counts, state)
    {
        matches
    } else {
        return Ok(None);
    };

    // For each input, precompute the product of candidate counts of all
    // later inputs, so a single permutation index can be decomposed into
    // per-input candidate indices in `test_inputs_with_permutation`.
    let mut input_rev_permutation_counts = vec![1; input_state_matches.potential_matches.len()];
    let mut permutation_count = 1;
    input_state_matches
        .potential_matches
        .iter()
        .enumerate()
        .rev()
        .for_each(|(i, InputStateMatch { states, .. })| {
            permutation_count *= states.len();

            if i > 0 {
                input_rev_permutation_counts[i - 1] = permutation_count;
            }
        });

    if permutation_count > EXCESSIVE_PERMUTATION_LIMIT {
        return Err(ExcessivePermutationError { rule_id: r.id });
    }

    // Backwards/side predicates push scratch phrases onto the state; lock so
    // each permutation starts from a clean scratch region.
    state.lock_scratch();

    'outer: for p_i in 0..permutation_count {
        state.reset_scratch();

        // Start from the bindings proved to hold for every permutation, then
        // try to extend them for this particular permutation.
        let mut variables_matched = input_state_matches.definite_matched_variables.clone();

        if !test_inputs_with_permutation(
            p_i,
            inputs,
            state,
            &input_state_matches,
            &input_rev_permutation_counts,
            &mut variables_matched,
            side_input,
        ) {
            continue 'outer;
        }

        let mut forward_concrete = vec![];
        let mut outputs_concrete = vec![];

        // Substitute the matched variables into the rule's inputs...
        let mut input_phrase_group_counts = vec![];
        inputs
            .iter()
            .filter(|pred| is_concrete_pred(pred) || is_var_pred(pred))
            .for_each(|v| {
                let mut group_counter = PhraseGroupCounter::new();
                forward_concrete.push(assign_state_vars(
                    v,
                    state,
                    &variables_matched,
                    &mut group_counter,
                ));
                input_phrase_group_counts.push(group_counter.group_count);
            });

        // ...and its outputs. Side-predicate outputs are dispatched to the
        // side input immediately instead of being collected.
        let mut output_phrase_group_counts = vec![];
        outputs.iter().for_each(|v| {
            if is_side_pred(v) {
                let pred =
                    assign_state_vars(v, state, &variables_matched, &mut PhraseGroupCounter::new());
                side_input(&pred);
            } else {
                let mut group_counter = PhraseGroupCounter::new();
                outputs_concrete.push(assign_state_vars(
                    v,
                    state,
                    &variables_matched,
                    &mut group_counter,
                ));
                output_phrase_group_counts.push(group_counter.group_count);
            }
        });

        state.unlock_scratch();

        return Ok(Some(RuleMatchesStateResult {
            rule: RuleBuilder::new(forward_concrete, outputs_concrete, r.source_span)
                .input_phrase_group_counts(input_phrase_group_counts)
                .build(r.id),
            output_phrase_group_counts,
        }));
    }

    state.unlock_scratch();

    Ok(None)
}
596
/// A successfully instantiated rule plus group counts for its output
/// phrases.
#[derive(Debug)]
pub(crate) struct RuleMatchesStateResult {
    pub rule: Rule,
    pub output_phrase_group_counts: Vec<usize>,
}
602
/// Result of the pre-pass over a rule's inputs: the inputs whose candidates
/// still require permutation testing, plus bindings and consumption flags
/// that hold for every permutation.
#[derive(Debug)]
struct InputStateMatches {
    potential_matches: Vec<InputStateMatch>,
    // Variable bindings forced by inputs that matched exactly one phrase.
    definite_matched_variables: Vec<MatchLite>,
    // Per-state-phrase "already consumed" flags seeded by definite matches.
    initial_states_matched_bool: Vec<bool>,
}
609
/// Candidate state phrases for one rule input.
#[derive(Debug)]
struct InputStateMatch {
    // Index of the input within the rule.
    i_i: usize,
    // Whether the input phrase contains variable tokens.
    has_var: bool,
    // Indices of state phrases structurally compatible with the input.
    states: Vec<usize>,
}
616
impl InputStateMatch {
    /// Commits the candidate at `state_match_idx` as this input's match:
    /// enforces that a consuming input claims each state phrase at most once
    /// (via `states_matched_bool`), then binds any variables in the input
    /// against the chosen phrase.
    fn test_final_match(
        &self,
        state_match_idx: usize,
        inputs: &Vec<Vec<Token>>,
        state: &State,
        states_matched_bool: &mut [bool],
        variables_matched: &mut Vec<MatchLite>,
    ) -> bool {
        let s_i = self.states[state_match_idx];
        let input_phrase = &inputs[self.i_i];

        // Two consuming inputs may never claim the same state entry.
        if input_phrase[0].is_consuming {
            if states_matched_bool[s_i] {
                return false;
            } else {
                states_matched_bool[s_i] = true;
            }
        }

        !self.has_var
            || match_state_variables_assuming_compatible_structure(
                input_phrase,
                state,
                s_i,
                variables_matched,
            )
    }
}
648
/// Pre-pass over a rule's inputs: collects, for each input, the state
/// phrases it could match. Inputs with exactly one candidate are resolved
/// immediately (their bindings become definite); the rest are returned for
/// permutation testing. Returns None as soon as any input is unmatchable.
fn gather_potential_input_state_matches(
    inputs: &Vec<Vec<Token>>,
    input_phrase_group_counts: &Vec<usize>,
    state: &State,
) -> Option<InputStateMatches> {
    let mut potential_matches = vec![]; let mut multiple_matches = vec![]; let mut single_matches = vec![]; for (i_i, input) in inputs.iter().enumerate() {
        // A lone variable input can match any state phrase.
        if input.len() == 1 && is_var_pred(input) {
            let states = state.iter().enumerate().map(|(i, _)| i).collect::<Vec<_>>();
            potential_matches.push(InputStateMatch {
                i_i,
                has_var: true,
                states,
            });
            continue;
        }

        // Backwards/side/negated predicates are handled elsewhere.
        if !is_concrete_pred(input) && !is_var_pred(input) {
            continue;
        }

        let cached_state_matches =
            state.match_cached_state_indices_for_rule_input(input, input_phrase_group_counts[i_i]);

        let mut has_var = false;
        let mut states = vec![];
        for s_i in cached_state_matches {
            if let Some(match_has_var) = test_match_without_variables(input, &state[*s_i]) {
                if match_has_var {
                    has_var = match_has_var;
                }
                states.push(*s_i);
            }
        }

        if states.len() == 0 {
            // This input can never match, so the whole rule fails.
            return None;
        } else if states.len() == 1 {
            single_matches.push(InputStateMatch {
                i_i,
                has_var,
                states,
            });
        } else {
            multiple_matches.push(InputStateMatch {
                i_i,
                has_var,
                states,
            });
        }
    }

    // Inputs with a unique candidate are resolved now; their bindings hold
    // in every permutation.
    let mut definite_matched_variables = vec![];
    let mut initial_states_matched_bool = vec![false; state.len()];

    for input_state_match in &single_matches {
        if !input_state_match.test_final_match(
            0,
            inputs,
            state,
            &mut initial_states_matched_bool,
            &mut definite_matched_variables,
        ) {
            return None;
        }
    }

    if definite_matched_variables.len() > 0 {
        // The definite bindings may narrow a multi-candidate input down to a
        // single viable state phrase; probe each candidate and resolve the
        // input now if exactly one survives.
        for input_state_match in multiple_matches {
            let mut state_single_match_idx = None;

            if input_state_match.has_var {
                let input_phrase = &inputs[input_state_match.i_i];
                let existing_matches_len = definite_matched_variables.len();

                for (state_match_idx, s_i) in input_state_match.states.iter().enumerate() {
                    if match_state_variables_assuming_compatible_structure(
                        input_phrase,
                        state,
                        *s_i,
                        &mut definite_matched_variables,
                    ) {
                        // Discard the probe's bindings; we only record
                        // whether this candidate is viable.
                        definite_matched_variables.drain(existing_matches_len..);

                        if state_single_match_idx.is_some() {
                            // More than one viable candidate: leave it for
                            // permutation testing.
                            state_single_match_idx = None;
                            break;
                        }
                        state_single_match_idx = Some(state_match_idx);
                    }
                }
            }

            if let Some(state_single_match_idx) = state_single_match_idx {
                if !input_state_match.test_final_match(
                    state_single_match_idx,
                    inputs,
                    state,
                    &mut initial_states_matched_bool,
                    &mut definite_matched_variables,
                ) {
                    return None;
                }
            } else {
                potential_matches.push(input_state_match);
            }
        }
    } else {
        potential_matches.append(&mut multiple_matches);
    }

    // Try the most constrained inputs first to fail fast.
    potential_matches.sort_unstable_by_key(|InputStateMatch { states, .. }| states.len());

    Some(InputStateMatches {
        potential_matches,
        definite_matched_variables,
        initial_states_matched_bool,
    })
}
780
/// Tests one permutation `p_i` of candidate state assignments: binds each
/// remaining input to its candidate for this permutation, then checks
/// backwards, side, and negated predicates against the resulting bindings.
fn test_inputs_with_permutation(
    p_i: usize,
    inputs: &Vec<Vec<Token>>,
    state: &mut State,
    input_state_matches: &InputStateMatches,
    input_rev_permutation_counts: &[usize],
    variables_matched: &mut Vec<MatchLite>,
    side_input: &mut impl SideInput,
) -> bool {
    // State length before backwards/side predicates push scratch phrases;
    // used below to restrict negated checks to pre-existing phrases.
    let len = state.len();
    let mut states_matched_bool = input_state_matches.initial_states_matched_bool.clone();

    for (concrete_input_i, input_state_match) in
        input_state_matches.potential_matches.iter().enumerate()
    {
        // Decompose the permutation index into this input's candidate index.
        let branch_idx =
            (p_i / input_rev_permutation_counts[concrete_input_i]) % input_state_match.states.len();

        if !input_state_match.test_final_match(
            branch_idx,
            inputs,
            state,
            &mut states_matched_bool,
            variables_matched,
        ) {
            return false;
        }
    }

    // First backwards pass: bind whatever the backwards predicates can
    // produce so the side predicates below see those bindings. The result
    // is deliberately ignored; failures are caught by the second pass.
    // NOTE(review): confirm this two-pass ordering is intentional.
    for input in inputs.iter().filter(|input| is_backwards_pred(input)) {
        match_backwards_variables(input, state, variables_matched);
    }

    for input in inputs.iter().filter(|input| is_side_pred(input)) {
        if !match_side_variables(input, state, variables_matched, side_input) {
            return false;
        }
    }

    // Second backwards pass: enforce that every backwards predicate holds.
    for input in inputs.iter().filter(|input| is_backwards_pred(input)) {
        if !match_backwards_variables(input, state, variables_matched) {
            return false;
        }
    }

    // Negated inputs must match no unconsumed, pre-existing state phrase.
    for input in inputs.iter().filter(|input| is_negated_pred(input)) {
        if state
            .iter()
            .enumerate()
            .filter(|&(s_i, _)| s_i < len && !states_matched_bool[s_i])
            .any(|(s_i, _)| {
                match_state_variables_with_existing(input, state, s_i, variables_matched)
            })
        {
            return false;
        }
    }

    true
}
848
849fn match_backwards_variables(
850 pred: &Phrase,
851 state: &mut State,
852 existing_matches_and_result: &mut Vec<MatchLite>,
853) -> bool {
854 let mut group_counter = PhraseGroupCounter::new();
855 let pred = assign_state_vars(pred, state, existing_matches_and_result, &mut group_counter);
856
857 if let Some(eval_result) = evaluate_backwards_pred(&pred) {
858 let s_i = state.len();
859 state.push_with_metadata(eval_result, group_counter.group_count);
860
861 match_state_variables_with_existing(&pred, state, s_i, existing_matches_and_result)
862 } else {
863 false
864 }
865}
866
867fn match_side_variables<F>(
868 pred: &Phrase,
869 state: &mut State,
870 existing_matches_and_result: &mut Vec<MatchLite>,
871 side_input: &mut F,
872) -> bool
873where
874 F: SideInput,
875{
876 let mut group_counter = PhraseGroupCounter::new();
877 let pred = assign_state_vars(pred, state, existing_matches_and_result, &mut group_counter);
878
879 if let Some(eval_result) = side_input(&pred) {
880 if eval_result.len() == 0 {
881 return true;
882 }
883
884 let s_i = state.len();
885 state.push_with_metadata(eval_result, group_counter.group_count);
886
887 match_state_variables_with_existing(&pred, state, s_i, existing_matches_and_result)
888 } else {
889 false
890 }
891}
892
/// Substitutes every bound variable in `tokens` with a copy of its matched
/// phrase from `state`, tallying phrase groups into `group_counter`.
/// A single-token result at depth 0 is wrapped in one enclosing group so
/// the output forms a well-formed phrase.
pub(crate) fn assign_state_vars(
    tokens: &Phrase,
    state: &State,
    matches: &[MatchLite],
    group_counter: &mut PhraseGroupCounter,
) -> Vec<Token> {
    let mut result: Vec<Token> = vec![];

    for token in tokens {
        if is_var_token(token) {
            if let Some(m) = matches.iter().find(|m| m.var_atom == token.atom) {
                // Splice in the bound phrase, adjusted to the variable's
                // position/depth.
                let mut append_phrase = normalize_match_phrase(token, m.to_phrase(state));
                for t in &append_phrase {
                    group_counter.count(t);
                }
                result.append(&mut append_phrase);
                continue;
            }
        }

        // Concrete token (or unbound variable): copy through unchanged.
        result.push(token.clone());
        group_counter.count(token);
    }

    if result.len() == 1 && result[0].open_depth == 0 {
        result[0].open_depth = 1;
        result[0].close_depth = 1;
        group_counter.group_count += 1;
    }

    result
}
926
/// Owned-`Match` counterpart of `assign_state_vars`: substitutes every
/// bound variable in `tokens` with its matched phrase.
pub fn assign_vars(tokens: &Phrase, matches: &[Match]) -> Vec<Token> {
    let mut result: Vec<Token> = vec![];

    for token in tokens {
        if is_var_token(token) {
            if let Some(m) = matches.iter().find(|m| m.atom == token.atom) {
                result.append(&mut normalize_match_phrase(token, m.phrase.clone()));
                continue;
            }
        }

        result.push(token.clone());
    }

    // NOTE(review): unlike `assign_state_vars`, this wraps a single-token
    // result without first checking `open_depth == 0` — confirm whether
    // clobbering a non-zero depth here is intended.
    if result.len() == 1 {
        result[0].open_depth = 1;
        result[0].close_depth = 1;
    }

    result
}
949
/// Evaluates an arithmetic/comparison ("backwards") predicate phrase.
/// For `+`, `-`, and the modulo predicate, exactly one unbound argument may
/// be solved for; comparison predicates require all arguments bound.
/// Returns the (possibly completed) phrase on success, None on failure.
///
/// # Panics
/// Hits `unreachable!` if the leading token is not a backwards predicate,
/// and `expect` if `==` is missing either argument group.
pub fn evaluate_backwards_pred(tokens: &Phrase) -> Option<Vec<Token>> {
    match tokens[0].flag {
        // n1 + n2 = n3: any one of the three may be unbound.
        TokenFlag::BackwardsPred(BackwardsPred::Plus) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();
            let n3 = tokens[3].as_integer();

            match (n1, n2, n3) {
                (Some(v1), Some(v2), None) => Some(vec![
                    tokens[0].clone(),
                    tokens[1].clone(),
                    tokens[2].clone(),
                    Token::new_integer(v1 + v2, 0, 1),
                ]),
                (Some(v1), None, Some(v3)) => Some(vec![
                    tokens[0].clone(),
                    tokens[1].clone(),
                    Token::new_integer(v3 - v1, 0, 0),
                    tokens[3].clone(),
                ]),
                (None, Some(v2), Some(v3)) => Some(vec![
                    tokens[0].clone(),
                    Token::new_integer(v3 - v2, 0, 0),
                    tokens[2].clone(),
                    tokens[3].clone(),
                ]),
                (Some(v1), Some(v2), Some(v3)) if v1 + v2 == v3 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        // n1 - n2 = n3: any one of the three may be unbound.
        TokenFlag::BackwardsPred(BackwardsPred::Minus) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();
            let n3 = tokens[3].as_integer();

            match (n1, n2, n3) {
                (Some(v1), Some(v2), None) => Some(vec![
                    tokens[0].clone(),
                    tokens[1].clone(),
                    tokens[2].clone(),
                    Token::new_integer(v1 - v2, 0, 1),
                ]),
                // Solve for n2: n2 = n1 - n3.
                (Some(v1), None, Some(v3)) => Some(vec![
                    tokens[0].clone(),
                    tokens[1].clone(),
                    Token::new_integer(-v3 + v1, 0, 0),
                    tokens[3].clone(),
                ]),
                // Solve for n1: n1 = n3 + n2.
                (None, Some(v2), Some(v3)) => Some(vec![
                    tokens[0].clone(),
                    Token::new_integer(v3 + v2, 0, 0),
                    tokens[2].clone(),
                    tokens[3].clone(),
                ]),
                (Some(v1), Some(v2), Some(v3)) if v1 - v2 == v3 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        // Comparison predicates: all arguments must be bound integers.
        TokenFlag::BackwardsPred(BackwardsPred::Lt) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();

            match (n1, n2) {
                (Some(v1), Some(v2)) if v1 < v2 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        TokenFlag::BackwardsPred(BackwardsPred::Gt) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();

            match (n1, n2) {
                (Some(v1), Some(v2)) if v1 > v2 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        TokenFlag::BackwardsPred(BackwardsPred::Lte) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();

            match (n1, n2) {
                (Some(v1), Some(v2)) if v1 <= v2 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        TokenFlag::BackwardsPred(BackwardsPred::Gte) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();

            match (n1, n2) {
                (Some(v1), Some(v2)) if v1 >= v2 => Some(tokens.to_owned()),
                _ => None,
            }
        }
        // n1 mod n2 = n3; n3 may be unbound.
        TokenFlag::BackwardsPred(BackwardsPred::ModNeg) => {
            let n1 = tokens[1].as_integer();
            let n2 = tokens[2].as_integer();
            let n3 = tokens[3].as_integer();

            // NOTE(review): x - n * (x / n) with Rust's truncating division
            // is the truncated remainder (same as `%`); the `ModNeg` name
            // suggests floored/negative-aware modulo — confirm intent.
            let mod_neg = |x: i32, n: i32| x - n * (x / n);

            match (n1, n2, n3) {
                (Some(v1), Some(v2), Some(v3)) => {
                    if mod_neg(v1, v2) == v3 {
                        Some(tokens.to_owned())
                    } else {
                        None
                    }
                }
                (Some(v1), Some(v2), None) => Some(vec![
                    tokens[0].clone(),
                    tokens[1].clone(),
                    tokens[2].clone(),
                    Token::new_integer(mod_neg(v1, v2), 0, 1),
                ]),
                _ => None,
            }
        }
        // Structural equality of the two argument groups; honours negation
        // on the predicate token.
        TokenFlag::BackwardsPred(BackwardsPred::Equal) => {
            let mut args = tokens.groups().skip(1);
            let arg1 = args.next().expect("== : first argument missing");
            let arg2 = args.next().expect("== : second argument missing");
            if phrase_equal(arg1, arg2, (0, 0), (0, 1)) {
                if tokens[0].is_negated {
                    None
                } else {
                    Some(tokens.to_owned())
                }
            } else {
                if tokens[0].is_negated {
                    Some(tokens.to_owned())
                } else {
                    None
                }
            }
        }
        _ => unreachable!("{:?}", tokens[0].flag),
    }
}
1089
1090#[inline]
1091pub fn phrase_equal(a: &Phrase, b: &Phrase, a_depths: (u8, u8), b_depths: (u8, u8)) -> bool {
1092 if a.len() != b.len() {
1093 return false;
1094 }
1095
1096 let len = b.len();
1097
1098 if len == 1 {
1099 token_equal(&a[0], &b[0], true, None, None)
1100 } else {
1101 token_equal(
1102 &a[0],
1103 &b[0],
1104 false,
1105 Some((a_depths.0, 0)),
1106 Some((b_depths.0, 0)),
1107 ) && a
1108 .iter()
1109 .skip(1)
1110 .take(len - 2)
1111 .zip(b.iter().skip(1).take(len - 2))
1112 .all(|(t1, t2)| token_equal(t1, t2, false, None, None))
1113 && token_equal(
1114 &a[len - 1],
1115 &b[len - 1],
1116 false,
1117 Some((0, a_depths.1)),
1118 Some((0, b_depths.1)),
1119 )
1120 }
1121}