scryer_prolog/parser/
parser.rs

1use dashu::Integer;
2use dashu::Rational;
3
4use crate::arena::*;
5use crate::atom_table::*;
6use crate::parser::ast::*;
7use crate::parser::char_reader::*;
8use crate::parser::lexer::*;
9
10use std::cell::Cell;
11use std::mem;
12use std::ops::Neg;
13
/// The token categories tracked on the parser's shift-reduce stack.
#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenType {
    Term,              // a (fully reduced) term
    Open,              // '(' preceded by layout
    OpenCT,            // '(' not preceded by layout, i.e. directly after a functor name
    OpenList,          // '['
    OpenCurly,         // '{'
    HeadTailSeparator, // '|'
    Comma,             // ','
    Close,             // ')'
    CloseList,  // ']'
    CloseCurly, // '}'
    End,        // '.' terminating a term
}
28
/// Specifies whether the token sequence should be read from the lexer
/// (`Default`) or supplied up front via the `Provided` variant.
#[derive(Debug)]
pub enum Tokens {
    Default,
    Provided(Vec<Token>),
}
38
39impl TokenType {
40    fn is_sep(self) -> bool {
41        matches!(
42            self,
43            TokenType::HeadTailSeparator
44                | TokenType::OpenCT
45                | TokenType::Open
46                | TokenType::Close
47                | TokenType::OpenList
48                | TokenType::CloseList
49                | TokenType::OpenCurly
50                | TokenType::CloseCurly
51                | TokenType::Comma
52        )
53    }
54}
55
/// A stack entry of the operator-precedence parser: the token's
/// category plus the operator data needed to decide reductions.
#[derive(Debug, Clone, Copy)]
struct TokenDesc {
    tt: TokenType,
    priority: usize,      // operator priority; 0 for plain terms
    spec: u32,            // specifier bitmask (TERM, LTERM, XFX, FY, ...)
    unfold_bounds: usize, // bound on re-expanding comma-compacted terms; 0 appears to mean "unbounded" -- confirm against expand_comma_compacted_terms
}
63
/// Attempts to view the list cell `[head | tail]` as a string, i.e. a
/// list whose elements are all single-character atoms or chars.
///
/// On success, returns the accumulated characters together with:
/// * `Some(tail)` when the list ends in a variable or other non-`[]`
///   term (a *partial* string);
/// * `None` when the list is proper (ends in `[]` or a string), so a
///   complete string can be built.
///
/// Returns `Err` with an equivalent cons cell when `head` -- or some
/// later element -- is not a single character, letting the caller fall
/// back to the ordinary list representation.
pub(crate) fn as_partial_string(
    head: Term,
    mut tail: Term,
) -> Result<(String, Option<Box<Term>>), Term> {
    // The head must itself be a single character, otherwise the whole
    // cell is returned unchanged as an ordinary cons.
    let mut string = match &head {
        Term::Literal(_, Literal::Atom(atom)) => {
            if let Some(c) = atom.as_char() {
                c.to_string()
            } else {
                return Err(Term::Cons(Cell::default(), Box::new(head), Box::new(tail)));
            }
        }
        Term::Literal(_, Literal::Char(c)) => c.to_string(),
        _ => {
            return Err(Term::Cons(Cell::default(), Box::new(head), Box::new(tail)));
        }
    };

    let mut orig_tail = Box::new(tail);
    let mut tail_ref = &mut orig_tail;

    // Walk the tail, appending characters until a non-character element
    // or a non-cons tail is found.
    loop {
        match &mut **tail_ref {
            Term::Cons(_, prev, succ) => {
                match prev.as_ref() {
                    Term::Literal(_, Literal::Atom(atom)) => {
                        if let Some(c) = atom.as_char() {
                            string.push(c);
                        } else {
                            return Err(Term::Cons(Cell::default(), Box::new(head), orig_tail));
                        }
                    }
                    Term::Literal(_, Literal::Char(c)) => {
                        string.push(*c);
                    }
                    _ => {
                        // Non-character element: stop here and keep the
                        // remainder as the (cloned) list tail.
                        tail = Term::Cons(
                            Cell::default(),
                            Box::new((**prev).clone()),
                            Box::new((**succ).clone()),
                        );
                        break;
                    }
                }

                tail_ref = succ;
            }
            Term::PartialString(_, pstr, tail) => {
                // Absorb an embedded partial string and continue with its tail.
                string += &pstr;
                tail_ref = tail;
            }
            Term::CompleteString(_, cstr) => {
                // A complete string closes the list.
                string += &*cstr.as_str();
                tail = Term::Literal(Cell::default(), Literal::Atom(atom!("[]")));
                break;
            }
            tail_ref => {
                // Variable or other terminator: move it out, leaving a
                // placeholder behind.
                tail = mem::replace(tail_ref, Term::AnonVar);
                break;
            }
        }
    }

    match &tail {
        Term::AnonVar | Term::Var(..) => Ok((string, Some(Box::new(tail)))),
        Term::Literal(_, Literal::Atom(atom!("[]"))) => Ok((string, None)),
        Term::Literal(_, Literal::String(tail)) => {
            string += &*tail.as_str();
            Ok((string, None))
        }
        _ => Ok((string, Some(Box::new(tail)))),
    }
}
137
138pub fn get_op_desc(name: Atom, op_dir: &CompositeOpDir) -> Option<CompositeOpDesc> {
139    let mut op_desc = CompositeOpDesc {
140        pre: 0,
141        inf: 0,
142        post: 0,
143        spec: 0,
144    };
145
146    if let Some(cell) = op_dir.get(name, Fixity::Pre) {
147        let (pri, spec) = cell.get();
148
149        if pri > 0 {
150            op_desc.pre = pri as usize;
151            op_desc.spec |= spec as u32;
152        } else if name == atom!("-") {
153            op_desc.spec |= NEGATIVE_SIGN;
154        }
155    }
156
157    if let Some(cell) = op_dir.get(name, Fixity::Post) {
158        let (pri, spec) = cell.get();
159
160        if pri > 0 {
161            op_desc.post = pri as usize;
162            op_desc.spec |= spec as u32;
163        }
164    }
165
166    if let Some(cell) = op_dir.get(name, Fixity::In) {
167        let (pri, spec) = cell.get();
168
169        if pri > 0 {
170            op_desc.inf = pri as usize;
171            op_desc.spec |= spec as u32;
172        }
173    }
174
175    if op_desc.pre + op_desc.post + op_desc.inf == 0 && !is_negate!(op_desc.spec) {
176        None
177    } else {
178        Some(op_desc)
179    }
180}
181
182pub fn get_clause_spec(name: Atom, arity: usize, op_dir: &CompositeOpDir) -> Option<OpDesc> {
183    match arity {
184        1 => {
185            /* This is a clause with an operator principal functor. Prefix operators
186            are supposed over post.
187             */
188            if let Some(cell) = op_dir.get(name, Fixity::Pre) {
189                return Some(cell);
190            }
191
192            if let Some(cell) = op_dir.get(name, Fixity::Post) {
193                return Some(cell);
194            }
195        }
196        2 => {
197            if let Some(cell) = op_dir.get(name, Fixity::In) {
198                return Some(cell);
199            }
200        }
201        _ => {}
202    };
203
204    None
205}
206
207fn affirm_xfx(priority: usize, d2: TokenDesc, d3: TokenDesc, d1: TokenDesc) -> bool {
208    d2.priority <= priority
209        && is_term!(d3.spec)
210        && is_term!(d1.spec)
211        && d3.priority < d2.priority
212        && d1.priority < d2.priority
213}
214
215fn affirm_yfx(priority: usize, d2: TokenDesc, d3: TokenDesc, d1: TokenDesc) -> bool {
216    d2.priority <= priority
217        && ((is_term!(d3.spec) && d3.priority < d2.priority)
218            || (is_lterm!(d3.spec) && d3.priority == d2.priority))
219        && is_term!(d1.spec)
220        && d1.priority < d2.priority
221}
222
223fn affirm_xfy(priority: usize, d2: TokenDesc, d3: TokenDesc, d1: TokenDesc) -> bool {
224    d2.priority < priority
225        && is_term!(d3.spec)
226        && d3.priority < d2.priority
227        && is_term!(d1.spec)
228        && d1.priority <= d2.priority
229}
230
231fn affirm_yf(d1: TokenDesc, d2: TokenDesc) -> bool {
232    let is_valid_lterm = is_lterm!(d2.spec) && d2.priority == d1.priority;
233    (is_term!(d2.spec) && d2.priority < d1.priority) || is_valid_lterm
234}
235
236fn affirm_xf(d1: TokenDesc, d2: TokenDesc) -> bool {
237    is_term!(d2.spec) && d2.priority < d1.priority
238}
239
240fn affirm_fy(priority: usize, d1: TokenDesc, d2: TokenDesc) -> bool {
241    d2.priority < priority && is_term!(d1.spec) && d1.priority <= d2.priority
242}
243
244fn affirm_fx(priority: usize, d1: TokenDesc, d2: TokenDesc) -> bool {
245    d2.priority <= priority && is_term!(d1.spec) && d1.priority < d2.priority
246}
247
/// The priorities of an atom in every fixity class at once, with the
/// corresponding specifier bits merged into `spec`. A field of 0 means
/// the atom is not an operator of that fixity.
#[derive(Debug, Clone, Copy)]
pub struct CompositeOpDesc {
    pub pre: usize,  // prefix priority
    pub inf: usize,  // infix priority
    pub post: usize, // postfix priority
    pub spec: Specifier,
}
255
/// An operator-precedence (shift-reduce) parser for Prolog terms.
/// `stack` holds token descriptors while `terms` holds the partially
/// reduced terms they describe; the two grow and shrink in tandem.
#[derive(Debug)]
pub struct Parser<'a, R> {
    pub lexer: Lexer<'a, R>,
    tokens: Vec<Token>, // pending tokens, stored reversed so pop() yields the next one
    stack: Vec<TokenDesc>,
    terms: Vec<Term>,
}
263
264fn read_tokens<R: CharRead>(lexer: &mut Lexer<R>) -> Result<Vec<Token>, ParserError> {
265    let mut tokens = vec![];
266
267    loop {
268        match lexer.next_token() {
269            Ok(token) => {
270                let at_end = token.is_end();
271                tokens.push(token);
272
273                if at_end {
274                    break;
275                }
276            }
277            Err(e) if e.is_unexpected_eof() && !tokens.is_empty() => {
278                return Err(ParserError::IncompleteReduction(
279                    lexer.line_num,
280                    lexer.col_num,
281                ));
282            }
283            Err(e) => {
284                return Err(e);
285            }
286        }
287    }
288
289    tokens.reverse();
290
291    Ok(tokens)
292}
293
294fn atomize_term(atom_tbl: &AtomTable, term: &Term) -> Option<Atom> {
295    match term {
296        Term::Literal(_, ref c) => atomize_constant(atom_tbl, *c),
297        _ => None,
298    }
299}
300
301fn atomize_constant(atom_tbl: &AtomTable, c: Literal) -> Option<Atom> {
302    match c {
303        Literal::Atom(ref name) => Some(*name),
304        Literal::Char(c) => Some(AtomTable::build_with(atom_tbl, &c.to_string())),
305        _ => None,
306    }
307}
308
309impl<'a, R: CharRead> Parser<'a, R> {
310    pub fn new(stream: R, machine_st: &'a mut MachineState) -> Self {
311        Parser {
312            lexer: Lexer::new(stream, machine_st),
313            tokens: vec![],
314            stack: vec![],
315            terms: vec![],
316        }
317    }
318
319    pub fn from_lexer(lexer: Lexer<'a, R>) -> Self {
320        Parser {
321            lexer,
322            tokens: vec![],
323            stack: vec![],
324            terms: vec![],
325        }
326    }
327
328    fn sep_to_atom(&mut self, tt: TokenType) -> Option<Atom> {
329        match tt {
330            TokenType::Open | TokenType::OpenCT => Some(atom!("(")),
331            TokenType::Close => Some(atom!(")")),
332            TokenType::OpenList => Some(atom!("[")),
333            TokenType::CloseList => Some(atom!("]")),
334            TokenType::OpenCurly => Some(atom!("{")),
335            TokenType::CloseCurly => Some(atom!("}")),
336            TokenType::HeadTailSeparator => Some(atom!("|")),
337            TokenType::Comma => Some(atom!(",")),
338            TokenType::End => Some(atom!(".")),
339            _ => None,
340        }
341    }
342
    /// The lexer's current line number.
    #[inline]
    pub fn line_num(&self) -> usize {
        self.lexer.line_num
    }
347
    /// The lexer's current column number.
    #[inline]
    pub fn col_num(&self) -> usize {
        self.lexer.col_num
    }
352
353    fn get_term_name(&mut self, td: TokenDesc) -> Option<Atom> {
354        match td.tt {
355            TokenType::HeadTailSeparator => Some(atom!("|")),
356            TokenType::Comma => Some(atom!(",")),
357            TokenType::Term => match self.terms.pop() {
358                Some(Term::Literal(_, Literal::Atom(atom))) => Some(atom),
359                Some(term) => {
360                    self.terms.push(term);
361                    None
362                }
363                _ => None,
364            },
365            _ => None,
366        }
367    }
368
    /// Reduces the binary operator described by `td`: pops the second
    /// argument, the operator name and the first argument, then pushes
    /// the combined clause onto the term stack and a fresh `Term`
    /// descriptor (with specifier `spec`) onto the token stack.
    ///
    /// NOTE(review): if the name or first argument cannot be popped,
    /// the terms popped so far are dropped silently -- presumably the
    /// reduction conditions guarantee this never happens; confirm
    /// before relying on it.
    fn push_binary_op(&mut self, td: TokenDesc, spec: Specifier) {
        if let Some(arg2) = self.terms.pop() {
            if let Some(name) = self.get_term_name(td) {
                if let Some(arg1) = self.terms.pop() {
                    let term = Term::Clause(Cell::default(), name, vec![arg1, arg2]);

                    self.terms.push(term);
                    self.stack.push(TokenDesc {
                        tt: TokenType::Term,
                        priority: td.priority,
                        spec,
                        unfold_bounds: 0,
                    });
                }
            }
        }
    }
386
    /// Reduces a unary operator: pops the argument and the operator
    /// name and pushes the resulting one-argument clause plus a fresh
    /// `Term` descriptor. For postfix operators the name and argument
    /// sit on the term stack in the opposite order, hence the swap.
    fn push_unary_op(&mut self, td: TokenDesc, spec: Specifier, assoc: u32) {
        if let Some(mut arg1) = self.terms.pop() {
            if let Some(mut name) = self.terms.pop() {
                // Postfix: the operand was pushed before the operator name.
                if is_postfix!(assoc) {
                    mem::swap(&mut arg1, &mut name);
                }

                // Only an atom can serve as the operator name; anything
                // else leaves both popped terms dropped (see the note on
                // push_binary_op).
                if let Term::Literal(_, Literal::Atom(name)) = name {
                    let term = Term::Clause(Cell::default(), name, vec![arg1]);

                    self.terms.push(term);
                    self.stack.push(TokenDesc {
                        tt: TokenType::Term,
                        priority: td.priority,
                        spec,
                        unfold_bounds: 0,
                    });
                }
            }
        }
    }
408
409    fn promote_atom_op(&mut self, atom: Atom, priority: usize, assoc: u32) {
410        self.terms
411            .push(Term::Literal(Cell::default(), Literal::Atom(atom)));
412        self.stack.push(TokenDesc {
413            tt: TokenType::Term,
414            priority,
415            spec: assoc,
416            unfold_bounds: 0,
417        });
418    }
419
    /// Shifts `token` onto the parser stacks. Literal and variable
    /// tokens become terms (string literals are expanded according to
    /// the `double_quotes` flag), while punctuation tokens are recorded
    /// under their own token type. In every case a descriptor with the
    /// supplied `priority` and `spec` is pushed onto the token stack.
    fn shift(&mut self, token: Token, priority: usize, spec: Specifier) {
        let tt = match token {
            // double_quotes == codes: expand the string into a proper
            // list of character codes.
            Token::Literal(Literal::String(s))
                if self.lexer.machine_st.flags.double_quotes.is_codes() =>
            {
                let mut list = Term::Literal(Cell::default(), Literal::Atom(atom!("[]")));

                // Build the list back-to-front so the cells end up in
                // source order.
                for c in s.as_str().chars().rev() {
                    list = Term::Cons(
                        Cell::default(),
                        Box::new(Term::Literal(
                            Cell::default(),
                            Literal::Fixnum(Fixnum::build_with(c as i64)),
                        )),
                        Box::new(list),
                    );
                }

                self.terms.push(list);
                TokenType::Term
            }
            // double_quotes == chars: keep the compact string representation.
            Token::Literal(Literal::String(s))
                if self.lexer.machine_st.flags.double_quotes.is_chars() =>
            {
                self.terms.push(Term::CompleteString(Cell::default(), s));
                TokenType::Term
            }
            Token::Literal(c) => {
                self.terms.push(Term::Literal(Cell::default(), c));
                TokenType::Term
            }
            Token::Var(v) => {
                // "_" denotes the anonymous variable.
                if v.trim() == "_" {
                    self.terms.push(Term::AnonVar);
                } else {
                    self.terms.push(Term::Var(Cell::default(), VarPtr::from(v)));
                }

                TokenType::Term
            }
            Token::Comma => TokenType::Comma,
            Token::Open => TokenType::Open,
            Token::Close => TokenType::Close,
            Token::OpenCT => TokenType::OpenCT,
            Token::HeadTailSeparator => TokenType::HeadTailSeparator,
            Token::OpenList => TokenType::OpenList,
            Token::CloseList => TokenType::CloseList,
            Token::OpenCurly => TokenType::OpenCurly,
            Token::CloseCurly => TokenType::CloseCurly,
            Token::End => TokenType::End,
        };

        self.stack.push(TokenDesc {
            tt,
            priority,
            spec,
            unfold_bounds: 0,
        });
    }
479
    /// Repeatedly reduces operator applications at the top of the
    /// stack whose priority fits under `priority`: binary reductions
    /// (xfx/yfx/xfy) over the top three descriptors are tried first,
    /// then unary ones (yf/xf/fy/fx) over the top two. When no rule
    /// applies, the popped descriptors are pushed back unchanged and
    /// the loop ends.
    fn reduce_op(&mut self, priority: usize) {
        loop {
            if let Some(desc1) = self.stack.pop() {
                if let Some(desc2) = self.stack.pop() {
                    if let Some(desc3) = self.stack.pop() {
                        if is_xfx!(desc2.spec) && affirm_xfx(priority, desc2, desc3, desc1)
                            || is_yfx!(desc2.spec) && affirm_yfx(priority, desc2, desc3, desc1)
                        {
                            // non- or left-associative infix: result is an lterm.
                            self.push_binary_op(desc2, LTERM);
                            continue;
                        } else if is_xfy!(desc2.spec) && affirm_xfy(priority, desc2, desc3, desc1) {
                            // right-associative infix: result is a term.
                            self.push_binary_op(desc2, TERM);
                            continue;
                        } else {
                            self.stack.push(desc3);
                        }
                    }

                    if is_yf!(desc1.spec) && affirm_yf(desc1, desc2) {
                        self.push_unary_op(desc1, LTERM, YF);
                        continue;
                    } else if is_xf!(desc1.spec) && affirm_xf(desc1, desc2) {
                        self.push_unary_op(desc1, LTERM, XF);
                        continue;
                    } else if is_fy!(desc2.spec) && affirm_fy(priority, desc1, desc2) {
                        self.push_unary_op(desc2, TERM, FY);
                        continue;
                    } else if is_fx!(desc2.spec) && affirm_fx(priority, desc1, desc2) {
                        self.push_unary_op(desc2, TERM, FX);
                        continue;
                    } else {
                        // no reduction applies: restore the stack and stop.
                        self.stack.push(desc2);
                        self.stack.push(desc1);
                    }
                } else {
                    self.stack.push(desc1);
                }
            }

            break;
        }
    }
522
    /// Walks the stack top-down counting the arguments of a bracketed
    /// argument list: even positions (from the top) must hold terms or
    /// operators usable as arguments, odd positions must hold commas,
    /// until the matching `OpenCT` is reached. Returns `None` when the
    /// stack does not have this shape.
    fn compute_arity_in_brackets(&self) -> Option<usize> {
        let mut arity = 0;

        for (i, desc) in self.stack.iter().rev().enumerate() {
            if i % 2 == 0 {
                // expect a term or non-comma operator.
                if let TokenType::Comma = desc.tt {
                    return None;
                } else if is_term!(desc.spec) || is_op!(desc.spec) || is_negate!(desc.spec) {
                    arity += 1;
                } else {
                    return None;
                }
            } else {
                // expect the opening bracket or an argument-separating comma.
                if desc.tt == TokenType::OpenCT {
                    return Some(arity);
                }

                if let TokenType::Comma = desc.tt {
                    continue;
                } else {
                    return None;
                }
            }
        }

        // ran off the bottom of the stack without finding '('.
        None
    }
551
    /// Reduces a functor application `name(Arg1, ..., ArgN)` at the top
    /// of the stack into a single term. Returns `false` when the stack
    /// does not hold a complete, well-formed argument list preceded by
    /// an atomizable functor name. A '.'/2 application is turned into a
    /// cons cell (or a partial/complete string where possible).
    fn reduce_term(&mut self) -> bool {
        if self.stack.is_empty() {
            return false;
        }

        // 999 is the maximum argument priority: reduce all pending
        // operators inside the argument list first.
        self.reduce_op(999);

        let arity = match self.compute_arity_in_brackets() {
            Some(arity) => arity,
            None => return false,
        };

        if self.stack.len() > 2 * arity {
            let idx = self.stack.len() - 2 * arity - 1;

            // An infix operator directly before '(' only forms a functor
            // if what precedes it is an operator or separator.
            if is_infix!(self.stack[idx].spec)
                && idx > 0
                && !is_op!(self.stack[idx - 1].spec)
                && !self.stack[idx - 1].tt.is_sep()
            {
                return false;
            }
        } else {
            return false;
        }

        // need the functor name term plus one term per argument.
        if self.terms.len() < 1 + arity {
            return false;
        }

        let stack_len = self.stack.len() - 2 * arity - 1;
        let idx = self.terms.len() - arity;

        if TokenType::Term == self.stack[stack_len].tt
            && atomize_term(&self.lexer.machine_st.atom_tbl, &self.terms[idx - 1]).is_some()
        {
            // drop the argument/comma descriptors, keeping the functor slot.
            self.stack.truncate(stack_len + 1);

            let mut subterms: Vec<_> = self.terms.drain(idx..).collect();

            if let Some(name) = self
                .terms
                .pop()
                .and_then(|t| atomize_term(&self.lexer.machine_st.atom_tbl, &t))
            {
                // reduce the '.' functor to a cons cell if it applies.
                if name == atom!(".") && subterms.len() == 2 {
                    let tail = subterms.pop().unwrap();
                    let head = subterms.pop().unwrap();

                    self.terms.push(match as_partial_string(head, tail) {
                        Ok((string_buf, Some(tail))) => {
                            Term::PartialString(Cell::default(), string_buf, tail)
                        }
                        Ok((string_buf, None)) => {
                            let atom =
                                AtomTable::build_with(&self.lexer.machine_st.atom_tbl, &string_buf);
                            Term::CompleteString(Cell::default(), atom)
                        }
                        Err(term) => term,
                    });
                } else {
                    self.terms
                        .push(Term::Clause(Cell::default(), name, subterms));
                }

                // rewrite the functor's descriptor into a plain term.
                if let Some(&mut TokenDesc {
                    ref mut tt,
                    ref mut priority,
                    ref mut spec,
                    ref mut unfold_bounds,
                }) = self.stack.last_mut()
                {
                    // a bracketed term cannot serve as a functor name.
                    if *spec == BTERM {
                        return false;
                    }

                    *tt = TokenType::Term;
                    *priority = 0;
                    *spec = TERM;
                    *unfold_bounds = 0;
                }

                return true;
            }
        }

        false
    }
641
    /// Clears the descriptor stack.
    ///
    /// NOTE(review): `tokens` and `terms` are deliberately left alone
    /// here -- confirm callers rely on only the stack being discarded
    /// before extending this to the other buffers.
    pub fn reset(&mut self) {
        self.stack.clear()
    }
645
    /// If the operator below `index` binds tighter than the descriptor
    /// at `index`, the ','/2 clause on top of the term stack was
    /// compacted too eagerly: re-expand it into its constituent terms
    /// (bounded by the operator's `unfold_bounds` when non-zero) and
    /// return the number of extra terms pushed. Returns 0 when no
    /// expansion applies.
    fn expand_comma_compacted_terms(&mut self, index: usize) -> usize {
        if let Some(mut term) = self.terms.pop() {
            let mut op_desc = self.stack[index - 1];

            if 0 < op_desc.priority && op_desc.priority < self.stack[index].priority {
                /* '|' is a head-tail separator here, not
                 * an operator, so expand the
                 * terms it compacted out again. */
                if let (Some(atom!(",")), 2) = (term.name(), term.arity()) {
                    let terms = if op_desc.unfold_bounds == 0 {
                        // unbounded: flatten the whole comma chain.
                        unfold_by_str(term, atom!(","))
                    } else {
                        // bounded: peel off comma pairs until the bound
                        // is exhausted.
                        let mut terms = vec![];

                        while let Some((fst, snd)) = unfold_by_str_once(&mut term, atom!(",")) {
                            terms.push(fst);
                            term = snd;

                            op_desc.unfold_bounds -= 2;

                            if op_desc.unfold_bounds == 0 {
                                break;
                            }
                        }

                        terms.push(term);
                        terms
                    };

                    // one of the expanded terms replaces the popped one,
                    // so the arity gain is len() - 1.
                    let arity = terms.len() - 1;

                    self.terms.extend(terms);
                    return arity;
                }
            }

            // no expansion: restore the popped term.
            self.terms.push(term);
        }

        0
    }
687
    /// Walks the stack top-down counting the elements of a list under
    /// construction: terms in even positions (from the top), commas --
    /// or a single head-tail separator directly above the last element
    /// -- in odd positions, until the matching `[` is reached. Returns
    /// `None` when the stack does not have this shape.
    fn compute_arity_in_list(&self) -> Option<usize> {
        let mut arity = 0;

        for (i, desc) in self.stack.iter().rev().enumerate() {
            if i % 2 == 0 {
                // expect a term or non-comma operator.
                if let TokenType::Comma = desc.tt {
                    return None;
                } else if is_term!(desc.spec) || is_op!(desc.spec) {
                    arity += 1;
                } else {
                    return None;
                }
            } else if desc.tt == TokenType::HeadTailSeparator {
                // '|' is only legal directly above the tail term.
                if arity == 1 {
                    continue;
                }
                return None;
            } else if desc.tt == TokenType::OpenList {
                return Some(arity);
            } else if desc.tt != TokenType::Comma {
                return None;
            }
        }

        // ran off the bottom of the stack without finding '['.
        None
    }
715
    /// Reduces a complete list at the top of the stack into a single
    /// term when `]` is read: `[]` for an immediately closed bracket,
    /// otherwise a cons chain over the collected elements, converted
    /// to a partial/complete string whenever the elements permit.
    /// Returns `Ok(false)` when the stack holds no reducible list.
    fn reduce_list(&mut self) -> Result<bool, ParserError> {
        if self.stack.is_empty() {
            return Ok(false);
        }

        // "[]": the '[' descriptor sits directly on top.
        if let Some(ref mut td) = self.stack.last_mut() {
            if td.tt == TokenType::OpenList {
                td.spec = TERM;
                td.tt = TokenType::Term;
                td.priority = 0;

                self.terms
                    .push(Term::Literal(Cell::default(), Literal::Atom(atom!("[]"))));
                return Ok(true);
            }
        }

        // reduce all operators below the list-element priority bound.
        self.reduce_op(1000);

        let mut arity = match self.compute_arity_in_list() {
            Some(arity) => arity,
            None => return Ok(false),
        };

        // we know that self.stack.len() >= 2 by this point.
        let idx = self.stack.len() - 2;
        let list_len = self.stack.len() - 2 * arity;

        let end_term = if self.stack[idx].tt != TokenType::HeadTailSeparator {
            // proper list: terminate with '[]'.
            Term::Literal(Cell::default(), Literal::Atom(atom!("[]")))
        } else {
            // '|' present: the topmost term is the explicit tail.
            let term = match self.terms.pop() {
                Some(term) => term,
                _ => {
                    return Err(ParserError::IncompleteReduction(
                        self.lexer.line_num,
                        self.lexer.col_num,
                    ))
                }
            };

            // a comma-compacted tail must be re-expanded, since '|' is
            // a separator here rather than an operator.
            if self.stack[idx].priority > 1000 {
                arity += self.expand_comma_compacted_terms(idx);
            }

            // the tail no longer counts as an element.
            arity -= 1;

            term
        };

        if arity > self.terms.len() {
            return Err(ParserError::IncompleteReduction(
                self.lexer.line_num,
                self.lexer.col_num,
            ));
        }

        let idx = self.terms.len() - arity;

        // fold the elements back-to-front onto the tail.
        let list = self.terms.drain(idx..).rev().fold(end_term, |acc, t| {
            Term::Cons(Cell::default(), Box::new(t), Box::new(acc))
        });

        self.stack.truncate(list_len);

        self.stack.push(TokenDesc {
            tt: TokenType::Term,
            priority: 0,
            spec: TERM,
            unfold_bounds: 0,
        });

        // prefer the compact string representations when every element
        // is a character.
        self.terms.push(match list {
            Term::Cons(_, head, tail) => match as_partial_string(*head, *tail) {
                Ok((string_buf, Some(tail))) => {
                    Term::PartialString(Cell::default(), string_buf, tail)
                }
                Ok((string_buf, None)) => {
                    let atom = AtomTable::build_with(&self.lexer.machine_st.atom_tbl, &string_buf);
                    Term::CompleteString(Cell::default(), atom)
                }
                Err(term) => term,
            },
            term => term,
        });

        Ok(true)
    }
804
    /// Reduces a curly-brace term when `}` is read: the atom `{}` for
    /// an immediately closed brace, otherwise the clause `{}(Term)`
    /// wrapping the single enclosed term. Returns `Ok(false)` when the
    /// stack holds no reducible curly term.
    fn reduce_curly(&mut self) -> Result<bool, ParserError> {
        if self.stack.is_empty() {
            return Ok(false);
        }

        // "{}": the '{' descriptor sits directly on top.
        if let Some(ref mut td) = self.stack.last_mut() {
            if td.tt == TokenType::OpenCurly {
                td.tt = TokenType::Term;
                td.priority = 0;
                td.spec = TERM;

                let term = Term::Literal(Cell::default(), Literal::Atom(atom!("{}")));

                self.terms.push(term);
                return Ok(true);
            }
        }

        // reduce everything inside the braces; 1201 admits operators of
        // any standard priority.
        self.reduce_op(1201);

        if self.stack.len() > 1 {
            if let Some(td) = self.stack.pop() {
                if let Some(ref mut oc) = self.stack.last_mut() {
                    if td.tt != TokenType::Term {
                        return Ok(false);
                    }

                    if oc.tt == TokenType::OpenCurly {
                        oc.tt = TokenType::Term;
                        oc.priority = 0;
                        oc.spec = TERM;

                        let term = match self.terms.pop() {
                            Some(term) => term,
                            _ => {
                                return Err(ParserError::IncompleteReduction(
                                    self.lexer.line_num,
                                    self.lexer.col_num,
                                ))
                            }
                        };

                        self.terms
                            .push(Term::Clause(Cell::default(), atom!("{}"), vec![term]));

                        return Ok(true);
                    }
                }
            }
        }

        Ok(false)
    }
858
    /// Reduces a parenthesized term when `)` is read: the contents are
    /// fully reduced, the '(' descriptor is removed, and its slot is
    /// rewritten as a bracketed term (`BTERM`) of priority 0. Returns
    /// `false` when the stack holds no reducible bracketed term.
    fn reduce_brackets(&mut self) -> bool {
        if self.stack.is_empty() {
            return false;
        }

        // reduce everything inside the parentheses.
        self.reduce_op(1400);

        if self.stack.len() <= 1 {
            return false;
        }

        // "()" with nothing inside is not a term.
        if let Some(TokenType::Open | TokenType::OpenCT) = self.stack.last().map(|token| token.tt) {
            return false;
        }

        // remove the '(' descriptor below the enclosed item.
        let idx = self.stack.len() - 2;
        let td = self.stack.remove(idx);

        match td.tt {
            TokenType::Open | TokenType::OpenCT => {
                if self.stack[idx].tt == TokenType::Comma {
                    return false;
                }

                // a bare separator inside parens (e.g. "(|)") is read
                // as the corresponding atom.
                if let Some(atom) = self.sep_to_atom(self.stack[idx].tt) {
                    self.terms
                        .push(Term::Literal(Cell::default(), Literal::Atom(atom)));
                }

                self.stack[idx].spec = BTERM;
                self.stack[idx].tt = TokenType::Term;
                self.stack[idx].priority = 0;

                true
            }
            _ => false,
        }
    }
897
    /// If `name` is an operator in `op_dir`, reduces what its priority
    /// allows and promotes the atom with the specifier bits matching
    /// the chosen reading. When the atom is both prefix and
    /// infix/postfix, the following token and the top of the stack
    /// disambiguate. Returns `Ok(false)` when `name` is no operator,
    /// leaving the caller to shift it as a plain atom.
    fn shift_op(&mut self, name: Atom, op_dir: &CompositeOpDir) -> Result<bool, ParserError> {
        if let Some(CompositeOpDesc {
            pre,
            inf,
            post,
            spec,
        }) = get_op_desc(name, op_dir)
        {
            if (pre > 0 && inf + post > 0) || is_negate!(spec) {
                // ambiguous between prefix and infix/postfix: peek at
                // the next unread token.
                match self.tokens.last().ok_or(ParserError::unexpected_eof())? {
                    // do this when layout hasn't been inserted,
                    // ie. why we don't match on Token::Open.
                    Token::OpenCT => {
                        // can't be prefix, so either inf == 0
                        // or post == 0.
                        self.reduce_op(inf + post);

                        // let fixity = if inf > 0 { Fixity::In } else { Fixity::Post };

                        self.promote_atom_op(name, inf + post, spec & (XFX | XFY | YFX | YF | XF));
                    }
                    _ => {
                        self.reduce_op(inf + post);

                        // a completed term on top selects the
                        // infix/postfix reading; otherwise prefix.
                        if let Some(TokenDesc { spec: pspec, .. }) = self.stack.last().cloned() {
                            // rterm.c: 412
                            if is_term!(pspec) {
                                self.promote_atom_op(
                                    name,
                                    inf + post,
                                    spec & (XFX | XFY | YFX | XF | YF),
                                );
                            } else {
                                self.promote_atom_op(name, pre, spec & (FX | FY | NEGATIVE_SIGN));
                            }
                        } else {
                            self.promote_atom_op(name, pre, spec & (FX | FY | NEGATIVE_SIGN));
                        }
                    }
                }
            } else {
                self.reduce_op(pre + inf + post); // only one non-zero priority among these.
                self.promote_atom_op(name, pre + inf + post, spec);
            }

            Ok(true)
        } else {
            // not an operator.
            Ok(false)
        }
    }
949
950    fn negate_number<N, Negator, ToLiteral>(&mut self, n: N, negator: Negator, constr: ToLiteral)
951    where
952        Negator: Fn(N, &mut Arena) -> N,
953        ToLiteral: Fn(N, &mut Arena) -> Literal,
954    {
955        if let Some(desc) = self.stack.last().cloned() {
956            if let Some(term) = self.terms.last().cloned() {
957                match term {
958                    Term::Literal(_, Literal::Atom(name))
959                        if name == atom!("-")
960                            && (is_prefix!(desc.spec) || is_negate!(desc.spec)) =>
961                    {
962                        self.stack.pop();
963                        self.terms.pop();
964
965                        let arena = &mut self.lexer.machine_st.arena;
966                        let literal = constr(negator(n, arena), arena);
967
968                        self.shift(Token::Literal(literal), 0, TERM);
969
970                        return;
971                    }
972                    _ => {}
973                }
974            }
975        }
976
977        let literal = constr(n, &mut self.lexer.machine_st.arena);
978        self.shift(Token::Literal(literal), 0, TERM);
979    }
980
    /// Dispatches one token to the shift-reduce machinery: shifts it onto
    /// the parser stacks, or triggers the reduction its kind demands.
    ///
    /// Returns `Err(ParserError::IncompleteReduction)` when a closing
    /// delimiter (or the end token) finds the stacks in a state that
    /// cannot be reduced to a well-formed term.
    fn shift_token(&mut self, token: Token, op_dir: &CompositeOpDir) -> Result<(), ParserError> {
        // Negate an arena-allocated big integer: clone the value out of the
        // arena and allocate the negated result as a fresh arena cell.
        fn negate_int_rc(t: TypedArenaPtr<Integer>, arena: &mut Arena) -> TypedArenaPtr<Integer> {
            let i: Integer = (*t).clone();
            let data = i.neg();
            arena_alloc!(data, arena)
        }

        // As above, for rationals.
        fn negate_rat_rc(t: TypedArenaPtr<Rational>, arena: &mut Arena) -> TypedArenaPtr<Rational> {
            let r: Rational = (*t).clone();
            let data = r.neg();
            arena_alloc!(data, arena)
        }

        match token {
            // Numeric literals go through negate_number, which fuses a
            // directly preceding prefix "-" into the literal's sign.
            Token::Literal(Literal::Fixnum(n)) => {
                self.negate_number(n, |n, _| -n, |n, _| Literal::Fixnum(n))
            }
            Token::Literal(Literal::Integer(n)) => {
                self.negate_number(n, negate_int_rc, |n, _| Literal::Integer(n))
            }
            Token::Literal(Literal::Rational(n)) => {
                self.negate_number(n, negate_rat_rc, |r, _| Literal::Rational(r))
            }
            Token::Literal(Literal::Float(n)) => self.negate_number(
                **n.as_ptr(),
                |n, _| -n,
                |n, arena| Literal::from(float_alloc!(n, arena)),
            ),
            // Any other literal: if it atomizes to an atom that is a declared
            // operator, try the operator shift first; otherwise (or if that
            // declines) shift it as an ordinary term.
            Token::Literal(c) => {
                let atomized = atomize_constant(&self.lexer.machine_st.atom_tbl, c);

                if let Some(name) = atomized {
                    if !self.shift_op(name, op_dir)? {
                        self.shift(Token::Literal(c), 0, TERM);
                    }
                } else {
                    self.shift(Token::Literal(c), 0, TERM);
                }
            }
            Token::Var(v) => self.shift(Token::Var(v), 0, TERM),
            // Opening delimiters are shifted with priority 1300 —
            // above any operator priority — so no reduction crosses them.
            Token::Open => self.shift(Token::Open, 1300, DELIMITER),
            Token::OpenCT => self.shift(Token::OpenCT, 1300, DELIMITER),
            Token::Close => {
                // ')' closes either a functional term f(...) or a
                // parenthesized expression; try both reductions.
                if !self.reduce_term() && !self.reduce_brackets() {
                    return Err(ParserError::IncompleteReduction(
                        self.lexer.line_num,
                        self.lexer.col_num,
                    ));
                }
            }
            Token::OpenList => self.shift(Token::OpenList, 1300, DELIMITER),
            Token::CloseList => {
                if !self.reduce_list()? {
                    return Err(ParserError::IncompleteReduction(
                        self.lexer.line_num,
                        self.lexer.col_num,
                    ));
                }
            }
            Token::OpenCurly => self.shift(Token::OpenCurly, 1300, DELIMITER),
            Token::CloseCurly => {
                if !self.reduce_curly()? {
                    return Err(ParserError::IncompleteReduction(
                        self.lexer.line_num,
                        self.lexer.col_num,
                    ));
                }
            }
            Token::HeadTailSeparator => {
                /* '|' as an operator must have priority > 1000 and can only be infix.
                 * See: http://www.complang.tuwien.ac.at/ulrich/iso-prolog/dtc2#Res_A78
                 */
                let (priority, spec) = get_op_desc(atom!("|"), op_dir)
                    .map(|CompositeOpDesc { inf, spec, .. }| (inf, spec))
                    .unwrap_or((1000, DELIMITER));

                let old_stack_len = self.stack.len();

                self.reduce_op(priority);

                let new_stack_len = self.stack.len();

                // Record how many descriptors the reduction consumed so a
                // later list reduction can unfold past this separator.
                // NOTE(review): reduce_op only pops, so old >= new here —
                // the subtraction cannot underflow.
                if let Some(term_desc) = self.stack.last_mut() {
                    term_desc.unfold_bounds = old_stack_len - new_stack_len;
                }

                self.shift(Token::HeadTailSeparator, priority, spec);
            }
            Token::Comma => {
                // ',' acts as the standard right-associative infix operator
                // of priority 1000 (xfy).
                self.reduce_op(1000);
                self.shift(Token::Comma, 1000, XFY);
            }
            // The end token is an error while any delimiter or separator
            // remains unclosed on the stack.
            Token::End => match self.stack.last().map(|t| t.tt) {
                Some(TokenType::Open)
                | Some(TokenType::OpenCT)
                | Some(TokenType::OpenList)
                | Some(TokenType::OpenCurly)
                | Some(TokenType::HeadTailSeparator)
                | Some(TokenType::Comma) => {
                    return Err(ParserError::IncompleteReduction(
                        self.lexer.line_num,
                        self.lexer.col_num,
                    ))
                }
                _ => {}
            },
        }

        Ok(())
    }
1091
    #[inline]
    /// Advances the lexer's line counter by `lines_read` additional lines,
    /// e.g. to account for input consumed outside this parser.
    pub fn add_lines_read(&mut self, lines_read: usize) {
        self.lexer.line_num += lines_read;
    }
1096
    #[inline]
    /// Returns the lexer's current line counter (the total number of lines
    /// read so far, including any added via `add_lines_read`).
    pub fn lines_read(&self) -> usize {
        self.lexer.line_num
    }
1101
1102    // on success, returns the parsed term and the number of lines read.
1103    pub fn read_term(
1104        &mut self,
1105        op_dir: &CompositeOpDir,
1106        tokens: Tokens,
1107    ) -> Result<Term, ParserError> {
1108        self.tokens = match tokens {
1109            Tokens::Default => read_tokens(&mut self.lexer)?,
1110            Tokens::Provided(tokens) => tokens,
1111        };
1112
1113        while let Some(token) = self.tokens.pop() {
1114            self.shift_token(token, op_dir)?;
1115        }
1116
1117        self.reduce_op(1400);
1118
1119        if self.terms.len() > 1 || self.stack.len() > 1 {
1120            return Err(ParserError::IncompleteReduction(
1121                self.lexer.line_num,
1122                self.lexer.col_num,
1123            ));
1124        }
1125
1126        match self.terms.pop() {
1127            Some(term) => {
1128                if self.terms.is_empty() {
1129                    Ok(term)
1130                } else {
1131                    Err(ParserError::IncompleteReduction(
1132                        self.lexer.line_num,
1133                        self.lexer.col_num,
1134                    ))
1135                }
1136            }
1137            _ => Err(ParserError::IncompleteReduction(
1138                self.lexer.line_num,
1139                self.lexer.col_num,
1140            )),
1141        }
1142    }
1143}