// smpl/parser/expr_parser.rs — expression parser for the smpl language

1use std::iter::Iterator;
2
3use super::parser::{
4    block, fn_param_list, module_binding as full_module_binding,
5    type_annotation, type_arg_list, type_arg_list_post_lparen, ParseErr,
6};
7use super::error::*;
8use super::tokens::*;
9use crate::ast::*;
10use crate::span::*;
11
/// Tokens that terminate expression parsing in a given context
/// (e.g. `RParen` inside parentheses, `Comma`/`RParen` in argument
/// lists, `RBracket` in indexers, `Pipe` between `|>` segments).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Delimiter {
    RParen,
    RBracket,
    Comma,
    Semi,
    LBrace,
    Pipe,
}
21
22pub fn piped_expr(
23    tokens: &mut BufferedTokenizer,
24    delim_tokens: &[Delimiter],
25) -> ParseErr<AstNode<Expr>> {
26    let primary_base =
27        production!(parse_primary(tokens), parser_state!("piped-expr", "base"));
28    let expr_base = expr(tokens, primary_base, &delim_tokens, 0)?;
29
30    prebase_piped_expr(tokens, expr_base, delim_tokens)
31}
32
33pub fn prebase_piped_expr(
34    tokens: &mut BufferedTokenizer,
35    expr_base: AstNode<Expr>,
36    delim_tokens: &[Delimiter],
37) -> ParseErr<AstNode<Expr>> {
38    let mut delimiters = delim_tokens.to_vec();
39    delimiters.push(Delimiter::Pipe);
40
41    let mut piped_exprs = Vec::new();
42
43    while tokens.has_next()
44        && peek_token!(
45            tokens,
46            |tok| match tok {
47                Token::Pipe => true,
48                _ => false,
49            },
50            parser_state!("piped-expr", "|>?")
51        )
52    {
53        let _pipe = consume_token!(
54            tokens,
55            Token::Pipe,
56            parser_state!("piped-expr", "|>")
57        );
58
59        let primary = production!(
60            parse_primary(tokens),
61            parser_state!("piped-expr", "expr-base")
62        );
63        let expr = production!(
64            expr(tokens, primary, &delim_tokens, 0),
65            parser_state!("piped-expr", "expr")
66        );
67
68        piped_exprs.push(expr);
69    }
70
71    if piped_exprs.len() > 0 {
72        let (expr_base, eloc) = expr_base.to_data();
73        let expr_base = match expr_base {
74            Expr::FnCall(f) => f,
75
76            e @ _ => {
77                return Err(parser_error!(
78                    ParserErrorKind::InvalidPiping(e),
79                    parser_state!("prebase-piped-expr", "pipe-validation")
80                ));
81            }
82        };
83
84        let piped_exprs = piped_exprs
85            .into_iter()
86            .map(|e| {
87                let (e, _espan) = e.to_data();
88                match e {
89                    Expr::FnCall(f) => Ok(f),
90                    e @ _ => Err(parser_error!(
91                        ParserErrorKind::InvalidPiping(e),
92                        parser_state!("prebase-piped-expr", "pipe-validation")
93                    )),
94                }
95            })
96            .collect::<Result<Vec<AstNode<FnCall>>, ParserError>>()?;
97
98        let end = piped_exprs.last().unwrap().span();
99        let span = Span::combine(eloc, end);
100
101        let fn_chain = FnCallChain {
102            base: expr_base,
103            chain: piped_exprs,
104        };
105
106        let fn_chain = AstNode::new(fn_chain, span.clone());
107
108        Ok(AstNode::new(Expr::FnCallChain(fn_chain), span))
109    } else {
110        Ok(expr_base)
111    }
112}
113
114fn expr(
115    tokens: &mut BufferedTokenizer,
116    mut lhs: AstNode<Expr>,
117    delim_tokens: &[Delimiter],
118    min_precedence: u64,
119) -> ParseErr<AstNode<Expr>> {
120    enum PeekResult {
121        Execute(BinOp),
122        Break,
123    }
124
125    loop {
126        if tokens.has_next() == false {
127            return Ok(lhs);
128        }
129
130        let peek_result = peek_token!(
131            tokens,
132            |tok| {
133                if is_delim(tok, delim_tokens) {
134                    return PeekResult::Break;
135                }
136
137                let op = match get_op(tok) {
138                    Some(op) => op,
139                    None => return PeekResult::Break,
140                };
141
142                if bin_op_precedence(&op) >= min_precedence {
143                    PeekResult::Execute(op)
144                } else {
145                    PeekResult::Break
146                }
147            },
148            parser_state!("expr", "binop")
149        );
150
151        if let PeekResult::Break = peek_result {
152            break;
153        }
154
155        let (_next_span, next) = consume_token!(tokens, parser_state!("expr"));
156        let main_op = get_op(&next).unwrap();
157        let main_prec = bin_op_precedence(&main_op);
158
159        let mut rhs = production!(
160            parse_primary(tokens),
161            parser_state!("expr", "primary")
162        );
163
164        loop {
165            if tokens.has_next() == false {
166                break;
167            }
168
169            let peek_result = peek_token!(
170                tokens,
171                |tok| {
172                    // TODO: Is this delimiter check correct?
173                    if is_delim(tok, delim_tokens) {
174                        return PeekResult::Break;
175                    }
176
177                    let op = match get_op(tok) {
178                        Some(op) => op,
179                        None => return PeekResult::Break,
180                    };
181
182                    if bin_op_precedence(&op) > main_prec
183                        || (is_left_associative(&op) == false
184                            && bin_op_precedence(&op) == main_prec)
185                    {
186                        PeekResult::Execute(op)
187                    } else {
188                        PeekResult::Break
189                    }
190                },
191                parser_state!("expr", "binop")
192            );
193
194            let rhs_op_peek = match peek_result {
195                PeekResult::Execute(op) => op,
196                PeekResult::Break => break,
197            };
198
199            let rhs_op_prec = bin_op_precedence(&rhs_op_peek);
200
201            rhs = production!(
202                expr(tokens, rhs, delim_tokens, rhs_op_prec),
203                parser_state!("expr", "rhs")
204            );
205        }
206
207        let span = Span::combine(lhs.span(), rhs.span());
208
209        let bin_expr = {
210            let (lhs, _) = lhs.to_data();
211            let (rhs, _) = rhs.to_data();
212
213            BinExpr {
214                op: main_op,
215                lhs: Box::new(lhs),
216                rhs: Box::new(rhs),
217            }
218        };
219
220        lhs = AstNode::new(Expr::Bin(AstNode::new(bin_expr, span.clone())), span);
221    }
222
223    Ok(lhs)
224}
225
/// Parses a primary expression: the operand unit that binary operators
/// combine. A single peeked token selects the production — literal,
/// identifier-rooted leaf, unary operator, parenthesized expression,
/// struct init (`init`), array init (`[`), or anonymous fn (`fn`).
fn parse_primary(tokens: &mut BufferedTokenizer) -> ParseErr<AstNode<Expr>> {
    // Production chosen from one token of lookahead.
    enum PrimaryDec {
        Ident,
        Literal,
        UniExpr,
        LParen,
        Err,
        StructInit,
        ArrayInit,
        AnonFn,
    }

    match peek_token!(
        tokens,
        |tok| match tok {
            Token::Plus => PrimaryDec::UniExpr,
            Token::Minus => PrimaryDec::UniExpr,
            Token::Invert => PrimaryDec::UniExpr,

            Token::IntLiteral(_) => PrimaryDec::Literal,
            Token::FloatLiteral(_) => PrimaryDec::Literal,
            Token::BoolLiteral(_) => PrimaryDec::Literal,
            Token::StringLiteral(_) => PrimaryDec::Literal,

            Token::LParen => PrimaryDec::LParen,

            Token::Identifier(_) => PrimaryDec::Ident,

            Token::Init => PrimaryDec::StructInit,
            Token::LBracket => PrimaryDec::ArrayInit,

            Token::Fn => PrimaryDec::AnonFn,

            _ => PrimaryDec::Err,
        },
        parser_state!("parse-primary", "kind")
    ) {
        PrimaryDec::Ident => Ok(production!(
            parse_ident_leaf(tokens),
            parser_state!("parse-primary", "ident-leaf")
        )),

        PrimaryDec::UniExpr => {
            let (uspan, uop) =
                consume_token!(tokens, parser_state!("uni-expr", "uni-op"));

            let uop = match uop {
                // Unary plus is a no-op: return the operand directly,
                // producing no Uni node.
                Token::Plus => {
                    return Ok(production!(
                        parse_primary(tokens),
                        parser_state!("uni-expr", "primary")
                    ));
                }

                Token::Minus => UniOp::Negate,

                Token::Invert => UniOp::LogicalInvert,

                // Guarded by the PrimaryDec::UniExpr peek above.
                _ => unreachable!(),
            };

            let base = production!(
                parse_primary(tokens),
                parser_state!("uni-expr", "primary")
            );
            let (base, sbase) = base.to_data();

            // Span covers the operator through its operand.
            let span = Span::combine(uspan, sbase);

            let uexpr = UniExpr {
                op: uop,
                expr: Box::new(base),
            };

            Ok(AstNode::new(Expr::Uni(AstNode::new(uexpr, span.clone())), span))
        }

        PrimaryDec::Literal => {
            let (next_span, next) = tokens
                .next()
                .unwrap()
                .map_err(|e| parser_error!(e.into(), parser_state!("literal")))?
                .to_data();

            let literal = match next {
                Token::IntLiteral(i) => Literal::Int(i),
                Token::FloatLiteral(f) => Literal::Float(f),
                Token::BoolLiteral(b) => Literal::Bool(b),
                Token::StringLiteral(s) => Literal::String(s),

                // Guarded by the PrimaryDec::Literal peek above.
                _ => unreachable!(),
            };

            let span = next_span;

            Ok(AstNode::new(
                Expr::Literal(AstNode::new(literal, span.clone())),
                span,
            ))
        }

        PrimaryDec::LParen => {
            let (lspan, _) = consume_token!(
                tokens,
                Token::LParen,
                parser_state!("paren-expr", "lparen")
            );

            // Inside parens a full piped (`|>`) expression is allowed.
            let inner = production!(
                piped_expr(tokens, &[Delimiter::RParen]),
                parser_state!("paren-expr", "inner-expr")
            );

            let (rspan, _) = consume_token!(
                tokens,
                Token::RParen,
                parser_state!("paren-expr", "rparen")
            );

            // The paren-inclusive span is computed but deliberately
            // unused: the inner expression keeps its own narrower span.
            let span = LocationSpan::combine(lspan, rspan);
            let _span = span;

            Ok(inner)
        }

        PrimaryDec::StructInit => Ok(production!(
            struct_init(tokens),
            parser_state!("primary", "struct-init")
        )),

        PrimaryDec::ArrayInit => Ok(production!(
            array_init(tokens),
            parser_state!("primary", "array-init")
        )),

        PrimaryDec::AnonFn => Ok(production!(
            anonymous_fn(tokens),
            parser_state!("primary", "anonymous-fn")
        )),

        // TODO(review): an unexpected token currently panics; consider
        // returning a proper parser error instead.
        PrimaryDec::Err => unimplemented!(),
    }
}
369
/// Parses an expression leaf that begins with an identifier. The token
/// immediately after the identifier decides the shape:
/// `.` -> field-access path, `::` -> module path,
/// `(` -> fn call (possibly with type args) or type instantiation,
/// `[` -> indexing, anything else -> plain binding reference.
fn parse_ident_leaf(tokens: &mut BufferedTokenizer) -> ParseErr<AstNode<Expr>> {
    // Leaf shape chosen from one token of lookahead.
    enum IdentLeafDec {
        AccessPath,
        ModulePath,
        Singleton,
        FnCallOrTypeArgFnCall,
        Indexing,
    }

    let (base_span, base_ident) = consume_token!(tokens,
                                                 Token::Identifier(ident) => Ident(ident),
                                                 parser_state!("identifier-leaf", "root"));

    match peek_token!(
        tokens,
        |tok| match tok {
            Token::Dot => IdentLeafDec::AccessPath,
            Token::ColonColon => IdentLeafDec::ModulePath,
            Token::LParen => IdentLeafDec::FnCallOrTypeArgFnCall,
            Token::LBracket => IdentLeafDec::Indexing,
            _ => IdentLeafDec::Singleton,
        },
        parser_state!("ident-leaf", "leaf-kind")
    ) {
        IdentLeafDec::AccessPath => {
            // The identifier becomes the root segment of the path.
            let span = base_span;
            let root = PathSegment::Ident(AstNode::new(base_ident, span));
            Ok(production!(
                access_path(tokens, root),
                parser_state!("ident-leaf", "access-path")
            ))
        }
        IdentLeafDec::ModulePath => Ok(production!(
            expr_module_path(tokens, base_ident, base_span),
            parser_state!("ident-leaf", "expr-module-path")
        )),

        IdentLeafDec::FnCallOrTypeArgFnCall => {
            let (lspan, _lparen) = consume_token!(
                tokens,
                Token::LParen,
                parser_state!("fn-call", "lparen")
            );

            // `(type ...)` right after the lparen introduces explicit
            // type arguments.
            let type_args = if peek_token!(
                tokens,
                |tok| match tok {
                    Token::Type => true,
                    _ => false,
                },
                parser_state!("fn-call", "type-args?")
            ) {
                Some(production!(
                    type_arg_list_post_lparen(tokens),
                    parser_state!("fn-call", "type-args")
                ))
            } else {
                None
            };

            // Check if fn call with type args or just a type instantiation on a function
            if type_args.is_none()
                || peek_token!(
                    tokens,
                    |tok| match tok {
                        Token::LParen => true,
                        _ => false,
                    },
                    parser_state!("potential-fn-call", "arg-lparen")
                )
            {
                // No type args: the `(` already consumed opened the arg
                // list. With type args: a fresh `(...)` follows.
                let args = if type_args.is_none() {
                    production!(
                        fn_args_post_lparen(tokens, lspan),
                        parser_state!("fn-call", "fn-args")
                    )
                } else {
                    production!(
                        fn_args(tokens),
                        parser_state!("fn-call", "fn-args")
                    )
                };

                let (args, arg_span) = args.to_data();
                // Strip per-argument span wrappers; FnCall stores bare exprs.
                let args = args
                    .map(|v| v.into_iter().map(|a| a.to_data().0).collect());

                // Single-segment path naming the callee.
                let fn_path = ModulePath(vec![AstNode::new(
                    base_ident,
                    base_span.clone(),
                )]);
                let fn_path = match type_args {
                    Some(args) => TypedPath::Parameterized(fn_path, args),
                    None => TypedPath::NillArity(fn_path),
                };

                let fn_call = FnCall {
                    path: AstNode::new(fn_path, base_span.clone()),
                    args: args,
                };

                // Call node spans the callee name through its arg list.
                let span = Span::combine(base_span, arg_span);
                Ok(AstNode::new(
                    Expr::FnCall(AstNode::new(fn_call, span.clone())),
                    span,
                ))
            } else {
                // Definitely a type instantiation
                let path = ModulePath(vec![AstNode::new(
                    base_ident,
                    base_span.clone(),
                )]);
                // Safe: this branch requires type_args.is_some().
                let path = TypedPath::Parameterized(path, type_args.unwrap());

                Ok(AstNode::new(
                    Expr::Path(AstNode::new(path, base_span.clone())),
                    base_span,
                ))
            }
        }

        IdentLeafDec::Indexing => {
            let _lbracket = consume_token!(
                tokens,
                Token::LBracket,
                parser_state!("indexing-expr", "lbracket")
            );
            // The indexer is a full (possibly piped) expression ending at `]`.
            let indexer = production!(
                piped_expr(tokens, &[Delimiter::RBracket]),
                parser_state!("indexing-expr", "indexer")
            );
            let (indexer, _) = indexer.to_data();
            let (rspan, _rbracket) = consume_token!(
                tokens,
                Token::RBracket,
                parser_state!("indexing-expr", "rbracket")
            );

            if peek_token!(
                tokens,
                |tok| match tok {
                    Token::Dot => true,
                    _ => false,
                },
                parser_state!("indexing-expr", "access-path?")
            ) {
                // Access path with indexing as root
                let span = base_span;
                let root = PathSegment::Indexing(
                    AstNode::new(base_ident, span),
                    Box::new(indexer),
                );
                Ok(production!(
                    access_path(tokens, root),
                    parser_state!("access-path")
                ))
            } else {
                // Single indexing
                let binding =
                    Expr::Binding(AstNode::new(base_ident, base_span.clone()));
                let indexing = Indexing {
                    array: Box::new(binding),
                    indexer: Box::new(indexer),
                };

                // Span runs from the identifier through the `]`.
                let span = Span::combine(base_span, rspan);
                Ok(AstNode::new(
                    Expr::Indexing(AstNode::new(indexing, span.clone())),
                    span,
                ))
            }
        }
        IdentLeafDec::Singleton => {
            // Bare identifier: a variable binding reference.
            let span = base_span;
            Ok(AstNode::new(
                Expr::Binding(AstNode::new(base_ident, span.clone())),
                span,
            ))
        }
    }
}
551
552pub fn access_path(
553    tokens: &mut BufferedTokenizer,
554    root: PathSegment,
555) -> ParseErr<AstNode<Expr>> {
556    let start = match root {
557        PathSegment::Ident(ref i) => i.span(),
558        PathSegment::Indexing(ref i, _) => i.span(),
559    };
560
561    let mut end = start.clone();
562    let mut path = vec![root];
563    while tokens.has_next()
564        && peek_token!(
565            tokens,
566            |tok| match tok {
567                Token::Dot => true,
568                _ => false,
569            },
570            parser_state!("access-path", "dot?")
571        )
572    {
573        let _dot = consume_token!(
574            tokens,
575            Token::Dot,
576            parser_state!("access-path", "dot")
577        );
578        let path_segment = production!(
579            path_segment(tokens),
580            parser_state!("access-path", "path-segment")
581        );
582
583        end = match path_segment {
584            PathSegment::Ident(ref i) => i.span(),
585            PathSegment::Indexing(ref i, _) => i.span(),
586        };
587        path.push(path_segment);
588    }
589
590    let span = Span::combine(start, end);
591
592    Ok(AstNode::new(
593        Expr::FieldAccess(AstNode::new(Path(path), span.clone())),
594        span,
595    ))
596}
597
// At end of path_segment, next token should be DOT or end of path
/// Parses one segment of a field-access path: `ident` or
/// `ident[indexer]`. The leading `.` (if any) was already consumed by
/// the caller.
fn path_segment(tokens: &mut BufferedTokenizer) -> ParseErr<PathSegment> {
    // One token of lookahead decides whether indexing follows the name.
    enum SegmentDec {
        Dot,
        Indexing,
        End,
    }

    let (ispan, ident) = consume_token!(tokens,
                                        Token::Identifier(i) => Ident(i),
                                        parser_state!("path-segment", "name"));

    match peek_token!(
        tokens,
        |tok| match tok {
            Token::Dot => SegmentDec::Dot,
            Token::LBracket => SegmentDec::Indexing,
            _ => SegmentDec::End,
        },
        parser_state!("path-segment", "dot,lbracket?")
    ) {
        // A following `.` (or anything else) ends this segment; path
        // continuation is handled by the caller.
        SegmentDec::Dot => (),
        SegmentDec::End => (),

        SegmentDec::Indexing => {
            // TODO: Convert path indexing segment to use Expr, Expr form instead of Ident form
            // TODO: Allow multiple indexing

            let _lbracket = consume_token!(
                tokens,
                Token::LBracket,
                parser_state!("path-segment-indexing", "lbracket")
            );

            // The indexer is a full (possibly piped) expression ending at `]`.
            let indexer = production!(
                piped_expr(tokens, &[Delimiter::RBracket]),
                parser_state!("path-segment-indexing", "indexer")
            );
            let (indexer, _) = indexer.to_data();

            let _rbracket = consume_token!(
                tokens,
                Token::RBracket,
                parser_state!("path-segment-indexing", "rbracket")
            );

            return Ok(PathSegment::Indexing(
                AstNode::new(ident, ispan),
                Box::new(indexer),
            ));
        }
    }

    // Plain identifier segment (Dot / End cases fall through to here).
    let span = ispan;
    Ok(PathSegment::Ident(AstNode::new(ident, span)))
}
654
655pub fn fn_args(
656    tokens: &mut BufferedTokenizer,
657) -> ParseErr<AstNode<Option<Vec<AstNode<Expr>>>>> {
658    let (lspan, _) = consume_token!(
659        tokens,
660        Token::LParen,
661        parser_state!("fn-args", "lparen")
662    );
663
664    fn_args_post_lparen(tokens, lspan)
665}
666
667pub fn fn_args_post_lparen(
668    tokens: &mut BufferedTokenizer,
669    lspan: Span,
670) -> ParseErr<AstNode<Option<Vec<AstNode<Expr>>>>> {
671    let mut args: Option<Vec<AstNode<Expr>>> = None;
672
673    while peek_token!(
674        tokens,
675        |tok| match tok {
676            Token::RParen => false,
677
678            _ => true,
679        },
680        parser_state!("fn-args", "rparen?")
681    ) {
682        let arg = production!(
683            piped_expr(tokens, &[Delimiter::RParen, Delimiter::Comma]),
684            parser_state!("fn-args", "value")
685        );
686
687        match args {
688            Some(mut a) => {
689                a.push(arg);
690                args = Some(a);
691            }
692            None => args = Some(vec![arg]),
693        }
694
695        if peek_token!(
696            tokens,
697            |tok| match tok {
698                Token::Comma => true,
699                _ => false,
700            },
701            parser_state!("fn-args", "comma separator?")
702        ) {
703            let _comma = consume_token!(
704                tokens,
705                Token::Comma,
706                parser_state!("fn-args", "comma separator")
707            );
708        }
709    }
710
711    let (rspan, _) = consume_token!(
712        tokens,
713        Token::RParen,
714        parser_state!("fn-args", "rparen")
715    );
716
717    let span = LocationSpan::combine(lspan, rspan);
718
719    Ok(AstNode::new(args, span))
720}
721
/// Parses an expression rooted at a module path (`a::b::c`), where the
/// first identifier (`base`) was already consumed and at least one `::`
/// is expected to follow. The result is either a (typed) path
/// expression or a function call on that path.
pub fn expr_module_path(
    tokens: &mut BufferedTokenizer,
    base: Ident,
    base_span: LocationSpan,
) -> ParseErr<AstNode<Expr>> {
    // Assume there at least 1 '::'
    let root = AstNode::new(base, base_span.clone());
    let mut path = vec![root];
    let mut end = base_span.clone();

    // Collect `::ident` segments while they keep coming.
    while tokens.has_next()
        && peek_token!(
            tokens,
            |tok| match tok {
                Token::ColonColon => true,
                _ => false,
            },
            parser_state!("expr-module-segment", "coloncolon?")
        )
    {
        let (_cspan, _) = consume_token!(
            tokens,
            Token::ColonColon,
            parser_state!("expr-module-segment", "coloncolon")
        );
        let (ispan, ident) = consume_token!(tokens,
                                            Token::Identifier(i) => Ident(i),
                                            parser_state!("expr-module-segment", "name"));

        let span = ispan;
        end = span; // Widen path span to end of current ident

        path.push(AstNode::new(ident, end.clone()));
    }

    // End of module path
    // Check if FN call or type application
    if tokens.has_next()
        && peek_token!(
            tokens,
            |tok| match tok {
                Token::LParen => true,
                _ => false,
            },
            parser_state!("expr-module-path", "fn-call?")
        )
    {
        let (lspan, _) = consume_token!(
            tokens,
            Token::LParen,
            parser_state!("expr-fn-call-or-type-app", "lparen")
        );

        // `(type ...)` means type arguments; otherwise the lparen we
        // just consumed opened a plain argument list.
        let (path, args, args_span) = if peek_token!(
            tokens,
            |tok| match tok {
                Token::Type => true,
                _ => false,
            },
            parser_state!("expr-fn-call-or-type-app", "type?")
        ) {
            let type_args = production!(
                type_arg_list_post_lparen(tokens),
                parser_state!("expr-module-path", "type-args")
            );

            let typed_path =
                TypedPath::Parameterized(ModulePath(path), type_args);

            // Checks if function call or just a typed path
            if peek_token!(
                tokens,
                |tok| match tok {
                    Token::LParen => false,

                    _ => true,
                },
                parser_state!("expr-fn-call-or-type-app?", "fn-call-lparen")
            ) {
                // A type-app, NOT a function call
                let path_span = Span::combine(base_span, end);
                let path_expr = Expr::Path(AstNode::new(typed_path, path_span.clone()));
                return Ok(AstNode::new(path_expr, path_span));
            }

            // Function call with explicit type args: a fresh `(...)`
            // argument list follows.
            let (args, args_span) = production!(
                fn_args(tokens),
                parser_state!("expr-module-path", "fn-call")
            )
            .to_data();

            (typed_path, args, args_span)
        } else {
            // Plain call: the already-consumed lparen opened the args.
            let (args, args_span) = production!(
                fn_args_post_lparen(tokens, lspan),
                parser_state!("expr-module-path", "fn-call")
            )
            .to_data();

            (TypedPath::NillArity(ModulePath(path)), args, args_span)
        };

        let start = base_span;

        // Call node spans the path root through the closing rparen.
        let span = Span::combine(start.clone(), args_span);

        let fn_call = FnCall {
            path: AstNode::new(path, Span::combine(start.clone(), end)),
            // Strip per-argument span wrappers; FnCall stores bare exprs.
            args: args.map(|v| {
                v.into_iter().map(|e| e.to_data().0).collect::<Vec<_>>()
            }),
        };

        // TODO: FnCall chain check

        Ok(AstNode::new(
            Expr::FnCall(AstNode::new(fn_call, span.clone())),
            span,
        ))
    } else {
        // Bare module path with no call: nil-arity typed path expression.
        let span = LocationSpan::combine(base_span, end);

        let mod_access = ModulePath(path);
        let path = AstNode::new(TypedPath::NillArity(mod_access), span.clone());
        Ok(AstNode::new(Expr::Path(path), span))
    }
}
849
/// Parses a struct initializer starting at the `init` keyword:
/// `init Path (type ...)? { field: expr, ... }` for a named struct, or
/// `init { field: expr, ... }` for an anonymous one.
fn struct_init(tokens: &mut BufferedTokenizer) -> ParseErr<AstNode<Expr>> {
    let (linit, _) = consume_token!(
        tokens,
        Token::Init,
        parser_state!("struct-init", "init")
    );

    // Anything other than `{` right after `init` means a named struct.
    let (path, type_args) = if peek_token!(
        tokens,
        |tok| match tok {
            Token::LBrace => false,

            _ => true,
        },
        parser_state!("struct-init", "anonymous?")
    ) {
        // Named struct init
        let (path, _) = production!(
            full_module_binding(tokens),
            parser_state!("struct init", "struct-type")
        )
        .to_data();

        // Optional `(type ...)` application on the struct type.
        let type_args = if peek_token!(
            tokens,
            |tok| match tok {
                Token::LParen => true,
                _ => false,
            },
            parser_state!("struct-init", "type-app?")
        ) {
            Some(production!(
                type_arg_list(tokens),
                parser_state!("struct-init", "type-app")
            ))
        } else {
            None
        };

        (Some(path), type_args)
    } else {
        // Anonymous struct init
        (None, None)
    };

    let _lbrace = consume_token!(
        tokens,
        Token::LBrace,
        parser_state!("struct-init", "lbrace")
    );

    // Field list is optional: `init ... {}` is valid.
    let mut init = Vec::new();
    if peek_token!(
        tokens,
        |tok| match tok {
            Token::RBrace => false,
            _ => true,
        },
        parser_state!("struct-init", "rbrace?")
    ) {
        init = production!(
            struct_field_init_list(tokens),
            parser_state!("struct init", "field-init-list")
        );
    }

    let (lroc, _rbrace) = consume_token!(
        tokens,
        Token::RBrace,
        parser_state!("struct-init", "rbrace")
    );

    // Node spans `init` through the closing `}`.
    let span = LocationSpan::combine(linit, lroc);

    if let Some(path) = path {
        // Named struct init
        let struct_path = match type_args {
            Some(args) => TypedPath::Parameterized(path, args),

            None => TypedPath::NillArity(path),
        };

        let struct_init = StructInit {
            struct_name: struct_path,
            field_init: init,
        };

        let struct_init = AstNode::new(struct_init, span.clone());

        Ok(AstNode::new(Expr::StructInit(struct_init), span))
    } else {
        // Anonymous struct init
        let struct_init = AnonStructInit { field_init: init };

        let struct_init = AstNode::new(struct_init, span.clone());
        Ok(AstNode::new(Expr::AnonStructInit(struct_init), span))
    }
}
948
949fn struct_field_init_list(
950    tokens: &mut BufferedTokenizer,
951) -> ParseErr<Vec<(AstNode<Ident>, Box<Expr>)>> {
952    let mut list = vec![production!(
953        struct_field_init(tokens),
954        parser_state!("struct-field-init-list", "field-init")
955    )];
956
957    loop {
958        if peek_token!(
959            tokens,
960            |tok| match tok {
961                Token::Comma => true,
962                _ => false,
963            },
964            parser_state!("struct-field-init-list", "comma separator?")
965        ) {
966            let _comma = consume_token!(
967                tokens,
968                Token::Comma,
969                parser_state!("struct-field-init-list", "comma separator")
970            );
971            if peek_token!(
972                tokens,
973                |tok| match tok {
974                    Token::RBrace => false,
975                    _ => true,
976                },
977                parser_state!("struct-field-init-list", "rbrace?")
978            ) {
979                list.push(production!(
980                    struct_field_init(tokens),
981                    parser_state!("struct-field-init-list", "field-init")
982                ));
983                continue;
984            }
985        }
986
987        break;
988    }
989
990    Ok(list)
991}
992
/// Parses a single `name: expr` field initializer inside a struct
/// initializer block.
fn struct_field_init(
    tokens: &mut BufferedTokenizer,
) -> ParseErr<(AstNode<Ident>, Box<Expr>)> {
    let (iloc, ident) = consume_token!(tokens,
                                       Token::Identifier(i) => Ident(i),
                                       parser_state!("struct-field-init", "field name"));

    let _colon = consume_token!(
        tokens,
        Token::Colon,
        parser_state!("struct-field-init", "type colon")
    );

    // Field value: a plain binary expression (no `|>` piping here,
    // unlike fn args / indexers which use piped_expr).
    let field_init = production!(
        parse_primary(tokens),
        parser_state!("struct-field-init", "primary")
    );
    // NOTE(review): delimiters are Comma/RParen although fields end at
    // `}`. `expr` also stops at any non-operator token, so `}` still
    // terminates the value — confirm RParen (vs. a brace) is intended.
    let (expr, _) = production!(
        expr(
            tokens,
            field_init,
            &[Delimiter::Comma, Delimiter::RParen],
            0
        ),
        parser_state!("struct-field-init", "expr")
    )
    .to_data();

    Ok((AstNode::new(ident, iloc), Box::new(expr)))
}
1023
/// Parses an array initialization expression starting at `[`.
///
/// Two forms are supported, disambiguated by the token after the first
/// expression:
/// - init list: `[e0, e1, ...]` (comma or immediate `]`)
/// - uniform value: `[e; N]` (semicolon, then an integer size)
fn array_init(tokens: &mut BufferedTokenizer) -> ParseErr<AstNode<Expr>> {
    // Local tag for the one-token lookahead decision after the base expr.
    enum InitDec {
        SingleList,
        List,
        Value,
        Err,
    }

    let (lloc, _) = consume_token!(
        tokens,
        Token::LBracket,
        parser_state!("array-init", "lbracket")
    );

    // Every form begins with one expression (the first element, or the
    // repeated value in the `[e; N]` form).
    let base_expr = production!(
        parse_primary(tokens),
        parser_state!("array-init", "base-primary")
    );
    let (base_expr, _) = production!(
        expr(
            tokens,
            base_expr,
            &[Delimiter::Comma, Delimiter::RBracket],
            0
        ),
        parser_state!("array-init", "base-expr")
    )
    .to_data();

    let init = {
        match peek_token!(
            tokens,
            |tok| match tok {
                Token::Comma => InitDec::List,
                Token::Semi => InitDec::Value,
                Token::RBracket => InitDec::SingleList,
                _ => InitDec::Err,
            },
            parser_state!("array-init", "init kind?")
        ) {
            // `[e]` — a one-element init list.
            InitDec::SingleList => ArrayInit::InitList(vec![base_expr]),

            // `[e, ...]` — collect the rest, then put the base expr first.
            InitDec::List => {
                let mut list = production!(
                    array_init_list(tokens),
                    parser_state!("array-init", "init-list")
                );

                list.insert(0, base_expr);

                ArrayInit::InitList(list)
            }

            // `[e; N]` — uniform init: N copies of the base expr.
            InitDec::Value => {
                let _semi = consume_token!(
                    tokens,
                    Token::Semi,
                    parser_state!("uniform-array-init", "semicolon")
                );
                let (_, number) = consume_token!(tokens, 
                                                 Token::IntLiteral(i) => i,
                                                 parser_state!("uniform-array-init", "size"));

                // TODO(review): this panics on a non-positive size instead of
                // returning a parse error — should use the ParseErr path.
                if number <= 0 {
                    unimplemented!("Invalid array size: {}", number);
                }

                // Cast is safe: `number > 0` was just checked above.
                ArrayInit::Value(Box::new(base_expr), number as u64)
            }

            // TODO(review): unexpected-token should also be a ParseErr, not
            // a panic.
            InitDec::Err => unimplemented!("Unexpected token"),
        }
    };

    let (rloc, _) = consume_token!(
        tokens,
        Token::RBracket,
        parser_state!("array-init", "rbracket")
    );

    // Span covers `[` through `]`.
    let span = LocationSpan::combine(lloc, rloc);

    let array_init = AstNode::new(init, span.clone());

    Ok(AstNode::new(Expr::ArrayInit(array_init), span))
}
1110
1111fn array_init_list(tokens: &mut BufferedTokenizer) -> ParseErr<Vec<Expr>> {
1112    // First element already consumed, check for rest of list
1113    let mut list = Vec::new();
1114
1115    loop {
1116        if peek_token!(
1117            tokens,
1118            |tok| match tok {
1119                Token::Comma => true,
1120                _ => false,
1121            },
1122            parser_state!("array-init-list", "comma separator?")
1123        ) {
1124            let _comma = consume_token!(
1125                tokens,
1126                Token::Comma,
1127                parser_state!("array-init-list", "comma separator")
1128            );
1129            if peek_token!(
1130                tokens,
1131                |tok| match tok {
1132                    Token::RBracket => false,
1133                    _ => true,
1134                },
1135                parser_state!("array-init-list", "rbracket")
1136            ) {
1137                let data = production!(
1138                    parse_primary(tokens),
1139                    parser_state!("array-init-list", "item-primary")
1140                );
1141
1142                let expr = production!(
1143                    expr(
1144                        tokens,
1145                        data,
1146                        &[Delimiter::Comma, Delimiter::RBracket],
1147                        0
1148                    ),
1149                    parser_state!("array-init-list", "item-expr")
1150                )
1151                .to_data();
1152
1153                list.push(expr.0);
1154                continue;
1155            }
1156        }
1157
1158        break;
1159    }
1160
1161    Ok(list)
1162}
1163
1164fn anonymous_fn(tokens: &mut BufferedTokenizer) -> ParseErr<AstNode<Expr>> {
1165    let (fnloc, _) =
1166        consume_token!(tokens, Token::Fn, parser_state!("anonymous-fn", "fn"));
1167
1168    let _lparen = consume_token!(
1169        tokens,
1170        Token::LParen,
1171        parser_state!("anonymous-fn", "param lparen")
1172    );
1173
1174    let params = if peek_token!(
1175        tokens,
1176        |tok| match tok {
1177            Token::RParen => false,
1178            _ => true,
1179        },
1180        parser_state!("anonymous-fn", "param rparen?")
1181    ) {
1182        Some(production!(
1183            fn_param_list(tokens),
1184            parser_state!("anonymous-fn", "fn-parameters")
1185        ))
1186    } else {
1187        None
1188    };
1189
1190    let (_rloc, _) = consume_token!(
1191        tokens,
1192        Token::RParen,
1193        parser_state!("anonymous-fn", "param rparen")
1194    );
1195
1196    let mut return_type = None;
1197    if peek_token!(
1198        tokens,
1199        |tok| match tok {
1200            Token::Arrow => true,
1201            _ => false,
1202        },
1203        parser_state!("anonymous-fn", "return type arrow?")
1204    ) {
1205        let _arrow = consume_token!(
1206            tokens,
1207            Token::Arrow,
1208            parser_state!("anonymous-fn", "return type arrow")
1209        );
1210        return_type = Some(production!(
1211            type_annotation(tokens),
1212            parser_state!("anonymous-fn", "return type")
1213        ));
1214    }
1215
1216    let body =
1217        production!(block(tokens), parser_state!("anonymous-fn", "body"));
1218
1219    let span = Span::combine(fnloc, body.span());
1220
1221    let anon = AnonymousFn {
1222        params: params,
1223        return_type: return_type,
1224        body: body,
1225    };
1226
1227    let anon = AstNode::new(anon, span.clone());
1228
1229    Ok(AstNode::new(Expr::AnonymousFn(anon), span))
1230}
1231
1232fn is_delim(token: &Token, delim: &[Delimiter]) -> bool {
1233    let token = match token {
1234        Token::RParen => Delimiter::RParen,
1235        Token::RBracket => Delimiter::RBracket,
1236        Token::Comma => Delimiter::Comma,
1237        Token::Semi => Delimiter::Semi,
1238        Token::LBrace => Delimiter::LBrace,
1239        Token::Pipe => Delimiter::Pipe,
1240
1241        _ => return false,
1242    };
1243
1244    delim.contains(&token)
1245}
1246
1247fn get_op(token: &Token) -> Option<BinOp> {
1248    use self::Token::*;
1249    match token {
1250        Plus => Some(BinOp::Add),
1251        Minus => Some(BinOp::Sub),
1252        Star => Some(BinOp::Mul),
1253        Slash => Some(BinOp::Div),
1254        Percent => Some(BinOp::Mod),
1255
1256        Gte => Some(BinOp::GreaterEq),
1257        Gt => Some(BinOp::Greater),
1258        Lte => Some(BinOp::LesserEq),
1259        Lt => Some(BinOp::Lesser),
1260
1261        LAnd => Some(BinOp::LogicalAnd),
1262        LOr => Some(BinOp::LogicalOr),
1263
1264        Eq => Some(BinOp::Eq),
1265        NEq => Some(BinOp::InEq),
1266
1267        _ => None,
1268    }
1269}
1270
1271fn bin_op_precedence(op: &BinOp) -> u64 {
1272    // Precedence based off of Clang precedence table
1273    // OperatorPrecedence.h
1274    use self::BinOp::*;
1275    match op {
1276        Add => 13,
1277        Sub => 13,
1278        Mul => 14,
1279        Div => 14,
1280        Mod => 14,
1281
1282        LogicalAnd => 4,
1283        LogicalOr => 4,
1284        GreaterEq => 10,
1285        LesserEq => 10,
1286        Greater => 10,
1287        Lesser => 10,
1288        Eq => 9,
1289        InEq => 9,
1290    }
1291}
1292
1293fn is_left_associative(op: &BinOp) -> bool {
1294    use self::BinOp::*;
1295    match op {
1296        Add => true,
1297        Sub => true,
1298        Mul => true,
1299        Div => true,
1300        Mod => true,
1301
1302        LogicalAnd => true,
1303        LogicalOr => true,
1304        GreaterEq => true,
1305        LesserEq => true,
1306        Greater => true,
1307        Lesser => true,
1308        Eq => true,
1309        InEq => true,
1310    }
1311}