// sxd_xpath_visitor/parser.rs

use snafu::{ensure, OptionExt, ResultExt, Snafu};
use std::iter::Peekable;

use crate::axis::{Axis, AxisLike, PrincipalNodeType};
use crate::expression::{self, SubExpression};
use crate::node_test::{self, SubNodeTest};
use crate::token::{AxisName, NodeTestName, Token};
use crate::tokenizer::{self, TokenResult};
use crate::Value;
10
/// Parses a stream of XPath tokens into an executable expression tree.
#[allow(missing_copy_implementations)]
pub struct Parser;

impl Parser {
    /// Creates a new parser.
    pub fn new() -> Parser {
        Parser
    }
}

// Clippy's `new_without_default`: a no-argument `new` should be mirrored
// by `Default` so the type works with derive(Default) containers, etc.
impl Default for Parser {
    fn default() -> Parser {
        Parser::new()
    }
}
19
/// Failures that can occur while parsing a token stream into an expression.
#[derive(Debug, Snafu, Clone, PartialEq)]
#[cfg_attr(test, snafu(visibility(pub(crate))))]
pub enum Error {
    /// XPath was empty
    NoXPath,
    /// empty predicate
    EmptyPredicate,
    /// extra unparsed tokens
    ExtraUnparsedTokens,
    /// ran out of input
    RanOutOfInput,
    /// right hand side of expression is missing
    RightHandSideExpressionMissing,
    /// function argument is missing
    ArgumentMissing,
    // Wraps a lower-level failure reported by the tokenizer.
    #[snafu(display("tokenizer error: {}", source))]
    Tokenizer { source: tokenizer::Error },
    /// trailing slash
    TrailingSlash,
    // Carries the offending token so diagnostics can show it.
    #[snafu(display("unexpected token: {:?}", token))]
    UnexpectedToken { token: Token },
}
42
/// Outcome of a parse step; `Ok(None)` means "this production did not match".
pub type ParseResult<T = Option<SubExpression>> = Result<T, Error>;

// Combines two parsed operands into one binary expression node.
type BinaryExpressionBuilder = fn(SubExpression, SubExpression) -> SubExpression;

// One infix operator: the token that introduces it plus the constructor
// for the resulting expression.
struct BinaryRule {
    token: Token,
    builder: BinaryExpressionBuilder,
}

// Parses chains of same-precedence infix operators, associating left.
struct LeftAssociativeBinaryParser {
    rules: Vec<BinaryRule>,
}

// Every parse function pulls tokens from a shared peekable stream.
type TokenSource<'a, I> = &'a mut Peekable<I>;
57
/// Convenience queries over a peekable token stream.
trait XCompat {
    // True if any item (even a tokenizer error) remains.
    fn has_more_tokens(&mut self) -> bool;
    // True if the next item tokenized successfully and equals `token`.
    fn next_token_is(&mut self, token: &Token) -> bool;
    // Advances past `token`; errors if the stream is exhausted, the
    // tokenizer failed, or a different token appears.
    fn consume(&mut self, token: &Token) -> Result<(), Error>;
}
63
64impl<I> XCompat for Peekable<I>
65where
66    I: Iterator<Item = TokenResult>,
67{
68    fn has_more_tokens(&mut self) -> bool {
69        self.peek().is_some()
70    }
71
72    fn next_token_is(&mut self, token: &Token) -> bool {
73        match self.peek() {
74            Some(&Ok(ref t)) => t == token,
75            _ => false,
76        }
77    }
78
79    fn consume(&mut self, token: &Token) -> Result<(), Error> {
80        let x = self.next().context(RanOutOfInput)?.context(Tokenizer)?;
81        ensure!(&x == token, UnexpectedToken { token: x });
82        Ok(())
83    }
84}
85
/// Similar to `consume`, but can be used when the token carries a
/// single value.
macro_rules! consume_value(
    ($source:expr, Token::$token:ident) => ({
        // Pull the next token, surfacing exhaustion and tokenizer errors.
        let next = $source.next().context(RanOutOfInput)?.context(Tokenizer)?;

        match next {
            // Matched variant: yield its payload as the macro's value.
            Token::$token(x) => x,
            // Any other token aborts the enclosing function.
            token => return UnexpectedToken { token }.fail(),
        }
    });
);
98
/// Similar to `next_token_is`, but can be used when the token carries
/// a single value
macro_rules! next_token_is(
    ($source:expr, Token::$token:ident) => (
        // Peek without consuming; tokenizer errors and other tokens are "no".
        match $source.peek() {
            Some(&Ok(Token::$token(_))) => true,
            _ => false,
        }
    );
);
109
110impl LeftAssociativeBinaryParser {
111    fn new(rules: Vec<BinaryRule>) -> LeftAssociativeBinaryParser {
112        LeftAssociativeBinaryParser { rules }
113    }
114
115    fn parse<F, I>(&self, source: TokenSource<'_, I>, child_parse: F) -> ParseResult
116    where
117        F: Fn(TokenSource<'_, I>) -> ParseResult,
118        I: Iterator<Item = TokenResult>,
119    {
120        let left = child_parse(source)?;
121
122        let mut left = match left {
123            None => return Ok(None),
124            Some(x) => x,
125        };
126
127        while source.has_more_tokens() {
128            let mut found = false;
129
130            for rule in &self.rules {
131                if source.next_token_is(&rule.token) {
132                    source.consume(&rule.token)?;
133
134                    let right = child_parse(source)?.context(RightHandSideExpressionMissing)?;
135
136                    left = (rule.builder)(left, right);
137
138                    found = true;
139                    break;
140                }
141            }
142
143            if !found {
144                break;
145            }
146        }
147
148        Ok(Some(left))
149    }
150}
151
152type Rule<'a, I> = dyn Fn(TokenSource<'_, I>) -> ParseResult + 'a;
153fn first_matching_rule<I>(child_parses: &[&Rule<'_, I>], source: TokenSource<'_, I>) -> ParseResult
154where
155    I: Iterator<Item = TokenResult>,
156{
157    for child_parse in child_parses.iter() {
158        let expr = (*child_parse)(source)?;
159        if expr.is_some() {
160            return Ok(expr);
161        }
162    }
163
164    Ok(None)
165}
166
167impl Parser {
168    fn parse_axis<I>(&self, source: TokenSource<'_, I>) -> Result<Axis, Error>
169    where
170        I: Iterator<Item = TokenResult>,
171    {
172        if next_token_is!(source, Token::Axis) {
173            let name = consume_value!(source, Token::Axis);
174
175            match name {
176                AxisName::Child => Ok(Axis::Child),
177                AxisName::SelfAxis => Ok(Axis::SelfAxis),
178                AxisName::Parent => Ok(Axis::Parent),
179                AxisName::Descendant => Ok(Axis::Descendant),
180                AxisName::DescendantOrSelf => Ok(Axis::DescendantOrSelf),
181                AxisName::Attribute => Ok(Axis::Attribute),
182                AxisName::Namespace => Ok(Axis::Namespace),
183                AxisName::Ancestor => Ok(Axis::Ancestor),
184                AxisName::AncestorOrSelf => Ok(Axis::AncestorOrSelf),
185                AxisName::PrecedingSibling => Ok(Axis::PrecedingSibling),
186                AxisName::FollowingSibling => Ok(Axis::FollowingSibling),
187                AxisName::Preceding => Ok(Axis::Preceding),
188                AxisName::Following => Ok(Axis::Following),
189            }
190        } else {
191            Ok(Axis::Child)
192        }
193    }
194
195    fn parse_node_test<I>(&self, source: TokenSource<'_, I>) -> Result<Option<SubNodeTest>, Error>
196    where
197        I: Iterator<Item = TokenResult>,
198    {
199        if next_token_is!(source, Token::NodeTest) {
200            let name = consume_value!(source, Token::NodeTest);
201
202            match name {
203                NodeTestName::Node => Ok(Some(Box::new(node_test::Node))),
204                NodeTestName::Text => Ok(Some(Box::new(node_test::Text))),
205                NodeTestName::Comment => Ok(Some(Box::new(node_test::Comment))),
206                NodeTestName::ProcessingInstruction(target) => Ok(Some(Box::new(
207                    node_test::ProcessingInstruction::new(target),
208                ))),
209            }
210        } else {
211            Ok(None)
212        }
213    }
214
215    fn default_node_test<I>(
216        &self,
217        source: TokenSource<'_, I>,
218        axis: Axis,
219    ) -> Result<Option<SubNodeTest>, Error>
220    where
221        I: Iterator<Item = TokenResult>,
222    {
223        if next_token_is!(source, Token::NameTest) {
224            let name = consume_value!(source, Token::NameTest);
225
226            let test: SubNodeTest = match axis.principal_node_type() {
227                PrincipalNodeType::Attribute => Box::new(node_test::Attribute::new(name)),
228                PrincipalNodeType::Element => Box::new(node_test::Element::new(name)),
229                PrincipalNodeType::Namespace => Box::new(node_test::Namespace::new(name)),
230            };
231
232            Ok(Some(test))
233        } else {
234            Ok(None)
235        }
236    }
237
238    fn parse_nested_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
239    where
240        I: Iterator<Item = TokenResult>,
241    {
242        if source.next_token_is(&Token::LeftParen) {
243            source.consume(&Token::LeftParen)?;
244            let result = self.parse_expression(source)?;
245            source.consume(&Token::RightParen)?;
246            Ok(result)
247        } else {
248            Ok(None)
249        }
250    }
251
252    fn parse_variable_reference<I>(&self, source: TokenSource<'_, I>) -> ParseResult
253    where
254        I: Iterator<Item = TokenResult>,
255    {
256        if next_token_is!(source, Token::Variable) {
257            let name = consume_value!(source, Token::Variable);
258            Ok(Some(Box::new(expression::Variable { name })))
259        } else {
260            Ok(None)
261        }
262    }
263
264    fn parse_string_literal<I>(&self, source: TokenSource<'_, I>) -> ParseResult
265    where
266        I: Iterator<Item = TokenResult>,
267    {
268        if next_token_is!(source, Token::Literal) {
269            let value = consume_value!(source, Token::Literal);
270            Ok(Some(Box::new(expression::Literal::from(Value::String(
271                value,
272            )))))
273        } else {
274            Ok(None)
275        }
276    }
277
278    fn parse_numeric_literal<I>(&self, source: TokenSource<'_, I>) -> ParseResult
279    where
280        I: Iterator<Item = TokenResult>,
281    {
282        if next_token_is!(source, Token::Number) {
283            let value = consume_value!(source, Token::Number);
284            Ok(Some(Box::new(expression::Literal::from(Value::Number(
285                value,
286            )))))
287        } else {
288            Ok(None)
289        }
290    }
291
292    fn parse_function_args_tail<I>(
293        &self,
294        source: TokenSource<'_, I>,
295        mut arguments: Vec<SubExpression>,
296    ) -> Result<Vec<SubExpression>, Error>
297    where
298        I: Iterator<Item = TokenResult>,
299    {
300        while source.next_token_is(&Token::Comma) {
301            source.consume(&Token::Comma)?;
302
303            let arg = self.parse_expression(source)?.context(ArgumentMissing)?;
304            arguments.push(arg);
305        }
306
307        Ok(arguments)
308    }
309
310    fn parse_function_args<I>(
311        &self,
312        source: TokenSource<'_, I>,
313    ) -> Result<Vec<SubExpression>, Error>
314    where
315        I: Iterator<Item = TokenResult>,
316    {
317        let mut arguments = Vec::new();
318
319        match self.parse_expression(source)? {
320            Some(arg) => arguments.push(arg),
321            None => return Ok(arguments),
322        }
323
324        self.parse_function_args_tail(source, arguments)
325    }
326
327    fn parse_function_call<I>(&self, source: TokenSource<'_, I>) -> ParseResult
328    where
329        I: Iterator<Item = TokenResult>,
330    {
331        if next_token_is!(source, Token::Function) {
332            let name = consume_value!(source, Token::Function);
333
334            source.consume(&Token::LeftParen)?;
335            let arguments = self.parse_function_args(source)?;
336            source.consume(&Token::RightParen)?;
337
338            Ok(Some(Box::new(expression::Function { name, arguments })))
339        } else {
340            Ok(None)
341        }
342    }
343
    /// Parses a primary expression: a variable reference, parenthesized
    /// expression, string literal, number literal, or function call.
    fn parse_primary_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        // Alternatives are attempted in this order; the first match wins.
        let rules: &[&Rule<'_, I>] = &[
            &|src: TokenSource<'_, I>| self.parse_variable_reference(src),
            &|src: TokenSource<'_, I>| self.parse_nested_expression(src),
            &|src: TokenSource<'_, I>| self.parse_string_literal(src),
            &|src: TokenSource<'_, I>| self.parse_numeric_literal(src),
            &|src: TokenSource<'_, I>| self.parse_function_call(src),
        ];

        first_matching_rule(rules, source)
    }
358
359    fn parse_predicate_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
360    where
361        I: Iterator<Item = TokenResult>,
362    {
363        if source.next_token_is(&Token::LeftBracket) {
364            source.consume(&Token::LeftBracket)?;
365
366            let predicate = self.parse_expression(source)?.context(EmptyPredicate)?;
367            source.consume(&Token::RightBracket)?;
368            Ok(Some(predicate))
369        } else {
370            Ok(None)
371        }
372    }
373
    /// Parses zero or more `[expr]` predicates.
    fn parse_predicates<I>(&self, source: TokenSource<'_, I>) -> Result<Vec<SubExpression>, Error>
    where
        I: Iterator<Item = TokenResult>,
    {
        let mut predicates = Vec::new();

        // Keep collecting until no further `[` follows.
        while let Some(predicate) = self.parse_predicate_expression(source)? {
            predicates.push(predicate)
        }

        Ok(predicates)
    }
386
387    fn parse_step<I>(&self, source: TokenSource<'_, I>) -> Result<Option<expression::Step>, Error>
388    where
389        I: Iterator<Item = TokenResult>,
390    {
391        let axis = self.parse_axis(source)?;
392
393        let node_test = match self.parse_node_test(source)? {
394            Some(test) => Some(test),
395            None => self.default_node_test(source, axis)?,
396        };
397
398        let node_test = match node_test {
399            Some(test) => test,
400            None => return Ok(None),
401        };
402
403        let predicates = self.parse_predicates(source)?;
404
405        Ok(Some(expression::Step::new(axis, node_test, predicates)))
406    }
407
408    fn parse_relative_location_path_raw<I>(
409        &self,
410        source: TokenSource<'_, I>,
411        start_point: SubExpression,
412    ) -> ParseResult
413    where
414        I: Iterator<Item = TokenResult>,
415    {
416        match self.parse_step(source)? {
417            Some(step) => {
418                let mut steps = vec![step];
419
420                while source.next_token_is(&Token::Slash) {
421                    source.consume(&Token::Slash)?;
422
423                    let next = self.parse_step(source)?.context(TrailingSlash)?;
424                    steps.push(next);
425                }
426
427                Ok(Some(expression::Path::new(start_point, steps)))
428            }
429            None => Ok(None),
430        }
431    }
432
433    fn parse_relative_location_path<I>(&self, source: TokenSource<'_, I>) -> ParseResult
434    where
435        I: Iterator<Item = TokenResult>,
436    {
437        let start_point = Box::new(expression::ContextNode);
438        self.parse_relative_location_path_raw(source, start_point)
439    }
440
441    fn parse_absolute_location_path<I>(&self, source: TokenSource<'_, I>) -> ParseResult
442    where
443        I: Iterator<Item = TokenResult>,
444    {
445        if source.next_token_is(&Token::Slash) {
446            source.consume(&Token::Slash)?;
447
448            let start_point = Box::new(expression::RootNode);
449            match self.parse_relative_location_path_raw(source, start_point)? {
450                Some(expr) => Ok(Some(expr)),
451                None => Ok(Some(Box::new(expression::RootNode))),
452            }
453        } else {
454            Ok(None)
455        }
456    }
457
    /// Parses a location path, trying the relative form first, then the
    /// absolute form.
    fn parse_location_path<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules: &[&Rule<'_, I>] = &[
            &|source: TokenSource<'_, I>| self.parse_relative_location_path(source),
            &|source: TokenSource<'_, I>| self.parse_absolute_location_path(source),
        ];

        first_matching_rule(rules, source)
    }
469
470    fn parse_filter_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
471    where
472        I: Iterator<Item = TokenResult>,
473    {
474        match self.parse_primary_expression(source)? {
475            Some(expr) => {
476                let predicates = self.parse_predicates(source)?;
477
478                Ok(Some(predicates.into_iter().fold(expr, |expr, pred| {
479                    expression::Filter::new(expr, pred)
480                })))
481            }
482            None => Ok(None),
483        }
484    }
485
    /// Parses a path expression: either a location path, or a filter
    /// expression optionally continued by `/` and a relative path.
    fn parse_path_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let expr = self.parse_location_path(source)?;
        if expr.is_some() {
            return Ok(expr);
        } // TODO: investigate if this is a pattern

        match self.parse_filter_expression(source)? {
            Some(expr) => {
                if source.next_token_is(&Token::Slash) {
                    source.consume(&Token::Slash)?;

                    // `filter/` must be followed by at least one step.
                    let expr = self
                        .parse_relative_location_path_raw(source, expr)?
                        .context(TrailingSlash)?;
                    Ok(Some(expr))
                } else {
                    Ok(Some(expr))
                }
            }
            None => Ok(None),
        }
    }
511
    /// Parses `path | path | ...` union expressions.
    fn parse_union_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![BinaryRule {
            token: Token::Pipe,
            builder: expression::Union::new,
        }];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_path_expression(source))
    }
524
525    fn parse_unary_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
526    where
527        I: Iterator<Item = TokenResult>,
528    {
529        let expr = self.parse_union_expression(source)?;
530        if expr.is_some() {
531            return Ok(expr);
532        }
533
534        if source.next_token_is(&Token::MinusSign) {
535            source.consume(&Token::MinusSign)?;
536
537            let expression = self
538                .parse_unary_expression(source)?
539                .context(RightHandSideExpressionMissing)?;
540            let expression: SubExpression = Box::new(expression::Negation { expression });
541            Ok(Some(expression))
542        } else {
543            Ok(None)
544        }
545    }
546
    /// Parses `* | div | mod` chains over unary expressions.
    fn parse_multiplicative_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![
            BinaryRule {
                token: Token::Multiply,
                builder: expression::Math::multiplication,
            },
            BinaryRule {
                token: Token::Divide,
                builder: expression::Math::division,
            },
            BinaryRule {
                token: Token::Remainder,
                builder: expression::Math::remainder,
            },
        ];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_unary_expression(source))
    }

    /// Parses `+ | -` chains over multiplicative expressions.
    fn parse_additive_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![
            BinaryRule {
                token: Token::PlusSign,
                builder: expression::Math::addition,
            },
            BinaryRule {
                token: Token::MinusSign,
                builder: expression::Math::subtraction,
            },
        ];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| {
            self.parse_multiplicative_expression(source)
        })
    }

    /// Parses `< | <= | > | >=` chains over additive expressions.
    fn parse_relational_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![
            BinaryRule {
                token: Token::LessThan,
                builder: expression::Relational::less_than,
            },
            BinaryRule {
                token: Token::LessThanOrEqual,
                builder: expression::Relational::less_than_or_equal,
            },
            BinaryRule {
                token: Token::GreaterThan,
                builder: expression::Relational::greater_than,
            },
            BinaryRule {
                token: Token::GreaterThanOrEqual,
                builder: expression::Relational::greater_than_or_equal,
            },
        ];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_additive_expression(source))
    }

    /// Parses `= | !=` chains over relational expressions.
    fn parse_equality_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![
            BinaryRule {
                token: Token::Equal,
                builder: expression::Equal::new,
            },
            BinaryRule {
                token: Token::NotEqual,
                builder: expression::NotEqual::new,
            },
        ];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_relational_expression(source))
    }

    /// Parses `and` chains over equality expressions.
    fn parse_and_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![BinaryRule {
            token: Token::And,
            builder: expression::And::new,
        }];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_equality_expression(source))
    }

    /// Parses `or` chains over `and` expressions — the lowest-precedence
    /// level of the grammar.
    fn parse_or_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        let rules = vec![BinaryRule {
            token: Token::Or,
            builder: expression::Or::new,
        }];

        let parser = LeftAssociativeBinaryParser::new(rules);
        parser.parse(source, |source| self.parse_and_expression(source))
    }
662
    /// Entry point of the expression grammar; delegates to the
    /// lowest-precedence level (`or`).
    fn parse_expression<I>(&self, source: TokenSource<'_, I>) -> ParseResult
    where
        I: Iterator<Item = TokenResult>,
    {
        self.parse_or_expression(source)
    }
669
670    pub fn parse<I>(&self, source: I) -> ParseResult<SubExpression>
671    where
672        I: Iterator<Item = TokenResult>,
673    {
674        let mut source = source.peekable();
675
676        let expr = self.parse_or_expression(&mut source)?;
677
678        ensure!(!source.has_more_tokens(), ExtraUnparsedTokens);
679
680        let expr = expr.context(NoXPath)?;
681
682        Ok(expr)
683    }
684}
685
686#[cfg(test)]
687mod test {
688    use snafu::ResultExt;
689    use std::borrow::ToOwned;
690    use sxd_document::dom::{self, Document, Element, Root, Text};
691    use sxd_document::Package;
692
693    use crate::context::{self, Context};
694    use crate::expression::{Expression, SubExpression};
695    use crate::node_test;
696    use crate::nodeset::Node;
697    use crate::token::{AxisName, NodeTestName, Token};
698    use crate::tokenizer::{self, TokenResult};
699    use crate::Value;
700    use crate::Value::{Boolean, Number, String};
701
702    use super::*;
703
    // Builds a `Vec<TokenResult>` where every entry is `Ok(token)`.
    macro_rules! tokens(
        ($($e:expr),*) => ({
            // leading _ to allow empty construction without a warning.
            let mut _temp: Vec<TokenResult> = ::std::vec::Vec::new();
            $(_temp.push(Ok($e));)*
            _temp
        });
        // Accept (and drop) a trailing comma.
        ($($e:expr),+,) => (tokens!($($e),+))
    );
713
714    fn name_test(local_part: &str) -> Token {
715        Token::NameTest(node_test::NameTest {
716            prefix: None,
717            local_part: local_part.to_owned(),
718        })
719    }
720
    // Fuzzy equality used to compare floating-point evaluation results.
    trait ApproxEq {
        fn is_approx_eq(&self, other: &Self) -> bool;
    }

    impl ApproxEq for f64 {
        fn is_approx_eq(&self, other: &f64) -> bool {
            // Absolute-difference tolerance; adequate for the magnitudes
            // exercised by these tests.
            (*self - *other).abs() < 1.0e-6
        }
    }

    impl<'d> ApproxEq for Value<'d> {
        fn is_approx_eq(&self, other: &Value<'d>) -> bool {
            match (self, other) {
                // Only numeric values support approximate comparison.
                (&Number(ref x), &Number(ref y)) => x.is_approx_eq(y),
                _ => panic!("It's nonsensical to compare these quantities"),
            }
        }
    }
739
    // Asserts two values are approximately equal (via `ApproxEq`),
    // printing both on failure.
    macro_rules! assert_approx_eq(
        ($a:expr, $b:expr) => ({
            let (a, b) = (&$a, &$b);
            assert!(a.is_approx_eq(b),
                    "{:?} is not approximately equal to {:?}", *a, *b);
        })
    );
747
    // Wraps a `Document` with helpers for building small test trees.
    struct TestDoc<'d>(Document<'d>);

    impl<'d> TestDoc<'d> {
        // The document root.
        fn root(&'d self) -> Root<'d> {
            let &TestDoc(ref doc) = self;
            doc.root()
        }

        // The single top-level element, created lazily on first use.
        fn top_node(&'d self) -> Element<'d> {
            let &TestDoc(ref doc) = self;

            let kids = doc.root().children();
            match kids.len() {
                0 => {
                    // No top element yet: create a well-known one.
                    let n = doc.create_element("the-top-node");
                    doc.root().append_child(n);
                    n
                }
                1 => kids[0].element().expect("not an element"),
                _ => panic!("Too many top nodes"),
            }
        }

        // Adds an element child beneath the top node.
        fn add_top_child(&'d self, name: &str) -> Element<'d> {
            self.add_child(self.top_node(), name)
        }

        // Adds an element child beneath `parent`.
        fn add_child(&'d self, parent: Element<'d>, name: &str) -> Element<'d> {
            let &TestDoc(ref doc) = self;

            let n = doc.create_element(name);
            parent.append_child(n);
            n
        }

        // Adds a text node beneath `parent`.
        fn add_text(&'d self, parent: Element<'d>, value: &str) -> Text<'d> {
            let &TestDoc(ref doc) = self;

            let tn = doc.create_text(value);
            parent.append_child(tn);
            tn
        }

        // Adds a comment node beneath `parent`.
        fn add_comment(&'d self, parent: Element<'d>, value: &str) -> dom::Comment<'d> {
            let cn = self.0.create_comment(value);
            parent.append_child(cn);
            cn
        }

        // Adds a processing-instruction node beneath `parent`.
        fn add_processing_instruction(
            &'d self,
            parent: Element<'d>,
            name: &str,
            value: Option<&str>,
        ) -> dom::ProcessingInstruction<'d> {
            let pi = self.0.create_processing_instruction(name, value);
            parent.append_child(pi);
            pi
        }
    }
808
    // Bundles a document, an evaluation context, and a parser for tests.
    struct Exercise<'d> {
        doc: &'d TestDoc<'d>,
        context: Context<'d>,
        parser: Parser,
    }

    impl<'d> Exercise<'d> {
        fn new(doc: &'d TestDoc<'d>) -> Exercise<'d> {
            Exercise {
                doc,
                context: Context::new(),
                parser: Parser::new(),
            }
        }

        // Parses tokens, returning the raw result for error assertions.
        fn parse_raw(&self, tokens: Vec<TokenResult>) -> ParseResult<SubExpression> {
            self.parser.parse(tokens.into_iter())
        }

        // Parses tokens, panicking on failure.
        fn parse(&self, tokens: Vec<TokenResult>) -> SubExpression {
            self.parse_raw(tokens).expect("Unable to parse expression")
        }

        // Evaluates `expr` with the top node as the context node.
        fn evaluate<E>(&self, expr: E) -> Value<'d>
        where
            E: Expression,
        {
            self.evaluate_on(expr, self.doc.top_node())
        }

        // Evaluates `expr` with `node` as the context node.
        fn evaluate_on<E, N>(&self, expr: E, node: N) -> Value<'d>
        where
            E: Expression,
            N: Into<Node<'d>>,
        {
            let context = context::Evaluation::new(&self.context, node.into());
            expr.evaluate(&context)
                .expect("Unable to evaluate expression")
        }
    }
849
    // A bare name test defaults to the child axis.
    #[test]
    fn parses_string_as_child() {
        let tokens = tokens![name_test("hello")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let hello = doc.add_top_child("hello");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![hello], ex.evaluate_on(expr, doc.top_node()));
    }

    // `a/b` selects grandchildren via two chained child steps.
    #[test]
    fn parses_two_strings_as_grandchild() {
        let tokens = tokens![name_test("hello"), Token::Slash, name_test("world")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let hello = doc.add_top_child("hello");
        let world = doc.add_child(hello, "world");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![world], ex.evaluate_on(expr, doc.top_node()));
    }

    // `self::name` selects the context node itself when the name matches.
    #[test]
    fn parses_self_axis() {
        let tokens = tokens![Token::Axis(AxisName::SelfAxis), name_test("the-top-node")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(
            nodeset![doc.top_node()],
            ex.evaluate_on(expr, doc.top_node())
        );
    }

    // `parent::name` selects the matching parent of the context node.
    #[test]
    fn parses_parent_axis() {
        let tokens = tokens![Token::Axis(AxisName::Parent), name_test("the-top-node")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let hello = doc.add_top_child("hello");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![doc.top_node()], ex.evaluate_on(expr, hello));
    }

    // `child::*` selects all element children.
    #[test]
    fn parses_child_axis() {
        let tokens = tokens![Token::Axis(AxisName::Child), name_test("*")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        let two = doc.add_child(one, "two");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![two], ex.evaluate_on(expr, one));
    }

    // `descendant::name` reaches nodes below direct children.
    #[test]
    fn parses_descendant_axis() {
        let tokens = tokens![Token::Axis(AxisName::Descendant), name_test("two")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        let two = doc.add_child(one, "two");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![two], ex.evaluate_on(expr, doc.top_node()));
    }

    // `descendant-or-self::*` includes the context node and its descendants.
    #[test]
    fn parses_descendant_or_self_axis() {
        let tokens = tokens![Token::Axis(AxisName::DescendantOrSelf), name_test("*")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        let two = doc.add_child(one, "two");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![one, two], ex.evaluate_on(expr, one));
    }

    // `attribute::*` selects attribute nodes.
    #[test]
    fn parses_attribute_axis() {
        let tokens = tokens![Token::Axis(AxisName::Attribute), name_test("*")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        let attr = one.set_attribute_value("hello", "world");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![attr], ex.evaluate_on(expr, one));
    }

    // `namespace::prefix` selects the in-scope namespace node by prefix.
    #[test]
    fn parses_namespace_axis() {
        let tokens = tokens![Token::Axis(AxisName::Namespace), name_test("prefix")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        one.register_prefix("prefix", "uri");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        match ex.evaluate_on(expr, one) {
            Value::Nodeset(ns) => {
                assert_eq!(1, ns.size());
                match ns.into_iter().next() {
                    Some(Node::Namespace(ns)) => {
                        assert_eq!("prefix", ns.prefix());
                        assert_eq!("uri", ns.uri());
                    }
                    _ => panic!("Not a namespace node"),
                }
            }
            _ => panic!("Did not get the namespace node"),
        }
    }

    // An element named like an axis keyword still parses as a name test.
    #[test]
    fn parses_child_with_same_name_as_an_axis() {
        let tokens = tokens![name_test("self")];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let element = doc.add_top_child("self");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![element], ex.evaluate_on(expr, doc.top_node()));
    }

    // `node()` matches any child node kind.
    #[test]
    fn parses_node_node_test() {
        let tokens = tokens![Token::NodeTest(NodeTestName::Node)];

        let package = Package::new();
        let doc = TestDoc(package.as_document());
        let one = doc.add_top_child("one");
        let two = doc.add_child(one, "two");

        let ex = Exercise::new(&doc);
        let expr = ex.parse(tokens);

        assert_eq!(nodeset![two], ex.evaluate_on(expr, one));
    }
1024
1025    #[test]
1026    fn parses_comment_node_test() {
1027        let tokens = tokens![Token::NodeTest(NodeTestName::Comment)];
1028
1029        let package = Package::new();
1030        let doc = TestDoc(package.as_document());
1031        let one = doc.add_top_child("one");
1032        let two = doc.add_comment(one, "two");
1033
1034        let ex = Exercise::new(&doc);
1035        let expr = ex.parse(tokens);
1036
1037        assert_eq!(nodeset![two], ex.evaluate_on(expr, one));
1038    }
1039
1040    #[test]
1041    fn parses_text_node_test() {
1042        let tokens = tokens![Token::NodeTest(NodeTestName::Text)];
1043
1044        let package = Package::new();
1045        let doc = TestDoc(package.as_document());
1046        let one = doc.add_top_child("one");
1047        let text = doc.add_text(one, "text");
1048
1049        let ex = Exercise::new(&doc);
1050        let expr = ex.parse(tokens);
1051
1052        assert_eq!(nodeset![text], ex.evaluate_on(expr, one));
1053    }
1054
1055    #[test]
1056    fn parses_processing_instruction_node_test() {
1057        let tokens = tokens![Token::NodeTest(NodeTestName::ProcessingInstruction(Some(
1058            "name".to_owned()
1059        )))];
1060
1061        let package = Package::new();
1062        let doc = TestDoc(package.as_document());
1063        let one = doc.add_top_child("one");
1064        let two = doc.add_processing_instruction(one, "name", None);
1065
1066        let ex = Exercise::new(&doc);
1067        let expr = ex.parse(tokens);
1068
1069        assert_eq!(nodeset![two], ex.evaluate_on(expr, one));
1070    }
1071
1072    #[test]
1073    fn parses_axis_and_node_test() {
1074        let tokens = tokens![
1075            Token::Axis(AxisName::SelfAxis),
1076            Token::NodeTest(NodeTestName::Text),
1077        ];
1078
1079        let package = Package::new();
1080        let doc = TestDoc(package.as_document());
1081        let one = doc.add_top_child("one");
1082        let text = doc.add_text(one, "text");
1083
1084        let ex = Exercise::new(&doc);
1085        let expr = ex.parse(tokens);
1086
1087        assert_eq!(nodeset![text], ex.evaluate_on(expr, text));
1088    }
1089
1090    #[test]
1091    fn numeric_predicate_selects_indexed_node() {
1092        let tokens = tokens![
1093            name_test("*"),
1094            Token::LeftBracket,
1095            Token::Number(2.0),
1096            Token::RightBracket
1097        ];
1098
1099        let package = Package::new();
1100        let doc = TestDoc(package.as_document());
1101        doc.add_top_child("first");
1102        let second = doc.add_top_child("second");
1103
1104        let ex = Exercise::new(&doc);
1105        let expr = ex.parse(tokens);
1106
1107        assert_eq!(nodeset![second], ex.evaluate_on(expr, doc.top_node()));
1108    }
1109
1110    #[test]
1111    fn string_literal() {
1112        let tokens = tokens![Token::Literal("string".to_owned())];
1113
1114        let package = Package::new();
1115        let doc = TestDoc(package.as_document());
1116
1117        let ex = Exercise::new(&doc);
1118        let expr = ex.parse(tokens);
1119
1120        assert_eq!(String("string".to_owned()), ex.evaluate(expr));
1121    }
1122
1123    #[test]
1124    fn predicate_accepts_any_expression() {
1125        let tokens = tokens![
1126            name_test("*"),
1127            Token::LeftBracket,
1128            Token::Function("true".into()),
1129            Token::LeftParen,
1130            Token::RightParen,
1131            Token::Or,
1132            Token::Function("false".into()),
1133            Token::LeftParen,
1134            Token::RightParen,
1135            Token::RightBracket
1136        ];
1137
1138        let package = Package::new();
1139        let doc = TestDoc(package.as_document());
1140        let first = doc.add_top_child("first");
1141        let second = doc.add_top_child("second");
1142
1143        let ex = Exercise::new(&doc);
1144        let expr = ex.parse(tokens);
1145
1146        assert_eq!(
1147            nodeset![first, second],
1148            ex.evaluate_on(expr, doc.top_node())
1149        );
1150    }
1151
1152    #[test]
1153    fn true_function_predicate_selects_all_nodes() {
1154        let tokens = tokens![
1155            name_test("*"),
1156            Token::LeftBracket,
1157            Token::Function("true".into()),
1158            Token::LeftParen,
1159            Token::RightParen,
1160            Token::RightBracket
1161        ];
1162
1163        let package = Package::new();
1164        let doc = TestDoc(package.as_document());
1165        let first = doc.add_top_child("first");
1166        let second = doc.add_top_child("second");
1167
1168        let ex = Exercise::new(&doc);
1169        let expr = ex.parse(tokens);
1170
1171        assert_eq!(
1172            nodeset![first, second],
1173            ex.evaluate_on(expr, doc.top_node())
1174        );
1175    }
1176
1177    #[test]
1178    fn false_function_predicate_selects_no_nodes() {
1179        let tokens = tokens![
1180            name_test("*"),
1181            Token::LeftBracket,
1182            Token::Function("false".into()),
1183            Token::LeftParen,
1184            Token::RightParen,
1185            Token::RightBracket
1186        ];
1187
1188        let package = Package::new();
1189        let doc = TestDoc(package.as_document());
1190        doc.add_top_child("first");
1191        doc.add_top_child("second");
1192
1193        let ex = Exercise::new(&doc);
1194        let expr = ex.parse(tokens);
1195
1196        assert_eq!(nodeset![], ex.evaluate_on(expr, doc.top_node()));
1197    }
1198
1199    #[test]
1200    fn multiple_predicates() {
1201        let tokens = tokens![
1202            name_test("*"),
1203            Token::LeftBracket,
1204            Token::Number(2.0),
1205            Token::RightBracket,
1206            Token::LeftBracket,
1207            Token::Number(1.0),
1208            Token::RightBracket
1209        ];
1210
1211        let package = Package::new();
1212        let doc = TestDoc(package.as_document());
1213        doc.add_top_child("first");
1214        let second = doc.add_top_child("second");
1215
1216        let ex = Exercise::new(&doc);
1217        let expr = ex.parse(tokens);
1218
1219        assert_eq!(nodeset![second], ex.evaluate_on(expr, doc.top_node()));
1220    }
1221
1222    #[test]
1223    fn functions_accept_arguments() {
1224        let tokens = tokens![
1225            Token::Function("not".into()),
1226            Token::LeftParen,
1227            Token::Function("true".into()),
1228            Token::LeftParen,
1229            Token::RightParen,
1230            Token::RightParen,
1231        ];
1232
1233        let package = Package::new();
1234        let doc = TestDoc(package.as_document());
1235
1236        let ex = Exercise::new(&doc);
1237        let expr = ex.parse(tokens);
1238
1239        assert_eq!(Boolean(false), ex.evaluate(expr));
1240    }
1241
1242    #[test]
1243    fn functions_accept_any_expression_as_an_argument() {
1244        let tokens = tokens![
1245            Token::Function("not".into()),
1246            Token::LeftParen,
1247            Token::Function("true".into()),
1248            Token::LeftParen,
1249            Token::RightParen,
1250            Token::Or,
1251            Token::Function("false".into()),
1252            Token::LeftParen,
1253            Token::RightParen,
1254            Token::RightParen,
1255        ];
1256
1257        let package = Package::new();
1258        let doc = TestDoc(package.as_document());
1259
1260        let ex = Exercise::new(&doc);
1261        let expr = ex.parse(tokens);
1262
1263        assert_eq!(Boolean(false), ex.evaluate(expr));
1264    }
1265
1266    #[test]
1267    fn numeric_literal() {
1268        let tokens = tokens![Token::Number(3.2)];
1269
1270        let package = Package::new();
1271        let doc = TestDoc(package.as_document());
1272
1273        let ex = Exercise::new(&doc);
1274        let expr = ex.parse(tokens);
1275
1276        assert_approx_eq!(Number(3.2), ex.evaluate(expr));
1277    }
1278
1279    #[test]
1280    fn addition_of_two_numbers() {
1281        let tokens = tokens![Token::Number(1.1), Token::PlusSign, Token::Number(2.2)];
1282
1283        let package = Package::new();
1284        let doc = TestDoc(package.as_document());
1285
1286        let ex = Exercise::new(&doc);
1287        let expr = ex.parse(tokens);
1288
1289        assert_approx_eq!(Number(3.3), ex.evaluate(expr));
1290    }
1291
1292    #[test]
1293    fn addition_of_multiple_numbers() {
1294        let tokens = tokens![
1295            Token::Number(1.1),
1296            Token::PlusSign,
1297            Token::Number(2.2),
1298            Token::PlusSign,
1299            Token::Number(3.3)
1300        ];
1301
1302        let package = Package::new();
1303        let doc = TestDoc(package.as_document());
1304
1305        let ex = Exercise::new(&doc);
1306        let expr = ex.parse(tokens);
1307
1308        assert_approx_eq!(Number(6.6), ex.evaluate(expr));
1309    }
1310
1311    #[test]
1312    fn subtraction_of_two_numbers() {
1313        let tokens = tokens![Token::Number(1.1), Token::MinusSign, Token::Number(2.2),];
1314
1315        let package = Package::new();
1316        let doc = TestDoc(package.as_document());
1317
1318        let ex = Exercise::new(&doc);
1319        let expr = ex.parse(tokens);
1320
1321        assert_approx_eq!(Number(-1.1), ex.evaluate(expr));
1322    }
1323
1324    #[test]
1325    fn additive_expression_is_left_associative() {
1326        let tokens = tokens![
1327            Token::Number(1.1),
1328            Token::MinusSign,
1329            Token::Number(2.2),
1330            Token::MinusSign,
1331            Token::Number(3.3),
1332        ];
1333
1334        let package = Package::new();
1335        let doc = TestDoc(package.as_document());
1336
1337        let ex = Exercise::new(&doc);
1338        let expr = ex.parse(tokens);
1339
1340        assert_approx_eq!(Number(-4.4), ex.evaluate(expr));
1341    }
1342
1343    #[test]
1344    fn multiplication_of_two_numbers() {
1345        let tokens = tokens![Token::Number(1.1), Token::Multiply, Token::Number(2.2),];
1346
1347        let package = Package::new();
1348        let doc = TestDoc(package.as_document());
1349
1350        let ex = Exercise::new(&doc);
1351        let expr = ex.parse(tokens);
1352
1353        assert_approx_eq!(Number(2.42), ex.evaluate(expr));
1354    }
1355
1356    #[test]
1357    fn division_of_two_numbers() {
1358        let tokens = tokens![Token::Number(7.1), Token::Divide, Token::Number(0.1),];
1359
1360        let package = Package::new();
1361        let doc = TestDoc(package.as_document());
1362
1363        let ex = Exercise::new(&doc);
1364        let expr = ex.parse(tokens);
1365
1366        assert_approx_eq!(Number(71.0), ex.evaluate(expr));
1367    }
1368
1369    #[test]
1370    fn remainder_of_two_numbers() {
1371        let tokens = tokens![Token::Number(7.1), Token::Remainder, Token::Number(3.0),];
1372
1373        let package = Package::new();
1374        let doc = TestDoc(package.as_document());
1375
1376        let ex = Exercise::new(&doc);
1377        let expr = ex.parse(tokens);
1378
1379        assert_approx_eq!(Number(1.1), ex.evaluate(expr));
1380    }
1381
1382    #[test]
1383    fn unary_negation() {
1384        let tokens = tokens![Token::MinusSign, Token::Number(7.2),];
1385
1386        let package = Package::new();
1387        let doc = TestDoc(package.as_document());
1388
1389        let ex = Exercise::new(&doc);
1390        let expr = ex.parse(tokens);
1391
1392        assert_approx_eq!(Number(-7.2), ex.evaluate(expr));
1393    }
1394
1395    #[test]
1396    fn repeated_unary_negation() {
1397        let tokens = tokens![
1398            Token::MinusSign,
1399            Token::MinusSign,
1400            Token::MinusSign,
1401            Token::Number(7.2),
1402        ];
1403
1404        let package = Package::new();
1405        let doc = TestDoc(package.as_document());
1406
1407        let ex = Exercise::new(&doc);
1408        let expr = ex.parse(tokens);
1409
1410        assert_approx_eq!(Number(-7.2), ex.evaluate(expr));
1411    }
1412
1413    #[test]
1414    fn top_level_function_call() {
1415        let tokens = tokens![
1416            Token::Function("true".into()),
1417            Token::LeftParen,
1418            Token::RightParen,
1419        ];
1420
1421        let package = Package::new();
1422        let doc = TestDoc(package.as_document());
1423
1424        let ex = Exercise::new(&doc);
1425        let expr = ex.parse(tokens);
1426
1427        assert_eq!(Boolean(true), ex.evaluate(expr));
1428    }
1429
1430    #[test]
1431    fn or_expression() {
1432        let tokens = tokens![
1433            Token::Function("true".into()),
1434            Token::LeftParen,
1435            Token::RightParen,
1436            Token::Or,
1437            Token::Function("false".into()),
1438            Token::LeftParen,
1439            Token::RightParen,
1440        ];
1441
1442        let package = Package::new();
1443        let doc = TestDoc(package.as_document());
1444
1445        let ex = Exercise::new(&doc);
1446        let expr = ex.parse(tokens);
1447
1448        assert_eq!(Boolean(true), ex.evaluate(expr));
1449    }
1450
1451    #[test]
1452    fn and_expression() {
1453        let tokens = tokens![Token::Number(1.2), Token::And, Token::Number(0.0),];
1454
1455        let package = Package::new();
1456        let doc = TestDoc(package.as_document());
1457
1458        let ex = Exercise::new(&doc);
1459        let expr = ex.parse(tokens);
1460
1461        assert_eq!(Boolean(false), ex.evaluate(expr));
1462    }
1463
1464    #[test]
1465    fn equality_expression() {
1466        let tokens = tokens![Token::Number(1.2), Token::Equal, Token::Number(1.1),];
1467
1468        let package = Package::new();
1469        let doc = TestDoc(package.as_document());
1470
1471        let ex = Exercise::new(&doc);
1472        let expr = ex.parse(tokens);
1473
1474        assert_eq!(Boolean(false), ex.evaluate(expr));
1475    }
1476
1477    #[test]
1478    fn inequality_expression() {
1479        let tokens = tokens![Token::Number(1.2), Token::NotEqual, Token::Number(1.2),];
1480
1481        let package = Package::new();
1482        let doc = TestDoc(package.as_document());
1483
1484        let ex = Exercise::new(&doc);
1485        let expr = ex.parse(tokens);
1486
1487        assert_eq!(Boolean(false), ex.evaluate(expr));
1488    }
1489
1490    #[test]
1491    fn less_than_expression() {
1492        let tokens = tokens![Token::Number(1.2), Token::LessThan, Token::Number(1.2),];
1493
1494        let package = Package::new();
1495        let doc = TestDoc(package.as_document());
1496
1497        let ex = Exercise::new(&doc);
1498        let expr = ex.parse(tokens);
1499
1500        assert_eq!(Boolean(false), ex.evaluate(expr));
1501    }
1502
1503    #[test]
1504    fn less_than_or_equal_expression() {
1505        let tokens = tokens![
1506            Token::Number(1.2),
1507            Token::LessThanOrEqual,
1508            Token::Number(1.2),
1509        ];
1510
1511        let package = Package::new();
1512        let doc = TestDoc(package.as_document());
1513
1514        let ex = Exercise::new(&doc);
1515        let expr = ex.parse(tokens);
1516
1517        assert_eq!(Boolean(true), ex.evaluate(expr));
1518    }
1519
1520    #[test]
1521    fn greater_than_expression() {
1522        let tokens = tokens![Token::Number(1.2), Token::GreaterThan, Token::Number(1.2),];
1523
1524        let package = Package::new();
1525        let doc = TestDoc(package.as_document());
1526
1527        let ex = Exercise::new(&doc);
1528        let expr = ex.parse(tokens);
1529
1530        assert_eq!(Boolean(false), ex.evaluate(expr));
1531    }
1532
1533    #[test]
1534    fn greater_than_or_equal_expression() {
1535        let tokens = tokens![
1536            Token::Number(1.2),
1537            Token::GreaterThanOrEqual,
1538            Token::Number(1.2),
1539        ];
1540
1541        let package = Package::new();
1542        let doc = TestDoc(package.as_document());
1543
1544        let ex = Exercise::new(&doc);
1545        let expr = ex.parse(tokens);
1546
1547        assert_eq!(Boolean(true), ex.evaluate(expr));
1548    }
1549
1550    #[test]
1551    fn nested_expression() {
1552        let tokens = tokens![Token::LeftParen, Token::Number(1.1), Token::RightParen,];
1553
1554        let package = Package::new();
1555        let doc = TestDoc(package.as_document());
1556
1557        let ex = Exercise::new(&doc);
1558        let expr = ex.parse(tokens);
1559
1560        assert_approx_eq!(Number(1.1), ex.evaluate(expr));
1561    }
1562
1563    #[test]
1564    fn variable_reference() {
1565        let tokens = tokens![Token::Variable("variable-name".into())];
1566
1567        let package = Package::new();
1568        let doc = TestDoc(package.as_document());
1569
1570        let mut ex = Exercise::new(&doc);
1571        ex.context.set_variable("variable-name", 12.3);
1572        let expr = ex.parse(tokens);
1573
1574        assert_approx_eq!(Number(12.3), ex.evaluate(expr));
1575    }
1576
1577    #[test]
1578    fn variable_reference_prefixed_name() {
1579        let tokens = tokens![Token::Variable(("ns", "variable-name").into())];
1580
1581        let package = Package::new();
1582        let doc = TestDoc(package.as_document());
1583
1584        let mut ex = Exercise::new(&doc);
1585        ex.context.set_namespace("ns", "uri:vars");
1586        ex.context.set_variable(("uri:vars", "variable-name"), 12.3);
1587        let expr = ex.parse(tokens);
1588
1589        assert_approx_eq!(Number(12.3), ex.evaluate(expr));
1590    }
1591
1592    #[test]
1593    fn filter_expression() {
1594        let tokens = tokens![
1595            Token::Variable("variable".into()),
1596            Token::LeftBracket,
1597            Token::Number(0.0),
1598            Token::RightBracket,
1599        ];
1600
1601        let package = Package::new();
1602        let doc = TestDoc(package.as_document());
1603        let value = nodeset![
1604            doc.add_top_child("first-node"),
1605            doc.add_top_child("second-node"),
1606        ];
1607
1608        let mut ex = Exercise::new(&doc);
1609        ex.context.set_variable("variable", value);
1610
1611        let expr = ex.parse(tokens);
1612
1613        assert_eq!(nodeset![], ex.evaluate(expr));
1614    }
1615
1616    #[test]
1617    fn filter_expression_and_relative_path() {
1618        let tokens = tokens![
1619            Token::Variable("variable".into()),
1620            Token::Slash,
1621            name_test("child"),
1622        ];
1623
1624        let package = Package::new();
1625        let doc = TestDoc(package.as_document());
1626        let parent = doc.add_top_child("parent");
1627        let child = doc.add_child(parent, "child");
1628
1629        let value = nodeset![parent];
1630
1631        let mut ex = Exercise::new(&doc);
1632        ex.context.set_variable("variable", value);
1633
1634        let expr = ex.parse(tokens);
1635
1636        assert_eq!(nodeset![child], ex.evaluate(expr));
1637    }
1638
1639    #[test]
1640    fn union_expression() {
1641        let tokens = tokens![
1642            Token::Variable("variable1".into()),
1643            Token::Pipe,
1644            Token::Variable("variable2".into()),
1645        ];
1646
1647        let package = Package::new();
1648        let doc = TestDoc(package.as_document());
1649        let node1 = doc.add_top_child("first-node");
1650        let node2 = doc.add_top_child("second-node");
1651
1652        let mut ex = Exercise::new(&doc);
1653        ex.context.set_variable("variable1", nodeset![node1]);
1654        ex.context.set_variable("variable2", nodeset![node2]);
1655
1656        let expr = ex.parse(tokens);
1657
1658        assert_eq!(nodeset![node1, node2], ex.evaluate(expr));
1659    }
1660
1661    #[test]
1662    fn absolute_path_expression() {
1663        let tokens = tokens![Token::Slash,];
1664
1665        let package = Package::new();
1666        let doc = TestDoc(package.as_document());
1667        let node1 = doc.add_top_child("first-node");
1668        let node2 = doc.add_child(node1, "second-node");
1669
1670        let ex = Exercise::new(&doc);
1671        let expr = ex.parse(tokens);
1672
1673        assert_eq!(nodeset![doc.root()], ex.evaluate_on(expr, node2));
1674    }
1675
1676    #[test]
1677    fn absolute_path_with_child_expression() {
1678        let tokens = tokens![Token::Slash, name_test("*"),];
1679
1680        let package = Package::new();
1681        let doc = TestDoc(package.as_document());
1682        let node1 = doc.add_top_child("first-node");
1683        let node2 = doc.add_child(node1, "second-node");
1684
1685        let ex = Exercise::new(&doc);
1686        let expr = ex.parse(tokens);
1687
1688        assert_eq!(nodeset![doc.top_node()], ex.evaluate_on(expr, node2));
1689    }
1690
1691    #[test]
1692    fn unexpected_token_is_reported_as_an_error() {
1693        let tokens = tokens![Token::Function("does-not-matter".into()), Token::RightParen];
1694
1695        let package = Package::new();
1696        let doc = TestDoc(package.as_document());
1697
1698        let ex = Exercise::new(&doc);
1699        let res = ex.parser.parse(tokens.into_iter());
1700        assert_eq!(
1701            Some(Error::UnexpectedToken {
1702                token: Token::RightParen
1703            }),
1704            res.err()
1705        );
1706    }
1707
1708    #[test]
1709    fn no_xpath_error() {
1710        let tokens = tokens![];
1711
1712        let package = Package::new();
1713        let doc = TestDoc(package.as_document());
1714
1715        let ex = Exercise::new(&doc);
1716        let res = ex.parse_raw(tokens);
1717        assert_eq!(Some(Error::NoXPath), res.err());
1718    }
1719
1720    #[test]
1721    fn binary_operator_without_right_hand_side_is_reported_as_an_error() {
1722        let tokens = tokens![Token::Literal("left".to_owned()), Token::And];
1723
1724        let package = Package::new();
1725        let doc = TestDoc(package.as_document());
1726
1727        let ex = Exercise::new(&doc);
1728        let res = ex.parse_raw(tokens);
1729        assert_eq!(Some(Error::RightHandSideExpressionMissing), res.err());
1730    }
1731
1732    #[test]
1733    fn unary_operator_without_right_hand_side_is_reported_as_an_error() {
1734        let tokens = tokens![Token::MinusSign,];
1735
1736        let package = Package::new();
1737        let doc = TestDoc(package.as_document());
1738
1739        let ex = Exercise::new(&doc);
1740        let res = ex.parser.parse(tokens.into_iter());
1741        assert_eq!(Some(Error::RightHandSideExpressionMissing), res.err());
1742    }
1743
1744    #[test]
1745    fn empty_predicate_is_reported_as_an_error() {
1746        let tokens = tokens![name_test("*"), Token::LeftBracket, Token::RightBracket,];
1747
1748        let package = Package::new();
1749        let doc = TestDoc(package.as_document());
1750
1751        let ex = Exercise::new(&doc);
1752        let res = ex.parse_raw(tokens);
1753        assert_eq!(Some(Error::EmptyPredicate), res.err());
1754    }
1755
1756    #[test]
1757    fn relative_path_with_trailing_slash_is_reported_as_an_error() {
1758        let tokens = tokens![name_test("*"), Token::Slash,];
1759
1760        let package = Package::new();
1761        let doc = TestDoc(package.as_document());
1762
1763        let ex = Exercise::new(&doc);
1764        let res = ex.parse_raw(tokens);
1765        assert_eq!(Some(Error::TrailingSlash), res.err());
1766    }
1767
1768    #[test]
1769    fn filter_expression_with_trailing_slash_is_reported_as_an_error() {
1770        let tokens = tokens![Token::Variable("variable".into()), Token::Slash,];
1771
1772        let package = Package::new();
1773        let doc = TestDoc(package.as_document());
1774
1775        let ex = Exercise::new(&doc);
1776        let res = ex.parse_raw(tokens);
1777        assert_eq!(Some(Error::TrailingSlash), res.err());
1778    }
1779
1780    #[test]
1781    fn running_out_of_input_is_reported_as_an_error() {
1782        let tokens = tokens![Token::Function("func".into())];
1783
1784        let package = Package::new();
1785        let doc = TestDoc(package.as_document());
1786
1787        let ex = Exercise::new(&doc);
1788        let res = ex.parse_raw(tokens);
1789        assert_eq!(Some(Error::RanOutOfInput), res.err());
1790    }
1791
1792    #[test]
1793    fn having_extra_tokens_is_reported_as_an_error() {
1794        let tokens = tokens![Token::LeftBracket];
1795
1796        let package = Package::new();
1797        let doc = TestDoc(package.as_document());
1798
1799        let ex = Exercise::new(&doc);
1800        let res = ex.parse_raw(tokens);
1801        assert_eq!(Some(Error::ExtraUnparsedTokens), res.err());
1802    }
1803
1804    #[test]
1805    fn a_tokenizer_error_is_reported_as_an_error() {
1806        let tokens = vec![
1807            Ok(Token::Function("func".into())),
1808            Err(tokenizer::Error::UnableToCreateToken),
1809        ];
1810
1811        let package = Package::new();
1812        let doc = TestDoc(package.as_document());
1813
1814        let ex = Exercise::new(&doc);
1815        let res = ex.parse_raw(tokens);
1816        assert_eq!(
1817            tokenizer::UnableToCreateToken
1818                .fail::<()>()
1819                .context(Tokenizer)
1820                .err(),
1821            res.err()
1822        );
1823    }
1824}