use read_token;
use range::Range;
use std::rc::Rc;

use {
    DebugId,
    MetaData,
    ParseError,
    ParseResult,
    Tokenizer,
    TokenizerState,
};

/// Stores information about text.
#[derive(Clone, Debug, PartialEq)]
pub struct Text {
    /// Whether to allow an empty string.
    pub allow_empty: bool,
    /// Which property to set if text is read.
    pub property: Option<Rc<String>>,
    /// A debug id to track down the rule generating an error.
    pub debug_id: DebugId,
}

impl Text {
    /// Parses text.
    pub fn parse(
        &self,
        tokenizer: &mut Tokenizer,
        state: &TokenizerState,
        chars: &[char],
        offset: usize
    ) -> ParseResult<TokenizerState> {
        if let Some(range) = read_token::string(chars, offset) {
            // A range of length 2 covers only the two quote characters,
            // i.e. the string is empty.
            if !self.allow_empty && range.length == 2 {
                Err((range, ParseError::EmptyTextNotAllowed(self.debug_id)))
            } else {
                match read_token::parse_string(
                    chars, offset, range.next_offset()) {
                    // Focus the range on the invalid part of the string format.
                    Err(err) => Err((err.range(),
                        ParseError::ParseStringError(err, self.debug_id))),
                    Ok(text) => {
                        // When a property is set, store the parsed text as
                        // meta data; otherwise only report the consumed range.
                        if let Some(ref property) = self.property {
                            Ok((range, tokenizer.data(
                                MetaData::String(property.clone(), Rc::new(text)),
                                state,
                                range
                            ), None))
                        } else {
                            Ok((range, state.clone(), None))
                        }
                    }
                }
            }
        } else {
            Err((Range::new(offset, 0),
                ParseError::ExpectedText(self.debug_id)))
        }
    }
}

#[cfg(test)]
mod tests {
    use super::super::*;
    use range::Range;
    use std::rc::Rc;

    #[test]
    fn expected_text() {
        let text = "23";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let text = Text {
            debug_id: 0,
            allow_empty: true,
            property: None
        };
        let res = text.parse(&mut tokenizer, &s, &chars, 0);
        assert_eq!(res, Err((Range::new(0, 0), ParseError::ExpectedText(0))));
    }

    #[test]
    fn empty_string() {
        let text = "\"\"";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let text = Text {
            debug_id: 0,
            allow_empty: false,
            property: None
        };
        let res = text.parse(&mut tokenizer, &s, &chars, 0);
        assert_eq!(res, Err((Range::new(0, 2),
            ParseError::EmptyTextNotAllowed(0))));
    }
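
    // A hedged counterpart to `empty_string` above (not part of the original
    // suite): with `allow_empty: true` the empty token should parse, assuming
    // `read_token::parse_string` yields "" for the two quote characters.
    #[test]
    fn empty_string_allowed() {
        let text = "\"\"";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let text = Text {
            debug_id: 0,
            allow_empty: true,
            property: None
        };
        let res = text.parse(&mut tokenizer, &s, &chars, 0);
        assert_eq!(res, Ok((Range::new(0, 2), s, None)));
        assert_eq!(tokenizer.tokens.len(), 0);
    }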

    #[test]
    fn successful() {
        let text = "foo \"hello\"";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let foo: Rc<String> = Rc::new("foo".into());
        let text = Text {
            debug_id: 0,
            allow_empty: true,
            property: Some(foo.clone())
        };
        let res = text.parse(&mut tokenizer, &s, &chars[4..], 4);
        assert_eq!(res, Ok((Range::new(4, 7), TokenizerState(1), None)));
        assert_eq!(tokenizer.tokens.len(), 1);
        assert_eq!(&tokenizer.tokens[0].1,
            &MetaData::String(foo.clone(), Rc::new("hello".into())));
    }
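
    // A minimal sketch of the no-property success path (not part of the
    // original suite): without a `property`, `parse` should consume the
    // quoted token, push no meta data, and hand back the state unchanged.
    // Assumes `read_token::string` reports the full quoted range `(0, 7)`,
    // as in `successful` above.
    #[test]
    fn successful_without_property() {
        let text = "\"hello\"";
        let chars: Vec<char> = text.chars().collect();
        let mut tokenizer = Tokenizer::new();
        let s = TokenizerState::new();
        let text = Text {
            debug_id: 0,
            allow_empty: true,
            property: None
        };
        let res = text.parse(&mut tokenizer, &s, &chars, 0);
        assert_eq!(res, Ok((Range::new(0, 7), s, None)));
        assert_eq!(tokenizer.tokens.len(), 0);
    }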
}