json_five/rt/parser.rs

use std::fmt::{Display, Formatter};
use std::iter::Peekable;
use std::slice::Iter;
use crate::utils::get_line_col_char;
use crate::tokenize::{TokType, TokenSpan, Tokens};

#[derive(PartialEq, Debug, Clone)]
pub enum UnaryOperator {
    Plus,
    Minus,
}

impl Display for UnaryOperator {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            UnaryOperator::Plus => {write!(f, "+")}
            UnaryOperator::Minus => {write!(f, "-")}
        }
    }
}

type Wsc = String; // Whitespace and comment tokens

// {wsc.0} value {wsc.1}

/// Represents contextual whitespace/comments for the associated JSONText
#[derive(PartialEq, Debug, Clone)]
pub struct JSONTextContext {
    /// the whitespace and/or comments surrounding the Text production
    ///
    /// In other words: `{ wsc.0 } value { wsc.1 }`
    pub wsc: (Wsc, Wsc)
}

// LBRACE {wsc} [ key_value_pairs ] RBRACE
//                                 ^ any whitespace that would go here would be part of the KVP

#[derive(PartialEq, Debug, Clone)]
pub struct JSONObjectContext {
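    /// Holds the whitespace/comments that follow the opening brace of a [JSONValue::JSONObject]
    ///
    /// ```LBRACE {wsc.0} [ key_value_pairs ] RBRACE```
    ///
    /// For example (a small illustrative sketch; `from_str` is defined later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue, JSONObjectContext};
    /// let doc = from_str("{ /* empty */ }").unwrap();
    /// if let JSONValue::JSONObject { context, .. } = doc.value {
    ///     assert_eq!(context, Some(JSONObjectContext { wsc: (" /* empty */ ".to_string(),) }));
    /// }
    /// ```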
    pub wsc: (Wsc,)
}


// key {wsc.0} COLON {wsc.1} value {wsc.2} [ COMMA {wsc.3} ] [ next_kvp ]
#[derive(PartialEq, Debug, Clone)]
pub struct KeyValuePairContext {
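    /// For example, given `{ a : 1 , }`, each of the four slots captures one space
    /// (a small illustrative sketch; `from_str` is defined later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue};
    /// let doc = from_str("{ a : 1 , }").unwrap();
    /// if let JSONValue::JSONObject { key_value_pairs, .. } = doc.value {
    ///     let ctx = key_value_pairs[0].context.clone().unwrap();
    ///     assert_eq!(ctx.wsc, (" ".to_string(), " ".to_string(), " ".to_string(), Some(" ".to_string())));
    /// }
    /// ```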
    pub wsc: (Wsc, Wsc, Wsc, Option<Wsc>),
    //                       ^ Some() here represents the presence of a comma and its subsequent wsc
}


#[derive(PartialEq, Debug, Clone)]
pub struct JSONArrayContext {
    /// Holds the whitespace/comments that follow the opening bracket of a [JSONValue::JSONArray]
    ///
    /// ```LBRACKET {wsc.0} [ array_values ] RBRACKET```
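    ///
    /// For example (a small illustrative sketch; `from_str` is defined later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue, JSONArrayContext};
    /// let doc = from_str("[ ]").unwrap();
    /// if let JSONValue::JSONArray { context, .. } = doc.value {
    ///     assert_eq!(context, Some(JSONArrayContext { wsc: (" ".to_string(),) }));
    /// }
    /// ```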
    pub wsc: (Wsc,)
}

#[derive(PartialEq, Debug, Clone)]
pub struct ArrayValueContext {
    /// the whitespace and/or comments that may occur after the value and (optionally) after the comma following the value.
    /// In other words, roughly:
    /// ```ebnf
    /// value { wsc.0 } [ COMMA { wsc.1 } [ next_value ]]
    /// ```
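    ///
    /// For example, in `[ 1 , 2 ]` the first value is followed by a comma and
    /// the second is not (a small illustrative sketch; `from_str` is defined
    /// later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue};
    /// let doc = from_str("[ 1 , 2 ]").unwrap();
    /// if let JSONValue::JSONArray { values, .. } = doc.value {
    ///     assert_eq!(values[0].context.clone().unwrap().wsc, (" ".to_string(), Some(" ".to_string())));
    ///     assert_eq!(values[1].context.clone().unwrap().wsc, (" ".to_string(), None));
    /// }
    /// ```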
    pub wsc: (Wsc, Option<Wsc>),
    //             ^ Some() here represents the presence of a comma and its subsequent wsc
}

/// Represents the ['JSON5Member' production](https://spec.json5.org/#prod-JSON5Member).
///
/// In other words, it's the key-value pairs of a [JSONValue::JSONObject]
#[derive(PartialEq, Debug, Clone)]
pub struct JSONKeyValuePair {

    /// The key is a `JSONValue` that, in practice, is limited to [JSONValue::Identifier],
    /// [JSONValue::DoubleQuotedString], or [JSONValue::SingleQuotedString]
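    ///
    /// For example, quoted keys parse to the string variants and bare keys to
    /// [JSONValue::Identifier] (a small illustrative sketch; `from_str` is
    /// defined later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue};
    /// let doc = from_str("{'a': 1, b: 2}").unwrap();
    /// if let JSONValue::JSONObject { key_value_pairs, .. } = doc.value {
    ///     assert_eq!(key_value_pairs[0].key, JSONValue::SingleQuotedString("a".to_string()));
    ///     assert_eq!(key_value_pairs[1].key, JSONValue::Identifier("b".to_string()));
    /// }
    /// ```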
    pub key: JSONValue,
    pub value: JSONValue,

    /// contextual whitespace/comments for this key-value pair
    pub context: Option<KeyValuePairContext>
}

/// Represents a value in a [JSONValue::JSONArray]
#[derive(PartialEq, Debug, Clone)]
pub struct JSONArrayValue {
    pub value: JSONValue,
    pub context: Option<ArrayValueContext>
}

/// Represents a JSON5 value
///
/// Where these enum members have `String`s, they represent the value as it was tokenized, without any modifications (that
/// is, for example, without any escape sequences un-escaped). The single- and double-quoted `String`s do not include the surrounding
/// quote characters. The [JSONValue::JSONObject] and [JSONValue::JSONArray] variants additionally carry optional
/// context describing the whitespace/comments that surround their contents.
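///
/// For example, escape sequences are kept exactly as written and the quotes are
/// stripped (a small illustrative sketch, assuming the tokenizer accepts
/// standard JSON5 escapes; `from_str` is defined later in this module):
///
/// ```rust
/// use json_five::rt::parser::{from_str, JSONValue};
/// let doc = from_str(r#" "a\nb" "#).unwrap();
/// assert_eq!(doc.value, JSONValue::DoubleQuotedString(r"a\nb".to_string()));
/// ```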
#[derive(PartialEq, Debug, Clone)]
pub enum JSONValue {
    /// Represents a JSON5 Object
    JSONObject {

        /// The key-value pairs of the object
        key_value_pairs: Vec<JSONKeyValuePair>,

        context: Option<JSONObjectContext>
    },

    /// Represents a JSON5 Array.
    JSONArray {

        values: Vec<JSONArrayValue>,

        context: Option<JSONArrayContext>
    },

    /// Represents an Integer value.
    /// The String value is a literal, as it might appear in JSON5 source
    Integer(String),

    /// Represents a float value (not including NaN or Infinity; use [JSONValue::NaN] or [JSONValue::Infinity])
    /// The String value is a literal as it might appear in JSON5 source
    Float(String),

    /// Represents an exponent value
    /// The String value is a literal as it might appear in JSON5 source
    Exponent(String),

    Null,
    Infinity,
    NaN,

    /// Represents a hexadecimal value
    /// The String value is a literal as it might appear in JSON5 source, e.g. `String::from("0xDEADBEEF")`
    Hexadecimal(String),
    Bool(bool),

    /// Double-quoted string, as it appears in source.
    /// The String value does not include surrounding quotes
    DoubleQuotedString(String),

    /// Single-quoted string, as it appears in source.
    /// The String value does not include surrounding quotes
    SingleQuotedString(String),

    /// Represents a unary production
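    ///
    /// For example (a small illustrative sketch; `from_str` is defined later in this module):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue, UnaryOperator};
    /// let doc = from_str("-5").unwrap();
    /// assert_eq!(doc.value, JSONValue::Unary {
    ///     operator: UnaryOperator::Minus,
    ///     value: Box::new(JSONValue::Integer("5".to_string())),
    /// });
    /// ```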
    Unary { operator: UnaryOperator, value: Box<JSONValue> },

    /// Represents unquoted identifiers.
    ///
    /// Uniquely, a [JSONValue::Identifier] can only be used in object keys.
    Identifier(String), // XXX: for keys only!
}


/// Represents the top-level Text production of a JSON5 document.
///
/// ```rust
/// use json_five::rt::parser::from_str;
/// use json_five::rt::parser::JSONValue;
///
/// let doc = from_str(" 'foo'\n").unwrap();
/// let context = doc.context.unwrap();
///
/// assert_eq!(&context.wsc.0, " ");
/// assert_eq!(doc.value, JSONValue::SingleQuotedString("foo".to_string()));
/// assert_eq!(&context.wsc.1, "\n");
/// ```
#[derive(PartialEq, Debug)]
pub struct JSONText {

    /// Can be any [JSONValue] except for [JSONValue::Identifier] (which is reserved for keys only)
    pub value: JSONValue,

    /// Contextual whitespace
    pub context: Option<JSONTextContext>
}


impl JSONKeyValuePair {
    // key {wsc.0} COLON {wsc.1} value {wsc.2} [ COMMA {wsc.3} ] [ next_kvp ]
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                format!("{}:{}", self.key.to_string(), self.value.to_string())
            }
            Some(ctx) => {
                match &ctx.wsc.3 {
                    None => {
                        format!("{}{}:{}{}{}", self.key.to_string(), ctx.wsc.0, ctx.wsc.1, self.value.to_string(), ctx.wsc.2)
                    }
                    Some(trailing_wsc) => {
                        format!("{}{}:{}{}{},{}", self.key.to_string(), ctx.wsc.0, ctx.wsc.1, self.value.to_string(), ctx.wsc.2, trailing_wsc)
                    }
                }
            }
        }
    }
}


impl JSONText {
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                self.value.to_string()
            },
            Some(ctx) => {
                format!("{}{}{}", ctx.wsc.0, self.value.to_string(), ctx.wsc.1)
            }
        }
    }
}

// value {wsc.0} [ COMMA {wsc.1} ] [ next_value ]
impl JSONArrayValue {
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                self.value.to_string()
            }
            Some(ctx) => {
                match &ctx.wsc.1 {
                    None => {
                        format!("{}{}", self.value.to_string(), ctx.wsc.0)
                    }
                    Some(trailing_whitespace) => {
                        format!("{}{},{}", self.value.to_string(), ctx.wsc.0, trailing_whitespace)
                    }
                }
            }
        }
    }
}

impl JSONValue {
    fn to_string(&self) -> String {
        match self {
            JSONValue::JSONObject { key_value_pairs, context } => {
                match context {
                    None => {
                        let mut s = String::from("{");
                        for kvp in key_value_pairs {
                            s.push_str(kvp.to_string().as_str())
                        }
                        s.push('}');
                        s
                    }
                    Some(ctx) => {
                        let mut s = format!("{{{}", ctx.wsc.0);
                        for kvp in key_value_pairs {
                            s.push_str(kvp.to_string().as_str());
                        }
                        s.push('}');
                        s
                    }
                }
            }
            JSONValue::JSONArray { values, context } => {
                match context {
                    None => {
                        let mut s = String::from('[');
                        for array_value in values {
                            s.push_str(array_value.to_string().as_str());
                        }
                        s.push(']');
                        s
                    }
                    Some(ctx) => {
                        let mut s = format!("[{}", ctx.wsc.0);
                        for array_value in values {
                            s.push_str(array_value.to_string().as_str());
                        }
                        s.push(']');
                        s
                    }
                }
            }
            JSONValue::Integer(s) => {s.clone()}
            JSONValue::Float(s) => {s.clone()}
            JSONValue::Exponent(s) => {s.clone()}
            JSONValue::Null => {String::from("null")}
            JSONValue::Infinity => {String::from("Infinity")}
            JSONValue::NaN => {String::from("NaN")}
            JSONValue::Hexadecimal(s) => {s.clone()}
            JSONValue::Bool(b) => b.to_string(),
            // The stored strings exclude the quote characters, so they must be
            // re-added here for the serialized form to remain valid JSON5
            JSONValue::DoubleQuotedString(s) => {format!("\"{}\"", s)}
            JSONValue::SingleQuotedString(s) => {format!("'{}'", s)}
            JSONValue::Unary { operator, value} => {
                format!("{}{}", operator, value)
            }
            JSONValue::Identifier(s) => {s.clone()}
        }
    }
}


impl Display for JSONValue {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let res = self.to_string();
        write!(f, "{}", res)
    }
}


impl Display for JSONText {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.to_string())
    }
}


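/// The error type produced when parsing fails.
///
/// Positions are reported several ways: byte offset, character offset, and
/// line/column. A small illustrative sketch of a failed parse (the message
/// text matches this parser's wording for an unterminated array):
///
/// ```rust
/// use json_five::rt::parser::from_str;
/// let err = from_str("[1, 2").unwrap_err();
/// assert!(err.message.contains("Expecting ']'"));
/// ```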
#[derive(Debug, PartialEq)]
pub struct ParsingError {
    pub index: usize, // byte offset
    pub message: String,
    pub lineno: usize,
    pub colno: usize,
    pub char_index: usize, // character offset
}

impl Display for ParsingError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParsingError: {}: line {} column {} (char {})", self.message, self.lineno, self.colno, self.char_index)
    }
}

struct JSON5Parser<'toks, 'input> {
    source: &'input str,
    source_tokens: Peekable<Iter<'toks, TokenSpan>>,
    lookahead: Option<&'toks TokenSpan>,
    current_depth: usize,
    max_depth: usize,
}


impl<'toks, 'input> JSON5Parser<'toks, 'input> {
    fn new(tokens: &'toks Tokens<'input>) -> Self {
        use crate::utils::MAX_DEPTH;
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source, current_depth: 0, max_depth: MAX_DEPTH }
    }

    fn with_max_depth(tokens: &'toks Tokens<'input>, max_depth: usize) -> Self {
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source, current_depth: 0, max_depth }
    }

    fn advance(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.next() {
            None => {
                self.lookahead = None;
                None
            }
            Some(span) => {
                self.lookahead = Some(span);
                self.lookahead
            }
        }
    }

    #[inline]
    fn get_tok_source(&self, span: &'toks TokenSpan) -> &'input str {
        &self.source[span.0 .. span.2]
    }


    fn peek(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.peek() {
            None => None,
            Some(span) => {
                Some(span)
            }
        }
    }


    fn position(&mut self) -> usize {
        match self.peek() {
            None => {
                match self.lookahead {
                    None => 0, // XXX: ???? might be end, actually?
                    Some(span) => {span.2}
                }
            }
            Some(span) => {
                span.0
            }
        }
    }

    fn make_error(&self, message: String, index: usize) -> ParsingError {
        let (lineno, colno, char_index) = get_line_col_char(self.source, index);
        ParsingError {
            index,
            message,
            lineno,
            colno,
            char_index,
        }
    }

    fn check_and_consume(&mut self, types: Vec<TokType>) -> Option<&'toks TokenSpan> {
        let next_tok = self.peek()?;
        for toktype in types {
            if next_tok.1 == toktype {
                return self.advance();
            }
        }
        None
    }

    #[inline]
    fn check_and_consume_with_source(&mut self, types: Vec<TokType>) -> Option<(&'toks TokenSpan, &'input str)> {
        let tok = self.check_and_consume(types)?;
        let source = self.get_tok_source(tok);
        Some((tok, source))
    }

    fn parse_key(&mut self) -> Result<JSONValue, ParsingError> {
        // This is a terminal point
        // We either get a valid key or we bail.
        match self.check_and_consume_with_source(vec![TokType::Name, TokType::DoubleQuotedString, TokType::SingleQuotedString]) {
            None => {
                match self.peek() {
                    None => {
                        let idx = self.position();
                        Err(self.make_error("Unexpected EOF. Was expecting MemberName at".to_string(), idx))
                    }
                    Some(span) => {
                        let src = self.get_tok_source(span);
                        Err(self.make_error(format!("Invalid token for unquoted key ({}, {:?}) at", span.2, src), span.0))
                    }
                }
            },
            Some((span, lexeme)) => {
                match span.1 {
                    TokType::DoubleQuotedString => {
                        Ok(JSONValue::DoubleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
                    },
                    TokType::SingleQuotedString => {
                        Ok(JSONValue::SingleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
                    }
                    TokType::Name => {
                        Ok(JSONValue::Identifier(lexeme.to_string()))
                    }
                    _ => unreachable!("Programming error. Please report this as a bug")
                }
            }
        }
    }

    fn parse_object(&mut self) -> Result<JSONValue, ParsingError> {
        use crate::tokenize::TokType::*;
        let mut kvps: Vec<JSONKeyValuePair> = Vec::new();
        let leading_wsc = self.consume_whitespace_and_comments();
        loop {
            match self.check_and_consume(vec![RightBrace]) {
                None => {
                    let key = self.parse_key()?;
                    let wsc_0 = self.consume_whitespace_and_comments();

                    match self.check_and_consume(vec![Colon]) {
                        None => {
                            let idx = self.position();
                            return Err(self.make_error("Expecting ':' delimiter".to_string(), idx))
                        }
                        Some(_) => {
                            let wsc_1 = self.consume_whitespace_and_comments();
                            let val = self.parse_value()?;
                            let wsc_2 = self.consume_whitespace_and_comments();
                            match self.check_and_consume(vec![Comma]) {
                                None => {
                                    let context = KeyValuePairContext{wsc: (
                                        self.collect_wsc_vec_to_string(&wsc_0),
                                        self.collect_wsc_vec_to_string(&wsc_1),
                                        self.collect_wsc_vec_to_string(&wsc_2),
                                        None
                                    )};
                                    let kvp = JSONKeyValuePair{key, value: val, context: Some(context)};
                                    kvps.push(kvp);
                                    match self.check_and_consume(vec![RightBrace]) {
                                        None => {
                                            let idx = self.position();
                                            return Err(self.make_error("Expecting '}' at end of object".to_string(), idx))
                                        },
                                        Some(_) => {
                                            break Ok(JSONValue::JSONObject {key_value_pairs: kvps, context: Some(JSONObjectContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                                        }
                                    }
                                }
                                Some(_) => {
                                    let wsc_3 = self.consume_whitespace_and_comments();
                                    let context = KeyValuePairContext{wsc: (
                                        self.collect_wsc_vec_to_string(&wsc_0),
                                        self.collect_wsc_vec_to_string(&wsc_1),
                                        self.collect_wsc_vec_to_string(&wsc_2),
                                        Some(self.collect_wsc_vec_to_string(&wsc_3)),
                                    )};
                                    let kvp = JSONKeyValuePair{key, value: val, context: Some(context)};
                                    kvps.push(kvp);
                                    continue
                                }
                            }
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONObject {key_value_pairs: kvps, context: Some(JSONObjectContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                }
            }
        }
    }

    fn collect_wsc_vec_to_string(&self, wsc: &[&'toks TokenSpan]) -> String {
        if wsc.is_empty() {
            return String::new();
        }

        let first = wsc.first().unwrap();
        if wsc.len() == 1 {
            self.get_tok_source(first).to_string()
        } else {
            let last = wsc.last().unwrap();
            let mut buff = String::with_capacity(last.2 - first.0);
            for span in wsc {
                let src = self.get_tok_source(span);
                buff.push_str(src);
            }
            buff
        }
    }

    fn parse_array(&mut self) -> Result<JSONValue, ParsingError> {
        use crate::tokenize::TokType::*;
        let mut values: Vec<JSONArrayValue> = Vec::new();
        let leading_wsc = self.consume_whitespace_and_comments();
        loop {
            match self.check_and_consume(vec![RightBracket]) {
                None => {
                    let val = self.parse_value()?;
                    let wsc_0 = self.consume_whitespace_and_comments();
                    match self.check_and_consume(vec![Comma]) {
                        None => {
                            let array_val_context = ArrayValueContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), None)};
                            let array_val = JSONArrayValue {value: val, context: Some(array_val_context)};
                            values.push(array_val);
                            match self.check_and_consume(vec![RightBracket]) {
                                None => {
                                    let idx = self.position();
                                    return Err(self.make_error("Expecting ']' at end of array".to_string(), idx))
                                },
                                Some(_) => {
                                    break Ok(JSONValue::JSONArray {values, context: Some(JSONArrayContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                                }
                            }
                        }
                        Some(_) => {
                            let wsc_1 = self.consume_whitespace_and_comments();
                            let array_val_context = ArrayValueContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), Some(self.collect_wsc_vec_to_string(&wsc_1)))};
                            let array_val = JSONArrayValue {value: val, context: Some(array_val_context)};
                            values.push(array_val);
                            continue
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONArray {values, context: Some(JSONArrayContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                }
            }
        }
    }

    fn parse_primary(&mut self) -> Result<JSONValue, ParsingError> {
        let span = self.advance().unwrap();
        match &span.1 {
            TokType::Integer => {Ok(JSONValue::Integer(self.get_tok_source(span).to_string()))}
            TokType::Float => {Ok(JSONValue::Float(self.get_tok_source(span).to_string()))}
            TokType::Exponent => {Ok(JSONValue::Exponent(self.get_tok_source(span).to_string()))}
            TokType::SingleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::SingleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
            },
            TokType::DoubleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::DoubleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
            },
            TokType::True => Ok(JSONValue::Bool(true)),
            TokType::False => Ok(JSONValue::Bool(false)),
            TokType::Null => Ok(JSONValue::Null),
            TokType::Infinity => Ok(JSONValue::Infinity),
            TokType::Nan => Ok(JSONValue::NaN),
            TokType::Hexadecimal => Ok(JSONValue::Hexadecimal(self.get_tok_source(span).to_string())),
            TokType::EOF => {
                match self.position() {
                    0 => Err(self.make_error("Unexpected EOF. Was expecting value.".to_string(), 0)),
                    pos => Err(self.make_error("Unexpected EOF".to_string(), pos))
                }
            },
            t => Err(self.make_error(format!("Unexpected token of type {:?}: {:?}", t, self.get_tok_source(span)), span.0))
        }
    }

    fn parse_unary(&mut self) -> Result<JSONValue, ParsingError> {
        match self.check_and_consume(vec![TokType::Plus, TokType::Minus]) {
            None => self.parse_primary(),
            Some(span) => {
                let operator = match span.1 {
                    TokType::Plus => UnaryOperator::Plus,
                    TokType::Minus => UnaryOperator::Minus,
                    _ => unreachable!("no")
                };
                let value = self.parse_unary()?;
                match value {
                    JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                    JSONValue::Unary{ .. } => {
                        return Err(self.make_error("Only one unary operator is allowed".to_string(), span.2))
                    }
                    val => {
                        return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                    }
                }
                Ok(JSONValue::Unary {operator, value: Box::new(value)})
            }
        }
    }

    fn parse_obj_or_array(&mut self) -> Result<JSONValue, ParsingError> {
        match self.check_and_consume(vec![TokType::LeftBracket, TokType::LeftBrace]) {
            None => self.parse_unary(),
            Some(span) => {
                match span.1 {
                    TokType::LeftBrace => self.parse_object(),
                    TokType::LeftBracket => self.parse_array(),
                    _ => unreachable!("no")
                }
            }
        }
    }

    fn parse_value(&mut self) -> Result<JSONValue, ParsingError> {
        self.current_depth += 1;
        if self.current_depth > self.max_depth {
            let idx = self.position();
            return Err(self.make_error(format!("max depth ({}) exceeded in nested arrays/objects. To expand the depth, use the ``with_max_depth`` constructor or enable the `unlimited_depth` feature", self.max_depth), idx))
        }
        let res = self.parse_obj_or_array();
        self.current_depth -= 1;
        res
    }

    fn parse_text(&mut self) -> Result<JSONText, ParsingError> {
        let wsc_0 = self.consume_whitespace_and_comments();
        let value = self.parse_value()?;
        let wsc_1 = self.consume_whitespace_and_comments();
        match self.advance() {
            None => {}
            Some(span) => {
                if span.1 != TokType::EOF {
                    return Err(self.make_error(format!("Unexpected {:?} token after value", span.1), span.0))
                }
            }
        }
        let context = JSONTextContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), self.collect_wsc_vec_to_string(&wsc_1))};
        Ok(JSONText { value, context: Some(context) })
    }

    fn consume_whitespace_and_comments(&mut self) -> Vec<&'toks TokenSpan> {
        let mut ret: Vec<&TokenSpan> = Vec::new();
        loop {
            match self.peek() {
                None => {return ret}
                Some(span) => {
                    match span.1 {
                        TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                            ret.push(span);
                            self.advance();
                        }
                        _ => {return ret}
                    }
                }
            }
        }
    }
}

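/// Parses a [JSONText] from tokens produced by the round-trip tokenizer.
///
/// Useful when you already have a [Tokens] value in hand; otherwise prefer
/// [from_str], which tokenizes for you.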
pub fn from_tokens<'toks, 'input>(tokens: &'toks Tokens<'input>) -> Result<JSONText, ParsingError> {
    let mut parser = JSON5Parser::new(tokens);
    parser.parse_text()
}

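/// Tokenizes and parses a JSON5 document into a [JSONText].
///
/// Because the parse tree retains all whitespace and comments, a successfully
/// parsed document round-trips through [Display] unchanged. A small
/// illustrative sketch:
///
/// ```rust
/// use json_five::rt::parser::from_str;
/// let src = "{ a: 1, /* comment */ }";
/// let doc = from_str(src).unwrap();
/// assert_eq!(format!("{}", doc), src);
/// ```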
pub fn from_str(source: &str) -> Result<JSONText, ParsingError> {
    use crate::tokenize::tokenize_rt_str;
    let maybe_toks = tokenize_rt_str(source);
    match maybe_toks {
        Err(e) => {
            Err(ParsingError{index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno})
        }
        Ok(toks) => {
            from_tokens(&toks)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::tokenize::Tokenizer;

    use super::*;

    #[test]
    fn test_fuzz_1() {
        let res = from_str("0xA18 {9");
        assert!(res.is_err());
    }

    #[cfg(not(feature = "unlimited_depth"))]
    #[test]
    fn test_deeply_nested() {
        let n = 4000;
        let mut s = String::with_capacity(n * 2);
        for _ in 0 .. n {
            s.push('[')
        }
        for _ in 0 .. n {
            s.push(']')
        }
        let res = from_str(s.as_str());
        assert!(res.is_err());
        assert!(res.unwrap_err().message.contains("max depth"))
    }

    #[test]
    fn test_foo() {
        let res = from_str("{}").unwrap();
        let expected = JSONText{context: Some(JSONTextContext{wsc: (String::new(), String::new())}), value: JSONValue::JSONObject {key_value_pairs: vec![], context: Some(JSONObjectContext{wsc: (String::new(),)})}};
        assert_eq!(res.value, expected.value)
    }

    #[test]
    fn test_illegal_identifier_escape() {
        let text = r#"{ \u0031foo: 123 }"#;
        from_str(text).unwrap_err();
    }

    #[test]
    fn test_leading_comma_array() {
        let sample = r#"[
    ,null
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_array() {
        let sample = r#"[
    ,
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_no_comma_array() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_regular_array() {
        let sample = r#"[
    true,
    false,
    null
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_array() {
        let sample = r#"[
    null,
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_array_element() {
        let sample = r#"[
    false
    /*
        true
    */
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_top_level_value() {
        let sample = r#"null
/*
    Some non-comment top-level value is needed;
    we use null above.
*/"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_in_string() {
        let sample = r#""This /* block comment */ isn't really a block comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_preceding_top_level_value() {
        let sample = r#"/*
    Some non-comment top-level value is needed;
    we use null below.
*/
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_with_asterisks() {
        let sample = r#"/**
 * This is a JavaDoc-like block comment.
 * It contains asterisks inside of it.
 * It might also be closed with multiple asterisks.
 * Like this:
 **/
true"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_array_element() {
        let sample = r#"[
    false   // true
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_top_level_value() {
        let sample = r#"null // Some non-comment top-level value is needed; we use null here."#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_in_string() {
        let sample = r#""This inline comment // isn't really an inline comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_preceding_top_level_value() {
        let sample = r#"// Some non-comment top-level value is needed; we use null below.
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_top_level_block_comment() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_top_level_inline_comment() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_unterminated_block_comment() {
        let sample = r#"true
/*
    This block comment doesn't terminate.
    There was a legitimate value before this,
    but this is still invalid JS/JSON5.
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_empty() {
        let sample = r#""#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_npm_package() {
        let sample = r#"{
  "name": "npm",
  "publishConfig": {
    "proprietary-attribs": false
  },
  "description": "A package manager for node",
  "keywords": [
    "package manager",
    "modules",
    "install",
    "package.json"
  ],
  "version": "1.1.22",
  "preferGlobal": true,
  "config": {
    "publishtest": false
  },
  "homepage": "http://npmjs.org/",
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "repository": {
    "type": "git",
    "url": "https://github.com/isaacs/npm"
  },
  "bugs": {
    "email": "npm-@googlegroups.com",
    "url": "http://github.com/isaacs/npm/issues"
  },
  "directories": {
    "doc": "./doc",
    "man": "./man",
    "lib": "./lib",
    "bin": "./bin"
  },
  "main": "./lib/npm.js",
  "bin": "./bin/npm-cli.js",
  "dependencies": {
    "semver": "~1.0.14",
    "ini": "1",
    "slide": "1",
    "abbrev": "1",
    "graceful-fs": "~1.1.1",
    "minimatch": "~0.2",
    "nopt": "1",
    "node-uuid": "~1.3",
    "proto-list": "1",
    "rimraf": "2",
    "request": "~2.9",
    "which": "1",
    "tar": "~0.1.12",
    "fstream": "~0.1.17",
    "block-stream": "*",
    "inherits": "1",
    "mkdirp": "0.3",
    "read": "0",
    "lru-cache": "1",
    "node-gyp": "~0.4.1",
    "fstream-npm": "0 >=0.0.5",
    "uid-number": "0",
    "archy": "0",
    "chownr": "0"
  },
  "bundleDependencies": [
    "slide",
    "ini",
    "semver",
    "abbrev",
    "graceful-fs",
    "minimatch",
    "nopt",
    "node-uuid",
    "rimraf",
    "request",
    "proto-list",
    "which",
    "tar",
    "fstream",
    "block-stream",
    "inherits",
    "mkdirp",
    "read",
    "lru-cache",
    "node-gyp",
    "fstream-npm",
    "uid-number",
    "archy",
    "chownr"
  ],
  "devDependencies": {
    "ronn": "https://github.com/isaacs/ronnjs/tarball/master"
  },
  "engines": {
    "node": "0.6 || 0.7 || 0.8",
    "npm": "1"
  },
  "scripts": {
    "test": "node ./test/run.js",
    "prepublish": "npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc",
    "dumpconf": "env | grep npm | sort | uniq"
  },
  "licenses": [
    {
      "type": "MIT +no-false-attribs",
      "url": "http://github.com/isaacs/npm/raw/master/LICENSE"
    }
  ]
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_npm_package2() {
        let sample = r#"{
  name: 'npm',
  publishConfig: {
    'proprietary-attribs': false,
  },
  description: 'A package manager for node',
  keywords: [
    'package manager',
    'modules',
    'install',
    'package.json',
  ],
  version: '1.1.22',
  preferGlobal: true,
  config: {
    publishtest: false,
  },
  homepage: 'http://npmjs.org/',
  author: 'Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)',
  repository: {
    type: 'git',
    url: 'https://github.com/isaacs/npm',
  },
  bugs: {
    email: 'npm-@googlegroups.com',
    url: 'http://github.com/isaacs/npm/issues',
  },
  directories: {
    doc: './doc',
    man: './man',
    lib: './lib',
    bin: './bin',
  },
  main: './lib/npm.js',
  bin: './bin/npm-cli.js',
  dependencies: {
    semver: '~1.0.14',
    ini: '1',
    slide: '1',
    abbrev: '1',
    'graceful-fs': '~1.1.1',
    minimatch: '~0.2',
    nopt: '1',
    'node-uuid': '~1.3',
    'proto-list': '1',
    rimraf: '2',
    request: '~2.9',
    which: '1',
    tar: '~0.1.12',
    fstream: '~0.1.17',
    'block-stream': '*',
    inherits: '1',
    mkdirp: '0.3',
    read: '0',
    'lru-cache': '1',
    'node-gyp': '~0.4.1',
    'fstream-npm': '0 >=0.0.5',
    'uid-number': '0',
    archy: '0',
    chownr: '0',
  },
  bundleDependencies: [
    'slide',
    'ini',
    'semver',
    'abbrev',
    'graceful-fs',
    'minimatch',
    'nopt',
    'node-uuid',
    'rimraf',
    'request',
    'proto-list',
    'which',
    'tar',
    'fstream',
    'block-stream',
    'inherits',
    'mkdirp',
    'read',
    'lru-cache',
    'node-gyp',
    'fstream-npm',
    'uid-number',
    'archy',
    'chownr',
  ],
  devDependencies: {
    ronn: 'https://github.com/isaacs/ronnjs/tarball/master',
  },
  engines: {
    node: '0.6 || 0.7 || 0.8',
    npm: '1',
  },
  scripts: {
    test: 'node ./test/run.js',
    prepublish: 'npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc',
    dumpconf: 'env | grep npm | sort | uniq',
  },
  licenses: [
    {
      type: 'MIT +no-false-attribs',
      url: 'http://github.com/isaacs/npm/raw/master/LICENSE',
    },
  ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_readme_example() {
        let sample = r#"{
    foo: 'bar',
    while: true,

    this: 'is a \
multi-line string',

    // this is an inline comment
    here: 'is another', // inline comment

    /* this is a block comment
       that continues on another line */

    hex: 0xDEADbeef,
    half: .5,
    delta: +10,
    to: Infinity,   // and beyond!

    finally: 'a trailing comma',
    oh: [
        "we shouldn't forget",
        'arrays can have',
        'trailing commas too',
    ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_valid_whitespace() {
        let sample = r#"{
    // An invalid form feed character (\x0c) has been entered before this comment.
    // Be careful not to delete it.
  "a": true
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_cr() {
        let sample = r#"{
    // This comment is terminated with `\r`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_crlf() {
        let sample = r#"{
    // This comment is terminated with `\r\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_lf() {
        let sample = r#"{
    // This comment is terminated with `\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_cr() {
        let sample = r#"{
    // the following string contains an escaped `\r`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_crlf() {
        let sample = r#"{
    // the following string contains an escaped `\r\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_lf() {
        let sample = r#"{
    // the following string contains an escaped `\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_decimal_point() {
        let sample = r#".5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_zero() {
        let sample = r#"0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point_with_integer_exponent() {
        let sample = r#"5.e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point() {
        let sample = r#"5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_with_integer_exponent() {
        let sample = r#"1.2e3
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float() {
        let sample = r#"1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_empty() {
        let sample = r#"0x
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_hexadecimal_lowercase_letter() {
        let sample = r#"0xc8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_uppercase_x() {
        let sample = r#"0XC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_with_integer_exponent() {
        let sample = r#"0xc8e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal() {
        let sample = r#"0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_infinity() {
        let sample = r#"Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_float_exponent() {
        let sample = r#"1e2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_hexadecimal_exponent() {
        let sample = r#"1e0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_integer_exponent() {
        let sample = r#"2e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_float_exponent() {
        let sample = r#"1e-2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_negative_hexadecimal_exponent() {
        let sample = r#"1e-0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_negative_integer_exponent() {
        let sample = r#"2e-23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_zero_integer_exponent() {
        let sample = r#"5e-0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_float_exponent() {
        let sample = r#"1e+2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_hexadecimal_exponent() {
        let sample = r#"1e+0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_integer_exponent() {
        let sample = r#"1e+2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_zero_integer_exponent() {
        let sample = r#"5e+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_zero_integer_exponent() {
        let sample = r#"5e0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer() {
        let sample = r#"15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_lone_decimal_point() {
        let sample = r#".
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

1621    #[test]
1622    fn test_nan() {
1623        let sample = r#"NaN
1624"#;
1625        let _res = from_str(sample).unwrap();
1626    }
1627
1628
1629
1630    #[test]
1631    fn test_negative_float_leading_decimal_point() {
1632        let sample = r#"-.5
1633"#;
1634        let _res = from_str(sample).unwrap();
1635    }
1636
1637
1638
1639    #[test]
1640    fn test_negative_float_leading_zero() {
1641        let sample = r#"-0.5
1642"#;
1643        let _res = from_str(sample).unwrap();
1644    }
1645
1646
1647
1648    #[test]
1649    fn test_negative_float_trailing_decimal_point() {
1650        let sample = r#"-5.
1651"#;
1652        let _res = from_str(sample).unwrap();
1653    }
1654
1655
1656
1657    #[test]
1658    fn test_negative_float() {
1659        let sample = r#"-1.2
1660"#;
1661        let _res = from_str(sample).unwrap();
1662    }
1663
1664
1665
1666    #[test]
1667    fn test_negative_hexadecimal() {
1668        let sample = r#"-0xC8
1669"#;
1670        let _res = from_str(sample).unwrap();
1671    }
1672
1673
1674
1675    #[test]
1676    fn test_negative_infinity() {
1677        let sample = r#"-Infinity
1678"#;
1679        let _res = from_str(sample).unwrap();
1680    }
1681
1682
1683
1684    #[test]
1685    fn test_negative_integer() {
1686        let sample = r#"-15
1687"#;
1688        let _res = from_str(sample).unwrap();
1689    }
1690
1691
1692
1693    #[test]
1694    fn test_negative_noctal() {
1695        let sample = r#"-098
1696"#;
1697        let maybe_tokens = Tokenizer::new(sample).tokenize();
1698        if maybe_tokens.is_err() {
1699            return
1700        } else {
1701            let toks = maybe_tokens.unwrap();
1702            let res = from_tokens(&toks);
1703            assert!(res.is_err());
1704        }
1705    }
1706
1707
1708    #[test]
1709    fn test_negative_octal() {
1710        let sample = r#"-0123
1711"#;
1712        let maybe_tokens = Tokenizer::new(sample).tokenize();
1713        if maybe_tokens.is_err() {
1714            return
1715        } else {
1716            let toks = maybe_tokens.unwrap();
1717            let res = from_tokens(&toks);
1718            assert!(res.is_err());
1719        }
1720    }
1721
1722
1723    #[test]
1724    fn test_negative_zero_float_leading_decimal_point() {
1725        let sample = r#"-.0
1726"#;
1727        let _res = from_str(sample).unwrap();
1728    }
1729
1730
1731
1732    #[test]
1733    fn test_negative_zero_float_trailing_decimal_point() {
1734        let sample = r#"-0.
1735"#;
1736        let _res = from_str(sample).unwrap();
1737    }
1738
1739
1740
1741    #[test]
1742    fn test_negative_zero_float() {
1743        let sample = r#"-0.0
1744"#;
1745        let _res = from_str(sample).unwrap();
1746    }
1747
1748
1749
1750    #[test]
1751    fn test_negative_zero_hexadecimal() {
1752        let sample = r#"-0x0
1753"#;
1754        let _res = from_str(sample).unwrap();
1755    }
1756
1757
1758
1759    #[test]
1760    fn test_negative_zero_integer() {
1761        let sample = r#"-0
1762"#;
1763        let _res = from_str(sample).unwrap();
1764    }
1765
1766
1767
1768    #[test]
1769    fn test_negative_zero_octal() {
1770        let sample = r#"-00
1771"#;
1772        let maybe_tokens = Tokenizer::new(sample).tokenize();
1773        if maybe_tokens.is_err() {
1774            return
1775        } else {
1776            let toks = maybe_tokens.unwrap();
1777            let res = from_tokens(&toks);
1778            assert!(res.is_err());
1779        }
1780    }
1781
1782
1783    #[test]
1784    fn test_noctal_with_leading_octal_digit() {
1785        let sample = r#"0780
1786"#;
1787        let maybe_tokens = Tokenizer::new(sample).tokenize();
1788        if maybe_tokens.is_err() {
1789            return
1790        } else {
1791            let toks = maybe_tokens.unwrap();
1792            let res = from_tokens(&toks);
1793            assert!(res.is_err());
1794        }
1795    }
1796
1797
1798    #[test]
1799    fn test_noctal() {
1800        let sample = r#"080
1801"#;
1802        let maybe_tokens = Tokenizer::new(sample).tokenize();
1803        if maybe_tokens.is_err() {
1804            return
1805        } else {
1806            let toks = maybe_tokens.unwrap();
1807            let res = from_tokens(&toks);
1808            assert!(res.is_err());
1809        }
1810    }
1811
1812
1813    #[test]
1814    fn test_octal() {
1815        let sample = r#"010
1816"#;
1817        let maybe_tokens = Tokenizer::new(sample).tokenize();
1818        if maybe_tokens.is_err() {
1819            return
1820        } else {
1821            let toks = maybe_tokens.unwrap();
1822            let res = from_tokens(&toks);
1823            assert!(res.is_err());
1824        }
1825    }
1826
1827
    #[test]
    fn test_positive_float_leading_decimal_point() {
        let sample = r#"+.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_leading_zero() {
        let sample = r#"+0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_trailing_decimal_point() {
        let sample = r#"+5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float() {
        let sample = r#"+1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_hexadecimal() {
        let sample = r#"+0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_infinity() {
        let sample = r#"+Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_integer() {
        let sample = r#"+15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_noctal() {
        let sample = r#"+098
"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_positive_octal() {
        let sample = r#"+0123
"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_positive_zero_float_leading_decimal_point() {
        let sample = r#"+.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float_trailing_decimal_point() {
        let sample = r#"+0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float() {
        let sample = r#"+0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_hexadecimal() {
        let sample = r#"+0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_integer() {
        let sample = r#"+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_octal() {
        let sample = r#"+00
"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_zero_float_leading_decimal_point() {
        let sample = r#".0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float_trailing_decimal_point() {
        let sample = r#"0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float() {
        let sample = r#"0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_hexadecimal() {
        let sample = r#"0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer_with_integer_exponent() {
        let sample = r#"0e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer() {
        let sample = r#"0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_octal() {
        let sample = r#"00
"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_duplicate_keys() {
        // JSON5, like ECMAScript object literals, permits duplicate keys,
        // so this document must parse successfully.
        let sample = r#"{
    "a": true,
    "a": false
}
"#;
        let _res = from_str(sample).unwrap();
    }
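
    // Sketch only: a stronger follow-up would check that the round-trip
    // model keeps both pairs rather than deduplicating them. This assumes
    // `from_str` returns a `JSONText` whose root value lives in a `value`
    // field; adjust the access if the actual return type differs.
    #[allow(dead_code)]
    fn duplicate_keys_are_preserved_sketch() {
        let parsed = from_str(r#"{"a": true, "a": false}"#).unwrap();
        if let JSONValue::JSONObject { key_value_pairs, .. } = parsed.value {
            // Both key-value pairs survive in the parse model.
            assert_eq!(key_value_pairs.len(), 2);
        }
    }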

    #[test]
    fn test_empty_object() {
        let sample = r#"{}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_illegal_unquoted_key_number() {
        // Unquoted member names must be valid ECMAScript identifiers; one
        // that starts with a digit is illegal.
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_illegal_unquoted_key_symbol() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_leading_comma_object() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_object() {
        let sample = r#"{
    ,
}"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_no_comma_object() {
        let sample = r#"{
    "foo": "bar"
    "hello": "world"
}"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    #[test]
    fn test_reserved_unquoted_key() {
        // JSON5 member names are ECMAScript IdentifierNames, so reserved
        // words such as `while` are allowed unquoted.
        let sample = r#"{
    while: true
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_key() {
        let sample = r#"{
    'hello': "world"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_object() {
        let sample = r#"{
    "foo": "bar",
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unquoted_keys() {
        let sample = r#"{
    hello: "world",
    _: "underscore",
    $: "dollar sign",
    one1: "numerals",
    _$_: "multiple symbols",
    $_$hello123world_$_: "mixed"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_single_quoted_string() {
        let sample = r#"'I can\'t wait'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_multi_line_string() {
        // A backslash immediately before the line terminator continues the
        // string onto the next line.
        let sample = r#"'hello\
 world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_string() {
        let sample = r#"'hello world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unescaped_multi_line_string() {
        // Without the backslash continuation, a raw newline inside a string
        // is illegal.
        let sample = r#""foo
bar"
"#;
        if let Ok(toks) = Tokenizer::new(sample).tokenize() {
            assert!(from_tokens(&toks).is_err());
        }
    }

    // Start error tests: each invalid document below is checked once per
    // reported error coordinate (lineno, char_index, colno).

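    // Sketch only: the three-way repetition below could be collapsed into a
    // single helper. `assert_error_location` is hypothetical (not part of
    // the crate); it assumes the tokenizer's and the parser's error types
    // both expose `lineno`, `colno`, and `char_index` fields, which the
    // tests below already rely on.
    #[allow(dead_code)]
    fn assert_error_location(sample: &str, lineno: usize, colno: usize, char_index: usize) {
        match Tokenizer::new(sample).tokenize() {
            Err(err) => {
                assert_eq!((err.lineno, err.colno, err.char_index),
                           (lineno, colno, char_index), "{:?}", err);
            }
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!((err.lineno, err.colno, err.char_index),
                           (lineno, colno, char_index), "{:?}", err);
            }
        }
    }
    // Example: assert_error_location("[\n    true\n    false\n]", 3, 5, 15);
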
    #[test]
    fn test_error_no_comma_array_lineno() {
        let sample = r#"[
    true
    false
]"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 3_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 3_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_no_comma_array_index() {
        let sample = r#"[
    true
    false
]"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 15_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 15_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_no_comma_array_colno() {
        let sample = r#"[
    true
    false
]"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 5_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 5_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_block_comment_lineno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 4_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 4_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_block_comment_index() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 76_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 76_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_block_comment_colno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 3_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 3_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_lineno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 1_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 1_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_index() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 65_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 65_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_colno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 66_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 66_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_lineno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 2_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 2_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_index() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 6_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 6_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_colno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 5_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 5_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_lineno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 2_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 2_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_index() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 11_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 11_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_colno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 10_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 10_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_leading_comma_object_lineno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 2_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 2_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_leading_comma_object_index() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 6_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 6_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_leading_comma_object_colno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 5_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 5_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_lineno() {
        let sample = r#""foo
bar"
"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.lineno, 1_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.lineno, 1_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_index() {
        let sample = r#""foo
bar"
"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.char_index, 4_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.char_index, 4_usize, "{:?}", err);
            }
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_colno() {
        let sample = r#""foo
bar"
"#;
        match Tokenizer::new(sample).tokenize() {
            Err(err) => assert_eq!(err.colno, 5_usize, "{:?}", err),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                assert_eq!(err.colno, 5_usize, "{:?}", err);
            }
        }
    }
}