json_five/rt/parser.rs

use std::fmt::{Display, Formatter};
use std::iter::Peekable;
use std::slice::Iter;
use crate::utils::get_line_col_char;
use crate::tokenize::{TokType, TokenSpan, Tokens};

#[derive(PartialEq, Debug, Clone)]
pub enum UnaryOperator {
    Plus,
    Minus,
}

impl Display for UnaryOperator {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            UnaryOperator::Plus => {write!(f, "+")}
            UnaryOperator::Minus => {write!(f, "-")}
        }
    }
}

type Wsc = String; // Whitespace and comment tokens

// {wsc.0} value {wsc.1}

/// Represents contextual whitespace/comments for the associated JSONText
#[derive(PartialEq, Debug, Clone)]
pub struct JSONTextContext {
    /// the whitespace and/or comments surrounding the Text production
    ///
    /// In other words: `{ wsc.0 } value { wsc.1 }`
    pub wsc: (Wsc, Wsc)
}

// LBRACE {wsc} [ key_value_pairs ] RBRACE
//                                 ^ any whitespace that would go here would be part of the KVP

#[derive(PartialEq, Debug, Clone)]
pub struct JSONObjectContext {
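    /// Holds the whitespace/comments that follow the opening brace of a [JSONValue::JSONObject]
    ///
    /// ```LBRACE {wsc.0} [ key_value_pairs ] RBRACE```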
    pub wsc: (Wsc,)
}


// key {wsc.0} COLON {wsc.1} value {wsc.2} [ COMMA {wsc.3} ] [ next_kvp ]
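/// Holds the whitespace/comments around a key-value pair, following the grammar
/// sketched in the comment above.
///
/// A minimal illustration (hypothetical input) of how the slots are populated:
///
/// ```rust
/// use json_five::rt::parser::{from_str, JSONValue};
///
/// let doc = from_str("{a : 1}").unwrap();
/// if let JSONValue::JSONObject { key_value_pairs, .. } = doc.value {
///     let ctx = key_value_pairs[0].context.clone().unwrap();
///     assert_eq!(ctx.wsc.0, " ");  // between the key and the colon
///     assert_eq!(ctx.wsc.1, " ");  // between the colon and the value
///     assert_eq!(ctx.wsc.2, "");   // after the value
///     assert_eq!(ctx.wsc.3, None); // no comma, so no trailing wsc
/// }
/// ```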
#[derive(PartialEq, Debug, Clone)]
pub struct KeyValuePairContext {
    pub wsc: (Wsc, Wsc, Wsc, Option<Wsc>),
    //                       ^ Some() here represents the presence of a comma and its subsequent wsc
}


#[derive(PartialEq, Debug, Clone)]
pub struct JSONArrayContext {
    /// Holds the whitespace/comments that follow the opening bracket of a [JSONValue::JSONArray]
    ///
    /// ```LBRACKET {wsc.0} [ array_values ] RBRACKET```
    pub wsc: (Wsc,)
}

#[derive(PartialEq, Debug, Clone)]
pub struct ArrayValueContext {
    /// the whitespace and/or comments that may occur after the value and (optionally) after the comma following the value.
    /// In other words, roughly:
    /// ```ebnf
    /// value { wsc.0 } [ COMMA { wsc.1 } [ next_value ]]
    /// ```
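    ///
    /// A minimal illustration (hypothetical input):
    ///
    /// ```rust
    /// use json_five::rt::parser::{from_str, JSONValue};
    ///
    /// let doc = from_str("[1 , 2]").unwrap();
    /// if let JSONValue::JSONArray { values, .. } = doc.value {
    ///     let ctx = values[0].context.clone().unwrap();
    ///     assert_eq!(ctx.wsc.0, " ");                   // wsc after the value `1`
    ///     assert_eq!(ctx.wsc.1, Some(" ".to_string())); // wsc after the comma
    /// }
    /// ```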
    pub wsc: (Wsc, Option<Wsc>),
    //             ^ Some() here represents the presence of a comma and its subsequent wsc
}

/// Represents the ['JSON5Member' production](https://spec.json5.org/#prod-JSON5Member).
///
/// In other words, it's the key-value pairs of a [JSONValue::JSONObject]
#[derive(PartialEq, Debug, Clone)]
pub struct JSONKeyValuePair {

    /// The key. Although typed as a `JSONValue`, in practice it is limited to [JSONValue::Identifier],
    /// [JSONValue::DoubleQuotedString] or [JSONValue::SingleQuotedString]
    pub key: JSONValue,
    pub value: JSONValue,

    /// the whitespace/comments associated with this pair, if any
    pub context: Option<KeyValuePairContext>
}

/// Represents a value in a [JSONValue::JSONArray]
#[derive(PartialEq, Debug, Clone)]
pub struct JSONArrayValue {
    pub value: JSONValue,
    pub context: Option<ArrayValueContext>
}

/// Represents a JSON5 value
///
/// Where these enum members have `String`s, they represent the value as it was tokenized, without any modifications (that
/// is, for example, without any escape sequences un-escaped). The single- and double-quoted `String`s do not include the surrounding
/// quote characters.
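///
/// A minimal illustration of the no-surrounding-quotes convention (hypothetical input):
///
/// ```rust
/// use json_five::rt::parser::{from_str, JSONValue};
///
/// let doc = from_str(r#""foo""#).unwrap();
/// // The stored String omits the surrounding double quotes.
/// assert_eq!(doc.value, JSONValue::DoubleQuotedString("foo".to_string()));
/// ```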
#[derive(PartialEq, Debug, Clone)]
pub enum JSONValue {
    /// Represents a JSON5 Object
    JSONObject {

        /// The key-value pairs of the object
        key_value_pairs: Vec<JSONKeyValuePair>,

        context: Option<JSONObjectContext>
    },

    /// Represents a JSON5 Array.
    JSONArray {

        values: Vec<JSONArrayValue>,

        context: Option<JSONArrayContext>
    },

    /// Represents an Integer value.
    /// The String value is a literal, as it might appear in JSON5 source
    Integer(String),

    /// Represents a float value (not including NaN or Infinity; use [JSONValue::NaN] or [JSONValue::Infinity])
    /// The String value is a literal as it might appear in JSON5 source
    Float(String),

    /// Represents an exponent value
    /// The String value is a literal as it might appear in JSON5 source
    Exponent(String),


    Null,
    Infinity,
    NaN,

    /// Represents a hexadecimal value
    /// The String value is a literal as it might appear in JSON5 source, e.g. `String::from("0xDEADBEEF")`
    Hexadecimal(String),
    Bool(bool),

    /// Double-quoted string, as it appears in source.
    /// The String value does not include surrounding quotes
    DoubleQuotedString(String),

    /// Single-quoted string, as it appears in source.
    /// The String value does not include surrounding quotes
    SingleQuotedString(String),

    /// Represents a unary production
    Unary { operator: UnaryOperator, value: Box<JSONValue> },

    /// Represents unquoted identifiers.
    ///
    /// Uniquely, a [JSONValue::Identifier] can only be used in dictionary keys.
    Identifier(String), // XXX: for keys only!
}


/// Represents the top-level Text production of a JSON5 document.
///
/// ```rust
/// use json_five::rt::parser::from_str;
/// use json_five::rt::parser::JSONValue;
///
/// let doc = from_str(" 'foo'\n").unwrap();
/// let context = doc.context.unwrap();
///
/// assert_eq!(&context.wsc.0, " ");
/// assert_eq!(doc.value, JSONValue::SingleQuotedString("foo".to_string()));
/// assert_eq!(&context.wsc.1, "\n");
/// ```
#[derive(PartialEq, Debug)]
pub struct JSONText {

    /// Can be any [JSONValue] except for [JSONValue::Identifier] (which is reserved for keys only)
    pub value: JSONValue,

    /// Contextual whitespace
    pub context: Option<JSONTextContext>
}


impl JSONKeyValuePair {
    // key {wsc.0} COLON {wsc.1} value {wsc.2} [ COMMA {wsc.3} ] [ next_kvp ]
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                format!("{}:{}", self.key.to_string(), self.value.to_string())
            }
            Some(ctx) => {
                match &ctx.wsc.3 {
                    None => {
                        format!("{}{}:{}{}{}", self.key.to_string(), ctx.wsc.0, ctx.wsc.1, self.value.to_string(), ctx.wsc.2)
                    }
                    Some(trailing_wsc) => {
                        format!("{}{}:{}{}{},{}", self.key.to_string(), ctx.wsc.0, ctx.wsc.1, self.value.to_string(), ctx.wsc.2, trailing_wsc)
                    }
                }
            }
        }
    }
}


impl JSONText {
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                self.value.to_string()
            },
            Some(ctx) => {
                format!("{}{}{}", ctx.wsc.0, self.value.to_string(), ctx.wsc.1)
            }
        }
    }
}

// value {wsc.0} [ COMMA {wsc.1} ] [ next_value ]
impl JSONArrayValue {
    fn to_string(&self) -> String {
        match &self.context {
            None => {
                self.value.to_string()
            }
            Some(ctx) => {
                match &ctx.wsc.1 {
                    None => {
                        format!("{}{}", self.value.to_string(), ctx.wsc.0)
                    }
                    Some(trailing_whitespace) => {
                        format!("{}{},{}", self.value.to_string(), ctx.wsc.0, trailing_whitespace)
                    }
                }
            }
        }
    }
}

impl JSONValue {
    fn to_string(&self) -> String {
        match self {
            JSONValue::JSONObject { key_value_pairs, context } => {
                match context {
                    None => {
                        let mut s = String::from("{");
                        for kvp in key_value_pairs {
                            s.push_str(kvp.to_string().as_str())
                        }
                        s.push('}');
                        s
                    }
                    Some(ctx) => {
                        let mut s = format!("{{{}", ctx.wsc.0);
                        for kvp in key_value_pairs {
                            s.push_str(kvp.to_string().as_str());
                        }
                        s.push('}');
                        s
                    }
                }
            }
            JSONValue::JSONArray { values, context } => {
                match context {
                    None => {
                        let mut s = String::from('[');
                        for array_value in values {
                            s.push_str(array_value.to_string().as_str());
                        }
                        s.push(']');
                        s
                    }
                    Some(ctx) => {
                        let mut s = format!("[{}", ctx.wsc.0);
                        for array_value in values {
                            s.push_str(array_value.to_string().as_str());
                        }
                        s.push(']');
                        s
                    }
                }
            }
            JSONValue::Integer(s) => {s.clone()}
            JSONValue::Float(s) => {s.clone()}
            JSONValue::Exponent(s) => {s.clone()}
            JSONValue::Null => {String::from("null")}
            JSONValue::Infinity => {String::from("Infinity")}
            JSONValue::NaN => {String::from("NaN")}
            JSONValue::Hexadecimal(s) => {s.clone()}
            JSONValue::Bool(b) => b.to_string(),
            // Re-add the surrounding quotes that were stripped during parsing, so
            // that the Display output round-trips back to valid JSON5 source.
            JSONValue::DoubleQuotedString(s) => {format!("\"{}\"", s)}
            JSONValue::SingleQuotedString(s) => {format!("'{}'", s)}
            JSONValue::Unary { operator, value} => {
                format!("{}{}", operator, value)
            }
            JSONValue::Identifier(s) => {s.clone()}
        }
    }
}


impl Display for JSONValue {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let res = self.to_string();
        write!(f, "{}", res)
    }
}


impl Display for JSONText {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.to_string())
    }
}

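/// The error type produced when parsing fails, carrying both byte and character
/// offsets into the source, plus human-readable line/column information.
///
/// A minimal sketch of inspecting one (hypothetical input):
///
/// ```rust
/// use json_five::rt::parser::from_str;
///
/// let err = from_str("[1,").unwrap_err();
/// assert!(err.to_string().starts_with("ParsingError:"));
/// ```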
#[derive(Debug, PartialEq)]
pub struct ParsingError {
    pub index: usize, // byte offset
    pub message: String,
    pub lineno: usize,
    pub colno: usize,
    pub char_index: usize, // character offset
}


impl Display for ParsingError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParsingError: {}: line {} column {} (char {})", self.message, self.lineno, self.colno, self.char_index)
    }
}

struct JSON5Parser<'toks, 'input> {
    source: &'input str,
    source_tokens: Peekable<Iter<'toks, TokenSpan>>,
    lookahead: Option<&'toks TokenSpan>,
}

impl<'toks, 'input> JSON5Parser<'toks, 'input> {
    fn new(tokens: &'toks Tokens<'input>) -> Self {
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source }
    }

    fn advance(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.next() {
            None => {
                self.lookahead = None;
                None
            }
            Some(span) => {
                self.lookahead = Some(span);
                self.lookahead
            }
        }
    }

    #[inline]
    fn get_tok_source(&self, span: &'toks TokenSpan) -> &'input str {
        &self.source[span.0 .. span.2]
    }


    fn peek(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.peek() {
            None => None,
            Some(span) => {
                Some(span)
            }
        }
    }


    fn position(&mut self) -> usize {
        match self.peek() {
            None => {
                match self.lookahead {
                    None => 0, // XXX: or might this actually be the end of input?
                    Some(span) => {span.2}
                }
            }
            Some(span) => {
                span.0
            }
        }
    }

    fn make_error(&self, message: String, index: usize) -> ParsingError {
        let (lineno, colno, char_index) = get_line_col_char(self.source, index);
        ParsingError {
            index,
            message,
            lineno,
            colno,
            char_index,
        }
    }

    fn check_and_consume(&mut self, types: Vec<TokType>) -> Option<&'toks TokenSpan> {
        let next_tok = self.peek()?;
        for toktype in types {
            if next_tok.1 == toktype {
                return self.advance();
            }
        }
        None
    }

    #[inline]
    fn check_and_consume_with_source(&mut self, types: Vec<TokType>) -> Option<(&'toks TokenSpan, &'input str)> {
        let tok = self.check_and_consume(types)?;
        let source = self.get_tok_source(tok);
        Some((tok, source))
    }

    fn parse_key(&mut self) -> Result<JSONValue, ParsingError> {
        // This is a terminal point
        // We either get a valid key or we bail.
        match self.check_and_consume_with_source(vec![TokType::Name, TokType::DoubleQuotedString, TokType::SingleQuotedString]) {
            None => {
                match self.peek() {
                    None => {
                        let idx = self.position();
                        Err(self.make_error("Unexpected EOF. Was expecting MemberName at".to_string(), idx))
                    }
                    Some(span) => {
                        let src = self.get_tok_source(span);
                        Err(self.make_error(format!("Invalid token for unquoted key ({}, {:?}) at", span.2, src), span.0))
                    }
                }
            },
            Some((span, lexeme)) => {
                match span.1 {
                    TokType::DoubleQuotedString => {
                        Ok(JSONValue::DoubleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
                    },
                    TokType::SingleQuotedString => {
                        Ok(JSONValue::SingleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
                    }
                    TokType::Name => {
                        Ok(JSONValue::Identifier(lexeme.to_string()))
                    }
                    _ => unreachable!("Programming error. Please report this as a bug")
                }
            }
        }
    }

    fn parse_object(&mut self) -> Result<JSONValue, ParsingError> {
        use crate::tokenize::TokType::*;
        let mut kvps: Vec<JSONKeyValuePair> = Vec::new();
        let leading_wsc = self.consume_whitespace_and_comments();
        loop {
            match self.check_and_consume(vec![RightBrace]) {
                None => {
                    let key = self.parse_key()?;
                    let wsc_0 = self.consume_whitespace_and_comments();

                    match self.check_and_consume(vec![Colon]) {
                        None => {
                            let idx = self.position();
                            return Err(self.make_error("Expecting ':' delimiter".to_string(), idx))
                        }
                        Some(_) => {
                            let wsc_1 = self.consume_whitespace_and_comments();
                            let val = self.parse_value()?;
                            let wsc_2 = self.consume_whitespace_and_comments();
                            match self.check_and_consume(vec![Comma]) {
                                None => {
                                    let context = KeyValuePairContext{wsc: (
                                        self.collect_wsc_vec_to_string(&wsc_0),
                                        self.collect_wsc_vec_to_string(&wsc_1),
                                        self.collect_wsc_vec_to_string(&wsc_2),
                                        None
                                    )};
                                    let kvp = JSONKeyValuePair{key, value: val, context: Some(context)};
                                    kvps.push(kvp);
                                    match self.check_and_consume(vec![RightBrace]) {
                                        None => {
                                            let idx = self.position();
                                            return Err(self.make_error("Expecting '}' at end of object".to_string(), idx))
                                        },
                                        Some(_) => {
                                            break Ok(JSONValue::JSONObject {key_value_pairs: kvps, context: Some(JSONObjectContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                                        }
                                    }
                                }
                                Some(_) => {
                                    let wsc_3 = self.consume_whitespace_and_comments();
                                    let context = KeyValuePairContext{wsc: (
                                        self.collect_wsc_vec_to_string(&wsc_0),
                                        self.collect_wsc_vec_to_string(&wsc_1),
                                        self.collect_wsc_vec_to_string(&wsc_2),
                                        Some(self.collect_wsc_vec_to_string(&wsc_3)),
                                    )};
                                    let kvp = JSONKeyValuePair{key, value: val, context: Some(context)};
                                    kvps.push(kvp);
                                    continue
                                }
                            }
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONObject {key_value_pairs: kvps, context: Some(JSONObjectContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                }
            }
        }
    }

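    /// Concatenates a run of whitespace/comment token spans back into a single
    /// String, as it appeared in the source.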
    fn collect_wsc_vec_to_string(&self, wsc: &Vec<&'toks TokenSpan>) -> String {
        if wsc.is_empty() {
            return String::new();
        }

        let first = wsc.first().unwrap();
        if wsc.len() == 1 {
            self.get_tok_source(first).to_string()
        } else {
            let last = wsc.last().unwrap();
            let mut buff = String::with_capacity(last.2 - first.0);
            for span in wsc {
                let src = self.get_tok_source(span);
                buff.push_str(src);
            }
            buff
        }
    }

    fn parse_array(&mut self) -> Result<JSONValue, ParsingError> {
        use crate::tokenize::TokType::*;
        let mut values: Vec<JSONArrayValue> = Vec::new();
        let leading_wsc = self.consume_whitespace_and_comments();
        loop {
            match self.check_and_consume(vec![RightBracket]) {
                None => {
                    let val = self.parse_value()?;
                    let wsc_0 = self.consume_whitespace_and_comments();
                    match self.check_and_consume(vec![Comma]) {
                        None => {
                            let array_val_context = ArrayValueContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), None)};
                            let array_val = JSONArrayValue {value: val, context: Some(array_val_context)};
                            values.push(array_val);
                            match self.check_and_consume(vec![RightBracket]) {
                                None => {
                                    let idx = self.position();
                                    return Err(self.make_error("Expecting ']' at end of array".to_string(), idx))
                                },
                                Some(_) => {
                                    break Ok(JSONValue::JSONArray {values, context: Some(JSONArrayContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                                }
                            }
                        }
                        Some(_) => {
                            let wsc_1 = self.consume_whitespace_and_comments();
                            let array_val_context = ArrayValueContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), Some(self.collect_wsc_vec_to_string(&wsc_1)))};
                            let array_val = JSONArrayValue {value: val, context: Some(array_val_context)};
                            values.push(array_val);
                            continue
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONArray {values, context: Some(JSONArrayContext{wsc: (self.collect_wsc_vec_to_string(&leading_wsc), )})})
                }
            }
        }
    }

    fn parse_primary(&mut self) -> Result<JSONValue, ParsingError> {
        let span = self.advance().unwrap();
        match &span.1 {
            TokType::Integer => {Ok(JSONValue::Integer(self.get_tok_source(span).to_string()))}
            TokType::Float => {Ok(JSONValue::Float(self.get_tok_source(span).to_string()))}
            TokType::Exponent => {Ok(JSONValue::Exponent(self.get_tok_source(span).to_string()))}
            TokType::SingleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::SingleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
            },
            TokType::DoubleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::DoubleQuotedString(lexeme[1..lexeme.len() - 1].to_string()))
            },
            TokType::True => Ok(JSONValue::Bool(true)),
            TokType::False => Ok(JSONValue::Bool(false)),
            TokType::Null => Ok(JSONValue::Null),
            TokType::Infinity => Ok(JSONValue::Infinity),
            TokType::Nan => Ok(JSONValue::NaN),
            TokType::Hexadecimal => Ok(JSONValue::Hexadecimal(self.get_tok_source(span).to_string())),
            TokType::EOF => {
                match self.position() {
                    0 => Err(self.make_error("Unexpected EOF. Was expecting value.".to_string(), 0)),
                    pos => Err(self.make_error("Unexpected EOF".to_string(), pos))
                }
            },
            t => Err(self.make_error(format!("Unexpected token of type {:?}: {:?}", t, self.get_tok_source(span)), span.0))
        }
    }

    fn parse_unary(&mut self) -> Result<JSONValue, ParsingError> {
        match self.check_and_consume(vec![TokType::Plus, TokType::Minus]) {
            None => self.parse_primary(),
            Some(span) => {
                match span.1 {
                    TokType::Plus => {
                        let value = self.parse_unary()?;
                        match value {
                            JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Unary { .. } | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                            val => {
                                return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                            }
                        }

                        Ok(JSONValue::Unary {operator: UnaryOperator::Plus, value: Box::new(value)})
                    }
                    TokType::Minus => {
                        let value = self.parse_unary()?;
                        match value {
                            JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Unary { .. } | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                            val => {
                                return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                            }
                        }
                        Ok(JSONValue::Unary {operator: UnaryOperator::Minus, value: Box::new(value)})
                    }
                    _ => unreachable!("check_and_consume returned a token type that was not requested")
                }
            }
        }
    }

    fn parse_obj_or_array(&mut self) -> Result<JSONValue, ParsingError> {
        match self.check_and_consume(vec![TokType::LeftBracket, TokType::LeftBrace]) {
            None => self.parse_unary(),
            Some(span) => {
                match span.1 {
                    TokType::LeftBrace => self.parse_object(),
                    TokType::LeftBracket => self.parse_array(),
                    _ => unreachable!("check_and_consume returned a token type that was not requested")
                }
            }
        }
    }


    fn parse_value(&mut self) -> Result<JSONValue, ParsingError> {
        self.parse_obj_or_array()
    }

    fn parse_text(&mut self) -> Result<JSONText, ParsingError> {
        let wsc_0 = self.consume_whitespace_and_comments();
        let value = self.parse_value()?;
        let wsc_1 = self.consume_whitespace_and_comments();
        match self.advance() {
            None => {}
            Some(span) => {
                if span.1 != TokType::EOF {
                    return Err(self.make_error(format!("Unexpected {:?} token after value", span.1), span.0 - 1))
                }
            }
        }
        let context = JSONTextContext{wsc: (self.collect_wsc_vec_to_string(&wsc_0), self.collect_wsc_vec_to_string(&wsc_1))};
        Ok(JSONText { value, context: Some(context) })
    }

    fn consume_whitespace_and_comments(&mut self) -> Vec<&'toks TokenSpan> {
        let mut ret: Vec<&TokenSpan> = Vec::new();
        loop {
            match self.peek() {
                None => {return ret}
                Some(span) => {
                    match span.1 {
                        TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                            ret.push(span);
                            self.advance();
                        }
                        _ => {return ret}
                    }
                }
            }
        }
    }
}

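/// Parses a [JSONText] from tokens previously produced by the tokenizer,
/// preserving whitespace and comments for round-tripping.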
pub fn from_tokens<'toks, 'input>(tokens: &'toks Tokens<'input>) -> Result<JSONText, ParsingError> {
    let mut parser = JSON5Parser::new(tokens);
    parser.parse_text()
}

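/// Parses a [JSONText] from a `&str` of JSON5 source, preserving whitespace
/// and comments for round-tripping.
///
/// A minimal sketch (hypothetical input):
///
/// ```rust
/// use json_five::rt::parser::{from_str, JSONValue};
///
/// let doc = from_str("[1, 2]").unwrap();
/// if let JSONValue::JSONArray { values, .. } = doc.value {
///     assert_eq!(values.len(), 2);
/// }
/// ```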
pub fn from_str(source: &str) -> Result<JSONText, ParsingError> {
    use crate::tokenize::tokenize_rt_str;
    let maybe_toks = tokenize_rt_str(source);
    match maybe_toks {
        Err(e) => {
            Err(ParsingError{index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno})
        }
        Ok(toks) => {
            from_tokens(&toks)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::tokenize::Tokenizer;
    use super::*;

    #[test]
    fn test_empty_object_ast() {
        let res = from_str("{}").unwrap();
        let expected = JSONText{context: Some(JSONTextContext{wsc: (String::new(), String::new())}), value: JSONValue::JSONObject {key_value_pairs: vec![], context: Some(JSONObjectContext{wsc: (String::new(),)})}};
        assert_eq!(res.value, expected.value)
    }
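
    // A minimal round-trip sketch: the Display impl should reproduce the
    // original source, including preserved whitespace. (Hypothetical input.)
    #[test]
    fn test_round_trip_object() {
        let source = "{ a: 1, b: 2 }";
        let doc = from_str(source).unwrap();
        assert_eq!(doc.to_string(), source);
    }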

    #[test]
    fn test_illegal_identifier_escape() {
        let text = r#"{ \u0031foo: 123 }"#;
        from_str(text).unwrap_err();
    }


    #[test]
    fn test_leading_comma_array() {
        let sample = r#"[
    ,null
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_lone_trailing_comma_array() {
        let sample = r#"[
    ,
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_no_comma_array() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_regular_array() {
        let sample = r#"[
    true,
    false,
    null
]"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_trailing_comma_array() {
        let sample = r#"[
    null,
]"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_block_comment_following_array_element() {
        let sample = r#"[
    false
    /*
        true
    */
]"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_block_comment_following_top_level_value() {
        let sample = r#"null
/*
    Some non-comment top-level value is needed;
    we use null above.
*/"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_block_comment_in_string() {
        let sample = r#""This /* block comment */ isn't really a block comment.""#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_block_comment_preceding_top_level_value() {
        let sample = r#"/*
    Some non-comment top-level value is needed;
    we use null below.
*/
null"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_block_comment_with_asterisks() {
        let sample = r#"/**
 * This is a JavaDoc-like block comment.
 * It contains asterisks inside of it.
 * It might also be closed with multiple asterisks.
 * Like this:
 **/
true"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_inline_comment_following_array_element() {
        let sample = r#"[
    false   // true
]"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_inline_comment_following_top_level_value() {
        let sample = r#"null // Some non-comment top-level value is needed; we use null here."#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_inline_comment_in_string() {
        let sample = r#""This inline comment // isn't really an inline comment.""#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_inline_comment_preceding_top_level_value() {
        let sample = r#"// Some non-comment top-level value is needed; we use null below.
null"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_top_level_block_comment() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_top_level_inline_comment() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_unterminated_block_comment() {
        let sample = r#"true
/*
    This block comment doesn't terminate.
    There was a legitimate value before this,
    but this is still invalid JS/JSON5.
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_empty() {
        let sample = r#""#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_npm_package() {
        let sample = r#"{
  "name": "npm",
  "publishConfig": {
    "proprietary-attribs": false
  },
  "description": "A package manager for node",
  "keywords": [
    "package manager",
    "modules",
    "install",
    "package.json"
  ],
  "version": "1.1.22",
  "preferGlobal": true,
  "config": {
    "publishtest": false
  },
  "homepage": "http://npmjs.org/",
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "repository": {
    "type": "git",
    "url": "https://github.com/isaacs/npm"
  },
  "bugs": {
    "email": "npm-@googlegroups.com",
    "url": "http://github.com/isaacs/npm/issues"
  },
  "directories": {
    "doc": "./doc",
    "man": "./man",
    "lib": "./lib",
    "bin": "./bin"
  },
  "main": "./lib/npm.js",
  "bin": "./bin/npm-cli.js",
  "dependencies": {
    "semver": "~1.0.14",
    "ini": "1",
    "slide": "1",
    "abbrev": "1",
    "graceful-fs": "~1.1.1",
    "minimatch": "~0.2",
    "nopt": "1",
    "node-uuid": "~1.3",
    "proto-list": "1",
    "rimraf": "2",
    "request": "~2.9",
    "which": "1",
    "tar": "~0.1.12",
    "fstream": "~0.1.17",
    "block-stream": "*",
    "inherits": "1",
    "mkdirp": "0.3",
    "read": "0",
    "lru-cache": "1",
    "node-gyp": "~0.4.1",
    "fstream-npm": "0 >=0.0.5",
    "uid-number": "0",
    "archy": "0",
    "chownr": "0"
  },
  "bundleDependencies": [
    "slide",
    "ini",
    "semver",
    "abbrev",
    "graceful-fs",
    "minimatch",
    "nopt",
    "node-uuid",
    "rimraf",
    "request",
    "proto-list",
    "which",
    "tar",
    "fstream",
    "block-stream",
    "inherits",
    "mkdirp",
    "read",
    "lru-cache",
    "node-gyp",
    "fstream-npm",
    "uid-number",
    "archy",
    "chownr"
  ],
  "devDependencies": {
    "ronn": "https://github.com/isaacs/ronnjs/tarball/master"
  },
  "engines": {
    "node": "0.6 || 0.7 || 0.8",
    "npm": "1"
  },
  "scripts": {
    "test": "node ./test/run.js",
    "prepublish": "npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc",
    "dumpconf": "env | grep npm | sort | uniq"
  },
  "licenses": [
    {
      "type": "MIT +no-false-attribs",
      "url": "http://github.com/isaacs/npm/raw/master/LICENSE"
    }
  ]
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_npm_package2() {
        let sample = r#"{
  name: 'npm',
  publishConfig: {
    'proprietary-attribs': false,
  },
  description: 'A package manager for node',
  keywords: [
    'package manager',
    'modules',
    'install',
    'package.json',
  ],
  version: '1.1.22',
  preferGlobal: true,
  config: {
    publishtest: false,
  },
  homepage: 'http://npmjs.org/',
  author: 'Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)',
  repository: {
    type: 'git',
    url: 'https://github.com/isaacs/npm',
  },
  bugs: {
    email: 'npm-@googlegroups.com',
    url: 'http://github.com/isaacs/npm/issues',
  },
  directories: {
    doc: './doc',
    man: './man',
    lib: './lib',
    bin: './bin',
  },
  main: './lib/npm.js',
  bin: './bin/npm-cli.js',
  dependencies: {
    semver: '~1.0.14',
    ini: '1',
    slide: '1',
    abbrev: '1',
    'graceful-fs': '~1.1.1',
    minimatch: '~0.2',
    nopt: '1',
    'node-uuid': '~1.3',
    'proto-list': '1',
    rimraf: '2',
    request: '~2.9',
    which: '1',
    tar: '~0.1.12',
    fstream: '~0.1.17',
    'block-stream': '*',
    inherits: '1',
    mkdirp: '0.3',
    read: '0',
    'lru-cache': '1',
    'node-gyp': '~0.4.1',
    'fstream-npm': '0 >=0.0.5',
    'uid-number': '0',
    archy: '0',
    chownr: '0',
  },
  bundleDependencies: [
    'slide',
    'ini',
    'semver',
    'abbrev',
    'graceful-fs',
    'minimatch',
    'nopt',
    'node-uuid',
    'rimraf',
    'request',
    'proto-list',
    'which',
    'tar',
    'fstream',
    'block-stream',
    'inherits',
    'mkdirp',
    'read',
    'lru-cache',
    'node-gyp',
    'fstream-npm',
    'uid-number',
    'archy',
    'chownr',
  ],
  devDependencies: {
    ronn: 'https://github.com/isaacs/ronnjs/tarball/master',
  },
  engines: {
    node: '0.6 || 0.7 || 0.8',
    npm: '1',
  },
  scripts: {
    test: 'node ./test/run.js',
    prepublish: 'npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc',
    dumpconf: 'env | grep npm | sort | uniq',
  },
  licenses: [
    {
      type: 'MIT +no-false-attribs',
      url: 'http://github.com/isaacs/npm/raw/master/LICENSE',
    },
  ],
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_readme_example() {
        let sample = r#"{
    foo: 'bar',
    while: true,

    this: 'is a \
multi-line string',

    // this is an inline comment
    here: 'is another', // inline comment

    /* this is a block comment
       that continues on another line */

    hex: 0xDEADbeef,
    half: .5,
    delta: +10,
    to: Infinity,   // and beyond!

    finally: 'a trailing comma',
    oh: [
        "we shouldn't forget",
        'arrays can have',
        'trailing commas too',
    ],
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_valid_whitespace() {
        let sample = r#"{
    // An invalid form feed character (\x0c) has been entered before this comment.
    // Be careful not to delete it.
  "a": true
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_comment_cr() {
        let sample = r#"{
    // This comment is terminated with `\r`.
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_comment_crlf() {
        let sample = r#"{
    // This comment is terminated with `\r\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_comment_lf() {
        let sample = r#"{
    // This comment is terminated with `\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_escaped_cr() {
        let sample = r#"{
    // the following string contains an escaped `\r`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_escaped_crlf() {
        let sample = r#"{
    // the following string contains an escaped `\r\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_escaped_lf() {
        let sample = r#"{
    // the following string contains an escaped `\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float_leading_decimal_point() {
        let sample = r#".5
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float_leading_zero() {
        let sample = r#"0.5
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float_trailing_decimal_point_with_integer_exponent() {
        let sample = r#"5.e4
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float_trailing_decimal_point() {
        let sample = r#"5.
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float_with_integer_exponent() {
        let sample = r#"1.2e3
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_float() {
        let sample = r#"1.2
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_hexadecimal_empty() {
        let sample = r#"0x
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_hexadecimal_lowercase_letter() {
        let sample = r#"0xc8
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_hexadecimal_uppercase_x() {
        let sample = r#"0XC8
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_hexadecimal_with_integer_exponent() {
        let sample = r#"0xc8e4
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_hexadecimal() {
        let sample = r#"0xC8
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_infinity() {
        let sample = r#"Infinity
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_float_exponent() {
        let sample = r#"1e2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }


    #[test]
    fn test_integer_with_hexadecimal_exponent() {
        let sample = r#"1e0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_integer_with_integer_exponent() {
        let sample = r#"2e23
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_negative_float_exponent() {
        let sample = r#"1e-2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_integer_with_negative_hexadecimal_exponent() {
        let sample = r#"1e-0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }


    #[test]
    fn test_integer_with_negative_integer_exponent() {
        let sample = r#"2e-23
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_negative_zero_integer_exponent() {
        let sample = r#"5e-0
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_positive_float_exponent() {
        let sample = r#"1e+2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_integer_with_positive_hexadecimal_exponent() {
        let sample = r#"1e+0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_integer_with_positive_integer_exponent() {
        let sample = r#"1e+2
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_positive_zero_integer_exponent() {
        let sample = r#"5e+0
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer_with_zero_integer_exponent() {
        let sample = r#"5e0
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_integer() {
        let sample = r#"15
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_lone_decimal_point() {
        let sample = r#".
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }


    #[test]
    fn test_nan() {
        let sample = r#"NaN
"#;
        let _res = from_str(sample).unwrap();
    }
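
    // A sketch verifying that the NaN literal parses to [JSONValue::NaN] and
    // serializes back as the JSON5 literal `NaN` via the Display impl.
    #[test]
    fn test_nan_to_string() {
        let doc = from_str("NaN").unwrap();
        assert_eq!(doc.value, JSONValue::NaN);
        assert_eq!(doc.value.to_string(), "NaN");
    }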

    #[test]
    fn test_negative_float_leading_decimal_point() {
        let sample = r#"-.5
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_float_leading_zero() {
        let sample = r#"-0.5
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_float_trailing_decimal_point() {
        let sample = r#"-5.
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_float() {
        let sample = r#"-1.2
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_hexadecimal() {
        let sample = r#"-0xC8
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_infinity() {
        let sample = r#"-Infinity
"#;
        let _res = from_str(sample).unwrap();
    }


    #[test]
    fn test_negative_integer() {
        let sample = r#"-15
"#;
        let _res = from_str(sample).unwrap();
    }
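
    // A sketch checking that a parsed negative value reconstructs its source
    // form through the Display impl.
    #[test]
    fn test_negative_integer_to_string() {
        let doc = from_str("-15").unwrap();
        assert_eq!(doc.value.to_string(), "-15");
    }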
1646    }
1647
1648
1649
1650    #[test]
1651    fn test_negative_noctal() {
1652        let sample = r#"-098
1653"#;
1654        let maybe_tokens = Tokenizer::new(sample).tokenize();
1655        if maybe_tokens.is_err() {
1656            return
1657        } else {
1658            let toks = maybe_tokens.unwrap();
1659            let res = from_tokens(&toks);
1660            assert!(res.is_err());
1661        }
1662    }
1663
1664
1665    #[test]
1666    fn test_negative_octal() {
1667        let sample = r#"-0123
1668"#;
1669        let maybe_tokens = Tokenizer::new(sample).tokenize();
1670        if maybe_tokens.is_err() {
1671            return
1672        } else {
1673            let toks = maybe_tokens.unwrap();
1674            let res = from_tokens(&toks);
1675            assert!(res.is_err());
1676        }
1677    }
1678
1679
1680    #[test]
1681    fn test_negative_zero_float_leading_decimal_point() {
1682        let sample = r#"-.0
1683"#;
1684        let _res = from_str(sample).unwrap();
1685    }
1686
1687
1688
1689    #[test]
1690    fn test_negative_zero_float_trailing_decimal_point() {
1691        let sample = r#"-0.
1692"#;
1693        let _res = from_str(sample).unwrap();
1694    }
1695
1696
1697
1698    #[test]
1699    fn test_negative_zero_float() {
1700        let sample = r#"-0.0
1701"#;
1702        let _res = from_str(sample).unwrap();
1703    }
1704
1705
1706
1707    #[test]
1708    fn test_negative_zero_hexadecimal() {
1709        let sample = r#"-0x0
1710"#;
1711        let _res = from_str(sample).unwrap();
1712    }
1713
1714
1715
1716    #[test]
1717    fn test_negative_zero_integer() {
1718        let sample = r#"-0
1719"#;
1720        let _res = from_str(sample).unwrap();
1721    }
1722
1723
1724
1725    #[test]
1726    fn test_negative_zero_octal() {
1727        let sample = r#"-00
1728"#;
1729        let maybe_tokens = Tokenizer::new(sample).tokenize();
1730        if maybe_tokens.is_err() {
1731            return
1732        } else {
1733            let toks = maybe_tokens.unwrap();
1734            let res = from_tokens(&toks);
1735            assert!(res.is_err());
1736        }
1737    }
1738
1739
1740    #[test]
1741    fn test_noctal_with_leading_octal_digit() {
1742        let sample = r#"0780
1743"#;
1744        let maybe_tokens = Tokenizer::new(sample).tokenize();
1745        if maybe_tokens.is_err() {
1746            return
1747        } else {
1748            let toks = maybe_tokens.unwrap();
1749            let res = from_tokens(&toks);
1750            assert!(res.is_err());
1751        }
1752    }
1753
1754
1755    #[test]
1756    fn test_noctal() {
1757        let sample = r#"080
1758"#;
1759        let maybe_tokens = Tokenizer::new(sample).tokenize();
1760        if maybe_tokens.is_err() {
1761            return
1762        } else {
1763            let toks = maybe_tokens.unwrap();
1764            let res = from_tokens(&toks);
1765            assert!(res.is_err());
1766        }
1767    }
1768
1769
1770    #[test]
1771    fn test_octal() {
1772        let sample = r#"010
1773"#;
1774        let maybe_tokens = Tokenizer::new(sample).tokenize();
1775        if maybe_tokens.is_err() {
1776            return
1777        } else {
1778            let toks = maybe_tokens.unwrap();
1779            let res = from_tokens(&toks);
1780            assert!(res.is_err());
1781        }
1782    }


    #[test]
    fn test_positive_float_leading_decimal_point() {
        let sample = r#"+.5
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_float_leading_zero() {
        let sample = r#"+0.5
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_float_trailing_decimal_point() {
        let sample = r#"+5.
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_float() {
        let sample = r#"+1.2
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_hexadecimal() {
        let sample = r#"+0xC8
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_infinity() {
        let sample = r#"+Infinity
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_integer() {
        let sample = r#"+15
"#;
        let _res = from_str(sample).unwrap();
    }



    // "Noctal": a leading-zero literal containing 8 or 9, so it is not even
    // valid octal. JSON5 rejects it all the same.
    #[test]
    fn test_positive_noctal() {
        let sample = r#"+098
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_positive_octal() {
        let sample = r#"+0123
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_positive_zero_float_leading_decimal_point() {
        let sample = r#"+.0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_zero_float_trailing_decimal_point() {
        let sample = r#"+0.
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_zero_float() {
        let sample = r#"+0.0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_zero_hexadecimal() {
        let sample = r#"+0x0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_zero_integer() {
        let sample = r#"+0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_positive_zero_octal() {
        let sample = r#"+00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_zero_float_leading_decimal_point() {
        let sample = r#".0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_float_trailing_decimal_point() {
        let sample = r#"0.
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_float() {
        let sample = r#"0.0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_hexadecimal() {
        let sample = r#"0x0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_integer_with_integer_exponent() {
        let sample = r#"0e23
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_integer() {
        let sample = r#"0
"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_zero_octal() {
        let sample = r#"00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }
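
    // With the hypothetical `assert_rejected` helper above, each of these
    // leading-zero cases would collapse to a one-liner, e.g.:
    //
    //     assert_rejected("00\n");
    //     assert_rejected("010\n");
    //     assert_rejected("+0123\n");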


    // Duplicate member keys are not an error in JSON5, so this must parse.
    #[test]
    fn test_duplicate_keys() {
        let sample = r#"{
    "a": true,
    "a": false
}
"#;
        let _res = from_str(sample).unwrap();
    }
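
    // A possible follow-up (sketch only): verify that both members survive
    // parsing. This assumes the document returned by `from_str` exposes its
    // root as a `value` field, which this module does not confirm:
    //
    //     let doc = from_str(sample).unwrap();
    //     if let JSONValue::JSONObject { key_value_pairs, .. } = doc.value {
    //         assert_eq!(key_value_pairs.len(), 2); // both "a" members kept
    //     }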



    #[test]
    fn test_empty_object() {
        let sample = r#"{}"#;
        let _res = from_str(sample).unwrap();
    }



    // An ES5 IdentifierName may not begin with a digit, so this key is illegal.
    #[test]
    fn test_illegal_unquoted_key_number() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_illegal_unquoted_key_symbol() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_leading_comma_object() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_lone_trailing_comma_object() {
        let sample = r#"{
    ,
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    #[test]
    fn test_no_comma_object() {
        let sample = r#"{
    "foo": "bar"
    "hello": "world"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }


    // Reserved words (illegal as identifiers in ES5 strict mode) are still
    // valid unquoted member keys in JSON5.
    #[test]
    fn test_reserved_unquoted_key() {
        let sample = r#"{
    while: true
}"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_single_quoted_key() {
        let sample = r#"{
    'hello': "world"
}"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_trailing_comma_object() {
        let sample = r#"{
    "foo": "bar",
}"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_unquoted_keys() {
        let sample = r#"{
    hello: "world",
    _: "underscore",
    $: "dollar sign",
    one1: "numerals",
    _$_: "multiple symbols",
    $_$hello123world_$_: "mixed"
}"#;
        let _res = from_str(sample).unwrap();
    }
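
    // Unquoted keys follow ES5 IdentifierName rules: `$`, `_`, and letters may
    // begin a key, and digits may appear after the first character. Such keys
    // parse as [JSONValue::Identifier] rather than as quoted strings; a
    // hypothetical check (sketch; assumes `Identifier` is a tuple variant over
    // the raw key text and that the parsed root is reachable as `doc.value`):
    //
    //     let doc = from_str(sample).unwrap();
    //     if let JSONValue::JSONObject { key_value_pairs, .. } = doc.value {
    //         assert!(matches!(key_value_pairs[0].key, JSONValue::Identifier(_)));
    //     }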



    #[test]
    fn test_escaped_single_quoted_string() {
        let sample = r#"'I can\'t wait'"#;
        let _res = from_str(sample).unwrap();
    }



    // A backslash immediately before the newline is a line continuation, so
    // this string may legally span two source lines.
    #[test]
    fn test_multi_line_string() {
        let sample = r#"'hello\
 world'"#;
        let _res = from_str(sample).unwrap();
    }



    #[test]
    fn test_single_quoted_string() {
        let sample = r#"'hello world'"#;
        let _res = from_str(sample).unwrap();
    }



    // Without the continuation backslash, a raw newline inside a string is
    // illegal.
    #[test]
    fn test_unescaped_multi_line_string() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }
    // Start error tests: each case below checks the reported error location
    // (lineno, colno, or char_index).


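    // Each error-location test asserts one coordinate of the same failure.
    // A sketch of a shared accessor (hypothetical helper): it returns
    // (lineno, colno, char_index) from whichever stage rejected the input,
    // using only the error fields the tests themselves already read.
    #[allow(dead_code)]
    fn error_location(sample: &str) -> (usize, usize, usize) {
        match Tokenizer::new(sample).tokenize() {
            Err(err) => (err.lineno, err.colno, err.char_index),
            Ok(toks) => {
                let err = from_tokens(&toks).unwrap_err();
                (err.lineno, err.colno, err.char_index)
            }
        }
    }
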
    #[test]
    fn test_error_no_comma_array_lineno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_no_comma_array_index() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_no_comma_array_colno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_top_level_block_comment_lineno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_top_level_block_comment_index() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_colno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        }
    }



    #[test]
    fn test_error_top_level_inline_comment_lineno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_top_level_inline_comment_index() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_colno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_lineno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_illegal_unquoted_key_number_index() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_colno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }



    #[test]
    fn test_error_illegal_unquoted_key_symbol_lineno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_illegal_unquoted_key_symbol_index() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_colno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        }
    }



    #[test]
    fn test_error_leading_comma_object_lineno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_leading_comma_object_index() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_colno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_lineno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }


    #[test]
    fn test_error_unescaped_multi_line_string_index() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_colno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }
}