use std::fmt::{Display, Formatter};
use std::iter::Peekable;
use std::slice::Iter;
use crate::tokenize::{TokenSpan, TokType};
use crate::tokenize::Tokens;
use crate::tokenize::TokType::{Colon, Comma, RightBrace};
use crate::utils::get_line_col_char;

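// Parse-tree node types. A parsed document is a `JSONText` wrapping a single
// `JSONValue`; string and number variants borrow their lexemes from the input
// rather than copying them.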
#[derive(PartialEq, Debug)]
pub struct JSONKeyValuePair<'input> {
    pub(crate) key: JSONValue<'input>,
    pub(crate) value: JSONValue<'input>,
}


#[derive(PartialEq, Debug)]
pub enum UnaryOperator {
    Plus,
    Minus,
}


#[derive(PartialEq, Debug)]
pub enum JSONValue<'input> {
    JSONObject { key_value_pairs: Vec<JSONKeyValuePair<'input>> },
    JSONArray { values: Vec<JSONValue<'input>> },
    Integer(&'input str),
    Float(&'input str),
    Exponent(&'input str),
    Null,
    Infinity,
    NaN,
    Hexadecimal(&'input str),
    Bool(bool),
    DoubleQuotedString(&'input str),
    SingleQuotedString(&'input str),
    Unary { operator: UnaryOperator, value: Box<JSONValue<'input>> },
    Identifier(&'input str),
}

#[derive(PartialEq, Debug)]
pub struct JSONText<'input> {
    pub value: JSONValue<'input>,
}

#[allow(dead_code)]
pub enum TrailingComma {
    ALL,
    OBJECTS,
    ARRAYS,
    NONE,
}

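/// Controls how a parse tree is rendered back to text.
///
/// `indent: None` keeps the output on a single line; `Some(n)` pretty-prints
/// with `n` spaces per nesting level. A minimal sketch of picking a style,
/// using the constructors defined below:
///
/// ```ignore
/// let mut style = FormatConfiguration::with_indent(4, TrailingComma::ALL);
/// ```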
pub struct FormatConfiguration {
    pub(crate) indent: Option<usize>,
    pub(crate) item_separator: String,
    pub(crate) key_separator: String,
    pub(crate) current_indent: String,
    pub(crate) trailing_comma: TrailingComma,
}

#[allow(dead_code)]
impl FormatConfiguration {
    pub fn new(indent: Option<usize>, item_separator: &str, key_separator: &str, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent, item_separator: item_separator.to_string(), key_separator: key_separator.to_string(), current_indent: String::with_capacity(64), trailing_comma }
    }

    pub fn with_indent(indent: usize, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent: Some(indent), item_separator: ",".to_string(), key_separator: ": ".to_string(), trailing_comma, current_indent: String::with_capacity(64) }
    }

    pub fn with_separators(item_separator: &str, key_separator: &str, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent: Some(0), key_separator: key_separator.to_string(), trailing_comma, item_separator: item_separator.to_string(), current_indent: String::with_capacity(64) }
    }

    pub fn default() -> Self {
        FormatConfiguration { indent: None, item_separator: ", ".to_string(), key_separator: ": ".to_string(), current_indent: String::with_capacity(64), trailing_comma: TrailingComma::NONE }
    }

    pub fn compact() -> Self {
        FormatConfiguration { indent: None, item_separator: ",".to_string(), key_separator: ":".to_string(), current_indent: String::with_capacity(64), trailing_comma: TrailingComma::NONE }
    }
}

impl<'input> JSONKeyValuePair<'input> {
    fn to_string_formatted(&self, style: &mut FormatConfiguration) -> String {
        format!("{}{}{}", self.key.to_string_formatted(style), style.key_separator, self.value.to_string_formatted(style))
    }
}

impl<'input> JSONValue<'input> {
    fn to_string_formatted(&self, style: &mut FormatConfiguration) -> String {
        match self {
            JSONValue::Identifier(s) | JSONValue::Integer(s) | JSONValue::Float(s) | JSONValue::Exponent(s) | JSONValue::Hexadecimal(s) => {
                s.to_string()
            }
            JSONValue::Bool(b) => {
                b.to_string()
            }
            JSONValue::DoubleQuotedString(s) => {
                format!("\"{}\"", s)
            }
            JSONValue::SingleQuotedString(s) => {
                format!("'{}'", s)
            }

            JSONValue::Null => "null".to_string(),
            JSONValue::Infinity => "Infinity".to_string(),
            JSONValue::NaN => "NaN".to_string(),

            JSONValue::Unary { operator, value } => {
                let op_char = match operator {
                    UnaryOperator::Plus => '+',
                    UnaryOperator::Minus => '-',
                };
                format!("{}{}", op_char, value.to_string_formatted(style))
            }
            JSONValue::JSONObject { key_value_pairs } => {
                let mut ret: String;

                match style.indent {
                    None => {
                        ret = String::from("{");
                    }
                    Some(indent) => {
                        style.current_indent.reserve(indent);
                        for _ in 0..indent {
                            style.current_indent.push(' ');
                        }
                        ret = format!("{{\n{}", style.current_indent);
                    }
                }
                for (idx, kvp) in key_value_pairs.iter().enumerate() {
                    ret.push_str(kvp.to_string_formatted(style).as_str());
                    if idx < key_value_pairs.len() - 1 {
                        match style.indent {
                            None => {
                                ret.push_str(style.item_separator.as_str());
                            }
                            Some(_) => {
                                ret.push_str(format!("{}\n{}", style.item_separator, style.current_indent).as_str())
                            }
                        }
                    }
                }
                match style.trailing_comma {
                    TrailingComma::ALL | TrailingComma::OBJECTS => {
                        ret.push(',');
                    }
                    _ => {}
                }
                match style.indent {
                    None => {
                        ret.push('}');
                    }
                    Some(indent) => {
                        style.current_indent.truncate(style.current_indent.len() - indent);
                        ret.push_str(format!("\n{}}}", style.current_indent).as_str());
                    }
                }
                ret
            }
            JSONValue::JSONArray { values } => {
                let mut ret: String;

                match style.indent {
                    None => {
                        ret = String::from("[");
                    }
                    Some(indent) => {
                        style.current_indent.reserve(indent);
                        for _ in 0..indent {
                            style.current_indent.push(' ');
                        }
                        ret = format!("[\n{}", style.current_indent);
                    }
                }
                for (idx, value) in values.iter().enumerate() {
                    ret.push_str(value.to_string_formatted(style).as_str());
                    if idx < values.len() - 1 {
                        match style.indent {
                            None => {
                                ret.push_str(style.item_separator.as_str());
                            }
                            Some(_) => {
                                ret.push_str(format!("{}\n{}", style.item_separator, style.current_indent).as_str())
                            }
                        }
                    }
                }
                match style.trailing_comma {
                    TrailingComma::ALL | TrailingComma::ARRAYS => {
                        ret.push(',');
                    }
                    _ => {}
                }
                match style.indent {
                    None => {
                        ret.push(']');
                    }
                    Some(indent) => {
                        style.current_indent.truncate(style.current_indent.len() - indent);
                        ret.push_str(format!("\n{}]", style.current_indent).as_str());
                    }
                }
                ret
            }
        }
    }
}


impl<'input> Display for JSONValue<'input> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut style = FormatConfiguration::default();
        let res = self.to_string_formatted(&mut style);
        write!(f, "{}", res)
    }
}


impl<'input> Display for JSONText<'input> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.value.to_string_formatted(&mut FormatConfiguration::default()))
    }
}


#[derive(Debug, PartialEq)]
pub struct ParsingError {
    pub index: usize,
    pub message: String,
    pub lineno: usize,
    pub colno: usize,
    pub char_index: usize,
}


impl Display for ParsingError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParsingError: {}: line {} column {} (char {})", self.message, self.lineno, self.colno, self.char_index)
    }
}

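// Recursive-descent parser over the token stream produced by the tokenizer.
// An informal sketch of the grammar the methods below implement:
//
//   text    := value EOF
//   value   := object | array | unary | primary
//   object  := '{' ( member ( ',' member )* ','? )? '}'
//   member  := ( Name | String ) ':' value
//   array   := '[' ( value ( ',' value )* ','? )? ']'
//   unary   := ( '+' | '-' )? primary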
struct JSON5Parser<'toks, 'input> {
    source: &'input str,
    source_tokens: Peekable<Iter<'toks, TokenSpan>>,
    lookahead: Option<&'toks TokenSpan>,
    current_depth: usize,
    max_depth: usize,
}


impl<'toks, 'input> JSON5Parser<'toks, 'input> {
    fn new(tokens: &'toks Tokens<'input>) -> Self {
        use crate::utils::MAX_DEPTH;
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source, current_depth: 0, max_depth: MAX_DEPTH }
    }

    fn with_max_depth(tokens: &'toks Tokens<'input>, max_depth: usize) -> Self {
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source, current_depth: 0, max_depth }
    }

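    // Consumes and returns the next significant token, transparently skipping
    // whitespace and comment tokens.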
    fn advance(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.next() {
            None => {
                self.lookahead = None;
                None
            }
            Some(span) => {
                match span.1 {
                    TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                        self.advance()
                    }
                    _ => {
                        self.lookahead = Some(span);
                        self.lookahead
                    }
                }
            }
        }
    }

    #[inline]
    fn get_tok_source(&self, span: &'toks TokenSpan) -> &'input str {
        &self.source[span.0..span.2]
    }

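    // Returns the next significant token without consuming it; any leading
    // whitespace/comment tokens are consumed along the way.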
    fn peek(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.peek() {
            None => None,
            Some(span) => {
                match span.1 {
                    TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                        self.source_tokens.next();
                        self.peek()
                    }
                    _ => {
                        Some(span)
                    }
                }
            }
        }
    }

    fn position(&mut self) -> usize {
        match self.peek() {
            None => {
                match self.lookahead {
                    None => 0,
                    Some(span) => span.2,
                }
            }
            Some(span) => span.0,
        }
    }

    fn make_error(&self, message: String, index: usize) -> ParsingError {
        let (lineno, colno, char_index) = get_line_col_char(self.source, index);
        ParsingError {
            index,
            message,
            lineno,
            colno,
            char_index,
        }
    }

    fn check_and_consume(&mut self, types: Vec<TokType>) -> Option<&'toks TokenSpan> {
        let next_tok = self.peek()?;
        for toktype in types {
            if next_tok.1 == toktype {
                return self.advance();
            }
        }
        None
    }

    #[inline]
    fn check_and_consume_with_source(&mut self, types: Vec<TokType>) -> Option<(&'toks TokenSpan, &'input str)> {
        let tok = self.check_and_consume(types)?;
        let source = self.get_tok_source(tok);
        Some((tok, source))
    }

    fn parse_key(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume_with_source(vec![TokType::Name, TokType::DoubleQuotedString, TokType::SingleQuotedString]) {
            None => {
                match self.peek() {
                    None => {
                        let idx = self.position();
                        Err(self.make_error("Unexpected EOF. Was expecting member name".to_string(), idx))
                    }
                    Some(span) => {
                        let src = self.get_tok_source(span);
                        Err(self.make_error(format!("Invalid token for unquoted key ({}, {:?})", span.2, src), span.0))
                    }
                }
            },
            Some((span, lexeme)) => {
                match span.1 {
                    TokType::DoubleQuotedString => {
                        Ok(JSONValue::DoubleQuotedString(&lexeme[1..lexeme.len() - 1]))
                    },
                    TokType::SingleQuotedString => {
                        Ok(JSONValue::SingleQuotedString(&lexeme[1..lexeme.len() - 1]))
                    }
                    TokType::Name => {
                        Ok(JSONValue::Identifier(lexeme))
                    }
                    _ => unreachable!("Programming error. Please report this as a bug.")
                }
            }
        }
    }

    fn parse_object(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let mut kvps: Vec<JSONKeyValuePair> = Vec::new();
        loop {
            match self.check_and_consume(vec![RightBrace]) {
                None => {
                    let key = self.parse_key()?;
                    match self.check_and_consume(vec![Colon]) {
                        None => {
                            let idx = self.position();
                            return Err(self.make_error("Expecting ':' delimiter".to_string(), idx))
                        }
                        Some(_) => {
                            let val = self.parse_value()?;
                            let kvp = JSONKeyValuePair { key, value: val };
                            kvps.push(kvp);
                            match self.check_and_consume(vec![Comma]) {
                                None => {
                                    match self.check_and_consume(vec![RightBrace]) {
                                        None => {
                                            let idx = self.position();
                                            return Err(self.make_error("Expecting '}' at end of object".to_string(), idx))
                                        },
                                        Some(_) => {
                                            break Ok(JSONValue::JSONObject { key_value_pairs: kvps })
                                        }
                                    }
                                }
                                Some(_) => {
                                    continue
                                }
                            }
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONObject { key_value_pairs: kvps })
                }
            }
        }
    }

    fn parse_array(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let mut values: Vec<JSONValue> = Vec::new();
        loop {
            match self.check_and_consume(vec![TokType::RightBracket]) {
                None => {
                    let val = self.parse_value()?;
                    values.push(val);
                    match self.check_and_consume(vec![Comma]) {
                        None => {
                            match self.check_and_consume(vec![TokType::RightBracket]) {
                                None => {
                                    let idx = self.position();
                                    return Err(self.make_error("Expecting ']' at end of array".to_string(), idx))
                                },
                                Some(_) => {
                                    break Ok(JSONValue::JSONArray { values })
                                }
                            }
                        }
                        Some(_) => {
                            continue
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONArray { values })
                }
            }
        }
    }

    fn parse_primary(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let span = self.advance().unwrap();
        match &span.1 {
            TokType::Integer => Ok(JSONValue::Integer(self.get_tok_source(span))),
            TokType::Float => Ok(JSONValue::Float(self.get_tok_source(span))),
            TokType::Exponent => Ok(JSONValue::Exponent(self.get_tok_source(span))),
            TokType::SingleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::SingleQuotedString(&lexeme[1..lexeme.len() - 1]))
            },
            TokType::DoubleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::DoubleQuotedString(&lexeme[1..lexeme.len() - 1]))
            },
            TokType::True => Ok(JSONValue::Bool(true)),
            TokType::False => Ok(JSONValue::Bool(false)),
            TokType::Null => Ok(JSONValue::Null),
            TokType::Infinity => Ok(JSONValue::Infinity),
            TokType::Nan => Ok(JSONValue::NaN),
            TokType::Hexadecimal => Ok(JSONValue::Hexadecimal(self.get_tok_source(span))),
            TokType::EOF => {
                match self.position() {
                    0 => Err(self.make_error("Unexpected EOF. Was expecting value.".to_string(), 0)),
                    pos => Err(self.make_error("Unexpected EOF".to_string(), pos))
                }
            },
            t => Err(self.make_error(format!("Unexpected token of type {:?}: {:?}", t, self.get_tok_source(span)), span.0))
        }
    }

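    // JSON5 allows a single leading '+' or '-' on numeric values only; stacked
    // signs and signed non-numbers are rejected below.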
    fn parse_unary(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume(vec![TokType::Plus, TokType::Minus]) {
            None => self.parse_primary(),
            Some(span) => {
                let value = self.parse_unary()?;
                match value {
                    JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                    JSONValue::Unary { .. } => {
                        return Err(self.make_error("Only one unary operator is allowed".to_string(), span.2))
                    }
                    val => {
                        return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                    }
                }
                match span.1 {
                    TokType::Plus => {
                        Ok(JSONValue::Unary { operator: UnaryOperator::Plus, value: Box::new(value) })
                    }
                    TokType::Minus => {
                        Ok(JSONValue::Unary { operator: UnaryOperator::Minus, value: Box::new(value) })
                    }
                    _ => unreachable!("unexpected unary token type")
                }
            }
        }
    }

    fn parse_obj_or_array(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume(vec![TokType::LeftBracket, TokType::LeftBrace]) {
            None => self.parse_unary(),
            Some(span) => {
                match span.1 {
                    TokType::LeftBrace => self.parse_object(),
                    TokType::LeftBracket => self.parse_array(),
                    _ => unreachable!("unexpected token type for object or array")
                }
            }
        }
    }

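    // Entry point for parsing any value; enforces the recursion-depth limit
    // before dispatching.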
    fn parse_value(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        self.current_depth += 1;
        if self.current_depth > self.max_depth {
            let idx = self.position();
            return Err(self.make_error(format!("max depth ({}) exceeded in nested arrays/objects. To expand the depth, use the ``with_max_depth`` constructor or enable the `unlimited_depth` feature", self.max_depth), idx))
        }
        let res = self.parse_obj_or_array();
        self.current_depth -= 1;
        res
    }

    fn parse_text(&mut self) -> Result<JSONText<'input>, ParsingError> {
        let value = self.parse_value()?;
        match self.advance() {
            None => {}
            Some(span) => {
                if span.1 != TokType::EOF {
                    return Err(self.make_error(format!("Unexpected {:?} token after value", span.1), span.0))
                }
            }
        }
        Ok(JSONText { value })
    }
}

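/// Builds a `JSONText` from an already-tokenized input. Handy when the token
/// stream is also needed for other purposes. A usage sketch (marked `ignore`
/// because the crate path is not spelled out here):
///
/// ```ignore
/// let toks = tokenize_str("[1, 2, 3]").unwrap();
/// let doc = from_tokens(&toks).unwrap();
/// ```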
pub fn from_tokens<'toks, 'input>(tokens: &'toks Tokens<'input>) -> Result<JSONText<'input>, ParsingError> {
    let mut parser = JSON5Parser::new(tokens);
    parser.parse_text()
}

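/// Parses a JSON5 document from a `&str`. Tokenization errors are converted
/// into `ParsingError`s so callers see a single error type. For example
/// (`ignore`d for the same reason as above):
///
/// ```ignore
/// let doc = from_str("{key: 'value', nums: [1, 2,]}").unwrap();
/// println!("{}", doc); // round-trips through `Display`
/// ```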
pub fn from_str<'input>(source: &'input str) -> Result<JSONText<'input>, ParsingError> {
    use crate::tokenize::tokenize_str;
    let maybe_toks = tokenize_str(source);
    match maybe_toks {
        Err(e) => {
            Err(ParsingError { index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno })
        }
        Ok(toks) => {
            from_tokens(&toks)
        }
    }
}

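/// Byte-slice counterpart of `from_str`.
///
/// ```ignore
/// let doc = from_bytes(b"[1, 2, 3]").unwrap();
/// ```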
pub fn from_bytes<'input>(source: &'input [u8]) -> Result<JSONText<'input>, ParsingError> {
    use crate::tokenize::tokenize_bytes;
    let maybe_toks = tokenize_bytes(source);
    match maybe_toks {
        Err(e) => {
            Err(ParsingError { index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno })
        }
        Ok(toks) => {
            from_tokens(&toks)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::tokenize::Tokenizer;
    use crate::parser::JSONValue::*;
    use super::*;

    #[test]
    fn test_fuzz_1() {
        let res = from_str("0xA18 {9");
        assert!(res.is_err());
    }

    #[cfg(not(feature = "unlimited_depth"))]
    #[test]
    fn test_deeply_nested() {
        let n = 4000;
        let mut s = String::with_capacity(n * 2);
        for _ in 0..n {
            s.push('[')
        }
        for _ in 0..n {
            s.push(']')
        }
        let res = from_str(s.as_str());
        assert!(res.is_err());
        assert!(res.unwrap_err().message.contains("max depth"));
    }

    #[test]
    fn test_from_bytes() {
        let res = from_bytes(b"{}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_empty_object_from_str() {
        let res = from_str("{}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_illegal_identifier_escape() {
        let text = r#"{ \u0031foo: 123 }"#;
        from_str(text).unwrap_err();
    }

    #[test]
    fn test_illegal_unary() {
        let res = from_str("-'foo'");
        res.unwrap_err();
    }

    #[test]
    fn test_object() {
        let res = from_str("{\"foo\": \"bar\"}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![JSONKeyValuePair { key: JSONValue::DoubleQuotedString("foo"), value: JSONValue::DoubleQuotedString("bar") }] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_identifier() {
        let res = from_str("{foo: \"bar\"}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![JSONKeyValuePair { key: JSONValue::Identifier("foo"), value: JSONValue::DoubleQuotedString("bar") }] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_array() {
        let res = from_str("[1,2,3]").unwrap();
        let expected = JSONText { value: JSONArray { values: vec![JSONValue::Integer("1"), JSONValue::Integer("2"), JSONValue::Integer("3")] } };
        assert_eq!(res, expected);
    }
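
    // Round-trip check of the indent-aware formatter: parse `[1,2]`, then
    // render with a two-space indent. The expected string follows the
    // indentation rules implemented in `to_string_formatted` above.
    #[test]
    fn test_array_to_string_with_indent() {
        let parsed = from_str("[1,2]").unwrap();
        let mut style = FormatConfiguration::with_indent(2, TrailingComma::NONE);
        let formatted = parsed.value.to_string_formatted(&mut style);
        assert_eq!(formatted, "[\n  1,\n  2\n]");
    }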

    #[test]
    fn val_int() {
        let res = from_str("1").unwrap();
        let expected = JSONText { value: Integer("1") };
        assert_eq!(res, expected);
    }

    #[test]
    fn val_float() {
        let res = from_str("1.0").unwrap();
        let expected = JSONText { value: Float("1.0") };
        assert_eq!(res, expected);
    }

    #[test]
    fn val_string() {
        let res = from_str("'foo'").unwrap();
        let expected = JSONText { value: SingleQuotedString("foo") };
        assert_eq!(res, expected);
    }

    #[test]
    fn multiline_string() {
        let res = from_str("'foo\\\nbar'").unwrap();
        let expected = JSONText { value: SingleQuotedString("foo\\\nbar") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_empty_string() {
        let res = from_str("\"\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_single_element_array() {
        let res = from_str("[42]").unwrap();
        let expected = JSONText { value: JSONArray { values: vec![Integer("42")] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_single_key_object() {
        let res = from_str("{\"key\": \"value\"}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: DoubleQuotedString("key"),
                        value: DoubleQuotedString("value"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_array() {
        let res = from_str("[1, 2, 3,]").unwrap();
        let expected = JSONText {
            value: JSONArray {
                values: vec![Integer("1"), Integer("2"), Integer("3")]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_object() {
        let res = from_str("{\"a\": 1, \"b\": 2,}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: DoubleQuotedString("a"),
                        value: Integer("1"),
                    },
                    JSONKeyValuePair {
                        key: DoubleQuotedString("b"),
                        value: Integer("2"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_unquoted_key() {
        let res = from_str("{key: \"value\"}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: Identifier("key"),
                        value: DoubleQuotedString("value"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_multiline_string() {
        let res = from_str("\"multi\\\nline\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("multi\\\nline") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_unicode_characters() {
        let res = from_str("\"\\u2764\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("\\u2764") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_nested_array() {
        let res = from_str("[[1, 2,],]").unwrap();
        let expected = JSONText {
            value: JSONArray {
                values: vec![
                    JSONArray { values: vec![Integer("1"), Integer("2")] }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_empty_array() {
        let sample = r#"[]"#;
        let res = from_str(sample).unwrap();
        let expected = JSONText { value: JSONArray { values: vec![] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_leading_comma_array() {
        let sample = r#"[
    ,null
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_array() {
        let sample = r#"[
    ,
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_no_comma_array() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_regular_array() {
        let sample = r#"[
    true,
    false,
    null
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_array() {
        let sample = r#"[
    null,
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_array_element() {
        let sample = r#"[
    false
    /*
        true
    */
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_top_level_value() {
        let sample = r#"null
/*
    Some non-comment top-level value is needed;
    we use null above.
*/"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_in_string() {
        let sample = r#""This /* block comment */ isn't really a block comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_preceding_top_level_value() {
        let sample = r#"/*
    Some non-comment top-level value is needed;
    we use null below.
*/
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_with_asterisks() {
        let sample = r#"/**
 * This is a JavaDoc-like block comment.
 * It contains asterisks inside of it.
 * It might also be closed with multiple asterisks.
 * Like this:
 **/
true"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_array_element() {
        let sample = r#"[
    false // true
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_top_level_value() {
        let sample = r#"null // Some non-comment top-level value is needed; we use null here."#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_in_string() {
        let sample = r#""This inline comment // isn't really an inline comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_preceding_top_level_value() {
        let sample = r#"// Some non-comment top-level value is needed; we use null below.
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_top_level_block_comment() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_top_level_inline_comment() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_unterminated_block_comment() {
        let sample = r#"true
/*
    This block comment doesn't terminate.
    There was a legitimate value before this,
    but this is still invalid JS/JSON5.
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_empty() {
        let sample = r#""#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_npm_package() {
        let sample = r#"{
    "name": "npm",
    "publishConfig": {
        "proprietary-attribs": false
    },
    "description": "A package manager for node",
    "keywords": [
        "package manager",
        "modules",
        "install",
        "package.json"
    ],
    "version": "1.1.22",
    "preferGlobal": true,
    "config": {
        "publishtest": false
    },
    "homepage": "http://npmjs.org/",
    "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
    "repository": {
        "type": "git",
        "url": "https://github.com/isaacs/npm"
    },
    "bugs": {
        "email": "npm-@googlegroups.com",
        "url": "http://github.com/isaacs/npm/issues"
    },
    "directories": {
        "doc": "./doc",
        "man": "./man",
        "lib": "./lib",
        "bin": "./bin"
    },
    "main": "./lib/npm.js",
    "bin": "./bin/npm-cli.js",
    "dependencies": {
        "semver": "~1.0.14",
        "ini": "1",
        "slide": "1",
        "abbrev": "1",
        "graceful-fs": "~1.1.1",
        "minimatch": "~0.2",
        "nopt": "1",
        "node-uuid": "~1.3",
        "proto-list": "1",
        "rimraf": "2",
        "request": "~2.9",
        "which": "1",
        "tar": "~0.1.12",
        "fstream": "~0.1.17",
        "block-stream": "*",
        "inherits": "1",
        "mkdirp": "0.3",
        "read": "0",
        "lru-cache": "1",
        "node-gyp": "~0.4.1",
        "fstream-npm": "0 >=0.0.5",
        "uid-number": "0",
        "archy": "0",
        "chownr": "0"
    },
    "bundleDependencies": [
        "slide",
        "ini",
        "semver",
        "abbrev",
        "graceful-fs",
        "minimatch",
        "nopt",
        "node-uuid",
        "rimraf",
        "request",
        "proto-list",
        "which",
        "tar",
        "fstream",
        "block-stream",
        "inherits",
        "mkdirp",
        "read",
        "lru-cache",
        "node-gyp",
        "fstream-npm",
        "uid-number",
        "archy",
        "chownr"
    ],
    "devDependencies": {
        "ronn": "https://github.com/isaacs/ronnjs/tarball/master"
    },
    "engines": {
        "node": "0.6 || 0.7 || 0.8",
        "npm": "1"
    },
    "scripts": {
        "test": "node ./test/run.js",
        "prepublish": "npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc",
        "dumpconf": "env | grep npm | sort | uniq"
    },
    "licenses": [
        {
            "type": "MIT +no-false-attribs",
            "url": "http://github.com/isaacs/npm/raw/master/LICENSE"
        }
    ]
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_npm_package2() {
        let sample = r#"{
    name: 'npm',
    publishConfig: {
        'proprietary-attribs': false,
    },
    description: 'A package manager for node',
    keywords: [
        'package manager',
        'modules',
        'install',
        'package.json',
    ],
    version: '1.1.22',
    preferGlobal: true,
    config: {
        publishtest: false,
    },
    homepage: 'http://npmjs.org/',
    author: 'Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)',
    repository: {
        type: 'git',
        url: 'https://github.com/isaacs/npm',
    },
    bugs: {
        email: 'npm-@googlegroups.com',
        url: 'http://github.com/isaacs/npm/issues',
    },
    directories: {
        doc: './doc',
        man: './man',
        lib: './lib',
        bin: './bin',
    },
    main: './lib/npm.js',
    bin: './bin/npm-cli.js',
    dependencies: {
        semver: '~1.0.14',
        ini: '1',
        slide: '1',
        abbrev: '1',
        'graceful-fs': '~1.1.1',
        minimatch: '~0.2',
        nopt: '1',
        'node-uuid': '~1.3',
        'proto-list': '1',
        rimraf: '2',
        request: '~2.9',
        which: '1',
        tar: '~0.1.12',
        fstream: '~0.1.17',
        'block-stream': '*',
        inherits: '1',
        mkdirp: '0.3',
        read: '0',
        'lru-cache': '1',
        'node-gyp': '~0.4.1',
        'fstream-npm': '0 >=0.0.5',
        'uid-number': '0',
        archy: '0',
        chownr: '0',
    },
    bundleDependencies: [
        'slide',
        'ini',
        'semver',
        'abbrev',
        'graceful-fs',
        'minimatch',
        'nopt',
        'node-uuid',
        'rimraf',
        'request',
        'proto-list',
        'which',
        'tar',
        'fstream',
        'block-stream',
        'inherits',
        'mkdirp',
        'read',
        'lru-cache',
        'node-gyp',
        'fstream-npm',
        'uid-number',
        'archy',
        'chownr',
    ],
    devDependencies: {
        ronn: 'https://github.com/isaacs/ronnjs/tarball/master',
    },
    engines: {
        node: '0.6 || 0.7 || 0.8',
        npm: '1',
    },
    scripts: {
        test: 'node ./test/run.js',
        prepublish: 'npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc',
        dumpconf: 'env | grep npm | sort | uniq',
    },
    licenses: [
        {
            type: 'MIT +no-false-attribs',
            url: 'http://github.com/isaacs/npm/raw/master/LICENSE',
        },
    ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_readme_example() {
        let sample = r#"{
    foo: 'bar',
    while: true,

    this: 'is a \
multi-line string',

    // this is an inline comment
    here: 'is another', // inline comment

    /* this is a block comment
       that continues on another line */

    hex: 0xDEADbeef,
    half: .5,
    delta: +10,
    to: Infinity, // and beyond!

    finally: 'a trailing comma',
    oh: [
        "we shouldn't forget",
        'arrays can have',
        'trailing commas too',
    ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_valid_whitespace() {
        let sample = r#"{
    // An invalid form feed character (\x0c) has been entered before this comment.
    // Be careful not to delete it.
    "a": true
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_cr() {
        let sample = r#"{
    // This comment is terminated with `\r`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_crlf() {
        let sample = r#"{
    // This comment is terminated with `\r\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_lf() {
        let sample = r#"{
    // This comment is terminated with `\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_cr() {
        let sample = r#"{
    // the following string contains an escaped `\r`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_crlf() {
        let sample = r#"{
    // the following string contains an escaped `\r\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_lf() {
        let sample = r#"{
    // the following string contains an escaped `\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_decimal_point() {
        let sample = r#".5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_zero() {
        let sample = r#"0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point_with_integer_exponent() {
        let sample = r#"5.e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point() {
        let sample = r#"5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_with_integer_exponent() {
        let sample = r#"1.2e3
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float() {
        let sample = r#"1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_empty() {
        let sample = r#"0x
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_hexadecimal_lowercase_letter() {
        let sample = r#"0xc8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_uppercase_x() {
        let sample = r#"0XC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_with_integer_exponent() {
        let sample = r#"0xc8e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal() {
        let sample = r#"0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_infinity() {
        let sample = r#"Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_float_exponent() {
        let sample = r#"1e2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_hexadecimal_exponent() {
        let sample = r#"1e0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_integer_exponent() {
        let sample = r#"2e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_float_exponent() {
        let sample = r#"1e-2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_negative_hexadecimal_exponent() {
        let sample = r#"1e-0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_negative_integer_exponent() {
        let sample = r#"2e-23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_zero_integer_exponent() {
        let sample = r#"5e-0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_float_exponent() {
        let sample = r#"1e+2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_hexadecimal_exponent() {
        let sample = r#"1e+0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_integer_exponent() {
        let sample = r#"1e+2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_zero_integer_exponent() {
        let sample = r#"5e+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_zero_integer_exponent() {
        let sample = r#"5e0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer() {
        let sample = r#"15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_lone_decimal_point() {
        let sample = r#".
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_nan() {
        let sample = r#"NaN
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_leading_decimal_point() {
        let sample = r#"-.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_leading_zero() {
        let sample = r#"-0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_trailing_decimal_point() {
        let sample = r#"-5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float() {
        let sample = r#"-1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_hexadecimal() {
        let sample = r#"-0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_infinity() {
        let sample = r#"-Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_integer() {
        let sample = r#"-15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_noctal() {
        let sample = r#"-098
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_negative_octal() {
        let sample = r#"-0123
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_negative_zero_float_leading_decimal_point() {
        let sample = r#"-.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_float_trailing_decimal_point() {
        let sample = r#"-0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_float() {
        let sample = r#"-0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_hexadecimal() {
        let sample = r#"-0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_integer() {
        let sample = r#"-0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_octal() {
        let sample = r#"-00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_noctal_with_leading_octal_digit() {
        let sample = r#"0780
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_noctal() {
        let sample = r#"080
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_octal() {
        let sample = r#"010
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_float_leading_decimal_point() {
        let sample = r#"+.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_leading_zero() {
        let sample = r#"+0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_trailing_decimal_point() {
        let sample = r#"+5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float() {
        let sample = r#"+1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_hexadecimal() {
        let sample = r#"+0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_infinity() {
        let sample = r#"+Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_integer() {
        let sample = r#"+15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_noctal() {
        let sample = r#"+098
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_octal() {
        let sample = r#"+0123
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_zero_float_leading_decimal_point() {
        let sample = r#"+.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float_trailing_decimal_point() {
        let sample = r#"+0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float() {
        let sample = r#"+0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_hexadecimal() {
        let sample = r#"+0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_integer() {
        let sample = r#"+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_octal() {
        let sample = r#"+00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_zero_float_leading_decimal_point() {
        let sample = r#".0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float_trailing_decimal_point() {
        let sample = r#"0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float() {
        let sample = r#"0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_hexadecimal() {
        let sample = r#"0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer_with_integer_exponent() {
        let sample = r#"0e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer() {
        let sample = r#"0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_octal() {
        let sample = r#"00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_duplicate_keys() {
        let sample = r#"{
    "a": true,
    "a": false
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_empty_object() {
        let sample = r#"{}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_illegal_unquoted_key_number() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_illegal_unquoted_key_symbol() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_leading_comma_object() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_object() {
        let sample = r#"{
    ,
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_no_comma_object() {
        let sample = r#"{
    "foo": "bar"
    "hello": "world"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_reserved_unquoted_key() {
        let sample = r#"{
    while: true
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_key() {
        let sample = r#"{
    'hello': "world"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_object() {
        let sample = r#"{
    "foo": "bar",
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unquoted_keys() {
        let sample = r#"{
    hello: "world",
    _: "underscore",
    $: "dollar sign",
    one1: "numerals",
    _$_: "multiple symbols",
    $_$hello123world_$_: "mixed"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_single_quoted_string() {
        let sample = r#"'I can\'t wait'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_multi_line_string() {
        let sample = r#"'hello\
 world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_string() {
        let sample = r#"'hello world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unescaped_multi_line_string() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_error_no_comma_array_lineno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_no_comma_array_index() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_no_comma_array_colno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_lineno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_index() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_colno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_lineno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_index() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_colno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_lineno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_index() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_colno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_lineno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_index() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_colno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_lineno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_index() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_colno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_lineno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_index() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_colno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }
}
2673}