use std::fmt::{Display, Formatter};
use std::iter::Peekable;
use std::slice::Iter;
use crate::tokenize::{TokenSpan, TokType};
use crate::tokenize::Tokens;
use crate::tokenize::TokType::{Colon, Comma, RightBrace};
use crate::utils::get_line_col_char;

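/// A single `key: value` member of a JSON5 object.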
#[derive(PartialEq, Debug)]
pub struct JSONKeyValuePair<'input> {
    pub(crate) key: JSONValue<'input>,
    pub(crate) value: JSONValue<'input>,
}

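/// Sign prefix (`+` or `-`) applied to a numeric value.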
#[derive(PartialEq, Debug)]
pub enum UnaryOperator {
    Plus,
    Minus,
}

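/// A parsed JSON5 value. Numeric and string variants borrow their lexemes
/// from the source text instead of eagerly converting them.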
#[derive(PartialEq, Debug)]
pub enum JSONValue<'input> {
    JSONObject { key_value_pairs: Vec<JSONKeyValuePair<'input>> },
    JSONArray { values: Vec<JSONValue<'input>> },
    Integer(&'input str),
    Float(&'input str),
    Exponent(&'input str),
    Null,
    Infinity,
    NaN,
    Hexadecimal(&'input str),
    Bool(bool),
    DoubleQuotedString(&'input str),
    SingleQuotedString(&'input str),
    Unary { operator: UnaryOperator, value: Box<JSONValue<'input>> },
    Identifier(&'input str),
}

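/// The root of a parsed JSON5 document: exactly one top-level value.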
#[derive(PartialEq, Debug)]
pub struct JSONText<'input> {
    pub value: JSONValue<'input>,
}

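/// Where trailing commas should be emitted when serializing.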
#[allow(dead_code)]
pub enum TrailingComma {
    ALL,
    OBJECTS,
    ARRAYS,
    NONE,
}

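/// Serialization settings: indentation width, item/key separators, and the
/// trailing-comma policy. `current_indent` accumulates indentation as the
/// formatter descends into nested containers.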
pub struct FormatConfiguration {
    pub(crate) indent: Option<usize>,
    pub(crate) item_separator: String,
    pub(crate) key_separator: String,
    pub(crate) current_indent: String,
    pub(crate) trailing_comma: TrailingComma,
}

#[allow(dead_code)]
impl FormatConfiguration {
    pub fn new(indent: Option<usize>, item_separator: &str, key_separator: &str, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent, item_separator: item_separator.to_string(), key_separator: key_separator.to_string(), current_indent: String::with_capacity(64), trailing_comma }
    }

    pub fn with_indent(indent: usize, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent: Some(indent), item_separator: ",".to_string(), key_separator: ": ".to_string(), trailing_comma, current_indent: String::with_capacity(64) }
    }

    pub fn with_separators(item_separator: &str, key_separator: &str, trailing_comma: TrailingComma) -> Self {
        FormatConfiguration { indent: Some(0), key_separator: key_separator.to_string(), trailing_comma, item_separator: item_separator.to_string(), current_indent: String::with_capacity(64) }
    }

    pub fn default() -> Self {
        FormatConfiguration { indent: None, item_separator: ", ".to_string(), key_separator: ": ".to_string(), current_indent: String::with_capacity(64), trailing_comma: TrailingComma::NONE }
    }

    pub fn compact() -> Self {
        FormatConfiguration { indent: None, item_separator: ",".to_string(), key_separator: ":".to_string(), current_indent: String::with_capacity(64), trailing_comma: TrailingComma::NONE }
    }
}

impl<'input> JSONKeyValuePair<'input> {
    fn to_string_formatted(&self, style: &mut FormatConfiguration) -> String {
        // Render the value with the same style (not its plain Display impl) so
        // nested containers pick up the configured indentation and separators.
        format!("{}{}{}", self.key.to_string_formatted(style), style.key_separator, self.value.to_string_formatted(style))
    }
}

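// Recursive serialization: `to_string_formatted` grows `style.current_indent`
// when it enters an object or array and shrinks it again on the way out.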
impl<'input> JSONValue<'input> {
    fn to_string_formatted(&self, style: &mut FormatConfiguration) -> String {
        match self {
            JSONValue::Identifier(s) | JSONValue::Integer(s) | JSONValue::Float(s) | JSONValue::Exponent(s) | JSONValue::Hexadecimal(s) => {
                s.to_string()
            }
            JSONValue::Bool(b) => {
                b.to_string()
            }
            JSONValue::DoubleQuotedString(s) => {
                format!("\"{}\"", s)
            }
            JSONValue::SingleQuotedString(s) => {
                format!("'{}'", s)
            }

            JSONValue::Null => { "null".to_string() }
            JSONValue::Infinity => { "Infinity".to_string() }
            JSONValue::NaN => { "NaN".to_string() }

            JSONValue::Unary { operator, value } => {
                let op_char = match operator {
                    UnaryOperator::Plus => '+',
                    UnaryOperator::Minus => '-',
                };
                // Format the operand with the same style so nested values
                // honor the configured formatting.
                let value_string = value.to_string_formatted(style);
                format!("{}{}", op_char, value_string)
            }
            JSONValue::JSONObject { key_value_pairs } => {
                let mut ret: String;

                match style.indent {
                    None => {
                        ret = String::from("{");
                    }
                    Some(ident) => {
                        // Entering a container: grow the accumulated indentation.
                        style.current_indent.reserve(ident);
                        for _ in 0..ident {
                            style.current_indent.push(' ');
                        }
                        ret = format!("{{\n{}", style.current_indent);
                    }
                }
                for (idx, kvp) in key_value_pairs.iter().enumerate() {
                    ret.push_str(kvp.to_string_formatted(style).as_str());
                    if idx < key_value_pairs.len() - 1 {
                        match style.indent {
                            None => {
                                ret.push_str(style.item_separator.as_str());
                            }
                            Some(_) => {
                                ret.push_str(format!(",\n{}", style.current_indent).as_str());
                            }
                        }
                    }
                }
                match style.trailing_comma {
                    TrailingComma::ALL | TrailingComma::OBJECTS => {
                        ret.push(',');
                    }
                    _ => {}
                }
                match style.indent {
                    None => {
                        ret.push('}');
                    }
                    Some(ident) => {
                        // Leaving the container: shrink the indentation again.
                        style.current_indent.truncate(style.current_indent.len() - ident);
                        ret.push_str(format!("\n{}}}", style.current_indent).as_str());
                    }
                }
                ret
            }
            JSONValue::JSONArray { values } => {
                let mut ret: String;

                match style.indent {
                    None => {
                        ret = String::from("[");
                    }
                    Some(ident) => {
                        style.current_indent.reserve(ident);
                        for _ in 0..ident {
                            style.current_indent.push(' ');
                        }
                        ret = format!("[\n{}", style.current_indent);
                    }
                }
                for (idx, value) in values.iter().enumerate() {
                    ret.push_str(value.to_string_formatted(style).as_str());
                    if idx < values.len() - 1 {
                        match style.indent {
                            None => {
                                ret.push_str(style.item_separator.as_str());
                            }
                            Some(_) => {
                                ret.push_str(format!(",\n{}", style.current_indent).as_str());
                            }
                        }
                    }
                }
                match style.trailing_comma {
                    TrailingComma::ALL | TrailingComma::ARRAYS => {
                        ret.push(',');
                    }
                    _ => {}
                }
                match style.indent {
                    None => {
                        ret.push(']');
                    }
                    Some(ident) => {
                        style.current_indent.truncate(style.current_indent.len() - ident);
                        ret.push_str(format!("\n{}]", style.current_indent).as_str());
                    }
                }
                ret
            }
        }
    }
}


impl<'input> Display for JSONValue<'input> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let mut style = FormatConfiguration::default();
        let res = self.to_string_formatted(&mut style);
        write!(f, "{}", res)
    }
}


impl<'input> Display for JSONText<'input> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.value.to_string_formatted(&mut FormatConfiguration::default()))
    }
}

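/// An error produced while tokenizing or parsing, carrying both a byte
/// offset (`index`) and line/column/character coordinates into the source.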
#[derive(Debug, PartialEq)]
pub struct ParsingError {
    pub index: usize,
    pub message: String,
    pub lineno: usize,
    pub colno: usize,
    pub char_index: usize,
}

impl Display for ParsingError {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "ParsingError: {}: line {} column {} (char {})", self.message, self.lineno, self.colno, self.char_index)
    }
}

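/// Recursive-descent parser over a token stream. `'toks` is the lifetime of
/// the token buffer, `'input` that of the original source text.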
struct JSON5Parser<'toks, 'input> {
    source: &'input str,
    source_tokens: Peekable<Iter<'toks, TokenSpan>>,
    lookahead: Option<&'toks TokenSpan>,
}


impl<'toks, 'input> JSON5Parser<'toks, 'input> {
    fn new(tokens: &'toks Tokens<'input>) -> Self {
        JSON5Parser { source_tokens: tokens.tok_spans.iter().peekable(), lookahead: None, source: tokens.source }
    }

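    /// Consume the next significant token, skipping whitespace and comment
    /// tokens, and remember it in `self.lookahead` for error reporting.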
    fn advance(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.next() {
            None => {
                self.lookahead = None;
                None
            }
            Some(span) => {
                match span.1 {
                    TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                        return self.advance()
                    }
                    _ => {
                        self.lookahead = Some(span);
                        self.lookahead
                    }
                }
            }
        }
    }

    #[inline]
    fn get_tok_source(&self, span: &'toks TokenSpan) -> &'input str {
        &self.source[span.0 .. span.2]
    }

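    /// Peek at the next significant token without consuming it; whitespace
    /// and comment tokens are discarded along the way.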
    fn peek(&mut self) -> Option<&'toks TokenSpan> {
        match self.source_tokens.peek() {
            None => None,
            Some(span) => {
                match span.1 {
                    TokType::BlockComment | TokType::LineComment | TokType::Whitespace => {
                        self.source_tokens.next();
                        self.peek()
                    }
                    _ => {
                        Some(span)
                    }
                }
            }
        }
    }

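    /// Best-effort byte offset for error reporting: the start of the next
    /// token, or the end of the last consumed token at end of input.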
    fn position(&mut self) -> usize {
        match self.peek() {
            None => {
                match self.lookahead {
                    None => 0,
                    Some(span) => span.2,
                }
            }
            Some(span) => {
                span.0
            }
        }
    }

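    /// Build a `ParsingError` at the given byte offset, translating it into
    /// line/column/character coordinates.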
    fn make_error(&self, message: String, index: usize) -> ParsingError {
        let (lineno, colno, char_index) = get_line_col_char(self.source, index);
        ParsingError {
            index,
            message,
            lineno,
            colno,
            char_index,
        }
    }

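    /// If the next significant token is one of `types`, consume and return
    /// it; otherwise leave the stream untouched and return `None`.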
    fn check_and_consume(&mut self, types: Vec<TokType>) -> Option<&'toks TokenSpan> {
        let next_tok = self.peek()?;
        for toktype in types {
            if next_tok.1 == toktype {
                return self.advance();
            }
        }
        None
    }

    #[inline]
    fn check_and_consume_with_source(&mut self, types: Vec<TokType>) -> Option<(&'toks TokenSpan, &'input str)> {
        let tok = self.check_and_consume(types)?;
        let source = self.get_tok_source(tok);
        Some((tok, source))
    }

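    /// Parse an object member name: an unquoted identifier or a quoted
    /// string (surrounding quotes are stripped from the lexeme).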
    fn parse_key(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume_with_source(vec![TokType::Name, TokType::DoubleQuotedString, TokType::SingleQuotedString]) {
            None => {
                match self.peek() {
                    None => {
                        let idx = self.position();
                        Err(self.make_error("Unexpected EOF. Was expecting MemberName at".to_string(), idx))
                    }
                    Some(span) => {
                        let src = self.get_tok_source(span);
                        Err(self.make_error(format!("Invalid token for unquoted key ({}, {:?}) at", span.2, src), span.0))
                    }
                }
            },
            Some((span, lexeme)) => {
                match span.1 {
                    TokType::DoubleQuotedString => {
                        Ok(JSONValue::DoubleQuotedString(&lexeme[1..lexeme.len() - 1]))
                    },
                    TokType::SingleQuotedString => {
                        Ok(JSONValue::SingleQuotedString(&lexeme[1..lexeme.len() - 1]))
                    }
                    TokType::Name => {
                        Ok(JSONValue::Identifier(lexeme))
                    }
                    _ => unreachable!("Programming error. Please report this as a bug")
                }
            }
        }
    }

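    /// Parse object members after the opening '{' has been consumed; a
    /// trailing comma before the closing '}' is accepted.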
    fn parse_object(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let mut kvps: Vec<JSONKeyValuePair> = Vec::new();
        loop {
            match self.check_and_consume(vec![RightBrace]) {
                None => {
                    let key = self.parse_key()?;
                    match self.check_and_consume(vec![Colon]) {
                        None => {
                            let idx = self.position();
                            return Err(self.make_error("Expecting ':' delimiter".to_string(), idx))
                        }
                        Some(_) => {
                            let val = self.parse_value()?;
                            let kvp = JSONKeyValuePair { key, value: val };
                            kvps.push(kvp);
                            match self.check_and_consume(vec![Comma]) {
                                None => {
                                    match self.check_and_consume(vec![RightBrace]) {
                                        None => {
                                            let idx = self.position();
                                            return Err(self.make_error("Expecting '}' at end of object".to_string(), idx))
                                        },
                                        Some(_) => {
                                            break Ok(JSONValue::JSONObject { key_value_pairs: kvps })
                                        }
                                    }
                                }
                                Some(_) => {
                                    continue
                                }
                            }
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONObject { key_value_pairs: kvps })
                }
            }
        }
    }

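    /// Parse array elements after the opening '[' has been consumed; a
    /// trailing comma before the closing ']' is accepted.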
    fn parse_array(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let mut values: Vec<JSONValue> = Vec::new();
        loop {
            match self.check_and_consume(vec![TokType::RightBracket]) {
                None => {
                    let val = self.parse_value()?;
                    values.push(val);
                    match self.check_and_consume(vec![Comma]) {
                        None => {
                            match self.check_and_consume(vec![TokType::RightBracket]) {
                                None => {
                                    let idx = self.position();
                                    return Err(self.make_error("Expecting ']' at end of array".to_string(), idx))
                                },
                                Some(_) => {
                                    break Ok(JSONValue::JSONArray { values })
                                }
                            }
                        }
                        Some(_) => {
                            continue
                        }
                    }
                }
                Some(_) => {
                    break Ok(JSONValue::JSONArray { values })
                }
            }
        }
    }

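    /// Parse a scalar: numbers, strings, booleans, `null`, `Infinity`,
    /// `NaN`, or a hexadecimal literal. Any other token is an error.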
    fn parse_primary(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        let span = self.advance().unwrap();
        match &span.1 {
            TokType::Integer => { Ok(JSONValue::Integer(self.get_tok_source(span))) }
            TokType::Float => { Ok(JSONValue::Float(self.get_tok_source(span))) }
            TokType::Exponent => { Ok(JSONValue::Exponent(self.get_tok_source(span))) }
            TokType::SingleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::SingleQuotedString(&lexeme[1..lexeme.len() - 1]))
            },
            TokType::DoubleQuotedString => {
                let lexeme = self.get_tok_source(span);
                Ok(JSONValue::DoubleQuotedString(&lexeme[1..lexeme.len() - 1]))
            },
            TokType::True => Ok(JSONValue::Bool(true)),
            TokType::False => Ok(JSONValue::Bool(false)),
            TokType::Null => Ok(JSONValue::Null),
            TokType::Infinity => Ok(JSONValue::Infinity),
            TokType::Nan => Ok(JSONValue::NaN),
            TokType::Hexadecimal => Ok(JSONValue::Hexadecimal(self.get_tok_source(span))),
            TokType::EOF => {
                match self.position() {
                    0 => Err(self.make_error("Unexpected EOF. Was expecting value.".to_string(), 0)),
                    pos => Err(self.make_error("Unexpected EOF".to_string(), pos))
                }
            },
            t => Err(self.make_error(format!("Unexpected token of type {:?}: {:?}", t, self.get_tok_source(span)), span.0))
        }
    }

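    /// Parse an optional `+`/`-` sign followed by a value; signs are only
    /// permitted in front of numeric values (including `Infinity` and `NaN`).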
    fn parse_unary(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume(vec![TokType::Plus, TokType::Minus]) {
            None => self.parse_primary(),
            Some(span) => {
                match span.1 {
                    TokType::Plus => {
                        let value = self.parse_unary()?;
                        match value {
                            JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Unary { .. } | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                            val => {
                                return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                            }
                        }
                        Ok(JSONValue::Unary { operator: UnaryOperator::Plus, value: Box::new(value) })
                    }
                    TokType::Minus => {
                        let value = self.parse_unary()?;
                        match value {
                            JSONValue::Float(_) | JSONValue::Integer(_) | JSONValue::Infinity | JSONValue::NaN | JSONValue::Unary { .. } | JSONValue::Hexadecimal(_) | JSONValue::Exponent(_) => {}
                            val => {
                                return Err(self.make_error(format!("Unary operations not allowed for value {:?}", val), span.2))
                            }
                        }
                        Ok(JSONValue::Unary { operator: UnaryOperator::Minus, value: Box::new(value) })
                    }
                    _ => unreachable!("check_and_consume only returns the requested token types")
                }
            }
        }
    }

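    /// Dispatch on the next token: '{' begins an object, '[' begins an
    /// array, and anything else falls through to the unary/primary rules.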
    fn parse_obj_or_array(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        match self.check_and_consume(vec![TokType::LeftBracket, TokType::LeftBrace]) {
            None => self.parse_unary(),
            Some(span) => {
                match span.1 {
                    TokType::LeftBrace => self.parse_object(),
                    TokType::LeftBracket => self.parse_array(),
                    _ => unreachable!("check_and_consume only returns the requested token types")
                }
            }
        }
    }


    fn parse_value(&mut self) -> Result<JSONValue<'input>, ParsingError> {
        self.parse_obj_or_array()
    }

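    /// Parse a complete document: exactly one value, optionally followed by
    /// an EOF token. Any other trailing token is an error.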
    fn parse_text(&mut self) -> Result<JSONText<'input>, ParsingError> {
        let value = self.parse_value()?;
        match self.advance() {
            None => {}
            Some(span) => {
                if span.1 != TokType::EOF {
                    return Err(self.make_error(format!("Unexpected {:?} token after value", span.1), span.0 - 1))
                }
            }
        }
        Ok(JSONText { value })
    }
}

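/// Parse an already-tokenized buffer into a `JSONText` AST.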
pub fn from_tokens<'toks, 'input>(tokens: &'toks Tokens<'input>) -> Result<JSONText<'input>, ParsingError> {
    let mut parser = JSON5Parser::new(tokens);
    parser.parse_text()
}

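/// Tokenize and parse a JSON5 document in one step.
///
/// A minimal usage sketch (marked `ignore` since the exact path this module
/// is exposed under is an assumption):
///
/// ```ignore
/// let doc = from_str("{a: 1, /* comment */ b: [2, 3,]}").unwrap();
/// // JSONText implements Display, so the tree can be re-serialized:
/// println!("{}", doc);
/// ```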
pub fn from_str<'input>(source: &'input str) -> Result<JSONText<'input>, ParsingError> {
    use crate::tokenize::tokenize_str;
    let maybe_toks = tokenize_str(source);
    match maybe_toks {
        Err(e) => {
            Err(ParsingError { index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno })
        }
        Ok(toks) => {
            from_tokens(&toks)
        }
    }
}

#[cfg(test)]
mod tests {
    use crate::tokenize::Tokenizer;
    use crate::parser::JSONValue::*;
    use super::*;

    #[test]
    fn test_foo() {
        let res = from_str("{}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![] } };
        assert_eq!(res, expected)
    }

    #[test]
    fn test_illegal_identifier_escape() {
        let text = r#"{ \u0031foo: 123 }"#;
        from_str(text).unwrap_err();
    }

    #[test]
    fn test_illegal_unary() {
        let res = from_str("-'foo'");
        res.unwrap_err();
    }

    #[test]
    fn test_object() {
        let res = from_str("{\"foo\": \"bar\"}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![JSONKeyValuePair { key: JSONValue::DoubleQuotedString("foo"), value: JSONValue::DoubleQuotedString("bar") }] } };
        assert_eq!(res, expected)
    }

    #[test]
    fn test_identifier() {
        let res = from_str("{foo: \"bar\"}").unwrap();
        let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![JSONKeyValuePair { key: JSONValue::Identifier("foo"), value: JSONValue::DoubleQuotedString("bar") }] } };
        assert_eq!(res, expected)
    }

    #[test]
    fn test_array() {
        let res = from_str("[1,2,3]").unwrap();
        let expected = JSONText { value: JSONArray { values: vec![JSONValue::Integer("1"), JSONValue::Integer("2"), JSONValue::Integer("3")] } };
        assert_eq!(res, expected)
    }

    #[test]
    fn val_int() {
        let res = from_str("1").unwrap();
        let expected = JSONText { value: Integer("1") };
        assert_eq!(res, expected)
    }

    #[test]
    fn val_float() {
        let res = from_str("1.0").unwrap();
        let expected = JSONText { value: Float("1.0") };
        assert_eq!(res, expected)
    }

    #[test]
    fn val_string() {
        let res = from_str("'foo'").unwrap();
        let expected = JSONText { value: SingleQuotedString("foo") };
        assert_eq!(res, expected)
    }

    #[test]
    fn multiline_string() {
        let res = from_str("'foo\\\nbar'").unwrap();
        let expected = JSONText { value: SingleQuotedString("foo\\\nbar") };
        assert_eq!(res, expected)
    }

    #[test]
    fn test_empty_string() {
        let res = from_str("\"\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_single_element_array() {
        let res = from_str("[42]").unwrap();
        let expected = JSONText { value: JSONArray { values: vec![Integer("42")] } };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_single_key_object() {
        let res = from_str("{\"key\": \"value\"}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: DoubleQuotedString("key"),
                        value: DoubleQuotedString("value"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_array() {
        let res = from_str("[1, 2, 3,]").unwrap();
        let expected = JSONText {
            value: JSONArray {
                values: vec![Integer("1"), Integer("2"), Integer("3")]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_object() {
        let res = from_str("{\"a\": 1, \"b\": 2,}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: DoubleQuotedString("a"),
                        value: Integer("1"),
                    },
                    JSONKeyValuePair {
                        key: DoubleQuotedString("b"),
                        value: Integer("2"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_unquoted_key() {
        let res = from_str("{key: \"value\"}").unwrap();
        let expected = JSONText {
            value: JSONObject {
                key_value_pairs: vec![
                    JSONKeyValuePair {
                        key: Identifier("key"),
                        value: DoubleQuotedString("value"),
                    }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_multiline_string() {
        let res = from_str("\"multi\\\nline\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("multi\\\nline") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_unicode_characters() {
        let res = from_str("\"\\u2764\"").unwrap();
        let expected = JSONText { value: DoubleQuotedString("\\u2764") };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_trailing_comma_in_nested_array() {
        let res = from_str("[[1, 2,],]").unwrap();
        let expected = JSONText {
            value: JSONArray {
                values: vec![
                    JSONArray { values: vec![Integer("1"), Integer("2")] }
                ]
            }
        };
        assert_eq!(res, expected);
    }

    #[test]
    fn test_empty_array() {
        let sample = r#"[]"#;
        let res = from_str(sample).unwrap();
        let expected = JSONText { value: JSONArray { values: vec![] } };
        assert_eq!(res, expected)
    }

    #[test]
    fn test_leading_comma_array() {
        let sample = r#"[
    ,null
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_array() {
        let sample = r#"[
    ,
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_no_comma_array() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_regular_array() {
        let sample = r#"[
    true,
    false,
    null
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_array() {
        let sample = r#"[
    null,
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_array_element() {
        let sample = r#"[
    false
    /*
        true
    */
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_following_top_level_value() {
        let sample = r#"null
/*
    Some non-comment top-level value is needed;
    we use null above.
*/"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_in_string() {
        let sample = r#""This /* block comment */ isn't really a block comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_preceding_top_level_value() {
        let sample = r#"/*
    Some non-comment top-level value is needed;
    we use null below.
*/
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_block_comment_with_asterisks() {
        let sample = r#"/**
 * This is a JavaDoc-like block comment.
 * It contains asterisks inside of it.
 * It might also be closed with multiple asterisks.
 * Like this:
 **/
true"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_array_element() {
        let sample = r#"[
    false // true
]"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_following_top_level_value() {
        let sample = r#"null // Some non-comment top-level value is needed; we use null here."#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_in_string() {
        let sample = r#""This inline comment // isn't really an inline comment.""#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_inline_comment_preceding_top_level_value() {
        let sample = r#"// Some non-comment top-level value is needed; we use null below.
null"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_top_level_block_comment() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_top_level_inline_comment() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_unterminated_block_comment() {
        let sample = r#"true
/*
    This block comment doesn't terminate.
    There was a legitimate value before this,
    but this is still invalid JS/JSON5.
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_empty() {
        let sample = r#""#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_npm_package() {
        let sample = r#"{
  "name": "npm",
  "publishConfig": {
    "proprietary-attribs": false
  },
  "description": "A package manager for node",
  "keywords": [
    "package manager",
    "modules",
    "install",
    "package.json"
  ],
  "version": "1.1.22",
  "preferGlobal": true,
  "config": {
    "publishtest": false
  },
  "homepage": "http://npmjs.org/",
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "repository": {
    "type": "git",
    "url": "https://github.com/isaacs/npm"
  },
  "bugs": {
    "email": "npm-@googlegroups.com",
    "url": "http://github.com/isaacs/npm/issues"
  },
  "directories": {
    "doc": "./doc",
    "man": "./man",
    "lib": "./lib",
    "bin": "./bin"
  },
  "main": "./lib/npm.js",
  "bin": "./bin/npm-cli.js",
  "dependencies": {
    "semver": "~1.0.14",
    "ini": "1",
    "slide": "1",
    "abbrev": "1",
    "graceful-fs": "~1.1.1",
    "minimatch": "~0.2",
    "nopt": "1",
    "node-uuid": "~1.3",
    "proto-list": "1",
    "rimraf": "2",
    "request": "~2.9",
    "which": "1",
    "tar": "~0.1.12",
    "fstream": "~0.1.17",
    "block-stream": "*",
    "inherits": "1",
    "mkdirp": "0.3",
    "read": "0",
    "lru-cache": "1",
    "node-gyp": "~0.4.1",
    "fstream-npm": "0 >=0.0.5",
    "uid-number": "0",
    "archy": "0",
    "chownr": "0"
  },
  "bundleDependencies": [
    "slide",
    "ini",
    "semver",
    "abbrev",
    "graceful-fs",
    "minimatch",
    "nopt",
    "node-uuid",
    "rimraf",
    "request",
    "proto-list",
    "which",
    "tar",
    "fstream",
    "block-stream",
    "inherits",
    "mkdirp",
    "read",
    "lru-cache",
    "node-gyp",
    "fstream-npm",
    "uid-number",
    "archy",
    "chownr"
  ],
  "devDependencies": {
    "ronn": "https://github.com/isaacs/ronnjs/tarball/master"
  },
  "engines": {
    "node": "0.6 || 0.7 || 0.8",
    "npm": "1"
  },
  "scripts": {
    "test": "node ./test/run.js",
    "prepublish": "npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc",
    "dumpconf": "env | grep npm | sort | uniq"
  },
  "licenses": [
    {
      "type": "MIT +no-false-attribs",
      "url": "http://github.com/isaacs/npm/raw/master/LICENSE"
    }
  ]
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_npm_package2() {
        let sample = r#"{
  name: 'npm',
  publishConfig: {
    'proprietary-attribs': false,
  },
  description: 'A package manager for node',
  keywords: [
    'package manager',
    'modules',
    'install',
    'package.json',
  ],
  version: '1.1.22',
  preferGlobal: true,
  config: {
    publishtest: false,
  },
  homepage: 'http://npmjs.org/',
  author: 'Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)',
  repository: {
    type: 'git',
    url: 'https://github.com/isaacs/npm',
  },
  bugs: {
    email: 'npm-@googlegroups.com',
    url: 'http://github.com/isaacs/npm/issues',
  },
  directories: {
    doc: './doc',
    man: './man',
    lib: './lib',
    bin: './bin',
  },
  main: './lib/npm.js',
  bin: './bin/npm-cli.js',
  dependencies: {
    semver: '~1.0.14',
    ini: '1',
    slide: '1',
    abbrev: '1',
    'graceful-fs': '~1.1.1',
    minimatch: '~0.2',
    nopt: '1',
    'node-uuid': '~1.3',
    'proto-list': '1',
    rimraf: '2',
    request: '~2.9',
    which: '1',
    tar: '~0.1.12',
    fstream: '~0.1.17',
    'block-stream': '*',
    inherits: '1',
    mkdirp: '0.3',
    read: '0',
    'lru-cache': '1',
    'node-gyp': '~0.4.1',
    'fstream-npm': '0 >=0.0.5',
    'uid-number': '0',
    archy: '0',
    chownr: '0',
  },
  bundleDependencies: [
    'slide',
    'ini',
    'semver',
    'abbrev',
    'graceful-fs',
    'minimatch',
    'nopt',
    'node-uuid',
    'rimraf',
    'request',
    'proto-list',
    'which',
    'tar',
    'fstream',
    'block-stream',
    'inherits',
    'mkdirp',
    'read',
    'lru-cache',
    'node-gyp',
    'fstream-npm',
    'uid-number',
    'archy',
    'chownr',
  ],
  devDependencies: {
    ronn: 'https://github.com/isaacs/ronnjs/tarball/master',
  },
  engines: {
    node: '0.6 || 0.7 || 0.8',
    npm: '1',
  },
  scripts: {
    test: 'node ./test/run.js',
    prepublish: 'npm prune; rm -rf node_modules/*/{test,example,bench}*; make -j4 doc',
    dumpconf: 'env | grep npm | sort | uniq',
  },
  licenses: [
    {
      type: 'MIT +no-false-attribs',
      url: 'http://github.com/isaacs/npm/raw/master/LICENSE',
    },
  ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_readme_example() {
        let sample = r#"{
    foo: 'bar',
    while: true,

    this: 'is a \
multi-line string',

    // this is an inline comment
    here: 'is another', // inline comment

    /* this is a block comment
       that continues on another line */

    hex: 0xDEADbeef,
    half: .5,
    delta: +10,
    to: Infinity, // and beyond!

    finally: 'a trailing comma',
    oh: [
        "we shouldn't forget",
        'arrays can have',
        'trailing commas too',
    ],
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_valid_whitespace() {
        let sample = r#"{
    // An invalid form feed character (\x0c) has been entered before this comment.
    // Be careful not to delete it.
    "a": true
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_cr() {
        let sample = r#"{
    // This comment is terminated with `\r`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_crlf() {
        let sample = r#"{
    // This comment is terminated with `\r\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_comment_lf() {
        let sample = r#"{
    // This comment is terminated with `\n`.
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_cr() {
        let sample = r#"{
    // the following string contains an escaped `\r`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_crlf() {
        let sample = r#"{
    // the following string contains an escaped `\r\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_lf() {
        let sample = r#"{
    // the following string contains an escaped `\n`
    a: 'line 1 \
line 2'
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_decimal_point() {
        let sample = r#".5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_leading_zero() {
        let sample = r#"0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point_with_integer_exponent() {
        let sample = r#"5.e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_trailing_decimal_point() {
        let sample = r#"5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float_with_integer_exponent() {
        let sample = r#"1.2e3
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_float() {
        let sample = r#"1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_empty() {
        let sample = r#"0x
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_hexadecimal_lowercase_letter() {
        let sample = r#"0xc8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_uppercase_x() {
        let sample = r#"0XC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal_with_integer_exponent() {
        let sample = r#"0xc8e4
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_hexadecimal() {
        let sample = r#"0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_infinity() {
        let sample = r#"Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_float_exponent() {
        let sample = r#"1e2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_hexadecimal_exponent() {
        let sample = r#"1e0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_integer_exponent() {
        let sample = r#"2e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_float_exponent() {
        let sample = r#"1e-2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_negative_hexadecimal_exponent() {
        let sample = r#"1e-0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_integer_with_negative_integer_exponent() {
        let sample = r#"2e-23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_negative_zero_integer_exponent() {
        let sample = r#"5e-0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_float_exponent() {
        let sample = r#"1e+2.3
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_hexadecimal_exponent() {
        let sample = r#"1e+0x4
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_integer_with_positive_integer_exponent() {
        let sample = r#"1e+2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_positive_zero_integer_exponent() {
        let sample = r#"5e+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer_with_zero_integer_exponent() {
        let sample = r#"5e0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_integer() {
        let sample = r#"15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_lone_decimal_point() {
        let sample = r#".
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err(), "{:?}", res.unwrap());
        }
    }

    #[test]
    fn test_nan() {
        let sample = r#"NaN
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_leading_decimal_point() {
        let sample = r#"-.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_leading_zero() {
        let sample = r#"-0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float_trailing_decimal_point() {
        let sample = r#"-5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_float() {
        let sample = r#"-1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_hexadecimal() {
        let sample = r#"-0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_infinity() {
        let sample = r#"-Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_integer() {
        let sample = r#"-15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_noctal() {
        let sample = r#"-098
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_negative_octal() {
        let sample = r#"-0123
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_negative_zero_float_leading_decimal_point() {
        let sample = r#"-.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_float_trailing_decimal_point() {
        let sample = r#"-0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_float() {
        let sample = r#"-0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_hexadecimal() {
        let sample = r#"-0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_integer() {
        let sample = r#"-0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_negative_zero_octal() {
        let sample = r#"-00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_noctal_with_leading_octal_digit() {
        let sample = r#"0780
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_noctal() {
        let sample = r#"080
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_octal() {
        let sample = r#"010
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_float_leading_decimal_point() {
        let sample = r#"+.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_leading_zero() {
        let sample = r#"+0.5
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float_trailing_decimal_point() {
        let sample = r#"+5.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_float() {
        let sample = r#"+1.2
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_hexadecimal() {
        let sample = r#"+0xC8
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_infinity() {
        let sample = r#"+Infinity
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_integer() {
        let sample = r#"+15
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_noctal() {
        let sample = r#"+098
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_octal() {
        let sample = r#"+0123
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_positive_zero_float_leading_decimal_point() {
        let sample = r#"+.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float_trailing_decimal_point() {
        let sample = r#"+0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_float() {
        let sample = r#"+0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_hexadecimal() {
        let sample = r#"+0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_integer() {
        let sample = r#"+0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_positive_zero_octal() {
        let sample = r#"+00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_zero_float_leading_decimal_point() {
        let sample = r#".0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float_trailing_decimal_point() {
        let sample = r#"0.
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_float() {
        let sample = r#"0.0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_hexadecimal() {
        let sample = r#"0x0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer_with_integer_exponent() {
        let sample = r#"0e23
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_integer() {
        let sample = r#"0
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_zero_octal() {
        let sample = r#"00
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_duplicate_keys() {
        let sample = r#"{
    "a": true,
    "a": false
}
"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_empty_object() {
        let sample = r#"{}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_illegal_unquoted_key_number() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_illegal_unquoted_key_symbol() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_leading_comma_object() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_lone_trailing_comma_object() {
        let sample = r#"{
    ,
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_no_comma_object() {
        let sample = r#"{
    "foo": "bar"
    "hello": "world"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_reserved_unquoted_key() {
        let sample = r#"{
    while: true
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_key() {
        let sample = r#"{
    'hello': "world"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_trailing_comma_object() {
        let sample = r#"{
    "foo": "bar",
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unquoted_keys() {
        let sample = r#"{
    hello: "world",
    _: "underscore",
    $: "dollar sign",
    one1: "numerals",
    _$_: "multiple symbols",
    $_$hello123world_$_: "mixed"
}"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_escaped_single_quoted_string() {
        let sample = r#"'I can\'t wait'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_multi_line_string() {
        let sample = r#"'hello\
 world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_single_quoted_string() {
        let sample = r#"'hello world'"#;
        let _res = from_str(sample).unwrap();
    }

    #[test]
    fn test_unescaped_multi_line_string() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            return
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            assert!(res.is_err());
        }
    }

    #[test]
    fn test_error_no_comma_array_lineno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 3_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_no_comma_array_index() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 15_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_no_comma_array_colno() {
        let sample = r#"[
    true
    false
]"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_lineno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 4_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_index() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 76_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_block_comment_colno() {
        let sample = r#"/*
    This should fail;
    comments cannot be the only top-level value.
*/"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 3_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_lineno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_index() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 65_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_top_level_inline_comment_colno() {
        let sample = r#"// This should fail; comments cannot be the only top-level value."#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 66_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_lineno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_index() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_number_colno() {
        let sample = r#"{
    10twenty: "ten twenty"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_lineno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_index() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 11_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_illegal_unquoted_key_symbol_colno() {
        let sample = r#"{
    multi-word: "multi-word"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 10_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_lineno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 2_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_index() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 6_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_leading_comma_object_colno() {
        let sample = r#"{
    ,"foo": "bar"
}"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_lineno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.lineno, 1_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_index() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.char_index, 4_usize, "{:?}", err);
        }
    }

    #[test]
    fn test_error_unescaped_multi_line_string_colno() {
        let sample = r#""foo
bar"
"#;
        let maybe_tokens = Tokenizer::new(sample).tokenize();
        if maybe_tokens.is_err() {
            let err = maybe_tokens.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        } else {
            let toks = maybe_tokens.unwrap();
            let res = from_tokens(&toks);
            let err = res.unwrap_err();
            assert_eq!(err.colno, 5_usize, "{:?}", err);
        }
    }
}