use crate::scanner::*;
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug, Eq)]
enum State {
    StreamStart,
    ImplicitDocumentStart,
    DocumentStart,
    DocumentContent,
    DocumentEnd,
    BlockNode,
    BlockSequenceFirstEntry,
    BlockSequenceEntry,
    IndentlessSequenceEntry,
    BlockMappingFirstKey,
    BlockMappingKey,
    BlockMappingValue,
    FlowSequenceFirstEntry,
    FlowSequenceEntry,
    FlowSequenceEntryMappingKey,
    FlowSequenceEntryMappingValue,
    FlowSequenceEntryMappingEnd,
    FlowMappingFirstKey,
    FlowMappingKey,
    FlowMappingValue,
    FlowMappingEmptyValue,
    End,
}

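/// A parse event emitted by `Parser`. `Scalar` carries the scalar value, its
/// style, an anchor id (0 means "no anchor"), and an optional tag token;
/// `Line` carries a directive line (e.g. `%YAML 1.2` or a `%TAG` directive)
/// reconstructed from the corresponding token.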
#[derive(Clone, PartialEq, Debug, Eq)]
pub enum Event {
    Nothing,
    StreamStart,
    StreamEnd,
    DocumentStart(u64, u64),
    DocumentEnd,
    Alias(usize),
    Scalar(String, TScalarStyle, usize, Option<TokenType>),
    SequenceStart(usize),
    SequenceEnd,
    MappingStart(usize, bool),
    MappingEnd,

    Line(String),
}

impl Event {
    fn empty_scalar() -> Event {
        Event::Scalar("~".to_owned(), TScalarStyle::Plain, 0, None)
    }

    fn empty_scalar_with_anchor(anchor: usize, tag: Option<TokenType>) -> Event {
        Event::Scalar("".to_owned(), TScalarStyle::Plain, anchor, tag)
    }
}

#[derive(Debug)]
pub struct Parser<T> {
    scanner: Scanner<T>,
    states: Vec<State>,
    state: State,
    marks: Vec<Marker>,
    token: Option<Token>,
    current: Option<(Event, Marker)>,
    anchors: HashMap<String, usize>,
    anchor_id: usize,
}

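/// Callback interface for receiving parse events without position
/// information. Any `EventReceiver` automatically implements
/// `MarkedEventReceiver` through the blanket impl below, so it can be passed
/// to `Parser::load`.
///
/// A minimal sketch (illustrative only; the `crate::parser` path and the
/// `EventPrinter` type are assumptions, not part of the original source):
///
/// ```ignore
/// use crate::parser::{Event, EventReceiver};
///
/// struct EventPrinter;
///
/// impl EventReceiver for EventPrinter {
///     fn on_event(&mut self, ev: Event) {
///         println!("{:?}", ev);
///     }
/// }
/// ```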
pub trait EventReceiver {
    fn on_event(&mut self, ev: Event);
}

pub trait MarkedEventReceiver {
    fn on_event(&mut self, ev: Event, _mark: Marker);
}

impl<R: EventReceiver> MarkedEventReceiver for R {
    fn on_event(&mut self, ev: Event, _mark: Marker) {
        self.on_event(ev)
    }
}

pub type ParseResult = Result<(Event, Marker), ScanError>;

impl<T: Iterator<Item = char>> Parser<T> {
    pub fn new(src: T) -> Parser<T> {
        Parser {
            scanner: Scanner::new(src),
            states: Vec::new(),
            state: State::StreamStart,
            marks: Vec::new(),
            token: None,
            current: None,

            anchors: HashMap::new(),
            anchor_id: 1,
        }
    }

    pub fn peek(&mut self) -> Result<&(Event, Marker), ScanError> {
        match self.current {
            Some(ref x) => Ok(x),
            None => {
                self.current = Some(self.next()?);
                self.peek()
            }
        }
    }

    pub fn next(&mut self) -> ParseResult {
        match self.current {
            None => self.parse(),
            Some(_) => Ok(self.current.take().unwrap()),
        }
    }

    fn peek_token(&mut self) -> Result<&Token, ScanError> {
        match self.token {
            None => {
                self.token = Some(self.scan_next_token()?);
                Ok(self.token.as_ref().unwrap())
            }
            Some(ref tok) => Ok(tok),
        }
    }

    fn scan_next_token(&mut self) -> Result<Token, ScanError> {
        let token = self.scanner.next();
        match token {
            None => match self.scanner.get_error() {
                None => Err(ScanError::new(self.scanner.mark(), "unexpected eof")),
                Some(e) => Err(e),
            },
            Some(tok) => Ok(tok),
        }
    }

    fn fetch_token(&mut self) -> Token {
        self.token
            .take()
            .expect("fetch_token needs to be preceded by peek_token")
    }

    fn skip(&mut self) {
        self.token = None;
    }

    fn pop_state(&mut self) {
        self.state = self.states.pop().unwrap()
    }

    fn push_state(&mut self, state: State) {
        self.states.push(state);
    }

    fn parse(&mut self) -> ParseResult {
        if self.state == State::End {
            return Ok((Event::StreamEnd, self.scanner.mark()));
        }
        let (ev, mark) = self.state_machine()?;
        Ok((ev, mark))
    }

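    /// Run the parser to completion, forwarding every event (including
    /// `StreamStart`/`StreamEnd` and directive `Line`s) to `recv`. With
    /// `multi == false`, only the first document is loaded.
    ///
    /// A usage sketch (illustrative only; `EventPrinter` is the hypothetical
    /// receiver from the `EventReceiver` docs above):
    ///
    /// ```ignore
    /// let mut recv = EventPrinter;
    /// let mut parser = Parser::new("a: [1, 2]\n".chars());
    /// parser.load(&mut recv, true).unwrap();
    /// ```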
    pub fn load<R: MarkedEventReceiver>(
        &mut self,
        recv: &mut R,
        multi: bool,
    ) -> Result<(), ScanError> {
        if !self.scanner.stream_started() {
            let (ev, mark) = self.next()?;
            assert_eq!(ev, Event::StreamStart);
            recv.on_event(ev, mark);
        }

        if self.scanner.stream_ended() {
            recv.on_event(Event::StreamEnd, self.scanner.mark());
            return Ok(());
        }
        loop {
            let (ev, mark) = self.next()?;
            if ev == Event::StreamEnd {
                recv.on_event(ev, mark);
                return Ok(());
            }
            if let Event::Line(_) = ev {
                recv.on_event(ev, mark);
                continue;
            }
            self.anchors.clear();
            self.load_document(ev, mark, recv)?;
            if !multi {
                break;
            }
        }
        Ok(())
    }

    fn load_document<R: MarkedEventReceiver>(
        &mut self,
        first_ev: Event,
        mark: Marker,
        recv: &mut R,
    ) -> Result<(), ScanError> {
        recv.on_event(first_ev, mark);

        let (ev, mark) = self.next()?;
        self.load_node(ev, mark, recv)?;

        let (ev, mark) = self.next()?;
        assert_eq!(ev, Event::DocumentEnd);
        recv.on_event(ev, mark);

        Ok(())
    }

    fn load_node<R: MarkedEventReceiver>(
        &mut self,
        first_ev: Event,
        mark: Marker,
        recv: &mut R,
    ) -> Result<(), ScanError> {
        match first_ev {
            Event::Alias(..) | Event::Scalar(..) => {
                recv.on_event(first_ev, mark);
                Ok(())
            }
            Event::SequenceStart(_) => {
                recv.on_event(first_ev, mark);
                self.load_sequence(recv)
            }
            Event::MappingStart(_, _) => {
                recv.on_event(first_ev, mark);
                self.load_mapping(recv)
            }
            _ => {
                println!("UNREACHABLE EVENT: {:?}", first_ev);
                unreachable!();
            }
        }
    }

    fn load_mapping<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
        let (mut key_ev, mut key_mark) = self.next()?;
        while key_ev != Event::MappingEnd {
            self.load_node(key_ev, key_mark, recv)?;

            let (ev, mark) = self.next()?;
            self.load_node(ev, mark, recv)?;

            let (ev, mark) = self.next()?;
            key_ev = ev;
            key_mark = mark;
        }
        recv.on_event(key_ev, key_mark);
        Ok(())
    }

    fn load_sequence<R: MarkedEventReceiver>(&mut self, recv: &mut R) -> Result<(), ScanError> {
        let (mut ev, mut mark) = self.next()?;
        while ev != Event::SequenceEnd {
            self.load_node(ev, mark, recv)?;

            let (next_ev, next_mark) = self.next()?;
            ev = next_ev;
            mark = next_mark;
        }
        recv.on_event(ev, mark);
        Ok(())
    }

    fn state_machine(&mut self) -> ParseResult {
        match self.state {
            State::StreamStart => self.stream_start(),
            State::ImplicitDocumentStart => self.document_start(true),
            State::DocumentStart => self.document_start(false),
            State::DocumentContent => self.document_content(),
            State::DocumentEnd => self.document_end(),

            State::BlockNode => self.parse_node(true, false),
            State::BlockMappingFirstKey => self.block_mapping_key(true),
            State::BlockMappingKey => self.block_mapping_key(false),
            State::BlockMappingValue => self.block_mapping_value(),

            State::BlockSequenceFirstEntry => self.block_sequence_entry(true),
            State::BlockSequenceEntry => self.block_sequence_entry(false),

            State::FlowSequenceFirstEntry => self.flow_sequence_entry(true),
            State::FlowSequenceEntry => self.flow_sequence_entry(false),

            State::FlowMappingFirstKey => self.flow_mapping_key(true),
            State::FlowMappingKey => self.flow_mapping_key(false),
            State::FlowMappingValue => self.flow_mapping_value(false),

            State::IndentlessSequenceEntry => self.indentless_sequence_entry(),

            State::FlowSequenceEntryMappingKey => self.flow_sequence_entry_mapping_key(),
            State::FlowSequenceEntryMappingValue => self.flow_sequence_entry_mapping_value(),
            State::FlowSequenceEntryMappingEnd => self.flow_sequence_entry_mapping_end(),
            State::FlowMappingEmptyValue => self.flow_mapping_value(true),

            State::End => unreachable!(),
        }
    }

    fn stream_start(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::StreamStart(_)) => {
                self.state = State::ImplicitDocumentStart;
                self.skip();
                Ok((Event::StreamStart, mark))
            }
            Token(mark, _) => Err(ScanError::new(mark, "did not find expected <stream-start>")),
        }
    }

    fn document_start(&mut self, implicit: bool) -> ParseResult {
        if !implicit {
            while let TokenType::DocumentEnd = self.peek_token()?.1 {
                self.skip();
            }
        }

        match *self.peek_token()? {
            Token(mark, TokenType::StreamEnd) => {
                self.state = State::End;
                self.skip();
                Ok((Event::StreamEnd, mark))
            }
            Token(_, TokenType::VersionDirective(..))
            | Token(_, TokenType::TagDirective(..))
            | Token(_, TokenType::DocumentStart(..)) => self._explicit_document_start(),
            Token(mark, _) if implicit => {
                self.parser_process_directives()?;
                self.push_state(State::DocumentEnd);
                self.state = State::BlockNode;
                Ok((Event::DocumentStart(0, 0), mark))
            }
            _ => self._explicit_document_start(),
        }
    }

    fn parser_process_directives(&mut self) -> Result<(), ScanError> {
        loop {
            match self.peek_token()?.1 {
                TokenType::VersionDirective(_, _) => {
                    println!("versionDirective");
                }
                TokenType::TagDirective(..) => {
                    println!("tagDirective");
                }
                _ => break,
            }
            self.skip();
        }
        Ok(())
    }

    fn _explicit_document_start(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::VersionDirective(major, minor)) => {
                self.skip();
                Ok((Event::Line(format!("%YAML {}.{}", major, minor)), mark))
            }
            Token(mark, TokenType::TagDirective(ref handle, ref prefix)) => {
                let tag = format!("%TAG {} {}", handle, prefix);
                self.skip();
                Ok((Event::Line(tag), mark))
            }
            Token(mark, TokenType::DocumentStart(cid, oid)) => {
                self.push_state(State::DocumentEnd);
                self.state = State::DocumentContent;
                self.skip();
                Ok((Event::DocumentStart(cid, oid), mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "did not find expected <document start>",
            )),
        }
    }

    fn document_content(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::VersionDirective(..))
            | Token(mark, TokenType::TagDirective(..))
            | Token(mark, TokenType::DocumentStart(..))
            | Token(mark, TokenType::DocumentEnd)
            | Token(mark, TokenType::StreamEnd) => {
                self.pop_state();
                Ok((Event::empty_scalar(), mark))
            }
            _ => self.parse_node(true, false),
        }
    }

    fn document_end(&mut self) -> ParseResult {
        let mut _implicit = true;
        let marker: Marker = match *self.peek_token()? {
            Token(mark, TokenType::DocumentEnd) => {
                self.skip();
                _implicit = false;
                mark
            }
            Token(mark, _) => mark,
        };

        self.state = State::DocumentStart;
        Ok((Event::DocumentEnd, marker))
    }

    fn register_anchor(&mut self, name: String, _: &Marker) -> Result<usize, ScanError> {
        let new_id = self.anchor_id;
        self.anchor_id += 1;
        self.anchors.insert(name, new_id);
        Ok(new_id)
    }

    fn parse_node(&mut self, block: bool, indentless_sequence: bool) -> ParseResult {
        let mut anchor_id = 0;
        let mut tag = None;
        match *self.peek_token()? {
            Token(_, TokenType::Alias(_)) => {
                self.pop_state();
                if let Token(mark, TokenType::Alias(name)) = self.fetch_token() {
                    match self.anchors.get(&name) {
                        None => {
                            return Err(ScanError::new(
                                mark,
                                "while parsing node, found unknown anchor",
                            ))
                        }
                        Some(id) => return Ok((Event::Alias(*id), mark)),
                    }
                } else {
                    unreachable!()
                }
            }
            Token(_, TokenType::Anchor(_)) => {
                if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
                    anchor_id = self.register_anchor(name, &mark)?;
                    if let TokenType::Tag(..) = self.peek_token()?.1 {
                        if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                            tag = Some(tg);
                        } else {
                            unreachable!()
                        }
                    }
                } else {
                    unreachable!()
                }
            }
            Token(_, TokenType::Tag(..)) => {
                if let tg @ TokenType::Tag(..) = self.fetch_token().1 {
                    tag = Some(tg);
                    if let TokenType::Anchor(_) = self.peek_token()?.1 {
                        if let Token(mark, TokenType::Anchor(name)) = self.fetch_token() {
                            anchor_id = self.register_anchor(name, &mark)?;
                        } else {
                            unreachable!()
                        }
                    }
                } else {
                    unreachable!()
                }
            }
            _ => {}
        }
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEntry) if indentless_sequence => {
                self.state = State::IndentlessSequenceEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(_, TokenType::Scalar(..)) => {
                self.pop_state();
                if let Token(mark, TokenType::Scalar(style, v)) = self.fetch_token() {
                    Ok((Event::Scalar(v, style, anchor_id, tag), mark))
                } else {
                    unreachable!()
                }
            }
            Token(mark, TokenType::FlowSequenceStart) => {
                self.state = State::FlowSequenceFirstEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(mark, TokenType::FlowMappingStart) => {
                self.state = State::FlowMappingFirstKey;
                Ok((Event::MappingStart(anchor_id, false), mark))
            }
            Token(mark, TokenType::BlockSequenceStart) if block => {
                self.state = State::BlockSequenceFirstEntry;
                Ok((Event::SequenceStart(anchor_id), mark))
            }
            Token(mark, TokenType::BlockMappingStart) if block => {
                self.state = State::BlockMappingFirstKey;
                Ok((Event::MappingStart(anchor_id, true), mark))
            }
            Token(mark, _) if tag.is_some() || anchor_id > 0 => {
                self.pop_state();
                Ok((Event::empty_scalar_with_anchor(anchor_id, tag), mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a node, did not find expected node content",
            )),
        }
    }

    fn block_mapping_key(&mut self, first: bool) -> ParseResult {
        if first {
            let _ = self.peek_token()?;
            self.skip();
        }
        match *self.peek_token()? {
            Token(_, TokenType::Key) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::Key)
                    | Token(mark, TokenType::Value)
                    | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockMappingValue;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockMappingValue);
                        self.parse_node(true, true)
                    }
                }
            }
            Token(mark, TokenType::Value) => {
                self.state = State::BlockMappingValue;
                Ok((Event::empty_scalar(), mark))
            }
            Token(mark, TokenType::BlockEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::MappingEnd, mark))
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a block mapping, did not find expected key",
            )),
        }
    }

    fn block_mapping_value(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::Value) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::Key)
                    | Token(mark, TokenType::Value)
                    | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockMappingKey;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockMappingKey);
                        self.parse_node(true, true)
                    }
                }
            }
            Token(mark, _) => {
                self.state = State::BlockMappingKey;
                Ok((Event::empty_scalar(), mark))
            }
        }
    }

    fn flow_mapping_key(&mut self, first: bool) -> ParseResult {
        if first {
            let _ = self.peek_token()?;
            self.skip();
        }
        let marker: Marker = {
            match *self.peek_token()? {
                Token(mark, TokenType::FlowMappingEnd) => mark,
                Token(mark, _) => {
                    if !first {
                        match *self.peek_token()? {
                            Token(_, TokenType::FlowEntry) => self.skip(),
                            Token(mark, _) => {
                                return Err(ScanError::new(
                                    mark,
                                    "while parsing a flow mapping, did not find expected ',' or '}'",
                                ))
                            }
                        }
                    }

                    match *self.peek_token()? {
                        Token(_, TokenType::Key) => {
                            self.skip();
                            match *self.peek_token()? {
                                Token(mark, TokenType::Value)
                                | Token(mark, TokenType::FlowEntry)
                                | Token(mark, TokenType::FlowMappingEnd) => {
                                    self.state = State::FlowMappingValue;
                                    return Ok((Event::empty_scalar(), mark));
                                }
                                _ => {
                                    self.push_state(State::FlowMappingValue);
                                    return self.parse_node(false, false);
                                }
                            }
                        }
                        Token(marker, TokenType::Value) => {
                            self.state = State::FlowMappingValue;
                            return Ok((Event::empty_scalar(), marker));
                        }
                        Token(_, TokenType::FlowMappingEnd) => (),
                        _ => {
                            self.push_state(State::FlowMappingEmptyValue);
                            return self.parse_node(false, false);
                        }
                    }

                    mark
                }
            }
        };

        self.pop_state();
        self.skip();
        Ok((Event::MappingEnd, marker))
    }

    fn flow_mapping_value(&mut self, empty: bool) -> ParseResult {
        let mark: Marker = {
            if empty {
                let Token(mark, _) = *self.peek_token()?;
                self.state = State::FlowMappingKey;
                return Ok((Event::empty_scalar(), mark));
            } else {
                match *self.peek_token()? {
                    Token(marker, TokenType::Value) => {
                        self.skip();
                        match self.peek_token()?.1 {
                            TokenType::FlowEntry | TokenType::FlowMappingEnd => {}
                            _ => {
                                self.push_state(State::FlowMappingKey);
                                return self.parse_node(false, false);
                            }
                        }
                        marker
                    }
                    Token(marker, _) => marker,
                }
            }
        };

        self.state = State::FlowMappingKey;
        Ok((Event::empty_scalar(), mark))
    }

    fn flow_sequence_entry(&mut self, first: bool) -> ParseResult {
        if first {
            let _ = self.peek_token()?;
            self.skip();
        }
        match *self.peek_token()? {
            Token(mark, TokenType::FlowSequenceEnd) => {
                self.pop_state();
                self.skip();
                return Ok((Event::SequenceEnd, mark));
            }
            Token(_, TokenType::FlowEntry) if !first => {
                self.skip();
            }
            Token(mark, _) if !first => {
                return Err(ScanError::new(
                    mark,
                    "while parsing a flow sequence, expected ',' or ']'",
                ));
            }
            _ => {}
        }
        match *self.peek_token()? {
            Token(mark, TokenType::FlowSequenceEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::SequenceEnd, mark))
            }
            Token(mark, TokenType::Key) => {
                self.state = State::FlowSequenceEntryMappingKey;
                self.skip();
                Ok((Event::MappingStart(0, false), mark))
            }
            _ => {
                self.push_state(State::FlowSequenceEntry);
                self.parse_node(false, false)
            }
        }
    }

    fn indentless_sequence_entry(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::BlockEntry) => (),
            Token(mark, _) => {
                self.pop_state();
                return Ok((Event::SequenceEnd, mark));
            }
        }
        self.skip();
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEntry)
            | Token(mark, TokenType::Key)
            | Token(mark, TokenType::Value)
            | Token(mark, TokenType::BlockEnd) => {
                self.state = State::IndentlessSequenceEntry;
                Ok((Event::empty_scalar(), mark))
            }
            _ => {
                self.push_state(State::IndentlessSequenceEntry);
                self.parse_node(true, false)
            }
        }
    }

    fn block_sequence_entry(&mut self, first: bool) -> ParseResult {
        if first {
            let _ = self.peek_token()?;
            self.skip();
        }
        match *self.peek_token()? {
            Token(mark, TokenType::BlockEnd) => {
                self.pop_state();
                self.skip();
                Ok((Event::SequenceEnd, mark))
            }
            Token(_, TokenType::BlockEntry) => {
                self.skip();
                match *self.peek_token()? {
                    Token(mark, TokenType::BlockEntry) | Token(mark, TokenType::BlockEnd) => {
                        self.state = State::BlockSequenceEntry;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::BlockSequenceEntry);
                        self.parse_node(true, false)
                    }
                }
            }
            Token(mark, _) => Err(ScanError::new(
                mark,
                "while parsing a block collection, did not find expected '-' indicator",
            )),
        }
    }

    fn flow_sequence_entry_mapping_key(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(mark, TokenType::Value)
            | Token(mark, TokenType::FlowEntry)
            | Token(mark, TokenType::FlowSequenceEnd) => {
                self.skip();
                self.state = State::FlowSequenceEntryMappingValue;
                Ok((Event::empty_scalar(), mark))
            }
            _ => {
                self.push_state(State::FlowSequenceEntryMappingValue);
                self.parse_node(false, false)
            }
        }
    }

    fn flow_sequence_entry_mapping_value(&mut self) -> ParseResult {
        match *self.peek_token()? {
            Token(_, TokenType::Value) => {
                self.skip();
                self.state = State::FlowSequenceEntryMappingValue;
                match *self.peek_token()? {
                    Token(mark, TokenType::FlowEntry)
                    | Token(mark, TokenType::FlowSequenceEnd) => {
                        self.state = State::FlowSequenceEntryMappingEnd;
                        Ok((Event::empty_scalar(), mark))
                    }
                    _ => {
                        self.push_state(State::FlowSequenceEntryMappingEnd);
                        self.parse_node(false, false)
                    }
                }
            }
            Token(mark, _) => {
                self.state = State::FlowSequenceEntryMappingEnd;
                Ok((Event::empty_scalar(), mark))
            }
        }
    }

    fn flow_sequence_entry_mapping_end(&mut self) -> ParseResult {
        self.state = State::FlowSequenceEntry;
        Ok((Event::MappingEnd, self.scanner.mark()))
    }
}

#[cfg(test)]
mod test {
    use super::{Event, Parser};

    #[test]
    fn test_peek_eq_parse() {
        let s = "
a0 bb: val
a1: &x
    b1: 4
    b2: d
a2: 4
a3: [1, 2, 3]
a4:
    - [a1, a2]
    - 2
a5: *x
";
        let mut p = Parser::new(s.chars());
        while {
            let event_peek = p.peek().unwrap().clone();
            let event = p.next().unwrap();
            assert_eq!(event, event_peek);
            event.0 != Event::StreamEnd
        } {}
    }
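
    // A minimal usage sketch added for illustration (not part of the original
    // tests): drive `Parser::load` with a plain `EventReceiver` (the blanket
    // impl above adapts it to `MarkedEventReceiver`) and count scalar events.
    struct ScalarCounter {
        scalars: usize,
    }

    impl super::EventReceiver for ScalarCounter {
        fn on_event(&mut self, ev: Event) {
            if let Event::Scalar(..) = ev {
                self.scalars += 1;
            }
        }
    }

    #[test]
    fn test_load_counts_scalars() {
        let s = "a: 1\nb: [x, y]\n";
        let mut counter = ScalarCounter { scalars: 0 };
        let mut p = Parser::new(s.chars());
        p.load(&mut counter, true).unwrap();
        // Expected scalar events for this input: "a", "1", "b", "x", "y".
        assert_eq!(counter.scalars, 5);
    }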
}