kodept_parse/peg/compatibility.rs
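
//! Compatibility layer between the lexer's `TokenStream` and the `peg` parser
//! generator: implementing peg's input traits (`Parse`, `ParseElem`,
//! `ParseLiteral`, `ParseSlice`) here lets grammar rules run directly over
//! already-lexed tokens instead of raw source text.
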
use crate::common::TokenProducer;
use crate::lexer::{DefaultLexer, Ignore::*, Token::Ignore};
use crate::token_match::TokenMatch;
use crate::token_stream::TokenStream;
use derive_more::Display;
use kodept_core::code_point::CodePoint;
use peg::str::LineCol;
use peg::{Parse, ParseElem, ParseLiteral, ParseSlice, RuleResult};

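/// Line/column position of a token in the original source, used by `peg` when
/// reporting errors (displayed as `line:col`). The matched token's `length`
/// and byte `offset` are kept alongside so the position can be converted into
/// a [`CodePoint`].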
#[derive(Display, Copy, Clone, Debug)]
#[display("{line}:{col}")]
pub struct Position {
    line: usize,
    col: u32,
    length: u32,
    offset: u32,
}

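// Only `length` and `offset` carry over into a `CodePoint`; line and column
// exist solely for error display.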
impl From<Position> for CodePoint {
    fn from(value: Position) -> Self {
        CodePoint::new(value.length, value.offset)
    }
}

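// peg's string-oriented `LineCol` carries no span length, so the conversion
// assumes a length of 1.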
impl From<LineCol> for Position {
    fn from(value: LineCol) -> Self {
        Position {
            line: value.line,
            col: value.column as u32,
            length: 1,
            offset: value.offset as u32,
        }
    }
}

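// peg's core input trait. Positions handed to `is_eof`/`position_repr` are
// indices into the token slice; `position_repr` rebuilds a line/column pair
// by counting newline tokens before the index and summing the lengths of the
// tokens since the last newline.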
impl<'t> Parse for TokenStream<'t> {
    type PositionRepr = Position;

    #[inline(always)]
    fn start(&self) -> usize {
        // Parsing starts at the first token's offset (or 0 for an empty stream).
        self.slice.first().map_or(0, |it| it.span.point.offset) as usize
    }

    #[inline(always)]
    fn is_eof(&self, pos: usize) -> bool {
        pos >= self.len()
    }

    #[inline(always)]
    fn position_repr(&self, pos: usize) -> Self::PositionRepr {
        // Take the token at `pos` (or the last token if `pos` is past the end)
        // together with everything before it.
        let (before, point) = match self.slice.split_at(pos) {
            (a, [b, ..]) => (a, b.span.point),
            (a @ [.., last], []) => (a, last.span.point),
            ([], []) => panic!("Cannot slice empty stream"),
        };
        // Line number: one more than the number of newline tokens seen so far.
        let line = before
            .iter()
            .filter(|it| matches!(it.token, Ignore(Newline)))
            .count()
            + 1;
        // Column: sum of token lengths since the last newline, one-based.
        let col = before
            .iter()
            .rev()
            .take_while(|it| !matches!(it.token, Ignore(Newline)))
            .map(|it| it.span.point.length)
            .sum::<u32>()
            + 1;

        Position {
            line,
            col,
            length: point.length,
            offset: point.offset,
        }
    }
}

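// `ParseElem` backs single-element patterns in grammar rules: each position
// yields the `TokenMatch` at that index and advances by one token.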
impl<'input> ParseElem<'input> for TokenStream<'input> {
    type Element = TokenMatch<'input>;

    #[inline(always)]
    fn parse_elem(&'input self, pos: usize) -> RuleResult<Self::Element> {
        let slice = &self.slice[pos..];
        match slice.first() {
            None => RuleResult::Failed,
            Some(x) => RuleResult::Matched(pos + 1, *x),
        }
    }
}

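// `ParseLiteral` lets grammar rules match plain string literals: the literal
// is lexed on the fly with `DefaultLexer` and the resulting token is compared
// against the token at `pos`.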
impl<'input> ParseLiteral for TokenStream<'input> {
    #[inline(always)]
    fn parse_string_literal(&self, pos: usize, literal: &str) -> RuleResult<()> {
        let token_match = DefaultLexer::new()
            .parse_token(literal, 0)
            .expect("Unexpected token received in grammar");
        debug_assert_eq!(token_match.span.point.length, literal.len() as u32);

        match (self.slice.get(pos), token_match.token) {
            (Some(a), b) if a.token == b => RuleResult::Matched(pos + 1, ()),
            _ => RuleResult::Failed,
        }
    }
}

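// `ParseSlice` backs peg's `$(...)` capture syntax: the matched range of
// tokens is returned as a new `TokenStream` borrowing the same underlying slice.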
impl<'input> ParseSlice<'input> for TokenStream<'input> {
    type Slice = TokenStream<'input>;

    #[inline(always)]
    fn parse_slice(&'input self, p1: usize, p2: usize) -> Self::Slice {
        TokenStream::new(&self.slice[p1..p2])
    }
}