1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
use rowan::{GreenNode, GreenNodeBuilder};
use super::err::TomlResult;
use super::kinds::TomlKind::{self, *};
use super::parse_tkns::Tokenizer;
use super::walk::{walk, walk_tokens};
/// A red-tree node typed to the TOML language.
pub type SyntaxNode = rowan::SyntaxNode<TomlLang>;
/// A red-tree leaf token typed to the TOML language.
pub type SyntaxToken = rowan::SyntaxToken<TomlLang>;
/// Either a [`SyntaxNode`] or a [`SyntaxToken`] in the TOML syntax tree.
pub type SyntaxElement = rowan::NodeOrToken<SyntaxNode, SyntaxToken>;
/// Convenience extensions for [`SyntaxNode`].
pub trait SyntaxNodeExtTrait {
/// Concatenates the text of every token underneath this node into one `String`.
fn token_text(&self) -> String;
/// Structural equality: walks both trees in parallel and compares element
/// kinds and text (see the impl on [`SyntaxNode`] for details).
fn deep_eq(&self, other: &Self) -> bool;
}
/// Converts the typed [`TomlKind`] into rowan's untyped `SyntaxKind` wrapper
/// by widening the enum discriminant to `u16`.
impl From<TomlKind> for rowan::SyntaxKind {
    fn from(kind: TomlKind) -> Self {
        let raw = kind as u16;
        Self(raw)
    }
}
/// Zero-sized marker type that implements [`rowan::Language`] for TOML,
/// tying the untyped rowan tree to [`TomlKind`].
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct TomlLang;
impl rowan::Language for TomlLang {
type Kind = TomlKind;
/// Converts a raw rowan `SyntaxKind` back into a typed [`TomlKind`].
///
/// # Panics
/// Panics if `raw.0` exceeds the `Root` discriminant, i.e. is not a value
/// previously produced by [`kind_to_raw`](Self::kind_to_raw).
fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
// NOTE(review): this assumes `Root` is the highest-valued TomlKind
// variant — confirm against the enum definition in `kinds`.
assert!(raw.0 <= Root as u16);
// SAFETY: the assert above bounds `raw.0` to the TomlKind discriminant
// range. Soundness additionally assumes TomlKind has a u16-compatible
// representation (e.g. `#[repr(u16)]`) with contiguous discriminants —
// verify at the enum definition; not visible from this file.
unsafe { std::mem::transmute::<u16, TomlKind>(raw.0) }
}
/// Converts a typed kind into rowan's untyped wrapper (never fails).
fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
kind.into()
}
}
impl SyntaxNodeExtTrait for SyntaxNode {
    /// Concatenates the text of every token beneath this node.
    fn token_text(&self) -> String {
        walk_tokens(self).fold(String::default(), |mut s, tkn| {
            s.push_str(tkn.text());
            s
        })
    }

    /// Deep structural equality: both trees must yield the same sequence of
    /// elements — same kind, same text — over a full walk.
    ///
    /// Fix: the previous implementation used `zip`, which stops at the end of
    /// the shorter walk, so a tree that was a strict prefix of the other
    /// compared as equal. Both iterators are now advanced in lock-step and
    /// must be exhausted at the same time for the trees to match.
    fn deep_eq(&self, other: &Self) -> bool {
        let mut lhs = walk(self);
        let mut rhs = walk(other);
        loop {
            match (lhs.next(), rhs.next()) {
                // Both walks ended together: every element matched.
                (None, None) => return true,
                // One tree ran out before the other: different shapes.
                (None, Some(_)) | (Some(_), None) => return false,
                (Some(a), Some(b)) => {
                    if a.kind() != b.kind() {
                        return false;
                    }
                    match (&a, &b) {
                        (SyntaxElement::Node(n1), SyntaxElement::Node(n2)) => {
                            if n1.token_text() != n2.token_text() {
                                return false;
                            }
                        }
                        (SyntaxElement::Token(t1), SyntaxElement::Token(t2)) => {
                            if t1.text() != t2.text() {
                                return false;
                            }
                        }
                        // A node paired with a token: structures differ.
                        (_, _) => return false,
                    }
                }
            }
        }
    }
}
/// The result of a successful parse: the immutable green tree produced by
/// the builder. Red (syntax) trees are derived from it on demand.
pub struct ParsedToml {
// Root of the green tree for the parsed document.
green: rowan::GreenNode,
}
impl ParsedToml {
    /// Builds a fresh red (syntax) tree rooted at the stored green tree.
    pub fn syntax(&self) -> SyntaxNode {
        let root = self.green.clone();
        SyntaxNode::new_root(root)
    }
}
/// Incrementally builds the green tree as the tokenizer feeds it events.
pub struct Parser {
// Green-tree builder; filled in by `Tokenizer::parse`, consumed by `parse`.
pub(crate) builder: GreenNodeBuilder<'static>,
}
impl Default for Parser {
fn default() -> Self {
Parser::new()
}
}
impl Parser {
pub fn new() -> Parser {
Self {
builder: GreenNodeBuilder::new(),
}
}
pub fn parse(self) -> TomlResult<ParsedToml> {
let green: GreenNode = self.builder.finish();
Ok(ParsedToml { green })
}
}
/// Tokenizes `input` and produces the parsed TOML tree.
///
/// # Errors
/// Propagates any tokenizer error encountered while consuming `input`.
pub fn parse_it(input: &str) -> TomlResult<ParsedToml> {
    Tokenizer::parse(input, Parser::default())?.parse()
}