use crate::diagnostics::{BuildDiagnostics, SourceFile, Spanned};
use smol_str::SmolStr;
use std::fmt::Display;

mod document;
mod element;
mod expressions;
mod statements;
mod r#type;

mod prelude {
    #[cfg(test)]
    pub use super::DefaultParser;
    #[cfg(test)]
    pub use super::{syntax_nodes, SyntaxNode, SyntaxNodeVerify};
    pub use super::{Parser, SyntaxKind};
    #[cfg(test)]
    pub use i_slint_parser_test_macro::parser_test;
}

/// Trait implemented by the generated node types in [`syntax_nodes`];
/// used in tests to check that a parsed node has the expected children.
#[cfg(test)]
pub trait SyntaxNodeVerify {
    /// The `SyntaxKind` this type corresponds to.
    const KIND: SyntaxKind;
    /// Assert that the node is of the right kind and that its children are as expected.
    fn verify(node: SyntaxNode) {
        assert_eq!(node.kind(), Self::KIND)
    }
}

pub use rowan::{TextRange, TextSize};

#[cfg(test)]
macro_rules! verify_node {
    // Entry point: `$node` is the node to check, the bracketed list describes the expected children.
    ($node:ident, [ $($t1:tt $($t2:ident)?),* ]) => {
        // Check the cardinality constraints of each expected child kind.
        $(verify_node!(@check_has_children $node, $t1 $($t2)* );)*

        // Check that there is no child of an unexpected kind.
        for c in $node.children() {
            assert!(
                false $(|| c.kind() == verify_node!(@extract_kind $t1 $($t2)*))*,
                "Node is none of [{}]\n{:?}", stringify!($($t1 $($t2)*),*), c);
        }

        // Recursively verify the children.
        $(
            for _c in $node.children().filter(|n| n.kind() == verify_node!(@extract_kind $t1 $($t2)*)) {
                <verify_node!(@extract_type $t1 $($t2)*)>::verify(_c)
            }
        )*
    };

    (@check_has_children $node:ident, * $kind:ident) => {};
    (@check_has_children $node:ident, ? $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert!(count <= 1, "Expecting one or zero sub-node of type {}, found {}\n{:?}", stringify!($kind), count, $node);
    };
    (@check_has_children $node:ident, $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert_eq!(count, 1, "Expecting exactly one sub-node of type {}\n{:?}", stringify!($kind), $node);
    };
    (@check_has_children $node:ident, $count:literal $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert_eq!(count, $count, "Expecting {} sub-node of type {}, found {}\n{:?}", $count, stringify!($kind), count, $node);
    };

    (@extract_kind * $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind ? $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind $count:literal $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind $kind:ident) => {SyntaxKind::$kind};

    (@extract_type * $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type ? $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type $count:literal $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type $kind:ident) => {$crate::parser::syntax_nodes::$kind};
}

macro_rules! node_accessors {
    ([ $($t1:tt $($t2:ident)?),* ]) => {
        $(node_accessors!{@ $t1 $($t2)*} )*
    };

    (@ * $kind:ident) => {
        #[allow(non_snake_case)]
        pub fn $kind(&self) -> impl Iterator<Item = $kind> {
            self.0.children().filter(|n| n.kind() == SyntaxKind::$kind).map(Into::into)
        }
    };
    (@ ? $kind:ident) => {
        #[allow(non_snake_case)]
        pub fn $kind(&self) -> Option<$kind> {
            self.0.child_node(SyntaxKind::$kind).map(Into::into)
        }
    };
    (@ 2 $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> ($kind, $kind) {
            let mut it = self.0.children().filter(|n| n.kind() == SyntaxKind::$kind);
            let a = it.next().expect(stringify!(Missing first $kind));
            let b = it.next().expect(stringify!(Missing second $kind));
            debug_assert!(it.next().is_none(), stringify!(More $kind than expected));
            (a.into(), b.into())
        }
    };
    (@ 3 $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> ($kind, $kind, $kind) {
            let mut it = self.0.children().filter(|n| n.kind() == SyntaxKind::$kind);
            let a = it.next().expect(stringify!(Missing first $kind));
            let b = it.next().expect(stringify!(Missing second $kind));
            let c = it.next().expect(stringify!(Missing third $kind));
            debug_assert!(it.next().is_none(), stringify!(More $kind than expected));
            (a.into(), b.into(), c.into())
        }
    };
    (@ $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> $kind {
            self.0.child_node(SyntaxKind::$kind).expect(stringify!(Missing $kind)).into()
        }
    };
}
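
// The children lists consumed by `verify_node!`, `node_accessors!` and `declare_syntax!`
// use the following notation:
//  * `*Kind`: any number of children of that kind (the accessor returns an iterator)
//  * `?Kind`: zero or one child of that kind (the accessor returns an `Option`)
//  * `2 Kind` / `3 Kind`: exactly that many children (the accessor returns a tuple)
//  * `Kind`: exactly one child (the accessor returns it and panics if it is missing)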

macro_rules! declare_syntax {
    ({
        $($token:ident -> $rule:expr ,)*
    }
    {
        $( $(#[$attr:meta])* $nodekind:ident -> $children:tt ,)*
    })
    => {
        #[repr(u16)]
        #[derive(Debug, Copy, Clone, Eq, PartialEq, num_enum::IntoPrimitive, num_enum::TryFromPrimitive, Hash, Ord, PartialOrd)]
        pub enum SyntaxKind {
            Error,
            Eof,

            // Tokens
            $(
                $token,
            )*

            // Nodes
            $(
                $(#[$attr])*
                $nodekind,
            )*
        }

        impl Display for SyntaxKind {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                match self {
                    $(Self::$token => {
                        if let Some(character) = <dyn std::any::Any>::downcast_ref::<&str>(& $rule) {
                            return write!(f, "'{}'", character)
                        }
                    })*
                    _ => ()
                }
                write!(f, "{:?}", self)
            }
        }

        /// Lex the next token at the start of `text`, returning its length and kind.
        pub fn lex_next_token(text: &str, state: &mut crate::lexer::LexState) -> Option<(usize, SyntaxKind)> {
            use crate::lexer::LexingRule;
            $(
                let len = ($rule).lex(text, state);
                if len > 0 {
                    return Some((len, SyntaxKind::$token));
                }
            )*
            None
        }

        pub mod syntax_nodes {
            use super::*;
            $(
                #[derive(Debug, Clone, derive_more::Deref, derive_more::Into)]
                pub struct $nodekind(SyntaxNode);
                #[cfg(test)]
                impl SyntaxNodeVerify for $nodekind {
                    const KIND: SyntaxKind = SyntaxKind::$nodekind;
                    #[track_caller]
                    fn verify(node: SyntaxNode) {
                        assert_eq!(node.kind(), Self::KIND);
                        verify_node!(node, $children);
                    }
                }
                impl $nodekind {
                    node_accessors!{$children}

                    pub fn new(node: SyntaxNode) -> Option<Self> {
                        (node.kind() == SyntaxKind::$nodekind).then(|| Self(node))
                    }
                }

                impl From<SyntaxNode> for $nodekind {
                    #[track_caller]
                    fn from(node: SyntaxNode) -> Self {
                        assert_eq!(node.kind(), SyntaxKind::$nodekind);
                        Self(node)
                    }
                }

                impl Spanned for $nodekind {
                    fn span(&self) -> crate::diagnostics::Span {
                        self.0.span()
                    }

                    fn source_file(&self) -> Option<&SourceFile> {
                        self.0.source_file()
                    }
                }
            )*
        }
    }
}
declare_syntax! {
    // Tokens: the lexing rules are tried in this order.
    {
        Whitespace -> &crate::lexer::lex_whitespace,
        Comment -> &crate::lexer::lex_comment,
        StringLiteral -> &crate::lexer::lex_string,
        NumberLiteral -> &crate::lexer::lex_number,
        ColorLiteral -> &crate::lexer::lex_color,
        Identifier -> &crate::lexer::lex_identifier,
        DoubleArrow -> "<=>",
        PlusEqual -> "+=",
        MinusEqual -> "-=",
        StarEqual -> "*=",
        DivEqual -> "/=",
        LessEqual -> "<=",
        GreaterEqual -> ">=",
        EqualEqual -> "==",
        NotEqual -> "!=",
        ColonEqual -> ":=",
        FatArrow -> "=>",
        Arrow -> "->",
        OrOr -> "||",
        AndAnd -> "&&",
        LBrace -> "{",
        RBrace -> "}",
        LParent -> "(",
        RParent -> ")",
        LAngle -> "<",
        RAngle -> ">",
        LBracket -> "[",
        RBracket -> "]",
        Plus -> "+",
        Minus -> "-",
        Star -> "*",
        Div -> "/",
        Equal -> "=",
        Colon -> ":",
        Comma -> ",",
        Semicolon -> ";",
        Bang -> "!",
        Dot -> ".",
        Question -> "?",
        Dollar -> "$",
        At -> "@",
        Pipe -> "|",
        Percent -> "%",
    }
    // Nodes and their expected immediate children.
    {
        Document -> [ *Component, *ExportsList, *ImportSpecifier, *StructDeclaration, *EnumDeclaration ],
        Component -> [ DeclaredIdentifier, Element ],
        SubElement -> [ Element ],
        Element -> [ ?QualifiedName, *PropertyDeclaration, *Binding, *CallbackConnection,
                     *CallbackDeclaration, *ConditionalElement, *Function, *SubElement,
                     *RepeatedElement, *PropertyAnimation, *PropertyChangedCallback,
                     *TwoWayBinding, *States, *Transitions, ?ChildrenPlaceholder ],
        RepeatedElement -> [ ?DeclaredIdentifier, ?RepeatedIndex, Expression, SubElement ],
        RepeatedIndex -> [],
        ConditionalElement -> [ Expression, SubElement ],
        CallbackDeclaration -> [ DeclaredIdentifier, *CallbackDeclarationParameter, ?ReturnType, ?TwoWayBinding ],
        CallbackDeclarationParameter -> [ ?DeclaredIdentifier, Type ],
        Function -> [ DeclaredIdentifier, *ArgumentDeclaration, ?ReturnType, CodeBlock ],
        ArgumentDeclaration -> [ DeclaredIdentifier, Type ],
        ReturnType -> [ Type ],
        CallbackConnection -> [ *DeclaredIdentifier, ?CodeBlock, ?Expression ],
        PropertyDeclaration -> [ ?Type, DeclaredIdentifier, ?BindingExpression, ?TwoWayBinding ],
        PropertyAnimation -> [ *QualifiedName, *Binding ],
        PropertyChangedCallback -> [ DeclaredIdentifier, ?CodeBlock, ?Expression ],
        QualifiedName -> [],
        DeclaredIdentifier -> [],
        ChildrenPlaceholder -> [],
        Binding -> [ BindingExpression ],
        TwoWayBinding -> [ Expression ],
        BindingExpression -> [ ?CodeBlock, ?Expression ],
        CodeBlock -> [ *Expression, *LetStatement, *ReturnStatement ],
        LetStatement -> [ DeclaredIdentifier, ?Type, Expression ],
        ReturnStatement -> [ ?Expression ],
        Expression -> [ ?Expression, ?FunctionCallExpression, ?IndexExpression, ?SelfAssignment,
                        ?ConditionalExpression, ?QualifiedName, ?BinaryExpression, ?Array, ?ObjectLiteral,
                        ?UnaryOpExpression, ?CodeBlock, ?StringTemplate, ?AtImageUrl, ?AtGradient, ?AtTr,
                        ?MemberAccess ],
        StringTemplate -> [ *Expression ],
        AtImageUrl -> [],
        AtGradient -> [ *Expression ],
        AtTr -> [ ?TrContext, ?TrPlural, *Expression ],
        TrContext -> [],
        TrPlural -> [ Expression ],
        FunctionCallExpression -> [ *Expression ],
        IndexExpression -> [ 2 Expression ],
        SelfAssignment -> [ 2 Expression ],
        ConditionalExpression -> [ 3 Expression ],
        BinaryExpression -> [ 2 Expression ],
        UnaryOpExpression -> [ Expression ],
        MemberAccess -> [ Expression ],
        Array -> [ *Expression ],
        ObjectLiteral -> [ *ObjectMember ],
        ObjectMember -> [ Expression ],
        States -> [ *State ],
        State -> [ DeclaredIdentifier, ?Expression, *StatePropertyChange, *Transition ],
        StatePropertyChange -> [ QualifiedName, BindingExpression ],
        Transitions -> [ *Transition ],
        Transition -> [ ?DeclaredIdentifier, *PropertyAnimation ],
        ExportsList -> [ *ExportSpecifier, ?Component, *StructDeclaration, ?ExportModule, *EnumDeclaration ],
        ExportSpecifier -> [ ExportIdentifier, ?ExportName ],
        ExportIdentifier -> [],
        ExportName -> [],
        ExportModule -> [],
        ImportSpecifier -> [ ?ImportIdentifierList ],
        ImportIdentifierList -> [ *ImportIdentifier ],
        ImportIdentifier -> [ ExternalName, ?InternalName ],
        ExternalName -> [],
        InternalName -> [],
        Type -> [ ?QualifiedName, ?ObjectType, ?ArrayType ],
        ObjectType -> [ *ObjectTypeMember ],
        ObjectTypeMember -> [ Type ],
        ArrayType -> [ Type ],
        StructDeclaration -> [ DeclaredIdentifier, ObjectType, ?AtRustAttr ],
        EnumDeclaration -> [ DeclaredIdentifier, *EnumValue, ?AtRustAttr ],
        EnumValue -> [],
        AtRustAttr -> [],
    }
}

impl From<SyntaxKind> for rowan::SyntaxKind {
    fn from(v: SyntaxKind) -> Self {
        rowan::SyntaxKind(v.into())
    }
}

#[derive(Clone, Debug)]
pub struct Token {
    pub kind: SyntaxKind,
    pub text: SmolStr,
    pub offset: usize,
    #[cfg(feature = "proc_macro_span")]
    pub span: Option<proc_macro::Span>,
}

impl Default for Token {
    fn default() -> Self {
        Token {
            kind: SyntaxKind::Eof,
            text: Default::default(),
            offset: 0,
            #[cfg(feature = "proc_macro_span")]
            span: None,
        }
    }
}

impl Token {
    pub fn as_str(&self) -> &str {
        self.text.as_str()
    }

    pub fn kind(&self) -> SyntaxKind {
        self.kind
    }
}
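
// Illustrative sketch of the token stream produced by `crate::lexer::lex` for a short
// input; the lexing rules are tried in the order they are declared in `declare_syntax!`.
#[test]
fn test_lex_simple_tokens() {
    let tokens = crate::lexer::lex("a := b;");
    assert_eq!(tokens[0].kind(), SyntaxKind::Identifier);
    assert_eq!(tokens[0].as_str(), "a");
    assert!(tokens.iter().any(|t| t.kind() == SyntaxKind::ColonEqual));
    assert!(tokens.iter().any(|t| t.kind() == SyntaxKind::Semicolon));
}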

mod parser_trait {
    //! Defines the `Parser` trait used by the grammar rule functions in the sub-modules.
    use super::*;

    pub trait Parser: Sized {
        type Checkpoint: Clone;

        /// Enter a new node of the given kind. The node is finished when the
        /// returned [`Node`] guard is dropped.
        #[must_use = "The node will be finished when it is dropped"]
        fn start_node(&mut self, kind: SyntaxKind) -> Node<'_, Self> {
            self.start_node_impl(kind, None, NodeToken(()));
            Node(self)
        }
        #[must_use = "use start_node_at to use this checkpoint"]
        fn checkpoint(&mut self) -> Self::Checkpoint;
        /// Enter a new node of the given kind, wrapping everything parsed since the checkpoint.
        #[must_use = "The node will be finished when it is dropped"]
        fn start_node_at(
            &mut self,
            checkpoint: impl Into<Option<Self::Checkpoint>>,
            kind: SyntaxKind,
        ) -> Node<'_, Self> {
            self.start_node_impl(kind, checkpoint.into(), NodeToken(()));
            Node(self)
        }

        fn finish_node_impl(&mut self, token: NodeToken);
        fn start_node_impl(
            &mut self,
            kind: SyntaxKind,
            checkpoint: Option<Self::Checkpoint>,
            token: NodeToken,
        );

        /// Peek the next significant token without consuming it.
        fn peek(&mut self) -> Token {
            self.nth(0)
        }
        /// Peek the n'th significant token (whitespace and comments are skipped).
        fn nth(&mut self, n: usize) -> Token;
        fn consume(&mut self);
        fn error(&mut self, e: impl Into<String>);
        fn warning(&mut self, e: impl Into<String>);

        /// Consume the token if it has the expected kind; otherwise report a
        /// syntax error and return false.
        fn expect(&mut self, kind: SyntaxKind) -> bool {
            if !self.test(kind) {
                self.error(format!("Syntax error: expected {kind}"));
                return false;
            }
            true
        }

        /// If the next token has the given kind, consume it and return true.
        fn test(&mut self, kind: SyntaxKind) -> bool {
            if self.nth(0).kind() != kind {
                return false;
            }
            self.consume();
            true
        }

        /// Consume everything until (and including) the token of the given kind,
        /// balancing nested parentheses, braces and brackets along the way.
        fn until(&mut self, kind: SyntaxKind) {
            let mut parens = 0;
            let mut braces = 0;
            let mut brackets = 0;
            loop {
                match self.nth(0).kind() {
                    k if k == kind && parens == 0 && braces == 0 && brackets == 0 => break,
                    SyntaxKind::Eof => break,
                    SyntaxKind::LParent => parens += 1,
                    SyntaxKind::LBrace => braces += 1,
                    SyntaxKind::LBracket => brackets += 1,
                    SyntaxKind::RParent if parens == 0 => break,
                    SyntaxKind::RParent => parens -= 1,
                    SyntaxKind::RBrace if braces == 0 => break,
                    SyntaxKind::RBrace => braces -= 1,
                    SyntaxKind::RBracket if brackets == 0 => break,
                    SyntaxKind::RBracket => brackets -= 1,
                    _ => {}
                };
                self.consume();
            }
            self.expect(kind);
        }
    }

    /// Can only be constructed inside this module, so that `start_node_impl` and
    /// `finish_node_impl` are only reachable through the wrapper methods above.
    pub struct NodeToken(());
    /// Guard returned by `Parser::start_node`: finishes the started node when it is
    /// dropped, and dereferences to the underlying parser.
    #[derive(derive_more::DerefMut)]
    pub struct Node<'a, P: Parser>(&'a mut P);
    impl<P: Parser> Drop for Node<'_, P> {
        fn drop(&mut self) {
            self.0.finish_node_impl(NodeToken(()));
        }
    }
    impl<P: Parser> core::ops::Deref for Node<'_, P> {
        type Target = P;
        fn deref(&self) -> &Self::Target {
            self.0
        }
    }
}
#[doc(inline)]
pub use parser_trait::*;
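
// Illustrative sketch only: the real grammar rules live in the `document`, `element`,
// `expressions`, `statements` and `r#type` sub-modules. This made-up rule shows the
// typical shape of such a function: start a node (finished when the guard is dropped),
// then drive `expect`/`test`/`consume` through the guard, which derefs to the parser.
#[cfg(test)]
#[allow(dead_code)]
fn parse_example_object_member(p: &mut impl Parser) {
    let mut p = p.start_node(SyntaxKind::ObjectMember);
    p.expect(SyntaxKind::Identifier);
    p.expect(SyntaxKind::Colon);
    p.expect(SyntaxKind::Identifier);
}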

pub struct DefaultParser<'a> {
    builder: rowan::GreenNodeBuilder<'static>,
    tokens: Vec<Token>,
    cursor: usize,
    diags: &'a mut BuildDiagnostics,
    source_file: SourceFile,
}

impl<'a> DefaultParser<'a> {
    fn from_tokens(tokens: Vec<Token>, diags: &'a mut BuildDiagnostics) -> Self {
        Self {
            builder: Default::default(),
            tokens,
            cursor: 0,
            diags,
            source_file: Default::default(),
        }
    }

    pub fn new(source: &str, diags: &'a mut BuildDiagnostics) -> Self {
        Self::from_tokens(crate::lexer::lex(source), diags)
    }

    fn current_token(&self) -> Token {
        self.tokens.get(self.cursor).cloned().unwrap_or_default()
    }

    pub fn consume_ws(&mut self) {
        while matches!(self.current_token().kind, SyntaxKind::Whitespace | SyntaxKind::Comment) {
            self.consume()
        }
    }
}

impl Parser for DefaultParser<'_> {
    fn start_node_impl(
        &mut self,
        kind: SyntaxKind,
        checkpoint: Option<Self::Checkpoint>,
        _: NodeToken,
    ) {
        if kind != SyntaxKind::Document {
            self.consume_ws();
        }
        match checkpoint {
            None => self.builder.start_node(kind.into()),
            Some(cp) => self.builder.start_node_at(cp, kind.into()),
        }
    }

    fn finish_node_impl(&mut self, _: NodeToken) {
        self.builder.finish_node();
    }

    fn nth(&mut self, mut n: usize) -> Token {
        self.consume_ws();
        let mut c = self.cursor;
        while n > 0 {
            n -= 1;
            c += 1;
            while c < self.tokens.len()
                && matches!(self.tokens[c].kind, SyntaxKind::Whitespace | SyntaxKind::Comment)
            {
                c += 1;
            }
        }
        self.tokens.get(c).cloned().unwrap_or_default()
    }

    fn consume(&mut self) {
        let t = self.current_token();
        self.builder.token(t.kind.into(), t.text.as_str());
        if t.kind != SyntaxKind::Eof {
            self.cursor += 1;
        }
    }

    fn error(&mut self, e: impl Into<String>) {
        let current_token = self.current_token();
        #[allow(unused_mut)]
        let mut span = crate::diagnostics::Span::new(current_token.offset);
        #[cfg(feature = "proc_macro_span")]
        {
            span.span = current_token.span;
        }

        self.diags.push_error_with_span(
            e.into(),
            crate::diagnostics::SourceLocation {
                source_file: Some(self.source_file.clone()),
                span,
            },
        );
    }

    fn warning(&mut self, e: impl Into<String>) {
        let current_token = self.current_token();
        #[allow(unused_mut)]
        let mut span = crate::diagnostics::Span::new(current_token.offset);
        #[cfg(feature = "proc_macro_span")]
        {
            span.span = current_token.span;
        }

        self.diags.push_warning_with_span(
            e.into(),
            crate::diagnostics::SourceLocation {
                source_file: Some(self.source_file.clone()),
                span,
            },
        );
    }

    type Checkpoint = rowan::Checkpoint;
    fn checkpoint(&mut self) -> Self::Checkpoint {
        self.builder.checkpoint()
    }
}
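
// Illustrative check that `nth` skips whitespace and comments, as implemented above.
// Assumes `BuildDiagnostics` implements `Default`, as in the other tests of this crate.
#[test]
fn test_nth_skips_whitespace_and_comments() {
    let mut diags = BuildDiagnostics::default();
    let mut p = DefaultParser::new("a /* comment */ b", &mut diags);
    assert_eq!(p.nth(0).as_str(), "a");
    assert_eq!(p.nth(1).as_str(), "b");
    assert_eq!(p.nth(2).kind(), SyntaxKind::Eof);
}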

#[derive(Clone, Copy, Debug, Eq, Ord, Hash, PartialEq, PartialOrd)]
pub enum Language {}
impl rowan::Language for Language {
    type Kind = SyntaxKind;
    fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
        SyntaxKind::try_from(raw.0).unwrap()
    }
    fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
        kind.into()
    }
}

#[derive(Debug, Clone, derive_more::Deref)]
pub struct SyntaxNode {
    #[deref]
    pub node: rowan::SyntaxNode<Language>,
    pub source_file: SourceFile,
}

#[derive(Debug, Clone, derive_more::Deref)]
pub struct SyntaxToken {
    #[deref]
    pub token: rowan::SyntaxToken<Language>,
    pub source_file: SourceFile,
}

impl SyntaxToken {
    pub fn parent(&self) -> SyntaxNode {
        SyntaxNode { node: self.token.parent().unwrap(), source_file: self.source_file.clone() }
    }
    pub fn parent_ancestors(&self) -> impl Iterator<Item = SyntaxNode> + '_ {
        self.token
            .parent_ancestors()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    pub fn next_token(&self) -> Option<SyntaxToken> {
        let token = self
            .token
            .next_sibling_or_token()
            .and_then(|e| match e {
                rowan::NodeOrToken::Node(n) => n.first_token(),
                rowan::NodeOrToken::Token(t) => Some(t),
            })
            .or_else(|| {
                self.token.parent_ancestors().find_map(|it| it.next_sibling_or_token()).and_then(
                    |e| match e {
                        rowan::NodeOrToken::Node(n) => n.first_token(),
                        rowan::NodeOrToken::Token(t) => Some(t),
                    },
                )
            })?;
        Some(SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn prev_token(&self) -> Option<SyntaxToken> {
        let token = self.token.prev_token()?;
        Some(SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn text(&self) -> &str {
        self.token.text()
    }
}

impl std::fmt::Display for SyntaxToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.token.fmt(f)
    }
}

impl SyntaxNode {
    pub fn child_node(&self, kind: SyntaxKind) -> Option<SyntaxNode> {
        self.node
            .children()
            .find(|n| n.kind() == kind)
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    pub fn child_token(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
        self.node
            .children_with_tokens()
            .find(|n| n.kind() == kind)
            .and_then(|x| x.into_token())
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn child_text(&self, kind: SyntaxKind) -> Option<SmolStr> {
        self.node
            .children_with_tokens()
            .find(|n| n.kind() == kind)
            .and_then(|x| x.as_token().map(|x| x.text().into()))
    }
    pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> + use<'_> {
        let source_file = self.source_file.clone();
        self.node
            .descendants()
            .map(move |node| SyntaxNode { node, source_file: source_file.clone() })
    }
    pub fn kind(&self) -> SyntaxKind {
        self.node.kind()
    }
    pub fn children(&self) -> impl Iterator<Item = SyntaxNode> {
        let source_file = self.source_file.clone();
        self.node.children().map(move |node| SyntaxNode { node, source_file: source_file.clone() })
    }
    pub fn children_with_tokens(&self) -> impl Iterator<Item = NodeOrToken> {
        let source_file = self.source_file.clone();
        self.node.children_with_tokens().map(move |token| match token {
            rowan::NodeOrToken::Node(node) => {
                SyntaxNode { node, source_file: source_file.clone() }.into()
            }
            rowan::NodeOrToken::Token(token) => {
                SyntaxToken { token, source_file: source_file.clone() }.into()
            }
        })
    }
    pub fn text(&self) -> rowan::SyntaxText {
        self.node.text()
    }
    pub fn parent(&self) -> Option<SyntaxNode> {
        self.node.parent().map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    pub fn first_token(&self) -> Option<SyntaxToken> {
        self.node
            .first_token()
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn last_token(&self) -> Option<SyntaxToken> {
        self.node
            .last_token()
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn token_at_offset(&self, offset: TextSize) -> rowan::TokenAtOffset<SyntaxToken> {
        self.node
            .token_at_offset(offset)
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    pub fn first_child(&self) -> Option<SyntaxNode> {
        self.node
            .first_child()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    pub fn first_child_or_token(&self) -> Option<NodeOrToken> {
        self.node.first_child_or_token().map(|n_o_t| match n_o_t {
            rowan::NodeOrToken::Node(node) => {
                NodeOrToken::Node(SyntaxNode { node, source_file: self.source_file.clone() })
            }
            rowan::NodeOrToken::Token(token) => {
                NodeOrToken::Token(SyntaxToken { token, source_file: self.source_file.clone() })
            }
        })
    }
    pub fn next_sibling(&self) -> Option<SyntaxNode> {
        self.node
            .next_sibling()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
}

#[derive(Debug, Clone, derive_more::From)]
pub enum NodeOrToken {
    Node(SyntaxNode),
    Token(SyntaxToken),
}

impl NodeOrToken {
    pub fn kind(&self) -> SyntaxKind {
        match self {
            NodeOrToken::Node(n) => n.kind(),
            NodeOrToken::Token(t) => t.kind(),
        }
    }

    pub fn as_node(&self) -> Option<&SyntaxNode> {
        match self {
            NodeOrToken::Node(n) => Some(n),
            NodeOrToken::Token(_) => None,
        }
    }

    pub fn as_token(&self) -> Option<&SyntaxToken> {
        match self {
            NodeOrToken::Node(_) => None,
            NodeOrToken::Token(t) => Some(t),
        }
    }

    pub fn into_token(self) -> Option<SyntaxToken> {
        match self {
            NodeOrToken::Token(t) => Some(t),
            _ => None,
        }
    }

    pub fn into_node(self) -> Option<SyntaxNode> {
        match self {
            NodeOrToken::Node(n) => Some(n),
            _ => None,
        }
    }

    pub fn text_range(&self) -> TextRange {
        match self {
            NodeOrToken::Node(n) => n.text_range(),
            NodeOrToken::Token(t) => t.text_range(),
        }
    }
}

impl Spanned for SyntaxNode {
    fn span(&self) -> crate::diagnostics::Span {
        crate::diagnostics::Span::new(self.node.text_range().start().into())
    }

    fn source_file(&self) -> Option<&SourceFile> {
        Some(&self.source_file)
    }
}

impl Spanned for Option<SyntaxNode> {
    fn span(&self) -> crate::diagnostics::Span {
        self.as_ref().map(|n| n.span()).unwrap_or_default()
    }

    fn source_file(&self) -> Option<&SourceFile> {
        self.as_ref().and_then(|n| n.source_file())
    }
}

impl Spanned for SyntaxToken {
    fn span(&self) -> crate::diagnostics::Span {
        crate::diagnostics::Span::new(self.token.text_range().start().into())
    }

    fn source_file(&self) -> Option<&SourceFile> {
        Some(&self.source_file)
    }
}

impl Spanned for NodeOrToken {
    fn span(&self) -> crate::diagnostics::Span {
        match self {
            NodeOrToken::Node(n) => n.span(),
            NodeOrToken::Token(t) => t.span(),
        }
    }

    fn source_file(&self) -> Option<&SourceFile> {
        match self {
            NodeOrToken::Node(n) => n.source_file(),
            NodeOrToken::Token(t) => t.source_file(),
        }
    }
}

impl Spanned for Option<NodeOrToken> {
    fn span(&self) -> crate::diagnostics::Span {
        self.as_ref().map(|t| t.span()).unwrap_or_default()
    }
    fn source_file(&self) -> Option<&SourceFile> {
        self.as_ref().and_then(|t| t.source_file())
    }
}

impl Spanned for Option<SyntaxToken> {
    fn span(&self) -> crate::diagnostics::Span {
        self.as_ref().map(|t| t.span()).unwrap_or_default()
    }
    fn source_file(&self) -> Option<&SourceFile> {
        self.as_ref().and_then(|t| t.source_file())
    }
}

/// Returns the text of the `Identifier` child token of the given node,
/// normalized with [`normalize_identifier`].
pub fn identifier_text(node: &SyntaxNode) -> Option<SmolStr> {
    node.child_text(SyntaxKind::Identifier).map(|x| normalize_identifier(&x))
}

/// Normalize an identifier so that `-` and `_` are treated the same:
/// a leading `-` or `_` becomes `_`, and every later `_` becomes `-`.
pub fn normalize_identifier(ident: &str) -> SmolStr {
    let mut builder = smol_str::SmolStrBuilder::default();
    for (pos, c) in ident.chars().enumerate() {
        match (pos, c) {
            (0, '-') | (0, '_') => builder.push('_'),
            (_, '_') => builder.push('-'),
            (_, c) => builder.push(c),
        }
    }
    builder.finish()
}

#[test]
fn test_normalize_identifier() {
    assert_eq!(normalize_identifier("true"), SmolStr::new("true"));
    assert_eq!(normalize_identifier("foo_bar"), SmolStr::new("foo-bar"));
    assert_eq!(normalize_identifier("-foo_bar"), SmolStr::new("_foo-bar"));
    assert_eq!(normalize_identifier("-foo-bar"), SmolStr::new("_foo-bar"));
    assert_eq!(normalize_identifier("foo_bar_"), SmolStr::new("foo-bar-"));
    assert_eq!(normalize_identifier("foo_bar-"), SmolStr::new("foo-bar-"));
    assert_eq!(normalize_identifier("_foo_bar_"), SmolStr::new("_foo-bar-"));
    assert_eq!(normalize_identifier("__1"), SmolStr::new("_-1"));
    assert_eq!(normalize_identifier("--1"), SmolStr::new("_-1"));
    assert_eq!(normalize_identifier("--1--"), SmolStr::new("_-1--"));
}

/// Parse the given `source` and return the root [`SyntaxNode`].
/// Parse errors are reported through `build_diagnostics`.
pub fn parse(
    source: String,
    path: Option<&std::path::Path>,
    build_diagnostics: &mut BuildDiagnostics,
) -> SyntaxNode {
    let mut p = DefaultParser::new(&source, build_diagnostics);
    p.source_file = std::rc::Rc::new(crate::diagnostics::SourceFileInner::new(
        path.map(crate::pathutils::clean_path).unwrap_or_default(),
        source,
    ));
    document::parse_document(&mut p);
    SyntaxNode {
        node: rowan::SyntaxNode::new_root(p.builder.finish()),
        source_file: p.source_file.clone(),
    }
}

/// Load and parse the file at `path`, returning `None` if it could not be read.
pub fn parse_file<P: AsRef<std::path::Path>>(
    path: P,
    build_diagnostics: &mut BuildDiagnostics,
) -> Option<SyntaxNode> {
    let path = crate::pathutils::clean_path(path.as_ref());
    let source = crate::diagnostics::load_from_path(&path)
        .map_err(|d| build_diagnostics.push_internal_error(d))
        .ok()?;
    Some(parse(source, Some(path.as_ref()), build_diagnostics))
}

/// Parse an already lexed token stream into a syntax tree.
pub fn parse_tokens(
    tokens: Vec<Token>,
    source_file: SourceFile,
    diags: &mut BuildDiagnostics,
) -> SyntaxNode {
    let mut p = DefaultParser::from_tokens(tokens, diags);
    document::parse_document(&mut p);
    SyntaxNode { node: rowan::SyntaxNode::new_root(p.builder.finish()), source_file }
}
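
// Illustrative sketch: driving the whole parser on a small snippet. Only the kind of
// the root node produced by `parse_document` is asserted; the shape of the rest of the
// tree is left to the tests in the sub-modules. Assumes `BuildDiagnostics` implements
// `Default`, as in the other tests of this crate.
#[test]
fn test_parse_produces_document_root() {
    let mut diags = BuildDiagnostics::default();
    let node = parse("export component Example { }".into(), None, &mut diags);
    assert_eq!(node.kind(), SyntaxKind::Document);
}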