1use crate::diagnostics::{BuildDiagnostics, SourceFile, Spanned};
16use smol_str::SmolStr;
17use std::fmt::Display;
18
19mod document;
20mod element;
21mod expressions;
22mod statements;
23mod r#type;
24
/// Common imports shared by the parser submodules (`document`, `element`,
/// `expressions`, …); the test-only items back the generated parser tests.
mod prelude {
    #[cfg(test)]
    pub use super::DefaultParser;
    pub use super::{Parser, SyntaxKind};
    #[cfg(test)]
    pub use super::{SyntaxNode, SyntaxNodeVerify, syntax_nodes};
    #[cfg(test)]
    pub use i_slint_parser_test_macro::parser_test;
}
35
/// Test-only trait implemented (via `declare_syntax!`) by every typed
/// syntax-node wrapper: `verify` asserts that a node matches the grammar
/// rule it represents.
#[cfg(test)]
pub trait SyntaxNodeVerify {
    /// The `SyntaxKind` corresponding to this node type.
    const KIND: SyntaxKind;
    /// Asserts that `node` has the expected kind. The generated impls
    /// override this to also check the children (see `verify_node!`).
    fn verify(node: SyntaxNode) {
        assert_eq!(node.kind(), Self::KIND)
    }
}
46
47pub use rowan::{TextRange, TextSize};
48
/// Test-only helper macro used by the generated `SyntaxNodeVerify` impls.
///
/// Given a node and a child specification from `declare_syntax!`
/// (`[ Kind, ?Kind, *Kind, 2 Kind, … ]`), it checks the cardinality of each
/// declared child kind, rejects children of undeclared kinds, and then
/// recursively verifies every child.
#[cfg(test)]
macro_rules! verify_node {
    // Entry point: one cardinality check per declared child spec, then the
    // "no undeclared children" check, then the recursive verification.
    ($node:ident, [ $($t1:tt $($t2:ident)?),* ]) => {
        $(verify_node!(@check_has_children $node, $t1 $($t2)* );)*

        // Every child must be one of the declared kinds.
        for c in $node.children() {
            assert!(
                false $(|| c.kind() == verify_node!(@extract_kind $t1 $($t2)*))*,
                "Node is none of [{}]\n{:?}", stringify!($($t1 $($t2)*),*) ,c);
        }

        // Recursively verify each child against its own specification.
        $(
            for _c in $node.children().filter(|n| n.kind() == verify_node!(@extract_kind $t1 $($t2)*)) {
                <verify_node!(@extract_type $t1 $($t2)*)>::verify(_c)
            }
        )*
    };

    // `*Kind`: any number of children — nothing to check here.
    (@check_has_children $node:ident, * $kind:ident) => {};
    // `?Kind`: at most one child of that kind.
    (@check_has_children $node:ident, ? $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert!(count <= 1, "Expecting one or zero sub-node of type {}, found {}\n{:?}", stringify!($kind), count, $node);
    };
    // `Kind`: exactly one child of that kind.
    (@check_has_children $node:ident, $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert_eq!(count, 1, "Expecting exactly one sub-node of type {}\n{:?}", stringify!($kind), $node);
    };
    // `N Kind`: exactly N children of that kind.
    (@check_has_children $node:ident, $count:literal $kind:ident) => {
        let count = $node.children_with_tokens().filter(|n| n.kind() == SyntaxKind::$kind).count();
        assert_eq!(count, $count, "Expecting {} sub-node of type {}, found {}\n{:?}", $count, stringify!($kind), count, $node);
    };

    // Map a child spec (with any prefix) to its SyntaxKind …
    (@extract_kind * $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind ? $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind $count:literal $kind:ident) => {SyntaxKind::$kind};
    (@extract_kind $kind:ident) => {SyntaxKind::$kind};

    // … and to its typed wrapper in `syntax_nodes`.
    (@extract_type * $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type ? $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type $count:literal $kind:ident) => {$crate::parser::syntax_nodes::$kind};
    (@extract_type $kind:ident) => {$crate::parser::syntax_nodes::$kind};
}
100
/// Generates typed accessor methods on the `syntax_nodes` wrapper structs,
/// one per child specification from `declare_syntax!`:
/// `*Kind` → iterator, `?Kind` → `Option`, `2 Kind`/`3 Kind` → tuple
/// (panicking if fewer are present), plain `Kind` → the node itself
/// (panicking if missing).
macro_rules! node_accessors {
    ([ $($t1:tt $($t2:ident)?),* ]) => {
        $(node_accessors!{@ $t1 $($t2)*} )*
    };

    // `*Kind`: any number of children — iterator accessor.
    (@ * $kind:ident) => {
        #[allow(non_snake_case)]
        pub fn $kind(&self) -> impl Iterator<Item = $kind> + use<> {
            self.0.children().filter(|n| n.kind() == SyntaxKind::$kind).map(Into::into)
        }
    };
    // `?Kind`: optional child — `Option` accessor.
    (@ ? $kind:ident) => {
        #[allow(non_snake_case)]
        pub fn $kind(&self) -> Option<$kind> {
            self.0.child_node(SyntaxKind::$kind).map(Into::into)
        }
    };
    // `2 Kind`: exactly two children — pair accessor.
    (@ 2 $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> ($kind, $kind) {
            let mut it = self.0.children().filter(|n| n.kind() == SyntaxKind::$kind);
            let a = it.next().expect(stringify!(Missing first $kind));
            let b = it.next().expect(stringify!(Missing second $kind));
            debug_assert!(it.next().is_none(), stringify!(More $kind than expected));
            (a.into(), b.into())
        }
    };
    // `3 Kind`: exactly three children — triple accessor.
    (@ 3 $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> ($kind, $kind, $kind) {
            let mut it = self.0.children().filter(|n| n.kind() == SyntaxKind::$kind);
            let a = it.next().expect(stringify!(Missing first $kind));
            let b = it.next().expect(stringify!(Missing second $kind));
            let c = it.next().expect(stringify!(Missing third $kind));
            debug_assert!(it.next().is_none(), stringify!(More $kind than expected));
            (a.into(), b.into(), c.into())
        }
    };
    // `Kind`: mandatory child — direct accessor.
    (@ $kind:ident) => {
        #[allow(non_snake_case)]
        #[track_caller]
        pub fn $kind(&self) -> $kind {
            self.0.child_node(SyntaxKind::$kind).expect(stringify!(Missing $kind)).into()
        }
    };

}
151
/// Declares the whole grammar in one place: the first brace block lists the
/// tokens (each with its lexing rule function or literal text), the second
/// lists the node kinds together with their allowed children.
///
/// Expands to the `SyntaxKind` enum, its `Display` impl, the
/// `lex_next_token` function, and the typed wrappers in `syntax_nodes`.
macro_rules! declare_syntax {
    ({
        $($token:ident -> $rule:expr ,)*
    }
    {
        $( $(#[$attr:meta])* $nodekind:ident -> $children:tt ,)*
    })
    => {
        #[repr(u16)]
        #[derive(Debug, Copy, Clone, Eq, PartialEq, num_enum::IntoPrimitive, num_enum::TryFromPrimitive, Hash, Ord, PartialOrd)]
        pub enum SyntaxKind {
            Error,
            Eof,

            // Token kinds, in declaration order.
            $(
                $token,
            )*

            // Node kinds.
            $(
                $(#[$attr])*
                $nodekind,
            )*
        }

        impl Display for SyntaxKind {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                match self {
                    // Tokens whose rule is a literal string display as that
                    // string in quotes (e.g. `'<=>'`); everything else falls
                    // back to the Debug name below.
                    $(Self::$token => {
                        if let Some(character) = <dyn std::any::Any>::downcast_ref::<&str>(& $rule) {
                            return write!(f, "'{}'", character)
                        }
                    })*
                    _ => ()
                }
                write!(f, "{:?}", self)
            }
        }


        /// Lex the next token at the start of `text`, trying every token
        /// rule in declaration order; returns the matched length and kind.
        pub fn lex_next_token(text : &str, state: &mut crate::lexer::LexState) -> Option<(usize, SyntaxKind)> {
            use crate::lexer::LexingRule;
            $(
                let len = ($rule).lex(text, state);
                if len > 0 {
                    return Some((len, SyntaxKind::$token));
                }
            )*
            None
        }

        /// Typed wrappers around `SyntaxNode`, one struct per node kind.
        pub mod syntax_nodes {
            use super::*;
            $(
                #[derive(Debug, Clone, derive_more::Deref, derive_more::Into)]
                pub struct $nodekind(SyntaxNode);
                #[cfg(test)]
                impl SyntaxNodeVerify for $nodekind {
                    const KIND: SyntaxKind = SyntaxKind::$nodekind;
                    #[track_caller]
                    fn verify(node: SyntaxNode) {
                        assert_eq!(node.kind(), Self::KIND);
                        verify_node!(node, $children);
                    }
                }
                impl $nodekind {
                    node_accessors!{$children}

                    /// Returns `Some` only if the node has the matching kind.
                    pub fn new(node: SyntaxNode) -> Option<Self> {
                        (node.kind() == SyntaxKind::$nodekind).then(|| Self(node))
                    }
                }

                impl From<SyntaxNode> for $nodekind {
                    #[track_caller]
                    fn from(node: SyntaxNode) -> Self {
                        assert_eq!(node.kind(), SyntaxKind::$nodekind);
                        Self(node)
                    }
                }

                impl Spanned for $nodekind {
                    fn span(&self) -> crate::diagnostics::Span {
                        self.0.span()
                    }

                    fn source_file(&self) -> Option<&SourceFile> {
                        self.0.source_file()
                    }
                }
            )*
        }
    }
}
declare_syntax! {
    // Tokens: lexed either by a rule function or by literal text.
    // NOTE(review): `lex_next_token` tries these in order, so multi-character
    // operators appear before their single-character prefixes.
    {
        Whitespace -> &crate::lexer::lex_whitespace,
        Comment -> &crate::lexer::lex_comment,
        StringLiteral -> &crate::lexer::lex_string,
        NumberLiteral -> &crate::lexer::lex_number,
        ColorLiteral -> &crate::lexer::lex_color,
        Identifier -> &crate::lexer::lex_identifier,
        DoubleArrow -> "<=>",
        PlusEqual -> "+=",
        MinusEqual -> "-=",
        StarEqual -> "*=",
        DivEqual -> "/=",
        LessEqual -> "<=",
        GreaterEqual -> ">=",
        EqualEqual -> "==",
        NotEqual -> "!=",
        ColonEqual -> ":=",
        FatArrow -> "=>",
        Arrow -> "->",
        OrOr -> "||",
        AndAnd -> "&&",
        LBrace -> "{",
        RBrace -> "}",
        LParent -> "(",
        RParent -> ")",
        LAngle -> "<",
        RAngle -> ">",
        LBracket -> "[",
        RBracket -> "]",
        Plus -> "+",
        Minus -> "-",
        Star -> "*",
        Div -> "/",
        Equal -> "=",
        Colon -> ":",
        Comma -> ",",
        Semicolon -> ";",
        Bang -> "!",
        Dot -> ".",
        Question -> "?",
        Dollar -> "$",
        At -> "@",
        Pipe -> "|",
        Percent -> "%",
    }
    // Node kinds and their allowed children.
    // Legend: `*X` = any number, `?X` = optional, `N X` = exactly N,
    // plain `X` = exactly one.
    {
        Document -> [ *Component, *ExportsList, *ImportSpecifier, *StructDeclaration, *EnumDeclaration ],
        Component -> [ DeclaredIdentifier, ?UsesSpecifier, Element ],
        SubElement -> [ Element ],
        Element -> [ ?QualifiedName, *PropertyDeclaration, *Binding, *CallbackConnection,
                     *CallbackDeclaration, *ConditionalElement, *Function, *SubElement,
                     *RepeatedElement, *PropertyAnimation, *PropertyChangedCallback,
                     *TwoWayBinding, *States, *Transitions, ?ChildrenPlaceholder ],
        RepeatedElement -> [ ?DeclaredIdentifier, ?RepeatedIndex, Expression , SubElement],
        RepeatedIndex -> [],
        ConditionalElement -> [ Expression , SubElement],
        CallbackDeclaration -> [ DeclaredIdentifier, *CallbackDeclarationParameter, ?ReturnType, ?TwoWayBinding ],
        CallbackDeclarationParameter -> [ ?DeclaredIdentifier, Type],
        Function -> [DeclaredIdentifier, *ArgumentDeclaration, ?ReturnType, CodeBlock ],
        ArgumentDeclaration -> [DeclaredIdentifier, Type],
        ReturnType -> [Type],
        CallbackConnection -> [ *DeclaredIdentifier, ?CodeBlock, ?Expression ],
        PropertyDeclaration-> [ ?Type , DeclaredIdentifier, ?BindingExpression, ?TwoWayBinding ],
        PropertyAnimation-> [ *QualifiedName, *Binding ],
        PropertyChangedCallback-> [ DeclaredIdentifier, ?CodeBlock, ?Expression ],
        QualifiedName-> [],
        DeclaredIdentifier -> [],
        ChildrenPlaceholder -> [],
        Binding-> [ BindingExpression ],
        TwoWayBinding -> [ Expression ],
        BindingExpression-> [ ?CodeBlock, ?Expression ],
        CodeBlock-> [ *Expression, *LetStatement, *ReturnStatement ],
        LetStatement -> [ DeclaredIdentifier, ?Type, Expression ],
        ReturnStatement -> [ ?Expression ],
        // An Expression node wraps exactly one of these alternatives.
        Expression-> [ ?Expression, ?FunctionCallExpression, ?IndexExpression, ?SelfAssignment,
                       ?ConditionalExpression, ?QualifiedName, ?BinaryExpression, ?Array, ?ObjectLiteral,
                       ?UnaryOpExpression, ?CodeBlock, ?StringTemplate, ?AtImageUrl, ?AtGradient, ?AtTr,
                       ?MemberAccess ],
        StringTemplate -> [*Expression],
        AtImageUrl -> [],
        AtGradient -> [*Expression],
        AtTr -> [?TrContext, ?TrPlural, *Expression],
        AtMarkdown -> [*Expression],
        TrContext -> [],
        TrPlural -> [Expression],
        FunctionCallExpression -> [*Expression],
        IndexExpression -> [2 Expression],
        SelfAssignment -> [2 Expression],
        ConditionalExpression -> [3 Expression],
        BinaryExpression -> [2 Expression],
        UnaryOpExpression -> [Expression],
        MemberAccess -> [Expression],
        Array -> [ *Expression ],
        ObjectLiteral -> [ *ObjectMember ],
        ObjectMember -> [ Expression ],
        States -> [*State],
        State -> [DeclaredIdentifier, ?Expression, *StatePropertyChange, *Transition],
        StatePropertyChange -> [ QualifiedName, BindingExpression ],
        Transitions -> [*Transition],
        Transition -> [?DeclaredIdentifier, *PropertyAnimation],
        ExportsList -> [ *ExportSpecifier, ?Component, *StructDeclaration, ?ExportModule, *EnumDeclaration ],
        ExportSpecifier -> [ ExportIdentifier, ?ExportName ],
        ExportIdentifier -> [],
        ExportName -> [],
        ExportModule -> [],
        ImportSpecifier -> [ ?ImportIdentifierList ],
        ImportIdentifierList -> [ *ImportIdentifier ],
        ImportIdentifier -> [ ExternalName, ?InternalName ],
        ExternalName -> [],
        InternalName -> [],
        Type -> [ ?QualifiedName, ?ObjectType, ?ArrayType ],
        ObjectType ->[ *ObjectTypeMember ],
        ObjectTypeMember -> [ Type ],
        ArrayType -> [ Type ],
        StructDeclaration -> [DeclaredIdentifier, ObjectType, ?AtRustAttr],
        EnumDeclaration -> [DeclaredIdentifier, *EnumValue, ?AtRustAttr],
        EnumValue -> [],
        AtRustAttr -> [],
        UsesSpecifier -> [ *UsesIdentifier ],
        UsesIdentifier -> [QualifiedName, DeclaredIdentifier],
    }
}
460
461impl From<SyntaxKind> for rowan::SyntaxKind {
462 fn from(v: SyntaxKind) -> Self {
463 rowan::SyntaxKind(v.into())
464 }
465}
466
/// A lexed token: its kind, text, and position in the source.
#[derive(Clone, Debug)]
pub struct Token {
    pub kind: SyntaxKind,
    pub text: SmolStr,
    // Offset of the token within the source text.
    pub offset: usize,
    // Length of the token's text.
    pub length: usize,
    // Original proc-macro span — presumably set when the tokens come from
    // macro input rather than a file; TODO confirm against the lexer.
    #[cfg(feature = "proc_macro_span")]
    pub span: Option<proc_macro::Span>,
}
476
477impl Default for Token {
478 fn default() -> Self {
479 Token {
480 kind: SyntaxKind::Eof,
481 text: Default::default(),
482 offset: 0,
483 length: 0,
484 #[cfg(feature = "proc_macro_span")]
485 span: None,
486 }
487 }
488}
489
490impl Token {
491 pub fn as_str(&self) -> &str {
492 self.text.as_str()
493 }
494
495 pub fn kind(&self) -> SyntaxKind {
496 self.kind
497 }
498}
499
/// The `Parser` abstraction that the grammar functions in the submodules are
/// written against; `DefaultParser` below is the main implementation.
mod parser_trait {
    use super::*;

    pub trait Parser: Sized {
        /// Opaque builder position that a node can later be wrapped around.
        type Checkpoint: Clone;

        /// Start a new node; it is finished when the returned guard drops.
        #[must_use = "The node will be finished when it is dropped"]
        fn start_node(&mut self, kind: SyntaxKind) -> Node<'_, Self> {
            self.start_node_impl(kind, None, NodeToken(()));
            Node(self)
        }
        #[must_use = "use start_node_at to use this checkpoint"]
        fn checkpoint(&mut self) -> Self::Checkpoint;
        /// Start a node that retroactively wraps everything emitted since
        /// `checkpoint` (or behaves like `start_node` when given `None`).
        #[must_use = "The node will be finished when it is dropped"]
        fn start_node_at(
            &mut self,
            checkpoint: impl Into<Option<Self::Checkpoint>>,
            kind: SyntaxKind,
        ) -> Node<'_, Self> {
            self.start_node_impl(kind, checkpoint.into(), NodeToken(()));
            Node(self)
        }

        // The private `NodeToken` argument makes these callable only from
        // `start_node`/`start_node_at` and the `Node` guard's `Drop`.
        fn finish_node_impl(&mut self, token: NodeToken);
        fn start_node_impl(
            &mut self,
            kind: SyntaxKind,
            checkpoint: Option<Self::Checkpoint>,
            token: NodeToken,
        );

        /// The current token (same as `nth(0)`).
        fn peek(&mut self) -> Token {
            self.nth(0)
        }
        /// Look ahead `n` significant tokens (implementations skip
        /// whitespace and comments).
        fn nth(&mut self, n: usize) -> Token;
        /// Consume the current token and advance.
        fn consume(&mut self);
        /// Report a parse error at the current position.
        fn error(&mut self, e: impl Into<String>);
        /// Report a warning at the current position.
        fn warning(&mut self, e: impl Into<String>);

        /// Consume the current token if it has kind `kind`; otherwise emit a
        /// syntax error and return `false`.
        fn expect(&mut self, kind: SyntaxKind) -> bool {
            if !self.test(kind) {
                self.error(format!("Syntax error: expected {kind}"));
                return false;
            }
            true
        }

        /// Consume the current token only if it has kind `kind`; returns
        /// whether it was consumed.
        fn test(&mut self, kind: SyntaxKind) -> bool {
            if self.nth(0).kind() != kind {
                return false;
            }
            self.consume();
            true
        }

        /// Error recovery: skip tokens until `kind` occurs at the current
        /// nesting depth (tracking `()`, `{}` and `[]` pairs), also stopping
        /// at Eof or at a closing delimiter that would close an enclosing
        /// scope; then `expect(kind)`.
        fn until(&mut self, kind: SyntaxKind) {
            let mut parens = 0;
            let mut braces = 0;
            let mut brackets = 0;
            loop {
                match self.nth(0).kind() {
                    k if k == kind && parens == 0 && braces == 0 && brackets == 0 => break,
                    SyntaxKind::Eof => break,
                    SyntaxKind::LParent => parens += 1,
                    SyntaxKind::LBrace => braces += 1,
                    SyntaxKind::LBracket => brackets += 1,
                    SyntaxKind::RParent if parens == 0 => break,
                    SyntaxKind::RParent => parens -= 1,
                    SyntaxKind::RBrace if braces == 0 => break,
                    SyntaxKind::RBrace => braces -= 1,
                    SyntaxKind::RBracket if brackets == 0 => break,
                    SyntaxKind::RBracket => brackets -= 1,
                    _ => {}
                };
                self.consume();
            }
            self.expect(kind);
        }
    }

    /// Capability token: its private field means it can only be constructed
    /// inside this module, restricting who may call the `*_impl` methods.
    pub struct NodeToken(());
    /// Guard returned by `start_node`/`start_node_at`: finishes the node
    /// when dropped, and derefs to the parser so parsing can continue.
    #[derive(derive_more::DerefMut)]
    pub struct Node<'a, P: Parser>(&'a mut P);
    impl<P: Parser> Drop for Node<'_, P> {
        fn drop(&mut self) {
            self.0.finish_node_impl(NodeToken(()));
        }
    }
    impl<P: Parser> core::ops::Deref for Node<'_, P> {
        type Target = P;
        fn deref(&self) -> &Self::Target {
            self.0
        }
    }
}
616#[doc(inline)]
617pub use parser_trait::*;
618
/// The parser implementation used for normal compilation: feeds a
/// `rowan::GreenNodeBuilder` from a pre-lexed token vector.
pub struct DefaultParser<'a> {
    builder: rowan::GreenNodeBuilder<'static>,
    tokens: Vec<Token>,
    // Index of the current token within `tokens`.
    cursor: usize,
    diags: &'a mut BuildDiagnostics,
    source_file: SourceFile,
}
628
629impl<'a> DefaultParser<'a> {
630 fn from_tokens(tokens: Vec<Token>, diags: &'a mut BuildDiagnostics) -> Self {
631 Self {
632 builder: Default::default(),
633 tokens,
634 cursor: 0,
635 diags,
636 source_file: Default::default(),
637 }
638 }
639
640 pub fn new(source: &str, diags: &'a mut BuildDiagnostics) -> Self {
643 Self::from_tokens(crate::lexer::lex(source), diags)
644 }
645
646 fn current_token(&self) -> Token {
647 self.tokens.get(self.cursor).cloned().unwrap_or_default()
648 }
649
650 pub fn consume_ws(&mut self) {
652 while matches!(self.current_token().kind, SyntaxKind::Whitespace | SyntaxKind::Comment) {
653 self.consume()
654 }
655 }
656}
657
impl Parser for DefaultParser<'_> {
    fn start_node_impl(
        &mut self,
        kind: SyntaxKind,
        checkpoint: Option<Self::Checkpoint>,
        _: NodeToken,
    ) {
        // Flush pending whitespace/comments into the *enclosing* node before
        // opening the new one. The Document root has no enclosing node to
        // receive them, hence the exception.
        if kind != SyntaxKind::Document {
            self.consume_ws();
        }
        match checkpoint {
            None => self.builder.start_node(kind.into()),
            Some(cp) => self.builder.start_node_at(cp, kind.into()),
        }
    }

    fn finish_node_impl(&mut self, _: NodeToken) {
        self.builder.finish_node();
    }

    /// Look ahead `n` tokens, not counting whitespace/comments. Note this
    /// also flushes whitespace/comments at the cursor into the tree
    /// (via `consume_ws`).
    fn nth(&mut self, mut n: usize) -> Token {
        self.consume_ws();
        let mut c = self.cursor;
        while n > 0 {
            n -= 1;
            c += 1;
            // Skip insignificant tokens while counting ahead.
            while c < self.tokens.len()
                && matches!(self.tokens[c].kind, SyntaxKind::Whitespace | SyntaxKind::Comment)
            {
                c += 1;
            }
        }
        self.tokens.get(c).cloned().unwrap_or_default()
    }

    /// Consume the current token into the tree. At end of stream this emits
    /// an Eof token without advancing the cursor.
    fn consume(&mut self) {
        let t = self.current_token();
        self.builder.token(t.kind.into(), t.text.as_str());
        if t.kind != SyntaxKind::Eof {
            self.cursor += 1;
        }
    }

    /// Push an error spanning the current token into the diagnostics.
    fn error(&mut self, e: impl Into<String>) {
        let current_token = self.current_token();
        #[allow(unused_mut)]
        let mut span = crate::diagnostics::Span::new(current_token.offset, current_token.length);
        #[cfg(feature = "proc_macro_span")]
        {
            span.span = current_token.span;
        }

        self.diags.push_error_with_span(
            e.into(),
            crate::diagnostics::SourceLocation {
                source_file: Some(self.source_file.clone()),
                span,
            },
        );
    }

    /// Push a warning spanning the current token into the diagnostics.
    fn warning(&mut self, e: impl Into<String>) {
        let current_token = self.current_token();
        #[allow(unused_mut)]
        let mut span = crate::diagnostics::Span::new(current_token.offset, current_token.length);
        #[cfg(feature = "proc_macro_span")]
        {
            span.span = current_token.span;
        }

        self.diags.push_warning_with_span(
            e.into(),
            crate::diagnostics::SourceLocation {
                source_file: Some(self.source_file.clone()),
                span,
            },
        );
    }

    type Checkpoint = rowan::Checkpoint;
    fn checkpoint(&mut self) -> Self::Checkpoint {
        self.builder.checkpoint()
    }
}
746
/// Uninhabited marker type tying rowan's untyped trees to our `SyntaxKind`.
#[derive(Clone, Copy, Debug, Eq, Ord, Hash, PartialEq, PartialOrd)]
pub enum Language {}
impl rowan::Language for Language {
    type Kind = SyntaxKind;
    fn kind_from_raw(raw: rowan::SyntaxKind) -> Self::Kind {
        // Inverse of kind_to_raw: valid as long as the raw kind was produced
        // from a SyntaxKind discriminant.
        SyntaxKind::try_from(raw.0).unwrap()
    }
    fn kind_to_raw(kind: Self::Kind) -> rowan::SyntaxKind {
        kind.into()
    }
}
758
/// A node of the syntax tree: the rowan node plus the source file it came
/// from, so that spans can be mapped to diagnostics.
#[derive(Debug, Clone, derive_more::Deref)]
pub struct SyntaxNode {
    #[deref]
    pub node: rowan::SyntaxNode<Language>,
    pub source_file: SourceFile,
}
765
/// A token of the syntax tree: the rowan token plus its source file
/// (same pairing as [`SyntaxNode`]).
#[derive(Debug, Clone, derive_more::Deref)]
pub struct SyntaxToken {
    #[deref]
    pub token: rowan::SyntaxToken<Language>,
    pub source_file: SourceFile,
}
772
impl SyntaxToken {
    /// The node this token belongs to. Panics for a detached token.
    pub fn parent(&self) -> SyntaxNode {
        SyntaxNode { node: self.token.parent().unwrap(), source_file: self.source_file.clone() }
    }
    /// The ancestor nodes of this token.
    pub fn parent_ancestors(&self) -> impl Iterator<Item = SyntaxNode> + '_ {
        self.token
            .parent_ancestors()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    /// The next token in document order: the first token of the next
    /// sibling, or — when this token ends its subtree — the first token
    /// after the closest ancestor that still has a next sibling.
    pub fn next_token(&self) -> Option<SyntaxToken> {
        let token = self
            .token
            .next_sibling_or_token()
            .and_then(|e| match e {
                rowan::NodeOrToken::Node(n) => n.first_token(),
                rowan::NodeOrToken::Token(t) => Some(t),
            })
            .or_else(|| {
                self.token.parent_ancestors().find_map(|it| it.next_sibling_or_token()).and_then(
                    |e| match e {
                        rowan::NodeOrToken::Node(n) => n.first_token(),
                        rowan::NodeOrToken::Token(t) => Some(t),
                    },
                )
            })?;
        Some(SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The previous token in document order.
    pub fn prev_token(&self) -> Option<SyntaxToken> {
        let token = self.token.prev_token()?;
        Some(SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The token's text.
    pub fn text(&self) -> &str {
        self.token.text()
    }
}
815
816impl std::fmt::Display for SyntaxToken {
817 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
818 self.token.fmt(f)
819 }
820}
821
impl SyntaxNode {
    /// The first child node of the given kind, if any.
    pub fn child_node(&self, kind: SyntaxKind) -> Option<SyntaxNode> {
        self.node
            .children()
            .find(|n| n.kind() == kind)
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    /// The first child token of the given kind, if any.
    pub fn child_token(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
        self.node
            .children_with_tokens()
            .find(|n| n.kind() == kind)
            .and_then(|x| x.into_token())
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The text of the first child token of the given kind, if any.
    pub fn child_text(&self, kind: SyntaxKind) -> Option<SmolStr> {
        self.node
            .children_with_tokens()
            .find(|n| n.kind() == kind)
            .and_then(|x| x.as_token().map(|x| x.text().into()))
    }
    /// The descendant nodes of this node, in rowan's traversal order.
    pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode> + use<> {
        let source_file = self.source_file.clone();
        self.node
            .descendants()
            .map(move |node| SyntaxNode { node, source_file: source_file.clone() })
    }
    /// This node's syntax kind.
    pub fn kind(&self) -> SyntaxKind {
        self.node.kind()
    }
    /// The direct child nodes (tokens excluded).
    pub fn children(&self) -> impl Iterator<Item = SyntaxNode> + use<> {
        let source_file = self.source_file.clone();
        self.node.children().map(move |node| SyntaxNode { node, source_file: source_file.clone() })
    }
    /// The direct children, including tokens.
    pub fn children_with_tokens(&self) -> impl Iterator<Item = NodeOrToken> + use<> {
        let source_file = self.source_file.clone();
        self.node.children_with_tokens().map(move |token| match token {
            rowan::NodeOrToken::Node(node) => {
                SyntaxNode { node, source_file: source_file.clone() }.into()
            }
            rowan::NodeOrToken::Token(token) => {
                SyntaxToken { token, source_file: source_file.clone() }.into()
            }
        })
    }
    /// The source text covered by this node.
    pub fn text(&self) -> rowan::SyntaxText {
        self.node.text()
    }
    /// The parent node, or `None` for the root.
    pub fn parent(&self) -> Option<SyntaxNode> {
        self.node.parent().map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    /// The first token inside this node's subtree, if any.
    pub fn first_token(&self) -> Option<SyntaxToken> {
        self.node
            .first_token()
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The last token inside this node's subtree, if any.
    pub fn last_token(&self) -> Option<SyntaxToken> {
        self.node
            .last_token()
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The token(s) at a given text offset (see `rowan::TokenAtOffset`;
    /// an offset on a token boundary can yield two tokens).
    pub fn token_at_offset(&self, offset: TextSize) -> rowan::TokenAtOffset<SyntaxToken> {
        self.node
            .token_at_offset(offset)
            .map(|token| SyntaxToken { token, source_file: self.source_file.clone() })
    }
    /// The first child node, if any.
    pub fn first_child(&self) -> Option<SyntaxNode> {
        self.node
            .first_child()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
    /// The first child node or token, if any.
    pub fn first_child_or_token(&self) -> Option<NodeOrToken> {
        self.node.first_child_or_token().map(|n_o_t| match n_o_t {
            rowan::NodeOrToken::Node(node) => {
                NodeOrToken::Node(SyntaxNode { node, source_file: self.source_file.clone() })
            }
            rowan::NodeOrToken::Token(token) => {
                NodeOrToken::Token(SyntaxToken { token, source_file: self.source_file.clone() })
            }
        })
    }
    /// The next sibling node, if any.
    pub fn next_sibling(&self) -> Option<SyntaxNode> {
        self.node
            .next_sibling()
            .map(|node| SyntaxNode { node, source_file: self.source_file.clone() })
    }
}
908
/// Either a [`SyntaxNode`] or a [`SyntaxToken`]: the source-file-aware
/// analog of `rowan::NodeOrToken`.
#[derive(Debug, Clone, derive_more::From)]
pub enum NodeOrToken {
    Node(SyntaxNode),
    Token(SyntaxToken),
}
914
915impl NodeOrToken {
916 pub fn kind(&self) -> SyntaxKind {
917 match self {
918 NodeOrToken::Node(n) => n.kind(),
919 NodeOrToken::Token(t) => t.kind(),
920 }
921 }
922
923 pub fn as_node(&self) -> Option<&SyntaxNode> {
924 match self {
925 NodeOrToken::Node(n) => Some(n),
926 NodeOrToken::Token(_) => None,
927 }
928 }
929
930 pub fn as_token(&self) -> Option<&SyntaxToken> {
931 match self {
932 NodeOrToken::Node(_) => None,
933 NodeOrToken::Token(t) => Some(t),
934 }
935 }
936
937 pub fn into_token(self) -> Option<SyntaxToken> {
938 match self {
939 NodeOrToken::Token(t) => Some(t),
940 _ => None,
941 }
942 }
943
944 pub fn into_node(self) -> Option<SyntaxNode> {
945 match self {
946 NodeOrToken::Node(n) => Some(n),
947 _ => None,
948 }
949 }
950
951 pub fn text_range(&self) -> TextRange {
952 match self {
953 NodeOrToken::Node(n) => n.text_range(),
954 NodeOrToken::Token(t) => t.text_range(),
955 }
956 }
957}
958
impl Spanned for SyntaxNode {
    fn span(&self) -> crate::diagnostics::Span {
        // Convert the rowan text range into an (offset, length) Span.
        let range = self.node.text_range();
        crate::diagnostics::Span::new(range.start().into(), range.len().into())
    }

    fn source_file(&self) -> Option<&SourceFile> {
        Some(&self.source_file)
    }
}
969
970impl Spanned for Option<SyntaxNode> {
971 fn span(&self) -> crate::diagnostics::Span {
972 self.as_ref().map(|n| n.span()).unwrap_or_default()
973 }
974
975 fn source_file(&self) -> Option<&SourceFile> {
976 self.as_ref().and_then(|n| n.source_file())
977 }
978}
979
impl Spanned for SyntaxToken {
    fn span(&self) -> crate::diagnostics::Span {
        // Convert the rowan text range into an (offset, length) Span.
        let range = self.token.text_range();
        crate::diagnostics::Span::new(range.start().into(), range.len().into())
    }

    fn source_file(&self) -> Option<&SourceFile> {
        Some(&self.source_file)
    }
}
990
/// Delegates to whichever variant is wrapped.
impl Spanned for NodeOrToken {
    fn span(&self) -> crate::diagnostics::Span {
        match self {
            NodeOrToken::Node(n) => n.span(),
            NodeOrToken::Token(t) => t.span(),
        }
    }

    fn source_file(&self) -> Option<&SourceFile> {
        match self {
            NodeOrToken::Node(n) => n.source_file(),
            NodeOrToken::Token(t) => t.source_file(),
        }
    }
}
1006
1007impl Spanned for Option<NodeOrToken> {
1008 fn span(&self) -> crate::diagnostics::Span {
1009 self.as_ref().map(|t| t.span()).unwrap_or_default()
1010 }
1011 fn source_file(&self) -> Option<&SourceFile> {
1012 self.as_ref().and_then(|t| t.source_file())
1013 }
1014}
1015
1016impl Spanned for Option<SyntaxToken> {
1017 fn span(&self) -> crate::diagnostics::Span {
1018 self.as_ref().map(|t| t.span()).unwrap_or_default()
1019 }
1020 fn source_file(&self) -> Option<&SourceFile> {
1021 self.as_ref().and_then(|t| t.source_file())
1022 }
1023}
1024
/// The normalized text of the `Identifier` token child of `node`, if any.
pub fn identifier_text(node: &SyntaxNode) -> Option<SmolStr> {
    node.child_text(SyntaxKind::Identifier).map(|x| normalize_identifier(&x))
}
1029
1030pub fn normalize_identifier(ident: &str) -> SmolStr {
1031 let mut builder = smol_str::SmolStrBuilder::default();
1032 for (pos, c) in ident.chars().enumerate() {
1033 match (pos, c) {
1034 (0, '-') | (0, '_') => builder.push('_'),
1035 (_, '_') => builder.push('-'),
1036 (_, c) => builder.push(c),
1037 }
1038 }
1039 builder.finish()
1040}
1041
#[test]
fn test_normalize_identifier() {
    // Leading `-`/`_` map to `_`; interior/trailing `_` maps to `-`;
    // everything else is unchanged.
    assert_eq!(normalize_identifier("true"), SmolStr::new("true"));
    assert_eq!(normalize_identifier("foo_bar"), SmolStr::new("foo-bar"));
    assert_eq!(normalize_identifier("-foo_bar"), SmolStr::new("_foo-bar"));
    assert_eq!(normalize_identifier("-foo-bar"), SmolStr::new("_foo-bar"));
    assert_eq!(normalize_identifier("foo_bar_"), SmolStr::new("foo-bar-"));
    assert_eq!(normalize_identifier("foo_bar-"), SmolStr::new("foo-bar-"));
    assert_eq!(normalize_identifier("_foo_bar_"), SmolStr::new("_foo-bar-"));
    assert_eq!(normalize_identifier("__1"), SmolStr::new("_-1"));
    assert_eq!(normalize_identifier("--1"), SmolStr::new("_-1"));
    assert_eq!(normalize_identifier("--1--"), SmolStr::new("_-1--"));
}
1055
/// Parse `source` into a syntax tree, reporting problems into
/// `build_diagnostics`. The (cleaned) `path` and the source text form the
/// `SourceFile` attached to the returned nodes.
pub fn parse(
    source: String,
    path: Option<&std::path::Path>,
    build_diagnostics: &mut BuildDiagnostics,
) -> SyntaxNode {
    let mut p = DefaultParser::new(&source, build_diagnostics);
    // Attach the source file before parsing so that diagnostics emitted
    // while parsing point into it.
    p.source_file = std::rc::Rc::new(crate::diagnostics::SourceFileInner::new(
        path.map(crate::pathutils::clean_path).unwrap_or_default(),
        source,
    ));
    document::parse_document(&mut p);
    SyntaxNode {
        node: rowan::SyntaxNode::new_root(p.builder.finish()),
        source_file: p.source_file.clone(),
    }
}
1073
/// Load and parse the file at `path`. Returns `None` (after pushing an
/// internal error diagnostic) when the file cannot be read.
pub fn parse_file<P: AsRef<std::path::Path>>(
    path: P,
    build_diagnostics: &mut BuildDiagnostics,
) -> Option<SyntaxNode> {
    let path = crate::pathutils::clean_path(path.as_ref());
    let source = crate::diagnostics::load_from_path(&path)
        .map_err(|d| build_diagnostics.push_internal_error(d))
        .ok()?;
    Some(parse(source, Some(path.as_ref()), build_diagnostics))
}
1084
1085pub fn parse_tokens(
1086 tokens: Vec<Token>,
1087 source_file: SourceFile,
1088 diags: &mut BuildDiagnostics,
1089) -> SyntaxNode {
1090 let mut p = DefaultParser::from_tokens(tokens, diags);
1091 document::parse_document(&mut p);
1092 SyntaxNode { node: rowan::SyntaxNode::new_root(p.builder.finish()), source_file }
1093}