1#![doc = include_str!("../README.md")]
2#![allow(clippy::new_without_default, clippy::too_many_lines)]
3#![warn(clippy::must_use_candidate)]
4
5mod block;
6mod comments;
7pub mod declarations;
8mod errors;
9pub mod expressions;
10mod extensions;
11pub mod functions;
12pub mod generator_helpers;
13mod lexer;
14pub mod marker;
15mod modules;
16pub mod number;
17pub mod options;
18pub mod property_key;
19pub mod statements;
20mod tokens;
21pub mod types;
22mod variable_fields;
23pub mod visiting;
24
25pub use block::{Block, BlockLike, BlockLikeMut, BlockOrSingleStatement, StatementOrDeclaration};
26pub use comments::WithComment;
27pub use declarations::Declaration;
28use functions::FunctionBody;
29pub use marker::Marker;
30
31pub use errors::{ParseError, ParseErrors, ParseResult};
32pub use expressions::{Expression, PropertyReference};
33pub use extensions::{
34 decorators::{Decorated, Decorator},
35 is_expression,
36 jsx::*,
37};
38pub use functions::{FunctionBase, FunctionBased, FunctionHeader};
39pub use generator_helpers::IntoAST;
40use iterator_endiate::EndiateIteratorExt;
41pub use lexer::{lex_script, LexerOptions};
42pub use modules::Module;
43pub use options::*;
44pub use property_key::PropertyKey;
45pub use source_map::{self, SourceId, Span};
46pub use statements::Statement;
47pub use tokens::{TSXKeyword, TSXToken};
48pub use types::{
49 type_annotations::{self, TypeAnnotation},
50 type_declarations::{self, TypeParameter},
51};
52pub use variable_fields::*;
53pub(crate) use visiting::{
54 Chain, ChainVariable, VisitOptions, Visitable, VisitorMutReceiver, VisitorReceiver,
55};
56
57use tokenizer_lib::{
58 sized_tokens::{SizedToken, TokenEnd},
59 Token, TokenReader,
60};
61
62pub(crate) use tokenizer_lib::sized_tokens::TokenStart;
63
64use crate::errors::parse_lexing_error;
65
66#[macro_use]
67extern crate macro_rules_attribute;
68
// Alias so AST item definitions can apply one `#[apply(derive_ASTNode)]`
// attribute instead of repeating these three conditional derives everywhere.
attribute_alias! {
	#[apply(derive_ASTNode!)] =
	#[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))]
	#[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))]
	#[cfg_attr(target_family = "wasm", derive(tsify::Tsify))];
}
77
/// Which quote character delimited a string literal in the source
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
#[apply(derive_ASTNode!)]
pub enum Quoted {
	Single,
	Double,
}
85
86impl Quoted {
87 fn as_char(self) -> char {
88 match self {
89 Quoted::Single => '\'',
90 Quoted::Double => '"',
91 }
92 }
93}
94
/// Per-call state threaded through `to_string_from_buffer` while serializing a tree
#[derive(Debug, Clone, Copy)]
pub struct LocalToStringInformation {
	// Source id the output is currently attributed to (changed by `change_source`)
	under: SourceId,
	// Current indentation depth
	depth: u8,
	// When false, length-based pretty-printing checks are skipped
	// (see `are_nodes_over_length`)
	should_try_pretty_print: bool,
}
101
102impl LocalToStringInformation {
103 #[must_use]
104 pub fn new_under(under: SourceId) -> Self {
105 Self { under, depth: 0, should_try_pretty_print: true }
106 }
107
108 pub(crate) fn next_level(self) -> Self {
109 Self {
110 under: self.under,
111 depth: self.depth + 1,
112 should_try_pretty_print: self.should_try_pretty_print,
113 }
114 }
115
116 pub(crate) fn change_source(self, new: SourceId) -> Self {
118 Self {
119 under: new,
120 depth: self.depth,
121 should_try_pretty_print: self.should_try_pretty_print,
122 }
123 }
124
125 pub(crate) fn do_not_pretty_print(self) -> Self {
127 Self { under: self.under, depth: self.depth, should_try_pretty_print: false }
128 }
129}
130
/// Implemented by every AST node kind: parsing from a string or token reader,
/// position querying and serializing back to a string
pub trait ASTNode: Sized + Clone + PartialEq + std::fmt::Debug + Sync + Send + 'static {
	/// Parses `script` as this node type using `options`
	fn from_string(script: String, options: ParseOptions) -> ParseResult<Self> {
		Self::from_string_with_options(script, options, None).map(|(ast, _)| ast)
	}

	/// As [`ASTNode::from_string`], but also returns the final [`ParsingState`]
	/// and allows offsetting all recorded positions by `offset`
	fn from_string_with_options(
		script: String,
		options: ParseOptions,
		offset: Option<u32>,
	) -> ParseResult<(Self, ParsingState)> {
		let line_starts = source_map::LineStarts::new(script.as_str());
		lex_and_parse_script(line_starts, options, &script, offset)
	}

	/// Position of this node in its original source
	fn get_position(&self) -> Span;

	/// Parses this node from a stream of tokens
	fn from_reader(
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		state: &mut crate::ParsingState,
		options: &crate::ParseOptions,
	) -> ParseResult<Self>;

	/// Serializes this node into `buf` according to `options`
	fn to_string_from_buffer<T: source_map::ToString>(
		&self,
		buf: &mut T,
		options: &crate::ToStringOptions,
		local: crate::LocalToStringInformation,
	);

	/// Convenience wrapper over [`ASTNode::to_string_from_buffer`] producing a
	/// plain `String` (source-map collection disabled, null source id)
	fn to_string(&self, options: &crate::ToStringOptions) -> String {
		let mut buf = source_map::StringWithOptionalSourceMap::new(false);
		let local = LocalToStringInformation::new_under(source_map::Nullable::NULL);
		self.to_string_from_buffer(&mut buf, options, local);
		buf.source
	}
}
174
/// Lexes and parses `script` concurrently: lexing runs on the calling thread
/// while a spawned "AST parsing" thread consumes tokens from the other end of
/// a parallel queue
#[cfg(not(target_arch = "wasm32"))]
#[doc(hidden)]
pub fn lex_and_parse_script<T: ASTNode>(
	line_starts: source_map::LineStarts,
	options: ParseOptions,
	script: &str,
	offset: Option<u32>,
) -> ParseResult<(T, ParsingState)> {
	let (mut sender, mut reader) =
		tokenizer_lib::ParallelTokenQueue::new_with_buffer_size(options.buffer_size);
	let lex_options = options.get_lex_options();

	// Sources longer than `u32::MAX` bytes are not supported
	#[allow(clippy::cast_possible_truncation)]
	let length_of_source = script.len() as u32;

	let mut thread = std::thread::Builder::new().name("AST parsing".into());
	if let Some(stack_size) = options.stack_size {
		thread = thread.stack_size(stack_size);
	}

	let parsing_thread = thread
		.spawn(move || {
			let mut state = ParsingState {
				line_starts,
				length_of_source,
				constant_imports: Default::default(),
				keyword_positions: options
					.record_keyword_positions
					.then_some(KeywordPositions::new()),
				partial_points: Default::default(),
			};
			let res = T::from_reader(&mut reader, &mut state, &options);
			if res.is_ok() {
				// The parser must have consumed every token on success
				reader.expect_next(TSXToken::EOS)?;
			}
			res.map(|res| (res, state))
		})
		.unwrap();

	let lex_result = lexer::lex_script(script, &mut sender, &lex_options, offset);
	if let Err((reason, pos)) = lex_result {
		// NOTE(review): on a lex error the parsing thread is not joined; it is
		// presumably unblocked and terminated once `sender` drops here — confirm
		return Err(ParseError::new(reason, pos));
	}
	drop(sender);
	parsing_thread.join().expect("Parsing panicked")
}
221
/// Single-threaded equivalent of the non-WASM `lex_and_parse_script`: lexes the
/// whole script into a buffered queue first, then parses from it
#[cfg(target_arch = "wasm32")]
#[doc(hidden)]
pub fn lex_and_parse_script<T: ASTNode>(
	line_starts: source_map::LineStarts,
	options: ParseOptions,
	script: &str,
	offset: Option<u32>,
) -> ParseResult<(T, ParsingState)> {
	let mut queue = tokenizer_lib::BufferedTokenQueue::new();
	let lex_result = lexer::lex_script(script, &mut queue, &options.get_lex_options(), offset);

	if let Err((reason, pos)) = lex_result {
		return Err(ParseError::new(reason, pos));
	}

	// Matches the cast (and lint allowance) in the threaded implementation:
	// sources longer than `u32::MAX` bytes are not supported
	#[allow(clippy::cast_possible_truncation)]
	let length_of_source = script.len() as u32;

	let mut state = ParsingState {
		line_starts,
		length_of_source,
		constant_imports: Default::default(),
		keyword_positions: options.record_keyword_positions.then_some(KeywordPositions::new()),
		partial_points: Default::default(),
	};
	let res = T::from_reader(&mut queue, &mut state, &options);
	if res.is_ok() {
		// The parser must have consumed every token on success
		queue.expect_next(TSXToken::EOS)?;
	}
	res.map(|res| (res, state))
}
251
/// Consumes the next token from `reader` and returns an `UnexpectedToken`
/// error naming it against `expected`. Panics if the reader is exhausted
/// (callers must already know a token is pending).
pub(crate) fn throw_unexpected_token<T>(
	reader: &mut impl TokenReader<TSXToken, TokenStart>,
	expected: &[TSXToken],
) -> ParseResult<T> {
	throw_unexpected_token_with_token(reader.next().unwrap(), expected)
}
258
259pub(crate) fn throw_unexpected_token_with_token<T>(
260 token: Token<TSXToken, TokenStart>,
261 expected: &[TSXToken],
262) -> ParseResult<T> {
263 let position = token.get_span();
264 Err(ParseError::new(ParseErrors::UnexpectedToken { expected, found: token.0 }, position))
265}
266
/// Mutable state shared across `ASTNode::from_reader` implementations during a parse
#[derive(Debug)]
pub struct ParsingState {
	pub line_starts: source_map::LineStarts,
	pub length_of_source: u32,
	// Presumably specifiers of imports resolved as constant — TODO confirm
	// against the modules implementation
	pub constant_imports: Vec<String>,
	// `(start, keyword)` records; only `Some` when
	// `ParseOptions::record_keyword_positions` is enabled
	pub keyword_positions: Option<KeywordPositions>,
	// Positions where partial-syntax markers were created
	// (see `new_partial_point_marker`)
	pub partial_points: Vec<TokenStart>,
}
276
impl ParsingState {
	/// Expects the next token to be keyword `kw`, recording its position when
	/// keyword recording is enabled. Returns the keyword's start.
	pub(crate) fn expect_keyword(
		&mut self,
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		kw: TSXKeyword,
	) -> crate::ParseResult<TokenStart> {
		let start = reader.expect_next(TSXToken::Keyword(kw))?;
		self.append_keyword_at_pos(start.0, kw);
		Ok(start)
	}

	/// Consumes keyword `kw` only if it is the next token; returns its full
	/// span when taken, `None` (consuming nothing) otherwise
	pub(crate) fn optionally_expect_keyword(
		&mut self,
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		kw: TSXKeyword,
	) -> Option<Span> {
		if let Some(Token(t, start)) = reader.conditional_next(|t| *t == TSXToken::Keyword(kw)) {
			self.append_keyword_at_pos(start.0, kw);
			Some(start.with_length(t.length() as usize))
		} else {
			None
		}
	}

	/// Like [`Self::expect_keyword`] but returns the keyword's whole span
	/// rather than just its start position
	pub(crate) fn expect_keyword_get_full_span(
		&mut self,
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		kw: TSXKeyword,
	) -> crate::ParseResult<Span> {
		let start = reader.expect_next(TSXToken::Keyword(kw))?;
		self.append_keyword_at_pos(start.0, kw);
		Ok(start.with_length(kw.length() as usize))
	}

	// Records the keyword position, a no-op unless recording was requested
	fn append_keyword_at_pos(&mut self, start: u32, kw: TSXKeyword) {
		if let Some(ref mut keyword_positions) = self.keyword_positions {
			keyword_positions.0.push((start, kw));
		}
	}

	/// Registers a partial-syntax point at `at` and returns a marker for it.
	/// Panics once more than 256 markers exist (marker ids are `u8`).
	fn new_partial_point_marker<T>(&mut self, at: source_map::Start) -> Marker<T> {
		let id = self.partial_points.len();
		self.partial_points.push(at);
		Marker(u8::try_from(id).expect("more than 256 markers"), Default::default())
	}
}
323
/// `(start offset, keyword)` pairs pushed in lexing order, so sorted ascending
/// by offset — `try_get_keyword_at_position` relies on this ordering
#[derive(Debug)]
pub struct KeywordPositions(Vec<(u32, TSXKeyword)>);
327
328impl KeywordPositions {
329 #[must_use]
330 #[allow(clippy::cast_possible_truncation)]
331 pub fn try_get_keyword_at_position(&self, pos: u32) -> Option<TSXKeyword> {
332 let mut l: u32 = 0;
334 let mut r: u32 = self.0.len() as u32 - 1u32;
335 while l <= r {
336 let m = (l + r) >> 1;
337 let (kw_pos, kw) = self.0[m as usize];
338 if kw_pos <= pos && pos < (kw_pos + kw.length()) {
339 return Some(kw);
340 } else if pos > kw_pos {
341 l = m + 1;
342 } else if pos < kw_pos {
343 r = m - 1;
344 }
345 }
346 None
347 }
348
349 fn new() -> Self {
350 Self(Default::default())
351 }
352}
353
/// Implemented by the two "position" marker types ([`StatementPosition`] and
/// [`ExpressionPosition`]) so functions/classes can be generic over whether
/// they appear as a statement (name required) or an expression (name optional)
pub trait ExpressionOrStatementPosition:
	Clone + std::fmt::Debug + Sync + Send + PartialEq + 'static
{
	/// Body type used by functions in this position
	type FunctionBody: ASTNode;

	/// Parses the (possibly optional) name for this position
	fn from_reader(
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		state: &mut crate::ParsingState,
		options: &ParseOptions,
	) -> ParseResult<Self>;

	fn as_option_variable_identifier(&self) -> Option<&VariableIdentifier>;

	fn as_option_variable_identifier_mut(&mut self) -> Option<&mut VariableIdentifier>;

	/// The name as a string slice, if a name exists
	fn as_option_str(&self) -> Option<&str> {
		if let Some(identifier) = self.as_option_variable_identifier() {
			identifier.as_option_str()
		} else {
			None
		}
	}

	/// Whether `body` actually contains statements (as opposed to a bodiless form)
	fn has_function_body(body: &Self::FunctionBody) -> bool;

	/// Whether this item is under a `declare` modifier
	fn is_declare(&self) -> bool;
}
383
/// Marker for items in statement position: the name is required and the item
/// may carry a `declare` modifier
#[derive(Debug, PartialEq, Clone)]
#[apply(derive_ASTNode)]
pub struct StatementPosition {
	pub identifier: VariableIdentifier,
	pub is_declare: bool,
}
390
391impl ExpressionOrStatementPosition for StatementPosition {
392 type FunctionBody = FunctionBody;
393
394 fn from_reader(
395 reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
396 state: &mut crate::ParsingState,
397 options: &ParseOptions,
398 ) -> ParseResult<Self> {
399 VariableIdentifier::from_reader(reader, state, options)
400 .map(|identifier| Self { identifier, is_declare: false })
401 }
402
403 fn as_option_variable_identifier(&self) -> Option<&VariableIdentifier> {
404 Some(&self.identifier)
405 }
406
407 fn as_option_variable_identifier_mut(&mut self) -> Option<&mut VariableIdentifier> {
408 Some(&mut self.identifier)
409 }
410
411 fn has_function_body(body: &Self::FunctionBody) -> bool {
412 body.0.is_some()
413 }
414
415 fn is_declare(&self) -> bool {
416 self.is_declare
417 }
418}
419
/// Marker for items in expression position: the name is optional
/// (e.g. anonymous function or class expressions)
#[derive(Debug, PartialEq, Clone)]
#[apply(derive_ASTNode)]
pub struct ExpressionPosition(pub Option<VariableIdentifier>);
423
424impl ExpressionOrStatementPosition for ExpressionPosition {
425 type FunctionBody = Block;
426
427 fn from_reader(
428 reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
429 state: &mut crate::ParsingState,
430 options: &ParseOptions,
431 ) -> ParseResult<Self> {
432 if let Some(Token(
433 TSXToken::OpenBrace
434 | TSXToken::OpenParentheses
435 | TSXToken::Keyword(TSXKeyword::Extends),
436 _,
437 ))
438 | None = reader.peek()
439 {
440 Ok(Self(None))
441 } else {
442 Ok(Self(Some(VariableIdentifier::from_reader(reader, state, options)?)))
443 }
444 }
445
446 fn as_option_variable_identifier(&self) -> Option<&VariableIdentifier> {
447 self.0.as_ref()
448 }
449
450 fn as_option_variable_identifier_mut(&mut self) -> Option<&mut VariableIdentifier> {
451 self.0.as_mut()
452 }
453
454 fn has_function_body(_: &Self::FunctionBody) -> bool {
455 true
456 }
457
458 fn is_declare(&self) -> bool {
459 false
460 }
461}
462
/// Implemented by nodes that can appear in comma-separated bracketed lists
/// (consumed by `parse_bracketed`)
pub trait ListItem: Sized {
	/// Type of a special trailing item that differs from regular items
	/// (e.g. a rest element) — unused by most implementors
	type LAST;
	/// Token that introduces the special last item, if any
	const LAST_PREFIX: Option<TSXToken> = None;
	/// Value recorded for elided entries — presumably holes such as `[, ,]`;
	/// `None` means elision is not allowed. TODO confirm with implementors.
	const EMPTY: Option<Self> = None;

	#[allow(unused)]
	fn parse_last_item(
		reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
		state: &mut crate::ParsingState,
		options: &ParseOptions,
	) -> ParseResult<Self::LAST> {
		// Only reachable if an implementor sets `LAST_PREFIX` without
		// overriding this method
		unreachable!("ListItem::LAST != ASTNode")
	}
}
477
/// Parses a comma-separated bracketed list of `T` (array elements, parameters,
/// type arguments, …). When `start` is `Some` that opening token is expected
/// first; parsing stops at `end`. Returns the items, an optional special last
/// item (see [`ListItem::LAST_PREFIX`]) and the position just after `end`.
pub(crate) fn parse_bracketed<T: ASTNode + ListItem>(
	reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
	state: &mut crate::ParsingState,
	options: &ParseOptions,
	start: Option<TSXToken>,
	end: TSXToken,
) -> ParseResult<(Vec<T>, Option<T::LAST>, TokenEnd)> {
	if let Some(start) = start {
		let _ = reader.expect_next(start)?;
	}
	let mut nodes: Vec<T> = Vec::new();
	loop {
		if let Some(empty) = T::EMPTY {
			// Lists supporting elision: a bare comma — or the closing token
			// after at least one item — records an `empty` entry
			let Token(next, _) = reader.peek().ok_or_else(parse_lexing_error)?;
			if matches!(next, TSXToken::Comma) || *next == end {
				if matches!(next, TSXToken::Comma) || (*next == end && !nodes.is_empty()) {
					nodes.push(empty);
				}
				let Token(token, s) = reader.next().unwrap();
				if token == end {
					return Ok((nodes, None, s.get_end_after(token.length() as usize)));
				}
				continue;
			}
		} else if let Some(token) = reader.conditional_next(|token| *token == end) {
			// Empty list, or close token directly after a separator
			return Ok((nodes, None, token.get_end()));
		}

		// Special trailing item announced by `LAST_PREFIX` (e.g. a rest element)
		if T::LAST_PREFIX.is_some_and(|l| reader.peek().is_some_and(|Token(token, _)| *token == l))
		{
			let last = T::parse_last_item(reader, state, options)?;
			let len = end.length() as usize;
			let end = reader.expect_next(end)?.get_end_after(len);
			return Ok((nodes, Some(last), end));
		}

		let node = T::from_reader(reader, state, options)?;
		nodes.push(node);

		// After an item: a comma continues the list, the close token finishes
		// it, anything else is an error
		match reader.next().ok_or_else(errors::parse_lexing_error)? {
			Token(TSXToken::Comma, _) => {}
			token => {
				if token.0 == end {
					let get_end = token.get_end();
					return Ok((nodes, None, get_end));
				}
				let position = token.get_span();
				return Err(ParseError::new(
					crate::ParseErrors::UnexpectedToken {
						expected: &[end, TSXToken::Comma],
						found: token.0,
					},
					position,
				));
			}
		}
	}
}
539
/// Returns an iterator of `(text, is_token)` pairs covering `source`: `true`
/// entries are lexed tokens, `false` entries the text between them. Lexing
/// runs on a background thread; lexing errors are silently ignored (the
/// iterator simply ends early).
#[cfg(not(target_arch = "wasm32"))]
pub fn script_to_tokens(source: String) -> impl Iterator<Item = (String, bool)> + 'static {
	let (mut sender, reader) = tokenizer_lib::ParallelTokenQueue::new();
	// The lexer thread needs its own copy; `source` is kept for slicing below
	let input = source.clone();
	let _lexing_thread = std::thread::spawn(move || {
		let _lex_script = lexer::lex_script(&input, &mut sender, &Default::default(), None);
		drop(sender);
	});

	receiver_to_tokens(reader, source)
}
553
/// Returns an iterator of `(text, is_token)` pairs covering `source` — see the
/// non-WASM variant. No threads on WASM: everything is lexed up front into a
/// buffered queue and then walked lazily. Lexing errors are ignored.
#[cfg(target_arch = "wasm32")]
pub fn script_to_tokens(source: String) -> impl Iterator<Item = (String, bool)> + 'static {
	let mut queue = tokenizer_lib::BufferedTokenQueue::new();
	let _ = lexer::lex_script(&source, &mut queue, &Default::default(), None);
	receiver_to_tokens(queue, source)
}
563
/// Drives `receiver` until EOS, yielding `(text, is_token)` pairs that cover
/// `input`: `true` for slices belonging to lexed tokens, `false` for the text
/// between them (whitespace, comments, …)
fn receiver_to_tokens(
	mut receiver: impl TokenReader<TSXToken, TokenStart> + 'static,
	input: String,
) -> impl Iterator<Item = (String, bool)> + 'static {
	// End offset of the previously yielded slice
	let mut last = 0u32;
	// Stashes a token slice while the gap preceding it is emitted first
	let mut last_section = None;
	std::iter::from_fn(move || {
		if last_section.is_some() {
			return last_section.take();
		}

		let token = receiver.next()?;
		if matches!(token.0, TSXToken::EOS) {
			return None;
		}
		let span = token.get_span();
		let start = span.start;
		// "?" substitutes for spans that are out of bounds or not on char boundaries
		let section = (input.get(std::ops::Range::from(span)).unwrap_or("?").to_owned(), true);
		if last == start {
			last = span.end;
			Some(section)
		} else {
			// A gap precedes this token: yield the gap now (marked `false`)
			// and hold the token slice for the next call
			last_section = Some(section);
			let token = input.get((last as usize)..(start as usize)).unwrap_or("?").to_owned();
			last = span.end;
			Some((token, false))
		}
	})
}
594
595pub(crate) fn to_string_bracketed<T: source_map::ToString, U: ASTNode>(
597 nodes: &[U],
598 (left_bracket, right_bracket): (char, char),
599 buf: &mut T,
600 options: &crate::ToStringOptions,
601 local: crate::LocalToStringInformation,
602) {
603 const MAX_INLINE_OBJECT_LITERAL: u32 = 40;
604 let large =
605 are_nodes_over_length(nodes.iter(), options, local, Some(MAX_INLINE_OBJECT_LITERAL), true);
606
607 buf.push(left_bracket);
608 let inner_local = if large {
609 local.next_level()
610 } else {
611 if left_bracket == '{' {
612 options.push_gap_optionally(buf);
613 }
614 local
615 };
616 for (at_end, node) in nodes.iter().endiate() {
617 if large {
618 buf.push_new_line();
619 options.add_indent(inner_local.depth, buf);
620 }
621 node.to_string_from_buffer(buf, options, inner_local);
622 if !at_end {
623 buf.push(',');
624 options.push_gap_optionally(buf);
625 }
626 }
627 if large {
628 buf.push_new_line();
629 options.add_indent(local.depth, buf);
630 } else if left_bracket == '{' {
631 options.push_gap_optionally(buf);
632 }
633 buf.push(right_bracket);
634}
635
/// Checks a statement is correctly terminated: by an explicit `;`, implicitly
/// before `}` / end-of-source / a comment, or (ASI-style) by a line break.
/// Returns a count derived from the line gap after the statement — presumably
/// the number of blank lines to retain in output; TODO confirm at call sites.
pub(crate) fn expect_semi_colon(
	reader: &mut impl TokenReader<TSXToken, crate::TokenStart>,
	line_starts: &source_map::LineStarts,
	statement_end: u32,
	options: &ParseOptions,
) -> ParseResult<usize> {
	if let Some(token) = reader.peek() {
		let Token(kind, start) = token;

		if let TSXToken::CloseBrace
		| TSXToken::EOS
		| TSXToken::Comment(..)
		| TSXToken::MultiLineComment(..) = kind
		{
			// The semicolon may be elided before these tokens; count the line
			// gap (minus one: directly adjacent lines are not a blank gap)
			Ok(line_starts
				.byte_indexes_crosses_lines(statement_end as usize, start.0 as usize + 1)
				.saturating_sub(1))
		} else if let TSXToken::SemiColon = kind {
			// Explicit semicolon: consume it, then measure the gap to the next token
			let Token(_, semicolon_end) = reader.next().unwrap();
			let Token(kind, next) = reader.peek().ok_or_else(parse_lexing_error)?;
			if options.retain_blank_lines {
				let byte_indexes_crosses_lines = line_starts
					.byte_indexes_crosses_lines(semicolon_end.0 as usize, next.0 as usize + 1);

				// At EOS the gap is not reduced by one — TODO confirm why
				if let TSXToken::EOS = kind {
					Ok(byte_indexes_crosses_lines)
				} else {
					Ok(byte_indexes_crosses_lines.saturating_sub(1))
				}
			} else {
				Ok(0)
			}
		} else {
			// No semicolon and not an elision point: only valid if the next
			// token sits on a later line (ASI) or partial syntax is enabled
			let line_difference = line_starts
				.byte_indexes_crosses_lines(statement_end as usize, start.0 as usize + 1);
			if line_difference == 0 {
				if options.partial_syntax {
					Ok(0)
				} else {
					throw_unexpected_token(reader, &[TSXToken::SemiColon])
				}
			} else {
				Ok(line_difference - 1)
			}
		}
	} else {
		Ok(0)
	}
}
689
/// The three variable declaration keywords
#[derive(Debug, Clone, PartialEq, Eq)]
#[apply(derive_ASTNode)]
pub enum VariableKeyword {
	Const,
	Let,
	Var,
}
697
698impl VariableKeyword {
699 #[must_use]
700 pub fn is_token_variable_keyword(token: &TSXToken) -> bool {
701 matches!(token, TSXToken::Keyword(TSXKeyword::Const | TSXKeyword::Let | TSXKeyword::Var))
702 }
703
704 pub(crate) fn from_reader(token: Token<TSXToken, crate::TokenStart>) -> ParseResult<Self> {
705 match token {
706 Token(TSXToken::Keyword(TSXKeyword::Const), _) => Ok(Self::Const),
707 Token(TSXToken::Keyword(TSXKeyword::Let), _) => Ok(Self::Let),
708 Token(TSXToken::Keyword(TSXKeyword::Var), _) => Ok(Self::Var),
709 token => crate::throw_unexpected_token_with_token(
710 token,
711 &[
712 TSXToken::Keyword(TSXKeyword::Const),
713 TSXToken::Keyword(TSXKeyword::Let),
714 TSXToken::Keyword(TSXKeyword::Var),
715 ],
716 ),
717 }
718 }
719
720 #[must_use]
721 pub fn as_str(&self) -> &str {
722 match self {
723 Self::Const => "const ",
724 Self::Let => "let ",
725 Self::Var => "var ",
726 }
727 }
728}
729
730pub fn are_nodes_over_length<'a, T: ASTNode>(
735 nodes: impl ExactSizeIterator<Item = &'a T>,
736 options: &ToStringOptions,
737 local: crate::LocalToStringInformation,
738 available_space: Option<u32>,
740 total: bool,
742) -> bool {
743 if options.enforce_limit_length_limit() && local.should_try_pretty_print {
744 let room = available_space.map_or(options.max_line_length as usize, |s| s as usize);
745 let mut buf = source_map::StringWithOptionalSourceMap {
746 source: String::new(),
747 source_map: None,
748 quit_after: Some(room),
749 since_new_line: nodes.len().try_into().expect("4 billion nodes ?"),
751 };
752
753 for node in nodes {
754 node.to_string_from_buffer(&mut buf, options, local);
755
756 let length = if total { buf.source.len() } else { buf.since_new_line as usize };
757 let is_over = length > room;
758 if is_over {
759 return is_over;
760 }
761 }
762 false
763 } else {
764 false
765 }
766}
767
/// Flat re-exports of the common AST node types, for convenient glob
/// importing (`use …::ast::*`) by consumers of this crate
pub mod ast {
	pub use crate::{
		declarations::classes::*,
		declarations::*,
		expressions::*,
		extensions::jsx::*,
		functions::{
			FunctionBase, FunctionBody, FunctionHeader, FunctionParameters, MethodHeader,
			Parameter, ParameterData, SpreadParameter,
		},
		number::NumberRepresentation,
		statements::*,
		Block, Decorated, ExpressionPosition, PropertyKey, StatementOrDeclaration,
		StatementPosition, VariableField, VariableIdentifier, WithComment,
	};

	pub use source_map::{BaseSpan, SourceId};

	pub use self::assignments::{LHSOfAssignment, VariableOrPropertyAccess};
}
789
#[cfg(test)]
#[doc(hidden)]
pub(crate) mod test_utils {
	/// Parses `$source` as the inferred `ASTNode` type and asserts the result
	/// matches `$ast_pattern` (via `match_deref`, so boxed nodes can be
	/// matched through their contents). Panics with a debug dump on mismatch.
	#[macro_export]
	#[allow(clippy::crate_in_macro_def)]
	macro_rules! assert_matches_ast {
		($source:literal, $ast_pattern:pat) => {{
			let node = crate::ASTNode::from_string($source.to_owned(), Default::default()).unwrap();
			let matches = ::match_deref::match_deref! {
				match &node {
					$ast_pattern => true,
					_ => false,
				}
			};

			if !matches {
				panic!("{:#?} did not match {}", node, stringify!($ast_pattern));
			}
		}};
	}

	/// Shorthand pattern for a `Span` with the given start and end
	/// (source id ignored)
	#[macro_export]
	#[allow(clippy::crate_in_macro_def)]
	macro_rules! span {
		($start:pat, $end:pat) => {
			crate::Span { start: $start, end: $end, .. }
		};
	}
}
819}