sqlparser/parser/mod.rs

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! SQL Parser

#[cfg(not(feature = "std"))]
use alloc::{
    boxed::Box,
    format,
    string::{String, ToString},
    vec,
    vec::Vec,
};
use core::{
    fmt::{self, Display},
    str::FromStr,
};
use helpers::attached_token::AttachedToken;

use log::debug;

use recursion::RecursionCounter;
use IsLateral::*;
use IsOptional::*;

use crate::ast::helpers::stmt_create_table::{CreateTableBuilder, CreateTableConfiguration};
use crate::ast::Statement::CreatePolicy;
use crate::ast::*;
use crate::dialect::*;
use crate::keywords::{Keyword, ALL_KEYWORDS};
use crate::tokenizer::*;

mod alter;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParserError {
    TokenizerError(String),
    ParserError(String),
    RecursionLimitExceeded,
}

// Use `Parser::expected` instead, if possible
macro_rules! parser_err {
    ($MSG:expr, $loc:expr) => {
        Err(ParserError::ParserError(format!("{}{}", $MSG, $loc)))
    };
}

#[cfg(feature = "std")]
/// Implementation of [`RecursionCounter`] if std is available
mod recursion {
    use std::cell::Cell;
    use std::rc::Rc;

    use super::ParserError;

    /// Tracks remaining recursion depth. This value is decremented on
    /// each call to [`RecursionCounter::try_decrease()`]; when it reaches 0 an error will
    /// be returned.
    ///
    /// Note: Uses an [`std::rc::Rc`] and [`std::cell::Cell`] in order to satisfy the Rust
    /// borrow checker, so the automatic [`DepthGuard`] can hold a
    /// reference to the counter and restore the depth when it is dropped.
    ///
    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub(crate) struct RecursionCounter {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl RecursionCounter {
        /// Creates a [`RecursionCounter`] with the specified maximum
        /// depth
        pub fn new(remaining_depth: usize) -> Self {
            Self {
                remaining_depth: Rc::new(remaining_depth.into()),
            }
        }

        /// Decreases the remaining depth by 1.
        ///
        /// Returns [`Err`] if the remaining depth falls to 0.
        ///
        /// Returns a [`DepthGuard`] which adds 1 to the
        /// remaining depth upon drop.
        pub fn try_decrease(&self) -> Result<DepthGuard, ParserError> {
            let old_value = self.remaining_depth.get();
            // ran out of space
            if old_value == 0 {
                Err(ParserError::RecursionLimitExceeded)
            } else {
                self.remaining_depth.set(old_value - 1);
                Ok(DepthGuard::new(Rc::clone(&self.remaining_depth)))
            }
        }
    }

    /// Guard that increases the remaining depth by 1 on drop
    pub struct DepthGuard {
        remaining_depth: Rc<Cell<usize>>,
    }

    impl DepthGuard {
        fn new(remaining_depth: Rc<Cell<usize>>) -> Self {
            Self { remaining_depth }
        }
    }
    impl Drop for DepthGuard {
        fn drop(&mut self) {
            let old_value = self.remaining_depth.get();
            self.remaining_depth.set(old_value + 1);
        }
    }
}

#[cfg(not(feature = "std"))]
mod recursion {
    /// Implementation of [`RecursionCounter`] if std is NOT available (and does not
    /// guard against stack overflow).
    ///
    /// Has the same API as the std [`RecursionCounter`] implementation
    /// but does not actually limit stack depth.
    pub(crate) struct RecursionCounter {}

    impl RecursionCounter {
        pub fn new(_remaining_depth: usize) -> Self {
            Self {}
        }
        pub fn try_decrease(&self) -> Result<DepthGuard, super::ParserError> {
            Ok(DepthGuard {})
        }
    }

    pub struct DepthGuard {}
}

#[derive(PartialEq, Eq)]
pub enum IsOptional {
    Optional,
    Mandatory,
}

pub enum IsLateral {
    Lateral,
    NotLateral,
}

pub enum WildcardExpr {
    Expr(Expr),
    QualifiedWildcard(ObjectName),
    Wildcard,
}

impl From<TokenizerError> for ParserError {
    fn from(e: TokenizerError) -> Self {
        ParserError::TokenizerError(e.to_string())
    }
}

impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "sql parser error: {}",
            match self {
                ParserError::TokenizerError(s) => s,
                ParserError::ParserError(s) => s,
                ParserError::RecursionLimitExceeded => "recursion limit exceeded",
            }
        )
    }
}

#[cfg(feature = "std")]
impl std::error::Error for ParserError {}

// By default, allow expressions nested up to this depth before erroring
const DEFAULT_REMAINING_DEPTH: usize = 50;

// A constant EOF token that can be referenced.
const EOF_TOKEN: TokenWithSpan = TokenWithSpan {
    token: Token::EOF,
    span: Span {
        start: Location { line: 0, column: 0 },
        end: Location { line: 0, column: 0 },
    },
};

/// Composite type declarations using angle bracket syntax can be arbitrarily
/// nested, such that the following declaration is possible:
///      `ARRAY<ARRAY<INT>>`
/// But the tokenizer recognizes the `>>` as a ShiftRight token.
/// We work around that limitation when parsing a data type by accepting
/// either a `>` or `>>` token in such cases, remembering which variant we
/// matched.
/// In the latter case, having matched a `>>`, the parent type will not look to
/// match its closing `>` since that will already have been consumed while
/// parsing the child type.
///
/// See [Parser::parse_data_type] for details
struct MatchedTrailingBracket(bool);

impl From<bool> for MatchedTrailingBracket {
    fn from(value: bool) -> Self {
        Self(value)
    }
}

/// Options that control how the [`Parser`] parses SQL text
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParserOptions {
    pub trailing_commas: bool,
    /// Controls how literal values are unescaped. See
    /// [`Tokenizer::with_unescape`] for more details.
    pub unescape: bool,
    /// Controls if the parser expects a semi-colon token
    /// between statements. Default is `true`.
    pub require_semicolon_stmt_delimiter: bool,
}

impl Default for ParserOptions {
    fn default() -> Self {
        Self {
            trailing_commas: false,
            unescape: true,
            require_semicolon_stmt_delimiter: true,
        }
    }
}

impl ParserOptions {
    /// Create a new [`ParserOptions`]
    pub fn new() -> Self {
        Default::default()
    }

    /// Set if trailing commas are allowed.
    ///
    /// If this option is `false` (the default), the following SQL will
    /// not parse. If the option is `true`, the SQL will parse.
    ///
    /// ```sql
    ///  SELECT
    ///   foo,
    ///   bar,
    ///  FROM baz
    /// ```
    pub fn with_trailing_commas(mut self, trailing_commas: bool) -> Self {
        self.trailing_commas = trailing_commas;
        self
    }

    /// Set if literal values are unescaped. Defaults to true. See
    /// [`Tokenizer::with_unescape`] for more details.
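    ///
    /// For example (assuming standard single-quote escaping), with
    /// unescaping enabled the string literal in
    ///
    /// ```sql
    /// SELECT 'It''s'
    /// ```
    ///
    /// is parsed to the value `It's`, while with unescaping disabled the
    /// raw text `It''s` is preserved.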
    pub fn with_unescape(mut self, unescape: bool) -> Self {
        self.unescape = unescape;
        self
    }
}

#[derive(Copy, Clone)]
enum ParserState {
    /// The default state of the parser.
    Normal,
    /// The state when parsing a CONNECT BY expression. This allows parsing
    /// PRIOR expressions while still allowing prior as an identifier name
    /// in other contexts.
    ConnectBy,
}

/// A SQL Parser
///
/// This struct is the main entry point for parsing SQL queries.
///
/// # Functionality:
/// * Parsing SQL: see examples on [`Parser::new`] and [`Parser::parse_sql`]
/// * Controlling recursion: See [`Parser::with_recursion_limit`]
/// * Controlling parser options: See [`Parser::with_options`]
/// * Providing your own tokens: See [`Parser::with_tokens`]
///
/// # Internals
///
/// The parser uses a [`Tokenizer`] to tokenize the input SQL string into a
/// `Vec` of [`TokenWithSpan`]s and maintains an `index` to the current token
/// being processed. The token vec may contain multiple SQL statements.
///
/// * The "current" token is the token at `index - 1`
/// * The "next" token is the token at `index`
/// * The "previous" token is the token at `index - 2`
///
/// If `index` is equal to the length of the token stream, the 'next' token is
/// [`Token::EOF`].
///
/// For example, the SQL string "SELECT * FROM foo" will be tokenized into
/// following tokens:
/// ```text
///  [
///    "SELECT", // token index 0
///    " ",      // whitespace
///    "*",
///    " ",
///    "FROM",
///    " ",
///    "foo"
///   ]
/// ```
///
///
pub struct Parser<'a> {
    /// The tokens
    tokens: Vec<TokenWithSpan>,
    /// The index of the first unprocessed token in [`Parser::tokens`].
    index: usize,
    /// The current state of the parser.
    state: ParserState,
    /// The SQL dialect to use.
    dialect: &'a dyn Dialect,
    /// Additional options that allow you to mix & match behavior
    /// otherwise constrained to certain dialects (e.g. trailing
    /// commas) and/or format of parse (e.g. unescaping).
    options: ParserOptions,
    /// Ensures the stack does not overflow by limiting recursion depth.
    recursion_counter: RecursionCounter,
}

impl<'a> Parser<'a> {
    /// Create a parser for a [`Dialect`]
    ///
    /// See also [`Parser::parse_sql`]
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   .try_with_sql("SELECT * FROM foo")?
    ///   .parse_statements()?;
    /// # Ok(())
    /// # }
    /// ```
    pub fn new(dialect: &'a dyn Dialect) -> Self {
        Self {
            tokens: vec![],
            index: 0,
            state: ParserState::Normal,
            dialect,
            recursion_counter: RecursionCounter::new(DEFAULT_REMAINING_DEPTH),
            options: ParserOptions::new().with_trailing_commas(dialect.supports_trailing_commas()),
        }
    }

    /// Specify the maximum recursion limit while parsing.
    ///
    /// [`Parser`] prevents stack overflows by returning
    /// [`ParserError::RecursionLimitExceeded`] if the parser exceeds
    /// this depth while processing the query.
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let result = Parser::new(&dialect)
    ///   .with_recursion_limit(1)
    ///   .try_with_sql("SELECT * FROM foo WHERE (a OR (b OR (c OR d)))")?
    ///   .parse_statements();
    ///   assert_eq!(result, Err(ParserError::RecursionLimitExceeded));
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// Note: when "recursive-protection" feature is enabled, this crate uses additional stack overflow protection
    /// for some of its recursive methods. See [`recursive::recursive`] for more information.
    pub fn with_recursion_limit(mut self, recursion_limit: usize) -> Self {
        self.recursion_counter = RecursionCounter::new(recursion_limit);
        self
    }

    /// Specify additional parser options
    ///
    /// [`Parser`] supports additional options ([`ParserOptions`])
    /// that allow you to mix & match behavior otherwise constrained
    /// to certain dialects (e.g. trailing commas).
    ///
    /// Example:
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError, ParserOptions}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let options = ParserOptions::new()
    ///    .with_trailing_commas(true)
    ///    .with_unescape(false);
    /// let result = Parser::new(&dialect)
    ///   .with_options(options)
    ///   .try_with_sql("SELECT a, b, COUNT(*), FROM foo GROUP BY a, b,")?
    ///   .parse_statements();
    ///   assert!(matches!(result, Ok(_)));
    /// # Ok(())
    /// # }
    /// ```
    pub fn with_options(mut self, options: ParserOptions) -> Self {
        self.options = options;
        self
    }

    /// Reset this parser to parse the specified token stream
    pub fn with_tokens_with_locations(mut self, tokens: Vec<TokenWithSpan>) -> Self {
        self.tokens = tokens;
        self.index = 0;
        self
    }

    /// Reset this parser state to parse the specified tokens
    pub fn with_tokens(self, tokens: Vec<Token>) -> Self {
        // Put in dummy locations
        let tokens_with_locations: Vec<TokenWithSpan> = tokens
            .into_iter()
            .map(|token| TokenWithSpan {
                token,
                span: Span::empty(),
            })
            .collect();
        self.with_tokens_with_locations(tokens_with_locations)
    }

    /// Tokenize the SQL string and set this [`Parser`]'s state to
    /// parse the resulting tokens
    ///
    /// Returns an error if there was an error tokenizing the SQL string.
    ///
    /// See example on [`Parser::new()`] for an example
    pub fn try_with_sql(self, sql: &str) -> Result<Self, ParserError> {
        debug!("Parsing sql '{sql}'...");
        let tokens = Tokenizer::new(self.dialect, sql)
            .with_unescape(self.options.unescape)
            .tokenize_with_location()?;
        Ok(self.with_tokens_with_locations(tokens))
    }

    /// Parse potentially multiple statements
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::new(&dialect)
    ///   // Parse a SQL string with 2 separate statements
    ///   .try_with_sql("SELECT * FROM foo; SELECT * FROM bar;")?
    ///   .parse_statements()?;
    /// assert_eq!(statements.len(), 2);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_statements(&mut self) -> Result<Vec<Statement>, ParserError> {
        let mut stmts = Vec::new();
        let mut expecting_statement_delimiter = false;
        loop {
            // ignore empty statements (between successive statement delimiters)
            while self.consume_token(&Token::SemiColon) {
                expecting_statement_delimiter = false;
            }

            if !self.options.require_semicolon_stmt_delimiter {
                expecting_statement_delimiter = false;
            }

            match self.peek_token().token {
                Token::EOF => break,

                // end of statement
                Token::Word(word) => {
                    if expecting_statement_delimiter && word.keyword == Keyword::END {
                        break;
                    }
                }
                _ => {}
            }

            if expecting_statement_delimiter {
                return self.expected("end of statement", self.peek_token());
            }

            let statement = self.parse_statement()?;
            stmts.push(statement);
            expecting_statement_delimiter = true;
        }
        Ok(stmts)
    }

    /// Convenience method to parse a string with one or more SQL
    /// statements into an Abstract Syntax Tree (AST).
    ///
    /// Example
    /// ```
    /// # use sqlparser::{parser::{Parser, ParserError}, dialect::GenericDialect};
    /// # fn main() -> Result<(), ParserError> {
    /// let dialect = GenericDialect{};
    /// let statements = Parser::parse_sql(
    ///   &dialect, "SELECT * FROM foo"
    /// )?;
    /// assert_eq!(statements.len(), 1);
    /// # Ok(())
    /// # }
    /// ```
    pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
        Parser::new(dialect).try_with_sql(sql)?.parse_statements()
    }

    /// Parse a single top-level statement (such as SELECT, INSERT, CREATE, etc.),
    /// stopping before the statement separator, if any.
    pub fn parse_statement(&mut self) -> Result<Statement, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;

        // allow the dialect to override statement parsing
        if let Some(statement) = self.dialect.parse_statement(self) {
            return statement;
        }

        let next_token = self.next_token();
        match &next_token.token {
            Token::Word(w) => match w.keyword {
                Keyword::KILL => self.parse_kill(),
                Keyword::FLUSH => self.parse_flush(),
                Keyword::DESC => self.parse_explain(DescribeAlias::Desc),
                Keyword::DESCRIBE => self.parse_explain(DescribeAlias::Describe),
                Keyword::EXPLAIN => self.parse_explain(DescribeAlias::Explain),
                Keyword::ANALYZE => self.parse_analyze(),
                Keyword::CASE => {
                    self.prev_token();
                    self.parse_case_stmt()
                }
                Keyword::IF => {
                    self.prev_token();
                    self.parse_if_stmt()
                }
                Keyword::WHILE => {
                    self.prev_token();
                    self.parse_while()
                }
                Keyword::RAISE => {
                    self.prev_token();
                    self.parse_raise_stmt()
                }
                Keyword::SELECT | Keyword::WITH | Keyword::VALUES | Keyword::FROM => {
                    self.prev_token();
                    self.parse_query().map(Statement::Query)
                }
                Keyword::TRUNCATE => self.parse_truncate(),
                Keyword::ATTACH => {
                    if dialect_of!(self is DuckDbDialect) {
                        self.parse_attach_duckdb_database()
                    } else {
                        self.parse_attach_database()
                    }
                }
                Keyword::DETACH if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_detach_duckdb_database()
                }
                Keyword::MSCK => self.parse_msck(),
                Keyword::CREATE => self.parse_create(),
                Keyword::CACHE => self.parse_cache_table(),
                Keyword::DROP => self.parse_drop(),
                Keyword::DISCARD => self.parse_discard(),
                Keyword::DECLARE => self.parse_declare(),
                Keyword::FETCH => self.parse_fetch_statement(),
                Keyword::DELETE => self.parse_delete(),
                Keyword::INSERT => self.parse_insert(),
                Keyword::REPLACE => self.parse_replace(),
                Keyword::UNCACHE => self.parse_uncache_table(),
                Keyword::UPDATE => self.parse_update(),
                Keyword::ALTER => self.parse_alter(),
                Keyword::CALL => self.parse_call(),
                Keyword::COPY => self.parse_copy(),
                Keyword::OPEN => {
                    self.prev_token();
                    self.parse_open()
                }
                Keyword::CLOSE => self.parse_close(),
                Keyword::SET => self.parse_set(),
                Keyword::SHOW => self.parse_show(),
                Keyword::USE => self.parse_use(),
                Keyword::GRANT => self.parse_grant(),
                Keyword::DENY => {
                    self.prev_token();
                    self.parse_deny()
                }
                Keyword::REVOKE => self.parse_revoke(),
                Keyword::START => self.parse_start_transaction(),
                Keyword::BEGIN => self.parse_begin(),
                Keyword::END => self.parse_end(),
                Keyword::SAVEPOINT => self.parse_savepoint(),
                Keyword::RELEASE => self.parse_release(),
                Keyword::COMMIT => self.parse_commit(),
                Keyword::RAISERROR => Ok(self.parse_raiserror()?),
                Keyword::ROLLBACK => self.parse_rollback(),
                Keyword::ASSERT => self.parse_assert(),
                // `PREPARE`, `EXECUTE` and `DEALLOCATE` are Postgres-specific
                // syntaxes. They are used for Postgres prepared statements.
                Keyword::DEALLOCATE => self.parse_deallocate(),
                Keyword::EXECUTE | Keyword::EXEC => self.parse_execute(),
                Keyword::PREPARE => self.parse_prepare(),
                Keyword::MERGE => self.parse_merge(),
                // `LISTEN`, `UNLISTEN` and `NOTIFY` are Postgres-specific
                // syntaxes. They are used for Postgres asynchronous notification.
                Keyword::LISTEN if self.dialect.supports_listen_notify() => self.parse_listen(),
                Keyword::UNLISTEN if self.dialect.supports_listen_notify() => self.parse_unlisten(),
                Keyword::NOTIFY if self.dialect.supports_listen_notify() => self.parse_notify(),
                // `PRAGMA` is sqlite specific https://www.sqlite.org/pragma.html
                Keyword::PRAGMA => self.parse_pragma(),
                Keyword::UNLOAD => self.parse_unload(),
                Keyword::RENAME => self.parse_rename(),
                // `INSTALL` is duckdb specific https://duckdb.org/docs/extensions/overview
                Keyword::INSTALL if dialect_of!(self is DuckDbDialect | GenericDialect) => {
                    self.parse_install()
                }
                Keyword::LOAD => self.parse_load(),
                // `OPTIMIZE` is clickhouse specific https://clickhouse.tech/docs/en/sql-reference/statements/optimize/
                Keyword::OPTIMIZE if dialect_of!(self is ClickHouseDialect | GenericDialect) => {
                    self.parse_optimize_table()
                }
                // `COMMENT` is snowflake specific https://docs.snowflake.com/en/sql-reference/sql/comment
                Keyword::COMMENT if self.dialect.supports_comment_on() => self.parse_comment(),
                Keyword::PRINT => self.parse_print(),
                Keyword::RETURN => self.parse_return(),
                _ => self.expected("an SQL statement", next_token),
            },
            Token::LParen => {
                self.prev_token();
                self.parse_query().map(Statement::Query)
            }
            _ => self.expected("an SQL statement", next_token),
        }
    }

    /// Parse a `CASE` statement.
    ///
    /// See [Statement::Case]
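    ///
    /// For example, a statement of the following shape (procedural syntax
    /// as found in dialects such as BigQuery; shown for illustration only)
    /// is accepted:
    ///
    /// ```sql
    /// CASE
    ///     WHEN x = 1 THEN SELECT 'one';
    ///     ELSE SELECT 'other';
    /// END CASE;
    /// ```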
    pub fn parse_case_stmt(&mut self) -> Result<Statement, ParserError> {
        let case_token = self.expect_keyword(Keyword::CASE)?;

        let match_expr = if self.peek_keyword(Keyword::WHEN) {
            None
        } else {
            Some(self.parse_expr()?)
        };

        self.expect_keyword_is(Keyword::WHEN)?;
        let when_blocks = self.parse_keyword_separated(Keyword::WHEN, |parser| {
            parser.parse_conditional_statement_block(&[Keyword::WHEN, Keyword::ELSE, Keyword::END])
        })?;

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        let mut end_case_token = self.expect_keyword(Keyword::END)?;
        if self.peek_keyword(Keyword::CASE) {
            end_case_token = self.expect_keyword(Keyword::CASE)?;
        }

        Ok(Statement::Case(CaseStatement {
            case_token: AttachedToken(case_token),
            match_expr,
            when_blocks,
            else_block,
            end_case_token: AttachedToken(end_case_token),
        }))
    }

    /// Parse an `IF` statement.
    ///
    /// See [Statement::If]
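    ///
    /// For example, a statement of the following shape (procedural syntax
    /// as found in dialects such as BigQuery; shown for illustration only)
    /// is accepted:
    ///
    /// ```sql
    /// IF x > 0 THEN
    ///     SELECT 'positive';
    /// ELSEIF x = 0 THEN
    ///     SELECT 'zero';
    /// ELSE
    ///     SELECT 'negative';
    /// END IF;
    /// ```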
    pub fn parse_if_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::IF)?;
        let if_block = self.parse_conditional_statement_block(&[
            Keyword::ELSE,
            Keyword::ELSEIF,
            Keyword::END,
        ])?;

        let elseif_blocks = if self.parse_keyword(Keyword::ELSEIF) {
            self.parse_keyword_separated(Keyword::ELSEIF, |parser| {
                parser.parse_conditional_statement_block(&[
                    Keyword::ELSEIF,
                    Keyword::ELSE,
                    Keyword::END,
                ])
            })?
        } else {
            vec![]
        };

        let else_block = if self.parse_keyword(Keyword::ELSE) {
            Some(self.parse_conditional_statement_block(&[Keyword::END])?)
        } else {
            None
        };

        self.expect_keyword_is(Keyword::END)?;
        let end_token = self.expect_keyword(Keyword::IF)?;

        Ok(Statement::If(IfStatement {
            if_block,
            elseif_blocks,
            else_block,
            end_token: Some(AttachedToken(end_token)),
        }))
    }

    /// Parse a `WHILE` statement.
    ///
    /// See [Statement::While]
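    ///
    /// For example, a statement of the following shape (T-SQL-style syntax;
    /// shown for illustration only) is accepted:
    ///
    /// ```sql
    /// WHILE counter > 0
    /// BEGIN
    ///     SELECT counter;
    /// END
    /// ```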
    fn parse_while(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::WHILE)?;
        let while_block = self.parse_conditional_statement_block(&[Keyword::END])?;

        Ok(Statement::While(WhileStatement { while_block }))
    }

    /// Parses an expression and associated list of statements
    /// belonging to a conditional statement like `IF` or `WHEN` or `WHILE`.
    ///
    /// Example:
    /// ```sql
    /// IF condition THEN statement1; statement2;
    /// ```
    fn parse_conditional_statement_block(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatementBlock, ParserError> {
        let start_token = self.get_current_token().clone(); // self.expect_keyword(keyword)?;
        let mut then_token = None;

        let condition = match &start_token.token {
            Token::Word(w) if w.keyword == Keyword::ELSE => None,
            Token::Word(w) if w.keyword == Keyword::WHILE => {
                let expr = self.parse_expr()?;
                Some(expr)
            }
            _ => {
                let expr = self.parse_expr()?;
                then_token = Some(AttachedToken(self.expect_keyword(Keyword::THEN)?));
                Some(expr)
            }
        };

        let conditional_statements = self.parse_conditional_statements(terminal_keywords)?;

        Ok(ConditionalStatementBlock {
            start_token: AttachedToken(start_token),
            condition,
            then_token,
            conditional_statements,
        })
    }

    /// Parse a BEGIN/END block or a sequence of statements.
    /// This could be inside a conditional (IF, CASE, WHILE, etc.) or an object body, optionally delimited by BEGIN/END, containing one or more statements.
    pub(crate) fn parse_conditional_statements(
        &mut self,
        terminal_keywords: &[Keyword],
    ) -> Result<ConditionalStatements, ParserError> {
        let conditional_statements = if self.peek_keyword(Keyword::BEGIN) {
            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
            let statements = self.parse_statement_list(terminal_keywords)?;
            let end_token = self.expect_keyword(Keyword::END)?;

            ConditionalStatements::BeginEnd(BeginEndStatements {
                begin_token: AttachedToken(begin_token),
                statements,
                end_token: AttachedToken(end_token),
            })
        } else {
            ConditionalStatements::Sequence {
                statements: self.parse_statement_list(terminal_keywords)?,
            }
        };
        Ok(conditional_statements)
    }

    /// Parse a `RAISE` statement.
    ///
    /// See [Statement::Raise]
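    ///
    /// For example (illustrative procedural syntax, as in BigQuery):
    ///
    /// ```sql
    /// RAISE USING MESSAGE = 'error message';
    /// ```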
    pub fn parse_raise_stmt(&mut self) -> Result<Statement, ParserError> {
        self.expect_keyword_is(Keyword::RAISE)?;

        let value = if self.parse_keywords(&[Keyword::USING, Keyword::MESSAGE]) {
            self.expect_token(&Token::Eq)?;
            Some(RaiseStatementValue::UsingMessage(self.parse_expr()?))
        } else {
            self.maybe_parse(|parser| parser.parse_expr().map(RaiseStatementValue::Expr))?
        };

        Ok(Statement::Raise(RaiseStatement { value }))
    }

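    /// Parse a `COMMENT ON ...` statement, for dialects that support it.
    ///
    /// For example (illustrative PostgreSQL-style syntax):
    ///
    /// ```sql
    /// COMMENT ON TABLE my_table IS 'a table comment';
    /// ```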
    pub fn parse_comment(&mut self) -> Result<Statement, ParserError> {
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);

        self.expect_keyword_is(Keyword::ON)?;
        let token = self.next_token();

        let (object_type, object_name) = match token.token {
            Token::Word(w) if w.keyword == Keyword::COLUMN => {
                (CommentObject::Column, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::TABLE => {
                (CommentObject::Table, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::EXTENSION => {
                (CommentObject::Extension, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::SCHEMA => {
                (CommentObject::Schema, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::DATABASE => {
                (CommentObject::Database, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::USER => {
                (CommentObject::User, self.parse_object_name(false)?)
            }
            Token::Word(w) if w.keyword == Keyword::ROLE => {
                (CommentObject::Role, self.parse_object_name(false)?)
            }
            _ => self.expected("comment object_type", token)?,
        };

        self.expect_keyword_is(Keyword::IS)?;
        let comment = if self.parse_keyword(Keyword::NULL) {
            None
        } else {
            Some(self.parse_literal_string()?)
        };
        Ok(Statement::Comment {
            object_type,
            object_name,
            comment,
            if_exists,
        })
    }

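    /// Parse a MySQL-style `FLUSH` statement.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// FLUSH TABLES WITH READ LOCK;
    /// ```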
    pub fn parse_flush(&mut self) -> Result<Statement, ParserError> {
        let mut channel = None;
        let mut tables: Vec<ObjectName> = vec![];
        let mut read_lock = false;
        let mut export = false;

        if !dialect_of!(self is MySqlDialect | GenericDialect) {
            return parser_err!("Unsupported statement FLUSH", self.peek_token().span.start);
        }

        let location = if self.parse_keyword(Keyword::NO_WRITE_TO_BINLOG) {
            Some(FlushLocation::NoWriteToBinlog)
        } else if self.parse_keyword(Keyword::LOCAL) {
            Some(FlushLocation::Local)
        } else {
            None
        };

        let object_type = if self.parse_keywords(&[Keyword::BINARY, Keyword::LOGS]) {
            FlushType::BinaryLogs
        } else if self.parse_keywords(&[Keyword::ENGINE, Keyword::LOGS]) {
            FlushType::EngineLogs
        } else if self.parse_keywords(&[Keyword::ERROR, Keyword::LOGS]) {
            FlushType::ErrorLogs
        } else if self.parse_keywords(&[Keyword::GENERAL, Keyword::LOGS]) {
            FlushType::GeneralLogs
        } else if self.parse_keywords(&[Keyword::HOSTS]) {
            FlushType::Hosts
        } else if self.parse_keyword(Keyword::PRIVILEGES) {
            FlushType::Privileges
        } else if self.parse_keyword(Keyword::OPTIMIZER_COSTS) {
            FlushType::OptimizerCosts
        } else if self.parse_keywords(&[Keyword::RELAY, Keyword::LOGS]) {
            if self.parse_keywords(&[Keyword::FOR, Keyword::CHANNEL]) {
                channel = Some(self.parse_object_name(false).unwrap().to_string());
            }
            FlushType::RelayLogs
        } else if self.parse_keywords(&[Keyword::SLOW, Keyword::LOGS]) {
            FlushType::SlowLogs
        } else if self.parse_keyword(Keyword::STATUS) {
            FlushType::Status
        } else if self.parse_keyword(Keyword::USER_RESOURCES) {
            FlushType::UserResources
        } else if self.parse_keywords(&[Keyword::LOGS]) {
            FlushType::Logs
        } else if self.parse_keywords(&[Keyword::TABLES]) {
            loop {
                let next_token = self.next_token();
                match &next_token.token {
                    Token::Word(w) => match w.keyword {
                        Keyword::WITH => {
                            read_lock = self.parse_keywords(&[Keyword::READ, Keyword::LOCK]);
                        }
                        Keyword::FOR => {
                            export = self.parse_keyword(Keyword::EXPORT);
                        }
                        Keyword::NoKeyword => {
                            self.prev_token();
                            tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
                        }
                        _ => {}
                    },
                    _ => {
                        break;
                    }
                }
            }

            FlushType::Tables
        } else {
            return self.expected(
                "BINARY LOGS, ENGINE LOGS, ERROR LOGS, GENERAL LOGS, HOSTS, LOGS, PRIVILEGES, OPTIMIZER_COSTS,\
                 RELAY LOGS [FOR CHANNEL channel], SLOW LOGS, STATUS, USER_RESOURCES",
                self.peek_token(),
            );
        };

        Ok(Statement::Flush {
            object_type,
            location,
            channel,
            read_lock,
            export,
            tables,
        })
    }

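    /// Parse a Hive-style `MSCK [REPAIR] TABLE` statement.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// MSCK REPAIR TABLE my_table;
    /// ```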
    pub fn parse_msck(&mut self) -> Result<Statement, ParserError> {
        let repair = self.parse_keyword(Keyword::REPAIR);
        self.expect_keyword_is(Keyword::TABLE)?;
        let table_name = self.parse_object_name(false)?;
        let partition_action = self
            .maybe_parse(|parser| {
                let pa = match parser.parse_one_of_keywords(&[
                    Keyword::ADD,
                    Keyword::DROP,
                    Keyword::SYNC,
                ]) {
                    Some(Keyword::ADD) => Some(AddDropSync::ADD),
                    Some(Keyword::DROP) => Some(AddDropSync::DROP),
                    Some(Keyword::SYNC) => Some(AddDropSync::SYNC),
                    _ => None,
                };
                parser.expect_keyword_is(Keyword::PARTITIONS)?;
                Ok(pa)
            })?
            .unwrap_or_default();
        Ok(Statement::Msck {
            repair,
            table_name,
            partition_action,
        })
    }

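    /// Parse a `TRUNCATE` statement.
    ///
    /// For example (illustrative PostgreSQL-style syntax):
    ///
    /// ```sql
    /// TRUNCATE TABLE t1, t2 RESTART IDENTITY CASCADE;
    /// ```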
    pub fn parse_truncate(&mut self) -> Result<Statement, ParserError> {
        let table = self.parse_keyword(Keyword::TABLE);

        let table_names = self
            .parse_comma_separated(|p| {
                Ok((p.parse_keyword(Keyword::ONLY), p.parse_object_name(false)?))
            })?
            .into_iter()
            .map(|(only, name)| TruncateTableTarget { name, only })
            .collect();

        let mut partitions = None;
        if self.parse_keyword(Keyword::PARTITION) {
            self.expect_token(&Token::LParen)?;
            partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
            self.expect_token(&Token::RParen)?;
        }

        let mut identity = None;
        let mut cascade = None;

        if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
            identity = if self.parse_keywords(&[Keyword::RESTART, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Restart)
            } else if self.parse_keywords(&[Keyword::CONTINUE, Keyword::IDENTITY]) {
                Some(TruncateIdentityOption::Continue)
            } else {
                None
            };

            cascade = self.parse_cascade_option();
        };

        let on_cluster = self.parse_optional_on_cluster()?;

        Ok(Statement::Truncate {
            table_names,
            partitions,
            table,
            identity,
            cascade,
            on_cluster,
        })
    }

    fn parse_cascade_option(&mut self) -> Option<CascadeOption> {
        if self.parse_keyword(Keyword::CASCADE) {
            Some(CascadeOption::Cascade)
        } else if self.parse_keyword(Keyword::RESTRICT) {
            Some(CascadeOption::Restrict)
        } else {
            None
        }
    }

    pub fn parse_attach_duckdb_database_options(
        &mut self,
    ) -> Result<Vec<AttachDuckDBDatabaseOption>, ParserError> {
        if !self.consume_token(&Token::LParen) {
            return Ok(vec![]);
        }

        let mut options = vec![];
        loop {
            if self.parse_keyword(Keyword::READ_ONLY) {
                let boolean = if self.parse_keyword(Keyword::TRUE) {
                    Some(true)
                } else if self.parse_keyword(Keyword::FALSE) {
                    Some(false)
                } else {
                    None
                };
                options.push(AttachDuckDBDatabaseOption::ReadOnly(boolean));
            } else if self.parse_keyword(Keyword::TYPE) {
                let ident = self.parse_identifier()?;
                options.push(AttachDuckDBDatabaseOption::Type(ident));
            } else {
                return self.expected("expected one of: ), READ_ONLY, TYPE", self.peek_token());
            };

            if self.consume_token(&Token::RParen) {
                return Ok(options);
            } else if self.consume_token(&Token::Comma) {
                continue;
            } else {
                return self.expected("expected one of: ')', ','", self.peek_token());
            }
        }
    }

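    /// Parse a DuckDB-style `ATTACH` statement.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// ATTACH DATABASE 'file.db' AS file_db (READ_ONLY);
    /// ```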
    pub fn parse_attach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
        let database_path = self.parse_identifier()?;
        let database_alias = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_identifier()?)
        } else {
            None
        };

        let attach_options = self.parse_attach_duckdb_database_options()?;
        Ok(Statement::AttachDuckDBDatabase {
            if_not_exists,
            database,
            database_path,
            database_alias,
            attach_options,
        })
    }

    pub fn parse_detach_duckdb_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
        let database_alias = self.parse_identifier()?;
        Ok(Statement::DetachDuckDBDatabase {
            if_exists,
            database,
            database_alias,
        })
    }

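    /// Parse an `ATTACH DATABASE` statement (e.g. SQLite-style).
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// ATTACH DATABASE 'northwind.db' AS northwind;
    /// ```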
    pub fn parse_attach_database(&mut self) -> Result<Statement, ParserError> {
        let database = self.parse_keyword(Keyword::DATABASE);
        let database_file_name = self.parse_expr()?;
        self.expect_keyword_is(Keyword::AS)?;
        let schema_name = self.parse_identifier()?;
        Ok(Statement::AttachDatabase {
            database,
            schema_name,
            database_file_name,
        })
    }

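    /// Parse an `ANALYZE` statement.
    ///
    /// For example (illustrative Hive-style syntax):
    ///
    /// ```sql
    /// ANALYZE TABLE my_table COMPUTE STATISTICS NOSCAN;
    /// ```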
    pub fn parse_analyze(&mut self) -> Result<Statement, ParserError> {
        let has_table_keyword = self.parse_keyword(Keyword::TABLE);
        let table_name = self.parse_object_name(false)?;
        let mut for_columns = false;
        let mut cache_metadata = false;
        let mut noscan = false;
        let mut partitions = None;
        let mut compute_statistics = false;
        let mut columns = vec![];
        loop {
            match self.parse_one_of_keywords(&[
                Keyword::PARTITION,
                Keyword::FOR,
                Keyword::CACHE,
                Keyword::NOSCAN,
                Keyword::COMPUTE,
            ]) {
                Some(Keyword::PARTITION) => {
                    self.expect_token(&Token::LParen)?;
                    partitions = Some(self.parse_comma_separated(Parser::parse_expr)?);
                    self.expect_token(&Token::RParen)?;
                }
                Some(Keyword::NOSCAN) => noscan = true,
                Some(Keyword::FOR) => {
                    self.expect_keyword_is(Keyword::COLUMNS)?;

                    columns = self
                        .maybe_parse(|parser| {
                            parser.parse_comma_separated(|p| p.parse_identifier())
                        })?
                        .unwrap_or_default();
                    for_columns = true
                }
                Some(Keyword::CACHE) => {
                    self.expect_keyword_is(Keyword::METADATA)?;
                    cache_metadata = true
                }
                Some(Keyword::COMPUTE) => {
                    self.expect_keyword_is(Keyword::STATISTICS)?;
                    compute_statistics = true
                }
                _ => break,
            }
        }

        Ok(Statement::Analyze {
            has_table_keyword,
            table_name,
            for_columns,
            columns,
            partitions,
            cache_metadata,
            noscan,
            compute_statistics,
        })
    }

    /// Parse a new expression including wildcard & qualified wildcard.
    pub fn parse_wildcard_expr(&mut self) -> Result<Expr, ParserError> {
        let index = self.index;

        let next_token = self.next_token();
        match next_token.token {
            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
                if self.peek_token().token == Token::Period {
                    let mut id_parts: Vec<Ident> = vec![match t {
                        Token::Word(w) => w.into_ident(next_token.span),
                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
                        _ => unreachable!(), // We matched above
                    }];

                    while self.consume_token(&Token::Period) {
                        let next_token = self.next_token();
                        match next_token.token {
                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
                            Token::SingleQuotedString(s) => {
                                // SQLite has single-quoted identifiers
                                id_parts.push(Ident::with_quote('\'', s))
                            }
                            Token::Mul => {
                                return Ok(Expr::QualifiedWildcard(
                                    ObjectName::from(id_parts),
                                    AttachedToken(next_token),
                                ));
                            }
                            _ => {
                                return self
                                    .expected("an identifier or a '*' after '.'", next_token);
                            }
                        }
                    }
                }
            }
            Token::Mul => {
                return Ok(Expr::Wildcard(AttachedToken(next_token)));
            }
            _ => (),
        };

        self.index = index;
        self.parse_expr()
    }

    /// Parse a new expression.
    pub fn parse_expr(&mut self) -> Result<Expr, ParserError> {
        self.parse_subexpr(self.dialect.prec_unknown())
    }

    pub fn parse_expr_with_alias_and_order_by(
        &mut self,
    ) -> Result<ExprWithAliasAndOrderBy, ParserError> {
        let expr = self.parse_expr()?;

        fn validator(explicit: bool, kw: &Keyword, _parser: &mut Parser) -> bool {
            explicit || !&[Keyword::ASC, Keyword::DESC, Keyword::GROUP].contains(kw)
        }
        let alias = self.parse_optional_alias_inner(None, validator)?;
        let order_by = OrderByOptions {
            asc: self.parse_asc_desc(),
            nulls_first: None,
        };
        Ok(ExprWithAliasAndOrderBy {
            expr: ExprWithAlias { expr, alias },
            order_by,
        })
    }

    /// Parse tokens until the precedence changes.
    pub fn parse_subexpr(&mut self, precedence: u8) -> Result<Expr, ParserError> {
        let _guard = self.recursion_counter.try_decrease()?;
        debug!("parsing expr");
        let mut expr = self.parse_prefix()?;

        expr = self.parse_compound_expr(expr, vec![])?;

        debug!("prefix: {expr:?}");
        loop {
            let next_precedence = self.get_next_precedence()?;
            debug!("next precedence: {next_precedence:?}");

            if precedence >= next_precedence {
                break;
            }

            // The period operator is handled exclusively by the
            // compound field access parsing.
            if Token::Period == self.peek_token_ref().token {
                break;
            }

            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }

    pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
        let condition = self.parse_expr()?;
        let message = if self.parse_keyword(Keyword::AS) {
            Some(self.parse_expr()?)
        } else {
            None
        };

        Ok(Statement::Assert { condition, message })
    }

    pub fn parse_savepoint(&mut self) -> Result<Statement, ParserError> {
        let name = self.parse_identifier()?;
        Ok(Statement::Savepoint { name })
    }

    pub fn parse_release(&mut self) -> Result<Statement, ParserError> {
        let _ = self.parse_keyword(Keyword::SAVEPOINT);
        let name = self.parse_identifier()?;

        Ok(Statement::ReleaseSavepoint { name })
    }

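    /// Parse a PostgreSQL `LISTEN` statement.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// LISTEN my_channel;
    /// ```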
    pub fn parse_listen(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        Ok(Statement::LISTEN { channel })
    }

    pub fn parse_unlisten(&mut self) -> Result<Statement, ParserError> {
        let channel = if self.consume_token(&Token::Mul) {
            Ident::new(Expr::Wildcard(AttachedToken::empty()).to_string())
        } else {
            match self.parse_identifier() {
                Ok(expr) => expr,
                _ => {
                    self.prev_token();
                    return self.expected("wildcard or identifier", self.peek_token());
                }
            }
        };
        Ok(Statement::UNLISTEN { channel })
    }

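    /// Parse a PostgreSQL `NOTIFY` statement.
    ///
    /// For example (illustrative):
    ///
    /// ```sql
    /// NOTIFY my_channel, 'payload';
    /// ```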
    pub fn parse_notify(&mut self) -> Result<Statement, ParserError> {
        let channel = self.parse_identifier()?;
        let payload = if self.consume_token(&Token::Comma) {
            Some(self.parse_literal_string()?)
        } else {
            None
        };
        Ok(Statement::NOTIFY { channel, payload })
    }

    /// Parses a `RENAME TABLE` statement. See [Statement::RenameTable]
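    ///
    /// For example (illustrative MySQL-style syntax):
    ///
    /// ```sql
    /// RENAME TABLE old_table TO new_table;
    /// ```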
    pub fn parse_rename(&mut self) -> Result<Statement, ParserError> {
        if self.peek_keyword(Keyword::TABLE) {
            self.expect_keyword(Keyword::TABLE)?;
            let rename_tables = self.parse_comma_separated(|parser| {
                let old_name = parser.parse_object_name(false)?;
                parser.expect_keyword(Keyword::TO)?;
                let new_name = parser.parse_object_name(false)?;

                Ok(RenameTable { old_name, new_name })
            })?;
            Ok(Statement::RenameTable(rename_tables))
        } else {
            self.expected("KEYWORD `TABLE` after RENAME", self.peek_token())
        }
    }

    /// Tries to parse an expression by matching the specified word to known keywords that have a special meaning in the dialect.
    /// Returns `None` if no match is found.
    fn parse_expr_prefix_by_reserved_word(
        &mut self,
        w: &Word,
        w_span: Span,
    ) -> Result<Option<Expr>, ParserError> {
        match w.keyword {
            Keyword::TRUE | Keyword::FALSE if self.dialect.supports_boolean_literals() => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::NULL => {
                self.prev_token();
                Ok(Some(Expr::Value(self.parse_value()?)))
            }
            Keyword::CURRENT_CATALOG
            | Keyword::CURRENT_USER
            | Keyword::SESSION_USER
            | Keyword::USER
            if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
                {
                    Ok(Some(Expr::Function(Function {
                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
                        uses_odbc_syntax: false,
                        parameters: FunctionArguments::None,
                        args: FunctionArguments::None,
                        null_treatment: None,
                        filter: None,
                        over: None,
                        within_group: vec![],
                    })))
                }
            Keyword::CURRENT_TIMESTAMP
            | Keyword::CURRENT_TIME
            | Keyword::CURRENT_DATE
            | Keyword::LOCALTIME
            | Keyword::LOCALTIMESTAMP => {
                Ok(Some(self.parse_time_functions(ObjectName::from(vec![w.clone().into_ident(w_span)]))?))
            }
            Keyword::CASE => Ok(Some(self.parse_case_expr()?)),
            Keyword::CONVERT => Ok(Some(self.parse_convert_expr(false)?)),
            Keyword::TRY_CONVERT if self.dialect.supports_try_convert() => Ok(Some(self.parse_convert_expr(true)?)),
            Keyword::CAST => Ok(Some(self.parse_cast_expr(CastKind::Cast)?)),
            Keyword::TRY_CAST => Ok(Some(self.parse_cast_expr(CastKind::TryCast)?)),
            Keyword::SAFE_CAST => Ok(Some(self.parse_cast_expr(CastKind::SafeCast)?)),
            Keyword::EXISTS
            // Databricks has a function named `exists`, so only parse EXISTS as
            // introducing a subquery when it is followed by SELECT or WITH.
1375            if !dialect_of!(self is DatabricksDialect)
1376                || matches!(
1377                        self.peek_nth_token_ref(1).token,
1378                        Token::Word(Word {
1379                            keyword: Keyword::SELECT | Keyword::WITH,
1380                            ..
1381                        })
1382                    ) =>
1383                {
1384                    Ok(Some(self.parse_exists_expr(false)?))
1385                }
1386            Keyword::EXTRACT => Ok(Some(self.parse_extract_expr()?)),
1387            Keyword::CEIL => Ok(Some(self.parse_ceil_floor_expr(true)?)),
1388            Keyword::FLOOR => Ok(Some(self.parse_ceil_floor_expr(false)?)),
1389            Keyword::POSITION if self.peek_token_ref().token == Token::LParen => {
1390                Ok(Some(self.parse_position_expr(w.clone().into_ident(w_span))?))
1391            }
1392            Keyword::SUBSTR | Keyword::SUBSTRING => {
1393                self.prev_token();
1394                Ok(Some(self.parse_substring()?))
1395            }
1396            Keyword::OVERLAY => Ok(Some(self.parse_overlay_expr()?)),
1397            Keyword::TRIM => Ok(Some(self.parse_trim_expr()?)),
1398            Keyword::INTERVAL => Ok(Some(self.parse_interval()?)),
1399            // Treat ARRAY[1,2,3] as an array [1,2,3], otherwise try as subquery or a function call
1400            Keyword::ARRAY if *self.peek_token_ref() == Token::LBracket => {
1401                self.expect_token(&Token::LBracket)?;
1402                Ok(Some(self.parse_array_expr(true)?))
1403            }
1404            Keyword::ARRAY
1405            if self.peek_token() == Token::LParen
1406                && !dialect_of!(self is ClickHouseDialect | DatabricksDialect) =>
1407                {
1408                    self.expect_token(&Token::LParen)?;
1409                    let query = self.parse_query()?;
1410                    self.expect_token(&Token::RParen)?;
1411                    Ok(Some(Expr::Function(Function {
1412                        name: ObjectName::from(vec![w.clone().into_ident(w_span)]),
1413                        uses_odbc_syntax: false,
1414                        parameters: FunctionArguments::None,
1415                        args: FunctionArguments::Subquery(query),
1416                        filter: None,
1417                        null_treatment: None,
1418                        over: None,
1419                        within_group: vec![],
1420                    })))
1421                }
1422            Keyword::NOT => Ok(Some(self.parse_not()?)),
1423            Keyword::MATCH if self.dialect.supports_match_against() => {
1424                Ok(Some(self.parse_match_against()?))
1425            }
1426            Keyword::STRUCT if self.dialect.supports_struct_literal() => {
1427                let struct_expr = self.parse_struct_literal()?;
1428                Ok(Some(struct_expr))
1429            }
1430            Keyword::PRIOR if matches!(self.state, ParserState::ConnectBy) => {
1431                let expr = self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?;
1432                Ok(Some(Expr::Prior(Box::new(expr))))
1433            }
1434            Keyword::MAP if *self.peek_token_ref() == Token::LBrace && self.dialect.support_map_literal_syntax() => {
1435                Ok(Some(self.parse_duckdb_map_literal()?))
1436            }
1437            _ if self.dialect.supports_geometric_types() => match w.keyword {
1438                Keyword::CIRCLE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Circle)?)),
1439                Keyword::BOX => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricBox)?)),
1440                Keyword::PATH => Ok(Some(self.parse_geometric_type(GeometricTypeKind::GeometricPath)?)),
1441                Keyword::LINE => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Line)?)),
1442                Keyword::LSEG => Ok(Some(self.parse_geometric_type(GeometricTypeKind::LineSegment)?)),
1443                Keyword::POINT => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Point)?)),
1444                Keyword::POLYGON => Ok(Some(self.parse_geometric_type(GeometricTypeKind::Polygon)?)),
1445                _ => Ok(None),
1446            },
1447            _ => Ok(None),
1448        }
1449    }
1450
1451    /// Tries to parse an expression by a word that is not known to have a special meaning in the dialect.
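        /// For example: a function call like `foo(1)`, a charset-introduced string like `_utf8'abc'`,
        /// a lambda parameter like `x -> x + 1` (in dialects with lambda support), or a plain identifier.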
1452    fn parse_expr_prefix_by_unreserved_word(
1453        &mut self,
1454        w: &Word,
1455        w_span: Span,
1456    ) -> Result<Expr, ParserError> {
1457        match self.peek_token().token {
1458            Token::LParen if !self.peek_outer_join_operator() => {
1459                let id_parts = vec![w.clone().into_ident(w_span)];
1460                self.parse_function(ObjectName::from(id_parts))
1461            }
1462            // string introducer https://dev.mysql.com/doc/refman/8.0/en/charset-introducer.html
1463            Token::SingleQuotedString(_)
1464            | Token::DoubleQuotedString(_)
1465            | Token::HexStringLiteral(_)
1466                if w.value.starts_with('_') =>
1467            {
1468                Ok(Expr::Prefixed {
1469                    prefix: w.clone().into_ident(w_span),
1470                    value: self.parse_introduced_string_expr()?.into(),
1471                })
1472            }
1484            Token::Arrow if self.dialect.supports_lambda_functions() => {
1485                self.expect_token(&Token::Arrow)?;
1486                Ok(Expr::Lambda(LambdaFunction {
1487                    params: OneOrManyWithParens::One(w.clone().into_ident(w_span)),
1488                    body: Box::new(self.parse_expr()?),
1489                }))
1490            }
1491            _ => Ok(Expr::Identifier(w.clone().into_ident(w_span))),
1492        }
1493    }
1494
1495    /// Parse an expression prefix.
1496    pub fn parse_prefix(&mut self) -> Result<Expr, ParserError> {
1497        // allow the dialect to override prefix parsing
1498        if let Some(prefix) = self.dialect.parse_prefix(self) {
1499            return prefix;
1500        }
1501
1502        // PostgreSQL allows any string literal to be preceded by a type name, indicating that the
1503        // string literal represents a literal of that type. Some examples:
1504        //
1505        //      DATE '2020-05-20'
1506        //      TIMESTAMP WITH TIME ZONE '2020-05-20 7:43:54'
1507        //      BOOL 'true'
1508        //
1509        // The first two are standard SQL, while the latter is a PostgreSQL extension. Complicating
1510        // matters is the fact that INTERVAL string literals may optionally be followed by special
1511        // keywords, e.g.:
1512        //
1513        //      INTERVAL '7' DAY
1514        //
1515        // Note also that naively `SELECT date` looks like a syntax error because the `date` type
1516        // name is not followed by a string literal, but in fact in PostgreSQL it is a valid
1517        // expression that should parse as the column name "date".
1518        let loc = self.peek_token_ref().span.start;
1519        let opt_expr = self.maybe_parse(|parser| {
1520            match parser.parse_data_type()? {
1521                DataType::Interval => parser.parse_interval(),
1522                // PostgreSQL allows almost any identifier to be used as custom data type name,
1523                // and we support that in `parse_data_type()`. But unlike Postgres we don't
1524                // have a list of globally reserved keywords (since they vary across dialects),
1525                // so given `NOT 'a' LIKE 'b'`, we'd accept `NOT` as a possible custom data type
1526                // name, resulting in `NOT 'a'` being recognized as a `TypedString` instead of
1527                // an unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1528                // a unary negation `NOT ('a' LIKE 'b')`. To solve this, we don't accept the
1529                DataType::Custom(..) => parser_err!("dummy", loc),
1530                data_type => Ok(Expr::TypedString {
1531                    data_type,
1532                    value: parser.parse_value()?,
1533                }),
1534            }
1535        })?;
1536
1537        if let Some(expr) = opt_expr {
1538            return Ok(expr);
1539        }
1540
1541        // Cache some dialect properties to avoid lifetime issues with the
1542        // next_token reference.
1543
1544        let dialect = self.dialect;
1545
1546        self.advance_token();
1547        let next_token_index = self.get_current_index();
1548        let next_token = self.get_current_token();
1549        let span = next_token.span;
1550        let expr = match &next_token.token {
1551            Token::Word(w) => {
1552                // The word we consumed may fall into one of two cases: it has a special meaning, or not.
1553                // For example, in Snowflake, the word `interval` may have two meanings depending on the context:
1554                // `SELECT CURRENT_DATE() + INTERVAL '1 DAY', MAX(interval) FROM tbl;`
1555                //                          ^^^^^^^^^^^^^^^^      ^^^^^^^^
1556                //                         interval expression   identifier
1557                //
1558                // We first try to parse the word and following tokens as a special expression, and if that fails,
1559                // we rollback and try to parse it as an identifier.
1560                let w = w.clone();
1561                match self.try_parse(|parser| parser.parse_expr_prefix_by_reserved_word(&w, span)) {
1562                    // This word indicated an expression prefix and parsing was successful
1563                    Ok(Some(expr)) => Ok(expr),
1564
1565                    // No expression prefix associated with this word
1566                    Ok(None) => Ok(self.parse_expr_prefix_by_unreserved_word(&w, span)?),
1567
1568                    // If parsing the word as a special expression failed, there are two possibilities:
1569                    // 1. The statement is malformed, e.g. `SELECT INTERVAL '1 DAI` (`DAI` instead of `DAY`)
1570                    // 2. The word is used as an identifier, e.g. `SELECT MAX(interval) FROM tbl`
1571                    // We first try to parse the word as an identifier and if that fails
1572                    // we rollback and return the parsing error we got from trying to parse a
1573                    // special expression (to maintain backwards compatibility of parsing errors).
1574                    Err(e) => {
1575                        if !self.dialect.is_reserved_for_identifier(w.keyword) {
1576                            if let Ok(Some(expr)) = self.maybe_parse(|parser| {
1577                                parser.parse_expr_prefix_by_unreserved_word(&w, span)
1578                            }) {
1579                                return Ok(expr);
1580                            }
1581                        }
1582                        return Err(e);
1583                    }
1584                }
1585            } // End of Token::Word
1586            // array `[1, 2, 3]`
1587            Token::LBracket => self.parse_array_expr(false),
1588            tok @ Token::Minus | tok @ Token::Plus => {
1589                let op = if *tok == Token::Plus {
1590                    UnaryOperator::Plus
1591                } else {
1592                    UnaryOperator::Minus
1593                };
1594                Ok(Expr::UnaryOp {
1595                    op,
1596                    expr: Box::new(
1597                        self.parse_subexpr(self.dialect.prec_value(Precedence::MulDivModOp))?,
1598                    ),
1599                })
1600            }
1601            Token::ExclamationMark if dialect.supports_bang_not_operator() => Ok(Expr::UnaryOp {
1602                op: UnaryOperator::BangNot,
1603                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
1604            }),
1605            tok @ Token::DoubleExclamationMark
1606            | tok @ Token::PGSquareRoot
1607            | tok @ Token::PGCubeRoot
1608            | tok @ Token::AtSign
1609            | tok @ Token::Tilde
1610                if dialect_is!(dialect is PostgreSqlDialect) =>
1611            {
1612                let op = match tok {
1613                    Token::DoubleExclamationMark => UnaryOperator::PGPrefixFactorial,
1614                    Token::PGSquareRoot => UnaryOperator::PGSquareRoot,
1615                    Token::PGCubeRoot => UnaryOperator::PGCubeRoot,
1616                    Token::AtSign => UnaryOperator::PGAbs,
1617                    Token::Tilde => UnaryOperator::PGBitwiseNot,
1618                    _ => unreachable!(),
1619                };
1620                Ok(Expr::UnaryOp {
1621                    op,
1622                    expr: Box::new(
1623                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1624                    ),
1625                })
1626            }
1627            tok @ Token::Sharp
1628            | tok @ Token::AtDashAt
1629            | tok @ Token::AtAt
1630            | tok @ Token::QuestionMarkDash
1631            | tok @ Token::QuestionPipe
1632                if self.dialect.supports_geometric_types() =>
1633            {
1634                let op = match tok {
1635                    Token::Sharp => UnaryOperator::Hash,
1636                    Token::AtDashAt => UnaryOperator::AtDashAt,
1637                    Token::AtAt => UnaryOperator::DoubleAt,
1638                    Token::QuestionMarkDash => UnaryOperator::QuestionDash,
1639                    Token::QuestionPipe => UnaryOperator::QuestionPipe,
1640                    _ => {
1641                        return Err(ParserError::ParserError(format!(
1642                            "Unexpected token in unary operator parsing: {tok:?}"
1643                        )))
1644                    }
1645                };
1646                Ok(Expr::UnaryOp {
1647                    op,
1648                    expr: Box::new(
1649                        self.parse_subexpr(self.dialect.prec_value(Precedence::PlusMinus))?,
1650                    ),
1651                })
1652            }
1653            Token::EscapedStringLiteral(_) if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) =>
1654            {
1655                self.prev_token();
1656                Ok(Expr::Value(self.parse_value()?))
1657            }
1658            Token::UnicodeStringLiteral(_) => {
1659                self.prev_token();
1660                Ok(Expr::Value(self.parse_value()?))
1661            }
1662            Token::Number(_, _)
1663            | Token::SingleQuotedString(_)
1664            | Token::DoubleQuotedString(_)
1665            | Token::TripleSingleQuotedString(_)
1666            | Token::TripleDoubleQuotedString(_)
1667            | Token::DollarQuotedString(_)
1668            | Token::SingleQuotedByteStringLiteral(_)
1669            | Token::DoubleQuotedByteStringLiteral(_)
1670            | Token::TripleSingleQuotedByteStringLiteral(_)
1671            | Token::TripleDoubleQuotedByteStringLiteral(_)
1672            | Token::SingleQuotedRawStringLiteral(_)
1673            | Token::DoubleQuotedRawStringLiteral(_)
1674            | Token::TripleSingleQuotedRawStringLiteral(_)
1675            | Token::TripleDoubleQuotedRawStringLiteral(_)
1676            | Token::NationalStringLiteral(_)
1677            | Token::HexStringLiteral(_) => {
1678                self.prev_token();
1679                Ok(Expr::Value(self.parse_value()?))
1680            }
1681            Token::LParen => {
1682                let expr = if let Some(expr) = self.try_parse_expr_sub_query()? {
1683                    expr
1684                } else if let Some(lambda) = self.try_parse_lambda()? {
1685                    return Ok(lambda);
1686                } else {
1687                    let exprs = self.parse_comma_separated(Parser::parse_expr)?;
1688                    match exprs.len() {
1689                        0 => unreachable!(), // parse_comma_separated ensures 1 or more
1690                        1 => Expr::Nested(Box::new(exprs.into_iter().next().unwrap())),
1691                        _ => Expr::Tuple(exprs),
1692                    }
1693                };
1694                self.expect_token(&Token::RParen)?;
1695                Ok(expr)
1696            }
1697            Token::Placeholder(_) | Token::Colon | Token::AtSign => {
1698                self.prev_token();
1699                Ok(Expr::Value(self.parse_value()?))
1700            }
1701            Token::LBrace => {
1702                self.prev_token();
1703                self.parse_lbrace_expr()
1704            }
1705            _ => self.expected_at("an expression", next_token_index),
1706        }?;
1707
1708        if self.parse_keyword(Keyword::COLLATE) {
1709            Ok(Expr::Collate {
1710                expr: Box::new(expr),
1711                collation: self.parse_object_name(false)?,
1712            })
1713        } else {
1714            Ok(expr)
1715        }
1716    }
1717
1718    fn parse_geometric_type(&mut self, kind: GeometricTypeKind) -> Result<Expr, ParserError> {
1719        Ok(Expr::TypedString {
1720            data_type: DataType::GeometricType(kind),
1721            value: self.parse_value()?,
1722        })
1723    }
1724
1725    /// Try to parse an [Expr::CompoundFieldAccess] like `a.b.c` or `a.b[1].c`.
1726    /// If all the fields are `Expr::Identifier`s, return an [Expr::CompoundIdentifier] instead.
1727    /// If only the root exists, return the root.
1728    /// Parses compound expressions which may be delimited by period
1729    /// or bracket notation.
1730    /// For example: `a.b.c`, `a.b[1]`.
1731    pub fn parse_compound_expr(
1732        &mut self,
1733        root: Expr,
1734        mut chain: Vec<AccessExpr>,
1735    ) -> Result<Expr, ParserError> {
1736        let mut ending_wildcard: Option<TokenWithSpan> = None;
1737        loop {
1738            if self.consume_token(&Token::Period) {
1739                let next_token = self.peek_token_ref();
1740                match &next_token.token {
1741                    Token::Mul => {
1742                        // Postgres explicitly allows funcnm(tablenm.*); for example,
1743                        // array_agg(tbl.*) takes this code path.
1744                        if dialect_of!(self is PostgreSqlDialect) {
1745                            ending_wildcard = Some(self.next_token());
1746                        } else {
1747                            // Put back the consumed `.` tokens before exiting.
1748                            // If this expression is being parsed in the
1749                            // context of a projection, then the `.*` could imply
1750                            // a wildcard expansion. For example:
1751                            // `SELECT STRUCT('foo').* FROM T`
1752                            self.prev_token(); // .
1753                        }
1754
1755                        break;
1756                    }
1757                    Token::SingleQuotedString(s) => {
1758                        let expr =
1759                            Expr::Identifier(Ident::with_quote_and_span('\'', next_token.span, s));
1760                        chain.push(AccessExpr::Dot(expr));
1761                        self.advance_token(); // The consumed string
1762                    }
1763                    // Fallback to parsing an arbitrary expression.
1764                    _ => match self.parse_subexpr(self.dialect.prec_value(Precedence::Period))? {
1765                        // If we get back a compound field access or identifier,
1766                        // we flatten the nested expression.
1767                        // For example if the current root is `foo`
1768                        // and we get back a compound identifier expression `bar.baz`
1769                        // The full expression should be `foo.bar.baz` (i.e.
1770                        // a root with an access chain with 2 entries) and not
1771                        // `foo.(bar.baz)` (i.e. a root with an access chain with
1772                        // 1 entry).
1773                        Expr::CompoundFieldAccess { root, access_chain } => {
1774                            chain.push(AccessExpr::Dot(*root));
1775                            chain.extend(access_chain);
1776                        }
1777                        Expr::CompoundIdentifier(parts) => chain
1778                            .extend(parts.into_iter().map(Expr::Identifier).map(AccessExpr::Dot)),
1779                        expr => {
1780                            chain.push(AccessExpr::Dot(expr));
1781                        }
1782                    },
1783                }
1784            } else if !self.dialect.supports_partiql()
1785                && self.peek_token_ref().token == Token::LBracket
1786            {
1787                self.parse_multi_dim_subscript(&mut chain)?;
1788            } else {
1789                break;
1790            }
1791        }
1792
1793        let tok_index = self.get_current_index();
1794        if let Some(wildcard_token) = ending_wildcard {
1795            if !Self::is_all_ident(&root, &chain) {
1796                return self.expected("an identifier or a '*' after '.'", self.peek_token());
1797            };
1798            Ok(Expr::QualifiedWildcard(
1799                ObjectName::from(Self::exprs_to_idents(root, chain)?),
1800                AttachedToken(wildcard_token),
1801            ))
1802        } else if self.maybe_parse_outer_join_operator() {
1803            if !Self::is_all_ident(&root, &chain) {
1804                return self.expected_at("column identifier before (+)", tok_index);
1805            };
1806            let expr = if chain.is_empty() {
1807                root
1808            } else {
1809                Expr::CompoundIdentifier(Self::exprs_to_idents(root, chain)?)
1810            };
1811            Ok(Expr::OuterJoin(expr.into()))
1812        } else {
1813            Self::build_compound_expr(root, chain)
1814        }
1815    }
1816
1817    /// Combines a root expression and an access chain to form
1818    /// a compound expression, which may be an [Expr::CompoundFieldAccess]
1819    /// or another special-cased expression such as [Expr::CompoundIdentifier]
1820    /// or [Expr::OuterJoin].
1821    fn build_compound_expr(
1822        root: Expr,
1823        mut access_chain: Vec<AccessExpr>,
1824    ) -> Result<Expr, ParserError> {
1825        if access_chain.is_empty() {
1826            return Ok(root);
1827        }
1828
1829        if Self::is_all_ident(&root, &access_chain) {
1830            return Ok(Expr::CompoundIdentifier(Self::exprs_to_idents(
1831                root,
1832                access_chain,
1833            )?));
1834        }
1835
1836        // Flatten qualified function calls.
1837        // For example, the expression `a.b.c.foo(1,2,3)` should
1838        // represent a function called `a.b.c.foo`, rather than
1839        // a composite expression.
1840        if matches!(root, Expr::Identifier(_))
1841            && matches!(
1842                access_chain.last(),
1843                Some(AccessExpr::Dot(Expr::Function(_)))
1844            )
1845            && access_chain
1846                .iter()
1847                .rev()
1848                .skip(1) // All except the Function
1849                .all(|access| matches!(access, AccessExpr::Dot(Expr::Identifier(_))))
1850        {
1851            let Some(AccessExpr::Dot(Expr::Function(mut func))) = access_chain.pop() else {
1852                return parser_err!("expected function expression", root.span().start);
1853            };
1854
1855            let compound_func_name = [root]
1856                .into_iter()
1857                .chain(access_chain.into_iter().flat_map(|access| match access {
1858                    AccessExpr::Dot(expr) => Some(expr),
1859                    _ => None,
1860                }))
1861                .flat_map(|expr| match expr {
1862                    Expr::Identifier(ident) => Some(ident),
1863                    _ => None,
1864                })
1865                .map(ObjectNamePart::Identifier)
1866                .chain(func.name.0)
1867                .collect::<Vec<_>>();
1868            func.name = ObjectName(compound_func_name);
1869
1870            return Ok(Expr::Function(func));
1871        }
1872
1873        // Flatten qualified outer join expressions.
1874        // For example, the expression `T.foo(+)` should
1875        // represent an outer join on the column name `T.foo`
1876        // rather than a composite expression.
1877        if access_chain.len() == 1
1878            && matches!(
1879                access_chain.last(),
1880                Some(AccessExpr::Dot(Expr::OuterJoin(_)))
1881            )
1882        {
1883            let Some(AccessExpr::Dot(Expr::OuterJoin(inner_expr))) = access_chain.pop() else {
1884                return parser_err!("expected (+) expression", root.span().start);
1885            };
1886
1887            if !Self::is_all_ident(&root, &[]) {
1888                return parser_err!("column identifier before (+)", root.span().start);
1889            };
1890
1891            let token_start = root.span().start;
1892            let mut idents = Self::exprs_to_idents(root, vec![])?;
1893            match *inner_expr {
1894                Expr::CompoundIdentifier(suffix) => idents.extend(suffix),
1895                Expr::Identifier(suffix) => idents.push(suffix),
1896                _ => {
1897                    return parser_err!("column identifier before (+)", token_start);
1898                }
1899            }
1900
1901            return Ok(Expr::OuterJoin(Expr::CompoundIdentifier(idents).into()));
1902        }
1903
1904        Ok(Expr::CompoundFieldAccess {
1905            root: Box::new(root),
1906            access_chain,
1907        })
1908    }
1909
1910    fn keyword_to_modifier(k: Keyword) -> Option<ContextModifier> {
1911        match k {
1912            Keyword::LOCAL => Some(ContextModifier::Local),
1913            Keyword::GLOBAL => Some(ContextModifier::Global),
1914            Keyword::SESSION => Some(ContextModifier::Session),
1915            _ => None,
1916        }
1917    }
1918
1919    /// Check if the root is an identifier and all fields are identifiers.
1920    fn is_all_ident(root: &Expr, fields: &[AccessExpr]) -> bool {
1921        if !matches!(root, Expr::Identifier(_)) {
1922            return false;
1923        }
1924        fields
1925            .iter()
1926            .all(|x| matches!(x, AccessExpr::Dot(Expr::Identifier(_))))
1927    }
1928
1929    /// Convert a root and a list of fields to a list of identifiers.
1930    fn exprs_to_idents(root: Expr, fields: Vec<AccessExpr>) -> Result<Vec<Ident>, ParserError> {
1931        let mut idents = vec![];
1932        if let Expr::Identifier(root) = root {
1933            idents.push(root);
1934            for x in fields {
1935                if let AccessExpr::Dot(Expr::Identifier(ident)) = x {
1936                    idents.push(ident);
1937                } else {
1938                    return parser_err!(
1939                        format!("Expected identifier, found: {}", x),
1940                        x.span().start
1941                    );
1942                }
1943            }
1944            Ok(idents)
1945        } else {
1946            parser_err!(
1947                format!("Expected identifier, found: {}", root),
1948                root.span().start
1949            )
1950        }
1951    }
1952
1953    /// Returns true if the next tokens indicate the outer join operator `(+)`.
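        /// e.g. in `SELECT * FROM t1, t2 WHERE t1.id = t2.id (+)` (Oracle-style outer join syntax).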
1954    fn peek_outer_join_operator(&mut self) -> bool {
1955        if !self.dialect.supports_outer_join_operator() {
1956            return false;
1957        }
1958
1959        let [maybe_lparen, maybe_plus, maybe_rparen] = self.peek_tokens_ref();
1960        Token::LParen == maybe_lparen.token
1961            && Token::Plus == maybe_plus.token
1962            && Token::RParen == maybe_rparen.token
1963    }
1964
1965    /// If the next tokens indicate the outer join operator `(+)`, consume
1966    /// the tokens and return true.
1967    fn maybe_parse_outer_join_operator(&mut self) -> bool {
1968        self.dialect.supports_outer_join_operator()
1969            && self.consume_tokens(&[Token::LParen, Token::Plus, Token::RParen])
1970    }
1971
1972    pub fn parse_utility_options(&mut self) -> Result<Vec<UtilityOption>, ParserError> {
1973        self.expect_token(&Token::LParen)?;
1974        let options = self.parse_comma_separated(Self::parse_utility_option)?;
1975        self.expect_token(&Token::RParen)?;
1976
1977        Ok(options)
1978    }
1979
1980    fn parse_utility_option(&mut self) -> Result<UtilityOption, ParserError> {
1981        let name = self.parse_identifier()?;
1982
1983        let next_token = self.peek_token();
1984        if next_token == Token::Comma || next_token == Token::RParen {
1985            return Ok(UtilityOption { name, arg: None });
1986        }
1987        let arg = self.parse_expr()?;
1988
1989        Ok(UtilityOption {
1990            name,
1991            arg: Some(arg),
1992        })
1993    }
1994
1995    fn try_parse_expr_sub_query(&mut self) -> Result<Option<Expr>, ParserError> {
1996        if !self.peek_sub_query() {
1997            return Ok(None);
1998        }
1999
2000        Ok(Some(Expr::Subquery(self.parse_query()?)))
2001    }
2002
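        /// Tries to parse the remainder of a parenthesized lambda after the opening `(` has
        /// already been consumed, e.g. the `x, y) -> x + y` part of `(x, y) -> x + y`.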
2003    fn try_parse_lambda(&mut self) -> Result<Option<Expr>, ParserError> {
2004        if !self.dialect.supports_lambda_functions() {
2005            return Ok(None);
2006        }
2007        self.maybe_parse(|p| {
2008            let params = p.parse_comma_separated(|p| p.parse_identifier())?;
2009            p.expect_token(&Token::RParen)?;
2010            p.expect_token(&Token::Arrow)?;
2011            let expr = p.parse_expr()?;
2012            Ok(Expr::Lambda(LambdaFunction {
2013                params: OneOrManyWithParens::Many(params),
2014                body: Box::new(expr),
2015            }))
2016        })
2017    }
2018
2019    /// Tries to parse the body of an [ODBC function] call.
2020    /// i.e. without the enclosing braces
2021    ///
2022    /// ```sql
2023    /// fn myfunc(1,2,3)
2024    /// ```
2025    ///
2026    /// [ODBC function]: https://learn.microsoft.com/en-us/sql/odbc/reference/develop-app/scalar-function-calls?view=sql-server-2017
2027    fn maybe_parse_odbc_fn_body(&mut self) -> Result<Option<Expr>, ParserError> {
2028        self.maybe_parse(|p| {
2029            p.expect_keyword(Keyword::FN)?;
2030            let fn_name = p.parse_object_name(false)?;
2031            let mut fn_call = p.parse_function_call(fn_name)?;
2032            fn_call.uses_odbc_syntax = true;
2033            Ok(Expr::Function(fn_call))
2034        })
2035    }
2036
2037    pub fn parse_function(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2038        self.parse_function_call(name).map(Expr::Function)
2039    }
2040
2041    fn parse_function_call(&mut self, name: ObjectName) -> Result<Function, ParserError> {
2042        self.expect_token(&Token::LParen)?;
2043
2044        // Snowflake permits a subquery to be passed as an argument without
2045        // an enclosing set of parens if it's the only argument.
2046        if dialect_of!(self is SnowflakeDialect) && self.peek_sub_query() {
2047            let subquery = self.parse_query()?;
2048            self.expect_token(&Token::RParen)?;
2049            return Ok(Function {
2050                name,
2051                uses_odbc_syntax: false,
2052                parameters: FunctionArguments::None,
2053                args: FunctionArguments::Subquery(subquery),
2054                filter: None,
2055                null_treatment: None,
2056                over: None,
2057                within_group: vec![],
2058            });
2059        }
2060
2061        let mut args = self.parse_function_argument_list()?;
2062        let mut parameters = FunctionArguments::None;
2063        // ClickHouse aggregations support parametric functions like `HISTOGRAM(0.5, 0.6)(x, y)`
2064        // in which (0.5, 0.6) are the parameters and (x, y) are the arguments.
2065        if dialect_of!(self is ClickHouseDialect | GenericDialect)
2066            && self.consume_token(&Token::LParen)
2067        {
2068            parameters = FunctionArguments::List(args);
2069            args = self.parse_function_argument_list()?;
2070        }
2071
2072        let within_group = if self.parse_keywords(&[Keyword::WITHIN, Keyword::GROUP]) {
2073            self.expect_token(&Token::LParen)?;
2074            self.expect_keywords(&[Keyword::ORDER, Keyword::BY])?;
2075            let order_by = self.parse_comma_separated(Parser::parse_order_by_expr)?;
2076            self.expect_token(&Token::RParen)?;
2077            order_by
2078        } else {
2079            vec![]
2080        };
2081
2082        let filter = if self.dialect.supports_filter_during_aggregation()
2083            && self.parse_keyword(Keyword::FILTER)
2084            && self.consume_token(&Token::LParen)
2085            && self.parse_keyword(Keyword::WHERE)
2086        {
2087            let filter = Some(Box::new(self.parse_expr()?));
2088            self.expect_token(&Token::RParen)?;
2089            filter
2090        } else {
2091            None
2092        };
2093
2094        // Syntax for null treatment shows up either in the args list
2095        // or after the function call, but not both.
2096        let null_treatment = if args
2097            .clauses
2098            .iter()
2099            .all(|clause| !matches!(clause, FunctionArgumentClause::IgnoreOrRespectNulls(_)))
2100        {
2101            self.parse_null_treatment()?
2102        } else {
2103            None
2104        };
2105
2106        let over = if self.parse_keyword(Keyword::OVER) {
2107            if self.consume_token(&Token::LParen) {
2108                let window_spec = self.parse_window_spec()?;
2109                Some(WindowType::WindowSpec(window_spec))
2110            } else {
2111                Some(WindowType::NamedWindow(self.parse_identifier()?))
2112            }
2113        } else {
2114            None
2115        };
2116
2117        Ok(Function {
2118            name,
2119            uses_odbc_syntax: false,
2120            parameters,
2121            args: FunctionArguments::List(args),
2122            null_treatment,
2123            filter,
2124            over,
2125            within_group,
2126        })
2127    }
2128
2129    /// Optionally parses a null treatment clause.
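        /// e.g. `IGNORE NULLS` or `RESPECT NULLS`.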
2130    fn parse_null_treatment(&mut self) -> Result<Option<NullTreatment>, ParserError> {
2131        match self.parse_one_of_keywords(&[Keyword::RESPECT, Keyword::IGNORE]) {
2132            Some(keyword) => {
2133                self.expect_keyword_is(Keyword::NULLS)?;
2134
2135                Ok(match keyword {
2136                    Keyword::RESPECT => Some(NullTreatment::RespectNulls),
2137                    Keyword::IGNORE => Some(NullTreatment::IgnoreNulls),
2138                    _ => None,
2139                })
2140            }
2141            None => Ok(None),
2142        }
2143    }
2144
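        /// Parses the remainder of a time-keyword function call such as `CURRENT_TIMESTAMP`
        /// or `CURRENT_TIME(3)`; the parenthesized argument list is optional.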
2145    pub fn parse_time_functions(&mut self, name: ObjectName) -> Result<Expr, ParserError> {
2146        let args = if self.consume_token(&Token::LParen) {
2147            FunctionArguments::List(self.parse_function_argument_list()?)
2148        } else {
2149            FunctionArguments::None
2150        };
2151        Ok(Expr::Function(Function {
2152            name,
2153            uses_odbc_syntax: false,
2154            parameters: FunctionArguments::None,
2155            args,
2156            filter: None,
2157            over: None,
2158            null_treatment: None,
2159            within_group: vec![],
2160        }))
2161    }
2162
2163    pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2164        let next_token = self.next_token();
2165        match &next_token.token {
2166            Token::Word(w) => match w.keyword {
2167                Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2168                Keyword::RANGE => Ok(WindowFrameUnits::Range),
2169                Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2170                _ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2171            },
2172            _ => self.expected("ROWS, RANGE, GROUPS", next_token),
2173        }
2174    }
2175
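        /// Parses a window frame clause, e.g. `ROWS BETWEEN 1 PRECEDING AND CURRENT ROW`
        /// or the single-bound form `RANGE UNBOUNDED PRECEDING`.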
2176    pub fn parse_window_frame(&mut self) -> Result<WindowFrame, ParserError> {
2177        let units = self.parse_window_frame_units()?;
2178        let (start_bound, end_bound) = if self.parse_keyword(Keyword::BETWEEN) {
2179            let start_bound = self.parse_window_frame_bound()?;
2180            self.expect_keyword_is(Keyword::AND)?;
2181            let end_bound = Some(self.parse_window_frame_bound()?);
2182            (start_bound, end_bound)
2183        } else {
2184            (self.parse_window_frame_bound()?, None)
2185        };
2186        Ok(WindowFrame {
2187            units,
2188            start_bound,
2189            end_bound,
2190        })
2191    }
2192
2193    /// Parse `CURRENT ROW` or `{ <positive number> | UNBOUNDED } { PRECEDING | FOLLOWING }`
2194    pub fn parse_window_frame_bound(&mut self) -> Result<WindowFrameBound, ParserError> {
2195        if self.parse_keywords(&[Keyword::CURRENT, Keyword::ROW]) {
2196            Ok(WindowFrameBound::CurrentRow)
2197        } else {
2198            let rows = if self.parse_keyword(Keyword::UNBOUNDED) {
2199                None
2200            } else {
2201                Some(Box::new(match self.peek_token().token {
2202                    Token::SingleQuotedString(_) => self.parse_interval()?,
2203                    _ => self.parse_expr()?,
2204                }))
2205            };
2206            if self.parse_keyword(Keyword::PRECEDING) {
2207                Ok(WindowFrameBound::Preceding(rows))
2208            } else if self.parse_keyword(Keyword::FOLLOWING) {
2209                Ok(WindowFrameBound::Following(rows))
2210            } else {
2211                self.expected("PRECEDING or FOLLOWING", self.peek_token())
2212            }
2213        }
2214    }
2215
2216    /// Parse a group by expr. Group by expr can be one of group sets, roll up, cube, or simple expr.
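        /// e.g. `GROUPING SETS ((a), (b))`, `ROLLUP (a, b)`, `CUBE (a, b)`, `()`, or a plain expression.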
2217    fn parse_group_by_expr(&mut self) -> Result<Expr, ParserError> {
2218        if self.dialect.supports_group_by_expr() {
2219            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
2220                self.expect_token(&Token::LParen)?;
2221                let result = self.parse_comma_separated(|p| p.parse_tuple(false, true))?;
2222                self.expect_token(&Token::RParen)?;
2223                Ok(Expr::GroupingSets(result))
2224            } else if self.parse_keyword(Keyword::CUBE) {
2225                self.expect_token(&Token::LParen)?;
2226                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2227                self.expect_token(&Token::RParen)?;
2228                Ok(Expr::Cube(result))
2229            } else if self.parse_keyword(Keyword::ROLLUP) {
2230                self.expect_token(&Token::LParen)?;
2231                let result = self.parse_comma_separated(|p| p.parse_tuple(true, true))?;
2232                self.expect_token(&Token::RParen)?;
2233                Ok(Expr::Rollup(result))
2234            } else if self.consume_tokens(&[Token::LParen, Token::RParen]) {
2235                // PostgreSQL allows an empty tuple as a group by expression,
2236                // e.g. `GROUP BY (), name`. See the GROUP BY clause section in
2237                // [PostgreSQL](https://www.postgresql.org/docs/16/sql-select.html)
2238                Ok(Expr::Tuple(vec![]))
2239            } else {
2240                self.parse_expr()
2241            }
2242        } else {
2243            // TODO parse rollup for other dialects
2244            self.parse_expr()
2245        }
2246    }
2247
2248    /// Parse a tuple with `(` and `)`.
2249    /// If `lift_singleton` is true, a bare expression without parentheses is accepted and lifted into a tuple of length 1; otherwise the parentheses are required.
2250    /// If `allow_empty` is true, then an empty tuple is allowed.
2251    fn parse_tuple(
2252        &mut self,
2253        lift_singleton: bool,
2254        allow_empty: bool,
2255    ) -> Result<Vec<Expr>, ParserError> {
2256        if lift_singleton {
2257            if self.consume_token(&Token::LParen) {
2258                let result = if allow_empty && self.consume_token(&Token::RParen) {
2259                    vec![]
2260                } else {
2261                    let result = self.parse_comma_separated(Parser::parse_expr)?;
2262                    self.expect_token(&Token::RParen)?;
2263                    result
2264                };
2265                Ok(result)
2266            } else {
2267                Ok(vec![self.parse_expr()?])
2268            }
2269        } else {
2270            self.expect_token(&Token::LParen)?;
2271            let result = if allow_empty && self.consume_token(&Token::RParen) {
2272                vec![]
2273            } else {
2274                let result = self.parse_comma_separated(Parser::parse_expr)?;
2275                self.expect_token(&Token::RParen)?;
2276                result
2277            };
2278            Ok(result)
2279        }
2280    }
2281
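        /// Parses a `CASE` expression, e.g. `CASE WHEN x > 0 THEN 'pos' ELSE 'other' END`
        /// or, with an operand, `CASE x WHEN 1 THEN 'one' END`.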
2282    pub fn parse_case_expr(&mut self) -> Result<Expr, ParserError> {
2283        let case_token = AttachedToken(self.get_current_token().clone());
2284        let mut operand = None;
2285        if !self.parse_keyword(Keyword::WHEN) {
2286            operand = Some(Box::new(self.parse_expr()?));
2287            self.expect_keyword_is(Keyword::WHEN)?;
2288        }
2289        let mut conditions = vec![];
2290        loop {
2291            let condition = self.parse_expr()?;
2292            self.expect_keyword_is(Keyword::THEN)?;
2293            let result = self.parse_expr()?;
2294            conditions.push(CaseWhen { condition, result });
2295            if !self.parse_keyword(Keyword::WHEN) {
2296                break;
2297            }
2298        }
2299        let else_result = if self.parse_keyword(Keyword::ELSE) {
2300            Some(Box::new(self.parse_expr()?))
2301        } else {
2302            None
2303        };
2304        let end_token = AttachedToken(self.expect_keyword(Keyword::END)?);
2305        Ok(Expr::Case {
2306            case_token,
2307            end_token,
2308            operand,
2309            conditions,
2310            else_result,
2311        })
2312    }
2313
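        /// Parses an optional `FORMAT <value> [AT TIME ZONE <tz>]` clause of a `CAST`,
        /// e.g. `FORMAT 'YYYY-MM-DD'` (illustrative; any value accepted by `parse_value` is allowed).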
2314    pub fn parse_optional_cast_format(&mut self) -> Result<Option<CastFormat>, ParserError> {
2315        if self.parse_keyword(Keyword::FORMAT) {
2316            let value = self.parse_value()?.value;
2317            match self.parse_optional_time_zone()? {
2318                Some(tz) => Ok(Some(CastFormat::ValueAtTimeZone(value, tz))),
2319                None => Ok(Some(CastFormat::Value(value))),
2320            }
2321        } else {
2322            Ok(None)
2323        }
2324    }
2325
2326    pub fn parse_optional_time_zone(&mut self) -> Result<Option<Value>, ParserError> {
2327        if self.parse_keywords(&[Keyword::AT, Keyword::TIME, Keyword::ZONE]) {
2328            self.parse_value().map(|v| Some(v.value))
2329        } else {
2330            Ok(None)
2331        }
2332    }
2333
2334    /// mssql-like convert function
2335    /// Parse an MSSQL-style `CONVERT` function, where the target type precedes the expression.
2336        self.expect_token(&Token::LParen)?;
2337        let data_type = self.parse_data_type()?;
2338        self.expect_token(&Token::Comma)?;
2339        let expr = self.parse_expr()?;
2340        let styles = if self.consume_token(&Token::Comma) {
2341            self.parse_comma_separated(Parser::parse_expr)?
2342        } else {
2343            Default::default()
2344        };
2345        self.expect_token(&Token::RParen)?;
2346        Ok(Expr::Convert {
2347            is_try,
2348            expr: Box::new(expr),
2349            data_type: Some(data_type),
2350            charset: None,
2351            target_before_value: true,
2352            styles,
2353        })
2354    }
2355
2356    /// Parse a SQL CONVERT function:
2357    ///  - `CONVERT('héhé' USING utf8mb4)` (MySQL)
2358    ///  - `CONVERT('héhé', CHAR CHARACTER SET utf8mb4)` (MySQL)
2359    ///  - `CONVERT(DECIMAL(10, 5), 42)` (MSSQL) - the type comes first
2360    pub fn parse_convert_expr(&mut self, is_try: bool) -> Result<Expr, ParserError> {
2361        if self.dialect.convert_type_before_value() {
2362            return self.parse_mssql_convert(is_try);
2363        }
2364        self.expect_token(&Token::LParen)?;
2365        let expr = self.parse_expr()?;
2366        if self.parse_keyword(Keyword::USING) {
2367            let charset = self.parse_object_name(false)?;
2368            self.expect_token(&Token::RParen)?;
2369            return Ok(Expr::Convert {
2370                is_try,
2371                expr: Box::new(expr),
2372                data_type: None,
2373                charset: Some(charset),
2374                target_before_value: false,
2375                styles: vec![],
2376            });
2377        }
2378        self.expect_token(&Token::Comma)?;
2379        let data_type = self.parse_data_type()?;
2380        let charset = if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
2381            Some(self.parse_object_name(false)?)
2382        } else {
2383            None
2384        };
2385        self.expect_token(&Token::RParen)?;
2386        Ok(Expr::Convert {
2387            is_try,
2388            expr: Box::new(expr),
2389            data_type: Some(data_type),
2390            charset,
2391            target_before_value: false,
2392            styles: vec![],
2393        })
2394    }
2395
2396    /// Parse a SQL CAST function e.g. `CAST(expr AS FLOAT)`
2397    pub fn parse_cast_expr(&mut self, kind: CastKind) -> Result<Expr, ParserError> {
2398        self.expect_token(&Token::LParen)?;
2399        let expr = self.parse_expr()?;
2400        self.expect_keyword_is(Keyword::AS)?;
2401        let data_type = self.parse_data_type()?;
2402        let format = self.parse_optional_cast_format()?;
2403        self.expect_token(&Token::RParen)?;
2404        Ok(Expr::Cast {
2405            kind,
2406            expr: Box::new(expr),
2407            data_type,
2408            format,
2409        })
2410    }
2411
2412    /// Parse a SQL EXISTS expression e.g. `WHERE EXISTS(SELECT ...)`.
2413    pub fn parse_exists_expr(&mut self, negated: bool) -> Result<Expr, ParserError> {
2414        self.expect_token(&Token::LParen)?;
2415        let exists_node = Expr::Exists {
2416            negated,
2417            subquery: self.parse_query()?,
2418        };
2419        self.expect_token(&Token::RParen)?;
2420        Ok(exists_node)
2421    }
2422
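        /// Parses the parenthesized body of an `EXTRACT` expression, e.g. `EXTRACT(YEAR FROM order_date)`;
        /// Snowflake and the generic dialect also accept a comma, e.g. `EXTRACT(YEAR, order_date)`.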
2423    pub fn parse_extract_expr(&mut self) -> Result<Expr, ParserError> {
2424        self.expect_token(&Token::LParen)?;
2425        let field = self.parse_date_time_field()?;
2426
2427        let syntax = if self.parse_keyword(Keyword::FROM) {
2428            ExtractSyntax::From
2429        } else if self.consume_token(&Token::Comma)
2430            && dialect_of!(self is SnowflakeDialect | GenericDialect)
2431        {
2432            ExtractSyntax::Comma
2433        } else {
2434            return Err(ParserError::ParserError(
2435                "Expected 'FROM' or ','".to_string(),
2436            ));
2437        };
2438
2439        let expr = self.parse_expr()?;
2440        self.expect_token(&Token::RParen)?;
2441        Ok(Expr::Extract {
2442            field,
2443            expr: Box::new(expr),
2444            syntax,
2445        })
2446    }
2447
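        /// Parses the parenthesized body of a `CEIL`/`FLOOR` expression: `CEIL(expr)`,
        /// `CEIL(expr TO DAY)`, or the scale form `FLOOR(expr, 2)`.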
2448    pub fn parse_ceil_floor_expr(&mut self, is_ceil: bool) -> Result<Expr, ParserError> {
2449        self.expect_token(&Token::LParen)?;
2450        let expr = self.parse_expr()?;
2451        // Parse `CEIL/FLOOR(expr)`
2452        let field = if self.parse_keyword(Keyword::TO) {
2453            // Parse `CEIL/FLOOR(expr TO DateTimeField)`
2454            CeilFloorKind::DateTimeField(self.parse_date_time_field()?)
2455        } else if self.consume_token(&Token::Comma) {
2456            // Parse `CEIL/FLOOR(expr, scale)`
2457            match self.parse_value()?.value {
2458                Value::Number(n, s) => CeilFloorKind::Scale(Value::Number(n, s)),
2459                _ => {
2460                    return Err(ParserError::ParserError(
2461                        "Scale field can only be of number type".to_string(),
2462                    ))
2463                }
2464            }
2465        } else {
2466            CeilFloorKind::DateTimeField(DateTimeField::NoDateTime)
2467        };
2468        self.expect_token(&Token::RParen)?;
2469        if is_ceil {
2470            Ok(Expr::Ceil {
2471                expr: Box::new(expr),
2472                field,
2473            })
2474        } else {
2475            Ok(Expr::Floor {
2476                expr: Box::new(expr),
2477                field,
2478            })
2479        }
2480    }
2481
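        /// Parses `POSITION('@' IN field)`; if the special `IN` form does not match, falls back
        /// to parsing `position` as an ordinary function call (as Snowflake allows).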
2482    pub fn parse_position_expr(&mut self, ident: Ident) -> Result<Expr, ParserError> {
2483        let between_prec = self.dialect.prec_value(Precedence::Between);
2484        let position_expr = self.maybe_parse(|p| {
2485            // PARSE SELECT POSITION('@' in field)
2486            p.expect_token(&Token::LParen)?;
2487
2488            // Parse the subexpr till the IN keyword
2489            let expr = p.parse_subexpr(between_prec)?;
2490            p.expect_keyword_is(Keyword::IN)?;
2491            let from = p.parse_expr()?;
2492            p.expect_token(&Token::RParen)?;
2493            Ok(Expr::Position {
2494                expr: Box::new(expr),
2495                r#in: Box::new(from),
2496            })
2497        })?;
2498        match position_expr {
2499            Some(expr) => Ok(expr),
2500            // Snowflake supports `position` as an ordinary function call
2501            // without the special `IN` syntax.
2502            None => self.parse_function(ObjectName::from(vec![ident])),
2503        }
2504    }
2505
2506    // { SUBSTRING | SUBSTR } (<EXPR> [FROM 1] [FOR 3])
2507    pub fn parse_substring(&mut self) -> Result<Expr, ParserError> {
2508        let shorthand = match self.expect_one_of_keywords(&[Keyword::SUBSTR, Keyword::SUBSTRING])? {
2509            Keyword::SUBSTR => true,
2510            Keyword::SUBSTRING => false,
2511            _ => {
2512                self.prev_token();
2513                return self.expected("SUBSTR or SUBSTRING", self.peek_token());
2514            }
2515        };
2516        self.expect_token(&Token::LParen)?;
2517        let expr = self.parse_expr()?;
2518        let mut from_expr = None;
2519        let special = self.consume_token(&Token::Comma);
2520        if special || self.parse_keyword(Keyword::FROM) {
2521            from_expr = Some(self.parse_expr()?);
2522        }
2523
2524        let mut to_expr = None;
2525        if self.parse_keyword(Keyword::FOR) || self.consume_token(&Token::Comma) {
2526            to_expr = Some(self.parse_expr()?);
2527        }
2528        self.expect_token(&Token::RParen)?;
2529
2530        Ok(Expr::Substring {
2531            expr: Box::new(expr),
2532            substring_from: from_expr.map(Box::new),
2533            substring_for: to_expr.map(Box::new),
2534            special,
2535            shorthand,
2536        })
2537    }
2538
2539    pub fn parse_overlay_expr(&mut self) -> Result<Expr, ParserError> {
2540        // PARSE OVERLAY (EXPR PLACING EXPR FROM 1 [FOR 3])
2541        self.expect_token(&Token::LParen)?;
2542        let expr = self.parse_expr()?;
2543        self.expect_keyword_is(Keyword::PLACING)?;
2544        let what_expr = self.parse_expr()?;
2545        self.expect_keyword_is(Keyword::FROM)?;
2546        let from_expr = self.parse_expr()?;
2547        let mut for_expr = None;
2548        if self.parse_keyword(Keyword::FOR) {
2549            for_expr = Some(self.parse_expr()?);
2550        }
2551        self.expect_token(&Token::RParen)?;
2552
2553        Ok(Expr::Overlay {
2554            expr: Box::new(expr),
2555            overlay_what: Box::new(what_expr),
2556            overlay_from: Box::new(from_expr),
2557            overlay_for: for_expr.map(Box::new),
2558        })
2559    }
2560
2561    /// ```sql
2562    /// TRIM ([BOTH | LEADING | TRAILING] ['text' FROM] 'text')
2563    /// TRIM ('text')
2564    /// TRIM(<expr> [, characters]) -- only DuckDB, Snowflake, or BigQuery
2565    /// ```
2566    pub fn parse_trim_expr(&mut self) -> Result<Expr, ParserError> {
2567        self.expect_token(&Token::LParen)?;
2568        let mut trim_where = None;
2569        if let Token::Word(word) = self.peek_token().token {
2570            if [Keyword::BOTH, Keyword::LEADING, Keyword::TRAILING].contains(&word.keyword) {
2571                trim_where = Some(self.parse_trim_where()?);
2572            }
2573        }
2574        let expr = self.parse_expr()?;
2575        if self.parse_keyword(Keyword::FROM) {
2576            let trim_what = Box::new(expr);
2577            let expr = self.parse_expr()?;
2578            self.expect_token(&Token::RParen)?;
2579            Ok(Expr::Trim {
2580                expr: Box::new(expr),
2581                trim_where,
2582                trim_what: Some(trim_what),
2583                trim_characters: None,
2584            })
2585        } else if self.consume_token(&Token::Comma)
2586            && dialect_of!(self is DuckDbDialect | SnowflakeDialect | BigQueryDialect | GenericDialect)
2587        {
2588            let characters = self.parse_comma_separated(Parser::parse_expr)?;
2589            self.expect_token(&Token::RParen)?;
2590            Ok(Expr::Trim {
2591                expr: Box::new(expr),
2592                trim_where: None,
2593                trim_what: None,
2594                trim_characters: Some(characters),
2595            })
2596        } else {
2597            self.expect_token(&Token::RParen)?;
2598            Ok(Expr::Trim {
2599                expr: Box::new(expr),
2600                trim_where,
2601                trim_what: None,
2602                trim_characters: None,
2603            })
2604        }
2605    }
2606
2607    pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2608        let next_token = self.next_token();
2609        match &next_token.token {
2610            Token::Word(w) => match w.keyword {
2611                Keyword::BOTH => Ok(TrimWhereField::Both),
2612                Keyword::LEADING => Ok(TrimWhereField::Leading),
2613                Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2614                _ => self.expected("trim_where field", next_token)?,
2615            },
2616            _ => self.expected("trim_where field", next_token),
2617        }
2618    }
2619
2620    /// Parses an array expression `[ex1, ex2, ..]`
2621    /// If `named` is `true`, the array came from an expression like `ARRAY[ex1, ex2]`.
2622    pub fn parse_array_expr(&mut self, named: bool) -> Result<Expr, ParserError> {
2623        let exprs = self.parse_comma_separated0(Parser::parse_expr, Token::RBracket)?;
2624        self.expect_token(&Token::RBracket)?;
2625        Ok(Expr::Array(Array { elem: exprs, named }))
2626    }
2627
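        /// Parses an optional `LISTAGG` overflow clause, e.g. `ON OVERFLOW ERROR`
        /// or `ON OVERFLOW TRUNCATE '...' WITH COUNT`.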
2628    pub fn parse_listagg_on_overflow(&mut self) -> Result<Option<ListAggOnOverflow>, ParserError> {
2629        if self.parse_keywords(&[Keyword::ON, Keyword::OVERFLOW]) {
2630            if self.parse_keyword(Keyword::ERROR) {
2631                Ok(Some(ListAggOnOverflow::Error))
2632            } else {
2633                self.expect_keyword_is(Keyword::TRUNCATE)?;
2634                let filler = match self.peek_token().token {
2635                    Token::Word(w)
2636                        if w.keyword == Keyword::WITH || w.keyword == Keyword::WITHOUT =>
2637                    {
2638                        None
2639                    }
2640                    Token::SingleQuotedString(_)
2641                    | Token::EscapedStringLiteral(_)
2642                    | Token::UnicodeStringLiteral(_)
2643                    | Token::NationalStringLiteral(_)
2644                    | Token::HexStringLiteral(_) => Some(Box::new(self.parse_expr()?)),
2645                    _ => self.expected(
2646                        "either filler, WITH, or WITHOUT in LISTAGG",
2647                        self.peek_token(),
2648                    )?,
2649                };
2650                let with_count = self.parse_keyword(Keyword::WITH);
2651                if !with_count && !self.parse_keyword(Keyword::WITHOUT) {
2652                    self.expected("either WITH or WITHOUT in LISTAGG", self.peek_token())?;
2653                }
2654                self.expect_keyword_is(Keyword::COUNT)?;
2655                Ok(Some(ListAggOnOverflow::Truncate { filler, with_count }))
2656            }
2657        } else {
2658            Ok(None)
2659        }
2660    }
2661
2662    // This function parses date/time fields for the EXTRACT function-like
2663    // operator, interval qualifiers, and the ceil/floor operations.
2664    // EXTRACT supports a wider set of date/time fields than interval qualifiers,
2665    // so this function may need to be split in two.
2666    pub fn parse_date_time_field(&mut self) -> Result<DateTimeField, ParserError> {
2667        let next_token = self.next_token();
2668        match &next_token.token {
2669            Token::Word(w) => match w.keyword {
2670                Keyword::YEAR => Ok(DateTimeField::Year),
2671                Keyword::YEARS => Ok(DateTimeField::Years),
2672                Keyword::MONTH => Ok(DateTimeField::Month),
2673                Keyword::MONTHS => Ok(DateTimeField::Months),
2674                Keyword::WEEK => {
2675                    let week_day = if dialect_of!(self is BigQueryDialect | GenericDialect)
2676                        && self.consume_token(&Token::LParen)
2677                    {
2678                        let week_day = self.parse_identifier()?;
2679                        self.expect_token(&Token::RParen)?;
2680                        Some(week_day)
2681                    } else {
2682                        None
2683                    };
2684                    Ok(DateTimeField::Week(week_day))
2685                }
2686                Keyword::WEEKS => Ok(DateTimeField::Weeks),
2687                Keyword::DAY => Ok(DateTimeField::Day),
2688                Keyword::DAYOFWEEK => Ok(DateTimeField::DayOfWeek),
2689                Keyword::DAYOFYEAR => Ok(DateTimeField::DayOfYear),
2690                Keyword::DAYS => Ok(DateTimeField::Days),
2691                Keyword::DATE => Ok(DateTimeField::Date),
2692                Keyword::DATETIME => Ok(DateTimeField::Datetime),
2693                Keyword::HOUR => Ok(DateTimeField::Hour),
2694                Keyword::HOURS => Ok(DateTimeField::Hours),
2695                Keyword::MINUTE => Ok(DateTimeField::Minute),
2696                Keyword::MINUTES => Ok(DateTimeField::Minutes),
2697                Keyword::SECOND => Ok(DateTimeField::Second),
2698                Keyword::SECONDS => Ok(DateTimeField::Seconds),
2699                Keyword::CENTURY => Ok(DateTimeField::Century),
2700                Keyword::DECADE => Ok(DateTimeField::Decade),
2701                Keyword::DOY => Ok(DateTimeField::Doy),
2702                Keyword::DOW => Ok(DateTimeField::Dow),
2703                Keyword::EPOCH => Ok(DateTimeField::Epoch),
2704                Keyword::ISODOW => Ok(DateTimeField::Isodow),
2705                Keyword::ISOYEAR => Ok(DateTimeField::Isoyear),
2706                Keyword::ISOWEEK => Ok(DateTimeField::IsoWeek),
2707                Keyword::JULIAN => Ok(DateTimeField::Julian),
2708                Keyword::MICROSECOND => Ok(DateTimeField::Microsecond),
2709                Keyword::MICROSECONDS => Ok(DateTimeField::Microseconds),
2710                Keyword::MILLENIUM => Ok(DateTimeField::Millenium),
2711                Keyword::MILLENNIUM => Ok(DateTimeField::Millennium),
2712                Keyword::MILLISECOND => Ok(DateTimeField::Millisecond),
2713                Keyword::MILLISECONDS => Ok(DateTimeField::Milliseconds),
2714                Keyword::NANOSECOND => Ok(DateTimeField::Nanosecond),
2715                Keyword::NANOSECONDS => Ok(DateTimeField::Nanoseconds),
2716                Keyword::QUARTER => Ok(DateTimeField::Quarter),
2717                Keyword::TIME => Ok(DateTimeField::Time),
2718                Keyword::TIMEZONE => Ok(DateTimeField::Timezone),
2719                Keyword::TIMEZONE_ABBR => Ok(DateTimeField::TimezoneAbbr),
2720                Keyword::TIMEZONE_HOUR => Ok(DateTimeField::TimezoneHour),
2721                Keyword::TIMEZONE_MINUTE => Ok(DateTimeField::TimezoneMinute),
2722                Keyword::TIMEZONE_REGION => Ok(DateTimeField::TimezoneRegion),
2723                _ if self.dialect.allow_extract_custom() => {
2724                    self.prev_token();
2725                    let custom = self.parse_identifier()?;
2726                    Ok(DateTimeField::Custom(custom))
2727                }
2728                _ => self.expected("date/time field", next_token),
2729            },
2730            Token::SingleQuotedString(_) if self.dialect.allow_extract_single_quotes() => {
2731                self.prev_token();
2732                let custom = self.parse_identifier()?;
2733                Ok(DateTimeField::Custom(custom))
2734            }
2735            _ => self.expected("date/time field", next_token),
2736        }
2737    }
2738
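    /// Parses an expression prefixed by `NOT`, treating `NOT EXISTS (subquery)`
    /// as a special case and otherwise producing a unary `NOT` expression.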
2739    pub fn parse_not(&mut self) -> Result<Expr, ParserError> {
2740        match self.peek_token().token {
2741            Token::Word(w) => match w.keyword {
2742                Keyword::EXISTS => {
2743                    let negated = true;
2744                    let _ = self.parse_keyword(Keyword::EXISTS);
2745                    self.parse_exists_expr(negated)
2746                }
2747                _ => Ok(Expr::UnaryOp {
2748                    op: UnaryOperator::Not,
2749                    expr: Box::new(
2750                        self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?,
2751                    ),
2752                }),
2753            },
2754            _ => Ok(Expr::UnaryOp {
2755                op: UnaryOperator::Not,
2756                expr: Box::new(self.parse_subexpr(self.dialect.prec_value(Precedence::UnaryNot))?),
2757            }),
2758        }
2759    }
2760
2761    /// Parse expression types that start with a left brace '{'.
2762    /// Examples:
2763    /// ```sql
2764    /// -- Dictionary expr.
2765    /// {'key1': 'value1', 'key2': 'value2'}
2766    ///
2767    /// -- Function call using the ODBC syntax.
2768    /// { fn CONCAT('foo', 'bar') }
2769    /// ```
2770    fn parse_lbrace_expr(&mut self) -> Result<Expr, ParserError> {
2771        let token = self.expect_token(&Token::LBrace)?;
2772
2773        if let Some(fn_expr) = self.maybe_parse_odbc_fn_body()? {
2774            self.expect_token(&Token::RBrace)?;
2775            return Ok(fn_expr);
2776        }
2777
2778        if self.dialect.supports_dictionary_syntax() {
2779            self.prev_token(); // Put back the '{'
2780            return self.parse_dictionary();
2781        }
2782
2783        self.expected("an expression", token)
2784    }
2785
2786    /// Parses fulltext expressions [`sqlparser::ast::Expr::MatchAgainst`]
2787    ///
2788    /// # Errors
2789    /// This method will raise an error if the column list is empty or contains invalid identifiers,
2790    /// if the match expression is not a literal string, or if the search modifier is not valid.
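    ///
    /// An illustrative MySQL-style example of the expected syntax:
    /// ```sql
    /// MATCH (title, body) AGAINST ('search terms' IN NATURAL LANGUAGE MODE)
    /// ```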
2791    pub fn parse_match_against(&mut self) -> Result<Expr, ParserError> {
2792        let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
2793
2794        self.expect_keyword_is(Keyword::AGAINST)?;
2795
2796        self.expect_token(&Token::LParen)?;
2797
2798        // MySQL is very permissive about the value, so we cannot fully validate it at the syntax level.
2799        let match_value = self.parse_value()?.value;
2800
2801        let in_natural_language_mode_keywords = &[
2802            Keyword::IN,
2803            Keyword::NATURAL,
2804            Keyword::LANGUAGE,
2805            Keyword::MODE,
2806        ];
2807
2808        let with_query_expansion_keywords = &[Keyword::WITH, Keyword::QUERY, Keyword::EXPANSION];
2809
2810        let in_boolean_mode_keywords = &[Keyword::IN, Keyword::BOOLEAN, Keyword::MODE];
2811
2812        let opt_search_modifier = if self.parse_keywords(in_natural_language_mode_keywords) {
2813            if self.parse_keywords(with_query_expansion_keywords) {
2814                Some(SearchModifier::InNaturalLanguageModeWithQueryExpansion)
2815            } else {
2816                Some(SearchModifier::InNaturalLanguageMode)
2817            }
2818        } else if self.parse_keywords(in_boolean_mode_keywords) {
2819            Some(SearchModifier::InBooleanMode)
2820        } else if self.parse_keywords(with_query_expansion_keywords) {
2821            Some(SearchModifier::WithQueryExpansion)
2822        } else {
2823            None
2824        };
2825
2826        self.expect_token(&Token::RParen)?;
2827
2828        Ok(Expr::MatchAgainst {
2829            columns,
2830            match_value,
2831            opt_search_modifier,
2832        })
2833    }
2834
2835    /// Parse an `INTERVAL` expression.
2836    ///
2837    /// Some syntactically valid intervals:
2838    ///
2839    /// ```sql
2840    ///   1. INTERVAL '1' DAY
2841    ///   2. INTERVAL '1-1' YEAR TO MONTH
2842    ///   3. INTERVAL '1' SECOND
2843    ///   4. INTERVAL '1:1:1.1' HOUR (5) TO SECOND (5)
2844    ///   5. INTERVAL '1.1' SECOND (2, 2)
2845    ///   6. INTERVAL '1:1' HOUR (5) TO MINUTE (5)
2846    ///   7. (MySql & BigQuery only): INTERVAL 1 DAY
2847    /// ```
2848    ///
2849    /// Note that we do not currently attempt to parse the quoted value.
2850    pub fn parse_interval(&mut self) -> Result<Expr, ParserError> {
2851        // The SQL standard allows an optional sign before the value string, but
2852        // it is not clear if any implementations support that syntax, so we
2853        // don't currently try to parse it. (The sign can instead be included
2854        // inside the value string.)
2855
2856        // to match the different flavours of INTERVAL syntax, we only allow expressions
2857        // if the dialect requires an interval qualifier,
2858        // see https://github.com/sqlparser-rs/sqlparser-rs/pull/1398 for more details
2859        let value = if self.dialect.require_interval_qualifier() {
2860            // parse a whole expression so `INTERVAL 1 + 1 DAY` is valid
2861            self.parse_expr()?
2862        } else {
2863            // parse a prefix expression so `INTERVAL 1 DAY` is valid, but `INTERVAL 1 + 1 DAY` is not
2864            // this also means that `INTERVAL '5 days' > INTERVAL '1 day'` is treated properly
2865            self.parse_prefix()?
2866        };
2867
2868        // Following the string literal is a qualifier which indicates the units
2869        // of the duration specified in the string literal.
2870        //
2871        // Note that PostgreSQL allows omitting the qualifier, so we provide
2872        // this more general implementation.
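        // e.g. PostgreSQL accepts `INTERVAL '1 day'` with the unit inside the string
        // and no qualifier, whereas dialects that require a qualifier expect a form
        // such as `INTERVAL '1' DAY`.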
2873        let leading_field = if self.next_token_is_temporal_unit() {
2874            Some(self.parse_date_time_field()?)
2875        } else if self.dialect.require_interval_qualifier() {
2876            return parser_err!(
2877                "INTERVAL requires a unit after the literal value",
2878                self.peek_token().span.start
2879            );
2880        } else {
2881            None
2882        };
2883
2884        let (leading_precision, last_field, fsec_precision) =
2885            if leading_field == Some(DateTimeField::Second) {
2886                // SQL mandates special syntax for `SECOND TO SECOND` literals.
2887                // Instead of
2888                //     `SECOND [(<leading precision>)] TO SECOND[(<fractional seconds precision>)]`
2889                // one must use the special format:
2890                //     `SECOND [( <leading precision> [ , <fractional seconds precision>] )]`
2891                let last_field = None;
2892                let (leading_precision, fsec_precision) = self.parse_optional_precision_scale()?;
2893                (leading_precision, last_field, fsec_precision)
2894            } else {
2895                let leading_precision = self.parse_optional_precision()?;
2896                if self.parse_keyword(Keyword::TO) {
2897                    let last_field = Some(self.parse_date_time_field()?);
2898                    let fsec_precision = if last_field == Some(DateTimeField::Second) {
2899                        self.parse_optional_precision()?
2900                    } else {
2901                        None
2902                    };
2903                    (leading_precision, last_field, fsec_precision)
2904                } else {
2905                    (leading_precision, None, None)
2906                }
2907            };
2908
2909        Ok(Expr::Interval(Interval {
2910            value: Box::new(value),
2911            leading_field,
2912            leading_precision,
2913            last_field,
2914            fractional_seconds_precision: fsec_precision,
2915        }))
2916    }
2917
2918    /// Peek at the next token and determine if it is a temporal unit
2919    /// like `second`.
2920    pub fn next_token_is_temporal_unit(&mut self) -> bool {
2921        if let Token::Word(word) = self.peek_token().token {
2922            matches!(
2923                word.keyword,
2924                Keyword::YEAR
2925                    | Keyword::YEARS
2926                    | Keyword::MONTH
2927                    | Keyword::MONTHS
2928                    | Keyword::WEEK
2929                    | Keyword::WEEKS
2930                    | Keyword::DAY
2931                    | Keyword::DAYS
2932                    | Keyword::HOUR
2933                    | Keyword::HOURS
2934                    | Keyword::MINUTE
2935                    | Keyword::MINUTES
2936                    | Keyword::SECOND
2937                    | Keyword::SECONDS
2938                    | Keyword::CENTURY
2939                    | Keyword::DECADE
2940                    | Keyword::DOW
2941                    | Keyword::DOY
2942                    | Keyword::EPOCH
2943                    | Keyword::ISODOW
2944                    | Keyword::ISOYEAR
2945                    | Keyword::JULIAN
2946                    | Keyword::MICROSECOND
2947                    | Keyword::MICROSECONDS
2948                    | Keyword::MILLENIUM
2949                    | Keyword::MILLENNIUM
2950                    | Keyword::MILLISECOND
2951                    | Keyword::MILLISECONDS
2952                    | Keyword::NANOSECOND
2953                    | Keyword::NANOSECONDS
2954                    | Keyword::QUARTER
2955                    | Keyword::TIMEZONE
2956                    | Keyword::TIMEZONE_HOUR
2957                    | Keyword::TIMEZONE_MINUTE
2958            )
2959        } else {
2960            false
2961        }
2962    }
2963
2964    /// Syntax
2965    /// ```sql
2966    /// -- typed
2967    /// STRUCT<[field_name] field_type, ...>( expr1 [, ... ])
2968    /// -- typeless
2969    /// STRUCT( expr1 [AS field_name] [, ... ])
2970    /// ```
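    ///
    /// Illustrative BigQuery-style examples of the two forms:
    /// ```sql
    /// STRUCT<x INT64, y STRING>(1, 'foo')
    /// STRUCT(1 AS x, 'foo' AS y)
    /// ```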
2971    fn parse_struct_literal(&mut self) -> Result<Expr, ParserError> {
2972        // Parse the field definitions, if present: `<[field_name] field_type, ...>`
2973        self.prev_token();
2974        let (fields, trailing_bracket) =
2975            self.parse_struct_type_def(Self::parse_struct_field_def)?;
2976        if trailing_bracket.0 {
2977            return parser_err!(
2978                "unmatched > in STRUCT literal",
2979                self.peek_token().span.start
2980            );
2981        }
2982
2983        // Parse the struct values `(expr1 [, ... ])`
2984        self.expect_token(&Token::LParen)?;
2985        let values = self
2986            .parse_comma_separated(|parser| parser.parse_struct_field_expr(!fields.is_empty()))?;
2987        self.expect_token(&Token::RParen)?;
2988
2989        Ok(Expr::Struct { values, fields })
2990    }
2991
2992    /// Parse an expression value for a struct literal
2993    /// Syntax
2994    /// ```sql
2995    /// expr [AS name]
2996    /// ```
2997    ///
2998    /// For BigQuery [1], the `typed_syntax` parameter is set to `true` if the expression
2999    /// is to be parsed as a field expression declared using typed
3000    /// struct syntax [2], and `false` if using typeless struct syntax [3].
3001    ///
3002    /// [1]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#constructing_a_struct
3003    /// [2]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typed_struct_syntax
3004    /// [3]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#typeless_struct_syntax
3005    fn parse_struct_field_expr(&mut self, typed_syntax: bool) -> Result<Expr, ParserError> {
3006        let expr = self.parse_expr()?;
3007        if self.parse_keyword(Keyword::AS) {
3008            if typed_syntax {
3009                return parser_err!("Typed syntax does not allow AS", {
3010                    self.prev_token();
3011                    self.peek_token().span.start
3012                });
3013            }
3014            let field_name = self.parse_identifier()?;
3015            Ok(Expr::Named {
3016                expr: expr.into(),
3017                name: field_name,
3018            })
3019        } else {
3020            Ok(expr)
3021        }
3022    }
3023
3024    /// Parse a Struct type definition as a sequence of field-value pairs.
3025    /// The syntax of the struct elements differs by dialect, so it is customised
3026    /// by the `elem_parser` argument.
3027    ///
3028    /// Syntax
3029    /// ```sql
3030    /// Hive:
3031    /// STRUCT<field_name: field_type>
3032    ///
3033    /// BigQuery:
3034    /// STRUCT<[field_name] field_type>
3035    /// ```
3036    fn parse_struct_type_def<F>(
3037        &mut self,
3038        mut elem_parser: F,
3039    ) -> Result<(Vec<StructField>, MatchedTrailingBracket), ParserError>
3040    where
3041        F: FnMut(&mut Parser<'a>) -> Result<(StructField, MatchedTrailingBracket), ParserError>,
3042    {
3043        self.expect_keyword_is(Keyword::STRUCT)?;
3044
3045        // Nothing to do if we have no type information.
3046        if Token::Lt != self.peek_token() {
3047            return Ok((Default::default(), false.into()));
3048        }
3049        self.next_token();
3050
3051        let mut field_defs = vec![];
3052        let trailing_bracket = loop {
3053            let (def, trailing_bracket) = elem_parser(self)?;
3054            field_defs.push(def);
3055            // The struct field definitions are finished when we encounter a `>>` or a comma.
3056            if trailing_bracket.0 || !self.consume_token(&Token::Comma) {
3057                break trailing_bracket;
3058            }
3059        };
3060
3061        Ok((
3062            field_defs,
3063            self.expect_closing_angle_bracket(trailing_bracket)?,
3064        ))
3065    }
3066
3067    /// Duckdb Struct Data Type <https://duckdb.org/docs/sql/data_types/struct.html#retrieving-from-structs>
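    ///
    /// An illustrative DuckDB-style type declaration:
    /// ```sql
    /// STRUCT(v VARCHAR, i INTEGER)
    /// ```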
3068    fn parse_duckdb_struct_type_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3069        self.expect_keyword_is(Keyword::STRUCT)?;
3070        self.expect_token(&Token::LParen)?;
3071        let struct_body = self.parse_comma_separated(|parser| {
3072            let field_name = parser.parse_identifier()?;
3073            let field_type = parser.parse_data_type()?;
3074
3075            Ok(StructField {
3076                field_name: Some(field_name),
3077                field_type,
3078                options: None,
3079            })
3080        });
3081        self.expect_token(&Token::RParen)?;
3082        struct_body
3083    }
3084
3085    /// Parse a field definition in a [struct] or [tuple].
3086    /// Syntax:
3087    ///
3088    /// ```sql
3089    /// [field_name] field_type
3090    /// ```
3091    ///
3092    /// [struct]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#declaring_a_struct_type
3093    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3094    fn parse_struct_field_def(
3095        &mut self,
3096    ) -> Result<(StructField, MatchedTrailingBracket), ParserError> {
3097        // Look beyond the next item to infer whether both field name
3098        // and type are specified.
3099        let is_anonymous_field = !matches!(
3100            (self.peek_nth_token(0).token, self.peek_nth_token(1).token),
3101            (Token::Word(_), Token::Word(_))
3102        );
3103
3104        let field_name = if is_anonymous_field {
3105            None
3106        } else {
3107            Some(self.parse_identifier()?)
3108        };
3109
3110        let (field_type, trailing_bracket) = self.parse_data_type_helper()?;
3111
3112        let options = self.maybe_parse_options(Keyword::OPTIONS)?;
3113        Ok((
3114            StructField {
3115                field_name,
3116                field_type,
3117                options,
3118            },
3119            trailing_bracket,
3120        ))
3121    }
3122
3123    /// DuckDB specific: Parse a Union type definition as a sequence of field-value pairs [1].
3124    ///
3125    /// Syntax:
3126    ///
3127    /// ```sql
3128    /// UNION(field_name field_type[,...])
3129    /// ```
3130    ///
3131    /// [1]: https://duckdb.org/docs/sql/data_types/union.html
3132    fn parse_union_type_def(&mut self) -> Result<Vec<UnionField>, ParserError> {
3133        self.expect_keyword_is(Keyword::UNION)?;
3134
3135        self.expect_token(&Token::LParen)?;
3136
3137        let fields = self.parse_comma_separated(|p| {
3138            Ok(UnionField {
3139                field_name: p.parse_identifier()?,
3140                field_type: p.parse_data_type()?,
3141            })
3142        })?;
3143
3144        self.expect_token(&Token::RParen)?;
3145
3146        Ok(fields)
3147    }
3148
3149    /// DuckDB and ClickHouse specific: Parse a duckdb [dictionary] or a clickhouse [map] setting
3150    ///
3151    /// Syntax:
3152    ///
3153    /// ```sql
3154    /// {'field_name': expr1[, ... ]}
3155    /// ```
3156    ///
3157    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3158    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3159    fn parse_dictionary(&mut self) -> Result<Expr, ParserError> {
3160        self.expect_token(&Token::LBrace)?;
3161
3162        let fields = self.parse_comma_separated0(Self::parse_dictionary_field, Token::RBrace)?;
3163
3164        self.expect_token(&Token::RBrace)?;
3165
3166        Ok(Expr::Dictionary(fields))
3167    }
3168
3169    /// Parse a field for a duckdb [dictionary] or a clickhouse [map] setting
3170    ///
3171    /// Syntax
3172    ///
3173    /// ```sql
3174    /// 'name': expr
3175    /// ```
3176    ///
3177    /// [dictionary]: https://duckdb.org/docs/sql/data_types/struct#creating-structs
3178    /// [map]: https://clickhouse.com/docs/operations/settings/settings#additional_table_filters
3179    fn parse_dictionary_field(&mut self) -> Result<DictionaryField, ParserError> {
3180        let key = self.parse_identifier()?;
3181
3182        self.expect_token(&Token::Colon)?;
3183
3184        let expr = self.parse_expr()?;
3185
3186        Ok(DictionaryField {
3187            key,
3188            value: Box::new(expr),
3189        })
3190    }
3191
3192    /// DuckDB specific: Parse a duckdb [map]
3193    ///
3194    /// Syntax:
3195    ///
3196    /// ```sql
3197    /// Map {key1: value1[, ... ]}
3198    /// ```
3199    ///
3200    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3201    fn parse_duckdb_map_literal(&mut self) -> Result<Expr, ParserError> {
3202        self.expect_token(&Token::LBrace)?;
3203        let fields = self.parse_comma_separated0(Self::parse_duckdb_map_field, Token::RBrace)?;
3204        self.expect_token(&Token::RBrace)?;
3205        Ok(Expr::Map(Map { entries: fields }))
3206    }
3207
3208    /// Parse a field for a duckdb [map]
3209    ///
3210    /// Syntax
3211    ///
3212    /// ```sql
3213    /// key: value
3214    /// ```
3215    ///
3216    /// [map]: https://duckdb.org/docs/sql/data_types/map.html#creating-maps
3217    fn parse_duckdb_map_field(&mut self) -> Result<MapEntry, ParserError> {
3218        let key = self.parse_expr()?;
3219
3220        self.expect_token(&Token::Colon)?;
3221
3222        let value = self.parse_expr()?;
3223
3224        Ok(MapEntry {
3225            key: Box::new(key),
3226            value: Box::new(value),
3227        })
3228    }
3229
3230    /// Parse clickhouse [map]
3231    ///
3232    /// Syntax
3233    ///
3234    /// ```sql
3235    /// Map(key_data_type, value_data_type)
3236    /// ```
3237    ///
3238    /// [map]: https://clickhouse.com/docs/en/sql-reference/data-types/map
3239    fn parse_click_house_map_def(&mut self) -> Result<(DataType, DataType), ParserError> {
3240        self.expect_keyword_is(Keyword::MAP)?;
3241        self.expect_token(&Token::LParen)?;
3242        let key_data_type = self.parse_data_type()?;
3243        self.expect_token(&Token::Comma)?;
3244        let value_data_type = self.parse_data_type()?;
3245        self.expect_token(&Token::RParen)?;
3246
3247        Ok((key_data_type, value_data_type))
3248    }
3249
3250    /// Parse clickhouse [tuple]
3251    ///
3252    /// Syntax
3253    ///
3254    /// ```sql
3255    /// Tuple([field_name] field_type, ...)
3256    /// ```
3257    ///
3258    /// [tuple]: https://clickhouse.com/docs/en/sql-reference/data-types/tuple
3259    fn parse_click_house_tuple_def(&mut self) -> Result<Vec<StructField>, ParserError> {
3260        self.expect_keyword_is(Keyword::TUPLE)?;
3261        self.expect_token(&Token::LParen)?;
3262        let mut field_defs = vec![];
3263        loop {
3264            let (def, _) = self.parse_struct_field_def()?;
3265            field_defs.push(def);
3266            if !self.consume_token(&Token::Comma) {
3267                break;
3268            }
3269        }
3270        self.expect_token(&Token::RParen)?;
3271
3272        Ok(field_defs)
3273    }
3274
3275    /// For nested types that use the angle bracket syntax, this matches either
3276    /// `>`, `>>` or nothing depending on which variant is expected (specified by the previously
3277    /// matched `trailing_bracket` argument). It returns whether there is a trailing
3278    /// bracket left to be matched (i.e. whether `>>` was matched).
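    ///
    /// For example (illustrative): in `STRUCT<a STRUCT<b INT64>>` the final `>>`
    /// closes both the inner and the outer type, so the inner parse reports a
    /// trailing bracket that the outer parse then consumes.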
3279    fn expect_closing_angle_bracket(
3280        &mut self,
3281        trailing_bracket: MatchedTrailingBracket,
3282    ) -> Result<MatchedTrailingBracket, ParserError> {
3283        let trailing_bracket = if !trailing_bracket.0 {
3284            match self.peek_token().token {
3285                Token::Gt => {
3286                    self.next_token();
3287                    false.into()
3288                }
3289                Token::ShiftRight => {
3290                    self.next_token();
3291                    true.into()
3292                }
3293                _ => return self.expected(">", self.peek_token()),
3294            }
3295        } else {
3296            false.into()
3297        };
3298
3299        Ok(trailing_bracket)
3300    }
3301
3302    /// Parse an operator following an expression
3303    pub fn parse_infix(&mut self, expr: Expr, precedence: u8) -> Result<Expr, ParserError> {
3304        // allow the dialect to override infix parsing
3305        if let Some(infix) = self.dialect.parse_infix(self, &expr, precedence) {
3306            return infix;
3307        }
3308
3309        let dialect = self.dialect;
3310
3311        self.advance_token();
3312        let tok = self.get_current_token();
3313        let tok_index = self.get_current_index();
3314        let span = tok.span;
3315        let regular_binary_operator = match &tok.token {
3316            Token::Spaceship => Some(BinaryOperator::Spaceship),
3317            Token::DoubleEq => Some(BinaryOperator::Eq),
3318            Token::Assignment => Some(BinaryOperator::Assignment),
3319            Token::Eq => Some(BinaryOperator::Eq),
3320            Token::Neq => Some(BinaryOperator::NotEq),
3321            Token::Gt => Some(BinaryOperator::Gt),
3322            Token::GtEq => Some(BinaryOperator::GtEq),
3323            Token::Lt => Some(BinaryOperator::Lt),
3324            Token::LtEq => Some(BinaryOperator::LtEq),
3325            Token::Plus => Some(BinaryOperator::Plus),
3326            Token::Minus => Some(BinaryOperator::Minus),
3327            Token::Mul => Some(BinaryOperator::Multiply),
3328            Token::Mod => Some(BinaryOperator::Modulo),
3329            Token::StringConcat => Some(BinaryOperator::StringConcat),
3330            Token::Pipe => Some(BinaryOperator::BitwiseOr),
3331            Token::Caret => {
3332                // In PostgreSQL, ^ stands for the exponentiation operation,
3333                // and # stands for XOR. See https://www.postgresql.org/docs/current/functions-math.html
3334                if dialect_is!(dialect is PostgreSqlDialect) {
3335                    Some(BinaryOperator::PGExp)
3336                } else {
3337                    Some(BinaryOperator::BitwiseXor)
3338                }
3339            }
3340            Token::Ampersand => Some(BinaryOperator::BitwiseAnd),
3341            Token::Div => Some(BinaryOperator::Divide),
3342            Token::DuckIntDiv if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
3343                Some(BinaryOperator::DuckIntegerDivide)
3344            }
3345            Token::ShiftLeft if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3346                Some(BinaryOperator::PGBitwiseShiftLeft)
3347            }
3348            Token::ShiftRight if dialect_is!(dialect is PostgreSqlDialect | DuckDbDialect | GenericDialect | RedshiftSqlDialect) => {
3349                Some(BinaryOperator::PGBitwiseShiftRight)
3350            }
3351            Token::Sharp if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3352                Some(BinaryOperator::PGBitwiseXor)
3353            }
3354            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | RedshiftSqlDialect) => {
3355                Some(BinaryOperator::PGOverlap)
3356            }
3357            Token::Overlap if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3358                Some(BinaryOperator::PGOverlap)
3359            }
3360            Token::CaretAt if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3361                Some(BinaryOperator::PGStartsWith)
3362            }
3363            Token::Tilde => Some(BinaryOperator::PGRegexMatch),
3364            Token::TildeAsterisk => Some(BinaryOperator::PGRegexIMatch),
3365            Token::ExclamationMarkTilde => Some(BinaryOperator::PGRegexNotMatch),
3366            Token::ExclamationMarkTildeAsterisk => Some(BinaryOperator::PGRegexNotIMatch),
3367            Token::DoubleTilde => Some(BinaryOperator::PGLikeMatch),
3368            Token::DoubleTildeAsterisk => Some(BinaryOperator::PGILikeMatch),
3369            Token::ExclamationMarkDoubleTilde => Some(BinaryOperator::PGNotLikeMatch),
3370            Token::ExclamationMarkDoubleTildeAsterisk => Some(BinaryOperator::PGNotILikeMatch),
3371            Token::Arrow => Some(BinaryOperator::Arrow),
3372            Token::LongArrow => Some(BinaryOperator::LongArrow),
3373            Token::HashArrow => Some(BinaryOperator::HashArrow),
3374            Token::HashLongArrow => Some(BinaryOperator::HashLongArrow),
3375            Token::AtArrow => Some(BinaryOperator::AtArrow),
3376            Token::ArrowAt => Some(BinaryOperator::ArrowAt),
3377            Token::HashMinus => Some(BinaryOperator::HashMinus),
3378            Token::AtQuestion => Some(BinaryOperator::AtQuestion),
3379            Token::AtAt => Some(BinaryOperator::AtAt),
3380            Token::Question => Some(BinaryOperator::Question),
3381            Token::QuestionAnd => Some(BinaryOperator::QuestionAnd),
3382            Token::QuestionPipe => Some(BinaryOperator::QuestionPipe),
3383            Token::CustomBinaryOperator(s) => Some(BinaryOperator::Custom(s.clone())),
3384            Token::DoubleSharp if self.dialect.supports_geometric_types() => {
3385                Some(BinaryOperator::DoubleHash)
3386            }
3387
3388            Token::AmpersandLeftAngleBracket if self.dialect.supports_geometric_types() => {
3389                Some(BinaryOperator::AndLt)
3390            }
3391            Token::AmpersandRightAngleBracket if self.dialect.supports_geometric_types() => {
3392                Some(BinaryOperator::AndGt)
3393            }
3394            Token::QuestionMarkDash if self.dialect.supports_geometric_types() => {
3395                Some(BinaryOperator::QuestionDash)
3396            }
3397            Token::AmpersandLeftAngleBracketVerticalBar
3398                if self.dialect.supports_geometric_types() =>
3399            {
3400                Some(BinaryOperator::AndLtPipe)
3401            }
3402            Token::VerticalBarAmpersandRightAngleBracket
3403                if self.dialect.supports_geometric_types() =>
3404            {
3405                Some(BinaryOperator::PipeAndGt)
3406            }
3407            Token::TwoWayArrow if self.dialect.supports_geometric_types() => {
3408                Some(BinaryOperator::LtDashGt)
3409            }
3410            Token::LeftAngleBracketCaret if self.dialect.supports_geometric_types() => {
3411                Some(BinaryOperator::LtCaret)
3412            }
3413            Token::RightAngleBracketCaret if self.dialect.supports_geometric_types() => {
3414                Some(BinaryOperator::GtCaret)
3415            }
3416            Token::QuestionMarkSharp if self.dialect.supports_geometric_types() => {
3417                Some(BinaryOperator::QuestionHash)
3418            }
3419            Token::QuestionMarkDoubleVerticalBar if self.dialect.supports_geometric_types() => {
3420                Some(BinaryOperator::QuestionDoublePipe)
3421            }
3422            Token::QuestionMarkDashVerticalBar if self.dialect.supports_geometric_types() => {
3423                Some(BinaryOperator::QuestionDashPipe)
3424            }
3425            Token::TildeEqual if self.dialect.supports_geometric_types() => {
3426                Some(BinaryOperator::TildeEq)
3427            }
3428            Token::ShiftLeftVerticalBar if self.dialect.supports_geometric_types() => {
3429                Some(BinaryOperator::LtLtPipe)
3430            }
3431            Token::VerticalBarShiftRight if self.dialect.supports_geometric_types() => {
3432                Some(BinaryOperator::PipeGtGt)
3433            }
3434            Token::AtSign if self.dialect.supports_geometric_types() => Some(BinaryOperator::At),
3435
3436            Token::Word(w) => match w.keyword {
3437                Keyword::AND => Some(BinaryOperator::And),
3438                Keyword::OR => Some(BinaryOperator::Or),
3439                Keyword::XOR => Some(BinaryOperator::Xor),
3440                Keyword::OVERLAPS => Some(BinaryOperator::Overlaps),
3441                Keyword::OPERATOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
3442                    self.expect_token(&Token::LParen)?;
3443                    // There are special rules for operator names in
3444                    // PostgreSQL, so we cannot use 'parse_object'
3445                    // or similar.
3446                    // See https://www.postgresql.org/docs/current/sql-createoperator.html
3447                    let mut idents = vec![];
3448                    loop {
3449                        self.advance_token();
3450                        idents.push(self.get_current_token().to_string());
3451                        if !self.consume_token(&Token::Period) {
3452                            break;
3453                        }
3454                    }
3455                    self.expect_token(&Token::RParen)?;
3456                    Some(BinaryOperator::PGCustomBinaryOperator(idents))
3457                }
3458                _ => None,
3459            },
3460            _ => None,
3461        };
3462
3463        let tok = self.token_at(tok_index);
3464        if let Some(op) = regular_binary_operator {
3465            if let Some(keyword) =
3466                self.parse_one_of_keywords(&[Keyword::ANY, Keyword::ALL, Keyword::SOME])
3467            {
3468                self.expect_token(&Token::LParen)?;
3469                let right = if self.peek_sub_query() {
3470                    // We have a subquery ahead (SELECT/WITH ...), so we need to rewind and
3471                    // use the parenthesis for parsing the subquery as an expression.
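                    // e.g. `x = ANY (SELECT y FROM t)` takes this branch, while
                    // `x = ANY (some_array_expr)` is handled as a plain expression below.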
3472                    self.prev_token(); // LParen
3473                    self.parse_subexpr(precedence)?
3474                } else {
3475                    // Non-subquery expression
3476                    let right = self.parse_subexpr(precedence)?;
3477                    self.expect_token(&Token::RParen)?;
3478                    right
3479                };
3480
3481                if !matches!(
3482                    op,
3483                    BinaryOperator::Gt
3484                        | BinaryOperator::Lt
3485                        | BinaryOperator::GtEq
3486                        | BinaryOperator::LtEq
3487                        | BinaryOperator::Eq
3488                        | BinaryOperator::NotEq
3489                        | BinaryOperator::PGRegexMatch
3490                        | BinaryOperator::PGRegexIMatch
3491                        | BinaryOperator::PGRegexNotMatch
3492                        | BinaryOperator::PGRegexNotIMatch
3493                        | BinaryOperator::PGLikeMatch
3494                        | BinaryOperator::PGILikeMatch
3495                        | BinaryOperator::PGNotLikeMatch
3496                        | BinaryOperator::PGNotILikeMatch
3497                ) {
3498                    return parser_err!(
3499                        format!(
3500                        "Expected one of [=, >, <, =>, =<, !=, ~, ~*, !~, !~*, ~~, ~~*, !~~, !~~*] as comparison operator, found: {op}"
3501                    ),
3502                        span.start
3503                    );
3504                };
3505
3506                Ok(match keyword {
3507                    Keyword::ALL => Expr::AllOp {
3508                        left: Box::new(expr),
3509                        compare_op: op,
3510                        right: Box::new(right),
3511                    },
3512                    Keyword::ANY | Keyword::SOME => Expr::AnyOp {
3513                        left: Box::new(expr),
3514                        compare_op: op,
3515                        right: Box::new(right),
3516                        is_some: keyword == Keyword::SOME,
3517                    },
3518                    _ => unreachable!(),
3519                })
3520            } else {
3521                Ok(Expr::BinaryOp {
3522                    left: Box::new(expr),
3523                    op,
3524                    right: Box::new(self.parse_subexpr(precedence)?),
3525                })
3526            }
3527        } else if let Token::Word(w) = &tok.token {
3528            match w.keyword {
3529                Keyword::IS => {
3530                    if self.parse_keyword(Keyword::NULL) {
3531                        Ok(Expr::IsNull(Box::new(expr)))
3532                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
3533                        Ok(Expr::IsNotNull(Box::new(expr)))
3534                    } else if self.parse_keywords(&[Keyword::TRUE]) {
3535                        Ok(Expr::IsTrue(Box::new(expr)))
3536                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::TRUE]) {
3537                        Ok(Expr::IsNotTrue(Box::new(expr)))
3538                    } else if self.parse_keywords(&[Keyword::FALSE]) {
3539                        Ok(Expr::IsFalse(Box::new(expr)))
3540                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::FALSE]) {
3541                        Ok(Expr::IsNotFalse(Box::new(expr)))
3542                    } else if self.parse_keywords(&[Keyword::UNKNOWN]) {
3543                        Ok(Expr::IsUnknown(Box::new(expr)))
3544                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::UNKNOWN]) {
3545                        Ok(Expr::IsNotUnknown(Box::new(expr)))
3546                    } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::FROM]) {
3547                        let expr2 = self.parse_expr()?;
3548                        Ok(Expr::IsDistinctFrom(Box::new(expr), Box::new(expr2)))
3549                    } else if self.parse_keywords(&[Keyword::NOT, Keyword::DISTINCT, Keyword::FROM])
3550                    {
3551                        let expr2 = self.parse_expr()?;
3552                        Ok(Expr::IsNotDistinctFrom(Box::new(expr), Box::new(expr2)))
3553                    } else if let Ok(is_normalized) = self.parse_unicode_is_normalized(expr) {
3554                        Ok(is_normalized)
3555                    } else {
3556                        self.expected(
3557                            "[NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS",
3558                            self.peek_token(),
3559                        )
3560                    }
3561                }
3562                Keyword::AT => {
3563                    self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
3564                    Ok(Expr::AtTimeZone {
3565                        timestamp: Box::new(expr),
3566                        time_zone: Box::new(self.parse_subexpr(precedence)?),
3567                    })
3568                }
3569                Keyword::NOT
3570                | Keyword::IN
3571                | Keyword::BETWEEN
3572                | Keyword::LIKE
3573                | Keyword::ILIKE
3574                | Keyword::SIMILAR
3575                | Keyword::REGEXP
3576                | Keyword::RLIKE => {
3577                    self.prev_token();
3578                    let negated = self.parse_keyword(Keyword::NOT);
3579                    let regexp = self.parse_keyword(Keyword::REGEXP);
3580                    let rlike = self.parse_keyword(Keyword::RLIKE);
3581                    if regexp || rlike {
3582                        Ok(Expr::RLike {
3583                            negated,
3584                            expr: Box::new(expr),
3585                            pattern: Box::new(
3586                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3587                            ),
3588                            regexp,
3589                        })
3590                    } else if self.parse_keyword(Keyword::IN) {
3591                        self.parse_in(expr, negated)
3592                    } else if self.parse_keyword(Keyword::BETWEEN) {
3593                        self.parse_between(expr, negated)
3594                    } else if self.parse_keyword(Keyword::LIKE) {
3595                        Ok(Expr::Like {
3596                            negated,
3597                            any: self.parse_keyword(Keyword::ANY),
3598                            expr: Box::new(expr),
3599                            pattern: Box::new(
3600                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3601                            ),
3602                            escape_char: self.parse_escape_char()?,
3603                        })
3604                    } else if self.parse_keyword(Keyword::ILIKE) {
3605                        Ok(Expr::ILike {
3606                            negated,
3607                            any: self.parse_keyword(Keyword::ANY),
3608                            expr: Box::new(expr),
3609                            pattern: Box::new(
3610                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3611                            ),
3612                            escape_char: self.parse_escape_char()?,
3613                        })
3614                    } else if self.parse_keywords(&[Keyword::SIMILAR, Keyword::TO]) {
3615                        Ok(Expr::SimilarTo {
3616                            negated,
3617                            expr: Box::new(expr),
3618                            pattern: Box::new(
3619                                self.parse_subexpr(self.dialect.prec_value(Precedence::Like))?,
3620                            ),
3621                            escape_char: self.parse_escape_char()?,
3622                        })
3623                    } else {
3624                        self.expected("IN or BETWEEN after NOT", self.peek_token())
3625                    }
3626                }
3627                Keyword::MEMBER => {
3628                    if self.parse_keyword(Keyword::OF) {
3629                        self.expect_token(&Token::LParen)?;
3630                        let array = self.parse_expr()?;
3631                        self.expect_token(&Token::RParen)?;
3632                        Ok(Expr::MemberOf(MemberOf {
3633                            value: Box::new(expr),
3634                            array: Box::new(array),
3635                        }))
3636                    } else {
3637                        self.expected("OF after MEMBER", self.peek_token())
3638                    }
3639                }
3640                // Can only happen if `get_next_precedence` got out of sync with this function
3641                _ => parser_err!(
3642                    format!("No infix parser for token {:?}", tok.token),
3643                    tok.span.start
3644                ),
3645            }
3646        } else if Token::DoubleColon == *tok {
3647            Ok(Expr::Cast {
3648                kind: CastKind::DoubleColon,
3649                expr: Box::new(expr),
3650                data_type: self.parse_data_type()?,
3651                format: None,
3652            })
3653        } else if Token::ExclamationMark == *tok && self.dialect.supports_factorial_operator() {
3654            Ok(Expr::UnaryOp {
3655                op: UnaryOperator::PGPostfixFactorial,
3656                expr: Box::new(expr),
3657            })
3658        } else if Token::LBracket == *tok && self.dialect.supports_partiql()
3659            || (dialect_of!(self is SnowflakeDialect | GenericDialect) && Token::Colon == *tok)
3660        {
3661            self.prev_token();
3662            self.parse_json_access(expr)
3663        } else {
3664            // Can only happen if `get_next_precedence` got out of sync with this function
3665            parser_err!(
3666                format!("No infix parser for token {:?}", tok.token),
3667                tok.span.start
3668            )
3669        }
3670    }
3671
3672    /// Parse the `ESCAPE CHAR` portion of `LIKE`, `ILIKE`, and `SIMILAR TO`
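    ///
    /// For example (illustrative):
    /// ```sql
    /// name LIKE '50!%%' ESCAPE '!'
    /// ```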
3673    pub fn parse_escape_char(&mut self) -> Result<Option<Value>, ParserError> {
3674        if self.parse_keyword(Keyword::ESCAPE) {
3675            Ok(Some(self.parse_value()?.into()))
3676        } else {
3677            Ok(None)
3678        }
3679    }
3680
3681    /// Parses an array subscript like
3682    /// * `[:]`
3683    /// * `[l]`
3684    /// * `[l:]`
3685    /// * `[:u]`
3686    /// * `[l:u]`
3687    /// * `[l:u:s]`
3688    ///
3689    /// Parser is right after `[`
3690    fn parse_subscript_inner(&mut self) -> Result<Subscript, ParserError> {
3691        // at either `<lower>:(rest)` or `:(rest)]`
3692        let lower_bound = if self.consume_token(&Token::Colon) {
3693            None
3694        } else {
3695            Some(self.parse_expr()?)
3696        };
3697
3698        // check for end
3699        if self.consume_token(&Token::RBracket) {
3700            if let Some(lower_bound) = lower_bound {
3701                return Ok(Subscript::Index { index: lower_bound });
3702            };
3703            return Ok(Subscript::Slice {
3704                lower_bound,
3705                upper_bound: None,
3706                stride: None,
3707            });
3708        }
3709
3710        // consume the `:`
3711        if lower_bound.is_some() {
3712            self.expect_token(&Token::Colon)?;
3713        }
3714
3715        // we are now at either `]` or `<upper>(rest)]`
3716        let upper_bound = if self.consume_token(&Token::RBracket) {
3717            return Ok(Subscript::Slice {
3718                lower_bound,
3719                upper_bound: None,
3720                stride: None,
3721            });
3722        } else {
3723            Some(self.parse_expr()?)
3724        };
3725
3726        // check for end
3727        if self.consume_token(&Token::RBracket) {
3728            return Ok(Subscript::Slice {
3729                lower_bound,
3730                upper_bound,
3731                stride: None,
3732            });
3733        }
3734
3735        // we are now at `:]` or `:stride]`
3736        self.expect_token(&Token::Colon)?;
3737        let stride = if self.consume_token(&Token::RBracket) {
3738            None
3739        } else {
3740            Some(self.parse_expr()?)
3741        };
3742
3743        if stride.is_some() {
3744            self.expect_token(&Token::RBracket)?;
3745        }
3746
3747        Ok(Subscript::Slice {
3748            lower_bound,
3749            upper_bound,
3750            stride,
3751        })
3752    }
3753
3754    /// Parse a multi-dimension array accessing like `[1:3][1][1]`
3755    pub fn parse_multi_dim_subscript(
3756        &mut self,
3757        chain: &mut Vec<AccessExpr>,
3758    ) -> Result<(), ParserError> {
3759        while self.consume_token(&Token::LBracket) {
3760            self.parse_subscript(chain)?;
3761        }
3762        Ok(())
3763    }
3764
3765    /// Parses an array subscript like `[1:3]`
3766    ///
3767    /// Parser is right after `[`
3768    fn parse_subscript(&mut self, chain: &mut Vec<AccessExpr>) -> Result<(), ParserError> {
3769        let subscript = self.parse_subscript_inner()?;
3770        chain.push(AccessExpr::Subscript(subscript));
3771        Ok(())
3772    }
3773
3774    fn parse_json_path_object_key(&mut self) -> Result<JsonPathElem, ParserError> {
3775        let token = self.next_token();
3776        match token.token {
3777            Token::Word(Word {
3778                value,
3779                // path segments in Snowflake dot notation can be unquoted or double-quoted
3780                quote_style: quote_style @ (Some('"') | None),
3781                // some experimentation suggests that snowflake permits
3782                // any keyword here unquoted.
3783                keyword: _,
3784            }) => Ok(JsonPathElem::Dot {
3785                key: value,
3786                quoted: quote_style.is_some(),
3787            }),
3788
3789            // This token should never be generated on snowflake or generic
3790            // dialects, but we handle it just in case this is used on future
3791            // dialects.
3792            Token::DoubleQuotedString(key) => Ok(JsonPathElem::Dot { key, quoted: true }),
3793
3794            _ => self.expected("variant object key name", token),
3795        }
3796    }
3797
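    /// Parses a JSON access path that follows a value expression, e.g. the
    /// Snowflake-style `col:item.price[0]` (illustrative; support depends on
    /// the dialect).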
3798    fn parse_json_access(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3799        let path = self.parse_json_path()?;
3800        Ok(Expr::JsonAccess {
3801            value: Box::new(expr),
3802            path,
3803        })
3804    }
3805
3806    fn parse_json_path(&mut self) -> Result<JsonPath, ParserError> {
3807        let mut path = Vec::new();
3808        loop {
3809            match self.next_token().token {
3810                Token::Colon if path.is_empty() => {
3811                    path.push(self.parse_json_path_object_key()?);
3812                }
3813                Token::Period if !path.is_empty() => {
3814                    path.push(self.parse_json_path_object_key()?);
3815                }
3816                Token::LBracket => {
3817                    let key = self.parse_expr()?;
3818                    self.expect_token(&Token::RBracket)?;
3819
3820                    path.push(JsonPathElem::Bracket { key });
3821                }
3822                _ => {
3823                    self.prev_token();
3824                    break;
3825                }
3826            };
3827        }
3828
3829        debug_assert!(!path.is_empty());
3830        Ok(JsonPath { path })
3831    }
3832
3833    /// Parses the parens following the `[ NOT ] IN` operator.
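    ///
    /// Illustrative forms handled here (dialect support varies):
    /// ```sql
    /// x IN (1, 2, 3)
    /// x IN (SELECT y FROM t)
    /// x IN UNNEST(array_col)
    /// ```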
3834    pub fn parse_in(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3835        // BigQuery allows `IN UNNEST(array_expression)`
3836        // https://cloud.google.com/bigquery/docs/reference/standard-sql/operators#in_operators
3837        if self.parse_keyword(Keyword::UNNEST) {
3838            self.expect_token(&Token::LParen)?;
3839            let array_expr = self.parse_expr()?;
3840            self.expect_token(&Token::RParen)?;
3841            return Ok(Expr::InUnnest {
3842                expr: Box::new(expr),
3843                array_expr: Box::new(array_expr),
3844                negated,
3845            });
3846        }
3847        self.expect_token(&Token::LParen)?;
3848        let in_op = match self.maybe_parse(|p| p.parse_query())? {
3849            Some(subquery) => Expr::InSubquery {
3850                expr: Box::new(expr),
3851                subquery,
3852                negated,
3853            },
3854            None => Expr::InList {
3855                expr: Box::new(expr),
3856                list: if self.dialect.supports_in_empty_list() {
3857                    self.parse_comma_separated0(Parser::parse_expr, Token::RParen)?
3858                } else {
3859                    self.parse_comma_separated(Parser::parse_expr)?
3860                },
3861                negated,
3862            },
3863        };
3864        self.expect_token(&Token::RParen)?;
3865        Ok(in_op)
3866    }
3867
3868    /// Parses `BETWEEN <low> AND <high>`, assuming the `BETWEEN` keyword was already consumed.
3869    pub fn parse_between(&mut self, expr: Expr, negated: bool) -> Result<Expr, ParserError> {
3870        // Stop parsing subexpressions for <low> and <high> on tokens with
3871        // precedence lower than that of `BETWEEN`, such as `AND`, `IS`, etc.
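        // e.g. in `a BETWEEN 1 AND 2 AND b`, the first `AND` separates the bounds
        // and the second `AND` ends the BETWEEN expression.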
3872        let low = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3873        self.expect_keyword_is(Keyword::AND)?;
3874        let high = self.parse_subexpr(self.dialect.prec_value(Precedence::Between))?;
3875        Ok(Expr::Between {
3876            expr: Box::new(expr),
3877            negated,
3878            low: Box::new(low),
3879            high: Box::new(high),
3880        })
3881    }
3882
3883    /// Parse a PostgreSQL casting style which is in the form of `expr::datatype`.
3884    pub fn parse_pg_cast(&mut self, expr: Expr) -> Result<Expr, ParserError> {
3885        Ok(Expr::Cast {
3886            kind: CastKind::DoubleColon,
3887            expr: Box::new(expr),
3888            data_type: self.parse_data_type()?,
3889            format: None,
3890        })
3891    }
3892
3893    /// Get the precedence of the next token
3894    pub fn get_next_precedence(&self) -> Result<u8, ParserError> {
3895        self.dialect.get_next_precedence_default(self)
3896    }
3897
3898    /// Return the token at the given location, or EOF if the index is beyond
3899    /// the length of the current set of tokens.
3900    pub fn token_at(&self, index: usize) -> &TokenWithSpan {
3901        self.tokens.get(index).unwrap_or(&EOF_TOKEN)
3902    }
3903
3904    /// Return the first non-whitespace token that has not yet been processed
3905    /// or Token::EOF
3906    ///
3907    /// See [`Self::peek_token_ref`] to avoid the copy.
3908    pub fn peek_token(&self) -> TokenWithSpan {
3909        self.peek_nth_token(0)
3910    }
3911
3912    /// Return a reference to the first non-whitespace token that has not yet
3913    /// been processed or Token::EOF
3914    pub fn peek_token_ref(&self) -> &TokenWithSpan {
3915        self.peek_nth_token_ref(0)
3916    }
3917
3918    /// Returns the `N` next non-whitespace tokens that have not yet been
3919    /// processed.
3920    ///
3921    /// Example:
3922    /// ```rust
3923    /// # use sqlparser::dialect::GenericDialect;
3924    /// # use sqlparser::parser::Parser;
3925    /// # use sqlparser::keywords::Keyword;
3926    /// # use sqlparser::tokenizer::{Token, Word};
3927    /// let dialect = GenericDialect {};
3928    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo, bar").unwrap();
3929    ///
3930    /// // Note that Rust infers the number of tokens to peek based on the
3931    /// // length of the slice pattern!
3932    /// assert!(matches!(
3933    ///     parser.peek_tokens(),
3934    ///     [
3935    ///         Token::Word(Word { keyword: Keyword::ORDER, .. }),
3936    ///         Token::Word(Word { keyword: Keyword::BY, .. }),
3937    ///     ]
3938    /// ));
3939    /// ```
3940    pub fn peek_tokens<const N: usize>(&self) -> [Token; N] {
3941        self.peek_tokens_with_location()
3942            .map(|with_loc| with_loc.token)
3943    }
3944
3945    /// Returns the `N` next non-whitespace tokens with locations that have not
3946    /// yet been processed.
3947    ///
3948    /// See [`Self::peek_token`] for an example.
3949    pub fn peek_tokens_with_location<const N: usize>(&self) -> [TokenWithSpan; N] {
3950        let mut index = self.index;
3951        core::array::from_fn(|_| loop {
3952            let token = self.tokens.get(index);
3953            index += 1;
3954            if let Some(TokenWithSpan {
3955                token: Token::Whitespace(_),
3956                span: _,
3957            }) = token
3958            {
3959                continue;
3960            }
3961            break token.cloned().unwrap_or(TokenWithSpan {
3962                token: Token::EOF,
3963                span: Span::empty(),
3964            });
3965        })
3966    }
3967
3968    /// Returns references to the `N` next non-whitespace tokens
3969    /// that have not yet been processed.
3970    ///
3971    /// See [`Self::peek_tokens`] for an example.
3972    pub fn peek_tokens_ref<const N: usize>(&self) -> [&TokenWithSpan; N] {
3973        let mut index = self.index;
3974        core::array::from_fn(|_| loop {
3975            let token = self.tokens.get(index);
3976            index += 1;
3977            if let Some(TokenWithSpan {
3978                token: Token::Whitespace(_),
3979                span: _,
3980            }) = token
3981            {
3982                continue;
3983            }
3984            break token.unwrap_or(&EOF_TOKEN);
3985        })
3986    }
3987
3988    /// Return the nth non-whitespace token that has not yet been processed
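    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // `n` is zero-based and whitespace is skipped.
    /// assert!(matches!(parser.peek_nth_token(1).token, Token::Word(w) if w.keyword == Keyword::BY));
    /// ```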
3989    pub fn peek_nth_token(&self, n: usize) -> TokenWithSpan {
3990        self.peek_nth_token_ref(n).clone()
3991    }
3992
3993    /// Return a reference to the nth non-whitespace token that has not yet been processed
3994    pub fn peek_nth_token_ref(&self, mut n: usize) -> &TokenWithSpan {
3995        let mut index = self.index;
3996        loop {
3997            index += 1;
3998            match self.tokens.get(index - 1) {
3999                Some(TokenWithSpan {
4000                    token: Token::Whitespace(_),
4001                    span: _,
4002                }) => continue,
4003                non_whitespace => {
4004                    if n == 0 {
4005                        return non_whitespace.unwrap_or(&EOF_TOKEN);
4006                    }
4007                    n -= 1;
4008                }
4009            }
4010        }
4011    }
4012
4013    /// Return the first token, possibly whitespace, that has not yet been processed,
4014    /// or Token::EOF if the end of the input has been reached.
4015    pub fn peek_token_no_skip(&self) -> TokenWithSpan {
4016        self.peek_nth_token_no_skip(0)
4017    }
4018
4019    /// Return the nth token, possibly whitespace, that has not yet been processed.
4020    pub fn peek_nth_token_no_skip(&self, n: usize) -> TokenWithSpan {
4021        self.tokens
4022            .get(self.index + n)
4023            .cloned()
4024            .unwrap_or(TokenWithSpan {
4025                token: Token::EOF,
4026                span: Span::empty(),
4027            })
4028    }
4029
4030    /// Return true if the next tokens exactly match `expected`.
4031    ///
4032    /// Does not advance the current token.
4033    fn peek_keywords(&mut self, expected: &[Keyword]) -> bool {
4034        let index = self.index;
4035        let matched = self.parse_keywords(expected);
4036        self.index = index;
4037        matched
4038    }
4039
4040    /// Advances to the next non-whitespace token and returns a copy.
4041    ///
4042    /// Please use [`Self::advance_token`] and [`Self::get_current_token`] to
4043    /// avoid the copy.
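    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let token = parser.next_token();
    /// assert!(matches!(token.token, Token::Word(w) if w.keyword == Keyword::SELECT));
    /// ```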
4044    pub fn next_token(&mut self) -> TokenWithSpan {
4045        self.advance_token();
4046        self.get_current_token().clone()
4047    }
4048
4049    /// Returns the index of the current token
4050    ///
4051    /// This can be used with APIs that expect an index, such as
4052    /// [`Self::token_at`]
4053    pub fn get_current_index(&self) -> usize {
4054        self.index.saturating_sub(1)
4055    }
4056
4057    /// Return the next unprocessed token, possibly whitespace.
4058    pub fn next_token_no_skip(&mut self) -> Option<&TokenWithSpan> {
4059        self.index += 1;
4060        self.tokens.get(self.index - 1)
4061    }
4062
4063    /// Advances the current token to the next non-whitespace token
4064    ///
4065    /// See [`Self::get_current_token`] to get the current token after advancing
4066    pub fn advance_token(&mut self) {
4067        loop {
4068            self.index += 1;
4069            match self.tokens.get(self.index - 1) {
4070                Some(TokenWithSpan {
4071                    token: Token::Whitespace(_),
4072                    span: _,
4073                }) => continue,
4074                _ => break,
4075            }
4076        }
4077    }
4078
4079    /// Returns a reference to the current token
4080    ///
4081    /// Does not advance the current token.
4082    pub fn get_current_token(&self) -> &TokenWithSpan {
4083        self.token_at(self.index.saturating_sub(1))
4084    }
4085
4086    /// Returns a reference to the previous token
4087    ///
4088    /// Does not advance the current token.
4089    pub fn get_previous_token(&self) -> &TokenWithSpan {
4090        self.token_at(self.index.saturating_sub(2))
4091    }
4092
4093    /// Returns a reference to the next token
4094    ///
4095    /// Does not advance the current token.
4096    pub fn get_next_token(&self) -> &TokenWithSpan {
4097        self.token_at(self.index)
4098    }
4099
4100    /// Seek back to the previous non-whitespace token.
4101    ///
4102    /// Must be called after `next_token()`, otherwise it might panic. It is OK
4103    /// to call it after `next_token()` has returned an EOF token.
4104    ///
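    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// let token = parser.next_token();
    /// parser.prev_token();
    /// // After seeking back, the same token is returned again.
    /// assert_eq!(parser.next_token().token, token.token);
    /// ```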
4105    // TODO rename to backup_token and deprecate prev_token?
4106    pub fn prev_token(&mut self) {
4107        loop {
4108            assert!(self.index > 0);
4109            self.index -= 1;
4110            if let Some(TokenWithSpan {
4111                token: Token::Whitespace(_),
4112                span: _,
4113            }) = self.tokens.get(self.index)
4114            {
4115                continue;
4116            }
4117            return;
4118        }
4119    }
4120
4121    /// Report `found` was encountered instead of `expected`
4122    pub fn expected<T>(&self, expected: &str, found: TokenWithSpan) -> Result<T, ParserError> {
4123        parser_err!(
4124            format!("Expected: {expected}, found: {found}"),
4125            found.span.start
4126        )
4127    }
4128
4129    /// Report `found` was encountered instead of `expected`
4130    pub fn expected_ref<T>(&self, expected: &str, found: &TokenWithSpan) -> Result<T, ParserError> {
4131        parser_err!(
4132            format!("Expected: {expected}, found: {found}"),
4133            found.span.start
4134        )
4135    }
4136
4137    /// Report that the token at `index` was found instead of `expected`.
4138    pub fn expected_at<T>(&self, expected: &str, index: usize) -> Result<T, ParserError> {
4139        let found = self.tokens.get(index).unwrap_or(&EOF_TOKEN);
4140        parser_err!(
4141            format!("Expected: {expected}, found: {found}"),
4142            found.span.start
4143        )
4144    }
4145
4146    /// If the current token is the `expected` keyword, consume it and return
4147    /// true. Otherwise, no tokens are consumed and false is returned.
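    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("SELECT 1").unwrap();
    /// assert!(parser.parse_keyword(Keyword::SELECT));
    /// // The next token is `1`, not a keyword, so nothing is consumed.
    /// assert!(!parser.parse_keyword(Keyword::FROM));
    /// ```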
4148    #[must_use]
4149    pub fn parse_keyword(&mut self, expected: Keyword) -> bool {
4150        if self.peek_keyword(expected) {
4151            self.advance_token();
4152            true
4153        } else {
4154            false
4155        }
4156    }
4157
4158    #[must_use]
4159    pub fn peek_keyword(&self, expected: Keyword) -> bool {
4160        matches!(&self.peek_token_ref().token, Token::Word(w) if expected == w.keyword)
4161    }
4162
4163    /// If the current token is the `expected` keyword followed by the
4164    /// specified tokens, consume them all and return true.
4165    /// Otherwise, no tokens are consumed and false is returned.
4166    ///
4167    /// Note that if `tokens` is long, this function is not efficient, as it
4168    /// peeks ahead with `peek_nth_token` once for each expected token.
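    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("VALUES (1)").unwrap();
    /// // Both `VALUES` and `(` are consumed because the full pattern matches.
    /// assert!(parser.parse_keyword_with_tokens(Keyword::VALUES, &[Token::LParen]));
    /// ```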
4170    pub fn parse_keyword_with_tokens(&mut self, expected: Keyword, tokens: &[Token]) -> bool {
4171        match &self.peek_token_ref().token {
4172            Token::Word(w) if expected == w.keyword => {
4173                for (idx, token) in tokens.iter().enumerate() {
4174                    if self.peek_nth_token_ref(idx + 1).token != *token {
4175                        return false;
4176                    }
4177                }
4178                // consume all tokens
4179                for _ in 0..(tokens.len() + 1) {
4180                    self.advance_token();
4181                }
4182                true
4183            }
4184            _ => false,
4185        }
4186    }
4187
4188    /// If the current and subsequent tokens exactly match the `keywords`
4189    /// sequence, consume them and return true. Otherwise, no tokens are
4190    /// consumed and false is returned.
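    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("ORDER BY foo").unwrap();
    /// // `ORDER` is not followed by `LIMIT`, so nothing is consumed.
    /// assert!(!parser.parse_keywords(&[Keyword::ORDER, Keyword::LIMIT]));
    /// assert!(parser.parse_keywords(&[Keyword::ORDER, Keyword::BY]));
    /// ```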
4191    #[must_use]
4192    pub fn parse_keywords(&mut self, keywords: &[Keyword]) -> bool {
4193        let index = self.index;
4194        for &keyword in keywords {
4195            if !self.parse_keyword(keyword) {
4196                // println!("parse_keywords aborting .. did not find {:?}", keyword);
4197                // reset index and return immediately
4198                self.index = index;
4199                return false;
4200            }
4201        }
4202        true
4203    }
4204
4205    /// If the current token is one of the given `keywords`, returns the keyword
4206    /// that matches, without consuming the token. Otherwise, returns [`None`].
4207    #[must_use]
4208    pub fn peek_one_of_keywords(&self, keywords: &[Keyword]) -> Option<Keyword> {
4209        for keyword in keywords {
4210            if self.peek_keyword(*keyword) {
4211                return Some(*keyword);
4212            }
4213        }
4214        None
4215    }
4216
4217    /// If the current token is one of the given `keywords`, consume the token
4218    /// and return the keyword that matches. Otherwise, no tokens are consumed
4219    /// and [`None`] is returned.
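    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::keywords::Keyword;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("LIMIT 10").unwrap();
    /// assert_eq!(
    ///     parser.parse_one_of_keywords(&[Keyword::OFFSET, Keyword::LIMIT]),
    ///     Some(Keyword::LIMIT)
    /// );
    /// ```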
4220    #[must_use]
4221    pub fn parse_one_of_keywords(&mut self, keywords: &[Keyword]) -> Option<Keyword> {
4222        match &self.peek_token_ref().token {
4223            Token::Word(w) => {
4224                keywords
4225                    .iter()
4226                    .find(|keyword| **keyword == w.keyword)
4227                    .map(|keyword| {
4228                        self.advance_token();
4229                        *keyword
4230                    })
4231            }
4232            _ => None,
4233        }
4234    }
4235
4236    /// If the current token is one of the expected keywords, consume the token
4237    /// and return the keyword that matches. Otherwise, return an error.
4238    pub fn expect_one_of_keywords(&mut self, keywords: &[Keyword]) -> Result<Keyword, ParserError> {
4239        if let Some(keyword) = self.parse_one_of_keywords(keywords) {
4240            Ok(keyword)
4241        } else {
4242            let keywords: Vec<String> = keywords.iter().map(|x| format!("{x:?}")).collect();
4243            self.expected_ref(
4244                &format!("one of {}", keywords.join(" or ")),
4245                self.peek_token_ref(),
4246            )
4247        }
4248    }
4249
4250    /// If the current token is the `expected` keyword, consume the token.
4251    /// Otherwise, return an error.
4252    ///
4253    // todo deprecate in favor of expect_keyword_is
4254    pub fn expect_keyword(&mut self, expected: Keyword) -> Result<TokenWithSpan, ParserError> {
4255        if self.parse_keyword(expected) {
4256            Ok(self.get_current_token().clone())
4257        } else {
4258            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4259        }
4260    }
4261
4262    /// If the current token is the `expected` keyword, consume the token.
4263    /// Otherwise, return an error.
4264    ///
4265    /// This differs from [`Self::expect_keyword`] only in that the matched keyword
4266    /// token is not returned.
4267    pub fn expect_keyword_is(&mut self, expected: Keyword) -> Result<(), ParserError> {
4268        if self.parse_keyword(expected) {
4269            Ok(())
4270        } else {
4271            self.expected_ref(format!("{:?}", &expected).as_str(), self.peek_token_ref())
4272        }
4273    }
4274
4275    /// If the current and subsequent tokens exactly match the `keywords`
4276    /// sequence, consume them and return `Ok(())`. Otherwise, return an error.
4277    pub fn expect_keywords(&mut self, expected: &[Keyword]) -> Result<(), ParserError> {
4278        for &kw in expected {
4279            self.expect_keyword_is(kw)?;
4280        }
4281        Ok(())
4282    }
4283
4284    /// Consume the next token and return true if it matches the expected token, otherwise return false
4285    ///
4286    /// See [Self::advance_token] to consume the token unconditionally
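    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(", foo").unwrap();
    /// assert!(parser.consume_token(&Token::Comma));
    /// // The comma has been consumed, so a second attempt returns false.
    /// assert!(!parser.consume_token(&Token::Comma));
    /// ```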
4287    #[must_use]
4288    pub fn consume_token(&mut self, expected: &Token) -> bool {
4289        if self.peek_token_ref() == expected {
4290            self.advance_token();
4291            true
4292        } else {
4293            false
4294        }
4295    }
4296
4297    /// If the current and subsequent tokens exactly match the `tokens`
4298    /// sequence, consume them and return true. Otherwise, no tokens are
4299    /// consumed and false is returned.
4300    #[must_use]
4301    pub fn consume_tokens(&mut self, tokens: &[Token]) -> bool {
4302        let index = self.index;
4303        for token in tokens {
4304            if !self.consume_token(token) {
4305                self.index = index;
4306                return false;
4307            }
4308        }
4309        true
4310    }
4311
4312    /// Return an error if the current token is not the expected token; otherwise consume and return it
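    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// # use sqlparser::tokenizer::Token;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(foo)").unwrap();
    /// assert!(parser.expect_token(&Token::LParen).is_ok());
    /// // The next token is the word `foo`, not `)`, so an error is returned.
    /// assert!(parser.expect_token(&Token::RParen).is_err());
    /// ```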
4313    pub fn expect_token(&mut self, expected: &Token) -> Result<TokenWithSpan, ParserError> {
4314        if self.peek_token_ref() == expected {
4315            Ok(self.next_token())
4316        } else {
4317            self.expected_ref(&expected.to_string(), self.peek_token_ref())
4318        }
4319    }
4320
4321    fn parse<T: FromStr>(s: String, loc: Location) -> Result<T, ParserError>
4322    where
4323        <T as FromStr>::Err: Display,
4324    {
4325        s.parse::<T>().map_err(|e| {
4326            ParserError::ParserError(format!(
4327                "Could not parse '{s}' as {}: {e}{loc}",
4328                core::any::type_name::<T>()
4329            ))
4330        })
4331    }
4332
4333    /// Parse a comma-separated list of 1+ SelectItem
4334    pub fn parse_projection(&mut self) -> Result<Vec<SelectItem>, ParserError> {
4335        // BigQuery and Snowflake allow trailing commas, but only in projection lists
4336        // e.g. `SELECT 1, 2, FROM t`
4337        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#trailing_commas
4338        // https://docs.snowflake.com/en/release-notes/2024/8_11#select-supports-trailing-commas
4339
4340        let trailing_commas =
4341            self.options.trailing_commas | self.dialect.supports_projection_trailing_commas();
4342
4343        self.parse_comma_separated_with_trailing_commas(
4344            |p| p.parse_select_item(),
4345            trailing_commas,
4346            Self::is_reserved_for_column_alias,
4347        )
4348    }
4349
4350    pub fn parse_actions_list(&mut self) -> Result<Vec<Action>, ParserError> {
4351        let mut values = vec![];
4352        loop {
4353            values.push(self.parse_grant_permission()?);
4354            if !self.consume_token(&Token::Comma) {
4355                break;
4356            } else if self.options.trailing_commas {
4357                match self.peek_token().token {
4358                    Token::Word(kw) if kw.keyword == Keyword::ON => {
4359                        break;
4360                    }
4361                    Token::RParen
4362                    | Token::SemiColon
4363                    | Token::EOF
4364                    | Token::RBracket
4365                    | Token::RBrace => break,
4366                    _ => continue,
4367                }
4368            }
4369        }
4370        Ok(values)
4371    }
4372
4373    /// Parse a list of [TableWithJoins]
4374    fn parse_table_with_joins(&mut self) -> Result<Vec<TableWithJoins>, ParserError> {
4375        let trailing_commas = self.dialect.supports_from_trailing_commas();
4376
4377        self.parse_comma_separated_with_trailing_commas(
4378            Parser::parse_table_and_joins,
4379            trailing_commas,
4380            |kw, _parser| {
4381                self.dialect
4382                    .get_reserved_keywords_for_table_factor()
4383                    .contains(kw)
4384            },
4385        )
4386    }
4387
4388    /// Parse the comma of a comma-separated syntax element.
4389    /// `R` is a predicate that should return true if the next
4390    /// keyword is a reserved keyword.
4391    /// Allows for control over trailing commas.
4392    ///
4393    /// Returns true if the end of the list has been reached, i.e. there is no next element.
4394    fn is_parse_comma_separated_end_with_trailing_commas<R>(
4395        &mut self,
4396        trailing_commas: bool,
4397        is_reserved_keyword: &R,
4398    ) -> bool
4399    where
4400        R: Fn(&Keyword, &mut Parser) -> bool,
4401    {
4402        if !self.consume_token(&Token::Comma) {
4403            true
4404        } else if trailing_commas {
4405            let token = self.next_token().token;
4406            let is_end = match token {
4407                Token::Word(ref kw) if is_reserved_keyword(&kw.keyword, self) => true,
4408                Token::RParen | Token::SemiColon | Token::EOF | Token::RBracket | Token::RBrace => {
4409                    true
4410                }
4411                _ => false,
4412            };
4413            self.prev_token();
4414
4415            is_end
4416        } else {
4417            false
4418        }
4419    }
4420
4421    /// Parse the comma of a comma-separated syntax element.
4422    /// Returns true if the end of the list has been reached, i.e. there is no next element.
4423    fn is_parse_comma_separated_end(&mut self) -> bool {
4424        self.is_parse_comma_separated_end_with_trailing_commas(
4425            self.options.trailing_commas,
4426            &Self::is_reserved_for_column_alias,
4427        )
4428    }
4429
4430    /// Parse a comma-separated list of 1+ items accepted by `F`
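    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("1, 2, 3").unwrap();
    /// let exprs = parser.parse_comma_separated(Parser::parse_expr).unwrap();
    /// assert_eq!(exprs.len(), 3);
    /// ```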
4431    pub fn parse_comma_separated<T, F>(&mut self, f: F) -> Result<Vec<T>, ParserError>
4432    where
4433        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4434    {
4435        self.parse_comma_separated_with_trailing_commas(
4436            f,
4437            self.options.trailing_commas,
4438            Self::is_reserved_for_column_alias,
4439        )
4440    }
4441
4442    /// Parse a comma-separated list of 1+ items accepted by `F`.
4443    /// `R` is a predicate that should return true if the next
4444    /// keyword is a reserved keyword.
4445    /// Allows for control over trailing commas.
4446    fn parse_comma_separated_with_trailing_commas<T, F, R>(
4447        &mut self,
4448        mut f: F,
4449        trailing_commas: bool,
4450        is_reserved_keyword: R,
4451    ) -> Result<Vec<T>, ParserError>
4452    where
4453        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4454        R: Fn(&Keyword, &mut Parser) -> bool,
4455    {
4456        let mut values = vec![];
4457        loop {
4458            values.push(f(self)?);
4459            if self.is_parse_comma_separated_end_with_trailing_commas(
4460                trailing_commas,
4461                &is_reserved_keyword,
4462            ) {
4463                break;
4464            }
4465        }
4466        Ok(values)
4467    }
4468
4469    /// Parse a period-separated list of 1+ items accepted by `F`
4470    fn parse_period_separated<T, F>(&mut self, mut f: F) -> Result<Vec<T>, ParserError>
4471    where
4472        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4473    {
4474        let mut values = vec![];
4475        loop {
4476            values.push(f(self)?);
4477            if !self.consume_token(&Token::Period) {
4478                break;
4479            }
4480        }
4481        Ok(values)
4482    }
4483
4484    /// Parse a keyword-separated list of 1+ items accepted by `F`
4485    pub fn parse_keyword_separated<T, F>(
4486        &mut self,
4487        keyword: Keyword,
4488        mut f: F,
4489    ) -> Result<Vec<T>, ParserError>
4490    where
4491        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4492    {
4493        let mut values = vec![];
4494        loop {
4495            values.push(f(self)?);
4496            if !self.parse_keyword(keyword) {
4497                break;
4498            }
4499        }
4500        Ok(values)
4501    }
4502
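    /// Parse a single item accepted by `F`, enclosed in parentheses.
    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(1 + 2)").unwrap();
    /// let expr = parser.parse_parenthesized(Parser::parse_expr).unwrap();
    /// assert_eq!(expr.to_string(), "1 + 2");
    /// ```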
4503    pub fn parse_parenthesized<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4504    where
4505        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4506    {
4507        self.expect_token(&Token::LParen)?;
4508        let res = f(self)?;
4509        self.expect_token(&Token::RParen)?;
4510        Ok(res)
4511    }
4512
4513    /// Parse a comma-separated list of 0+ items accepted by `F`
4514    /// * `end_token` - expected end token for the closure (e.g. [Token::RParen], [Token::RBrace] ...)
4515    pub fn parse_comma_separated0<T, F>(
4516        &mut self,
4517        f: F,
4518        end_token: Token,
4519    ) -> Result<Vec<T>, ParserError>
4520    where
4521        F: FnMut(&mut Parser<'a>) -> Result<T, ParserError>,
4522    {
4523        if self.peek_token().token == end_token {
4524            return Ok(vec![]);
4525        }
4526
4527        if self.options.trailing_commas && self.peek_tokens() == [Token::Comma, end_token] {
4528            let _ = self.consume_token(&Token::Comma);
4529            return Ok(vec![]);
4530        }
4531
4532        self.parse_comma_separated(f)
4533    }
4534
4535    /// Parses 0 or more statements, each followed by a semicolon.
4536    /// If the next token is any of `terminal_keywords` then no more
4537    /// statements will be parsed.
4538    pub(crate) fn parse_statement_list(
4539        &mut self,
4540        terminal_keywords: &[Keyword],
4541    ) -> Result<Vec<Statement>, ParserError> {
4542        let mut values = vec![];
4543        loop {
4544            match &self.peek_nth_token_ref(0).token {
4545                Token::EOF => break,
4546                Token::Word(w) => {
4547                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
4548                        break;
4549                    }
4550                }
4551                _ => {}
4552            }
4553
4554            values.push(self.parse_statement()?);
4555            self.expect_token(&Token::SemiColon)?;
4556        }
4557        Ok(values)
4558    }
4559
4560    /// Default implementation of a predicate that returns true if
4561    /// the specified keyword is reserved and cannot be used as a column alias.
4562    /// See [Dialect::is_column_alias]
4563    fn is_reserved_for_column_alias(kw: &Keyword, parser: &mut Parser) -> bool {
4564        !parser.dialect.is_column_alias(kw, parser)
4565    }
4566
4567    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4568    /// Returns `ParserError::RecursionLimitExceeded` if `f` returns a `RecursionLimitExceeded`.
4569    /// Returns `Ok(None)` if `f` returns any other error.
4570    pub fn maybe_parse<T, F>(&mut self, f: F) -> Result<Option<T>, ParserError>
4571    where
4572        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4573    {
4574        match self.try_parse(f) {
4575            Ok(t) => Ok(Some(t)),
4576            Err(ParserError::RecursionLimitExceeded) => Err(ParserError::RecursionLimitExceeded),
4577            _ => Ok(None),
4578        }
4579    }
4580
4581    /// Run a parser method `f`, reverting to the original position if unsuccessful.
4582    pub fn try_parse<T, F>(&mut self, mut f: F) -> Result<T, ParserError>
4583    where
4584        F: FnMut(&mut Parser) -> Result<T, ParserError>,
4585    {
4586        let index = self.index;
4587        match f(self) {
4588            Ok(t) => Ok(t),
4589            Err(e) => {
4590                // Reset the index so that no tokens are consumed if parsing fails
4591                self.index = index;
4592                Err(e)
4593            }
4594        }
4595    }
4596
4597    /// Parse either `ALL`, `DISTINCT` or `DISTINCT ON (...)`. Returns [`None`] if `ALL` is parsed,
4598    /// and returns a [`ParserError`] if both `ALL` and `DISTINCT` are found.
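    ///
    /// A short illustrative example (assuming the `GenericDialect`):
    /// ```rust
    /// # use sqlparser::dialect::GenericDialect;
    /// # use sqlparser::parser::Parser;
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("DISTINCT foo").unwrap();
    /// assert!(parser.parse_all_or_distinct().unwrap().is_some());
    /// ```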
4599    pub fn parse_all_or_distinct(&mut self) -> Result<Option<Distinct>, ParserError> {
4600        let loc = self.peek_token().span.start;
4601        let all = self.parse_keyword(Keyword::ALL);
4602        let distinct = self.parse_keyword(Keyword::DISTINCT);
4603        if !distinct {
4604            return Ok(None);
4605        }
4606        if all {
4607            return parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc);
4608        }
4609        let on = self.parse_keyword(Keyword::ON);
4610        if !on {
4611            return Ok(Some(Distinct::Distinct));
4612        }
4613
4614        self.expect_token(&Token::LParen)?;
4615        let col_names = if self.consume_token(&Token::RParen) {
4616            self.prev_token();
4617            Vec::new()
4618        } else {
4619            self.parse_comma_separated(Parser::parse_expr)?
4620        };
4621        self.expect_token(&Token::RParen)?;
4622        Ok(Some(Distinct::On(col_names)))
4623    }
4624
4625    /// Parse a SQL CREATE statement
4626    pub fn parse_create(&mut self) -> Result<Statement, ParserError> {
4627        let or_replace = self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]);
4628        let or_alter = self.parse_keywords(&[Keyword::OR, Keyword::ALTER]);
4629        let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
4630        let global = self.parse_one_of_keywords(&[Keyword::GLOBAL]).is_some();
4631        let transient = self.parse_one_of_keywords(&[Keyword::TRANSIENT]).is_some();
4632        let global: Option<bool> = if global {
4633            Some(true)
4634        } else if local {
4635            Some(false)
4636        } else {
4637            None
4638        };
4639        let temporary = self
4640            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
4641            .is_some();
4642        let persistent = dialect_of!(self is DuckDbDialect)
4643            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
4644        let create_view_params = self.parse_create_view_params()?;
4645        if self.parse_keyword(Keyword::TABLE) {
4646            self.parse_create_table(or_replace, temporary, global, transient)
4647        } else if self.parse_keyword(Keyword::MATERIALIZED) || self.parse_keyword(Keyword::VIEW) {
4648            self.prev_token();
4649            self.parse_create_view(or_alter, or_replace, temporary, create_view_params)
4650        } else if self.parse_keyword(Keyword::POLICY) {
4651            self.parse_create_policy()
4652        } else if self.parse_keyword(Keyword::EXTERNAL) {
4653            self.parse_create_external_table(or_replace)
4654        } else if self.parse_keyword(Keyword::FUNCTION) {
4655            self.parse_create_function(or_alter, or_replace, temporary)
4656        } else if self.parse_keyword(Keyword::DOMAIN) {
4657            self.parse_create_domain()
4658        } else if self.parse_keyword(Keyword::TRIGGER) {
4659            self.parse_create_trigger(or_alter, or_replace, false)
4660        } else if self.parse_keywords(&[Keyword::CONSTRAINT, Keyword::TRIGGER]) {
4661            self.parse_create_trigger(or_alter, or_replace, true)
4662        } else if self.parse_keyword(Keyword::MACRO) {
4663            self.parse_create_macro(or_replace, temporary)
4664        } else if self.parse_keyword(Keyword::SECRET) {
4665            self.parse_create_secret(or_replace, temporary, persistent)
4666        } else if or_replace {
4667            self.expected(
4668                "[EXTERNAL] TABLE or [MATERIALIZED] VIEW or FUNCTION after CREATE OR REPLACE",
4669                self.peek_token(),
4670            )
4671        } else if self.parse_keyword(Keyword::EXTENSION) {
4672            self.parse_create_extension()
4673        } else if self.parse_keyword(Keyword::INDEX) {
4674            self.parse_create_index(false)
4675        } else if self.parse_keywords(&[Keyword::UNIQUE, Keyword::INDEX]) {
4676            self.parse_create_index(true)
4677        } else if self.parse_keyword(Keyword::VIRTUAL) {
4678            self.parse_create_virtual_table()
4679        } else if self.parse_keyword(Keyword::SCHEMA) {
4680            self.parse_create_schema()
4681        } else if self.parse_keyword(Keyword::DATABASE) {
4682            self.parse_create_database()
4683        } else if self.parse_keyword(Keyword::ROLE) {
4684            self.parse_create_role()
4685        } else if self.parse_keyword(Keyword::SEQUENCE) {
4686            self.parse_create_sequence(temporary)
4687        } else if self.parse_keyword(Keyword::TYPE) {
4688            self.parse_create_type()
4689        } else if self.parse_keyword(Keyword::PROCEDURE) {
4690            self.parse_create_procedure(or_alter)
4691        } else if self.parse_keyword(Keyword::CONNECTOR) {
4692            self.parse_create_connector()
4693        } else if self.parse_keyword(Keyword::SERVER) {
4694            self.parse_pg_create_server()
4695        } else {
4696            self.expected("an object type after CREATE", self.peek_token())
4697        }
4698    }
4699
4700    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
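    ///
    /// A minimal end-to-end sketch (assuming the `DuckDbDialect`; the secret
    /// definition shown is illustrative only):
    /// ```rust
    /// # use sqlparser::dialect::DuckDbDialect;
    /// # use sqlparser::parser::Parser;
    /// let sql = "CREATE SECRET my_secret (TYPE S3)";
    /// let statements = Parser::parse_sql(&DuckDbDialect {}, sql).unwrap();
    /// assert_eq!(statements.len(), 1);
    /// ```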
4701    pub fn parse_create_secret(
4702        &mut self,
4703        or_replace: bool,
4704        temporary: bool,
4705        persistent: bool,
4706    ) -> Result<Statement, ParserError> {
4707        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4708
4709        let mut storage_specifier = None;
4710        let mut name = None;
4711        if self.peek_token() != Token::LParen {
4712            if self.parse_keyword(Keyword::IN) {
4713                storage_specifier = self.parse_identifier().ok()
4714            } else {
4715                name = self.parse_identifier().ok();
4716            }
4717
4718            // Storage specifier may follow the name
4719            if storage_specifier.is_none()
4720                && self.peek_token() != Token::LParen
4721                && self.parse_keyword(Keyword::IN)
4722            {
4723                storage_specifier = self.parse_identifier().ok();
4724            }
4725        }
4726
4727        self.expect_token(&Token::LParen)?;
4728        self.expect_keyword_is(Keyword::TYPE)?;
4729        let secret_type = self.parse_identifier()?;
4730
4731        let mut options = Vec::new();
4732        if self.consume_token(&Token::Comma) {
4733            options.append(&mut self.parse_comma_separated(|p| {
4734                let key = p.parse_identifier()?;
4735                let value = p.parse_identifier()?;
4736                Ok(SecretOption { key, value })
4737            })?);
4738        }
4739        self.expect_token(&Token::RParen)?;
4740
4741        let temp = match (temporary, persistent) {
4742            (true, false) => Some(true),
4743            (false, true) => Some(false),
4744            (false, false) => None,
4745            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
4746        };
4747
4748        Ok(Statement::CreateSecret {
4749            or_replace,
4750            temporary: temp,
4751            if_not_exists,
4752            name,
4753            storage_specifier,
4754            secret_type,
4755            options,
4756        })
4757    }
4758
4759    /// Parse a CACHE TABLE statement
4760    pub fn parse_cache_table(&mut self) -> Result<Statement, ParserError> {
4761        let (mut table_flag, mut options, mut has_as, mut query) = (None, vec![], false, None);
4762        if self.parse_keyword(Keyword::TABLE) {
4763            let table_name = self.parse_object_name(false)?;
4764            if self.peek_token().token != Token::EOF {
4765                if let Token::Word(word) = self.peek_token().token {
4766                    if word.keyword == Keyword::OPTIONS {
4767                        options = self.parse_options(Keyword::OPTIONS)?
4768                    }
4769                };
4770
4771                if self.peek_token().token != Token::EOF {
4772                    let (a, q) = self.parse_as_query()?;
4773                    has_as = a;
4774                    query = Some(q);
4775                }
4776
4777                Ok(Statement::Cache {
4778                    table_flag,
4779                    table_name,
4780                    has_as,
4781                    options,
4782                    query,
4783                })
4784            } else {
4785                Ok(Statement::Cache {
4786                    table_flag,
4787                    table_name,
4788                    has_as,
4789                    options,
4790                    query,
4791                })
4792            }
4793        } else {
4794            table_flag = Some(self.parse_object_name(false)?);
4795            if self.parse_keyword(Keyword::TABLE) {
4796                let table_name = self.parse_object_name(false)?;
4797                if self.peek_token() != Token::EOF {
4798                    if let Token::Word(word) = self.peek_token().token {
4799                        if word.keyword == Keyword::OPTIONS {
4800                            options = self.parse_options(Keyword::OPTIONS)?
4801                        }
4802                    };
4803
4804                    if self.peek_token() != Token::EOF {
4805                        let (a, q) = self.parse_as_query()?;
4806                        has_as = a;
4807                        query = Some(q);
4808                    }
4809
4810                    Ok(Statement::Cache {
4811                        table_flag,
4812                        table_name,
4813                        has_as,
4814                        options,
4815                        query,
4816                    })
4817                } else {
4818                    Ok(Statement::Cache {
4819                        table_flag,
4820                        table_name,
4821                        has_as,
4822                        options,
4823                        query,
4824                    })
4825                }
4826            } else {
4827                if self.peek_token() == Token::EOF {
4828                    self.prev_token();
4829                }
4830                self.expected("a `TABLE` keyword", self.peek_token())
4831            }
4832        }
4833    }
4834
4835    /// Parse an optional 'AS' before a query, such as in `WITH XXX AS SELECT XXX` or `CACHE TABLE AS SELECT XXX`
4836    pub fn parse_as_query(&mut self) -> Result<(bool, Box<Query>), ParserError> {
4837        match self.peek_token().token {
4838            Token::Word(word) => match word.keyword {
4839                Keyword::AS => {
4840                    self.next_token();
4841                    Ok((true, self.parse_query()?))
4842                }
4843                _ => Ok((false, self.parse_query()?)),
4844            },
4845            _ => self.expected("a QUERY statement", self.peek_token()),
4846        }
4847    }
4848
4849    /// Parse an UNCACHE TABLE statement
4850    pub fn parse_uncache_table(&mut self) -> Result<Statement, ParserError> {
4851        self.expect_keyword_is(Keyword::TABLE)?;
4852        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
4853        let table_name = self.parse_object_name(false)?;
4854        Ok(Statement::UNCache {
4855            table_name,
4856            if_exists,
4857        })
4858    }
4859
4860    /// SQLite-specific `CREATE VIRTUAL TABLE`
4861    pub fn parse_create_virtual_table(&mut self) -> Result<Statement, ParserError> {
4862        self.expect_keyword_is(Keyword::TABLE)?;
4863        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4864        let table_name = self.parse_object_name(false)?;
4865        self.expect_keyword_is(Keyword::USING)?;
4866        let module_name = self.parse_identifier()?;
4867        // SQLite docs note that module "arguments syntax is sufficiently
4868        // general that the arguments can be made to appear as column
4869        // definitions in a traditional CREATE TABLE statement", but
4870        // we don't implement that.
4871        let module_args = self.parse_parenthesized_column_list(Optional, false)?;
4872        Ok(Statement::CreateVirtualTable {
4873            name: table_name,
4874            if_not_exists,
4875            module_name,
4876            module_args,
4877        })
4878    }
4879
4880    pub fn parse_create_schema(&mut self) -> Result<Statement, ParserError> {
4881        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4882
4883        let schema_name = self.parse_schema_name()?;
4884
4885        let default_collate_spec = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
4886            Some(self.parse_expr()?)
4887        } else {
4888            None
4889        };
4890
4891        let with = if self.peek_keyword(Keyword::WITH) {
4892            Some(self.parse_options(Keyword::WITH)?)
4893        } else {
4894            None
4895        };
4896
4897        let options = if self.peek_keyword(Keyword::OPTIONS) {
4898            Some(self.parse_options(Keyword::OPTIONS)?)
4899        } else {
4900            None
4901        };
4902
4903        Ok(Statement::CreateSchema {
4904            schema_name,
4905            if_not_exists,
4906            with,
4907            options,
4908            default_collate_spec,
4909        })
4910    }
4911
4912    fn parse_schema_name(&mut self) -> Result<SchemaName, ParserError> {
4913        if self.parse_keyword(Keyword::AUTHORIZATION) {
4914            Ok(SchemaName::UnnamedAuthorization(self.parse_identifier()?))
4915        } else {
4916            let name = self.parse_object_name(false)?;
4917
4918            if self.parse_keyword(Keyword::AUTHORIZATION) {
4919                Ok(SchemaName::NamedAuthorization(
4920                    name,
4921                    self.parse_identifier()?,
4922                ))
4923            } else {
4924                Ok(SchemaName::Simple(name))
4925            }
4926        }
4927    }
4928
4929    pub fn parse_create_database(&mut self) -> Result<Statement, ParserError> {
4930        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
4931        let db_name = self.parse_object_name(false)?;
4932        let mut location = None;
4933        let mut managed_location = None;
4934        loop {
4935            match self.parse_one_of_keywords(&[Keyword::LOCATION, Keyword::MANAGEDLOCATION]) {
4936                Some(Keyword::LOCATION) => location = Some(self.parse_literal_string()?),
4937                Some(Keyword::MANAGEDLOCATION) => {
4938                    managed_location = Some(self.parse_literal_string()?)
4939                }
4940                _ => break,
4941            }
4942        }
4943        Ok(Statement::CreateDatabase {
4944            db_name,
4945            if_not_exists: ine,
4946            location,
4947            managed_location,
4948        })
4949    }
4950
4951    pub fn parse_optional_create_function_using(
4952        &mut self,
4953    ) -> Result<Option<CreateFunctionUsing>, ParserError> {
4954        if !self.parse_keyword(Keyword::USING) {
4955            return Ok(None);
4956        };
4957        let keyword =
4958            self.expect_one_of_keywords(&[Keyword::JAR, Keyword::FILE, Keyword::ARCHIVE])?;
4959
4960        let uri = self.parse_literal_string()?;
4961
4962        match keyword {
4963            Keyword::JAR => Ok(Some(CreateFunctionUsing::Jar(uri))),
4964            Keyword::FILE => Ok(Some(CreateFunctionUsing::File(uri))),
4965            Keyword::ARCHIVE => Ok(Some(CreateFunctionUsing::Archive(uri))),
4966            _ => self.expected(
4967                "JAR, FILE or ARCHIVE, got {:?}",
4968                TokenWithSpan::wrap(Token::make_keyword(format!("{keyword:?}").as_str())),
4969            ),
4970        }
4971    }
4972
4973    pub fn parse_create_function(
4974        &mut self,
4975        or_alter: bool,
4976        or_replace: bool,
4977        temporary: bool,
4978    ) -> Result<Statement, ParserError> {
4979        if dialect_of!(self is HiveDialect) {
4980            self.parse_hive_create_function(or_replace, temporary)
4981        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect) {
4982            self.parse_postgres_create_function(or_replace, temporary)
4983        } else if dialect_of!(self is DuckDbDialect) {
4984            self.parse_create_macro(or_replace, temporary)
4985        } else if dialect_of!(self is BigQueryDialect) {
4986            self.parse_bigquery_create_function(or_replace, temporary)
4987        } else if dialect_of!(self is MsSqlDialect) {
4988            self.parse_mssql_create_function(or_alter, or_replace, temporary)
4989        } else {
4990            self.prev_token();
4991            self.expected("an object type after CREATE", self.peek_token())
4992        }
4993    }
4994
4995    /// Parse `CREATE FUNCTION` for [PostgreSQL]
4996    ///
4997    /// [PostgreSQL]: https://www.postgresql.org/docs/15/sql-createfunction.html
4998    fn parse_postgres_create_function(
4999        &mut self,
5000        or_replace: bool,
5001        temporary: bool,
5002    ) -> Result<Statement, ParserError> {
5003        let name = self.parse_object_name(false)?;
5004
5005        self.expect_token(&Token::LParen)?;
5006        let args = if Token::RParen != self.peek_token_ref().token {
5007            self.parse_comma_separated(Parser::parse_function_arg)?
5008        } else {
5009            vec![]
5010        };
5011        self.expect_token(&Token::RParen)?;
5012
5013        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5014            Some(self.parse_data_type()?)
5015        } else {
5016            None
5017        };
5018
5019        #[derive(Default)]
5020        struct Body {
5021            language: Option<Ident>,
5022            behavior: Option<FunctionBehavior>,
5023            function_body: Option<CreateFunctionBody>,
5024            called_on_null: Option<FunctionCalledOnNull>,
5025            parallel: Option<FunctionParallel>,
5026        }
5027        let mut body = Body::default();
5028        loop {
5029            fn ensure_not_set<T>(field: &Option<T>, name: &str) -> Result<(), ParserError> {
5030                if field.is_some() {
5031                    return Err(ParserError::ParserError(format!(
5032                        "{name} specified more than once",
5033                    )));
5034                }
5035                Ok(())
5036            }
5037            if self.parse_keyword(Keyword::AS) {
5038                ensure_not_set(&body.function_body, "AS")?;
5039                body.function_body = Some(CreateFunctionBody::AsBeforeOptions(
5040                    self.parse_create_function_body_string()?,
5041                ));
5042            } else if self.parse_keyword(Keyword::LANGUAGE) {
5043                ensure_not_set(&body.language, "LANGUAGE")?;
5044                body.language = Some(self.parse_identifier()?);
5045            } else if self.parse_keyword(Keyword::IMMUTABLE) {
5046                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5047                body.behavior = Some(FunctionBehavior::Immutable);
5048            } else if self.parse_keyword(Keyword::STABLE) {
5049                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5050                body.behavior = Some(FunctionBehavior::Stable);
5051            } else if self.parse_keyword(Keyword::VOLATILE) {
5052                ensure_not_set(&body.behavior, "IMMUTABLE | STABLE | VOLATILE")?;
5053                body.behavior = Some(FunctionBehavior::Volatile);
5054            } else if self.parse_keywords(&[
5055                Keyword::CALLED,
5056                Keyword::ON,
5057                Keyword::NULL,
5058                Keyword::INPUT,
5059            ]) {
5060                ensure_not_set(
5061                    &body.called_on_null,
5062                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5063                )?;
5064                body.called_on_null = Some(FunctionCalledOnNull::CalledOnNullInput);
5065            } else if self.parse_keywords(&[
5066                Keyword::RETURNS,
5067                Keyword::NULL,
5068                Keyword::ON,
5069                Keyword::NULL,
5070                Keyword::INPUT,
5071            ]) {
5072                ensure_not_set(
5073                    &body.called_on_null,
5074                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5075                )?;
5076                body.called_on_null = Some(FunctionCalledOnNull::ReturnsNullOnNullInput);
5077            } else if self.parse_keyword(Keyword::STRICT) {
5078                ensure_not_set(
5079                    &body.called_on_null,
5080                    "CALLED ON NULL INPUT | RETURNS NULL ON NULL INPUT | STRICT",
5081                )?;
5082                body.called_on_null = Some(FunctionCalledOnNull::Strict);
5083            } else if self.parse_keyword(Keyword::PARALLEL) {
5084                ensure_not_set(&body.parallel, "PARALLEL { UNSAFE | RESTRICTED | SAFE }")?;
5085                if self.parse_keyword(Keyword::UNSAFE) {
5086                    body.parallel = Some(FunctionParallel::Unsafe);
5087                } else if self.parse_keyword(Keyword::RESTRICTED) {
5088                    body.parallel = Some(FunctionParallel::Restricted);
5089                } else if self.parse_keyword(Keyword::SAFE) {
5090                    body.parallel = Some(FunctionParallel::Safe);
5091                } else {
5092                    return self.expected("one of UNSAFE | RESTRICTED | SAFE", self.peek_token());
5093                }
5094            } else if self.parse_keyword(Keyword::RETURN) {
5095                ensure_not_set(&body.function_body, "RETURN")?;
5096                body.function_body = Some(CreateFunctionBody::Return(self.parse_expr()?));
5097            } else {
5098                break;
5099            }
5100        }
5101
5102        Ok(Statement::CreateFunction(CreateFunction {
5103            or_alter: false,
5104            or_replace,
5105            temporary,
5106            name,
5107            args: Some(args),
5108            return_type,
5109            behavior: body.behavior,
5110            called_on_null: body.called_on_null,
5111            parallel: body.parallel,
5112            language: body.language,
5113            function_body: body.function_body,
5114            if_not_exists: false,
5115            using: None,
5116            determinism_specifier: None,
5117            options: None,
5118            remote_connection: None,
5119        }))
5120    }
5121
5122    /// Parse `CREATE FUNCTION` for [Hive]
5123    ///
5124    /// [Hive]: https://cwiki.apache.org/confluence/display/hive/languagemanual+ddl#LanguageManualDDL-Create/Drop/ReloadFunction
5125    fn parse_hive_create_function(
5126        &mut self,
5127        or_replace: bool,
5128        temporary: bool,
5129    ) -> Result<Statement, ParserError> {
5130        let name = self.parse_object_name(false)?;
5131        self.expect_keyword_is(Keyword::AS)?;
5132
5133        let as_ = self.parse_create_function_body_string()?;
5134        let using = self.parse_optional_create_function_using()?;
5135
5136        Ok(Statement::CreateFunction(CreateFunction {
5137            or_alter: false,
5138            or_replace,
5139            temporary,
5140            name,
5141            function_body: Some(CreateFunctionBody::AsBeforeOptions(as_)),
5142            using,
5143            if_not_exists: false,
5144            args: None,
5145            return_type: None,
5146            behavior: None,
5147            called_on_null: None,
5148            parallel: None,
5149            language: None,
5150            determinism_specifier: None,
5151            options: None,
5152            remote_connection: None,
5153        }))
5154    }
5155
5156    /// Parse `CREATE FUNCTION` for [BigQuery]
5157    ///
5158    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement
5159    fn parse_bigquery_create_function(
5160        &mut self,
5161        or_replace: bool,
5162        temporary: bool,
5163    ) -> Result<Statement, ParserError> {
5164        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5165        let (name, args) = self.parse_create_function_name_and_params()?;
5166
5167        let return_type = if self.parse_keyword(Keyword::RETURNS) {
5168            Some(self.parse_data_type()?)
5169        } else {
5170            None
5171        };
5172
5173        let determinism_specifier = if self.parse_keyword(Keyword::DETERMINISTIC) {
5174            Some(FunctionDeterminismSpecifier::Deterministic)
5175        } else if self.parse_keywords(&[Keyword::NOT, Keyword::DETERMINISTIC]) {
5176            Some(FunctionDeterminismSpecifier::NotDeterministic)
5177        } else {
5178            None
5179        };
5180
5181        let language = if self.parse_keyword(Keyword::LANGUAGE) {
5182            Some(self.parse_identifier()?)
5183        } else {
5184            None
5185        };
5186
5187        let remote_connection =
5188            if self.parse_keywords(&[Keyword::REMOTE, Keyword::WITH, Keyword::CONNECTION]) {
5189                Some(self.parse_object_name(false)?)
5190            } else {
5191                None
5192            };
5193
5194        // `OPTIONS` may come before or after the function body but
5195        // may be specified at most once.
5196        let mut options = self.maybe_parse_options(Keyword::OPTIONS)?;
5197
5198        let function_body = if remote_connection.is_none() {
5199            self.expect_keyword_is(Keyword::AS)?;
5200            let expr = self.parse_expr()?;
5201            if options.is_none() {
5202                options = self.maybe_parse_options(Keyword::OPTIONS)?;
5203                Some(CreateFunctionBody::AsBeforeOptions(expr))
5204            } else {
5205                Some(CreateFunctionBody::AsAfterOptions(expr))
5206            }
5207        } else {
5208            None
5209        };
5210
5211        Ok(Statement::CreateFunction(CreateFunction {
5212            or_alter: false,
5213            or_replace,
5214            temporary,
5215            if_not_exists,
5216            name,
5217            args: Some(args),
5218            return_type,
5219            function_body,
5220            language,
5221            determinism_specifier,
5222            options,
5223            remote_connection,
5224            using: None,
5225            behavior: None,
5226            called_on_null: None,
5227            parallel: None,
5228        }))
5229    }
5230
5231    /// Parse `CREATE FUNCTION` for [MsSql]
5232    ///
5233    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/statements/create-function-transact-sql
5234    fn parse_mssql_create_function(
5235        &mut self,
5236        or_alter: bool,
5237        or_replace: bool,
5238        temporary: bool,
5239    ) -> Result<Statement, ParserError> {
5240        let (name, args) = self.parse_create_function_name_and_params()?;
5241
5242        self.expect_keyword(Keyword::RETURNS)?;
5243
5244        let return_table = self.maybe_parse(|p| {
5245            let return_table_name = p.parse_identifier()?;
5246
5247            p.expect_keyword_is(Keyword::TABLE)?;
5248            p.prev_token();
5249
5250            let table_column_defs = match p.parse_data_type()? {
5251                DataType::Table(Some(table_column_defs)) if !table_column_defs.is_empty() => {
5252                    table_column_defs
5253                }
5254                _ => parser_err!(
5255                    "Expected table column definitions after TABLE keyword",
5256                    p.peek_token().span.start
5257                )?,
5258            };
5259
5260            Ok(DataType::NamedTable {
5261                name: ObjectName(vec![ObjectNamePart::Identifier(return_table_name)]),
5262                columns: table_column_defs,
5263            })
5264        })?;
5265
5266        let return_type = if return_table.is_some() {
5267            return_table
5268        } else {
5269            Some(self.parse_data_type()?)
5270        };
5271
5272        let _ = self.parse_keyword(Keyword::AS);
5273
5274        let function_body = if self.peek_keyword(Keyword::BEGIN) {
5275            let begin_token = self.expect_keyword(Keyword::BEGIN)?;
5276            let statements = self.parse_statement_list(&[Keyword::END])?;
5277            let end_token = self.expect_keyword(Keyword::END)?;
5278
5279            Some(CreateFunctionBody::AsBeginEnd(BeginEndStatements {
5280                begin_token: AttachedToken(begin_token),
5281                statements,
5282                end_token: AttachedToken(end_token),
5283            }))
5284        } else if self.parse_keyword(Keyword::RETURN) {
5285            if self.peek_token() == Token::LParen {
5286                Some(CreateFunctionBody::AsReturnExpr(self.parse_expr()?))
5287            } else if self.peek_keyword(Keyword::SELECT) {
5288                let select = self.parse_select()?;
5289                Some(CreateFunctionBody::AsReturnSelect(select))
5290            } else {
5291                parser_err!(
5292                    "Expected a subquery (or bare SELECT statement) after RETURN",
5293                    self.peek_token().span.start
5294                )?
5295            }
5296        } else {
5297            parser_err!("Unparsable function body", self.peek_token().span.start)?
5298        };
5299
5300        Ok(Statement::CreateFunction(CreateFunction {
5301            or_alter,
5302            or_replace,
5303            temporary,
5304            if_not_exists: false,
5305            name,
5306            args: Some(args),
5307            return_type,
5308            function_body,
5309            language: None,
5310            determinism_specifier: None,
5311            options: None,
5312            remote_connection: None,
5313            using: None,
5314            behavior: None,
5315            called_on_null: None,
5316            parallel: None,
5317        }))
5318    }
5319
5320    fn parse_create_function_name_and_params(
5321        &mut self,
5322    ) -> Result<(ObjectName, Vec<OperateFunctionArg>), ParserError> {
5323        let name = self.parse_object_name(false)?;
5324        let parse_function_param =
5325            |parser: &mut Parser| -> Result<OperateFunctionArg, ParserError> {
5326                let name = parser.parse_identifier()?;
5327                let data_type = parser.parse_data_type()?;
5328                let default_expr = if parser.consume_token(&Token::Eq) {
5329                    Some(parser.parse_expr()?)
5330                } else {
5331                    None
5332                };
5333
5334                Ok(OperateFunctionArg {
5335                    mode: None,
5336                    name: Some(name),
5337                    data_type,
5338                    default_expr,
5339                })
5340            };
5341        self.expect_token(&Token::LParen)?;
5342        let args = self.parse_comma_separated0(parse_function_param, Token::RParen)?;
5343        self.expect_token(&Token::RParen)?;
5344        Ok((name, args))
5345    }
5346
5347    fn parse_function_arg(&mut self) -> Result<OperateFunctionArg, ParserError> {
5348        let mode = if self.parse_keyword(Keyword::IN) {
5349            Some(ArgMode::In)
5350        } else if self.parse_keyword(Keyword::OUT) {
5351            Some(ArgMode::Out)
5352        } else if self.parse_keyword(Keyword::INOUT) {
5353            Some(ArgMode::InOut)
5354        } else {
5355            None
5356        };
5357
5358        // parse: [ argname ] argtype
5359        let mut name = None;
5360        let mut data_type = self.parse_data_type()?;
5361
5362        // To check whether the first token is a name or a type, we try to
5363        // parse another data type: if that succeeds, the first token was the
5364        // argument name rather than a type.
5365        let data_type_idx = self.get_current_index();
5366        if let Some(next_data_type) = self.maybe_parse(|parser| parser.parse_data_type())? {
5367            let token = self.token_at(data_type_idx);
5368
5369            // We ensure that the token is a `Word` token, and not other special tokens.
5370            if !matches!(token.token, Token::Word(_)) {
5371                return self.expected("a name or type", token.clone());
5372            }
5373
5374            name = Some(Ident::new(token.to_string()));
5375            data_type = next_data_type;
5376        }
5377
5378        let default_expr = if self.parse_keyword(Keyword::DEFAULT) || self.consume_token(&Token::Eq)
5379        {
5380            Some(self.parse_expr()?)
5381        } else {
5382            None
5383        };
5384        Ok(OperateFunctionArg {
5385            mode,
5386            name,
5387            data_type,
5388            default_expr,
5389        })
5390    }
5391
5392    /// Parse statements of the DropTrigger type such as:
5393    ///
5394    /// ```sql
5395    /// DROP TRIGGER [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
5396    /// ```
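    ///
    /// A minimal usage sketch (an illustration, not from the original source; it assumes
    /// the public `Parser::parse_sql` entry point and is not compiled as a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{ast::Statement, dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let sql = "DROP TRIGGER IF EXISTS check_update ON accounts CASCADE";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// assert!(matches!(stmts[0], Statement::DropTrigger { .. }));
    /// ```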
5397    pub fn parse_drop_trigger(&mut self) -> Result<Statement, ParserError> {
5398        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5399            self.prev_token();
5400            return self.expected("an object type after DROP", self.peek_token());
5401        }
5402        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
5403        let trigger_name = self.parse_object_name(false)?;
5404        let table_name = if self.parse_keyword(Keyword::ON) {
5405            Some(self.parse_object_name(false)?)
5406        } else {
5407            None
5408        };
5409        let option = self
5410            .parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT])
5411            .map(|keyword| match keyword {
5412                Keyword::CASCADE => ReferentialAction::Cascade,
5413                Keyword::RESTRICT => ReferentialAction::Restrict,
5414                _ => unreachable!(),
5415            });
5416        Ok(Statement::DropTrigger {
5417            if_exists,
5418            trigger_name,
5419            table_name,
5420            option,
5421        })
5422    }
5423
5424    pub fn parse_create_trigger(
5425        &mut self,
5426        or_alter: bool,
5427        or_replace: bool,
5428        is_constraint: bool,
5429    ) -> Result<Statement, ParserError> {
5430        if !dialect_of!(self is PostgreSqlDialect | GenericDialect | MySqlDialect | MsSqlDialect) {
5431            self.prev_token();
5432            return self.expected("an object type after CREATE", self.peek_token());
5433        }
5434
5435        let name = self.parse_object_name(false)?;
5436        let period = self.parse_trigger_period()?;
5437
5438        let events = self.parse_keyword_separated(Keyword::OR, Parser::parse_trigger_event)?;
5439        self.expect_keyword_is(Keyword::ON)?;
5440        let table_name = self.parse_object_name(false)?;
5441
5442        let referenced_table_name = if self.parse_keyword(Keyword::FROM) {
5443            self.parse_object_name(true).ok()
5444        } else {
5445            None
5446        };
5447
5448        let characteristics = self.parse_constraint_characteristics()?;
5449
5450        let mut referencing = vec![];
5451        if self.parse_keyword(Keyword::REFERENCING) {
5452            while let Some(refer) = self.parse_trigger_referencing()? {
5453                referencing.push(refer);
5454            }
5455        }
5456
5457        self.expect_keyword_is(Keyword::FOR)?;
5458        let include_each = self.parse_keyword(Keyword::EACH);
5459        let trigger_object =
5460            match self.expect_one_of_keywords(&[Keyword::ROW, Keyword::STATEMENT])? {
5461                Keyword::ROW => TriggerObject::Row,
5462                Keyword::STATEMENT => TriggerObject::Statement,
5463                _ => unreachable!(),
5464            };
5465
5466        let condition = self
5467            .parse_keyword(Keyword::WHEN)
5468            .then(|| self.parse_expr())
5469            .transpose()?;
5470
5471        self.expect_keyword_is(Keyword::EXECUTE)?;
5472
5473        let exec_body = self.parse_trigger_exec_body()?;
5474
5475        Ok(Statement::CreateTrigger {
5476            or_alter,
5477            or_replace,
5478            is_constraint,
5479            name,
5480            period,
5481            events,
5482            table_name,
5483            referenced_table_name,
5484            referencing,
5485            trigger_object,
5486            include_each,
5487            condition,
5488            exec_body: Some(exec_body),
5489            statements: None,
5490            characteristics,
5491        })
5492    }
5493
5494    pub fn parse_trigger_period(&mut self) -> Result<TriggerPeriod, ParserError> {
5495        Ok(
5496            match self.expect_one_of_keywords(&[
5497                Keyword::FOR,
5498                Keyword::BEFORE,
5499                Keyword::AFTER,
5500                Keyword::INSTEAD,
5501            ])? {
5502                Keyword::FOR => TriggerPeriod::For,
5503                Keyword::BEFORE => TriggerPeriod::Before,
5504                Keyword::AFTER => TriggerPeriod::After,
5505                Keyword::INSTEAD => self
5506                    .expect_keyword_is(Keyword::OF)
5507                    .map(|_| TriggerPeriod::InsteadOf)?,
5508                _ => unreachable!(),
5509            },
5510        )
5511    }
5512
5513    pub fn parse_trigger_event(&mut self) -> Result<TriggerEvent, ParserError> {
5514        Ok(
5515            match self.expect_one_of_keywords(&[
5516                Keyword::INSERT,
5517                Keyword::UPDATE,
5518                Keyword::DELETE,
5519                Keyword::TRUNCATE,
5520            ])? {
5521                Keyword::INSERT => TriggerEvent::Insert,
5522                Keyword::UPDATE => {
5523                    if self.parse_keyword(Keyword::OF) {
5524                        let cols = self.parse_comma_separated(Parser::parse_identifier)?;
5525                        TriggerEvent::Update(cols)
5526                    } else {
5527                        TriggerEvent::Update(vec![])
5528                    }
5529                }
5530                Keyword::DELETE => TriggerEvent::Delete,
5531                Keyword::TRUNCATE => TriggerEvent::Truncate,
5532                _ => unreachable!(),
5533            },
5534        )
5535    }
5536
5537    pub fn parse_trigger_referencing(&mut self) -> Result<Option<TriggerReferencing>, ParserError> {
5538        let refer_type = match self.parse_one_of_keywords(&[Keyword::OLD, Keyword::NEW]) {
5539            Some(Keyword::OLD) if self.parse_keyword(Keyword::TABLE) => {
5540                TriggerReferencingType::OldTable
5541            }
5542            Some(Keyword::NEW) if self.parse_keyword(Keyword::TABLE) => {
5543                TriggerReferencingType::NewTable
5544            }
5545            _ => {
5546                return Ok(None);
5547            }
5548        };
5549
5550        let is_as = self.parse_keyword(Keyword::AS);
5551        let transition_relation_name = self.parse_object_name(false)?;
5552        Ok(Some(TriggerReferencing {
5553            refer_type,
5554            is_as,
5555            transition_relation_name,
5556        }))
5557    }
5558
5559    pub fn parse_trigger_exec_body(&mut self) -> Result<TriggerExecBody, ParserError> {
5560        Ok(TriggerExecBody {
5561            exec_type: match self
5562                .expect_one_of_keywords(&[Keyword::FUNCTION, Keyword::PROCEDURE])?
5563            {
5564                Keyword::FUNCTION => TriggerExecBodyType::Function,
5565                Keyword::PROCEDURE => TriggerExecBodyType::Procedure,
5566                _ => unreachable!(),
5567            },
5568            func_desc: self.parse_function_desc()?,
5569        })
5570    }
5571
5572    pub fn parse_create_macro(
5573        &mut self,
5574        or_replace: bool,
5575        temporary: bool,
5576    ) -> Result<Statement, ParserError> {
5577        if dialect_of!(self is DuckDbDialect | GenericDialect) {
5578            let name = self.parse_object_name(false)?;
5579            self.expect_token(&Token::LParen)?;
5580            let args = if self.consume_token(&Token::RParen) {
5581                self.prev_token();
5582                None
5583            } else {
5584                Some(self.parse_comma_separated(Parser::parse_macro_arg)?)
5585            };
5586
5587            self.expect_token(&Token::RParen)?;
5588            self.expect_keyword_is(Keyword::AS)?;
5589
5590            Ok(Statement::CreateMacro {
5591                or_replace,
5592                temporary,
5593                name,
5594                args,
5595                definition: if self.parse_keyword(Keyword::TABLE) {
5596                    MacroDefinition::Table(self.parse_query()?)
5597                } else {
5598                    MacroDefinition::Expr(self.parse_expr()?)
5599                },
5600            })
5601        } else {
5602            self.prev_token();
5603            self.expected("an object type after CREATE", self.peek_token())
5604        }
5605    }
5606
5607    fn parse_macro_arg(&mut self) -> Result<MacroArg, ParserError> {
5608        let name = self.parse_identifier()?;
5609
5610        let default_expr =
5611            if self.consume_token(&Token::Assignment) || self.consume_token(&Token::RArrow) {
5612                Some(self.parse_expr()?)
5613            } else {
5614                None
5615            };
5616        Ok(MacroArg { name, default_expr })
5617    }
5618
5619    pub fn parse_create_external_table(
5620        &mut self,
5621        or_replace: bool,
5622    ) -> Result<Statement, ParserError> {
5623        self.expect_keyword_is(Keyword::TABLE)?;
5624        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5625        let table_name = self.parse_object_name(false)?;
5626        let (columns, constraints) = self.parse_columns()?;
5627
5628        let hive_distribution = self.parse_hive_distribution()?;
5629        let hive_formats = self.parse_hive_formats()?;
5630
5631        let file_format = if let Some(ff) = &hive_formats.storage {
5632            match ff {
5633                HiveIOFormat::FileFormat { format } => Some(*format),
5634                _ => None,
5635            }
5636        } else {
5637            None
5638        };
5639        let location = hive_formats.location.clone();
5640        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
5641        let table_options = if !table_properties.is_empty() {
5642            CreateTableOptions::TableProperties(table_properties)
5643        } else {
5644            CreateTableOptions::None
5645        };
5646        Ok(CreateTableBuilder::new(table_name)
5647            .columns(columns)
5648            .constraints(constraints)
5649            .hive_distribution(hive_distribution)
5650            .hive_formats(Some(hive_formats))
5651            .table_options(table_options)
5652            .or_replace(or_replace)
5653            .if_not_exists(if_not_exists)
5654            .external(true)
5655            .file_format(file_format)
5656            .location(location)
5657            .build())
5658    }
5659
5660    pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
5661        let next_token = self.next_token();
5662        match &next_token.token {
5663            Token::Word(w) => match w.keyword {
5664                Keyword::AVRO => Ok(FileFormat::AVRO),
5665                Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
5666                Keyword::ORC => Ok(FileFormat::ORC),
5667                Keyword::PARQUET => Ok(FileFormat::PARQUET),
5668                Keyword::RCFILE => Ok(FileFormat::RCFILE),
5669                Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
5670                Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
5671                _ => self.expected("fileformat", next_token),
5672            },
5673            _ => self.expected("fileformat", next_token),
5674        }
5675    }
5676
5677    pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
5678        let next_token = self.next_token();
5679        match &next_token.token {
5680            Token::Word(w) => match w.keyword {
5681                Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
5682                Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
5683                Keyword::JSON => Ok(AnalyzeFormat::JSON),
5684                _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5685            },
5686            _ => self.expected("TEXT, GRAPHVIZ or JSON", next_token),
5687        }
5688    }
5689
5690    pub fn parse_create_view(
5691        &mut self,
5692        or_alter: bool,
5693        or_replace: bool,
5694        temporary: bool,
5695        create_view_params: Option<CreateViewParams>,
5696    ) -> Result<Statement, ParserError> {
5697        let materialized = self.parse_keyword(Keyword::MATERIALIZED);
5698        self.expect_keyword_is(Keyword::VIEW)?;
5699        let if_not_exists = dialect_of!(self is BigQueryDialect|SQLiteDialect|GenericDialect)
5700            && self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5701        // `OR ALTER` (right after `CREATE`) is parsed by the caller and passed in via `or_alter`.
5702        // ANSI SQL and Postgres also allow RECURSIVE here, which we don't support yet.
5703        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
5704        let name = self.parse_object_name(allow_unquoted_hyphen)?;
5705        let columns = self.parse_view_columns()?;
5706        let mut options = CreateTableOptions::None;
5707        let with_options = self.parse_options(Keyword::WITH)?;
5708        if !with_options.is_empty() {
5709            options = CreateTableOptions::With(with_options);
5710        }
5711
5712        let cluster_by = if self.parse_keyword(Keyword::CLUSTER) {
5713            self.expect_keyword_is(Keyword::BY)?;
5714            self.parse_parenthesized_column_list(Optional, false)?
5715        } else {
5716            vec![]
5717        };
5718
5719        if dialect_of!(self is BigQueryDialect | GenericDialect) {
5720            if let Some(opts) = self.maybe_parse_options(Keyword::OPTIONS)? {
5721                if !opts.is_empty() {
5722                    options = CreateTableOptions::Options(opts);
5723                }
5724            };
5725        }
5726
5727        let to = if dialect_of!(self is ClickHouseDialect | GenericDialect)
5728            && self.parse_keyword(Keyword::TO)
5729        {
5730            Some(self.parse_object_name(false)?)
5731        } else {
5732            None
5733        };
5734
5735        let comment = if dialect_of!(self is SnowflakeDialect | GenericDialect)
5736            && self.parse_keyword(Keyword::COMMENT)
5737        {
5738            self.expect_token(&Token::Eq)?;
5739            Some(self.parse_comment_value()?)
5740        } else {
5741            None
5742        };
5743
5744        self.expect_keyword_is(Keyword::AS)?;
5745        let query = self.parse_query()?;
5746        // The optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` clause is widely supported here, but is not parsed yet.
5747
5748        let with_no_schema_binding = dialect_of!(self is RedshiftSqlDialect | GenericDialect)
5749            && self.parse_keywords(&[
5750                Keyword::WITH,
5751                Keyword::NO,
5752                Keyword::SCHEMA,
5753                Keyword::BINDING,
5754            ]);
5755
5756        Ok(Statement::CreateView {
5757            or_alter,
5758            name,
5759            columns,
5760            query,
5761            materialized,
5762            or_replace,
5763            options,
5764            cluster_by,
5765            comment,
5766            with_no_schema_binding,
5767            if_not_exists,
5768            temporary,
5769            to,
5770            params: create_view_params,
5771        })
5772    }
5773
5774    /// Parse optional parameters for the `CREATE VIEW` statement supported by [MySQL].
5775    ///
5776    /// [MySQL]: https://dev.mysql.com/doc/refman/9.1/en/create-view.html
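    ///
    /// Sketch of these modifiers in context (illustrative only; it assumes the MySQL
    /// dialect routes them through this helper via `Parser::parse_sql`, and is not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::MySqlDialect, parser::Parser};
    ///
    /// let sql = "CREATE ALGORITHM = MERGE SQL SECURITY INVOKER VIEW v AS SELECT 1";
    /// let stmts = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
    /// ```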
5777    fn parse_create_view_params(&mut self) -> Result<Option<CreateViewParams>, ParserError> {
5778        let algorithm = if self.parse_keyword(Keyword::ALGORITHM) {
5779            self.expect_token(&Token::Eq)?;
5780            Some(
5781                match self.expect_one_of_keywords(&[
5782                    Keyword::UNDEFINED,
5783                    Keyword::MERGE,
5784                    Keyword::TEMPTABLE,
5785                ])? {
5786                    Keyword::UNDEFINED => CreateViewAlgorithm::Undefined,
5787                    Keyword::MERGE => CreateViewAlgorithm::Merge,
5788                    Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
5789                    _ => {
5790                        self.prev_token();
5791                        let found = self.next_token();
5792                        return self
5793                            .expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
5794                    }
5795                },
5796            )
5797        } else {
5798            None
5799        };
5800        let definer = if self.parse_keyword(Keyword::DEFINER) {
5801            self.expect_token(&Token::Eq)?;
5802            Some(self.parse_grantee_name()?)
5803        } else {
5804            None
5805        };
5806        let security = if self.parse_keywords(&[Keyword::SQL, Keyword::SECURITY]) {
5807            Some(
5808                match self.expect_one_of_keywords(&[Keyword::DEFINER, Keyword::INVOKER])? {
5809                    Keyword::DEFINER => CreateViewSecurity::Definer,
5810                    Keyword::INVOKER => CreateViewSecurity::Invoker,
5811                    _ => {
5812                        self.prev_token();
5813                        let found = self.next_token();
5814                        return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
5815                    }
5816                },
5817            )
5818        } else {
5819            None
5820        };
5821        if algorithm.is_some() || definer.is_some() || security.is_some() {
5822            Ok(Some(CreateViewParams {
5823                algorithm,
5824                definer,
5825                security,
5826            }))
5827        } else {
5828            Ok(None)
5829        }
5830    }
5831
5832    pub fn parse_create_role(&mut self) -> Result<Statement, ParserError> {
5833        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
5834        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
5835
5836        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
5837
5838        let optional_keywords = if dialect_of!(self is MsSqlDialect) {
5839            vec![Keyword::AUTHORIZATION]
5840        } else if dialect_of!(self is PostgreSqlDialect) {
5841            vec![
5842                Keyword::LOGIN,
5843                Keyword::NOLOGIN,
5844                Keyword::INHERIT,
5845                Keyword::NOINHERIT,
5846                Keyword::BYPASSRLS,
5847                Keyword::NOBYPASSRLS,
5848                Keyword::PASSWORD,
5849                Keyword::CREATEDB,
5850                Keyword::NOCREATEDB,
5851                Keyword::CREATEROLE,
5852                Keyword::NOCREATEROLE,
5853                Keyword::SUPERUSER,
5854                Keyword::NOSUPERUSER,
5855                Keyword::REPLICATION,
5856                Keyword::NOREPLICATION,
5857                Keyword::CONNECTION,
5858                Keyword::VALID,
5859                Keyword::IN,
5860                Keyword::ROLE,
5861                Keyword::ADMIN,
5862                Keyword::USER,
5863            ]
5864        } else {
5865            vec![]
5866        };
5867
5868        // MSSQL
5869        let mut authorization_owner = None;
5870        // Postgres
5871        let mut login = None;
5872        let mut inherit = None;
5873        let mut bypassrls = None;
5874        let mut password = None;
5875        let mut create_db = None;
5876        let mut create_role = None;
5877        let mut superuser = None;
5878        let mut replication = None;
5879        let mut connection_limit = None;
5880        let mut valid_until = None;
5881        let mut in_role = vec![];
5882        let mut in_group = vec![];
5883        let mut role = vec![];
5884        let mut user = vec![];
5885        let mut admin = vec![];
5886
5887        while let Some(keyword) = self.parse_one_of_keywords(&optional_keywords) {
5888            let loc = self
5889                .tokens
5890                .get(self.index - 1)
5891                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
5892            match keyword {
5893                Keyword::AUTHORIZATION => {
5894                    if authorization_owner.is_some() {
5895                        parser_err!("Found multiple AUTHORIZATION", loc)
5896                    } else {
5897                        authorization_owner = Some(self.parse_object_name(false)?);
5898                        Ok(())
5899                    }
5900                }
5901                Keyword::LOGIN | Keyword::NOLOGIN => {
5902                    if login.is_some() {
5903                        parser_err!("Found multiple LOGIN or NOLOGIN", loc)
5904                    } else {
5905                        login = Some(keyword == Keyword::LOGIN);
5906                        Ok(())
5907                    }
5908                }
5909                Keyword::INHERIT | Keyword::NOINHERIT => {
5910                    if inherit.is_some() {
5911                        parser_err!("Found multiple INHERIT or NOINHERIT", loc)
5912                    } else {
5913                        inherit = Some(keyword == Keyword::INHERIT);
5914                        Ok(())
5915                    }
5916                }
5917                Keyword::BYPASSRLS | Keyword::NOBYPASSRLS => {
5918                    if bypassrls.is_some() {
5919                        parser_err!("Found multiple BYPASSRLS or NOBYPASSRLS", loc)
5920                    } else {
5921                        bypassrls = Some(keyword == Keyword::BYPASSRLS);
5922                        Ok(())
5923                    }
5924                }
5925                Keyword::CREATEDB | Keyword::NOCREATEDB => {
5926                    if create_db.is_some() {
5927                        parser_err!("Found multiple CREATEDB or NOCREATEDB", loc)
5928                    } else {
5929                        create_db = Some(keyword == Keyword::CREATEDB);
5930                        Ok(())
5931                    }
5932                }
5933                Keyword::CREATEROLE | Keyword::NOCREATEROLE => {
5934                    if create_role.is_some() {
5935                        parser_err!("Found multiple CREATEROLE or NOCREATEROLE", loc)
5936                    } else {
5937                        create_role = Some(keyword == Keyword::CREATEROLE);
5938                        Ok(())
5939                    }
5940                }
5941                Keyword::SUPERUSER | Keyword::NOSUPERUSER => {
5942                    if superuser.is_some() {
5943                        parser_err!("Found multiple SUPERUSER or NOSUPERUSER", loc)
5944                    } else {
5945                        superuser = Some(keyword == Keyword::SUPERUSER);
5946                        Ok(())
5947                    }
5948                }
5949                Keyword::REPLICATION | Keyword::NOREPLICATION => {
5950                    if replication.is_some() {
5951                        parser_err!("Found multiple REPLICATION or NOREPLICATION", loc)
5952                    } else {
5953                        replication = Some(keyword == Keyword::REPLICATION);
5954                        Ok(())
5955                    }
5956                }
5957                Keyword::PASSWORD => {
5958                    if password.is_some() {
5959                        parser_err!("Found multiple PASSWORD", loc)
5960                    } else {
5961                        password = if self.parse_keyword(Keyword::NULL) {
5962                            Some(Password::NullPassword)
5963                        } else {
5964                            Some(Password::Password(Expr::Value(self.parse_value()?)))
5965                        };
5966                        Ok(())
5967                    }
5968                }
5969                Keyword::CONNECTION => {
5970                    self.expect_keyword_is(Keyword::LIMIT)?;
5971                    if connection_limit.is_some() {
5972                        parser_err!("Found multiple CONNECTION LIMIT", loc)
5973                    } else {
5974                        connection_limit = Some(Expr::Value(self.parse_number_value()?));
5975                        Ok(())
5976                    }
5977                }
5978                Keyword::VALID => {
5979                    self.expect_keyword_is(Keyword::UNTIL)?;
5980                    if valid_until.is_some() {
5981                        parser_err!("Found multiple VALID UNTIL", loc)
5982                    } else {
5983                        valid_until = Some(Expr::Value(self.parse_value()?));
5984                        Ok(())
5985                    }
5986                }
5987                Keyword::IN => {
5988                    if self.parse_keyword(Keyword::ROLE) {
5989                        if !in_role.is_empty() {
5990                            parser_err!("Found multiple IN ROLE", loc)
5991                        } else {
5992                            in_role = self.parse_comma_separated(|p| p.parse_identifier())?;
5993                            Ok(())
5994                        }
5995                    } else if self.parse_keyword(Keyword::GROUP) {
5996                        if !in_group.is_empty() {
5997                            parser_err!("Found multiple IN GROUP", loc)
5998                        } else {
5999                            in_group = self.parse_comma_separated(|p| p.parse_identifier())?;
6000                            Ok(())
6001                        }
6002                    } else {
6003                        self.expected("ROLE or GROUP after IN", self.peek_token())
6004                    }
6005                }
6006                Keyword::ROLE => {
6007                    if !role.is_empty() {
6008                        parser_err!("Found multiple ROLE", loc)
6009                    } else {
6010                        role = self.parse_comma_separated(|p| p.parse_identifier())?;
6011                        Ok(())
6012                    }
6013                }
6014                Keyword::USER => {
6015                    if !user.is_empty() {
6016                        parser_err!("Found multiple USER", loc)
6017                    } else {
6018                        user = self.parse_comma_separated(|p| p.parse_identifier())?;
6019                        Ok(())
6020                    }
6021                }
6022                Keyword::ADMIN => {
6023                    if !admin.is_empty() {
6024                        parser_err!("Found multiple ADMIN", loc)
6025                    } else {
6026                        admin = self.parse_comma_separated(|p| p.parse_identifier())?;
6027                        Ok(())
6028                    }
6029                }
6030                _ => break,
6031            }?
6032        }
6033
6034        Ok(Statement::CreateRole {
6035            names,
6036            if_not_exists,
6037            login,
6038            inherit,
6039            bypassrls,
6040            password,
6041            create_db,
6042            create_role,
6043            replication,
6044            superuser,
6045            connection_limit,
6046            valid_until,
6047            in_role,
6048            in_group,
6049            role,
6050            user,
6051            admin,
6052            authorization_owner,
6053        })
6054    }
6055
6056    pub fn parse_owner(&mut self) -> Result<Owner, ParserError> {
6057        let owner = match self.parse_one_of_keywords(&[Keyword::CURRENT_USER, Keyword::CURRENT_ROLE, Keyword::SESSION_USER]) {
6058            Some(Keyword::CURRENT_USER) => Owner::CurrentUser,
6059            Some(Keyword::CURRENT_ROLE) => Owner::CurrentRole,
6060            Some(Keyword::SESSION_USER) => Owner::SessionUser,
6061            Some(_) => unreachable!(),
6062            None => {
6063                match self.parse_identifier() {
6064                    Ok(ident) => Owner::Ident(ident),
6065                    Err(e) => {
6066                        return Err(ParserError::ParserError(format!("Expected: CURRENT_USER, CURRENT_ROLE, SESSION_USER or identifier after OWNER TO. {e}")))
6067                    }
6068                }
6069            }
6070        };
6071        Ok(owner)
6072    }
6073
6074    /// Parses a [Statement::CreateDomain] statement.
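    ///
    /// Illustrative sketch (assumes `Parser::parse_sql` and the PostgreSQL dialect;
    /// not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let sql = "CREATE DOMAIN us_postal_code AS TEXT DEFAULT '00000'";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```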
6075    fn parse_create_domain(&mut self) -> Result<Statement, ParserError> {
6076        let name = self.parse_object_name(false)?;
6077        self.expect_keyword_is(Keyword::AS)?;
6078        let data_type = self.parse_data_type()?;
6079        let collation = if self.parse_keyword(Keyword::COLLATE) {
6080            Some(self.parse_identifier()?)
6081        } else {
6082            None
6083        };
6084        let default = if self.parse_keyword(Keyword::DEFAULT) {
6085            Some(self.parse_expr()?)
6086        } else {
6087            None
6088        };
6089        let mut constraints = Vec::new();
6090        while let Some(constraint) = self.parse_optional_table_constraint()? {
6091            constraints.push(constraint);
6092        }
6093
6094        Ok(Statement::CreateDomain(CreateDomain {
6095            name,
6096            data_type,
6097            collation,
6098            default,
6099            constraints,
6100        }))
6101    }
6102
6103    /// ```sql
6104    ///     CREATE POLICY name ON table_name [ AS { PERMISSIVE | RESTRICTIVE } ]
6105    ///     [ FOR { ALL | SELECT | INSERT | UPDATE | DELETE } ]
6106    ///     [ TO { role_name | PUBLIC | CURRENT_USER | CURRENT_ROLE | SESSION_USER } [, ...] ]
6107    ///     [ USING ( using_expression ) ]
6108    ///     [ WITH CHECK ( with_check_expression ) ]
6109    /// ```
6110    ///
6111    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-createpolicy.html)
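    ///
    /// Illustrative sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let sql = "CREATE POLICY p ON accounts AS PERMISSIVE FOR SELECT TO CURRENT_USER USING (true)";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```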
6112    pub fn parse_create_policy(&mut self) -> Result<Statement, ParserError> {
6113        let name = self.parse_identifier()?;
6114        self.expect_keyword_is(Keyword::ON)?;
6115        let table_name = self.parse_object_name(false)?;
6116
6117        let policy_type = if self.parse_keyword(Keyword::AS) {
6118            let keyword =
6119                self.expect_one_of_keywords(&[Keyword::PERMISSIVE, Keyword::RESTRICTIVE])?;
6120            Some(match keyword {
6121                Keyword::PERMISSIVE => CreatePolicyType::Permissive,
6122                Keyword::RESTRICTIVE => CreatePolicyType::Restrictive,
6123                _ => unreachable!(),
6124            })
6125        } else {
6126            None
6127        };
6128
6129        let command = if self.parse_keyword(Keyword::FOR) {
6130            let keyword = self.expect_one_of_keywords(&[
6131                Keyword::ALL,
6132                Keyword::SELECT,
6133                Keyword::INSERT,
6134                Keyword::UPDATE,
6135                Keyword::DELETE,
6136            ])?;
6137            Some(match keyword {
6138                Keyword::ALL => CreatePolicyCommand::All,
6139                Keyword::SELECT => CreatePolicyCommand::Select,
6140                Keyword::INSERT => CreatePolicyCommand::Insert,
6141                Keyword::UPDATE => CreatePolicyCommand::Update,
6142                Keyword::DELETE => CreatePolicyCommand::Delete,
6143                _ => unreachable!(),
6144            })
6145        } else {
6146            None
6147        };
6148
6149        let to = if self.parse_keyword(Keyword::TO) {
6150            Some(self.parse_comma_separated(|p| p.parse_owner())?)
6151        } else {
6152            None
6153        };
6154
6155        let using = if self.parse_keyword(Keyword::USING) {
6156            self.expect_token(&Token::LParen)?;
6157            let expr = self.parse_expr()?;
6158            self.expect_token(&Token::RParen)?;
6159            Some(expr)
6160        } else {
6161            None
6162        };
6163
6164        let with_check = if self.parse_keywords(&[Keyword::WITH, Keyword::CHECK]) {
6165            self.expect_token(&Token::LParen)?;
6166            let expr = self.parse_expr()?;
6167            self.expect_token(&Token::RParen)?;
6168            Some(expr)
6169        } else {
6170            None
6171        };
6172
6173        Ok(CreatePolicy {
6174            name,
6175            table_name,
6176            policy_type,
6177            command,
6178            to,
6179            using,
6180            with_check,
6181        })
6182    }
6183
6184    /// ```sql
6185    /// CREATE CONNECTOR [IF NOT EXISTS] connector_name
6186    /// [TYPE datasource_type]
6187    /// [URL datasource_url]
6188    /// [COMMENT connector_comment]
6189    /// [WITH DCPROPERTIES(property_name=property_value, ...)]
6190    /// ```
6191    ///
6192    /// [Hive Documentation](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-CreateDataConnectorCreateConnector)
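    ///
    /// Illustrative sketch (assumes `Parser::parse_sql`; which dialects route
    /// `CREATE CONNECTOR` here is an assumption, so this is not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::GenericDialect, parser::Parser};
    ///
    /// let sql = "CREATE CONNECTOR IF NOT EXISTS mysql_local TYPE 'mysql' URL 'jdbc:mysql://localhost:3306'";
    /// let stmts = Parser::parse_sql(&GenericDialect {}, sql).unwrap();
    /// ```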
6193    pub fn parse_create_connector(&mut self) -> Result<Statement, ParserError> {
6194        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6195        let name = self.parse_identifier()?;
6196
6197        let connector_type = if self.parse_keyword(Keyword::TYPE) {
6198            Some(self.parse_literal_string()?)
6199        } else {
6200            None
6201        };
6202
6203        let url = if self.parse_keyword(Keyword::URL) {
6204            Some(self.parse_literal_string()?)
6205        } else {
6206            None
6207        };
6208
6209        let comment = self.parse_optional_inline_comment()?;
6210
6211        let with_dcproperties =
6212            match self.parse_options_with_keywords(&[Keyword::WITH, Keyword::DCPROPERTIES])? {
6213                properties if !properties.is_empty() => Some(properties),
6214                _ => None,
6215            };
6216
6217        Ok(Statement::CreateConnector(CreateConnector {
6218            name,
6219            if_not_exists,
6220            connector_type,
6221            url,
6222            comment,
6223            with_dcproperties,
6224        }))
6225    }
6226
6227    pub fn parse_drop(&mut self) -> Result<Statement, ParserError> {
6228        // MySQL, DuckDB and the generic dialect support `TEMPORARY`
6229        let temporary = dialect_of!(self is MySqlDialect | GenericDialect | DuckDbDialect)
6230            && self.parse_keyword(Keyword::TEMPORARY);
6231        let persistent = dialect_of!(self is DuckDbDialect)
6232            && self.parse_one_of_keywords(&[Keyword::PERSISTENT]).is_some();
6233
6234        let object_type = if self.parse_keyword(Keyword::TABLE) {
6235            ObjectType::Table
6236        } else if self.parse_keyword(Keyword::VIEW) {
6237            ObjectType::View
6238        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEW]) {
6239            ObjectType::MaterializedView
6240        } else if self.parse_keyword(Keyword::INDEX) {
6241            ObjectType::Index
6242        } else if self.parse_keyword(Keyword::ROLE) {
6243            ObjectType::Role
6244        } else if self.parse_keyword(Keyword::SCHEMA) {
6245            ObjectType::Schema
6246        } else if self.parse_keyword(Keyword::DATABASE) {
6247            ObjectType::Database
6248        } else if self.parse_keyword(Keyword::SEQUENCE) {
6249            ObjectType::Sequence
6250        } else if self.parse_keyword(Keyword::STAGE) {
6251            ObjectType::Stage
6252        } else if self.parse_keyword(Keyword::TYPE) {
6253            ObjectType::Type
6254        } else if self.parse_keyword(Keyword::USER) {
6255            ObjectType::User
6256        } else if self.parse_keyword(Keyword::FUNCTION) {
6257            return self.parse_drop_function();
6258        } else if self.parse_keyword(Keyword::POLICY) {
6259            return self.parse_drop_policy();
6260        } else if self.parse_keyword(Keyword::CONNECTOR) {
6261            return self.parse_drop_connector();
6262        } else if self.parse_keyword(Keyword::DOMAIN) {
6263            return self.parse_drop_domain();
6264        } else if self.parse_keyword(Keyword::PROCEDURE) {
6265            return self.parse_drop_procedure();
6266        } else if self.parse_keyword(Keyword::SECRET) {
6267            return self.parse_drop_secret(temporary, persistent);
6268        } else if self.parse_keyword(Keyword::TRIGGER) {
6269            return self.parse_drop_trigger();
6270        } else if self.parse_keyword(Keyword::EXTENSION) {
6271            return self.parse_drop_extension();
6272        } else {
6273            return self.expected(
6274                "CONNECTOR, DATABASE, EXTENSION, FUNCTION, INDEX, POLICY, PROCEDURE, ROLE, SCHEMA, SECRET, SEQUENCE, STAGE, TABLE, TRIGGER, TYPE, VIEW, MATERIALIZED VIEW or USER after DROP",
6275                self.peek_token(),
6276            );
6277        };
6278        // Many dialects support the non-standard `IF EXISTS` clause and allow
6279        // specifying multiple objects to delete in a single statement
6280        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6281        let names = self.parse_comma_separated(|p| p.parse_object_name(false))?;
6282
6283        let loc = self.peek_token().span.start;
6284        let cascade = self.parse_keyword(Keyword::CASCADE);
6285        let restrict = self.parse_keyword(Keyword::RESTRICT);
6286        let purge = self.parse_keyword(Keyword::PURGE);
6287        if cascade && restrict {
6288            return parser_err!("Cannot specify both CASCADE and RESTRICT in DROP", loc);
6289        }
6290        if object_type == ObjectType::Role && (cascade || restrict || purge) {
6291            return parser_err!(
6292                "Cannot specify CASCADE, RESTRICT, or PURGE in DROP ROLE",
6293                loc
6294            );
6295        }
6296        let table = if self.parse_keyword(Keyword::ON) {
6297            Some(self.parse_object_name(false)?)
6298        } else {
6299            None
6300        };
6301        Ok(Statement::Drop {
6302            object_type,
6303            if_exists,
6304            names,
6305            cascade,
6306            restrict,
6307            purge,
6308            temporary,
6309            table,
6310        })
6311    }
6312
6313    fn parse_optional_drop_behavior(&mut self) -> Option<DropBehavior> {
6314        match self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]) {
6315            Some(Keyword::CASCADE) => Some(DropBehavior::Cascade),
6316            Some(Keyword::RESTRICT) => Some(DropBehavior::Restrict),
6317            _ => None,
6318        }
6319    }
6320
6321    /// ```sql
6322    /// DROP FUNCTION [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6323    /// [ CASCADE | RESTRICT ]
6324    /// ```
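    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let stmts =
    ///     Parser::parse_sql(&PostgreSqlDialect {}, "DROP FUNCTION IF EXISTS add_one(INTEGER) CASCADE").unwrap();
    /// ```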
6325    fn parse_drop_function(&mut self) -> Result<Statement, ParserError> {
6326        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6327        let func_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6328        let drop_behavior = self.parse_optional_drop_behavior();
6329        Ok(Statement::DropFunction {
6330            if_exists,
6331            func_desc,
6332            drop_behavior,
6333        })
6334    }
6335
6336    /// ```sql
6337    /// DROP POLICY [ IF EXISTS ] name ON table_name [ CASCADE | RESTRICT ]
6338    /// ```
6339    ///
6340    /// [PostgreSQL Documentation](https://www.postgresql.org/docs/current/sql-droppolicy.html)
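    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let stmts =
    ///     Parser::parse_sql(&PostgreSqlDialect {}, "DROP POLICY IF EXISTS p ON accounts RESTRICT").unwrap();
    /// ```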
6341    fn parse_drop_policy(&mut self) -> Result<Statement, ParserError> {
6342        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6343        let name = self.parse_identifier()?;
6344        self.expect_keyword_is(Keyword::ON)?;
6345        let table_name = self.parse_object_name(false)?;
6346        let drop_behavior = self.parse_optional_drop_behavior();
6347        Ok(Statement::DropPolicy {
6348            if_exists,
6349            name,
6350            table_name,
6351            drop_behavior,
6352        })
6353    }
6354    /// ```sql
6355    /// DROP CONNECTOR [IF EXISTS] name
6356    /// ```
6357    ///
6358    /// See [Hive](https://cwiki.apache.org/confluence/pages/viewpage.action?pageId=27362034#LanguageManualDDL-DropConnector)
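    ///
    /// Sketch (assumes `Parser::parse_sql` and that the chosen dialect accepts
    /// `DROP CONNECTOR`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::GenericDialect, parser::Parser};
    ///
    /// let stmts = Parser::parse_sql(&GenericDialect {}, "DROP CONNECTOR IF EXISTS mysql_local").unwrap();
    /// ```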
6359    fn parse_drop_connector(&mut self) -> Result<Statement, ParserError> {
6360        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6361        let name = self.parse_identifier()?;
6362        Ok(Statement::DropConnector { if_exists, name })
6363    }
6364
6365    /// ```sql
6366    /// DROP DOMAIN [ IF EXISTS ] name [ CASCADE | RESTRICT ]
6367    /// ```
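    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let stmts =
    ///     Parser::parse_sql(&PostgreSqlDialect {}, "DROP DOMAIN IF EXISTS us_postal_code RESTRICT").unwrap();
    /// ```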
6368    fn parse_drop_domain(&mut self) -> Result<Statement, ParserError> {
6369        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6370        let name = self.parse_object_name(false)?;
6371        let drop_behavior = self.parse_optional_drop_behavior();
6372        Ok(Statement::DropDomain(DropDomain {
6373            if_exists,
6374            name,
6375            drop_behavior,
6376        }))
6377    }
6378
6379    /// ```sql
6380    /// DROP PROCEDURE [ IF EXISTS ] name [ ( [ [ argmode ] [ argname ] argtype [, ...] ] ) ] [, ...]
6381    /// [ CASCADE | RESTRICT ]
6382    /// ```
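    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let stmts =
    ///     Parser::parse_sql(&PostgreSqlDialect {}, "DROP PROCEDURE IF EXISTS process_order(INTEGER) CASCADE").unwrap();
    /// ```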
6383    fn parse_drop_procedure(&mut self) -> Result<Statement, ParserError> {
6384        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6385        let proc_desc = self.parse_comma_separated(Parser::parse_function_desc)?;
6386        let drop_behavior = self.parse_optional_drop_behavior();
6387        Ok(Statement::DropProcedure {
6388            if_exists,
6389            proc_desc,
6390            drop_behavior,
6391        })
6392    }
6393
6394    fn parse_function_desc(&mut self) -> Result<FunctionDesc, ParserError> {
6395        let name = self.parse_object_name(false)?;
6396
6397        let args = if self.consume_token(&Token::LParen) {
6398            if self.consume_token(&Token::RParen) {
6399                None
6400            } else {
6401                let args = self.parse_comma_separated(Parser::parse_function_arg)?;
6402                self.expect_token(&Token::RParen)?;
6403                Some(args)
6404            }
6405        } else {
6406            None
6407        };
6408
6409        Ok(FunctionDesc { name, args })
6410    }
6411
6412    /// See [DuckDB Docs](https://duckdb.org/docs/sql/statements/create_secret.html) for more details.
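    ///
    /// Sketch (assumes `Parser::parse_sql` and the DuckDB dialect; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::DuckDbDialect, parser::Parser};
    ///
    /// let stmts =
    ///     Parser::parse_sql(&DuckDbDialect {}, "DROP PERSISTENT SECRET IF EXISTS my_secret").unwrap();
    /// ```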
6413    fn parse_drop_secret(
6414        &mut self,
6415        temporary: bool,
6416        persistent: bool,
6417    ) -> Result<Statement, ParserError> {
6418        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6419        let name = self.parse_identifier()?;
6420        let storage_specifier = if self.parse_keyword(Keyword::FROM) {
6421            self.parse_identifier().ok()
6422        } else {
6423            None
6424        };
6425        let temp = match (temporary, persistent) {
6426            (true, false) => Some(true),
6427            (false, true) => Some(false),
6428            (false, false) => None,
6429            _ => self.expected("TEMPORARY or PERSISTENT", self.peek_token())?,
6430        };
6431
6432        Ok(Statement::DropSecret {
6433            if_exists,
6434            temporary: temp,
6435            name,
6436            storage_specifier,
6437        })
6438    }
6439
6440    /// Parse a `DECLARE` statement.
6441    ///
6442    /// ```sql
6443    /// DECLARE name [ BINARY ] [ ASENSITIVE | INSENSITIVE ] [ [ NO ] SCROLL ]
6444    ///     CURSOR [ { WITH | WITHOUT } HOLD ] FOR query
6445    /// ```
6446    ///
6447    /// The syntax can vary significantly between warehouses. See the grammar
6448    /// documented on the warehouse-specific parse functions for those cases.
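    ///
    /// Sketch of the generic (PostgreSQL-style) form above (assumes `Parser::parse_sql`;
    /// not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::PostgreSqlDialect, parser::Parser};
    ///
    /// let sql = "DECLARE c BINARY INSENSITIVE NO SCROLL CURSOR WITH HOLD FOR SELECT 1";
    /// let stmts = Parser::parse_sql(&PostgreSqlDialect {}, sql).unwrap();
    /// ```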
6449    pub fn parse_declare(&mut self) -> Result<Statement, ParserError> {
6450        if dialect_of!(self is BigQueryDialect) {
6451            return self.parse_big_query_declare();
6452        }
6453        if dialect_of!(self is SnowflakeDialect) {
6454            return self.parse_snowflake_declare();
6455        }
6456        if dialect_of!(self is MsSqlDialect) {
6457            return self.parse_mssql_declare();
6458        }
6459
6460        let name = self.parse_identifier()?;
6461
6462        let binary = Some(self.parse_keyword(Keyword::BINARY));
6463        let sensitive = if self.parse_keyword(Keyword::INSENSITIVE) {
6464            Some(true)
6465        } else if self.parse_keyword(Keyword::ASENSITIVE) {
6466            Some(false)
6467        } else {
6468            None
6469        };
6470        let scroll = if self.parse_keyword(Keyword::SCROLL) {
6471            Some(true)
6472        } else if self.parse_keywords(&[Keyword::NO, Keyword::SCROLL]) {
6473            Some(false)
6474        } else {
6475            None
6476        };
6477
6478        self.expect_keyword_is(Keyword::CURSOR)?;
6479        let declare_type = Some(DeclareType::Cursor);
6480
6481        let hold = match self.parse_one_of_keywords(&[Keyword::WITH, Keyword::WITHOUT]) {
6482            Some(keyword) => {
6483                self.expect_keyword_is(Keyword::HOLD)?;
6484
6485                match keyword {
6486                    Keyword::WITH => Some(true),
6487                    Keyword::WITHOUT => Some(false),
6488                    _ => unreachable!(),
6489                }
6490            }
6491            None => None,
6492        };
6493
6494        self.expect_keyword_is(Keyword::FOR)?;
6495
6496        let query = Some(self.parse_query()?);
6497
6498        Ok(Statement::Declare {
6499            stmts: vec![Declare {
6500                names: vec![name],
6501                data_type: None,
6502                assignment: None,
6503                declare_type,
6504                binary,
6505                sensitive,
6506                scroll,
6507                hold,
6508                for_query: query,
6509            }],
6510        })
6511    }
6512
6513    /// Parse a [BigQuery] `DECLARE` statement.
6514    ///
6515    /// Syntax:
6516    /// ```text
6517    /// DECLARE variable_name[, ...] [{ <variable_type> | <DEFAULT expression> }];
6518    /// ```
6519    /// [BigQuery]: https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#declare
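    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::BigQueryDialect, parser::Parser};
    ///
    /// let stmts = Parser::parse_sql(&BigQueryDialect {}, "DECLARE x, y INT64 DEFAULT 42").unwrap();
    /// ```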
6520    pub fn parse_big_query_declare(&mut self) -> Result<Statement, ParserError> {
6521        let names = self.parse_comma_separated(Parser::parse_identifier)?;
6522
6523        let data_type = match self.peek_token().token {
6524            Token::Word(w) if w.keyword == Keyword::DEFAULT => None,
6525            _ => Some(self.parse_data_type()?),
6526        };
6527
6528        let expr = if data_type.is_some() {
6529            if self.parse_keyword(Keyword::DEFAULT) {
6530                Some(self.parse_expr()?)
6531            } else {
6532                None
6533            }
6534        } else {
6535            // If no variable type is given, a default expression must be specified, per BigQuery docs;
6536            // i.e. `DECLARE foo;` is invalid.
6537            self.expect_keyword_is(Keyword::DEFAULT)?;
6538            Some(self.parse_expr()?)
6539        };
6540
6541        Ok(Statement::Declare {
6542            stmts: vec![Declare {
6543                names,
6544                data_type,
6545                assignment: expr.map(|expr| DeclareAssignment::Default(Box::new(expr))),
6546                declare_type: None,
6547                binary: None,
6548                sensitive: None,
6549                scroll: None,
6550                hold: None,
6551                for_query: None,
6552            }],
6553        })
6554    }
6555
6556    /// Parse a [Snowflake] `DECLARE` statement.
6557    ///
6558    /// Syntax:
6559    /// ```text
6560    /// DECLARE
6561    ///   [{ <variable_declaration>
6562    ///      | <cursor_declaration>
6563    ///      | <resultset_declaration>
6564    ///      | <exception_declaration> }; ... ]
6565    ///
6566    /// <variable_declaration>
6567    /// <variable_name> [<type>] [ { DEFAULT | := } <expression>]
6568    ///
6569    /// <cursor_declaration>
6570    /// <cursor_name> CURSOR FOR <query>
6571    ///
6572    /// <resultset_declaration>
6573    /// <resultset_name> RESULTSET [ { DEFAULT | := } ( <query> ) ] ;
6574    ///
6575    /// <exception_declaration>
6576    /// <exception_name> EXCEPTION [ ( <exception_number> , '<exception_message>' ) ] ;
6577    /// ```
6578    ///
6579    /// [Snowflake]: https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare
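    ///
    /// Sketch of a variable and a cursor declaration (assumes `Parser::parse_sql`;
    /// not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::SnowflakeDialect, parser::Parser};
    ///
    /// let sql = "DECLARE profit NUMBER(38, 2) DEFAULT 0.0; c1 CURSOR FOR SELECT 1";
    /// let stmts = Parser::parse_sql(&SnowflakeDialect {}, sql).unwrap();
    /// ```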
6580    pub fn parse_snowflake_declare(&mut self) -> Result<Statement, ParserError> {
6581        let mut stmts = vec![];
6582        loop {
6583            let name = self.parse_identifier()?;
6584            let (declare_type, for_query, assigned_expr, data_type) =
6585                if self.parse_keyword(Keyword::CURSOR) {
6586                    self.expect_keyword_is(Keyword::FOR)?;
6587                    match self.peek_token().token {
6588                        Token::Word(w) if w.keyword == Keyword::SELECT => (
6589                            Some(DeclareType::Cursor),
6590                            Some(self.parse_query()?),
6591                            None,
6592                            None,
6593                        ),
6594                        _ => (
6595                            Some(DeclareType::Cursor),
6596                            None,
6597                            Some(DeclareAssignment::For(Box::new(self.parse_expr()?))),
6598                            None,
6599                        ),
6600                    }
6601                } else if self.parse_keyword(Keyword::RESULTSET) {
6602                    let assigned_expr = if self.peek_token().token != Token::SemiColon {
6603                        self.parse_snowflake_variable_declaration_expression()?
6604                    } else {
6605                        // Nothing more to do. The statement has no further parameters.
6606                        None
6607                    };
6608
6609                    (Some(DeclareType::ResultSet), None, assigned_expr, None)
6610                } else if self.parse_keyword(Keyword::EXCEPTION) {
6611                    let assigned_expr = if self.peek_token().token == Token::LParen {
6612                        Some(DeclareAssignment::Expr(Box::new(self.parse_expr()?)))
6613                    } else {
6614                        // Nothing more to do. The statement has no further parameters.
6615                        None
6616                    };
6617
6618                    (Some(DeclareType::Exception), None, assigned_expr, None)
6619                } else {
6620                    // Without an explicit keyword, the only valid option is variable declaration.
6621                    let (assigned_expr, data_type) = if let Some(assigned_expr) =
6622                        self.parse_snowflake_variable_declaration_expression()?
6623                    {
6624                        (Some(assigned_expr), None)
6625                    } else if let Token::Word(_) = self.peek_token().token {
6626                        let data_type = self.parse_data_type()?;
6627                        (
6628                            self.parse_snowflake_variable_declaration_expression()?,
6629                            Some(data_type),
6630                        )
6631                    } else {
6632                        (None, None)
6633                    };
6634                    (None, None, assigned_expr, data_type)
6635                };
6636            let stmt = Declare {
6637                names: vec![name],
6638                data_type,
6639                assignment: assigned_expr,
6640                declare_type,
6641                binary: None,
6642                sensitive: None,
6643                scroll: None,
6644                hold: None,
6645                for_query,
6646            };
6647
6648            stmts.push(stmt);
6649            if self.consume_token(&Token::SemiColon) {
6650                match self.peek_token().token {
6651                    Token::Word(w)
6652                        if ALL_KEYWORDS
6653                            .binary_search(&w.value.to_uppercase().as_str())
6654                            .is_err() =>
6655                    {
6656                        // Not a keyword - start of a new declaration.
6657                        continue;
6658                    }
6659                    _ => {
6660                        // Put back the semicolon, this is the end of the DECLARE statement.
6661                        self.prev_token();
6662                    }
6663                }
6664            }
6665
6666            break;
6667        }
6668
6669        Ok(Statement::Declare { stmts })
6670    }
6671
6672    /// Parse a [MsSql] `DECLARE` statement.
6673    ///
6674    /// Syntax:
6675    /// ```text
6676    /// DECLARE
6677    /// {
6678    ///   { @local_variable [AS] data_type [ = value ] }
6679    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6680    /// } [ ,...n ]
6681    /// ```
6682    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
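    ///
    /// Sketch (assumes `Parser::parse_sql`; not a doc test):
    ///
    /// ```rust,ignore
    /// use sqlparser::{dialect::MsSqlDialect, parser::Parser};
    ///
    /// let stmts = Parser::parse_sql(&MsSqlDialect {}, "DECLARE @foo INT = 7, @bar CURSOR").unwrap();
    /// ```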
6683    pub fn parse_mssql_declare(&mut self) -> Result<Statement, ParserError> {
6684        let stmts = self.parse_comma_separated(Parser::parse_mssql_declare_stmt)?;
6685
6686        Ok(Statement::Declare { stmts })
6687    }
6688
6689    /// Parse the body of a [MsSql] `DECLARE` statement.
6690    ///
6691    /// Syntax:
6692    /// ```text
6693    /// {
6694    ///   { @local_variable [AS] data_type [ = value ] }
6695    ///   | { @cursor_variable_name CURSOR [ FOR ] }
6696    /// } [ ,...n ]
6697    /// ```
6698    /// [MsSql]: https://learn.microsoft.com/en-us/sql/t-sql/language-elements/declare-local-variable-transact-sql?view=sql-server-ver16
6699    pub fn parse_mssql_declare_stmt(&mut self) -> Result<Declare, ParserError> {
6700        let name = {
6701            let ident = self.parse_identifier()?;
6702            if !ident.value.starts_with('@')
6703                && !matches!(
6704                    self.peek_token().token,
6705                    Token::Word(w) if w.keyword == Keyword::CURSOR
6706                )
6707            {
6708                Err(ParserError::ParserError(
6709                    "Invalid MsSql variable declaration.".to_string(),
6710                ))
6711            } else {
6712                Ok(ident)
6713            }
6714        }?;
6715
6716        let (declare_type, data_type) = match self.peek_token().token {
6717            Token::Word(w) => match w.keyword {
6718                Keyword::CURSOR => {
6719                    self.next_token();
6720                    (Some(DeclareType::Cursor), None)
6721                }
6722                Keyword::AS => {
6723                    self.next_token();
6724                    (None, Some(self.parse_data_type()?))
6725                }
6726                _ => (None, Some(self.parse_data_type()?)),
6727            },
6728            _ => (None, Some(self.parse_data_type()?)),
6729        };
6730
6731        let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
6732            self.next_token();
6733            let query = Some(self.parse_query()?);
6734            (query, None)
6735        } else {
6736            let assignment = self.parse_mssql_variable_declaration_expression()?;
6737            (None, assignment)
6738        };
6739
6740        Ok(Declare {
6741            names: vec![name],
6742            data_type,
6743            assignment,
6744            declare_type,
6745            binary: None,
6746            sensitive: None,
6747            scroll: None,
6748            hold: None,
6749            for_query,
6750        })
6751    }
6752
6753    /// Parses the assigned expression in a variable declaration.
6754    ///
6755    /// Syntax:
6756    /// ```text
6757    /// [ { DEFAULT | := } <expression>]
6758    /// ```
6759    /// <https://docs.snowflake.com/en/sql-reference/snowflake-scripting/declare#variable-declaration-syntax>
6760    pub fn parse_snowflake_variable_declaration_expression(
6761        &mut self,
6762    ) -> Result<Option<DeclareAssignment>, ParserError> {
6763        Ok(match self.peek_token().token {
6764            Token::Word(w) if w.keyword == Keyword::DEFAULT => {
6765                self.next_token(); // Skip `DEFAULT`
6766                Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
6767            }
6768            Token::Assignment => {
6769                self.next_token(); // Skip `:=`
6770                Some(DeclareAssignment::DuckAssignment(Box::new(
6771                    self.parse_expr()?,
6772                )))
6773            }
6774            _ => None,
6775        })
6776    }
6777
6778    /// Parses the assigned expression in a variable declaration.
6779    ///
6780    /// Syntax:
6781    /// ```text
6782    /// [ = <expression>]
6783    /// ```
6784    pub fn parse_mssql_variable_declaration_expression(
6785        &mut self,
6786    ) -> Result<Option<DeclareAssignment>, ParserError> {
6787        Ok(match self.peek_token().token {
6788            Token::Eq => {
6789                self.next_token(); // Skip `=`
6790                Some(DeclareAssignment::MsSqlAssignment(Box::new(
6791                    self.parse_expr()?,
6792                )))
6793            }
6794            _ => None,
6795        })
6796    }
6797
6798    // FETCH [ direction { FROM | IN } ] cursor INTO target;
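    // e.g. (illustrative, per the branches below):
    //   FETCH NEXT FROM my_cursor INTO my_table;
    //   FETCH FORWARD 5 IN my_cursor;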
6799    pub fn parse_fetch_statement(&mut self) -> Result<Statement, ParserError> {
6800        let direction = if self.parse_keyword(Keyword::NEXT) {
6801            FetchDirection::Next
6802        } else if self.parse_keyword(Keyword::PRIOR) {
6803            FetchDirection::Prior
6804        } else if self.parse_keyword(Keyword::FIRST) {
6805            FetchDirection::First
6806        } else if self.parse_keyword(Keyword::LAST) {
6807            FetchDirection::Last
6808        } else if self.parse_keyword(Keyword::ABSOLUTE) {
6809            FetchDirection::Absolute {
6810                limit: self.parse_number_value()?.value,
6811            }
6812        } else if self.parse_keyword(Keyword::RELATIVE) {
6813            FetchDirection::Relative {
6814                limit: self.parse_number_value()?.value,
6815            }
6816        } else if self.parse_keyword(Keyword::FORWARD) {
6817            if self.parse_keyword(Keyword::ALL) {
6818                FetchDirection::ForwardAll
6819            } else {
6820                FetchDirection::Forward {
6821                    // TODO: Support an optional row count
6822                    limit: Some(self.parse_number_value()?.value),
6823                }
6824            }
6825        } else if self.parse_keyword(Keyword::BACKWARD) {
6826            if self.parse_keyword(Keyword::ALL) {
6827                FetchDirection::BackwardAll
6828            } else {
6829                FetchDirection::Backward {
6830                    // TODO: Support an optional row count
6831                    limit: Some(self.parse_number_value()?.value),
6832                }
6833            }
6834        } else if self.parse_keyword(Keyword::ALL) {
6835            FetchDirection::All
6836        } else {
6837            FetchDirection::Count {
6838                limit: self.parse_number_value()?.value,
6839            }
6840        };
6841
6842        let position = if self.peek_keyword(Keyword::FROM) {
6843            self.expect_keyword(Keyword::FROM)?;
6844            FetchPosition::From
6845        } else if self.peek_keyword(Keyword::IN) {
6846            self.expect_keyword(Keyword::IN)?;
6847            FetchPosition::In
6848        } else {
6849            return parser_err!("Expected FROM or IN", self.peek_token().span.start);
6850        };
6851
6852        let name = self.parse_identifier()?;
6853
6854        let into = if self.parse_keyword(Keyword::INTO) {
6855            Some(self.parse_object_name(false)?)
6856        } else {
6857            None
6858        };
6859
6860        Ok(Statement::Fetch {
6861            name,
6862            direction,
6863            position,
6864            into,
6865        })
6866    }
6867
6868    pub fn parse_discard(&mut self) -> Result<Statement, ParserError> {
6869        let object_type = if self.parse_keyword(Keyword::ALL) {
6870            DiscardObject::ALL
6871        } else if self.parse_keyword(Keyword::PLANS) {
6872            DiscardObject::PLANS
6873        } else if self.parse_keyword(Keyword::SEQUENCES) {
6874            DiscardObject::SEQUENCES
6875        } else if self.parse_keyword(Keyword::TEMP) || self.parse_keyword(Keyword::TEMPORARY) {
6876            DiscardObject::TEMP
6877        } else {
6878            return self.expected(
6879                "ALL, PLANS, SEQUENCES, TEMP or TEMPORARY after DISCARD",
6880                self.peek_token(),
6881            );
6882        };
6883        Ok(Statement::Discard { object_type })
6884    }
6885
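    /// Parse a `CREATE [UNIQUE] INDEX` statement; the `CREATE [UNIQUE] INDEX`
    /// prefix has already been consumed by the caller.
    ///
    /// For illustration, a statement of the general shape handled here
    /// (a sketch of the clauses parsed below, not dialect-complete):
    ///
    /// ```text
    /// CREATE UNIQUE INDEX CONCURRENTLY IF NOT EXISTS idx_name
    ///     ON tbl USING BTREE (col1, col2)
    ///     INCLUDE (col3)
    ///     NULLS NOT DISTINCT
    ///     WITH (fillfactor = 70)
    ///     WHERE col1 > 0
    /// ```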
6886    pub fn parse_create_index(&mut self, unique: bool) -> Result<Statement, ParserError> {
6887        let concurrently = self.parse_keyword(Keyword::CONCURRENTLY);
6888        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6889        let index_name = if if_not_exists || !self.parse_keyword(Keyword::ON) {
6890            let index_name = self.parse_object_name(false)?;
6891            self.expect_keyword_is(Keyword::ON)?;
6892            Some(index_name)
6893        } else {
6894            None
6895        };
6896        let table_name = self.parse_object_name(false)?;
6897        let using = if self.parse_keyword(Keyword::USING) {
6898            Some(self.parse_index_type()?)
6899        } else {
6900            None
6901        };
6902
6903        let columns = self.parse_parenthesized_index_column_list()?;
6904
6905        let include = if self.parse_keyword(Keyword::INCLUDE) {
6906            self.expect_token(&Token::LParen)?;
6907            let columns = self.parse_comma_separated(|p| p.parse_identifier())?;
6908            self.expect_token(&Token::RParen)?;
6909            columns
6910        } else {
6911            vec![]
6912        };
6913
6914        let nulls_distinct = if self.parse_keyword(Keyword::NULLS) {
6915            let not = self.parse_keyword(Keyword::NOT);
6916            self.expect_keyword_is(Keyword::DISTINCT)?;
6917            Some(!not)
6918        } else {
6919            None
6920        };
6921
6922        let with = if self.dialect.supports_create_index_with_clause()
6923            && self.parse_keyword(Keyword::WITH)
6924        {
6925            self.expect_token(&Token::LParen)?;
6926            let with_params = self.parse_comma_separated(Parser::parse_expr)?;
6927            self.expect_token(&Token::RParen)?;
6928            with_params
6929        } else {
6930            Vec::new()
6931        };
6932
6933        let predicate = if self.parse_keyword(Keyword::WHERE) {
6934            Some(self.parse_expr()?)
6935        } else {
6936            None
6937        };
6938
6939        Ok(Statement::CreateIndex(CreateIndex {
6940            name: index_name,
6941            table_name,
6942            using,
6943            columns,
6944            unique,
6945            concurrently,
6946            if_not_exists,
6947            include,
6948            nulls_distinct,
6949            with,
6950            predicate,
6951        }))
6952    }
6953
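    /// Parse a PostgreSQL-style `CREATE EXTENSION` statement; the
    /// `CREATE EXTENSION` prefix has already been consumed by the caller.
    /// For example (illustrative):
    ///
    /// ```text
    /// CREATE EXTENSION IF NOT EXISTS hstore WITH SCHEMA public CASCADE
    /// ```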
6954    pub fn parse_create_extension(&mut self) -> Result<Statement, ParserError> {
6955        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
6956        let name = self.parse_identifier()?;
6957
6958        let (schema, version, cascade) = if self.parse_keyword(Keyword::WITH) {
6959            let schema = if self.parse_keyword(Keyword::SCHEMA) {
6960                Some(self.parse_identifier()?)
6961            } else {
6962                None
6963            };
6964
6965            let version = if self.parse_keyword(Keyword::VERSION) {
6966                Some(self.parse_identifier()?)
6967            } else {
6968                None
6969            };
6970
6971            let cascade = self.parse_keyword(Keyword::CASCADE);
6972
6973            (schema, version, cascade)
6974        } else {
6975            (None, None, false)
6976        };
6977
6978        Ok(Statement::CreateExtension {
6979            name,
6980            if_not_exists,
6981            schema,
6982            version,
6983            cascade,
6984        })
6985    }
6986
6987    /// Parse a PostgreSQL-specific [Statement::DropExtension] statement.
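    ///
    /// For example (illustrative): `DROP EXTENSION IF EXISTS hstore, postgis CASCADE`.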
6988    pub fn parse_drop_extension(&mut self) -> Result<Statement, ParserError> {
6989        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
6990        let names = self.parse_comma_separated(|p| p.parse_identifier())?;
6991        let cascade_or_restrict =
6992            self.parse_one_of_keywords(&[Keyword::CASCADE, Keyword::RESTRICT]);
6993        Ok(Statement::DropExtension {
6994            names,
6995            if_exists,
6996            cascade_or_restrict: cascade_or_restrict
6997                .map(|k| match k {
6998                    Keyword::CASCADE => Ok(ReferentialAction::Cascade),
6999                    Keyword::RESTRICT => Ok(ReferentialAction::Restrict),
7000                    _ => self.expected("CASCADE or RESTRICT", self.peek_token()),
7001                })
7002                .transpose()?,
7003        })
7004    }
7005
7006    // TODO: Implement parsing for Skewed
7007    pub fn parse_hive_distribution(&mut self) -> Result<HiveDistributionStyle, ParserError> {
7008        if self.parse_keywords(&[Keyword::PARTITIONED, Keyword::BY]) {
7009            self.expect_token(&Token::LParen)?;
7010            let columns = self.parse_comma_separated(Parser::parse_column_def)?;
7011            self.expect_token(&Token::RParen)?;
7012            Ok(HiveDistributionStyle::PARTITIONED { columns })
7013        } else {
7014            Ok(HiveDistributionStyle::NONE)
7015        }
7016    }
7017
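    /// Parse Hive-style storage clauses of a `CREATE TABLE` statement,
    /// e.g. (illustrative; any subset of the clauses below, in any order):
    ///
    /// ```text
    /// ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
    /// STORED AS ORC
    /// LOCATION 's3://bucket/path'
    /// ```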
7018    pub fn parse_hive_formats(&mut self) -> Result<HiveFormat, ParserError> {
7019        let mut hive_format = HiveFormat::default();
7020        loop {
7021            match self.parse_one_of_keywords(&[
7022                Keyword::ROW,
7023                Keyword::STORED,
7024                Keyword::LOCATION,
7025                Keyword::WITH,
7026            ]) {
7027                Some(Keyword::ROW) => {
7028                    hive_format.row_format = Some(self.parse_row_format()?);
7029                }
7030                Some(Keyword::STORED) => {
7031                    self.expect_keyword_is(Keyword::AS)?;
7032                    if self.parse_keyword(Keyword::INPUTFORMAT) {
7033                        let input_format = self.parse_expr()?;
7034                        self.expect_keyword_is(Keyword::OUTPUTFORMAT)?;
7035                        let output_format = self.parse_expr()?;
7036                        hive_format.storage = Some(HiveIOFormat::IOF {
7037                            input_format,
7038                            output_format,
7039                        });
7040                    } else {
7041                        let format = self.parse_file_format()?;
7042                        hive_format.storage = Some(HiveIOFormat::FileFormat { format });
7043                    }
7044                }
7045                Some(Keyword::LOCATION) => {
7046                    hive_format.location = Some(self.parse_literal_string()?);
7047                }
7048                Some(Keyword::WITH) => {
7049                    self.prev_token();
7050                    let properties = self
7051                        .parse_options_with_keywords(&[Keyword::WITH, Keyword::SERDEPROPERTIES])?;
7052                    if !properties.is_empty() {
7053                        hive_format.serde_properties = Some(properties);
7054                    } else {
7055                        break;
7056                    }
7057                }
7058                None => break,
7059                _ => break,
7060            }
7061        }
7062
7063        Ok(hive_format)
7064    }
7065
7066    pub fn parse_row_format(&mut self) -> Result<HiveRowFormat, ParserError> {
7067        self.expect_keyword_is(Keyword::FORMAT)?;
7068        match self.parse_one_of_keywords(&[Keyword::SERDE, Keyword::DELIMITED]) {
7069            Some(Keyword::SERDE) => {
7070                let class = self.parse_literal_string()?;
7071                Ok(HiveRowFormat::SERDE { class })
7072            }
7073            _ => {
7074                let mut row_delimiters = vec![];
7075
7076                loop {
7077                    match self.parse_one_of_keywords(&[
7078                        Keyword::FIELDS,
7079                        Keyword::COLLECTION,
7080                        Keyword::MAP,
7081                        Keyword::LINES,
7082                        Keyword::NULL,
7083                    ]) {
7084                        Some(Keyword::FIELDS) => {
7085                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7086                                row_delimiters.push(HiveRowDelimiter {
7087                                    delimiter: HiveDelimiter::FieldsTerminatedBy,
7088                                    char: self.parse_identifier()?,
7089                                });
7090
7091                                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
7092                                    row_delimiters.push(HiveRowDelimiter {
7093                                        delimiter: HiveDelimiter::FieldsEscapedBy,
7094                                        char: self.parse_identifier()?,
7095                                    });
7096                                }
7097                            } else {
7098                                break;
7099                            }
7100                        }
7101                        Some(Keyword::COLLECTION) => {
7102                            if self.parse_keywords(&[
7103                                Keyword::ITEMS,
7104                                Keyword::TERMINATED,
7105                                Keyword::BY,
7106                            ]) {
7107                                row_delimiters.push(HiveRowDelimiter {
7108                                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
7109                                    char: self.parse_identifier()?,
7110                                });
7111                            } else {
7112                                break;
7113                            }
7114                        }
7115                        Some(Keyword::MAP) => {
7116                            if self.parse_keywords(&[
7117                                Keyword::KEYS,
7118                                Keyword::TERMINATED,
7119                                Keyword::BY,
7120                            ]) {
7121                                row_delimiters.push(HiveRowDelimiter {
7122                                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
7123                                    char: self.parse_identifier()?,
7124                                });
7125                            } else {
7126                                break;
7127                            }
7128                        }
7129                        Some(Keyword::LINES) => {
7130                            if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
7131                                row_delimiters.push(HiveRowDelimiter {
7132                                    delimiter: HiveDelimiter::LinesTerminatedBy,
7133                                    char: self.parse_identifier()?,
7134                                });
7135                            } else {
7136                                break;
7137                            }
7138                        }
7139                        Some(Keyword::NULL) => {
7140                            if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
7141                                row_delimiters.push(HiveRowDelimiter {
7142                                    delimiter: HiveDelimiter::NullDefinedAs,
7143                                    char: self.parse_identifier()?,
7144                                });
7145                            } else {
7146                                break;
7147                            }
7148                        }
7149                        _ => {
7150                            break;
7151                        }
7152                    }
7153                }
7154
7155                Ok(HiveRowFormat::DELIMITED {
7156                    delimiters: row_delimiters,
7157                })
7158            }
7159        }
7160    }
7161
7162    fn parse_optional_on_cluster(&mut self) -> Result<Option<Ident>, ParserError> {
7163        if self.parse_keywords(&[Keyword::ON, Keyword::CLUSTER]) {
7164            Ok(Some(self.parse_identifier()?))
7165        } else {
7166            Ok(None)
7167        }
7168    }
7169
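    /// Parse the remainder of a `CREATE TABLE` statement; the leading
    /// `CREATE ... TABLE` keywords have already been consumed by the caller,
    /// which passes what it saw via the `or_replace`/`temporary`/`global`/`transient`
    /// flags. For illustration only:
    ///
    /// ```text
    /// CREATE TABLE IF NOT EXISTS t (a INT PRIMARY KEY, b TEXT NOT NULL) WITHOUT ROWID
    /// ```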
7170    pub fn parse_create_table(
7171        &mut self,
7172        or_replace: bool,
7173        temporary: bool,
7174        global: Option<bool>,
7175        transient: bool,
7176    ) -> Result<Statement, ParserError> {
7177        let allow_unquoted_hyphen = dialect_of!(self is BigQueryDialect);
7178        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
7179        let table_name = self.parse_object_name(allow_unquoted_hyphen)?;
7180
7181        // ClickHouse has `ON CLUSTER 'cluster'` syntax for DDLs
7182        let on_cluster = self.parse_optional_on_cluster()?;
7183
7184        let like = if self.parse_keyword(Keyword::LIKE) || self.parse_keyword(Keyword::ILIKE) {
7185            self.parse_object_name(allow_unquoted_hyphen).ok()
7186        } else {
7187            None
7188        };
7189
7190        let clone = if self.parse_keyword(Keyword::CLONE) {
7191            self.parse_object_name(allow_unquoted_hyphen).ok()
7192        } else {
7193            None
7194        };
7195
7196        // parse optional column list (schema)
7197        let (columns, constraints) = self.parse_columns()?;
7198        let comment_after_column_def =
7199            if dialect_of!(self is HiveDialect) && self.parse_keyword(Keyword::COMMENT) {
7200                let next_token = self.next_token();
7201                match next_token.token {
7202                    Token::SingleQuotedString(str) => Some(CommentDef::WithoutEq(str)),
7203                    _ => self.expected("comment", next_token)?,
7204                }
7205            } else {
7206                None
7207            };
7208
7209        // SQLite supports `WITHOUT ROWID` at the end of `CREATE TABLE`
7210        let without_rowid = self.parse_keywords(&[Keyword::WITHOUT, Keyword::ROWID]);
7211
7212        let hive_distribution = self.parse_hive_distribution()?;
7213        let clustered_by = self.parse_optional_clustered_by()?;
7214        let hive_formats = self.parse_hive_formats()?;
7215
7216        let create_table_config = self.parse_optional_create_table_config()?;
7217
7218        // ClickHouse supports `PRIMARY KEY` before `ORDER BY`
7219        // https://clickhouse.com/docs/en/sql-reference/statements/create/table#primary-key
7220        let primary_key = if dialect_of!(self is ClickHouseDialect | GenericDialect)
7221            && self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY])
7222        {
7223            Some(Box::new(self.parse_expr()?))
7224        } else {
7225            None
7226        };
7227
7228        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
7229            if self.consume_token(&Token::LParen) {
7230                let columns = if self.peek_token() != Token::RParen {
7231                    self.parse_comma_separated(|p| p.parse_expr())?
7232                } else {
7233                    vec![]
7234                };
7235                self.expect_token(&Token::RParen)?;
7236                Some(OneOrManyWithParens::Many(columns))
7237            } else {
7238                Some(OneOrManyWithParens::One(self.parse_expr()?))
7239            }
7240        } else {
7241            None
7242        };
7243
7244        let on_commit = if self.parse_keywords(&[Keyword::ON, Keyword::COMMIT]) {
7245            Some(self.parse_create_table_on_commit()?)
7246        } else {
7247            None
7248        };
7249
7250        let strict = self.parse_keyword(Keyword::STRICT);
7251
7252        // Parse optional `AS ( query )`
7253        let query = if self.parse_keyword(Keyword::AS) {
7254            Some(self.parse_query()?)
7255        } else if self.dialect.supports_create_table_select() && self.parse_keyword(Keyword::SELECT)
7256        {
7257            // rewind the SELECT keyword
7258            self.prev_token();
7259            Some(self.parse_query()?)
7260        } else {
7261            None
7262        };
7263
7264        Ok(CreateTableBuilder::new(table_name)
7265            .temporary(temporary)
7266            .columns(columns)
7267            .constraints(constraints)
7268            .or_replace(or_replace)
7269            .if_not_exists(if_not_exists)
7270            .transient(transient)
7271            .hive_distribution(hive_distribution)
7272            .hive_formats(Some(hive_formats))
7273            .global(global)
7274            .query(query)
7275            .without_rowid(without_rowid)
7276            .like(like)
7277            .clone_clause(clone)
7278            .comment_after_column_def(comment_after_column_def)
7279            .order_by(order_by)
7280            .on_commit(on_commit)
7281            .on_cluster(on_cluster)
7282            .clustered_by(clustered_by)
7283            .partition_by(create_table_config.partition_by)
7284            .cluster_by(create_table_config.cluster_by)
7285            .inherits(create_table_config.inherits)
7286            .table_options(create_table_config.table_options)
7287            .primary_key(primary_key)
7288            .strict(strict)
7289            .build())
7290    }
7291
7292    pub(crate) fn parse_create_table_on_commit(&mut self) -> Result<OnCommit, ParserError> {
7293        if self.parse_keywords(&[Keyword::DELETE, Keyword::ROWS]) {
7294            Ok(OnCommit::DeleteRows)
7295        } else if self.parse_keywords(&[Keyword::PRESERVE, Keyword::ROWS]) {
7296            Ok(OnCommit::PreserveRows)
7297        } else if self.parse_keywords(&[Keyword::DROP]) {
7298            Ok(OnCommit::Drop)
7299        } else {
7300            parser_err!(
7301                "Expecting DELETE ROWS, PRESERVE ROWS or DROP",
7302                self.peek_token()
7303            )
7304        }
7305    }
7306
7307    /// Parse configuration like inheritance, partitioning, clustering information during the table creation.
7308    ///
7309    /// [BigQuery](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#syntax_2)
7310    /// [PostgreSQL](https://www.postgresql.org/docs/current/ddl-partitioning.html)
7311    /// [MySql](https://dev.mysql.com/doc/refman/8.4/en/create-table.html)
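    ///
    /// For example, a BigQuery-style configuration (illustrative):
    ///
    /// ```text
    /// PARTITION BY DATE(ts) CLUSTER BY customer_id OPTIONS (description = 'x')
    /// ```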
7312    fn parse_optional_create_table_config(
7313        &mut self,
7314    ) -> Result<CreateTableConfiguration, ParserError> {
7315        let mut table_options = CreateTableOptions::None;
7316
7317        let inherits = if self.parse_keyword(Keyword::INHERITS) {
7318            Some(self.parse_parenthesized_qualified_column_list(IsOptional::Mandatory, false)?)
7319        } else {
7320            None
7321        };
7322
7323        // PostgreSQL supports `WITH ( options )` before `AS`
7324        let with_options = self.parse_options(Keyword::WITH)?;
7325        if !with_options.is_empty() {
7326            table_options = CreateTableOptions::With(with_options)
7327        }
7328
7329        let table_properties = self.parse_options(Keyword::TBLPROPERTIES)?;
7330        if !table_properties.is_empty() {
7331            table_options = CreateTableOptions::TableProperties(table_properties);
7332        }
7333        let partition_by = if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
7334            && self.parse_keywords(&[Keyword::PARTITION, Keyword::BY])
7335        {
7336            Some(Box::new(self.parse_expr()?))
7337        } else {
7338            None
7339        };
7340
7341        let mut cluster_by = None;
7342        if dialect_of!(self is BigQueryDialect | GenericDialect) {
7343            if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
7344                cluster_by = Some(WrappedCollection::NoWrapping(
7345                    self.parse_comma_separated(|p| p.parse_expr())?,
7346                ));
7347            };
7348
7349            if let Token::Word(word) = self.peek_token().token {
7350                if word.keyword == Keyword::OPTIONS {
7351                    table_options =
7352                        CreateTableOptions::Options(self.parse_options(Keyword::OPTIONS)?)
7353                }
7354            };
7355        }
7356
7357        if !dialect_of!(self is HiveDialect) && table_options == CreateTableOptions::None {
7358            let plain_options = self.parse_plain_options()?;
7359            if !plain_options.is_empty() {
7360                table_options = CreateTableOptions::Plain(plain_options)
7361            }
7362        };
7363
7364        Ok(CreateTableConfiguration {
7365            partition_by,
7366            cluster_by,
7367            inherits,
7368            table_options,
7369        })
7370    }
7371
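    /// Parse a single "plain" (unparenthesized) table option, as used by e.g.
    /// MySQL and ClickHouse. Illustrative examples of the forms handled below:
    ///
    /// ```text
    /// ENGINE = InnoDB
    /// DEFAULT CHARSET = utf8mb4
    /// COMMENT = 'my table'
    /// ```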
7372    fn parse_plain_option(&mut self) -> Result<Option<SqlOption>, ParserError> {
7373        // Single parameter option
7374        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7375        if self.parse_keywords(&[Keyword::START, Keyword::TRANSACTION]) {
7376            return Ok(Some(SqlOption::Ident(Ident::new("START TRANSACTION"))));
7377        }
7378
7379        // COMMENT [=] '<string>' option
7380        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7381        if self.parse_keywords(&[Keyword::COMMENT]) {
7382            let has_eq = self.consume_token(&Token::Eq);
7383            let value = self.next_token();
7384
7385            let comment = match (has_eq, value.token) {
7386                (true, Token::SingleQuotedString(s)) => {
7387                    Ok(Some(SqlOption::Comment(CommentDef::WithEq(s))))
7388                }
7389                (false, Token::SingleQuotedString(s)) => {
7390                    Ok(Some(SqlOption::Comment(CommentDef::WithoutEq(s))))
7391                }
7392                (_, token) => {
7393                    self.expected("Token::SingleQuotedString", TokenWithSpan::wrap(token))
7394                }
7395            };
7396            return comment;
7397        }
7398
7399        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7400        // <https://clickhouse.com/docs/sql-reference/statements/create/table>
7401        if self.parse_keywords(&[Keyword::ENGINE]) {
7402            let _ = self.consume_token(&Token::Eq);
7403            let value = self.next_token();
7404
7405            let engine = match value.token {
7406                Token::Word(w) => {
7407                    let parameters = if self.peek_token() == Token::LParen {
7408                        self.parse_parenthesized_identifiers()?
7409                    } else {
7410                        vec![]
7411                    };
7412
7413                    Ok(Some(SqlOption::NamedParenthesizedList(
7414                        NamedParenthesizedList {
7415                            key: Ident::new("ENGINE"),
7416                            name: Some(Ident::new(w.value)),
7417                            values: parameters,
7418                        },
7419                    )))
7420                }
7421                _ => {
7422                    return self.expected("Token::Word", value)?;
7423                }
7424            };
7425
7426            return engine;
7427        }
7428
7429        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7430        if self.parse_keywords(&[Keyword::TABLESPACE]) {
7431            let _ = self.consume_token(&Token::Eq);
7432            let value = self.next_token();
7433
7434            let tablespace = match value.token {
7435                Token::Word(Word { value: name, .. }) | Token::SingleQuotedString(name) => {
7436                    let storage = match self.parse_keyword(Keyword::STORAGE) {
7437                        true => {
7438                            let _ = self.consume_token(&Token::Eq);
7439                            let storage_token = self.next_token();
7440                            match &storage_token.token {
7441                                Token::Word(w) => match w.value.to_uppercase().as_str() {
7442                                    "DISK" => Some(StorageType::Disk),
7443                                    "MEMORY" => Some(StorageType::Memory),
7444                                    _ => self
7445                                        .expected("Storage type (DISK or MEMORY)", storage_token)?,
7446                                },
7447                                _ => self.expected("Token::Word", storage_token)?,
7448                            }
7449                        }
7450                        false => None,
7451                    };
7452
7453                    Ok(Some(SqlOption::TableSpace(TablespaceOption {
7454                        name,
7455                        storage,
7456                    })))
7457                }
7458                _ => {
7459                    return self.expected("Token::Word", value)?;
7460                }
7461            };
7462
7463            return tablespace;
7464        }
7465
7466        // <https://dev.mysql.com/doc/refman/8.4/en/create-table.html>
7467        if self.parse_keyword(Keyword::UNION) {
7468            let _ = self.consume_token(&Token::Eq);
7469            let value = self.next_token();
7470
7471            match value.token {
7472                Token::LParen => {
7473                    let tables: Vec<Ident> =
7474                        self.parse_comma_separated0(Parser::parse_identifier, Token::RParen)?;
7475                    self.expect_token(&Token::RParen)?;
7476
7477                    return Ok(Some(SqlOption::NamedParenthesizedList(
7478                        NamedParenthesizedList {
7479                            key: Ident::new("UNION"),
7480                            name: None,
7481                            values: tables,
7482                        },
7483                    )));
7484                }
7485                _ => {
7486                    return self.expected("Token::LParen", value)?;
7487                }
7488            }
7489        }
7490
7491        // Key/Value parameter option
7492        let key = if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARSET]) {
7493            Ident::new("DEFAULT CHARSET")
7494        } else if self.parse_keyword(Keyword::CHARSET) {
7495            Ident::new("CHARSET")
7496        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::CHARACTER, Keyword::SET]) {
7497            Ident::new("DEFAULT CHARACTER SET")
7498        } else if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7499            Ident::new("CHARACTER SET")
7500        } else if self.parse_keywords(&[Keyword::DEFAULT, Keyword::COLLATE]) {
7501            Ident::new("DEFAULT COLLATE")
7502        } else if self.parse_keyword(Keyword::COLLATE) {
7503            Ident::new("COLLATE")
7504        } else if self.parse_keywords(&[Keyword::DATA, Keyword::DIRECTORY]) {
7505            Ident::new("DATA DIRECTORY")
7506        } else if self.parse_keywords(&[Keyword::INDEX, Keyword::DIRECTORY]) {
7507            Ident::new("INDEX DIRECTORY")
7508        } else if self.parse_keyword(Keyword::KEY_BLOCK_SIZE) {
7509            Ident::new("KEY_BLOCK_SIZE")
7510        } else if self.parse_keyword(Keyword::ROW_FORMAT) {
7511            Ident::new("ROW_FORMAT")
7512        } else if self.parse_keyword(Keyword::PACK_KEYS) {
7513            Ident::new("PACK_KEYS")
7514        } else if self.parse_keyword(Keyword::STATS_AUTO_RECALC) {
7515            Ident::new("STATS_AUTO_RECALC")
7516        } else if self.parse_keyword(Keyword::STATS_PERSISTENT) {
7517            Ident::new("STATS_PERSISTENT")
7518        } else if self.parse_keyword(Keyword::STATS_SAMPLE_PAGES) {
7519            Ident::new("STATS_SAMPLE_PAGES")
7520        } else if self.parse_keyword(Keyword::DELAY_KEY_WRITE) {
7521            Ident::new("DELAY_KEY_WRITE")
7522        } else if self.parse_keyword(Keyword::COMPRESSION) {
7523            Ident::new("COMPRESSION")
7524        } else if self.parse_keyword(Keyword::ENCRYPTION) {
7525            Ident::new("ENCRYPTION")
7526        } else if self.parse_keyword(Keyword::MAX_ROWS) {
7527            Ident::new("MAX_ROWS")
7528        } else if self.parse_keyword(Keyword::MIN_ROWS) {
7529            Ident::new("MIN_ROWS")
7530        } else if self.parse_keyword(Keyword::AUTOEXTEND_SIZE) {
7531            Ident::new("AUTOEXTEND_SIZE")
7532        } else if self.parse_keyword(Keyword::AVG_ROW_LENGTH) {
7533            Ident::new("AVG_ROW_LENGTH")
7534        } else if self.parse_keyword(Keyword::CHECKSUM) {
7535            Ident::new("CHECKSUM")
7536        } else if self.parse_keyword(Keyword::CONNECTION) {
7537            Ident::new("CONNECTION")
7538        } else if self.parse_keyword(Keyword::ENGINE_ATTRIBUTE) {
7539            Ident::new("ENGINE_ATTRIBUTE")
7540        } else if self.parse_keyword(Keyword::PASSWORD) {
7541            Ident::new("PASSWORD")
7542        } else if self.parse_keyword(Keyword::SECONDARY_ENGINE_ATTRIBUTE) {
7543            Ident::new("SECONDARY_ENGINE_ATTRIBUTE")
7544        } else if self.parse_keyword(Keyword::INSERT_METHOD) {
7545            Ident::new("INSERT_METHOD")
7546        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
7547            Ident::new("AUTO_INCREMENT")
7548        } else {
7549            return Ok(None);
7550        };
7551
7552        let _ = self.consume_token(&Token::Eq);
7553
7554        let value = match self
7555            .maybe_parse(|parser| parser.parse_value())?
7556            .map(Expr::Value)
7557        {
7558            Some(expr) => expr,
7559            None => Expr::Identifier(self.parse_identifier()?),
7560        };
7561
7562        Ok(Some(SqlOption::KeyValue { key, value }))
7563    }
7564
7565    pub fn parse_plain_options(&mut self) -> Result<Vec<SqlOption>, ParserError> {
7566        let mut options = Vec::new();
7567
7568        while let Some(option) = self.parse_plain_option()? {
7569            options.push(option);
7570        }
7571
7572        Ok(options)
7573    }
7574
7575    pub fn parse_optional_inline_comment(&mut self) -> Result<Option<CommentDef>, ParserError> {
7576        let comment = if self.parse_keyword(Keyword::COMMENT) {
7577            let has_eq = self.consume_token(&Token::Eq);
7578            let comment = self.parse_comment_value()?;
7579            Some(if has_eq {
7580                CommentDef::WithEq(comment)
7581            } else {
7582                CommentDef::WithoutEq(comment)
7583            })
7584        } else {
7585            None
7586        };
7587        Ok(comment)
7588    }
7589
7590    pub fn parse_comment_value(&mut self) -> Result<String, ParserError> {
7591        let next_token = self.next_token();
7592        let value = match next_token.token {
7593            Token::SingleQuotedString(str) => str,
7594            Token::DollarQuotedString(str) => str.value,
7595            _ => self.expected("string literal", next_token)?,
7596        };
7597        Ok(value)
7598    }
7599
7600    pub fn parse_optional_procedure_parameters(
7601        &mut self,
7602    ) -> Result<Option<Vec<ProcedureParam>>, ParserError> {
7603        let mut params = vec![];
7604        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7605            return Ok(Some(params));
7606        }
7607        loop {
7608            if let Token::Word(_) = self.peek_token().token {
7609                params.push(self.parse_procedure_param()?)
7610            }
7611            let comma = self.consume_token(&Token::Comma);
7612            if self.consume_token(&Token::RParen) {
7613                // allow a trailing comma, even though it's not standard SQL
7614                break;
7615            } else if !comma {
7616                return self.expected("',' or ')' after parameter definition", self.peek_token());
7617            }
7618        }
7619        Ok(Some(params))
7620    }
7621
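    /// Parse a parenthesized, comma-separated list of column definitions and
    /// table constraints, e.g. (illustrative):
    ///
    /// ```text
    /// (id INT PRIMARY KEY, name TEXT NOT NULL, CONSTRAINT uq_name UNIQUE (name))
    /// ```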
7622    pub fn parse_columns(&mut self) -> Result<(Vec<ColumnDef>, Vec<TableConstraint>), ParserError> {
7623        let mut columns = vec![];
7624        let mut constraints = vec![];
7625        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
7626            return Ok((columns, constraints));
7627        }
7628
7629        loop {
7630            if let Some(constraint) = self.parse_optional_table_constraint()? {
7631                constraints.push(constraint);
7632            } else if let Token::Word(_) = self.peek_token().token {
7633                columns.push(self.parse_column_def()?);
7634            } else {
7635                return self.expected("column name or constraint definition", self.peek_token());
7636            }
7637
7638            let comma = self.consume_token(&Token::Comma);
7639            let rparen = self.peek_token().token == Token::RParen;
7640
7641            if !comma && !rparen {
7642                return self.expected("',' or ')' after column definition", self.peek_token());
7643            };
7644
7645            if rparen
7646                && (!comma
7647                    || self.dialect.supports_column_definition_trailing_commas()
7648                    || self.options.trailing_commas)
7649            {
7650                let _ = self.consume_token(&Token::RParen);
7651                break;
7652            }
7653        }
7654
7655        Ok((columns, constraints))
7656    }
7657
7658    pub fn parse_procedure_param(&mut self) -> Result<ProcedureParam, ParserError> {
7659        let mode = if self.parse_keyword(Keyword::IN) {
7660            Some(ArgMode::In)
7661        } else if self.parse_keyword(Keyword::OUT) {
7662            Some(ArgMode::Out)
7663        } else if self.parse_keyword(Keyword::INOUT) {
7664            Some(ArgMode::InOut)
7665        } else {
7666            None
7667        };
7668        let name = self.parse_identifier()?;
7669        let data_type = self.parse_data_type()?;
7670        Ok(ProcedureParam {
7671            name,
7672            data_type,
7673            mode,
7674        })
7675    }
7676
7677    pub fn parse_column_def(&mut self) -> Result<ColumnDef, ParserError> {
7678        let name = self.parse_identifier()?;
7679        let data_type = if self.is_column_type_sqlite_unspecified() {
7680            DataType::Unspecified
7681        } else {
7682            self.parse_data_type()?
7683        };
7684        let mut options = vec![];
7685        loop {
7686            if self.parse_keyword(Keyword::CONSTRAINT) {
7687                let name = Some(self.parse_identifier()?);
7688                if let Some(option) = self.parse_optional_column_option()? {
7689                    options.push(ColumnOptionDef { name, option });
7690                } else {
7691                    return self.expected(
7692                        "constraint details after CONSTRAINT <name>",
7693                        self.peek_token(),
7694                    );
7695                }
7696            } else if let Some(option) = self.parse_optional_column_option()? {
7697                options.push(ColumnOptionDef { name: None, option });
7698            } else {
7699                break;
7700            };
7701        }
7702        Ok(ColumnDef {
7703            name,
7704            data_type,
7705            options,
7706        })
7707    }
7708
7709    fn is_column_type_sqlite_unspecified(&mut self) -> bool {
7710        if dialect_of!(self is SQLiteDialect) {
7711            match self.peek_token().token {
7712                Token::Word(word) => matches!(
7713                    word.keyword,
7714                    Keyword::CONSTRAINT
7715                        | Keyword::PRIMARY
7716                        | Keyword::NOT
7717                        | Keyword::UNIQUE
7718                        | Keyword::CHECK
7719                        | Keyword::DEFAULT
7720                        | Keyword::COLLATE
7721                        | Keyword::REFERENCES
7722                        | Keyword::GENERATED
7723                        | Keyword::AS
7724                ),
7725                _ => true, // e.g. comma immediately after column name
7726            }
7727        } else {
7728            false
7729        }
7730    }
7731
7732    pub fn parse_optional_column_option(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7733        if let Some(option) = self.dialect.parse_column_option(self)? {
7734            return option;
7735        }
7736
7737        if self.parse_keywords(&[Keyword::CHARACTER, Keyword::SET]) {
7738            Ok(Some(ColumnOption::CharacterSet(
7739                self.parse_object_name(false)?,
7740            )))
7741        } else if self.parse_keywords(&[Keyword::COLLATE]) {
7742            Ok(Some(ColumnOption::Collation(
7743                self.parse_object_name(false)?,
7744            )))
7745        } else if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) {
7746            Ok(Some(ColumnOption::NotNull))
7747        } else if self.parse_keywords(&[Keyword::COMMENT]) {
7748            Ok(Some(ColumnOption::Comment(self.parse_comment_value()?)))
7749        } else if self.parse_keyword(Keyword::NULL) {
7750            Ok(Some(ColumnOption::Null))
7751        } else if self.parse_keyword(Keyword::DEFAULT) {
7752            Ok(Some(ColumnOption::Default(self.parse_expr()?)))
7753        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7754            && self.parse_keyword(Keyword::MATERIALIZED)
7755        {
7756            Ok(Some(ColumnOption::Materialized(self.parse_expr()?)))
7757        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7758            && self.parse_keyword(Keyword::ALIAS)
7759        {
7760            Ok(Some(ColumnOption::Alias(self.parse_expr()?)))
7761        } else if dialect_of!(self is ClickHouseDialect | GenericDialect)
7762            && self.parse_keyword(Keyword::EPHEMERAL)
7763        {
7764            // The expression is optional for the EPHEMERAL syntax, so we need to check
7765            // if the column definition has remaining tokens before parsing the expression.
7766            if matches!(self.peek_token().token, Token::Comma | Token::RParen) {
7767                Ok(Some(ColumnOption::Ephemeral(None)))
7768            } else {
7769                Ok(Some(ColumnOption::Ephemeral(Some(self.parse_expr()?))))
7770            }
7771        } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
7772            let characteristics = self.parse_constraint_characteristics()?;
7773            Ok(Some(ColumnOption::Unique {
7774                is_primary: true,
7775                characteristics,
7776            }))
7777        } else if self.parse_keyword(Keyword::UNIQUE) {
7778            let characteristics = self.parse_constraint_characteristics()?;
7779            Ok(Some(ColumnOption::Unique {
7780                is_primary: false,
7781                characteristics,
7782            }))
7783        } else if self.parse_keyword(Keyword::REFERENCES) {
7784            let foreign_table = self.parse_object_name(false)?;
7785            // PostgreSQL allows omitting the column list and
7786            // uses the primary key column of the foreign table by default
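            // e.g. (illustrative): REFERENCES parent_table ON DELETE CASCADE ON UPDATE SET NULL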
7787            let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
7788            let mut on_delete = None;
7789            let mut on_update = None;
7790            loop {
7791                if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
7792                    on_delete = Some(self.parse_referential_action()?);
7793                } else if on_update.is_none()
7794                    && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7795                {
7796                    on_update = Some(self.parse_referential_action()?);
7797                } else {
7798                    break;
7799                }
7800            }
7801            let characteristics = self.parse_constraint_characteristics()?;
7802
7803            Ok(Some(ColumnOption::ForeignKey {
7804                foreign_table,
7805                referred_columns,
7806                on_delete,
7807                on_update,
7808                characteristics,
7809            }))
7810        } else if self.parse_keyword(Keyword::CHECK) {
7811            self.expect_token(&Token::LParen)?;
7812            let expr = self.parse_expr()?;
7813            self.expect_token(&Token::RParen)?;
7814            Ok(Some(ColumnOption::Check(expr)))
7815        } else if self.parse_keyword(Keyword::AUTO_INCREMENT)
7816            && dialect_of!(self is MySqlDialect | GenericDialect)
7817        {
7818            // Support AUTO_INCREMENT for MySQL
7819            Ok(Some(ColumnOption::DialectSpecific(vec![
7820                Token::make_keyword("AUTO_INCREMENT"),
7821            ])))
7822        } else if self.parse_keyword(Keyword::AUTOINCREMENT)
7823            && dialect_of!(self is SQLiteDialect | GenericDialect)
7824        {
7825            // Support AUTOINCREMENT for SQLite
7826            Ok(Some(ColumnOption::DialectSpecific(vec![
7827                Token::make_keyword("AUTOINCREMENT"),
7828            ])))
7829        } else if self.parse_keyword(Keyword::ASC)
7830            && self.dialect.supports_asc_desc_in_column_definition()
7831        {
7832            // Support ASC for SQLite
7833            Ok(Some(ColumnOption::DialectSpecific(vec![
7834                Token::make_keyword("ASC"),
7835            ])))
7836        } else if self.parse_keyword(Keyword::DESC)
7837            && self.dialect.supports_asc_desc_in_column_definition()
7838        {
7839            // Support DESC for SQLite
7840            Ok(Some(ColumnOption::DialectSpecific(vec![
7841                Token::make_keyword("DESC"),
7842            ])))
7843        } else if self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
7844            && dialect_of!(self is MySqlDialect | GenericDialect)
7845        {
7846            let expr = self.parse_expr()?;
7847            Ok(Some(ColumnOption::OnUpdate(expr)))
7848        } else if self.parse_keyword(Keyword::GENERATED) {
7849            self.parse_optional_column_option_generated()
7850        } else if dialect_of!(self is BigQueryDialect | GenericDialect)
7851            && self.parse_keyword(Keyword::OPTIONS)
7852        {
7853            self.prev_token();
7854            Ok(Some(ColumnOption::Options(
7855                self.parse_options(Keyword::OPTIONS)?,
7856            )))
7857        } else if self.parse_keyword(Keyword::AS)
7858            && dialect_of!(self is MySqlDialect | SQLiteDialect | DuckDbDialect | GenericDialect)
7859        {
7860            self.parse_optional_column_option_as()
7861        } else if self.parse_keyword(Keyword::SRID)
7862            && dialect_of!(self is MySqlDialect | GenericDialect)
7863        {
7864            Ok(Some(ColumnOption::Srid(Box::new(self.parse_expr()?))))
7865        } else if self.parse_keyword(Keyword::IDENTITY)
7866            && dialect_of!(self is MsSqlDialect | GenericDialect)
7867        {
7868            let parameters = if self.consume_token(&Token::LParen) {
7869                let seed = self.parse_number()?;
7870                self.expect_token(&Token::Comma)?;
7871                let increment = self.parse_number()?;
7872                self.expect_token(&Token::RParen)?;
7873
7874                Some(IdentityPropertyFormatKind::FunctionCall(
7875                    IdentityParameters { seed, increment },
7876                ))
7877            } else {
7878                None
7879            };
7880            Ok(Some(ColumnOption::Identity(
7881                IdentityPropertyKind::Identity(IdentityProperty {
7882                    parameters,
7883                    order: None,
7884                }),
7885            )))
7886        } else if dialect_of!(self is SQLiteDialect | GenericDialect)
7887            && self.parse_keywords(&[Keyword::ON, Keyword::CONFLICT])
7888        {
7889            // Support ON CONFLICT for SQLite
7890            Ok(Some(ColumnOption::OnConflict(
7891                self.expect_one_of_keywords(&[
7892                    Keyword::ROLLBACK,
7893                    Keyword::ABORT,
7894                    Keyword::FAIL,
7895                    Keyword::IGNORE,
7896                    Keyword::REPLACE,
7897                ])?,
7898            )))
7899        } else {
7900            Ok(None)
7901        }
7902    }
7903
7904    pub(crate) fn parse_tag(&mut self) -> Result<Tag, ParserError> {
7905        let name = self.parse_object_name(false)?;
7906        self.expect_token(&Token::Eq)?;
7907        let value = self.parse_literal_string()?;
7908
7909        Ok(Tag::new(name, value))
7910    }
7911
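    /// Parse the remainder of a `GENERATED ...` column option; the `GENERATED`
    /// keyword itself has already been consumed by the caller. Illustrative
    /// examples of the forms handled below:
    ///
    /// ```text
    /// GENERATED ALWAYS AS IDENTITY (START WITH 1 INCREMENT BY 1)
    /// GENERATED BY DEFAULT AS IDENTITY
    /// GENERATED ALWAYS AS (a + b) STORED
    /// ```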
7912    fn parse_optional_column_option_generated(
7913        &mut self,
7914    ) -> Result<Option<ColumnOption>, ParserError> {
7915        if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS, Keyword::IDENTITY]) {
7916            let mut sequence_options = vec![];
7917            if self.expect_token(&Token::LParen).is_ok() {
7918                sequence_options = self.parse_create_sequence_options()?;
7919                self.expect_token(&Token::RParen)?;
7920            }
7921            Ok(Some(ColumnOption::Generated {
7922                generated_as: GeneratedAs::Always,
7923                sequence_options: Some(sequence_options),
7924                generation_expr: None,
7925                generation_expr_mode: None,
7926                generated_keyword: true,
7927            }))
7928        } else if self.parse_keywords(&[
7929            Keyword::BY,
7930            Keyword::DEFAULT,
7931            Keyword::AS,
7932            Keyword::IDENTITY,
7933        ]) {
7934            let mut sequence_options = vec![];
7935            if self.expect_token(&Token::LParen).is_ok() {
7936                sequence_options = self.parse_create_sequence_options()?;
7937                self.expect_token(&Token::RParen)?;
7938            }
7939            Ok(Some(ColumnOption::Generated {
7940                generated_as: GeneratedAs::ByDefault,
7941                sequence_options: Some(sequence_options),
7942                generation_expr: None,
7943                generation_expr_mode: None,
7944                generated_keyword: true,
7945            }))
7946        } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::AS]) {
7947            if self.expect_token(&Token::LParen).is_ok() {
7948                let expr = self.parse_expr()?;
7949                self.expect_token(&Token::RParen)?;
7950                let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7951                    Ok((
7952                        GeneratedAs::ExpStored,
7953                        Some(GeneratedExpressionMode::Stored),
7954                    ))
7955                } else if dialect_of!(self is PostgreSqlDialect) {
7956                    // Postgres' AS IDENTITY branches are above, this one needs STORED
7957                    self.expected("STORED", self.peek_token())
7958                } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7959                    Ok((GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual)))
7960                } else {
7961                    Ok((GeneratedAs::Always, None))
7962                }?;
7963
7964                Ok(Some(ColumnOption::Generated {
7965                    generated_as: gen_as,
7966                    sequence_options: None,
7967                    generation_expr: Some(expr),
7968                    generation_expr_mode: expr_mode,
7969                    generated_keyword: true,
7970                }))
7971            } else {
7972                Ok(None)
7973            }
7974        } else {
7975            Ok(None)
7976        }
7977    }
7978
7979    fn parse_optional_column_option_as(&mut self) -> Result<Option<ColumnOption>, ParserError> {
7980        // Some DBs allow 'AS (expr)', shorthand for GENERATED ALWAYS AS
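        // e.g. (illustrative): `total INT AS (price * qty) STORED`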
7981        self.expect_token(&Token::LParen)?;
7982        let expr = self.parse_expr()?;
7983        self.expect_token(&Token::RParen)?;
7984
7985        let (gen_as, expr_mode) = if self.parse_keywords(&[Keyword::STORED]) {
7986            (
7987                GeneratedAs::ExpStored,
7988                Some(GeneratedExpressionMode::Stored),
7989            )
7990        } else if self.parse_keywords(&[Keyword::VIRTUAL]) {
7991            (GeneratedAs::Always, Some(GeneratedExpressionMode::Virtual))
7992        } else {
7993            (GeneratedAs::Always, None)
7994        };
7995
7996        Ok(Some(ColumnOption::Generated {
7997            generated_as: gen_as,
7998            sequence_options: None,
7999            generation_expr: Some(expr),
8000            generation_expr_mode: expr_mode,
8001            generated_keyword: false,
8002        }))
8003    }
8004
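    /// Parse an optional Hive-style clustering clause, e.g. (illustrative):
    ///
    /// ```text
    /// CLUSTERED BY (user_id) SORTED BY (ts DESC) INTO 32 BUCKETS
    /// ```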
8005    pub fn parse_optional_clustered_by(&mut self) -> Result<Option<ClusteredBy>, ParserError> {
8006        let clustered_by = if dialect_of!(self is HiveDialect|GenericDialect)
8007            && self.parse_keywords(&[Keyword::CLUSTERED, Keyword::BY])
8008        {
8009            let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8010
8011            let sorted_by = if self.parse_keywords(&[Keyword::SORTED, Keyword::BY]) {
8012                self.expect_token(&Token::LParen)?;
8013                let sorted_by_columns = self.parse_comma_separated(|p| p.parse_order_by_expr())?;
8014                self.expect_token(&Token::RParen)?;
8015                Some(sorted_by_columns)
8016            } else {
8017                None
8018            };
8019
8020            self.expect_keyword_is(Keyword::INTO)?;
8021            let num_buckets = self.parse_number_value()?.value;
8022            self.expect_keyword_is(Keyword::BUCKETS)?;
8023            Some(ClusteredBy {
8024                columns,
8025                sorted_by,
8026                num_buckets,
8027            })
8028        } else {
8029            None
8030        };
8031        Ok(clustered_by)
8032    }
8033
8034    pub fn parse_referential_action(&mut self) -> Result<ReferentialAction, ParserError> {
8035        if self.parse_keyword(Keyword::RESTRICT) {
8036            Ok(ReferentialAction::Restrict)
8037        } else if self.parse_keyword(Keyword::CASCADE) {
8038            Ok(ReferentialAction::Cascade)
8039        } else if self.parse_keywords(&[Keyword::SET, Keyword::NULL]) {
8040            Ok(ReferentialAction::SetNull)
8041        } else if self.parse_keywords(&[Keyword::NO, Keyword::ACTION]) {
8042            Ok(ReferentialAction::NoAction)
8043        } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8044            Ok(ReferentialAction::SetDefault)
8045        } else {
8046            self.expected(
8047                "one of RESTRICT, CASCADE, SET NULL, NO ACTION or SET DEFAULT",
8048                self.peek_token(),
8049            )
8050        }
8051    }
8052
8053    pub fn parse_constraint_characteristics(
8054        &mut self,
8055    ) -> Result<Option<ConstraintCharacteristics>, ParserError> {
8056        let mut cc = ConstraintCharacteristics::default();
8057
8058        loop {
8059            if cc.deferrable.is_none() && self.parse_keywords(&[Keyword::NOT, Keyword::DEFERRABLE])
8060            {
8061                cc.deferrable = Some(false);
8062            } else if cc.deferrable.is_none() && self.parse_keyword(Keyword::DEFERRABLE) {
8063                cc.deferrable = Some(true);
8064            } else if cc.initially.is_none() && self.parse_keyword(Keyword::INITIALLY) {
8065                if self.parse_keyword(Keyword::DEFERRED) {
8066                    cc.initially = Some(DeferrableInitial::Deferred);
8067                } else if self.parse_keyword(Keyword::IMMEDIATE) {
8068                    cc.initially = Some(DeferrableInitial::Immediate);
8069                } else {
8070                    self.expected("one of DEFERRED or IMMEDIATE", self.peek_token())?;
8071                }
8072            } else if cc.enforced.is_none() && self.parse_keyword(Keyword::ENFORCED) {
8073                cc.enforced = Some(true);
8074            } else if cc.enforced.is_none()
8075                && self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED])
8076            {
8077                cc.enforced = Some(false);
8078            } else {
8079                break;
8080            }
8081        }
8082
8083        if cc.deferrable.is_some() || cc.initially.is_some() || cc.enforced.is_some() {
8084            Ok(Some(cc))
8085        } else {
8086            Ok(None)
8087        }
8088    }
8089
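    /// Optionally parse a table constraint. Each line below is a separate,
    /// illustrative constraint form (names are placeholders):
    /// ```sql
    /// CONSTRAINT uq_email UNIQUE (email)
    /// PRIMARY KEY (id)
    /// FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
    /// CHECK (qty > 0)
    /// ```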
8090    pub fn parse_optional_table_constraint(
8091        &mut self,
8092    ) -> Result<Option<TableConstraint>, ParserError> {
8093        let name = if self.parse_keyword(Keyword::CONSTRAINT) {
8094            Some(self.parse_identifier()?)
8095        } else {
8096            None
8097        };
8098
8099        let next_token = self.next_token();
8100        match next_token.token {
8101            Token::Word(w) if w.keyword == Keyword::UNIQUE => {
8102                let index_type_display = self.parse_index_type_display();
8103                if !dialect_of!(self is GenericDialect | MySqlDialect)
8104                    && !index_type_display.is_none()
8105                {
8106                    return self
8107                        .expected("`index_name` or `(column_name [, ...])`", self.peek_token());
8108                }
8109
8110                let nulls_distinct = self.parse_optional_nulls_distinct()?;
8111
8112                // optional index name
8113                let index_name = self.parse_optional_ident()?;
8114                let index_type = self.parse_optional_using_then_index_type()?;
8115
8116                let columns = self.parse_parenthesized_index_column_list()?;
8117                let index_options = self.parse_index_options()?;
8118                let characteristics = self.parse_constraint_characteristics()?;
8119                Ok(Some(TableConstraint::Unique {
8120                    name,
8121                    index_name,
8122                    index_type_display,
8123                    index_type,
8124                    columns,
8125                    index_options,
8126                    characteristics,
8127                    nulls_distinct,
8128                }))
8129            }
8130            Token::Word(w) if w.keyword == Keyword::PRIMARY => {
8131                // `PRIMARY` must always be followed by `KEY`
8132                self.expect_keyword_is(Keyword::KEY)?;
8133
8134                // optional index name
8135                let index_name = self.parse_optional_ident()?;
8136                let index_type = self.parse_optional_using_then_index_type()?;
8137
8138                let columns = self.parse_parenthesized_index_column_list()?;
8139                let index_options = self.parse_index_options()?;
8140                let characteristics = self.parse_constraint_characteristics()?;
8141                Ok(Some(TableConstraint::PrimaryKey {
8142                    name,
8143                    index_name,
8144                    index_type,
8145                    columns,
8146                    index_options,
8147                    characteristics,
8148                }))
8149            }
8150            Token::Word(w) if w.keyword == Keyword::FOREIGN => {
8151                self.expect_keyword_is(Keyword::KEY)?;
8152                let index_name = self.parse_optional_ident()?;
8153                let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
8154                self.expect_keyword_is(Keyword::REFERENCES)?;
8155                let foreign_table = self.parse_object_name(false)?;
8156                let referred_columns = self.parse_parenthesized_column_list(Optional, false)?;
8157                let mut on_delete = None;
8158                let mut on_update = None;
8159                loop {
8160                    if on_delete.is_none() && self.parse_keywords(&[Keyword::ON, Keyword::DELETE]) {
8161                        on_delete = Some(self.parse_referential_action()?);
8162                    } else if on_update.is_none()
8163                        && self.parse_keywords(&[Keyword::ON, Keyword::UPDATE])
8164                    {
8165                        on_update = Some(self.parse_referential_action()?);
8166                    } else {
8167                        break;
8168                    }
8169                }
8170
8171                let characteristics = self.parse_constraint_characteristics()?;
8172
8173                Ok(Some(TableConstraint::ForeignKey {
8174                    name,
8175                    index_name,
8176                    columns,
8177                    foreign_table,
8178                    referred_columns,
8179                    on_delete,
8180                    on_update,
8181                    characteristics,
8182                }))
8183            }
8184            Token::Word(w) if w.keyword == Keyword::CHECK => {
8185                self.expect_token(&Token::LParen)?;
8186                let expr = Box::new(self.parse_expr()?);
8187                self.expect_token(&Token::RParen)?;
8188
8189                let enforced = if self.parse_keyword(Keyword::ENFORCED) {
8190                    Some(true)
8191                } else if self.parse_keywords(&[Keyword::NOT, Keyword::ENFORCED]) {
8192                    Some(false)
8193                } else {
8194                    None
8195                };
8196
8197                Ok(Some(TableConstraint::Check {
8198                    name,
8199                    expr,
8200                    enforced,
8201                }))
8202            }
8203            Token::Word(w)
8204                if (w.keyword == Keyword::INDEX || w.keyword == Keyword::KEY)
8205                    && dialect_of!(self is GenericDialect | MySqlDialect)
8206                    && name.is_none() =>
8207            {
8208                let display_as_key = w.keyword == Keyword::KEY;
8209
8210                let name = match self.peek_token().token {
8211                    Token::Word(word) if word.keyword == Keyword::USING => None,
8212                    _ => self.parse_optional_ident()?,
8213                };
8214
8215                let index_type = self.parse_optional_using_then_index_type()?;
8216                let columns = self.parse_parenthesized_index_column_list()?;
8217
8218                Ok(Some(TableConstraint::Index {
8219                    display_as_key,
8220                    name,
8221                    index_type,
8222                    columns,
8223                }))
8224            }
8225            Token::Word(w)
8226                if (w.keyword == Keyword::FULLTEXT || w.keyword == Keyword::SPATIAL)
8227                    && dialect_of!(self is GenericDialect | MySqlDialect) =>
8228            {
8229                if let Some(name) = name {
8230                    return self.expected(
8231                        "FULLTEXT or SPATIAL option without constraint name",
8232                        TokenWithSpan {
8233                            token: Token::make_keyword(&name.to_string()),
8234                            span: next_token.span,
8235                        },
8236                    );
8237                }
8238
8239                let fulltext = w.keyword == Keyword::FULLTEXT;
8240
8241                let index_type_display = self.parse_index_type_display();
8242
8243                let opt_index_name = self.parse_optional_ident()?;
8244
8245                let columns = self.parse_parenthesized_index_column_list()?;
8246
8247                Ok(Some(TableConstraint::FulltextOrSpatial {
8248                    fulltext,
8249                    index_type_display,
8250                    opt_index_name,
8251                    columns,
8252                }))
8253            }
8254            _ => {
8255                if name.is_some() {
8256                    self.expected("PRIMARY, UNIQUE, FOREIGN, or CHECK", next_token)
8257                } else {
8258                    self.prev_token();
8259                    Ok(None)
8260                }
8261            }
8262        }
8263    }
8264
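    /// Parse the optional `NULLS [NOT] DISTINCT` modifier of a `UNIQUE`
    /// constraint, e.g. the `NULLS NOT DISTINCT` in this illustrative constraint:
    /// ```sql
    /// UNIQUE NULLS NOT DISTINCT (email)
    /// ```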
8265    fn parse_optional_nulls_distinct(&mut self) -> Result<NullsDistinctOption, ParserError> {
8266        Ok(if self.parse_keyword(Keyword::NULLS) {
8267            let not = self.parse_keyword(Keyword::NOT);
8268            self.expect_keyword_is(Keyword::DISTINCT)?;
8269            if not {
8270                NullsDistinctOption::NotDistinct
8271            } else {
8272                NullsDistinctOption::Distinct
8273            }
8274        } else {
8275            NullsDistinctOption::None
8276        })
8277    }
8278
8279    pub fn maybe_parse_options(
8280        &mut self,
8281        keyword: Keyword,
8282    ) -> Result<Option<Vec<SqlOption>>, ParserError> {
8283        if let Token::Word(word) = self.peek_token().token {
8284            if word.keyword == keyword {
8285                return Ok(Some(self.parse_options(keyword)?));
8286            }
8287        };
8288        Ok(None)
8289    }
8290
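    /// Parse a parenthesized, comma-separated list of [SqlOption]s introduced
    /// by `keyword`, e.g. with [Keyword::WITH] (option names are illustrative):
    /// ```sql
    /// WITH (FILLFACTOR = 70, ORIENTATION = 'COLUMN')
    /// ```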
8291    pub fn parse_options(&mut self, keyword: Keyword) -> Result<Vec<SqlOption>, ParserError> {
8292        if self.parse_keyword(keyword) {
8293            self.expect_token(&Token::LParen)?;
8294            let options = self.parse_comma_separated0(Parser::parse_sql_option, Token::RParen)?;
8295            self.expect_token(&Token::RParen)?;
8296            Ok(options)
8297        } else {
8298            Ok(vec![])
8299        }
8300    }
8301
8302    pub fn parse_options_with_keywords(
8303        &mut self,
8304        keywords: &[Keyword],
8305    ) -> Result<Vec<SqlOption>, ParserError> {
8306        if self.parse_keywords(keywords) {
8307            self.expect_token(&Token::LParen)?;
8308            let options = self.parse_comma_separated(Parser::parse_sql_option)?;
8309            self.expect_token(&Token::RParen)?;
8310            Ok(options)
8311        } else {
8312            Ok(vec![])
8313        }
8314    }
8315
8316    pub fn parse_index_type(&mut self) -> Result<IndexType, ParserError> {
8317        Ok(if self.parse_keyword(Keyword::BTREE) {
8318            IndexType::BTree
8319        } else if self.parse_keyword(Keyword::HASH) {
8320            IndexType::Hash
8321        } else if self.parse_keyword(Keyword::GIN) {
8322            IndexType::GIN
8323        } else if self.parse_keyword(Keyword::GIST) {
8324            IndexType::GiST
8325        } else if self.parse_keyword(Keyword::SPGIST) {
8326            IndexType::SPGiST
8327        } else if self.parse_keyword(Keyword::BRIN) {
8328            IndexType::BRIN
8329        } else if self.parse_keyword(Keyword::BLOOM) {
8330            IndexType::Bloom
8331        } else {
8332            IndexType::Custom(self.parse_identifier()?)
8333        })
8334    }
8335
8336    /// Optionally parse the `USING` keyword, followed by an [IndexType].
8337    /// Example:
8338    /// ```sql
8339    /// USING BTREE (name, age DESC)
8340    /// ```
8341    pub fn parse_optional_using_then_index_type(
8342        &mut self,
8343    ) -> Result<Option<IndexType>, ParserError> {
8344        if self.parse_keyword(Keyword::USING) {
8345            Ok(Some(self.parse_index_type()?))
8346        } else {
8347            Ok(None)
8348        }
8349    }
8350
8351    /// Parse an optional identifier, typically a name such as
8352    /// `window_name` or `index_name`.
8353    pub fn parse_optional_ident(&mut self) -> Result<Option<Ident>, ParserError> {
8354        self.maybe_parse(|parser| parser.parse_identifier())
8355    }
8356
8357    #[must_use]
8358    pub fn parse_index_type_display(&mut self) -> KeyOrIndexDisplay {
8359        if self.parse_keyword(Keyword::KEY) {
8360            KeyOrIndexDisplay::Key
8361        } else if self.parse_keyword(Keyword::INDEX) {
8362            KeyOrIndexDisplay::Index
8363        } else {
8364            KeyOrIndexDisplay::None
8365        }
8366    }
8367
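    /// Optionally parse a single index option, either `USING <index type>` or
    /// `COMMENT '<string>'`, e.g. the trailing options of this illustrative
    /// MySQL-style constraint:
    /// ```sql
    /// UNIQUE KEY uq_email (email) USING BTREE COMMENT 'unique email index'
    /// ```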
8368    pub fn parse_optional_index_option(&mut self) -> Result<Option<IndexOption>, ParserError> {
8369        if let Some(index_type) = self.parse_optional_using_then_index_type()? {
8370            Ok(Some(IndexOption::Using(index_type)))
8371        } else if self.parse_keyword(Keyword::COMMENT) {
8372            let s = self.parse_literal_string()?;
8373            Ok(Some(IndexOption::Comment(s)))
8374        } else {
8375            Ok(None)
8376        }
8377    }
8378
8379    pub fn parse_index_options(&mut self) -> Result<Vec<IndexOption>, ParserError> {
8380        let mut options = Vec::new();
8381
8382        loop {
8383            match self.parse_optional_index_option()? {
8384                Some(index_option) => options.push(index_option),
8385                None => return Ok(options),
8386            }
8387        }
8388    }
8389
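    /// Parse a single [SqlOption]: on MSSQL-style dialects `HEAP`,
    /// `PARTITION (..)`, or `CLUSTERED ..`; otherwise a `key = value` pair.
    /// Each line below is a separate, illustrative option:
    /// ```sql
    /// FILLFACTOR = 70
    /// HEAP
    /// CLUSTERED INDEX (id ASC)
    /// ```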
8390    pub fn parse_sql_option(&mut self) -> Result<SqlOption, ParserError> {
8391        let is_mssql = dialect_of!(self is MsSqlDialect|GenericDialect);
8392
8393        match self.peek_token().token {
8394            Token::Word(w) if w.keyword == Keyword::HEAP && is_mssql => {
8395                Ok(SqlOption::Ident(self.parse_identifier()?))
8396            }
8397            Token::Word(w) if w.keyword == Keyword::PARTITION && is_mssql => {
8398                self.parse_option_partition()
8399            }
8400            Token::Word(w) if w.keyword == Keyword::CLUSTERED && is_mssql => {
8401                self.parse_option_clustered()
8402            }
8403            _ => {
8404                let name = self.parse_identifier()?;
8405                self.expect_token(&Token::Eq)?;
8406                let value = self.parse_expr()?;
8407
8408                Ok(SqlOption::KeyValue { key: name, value })
8409            }
8410        }
8411    }
8412
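    /// Parse an MSSQL `CLUSTERED ..` table option. The accepted forms, shown
    /// with illustrative column names:
    /// ```sql
    /// CLUSTERED COLUMNSTORE INDEX
    /// CLUSTERED COLUMNSTORE INDEX ORDER (order_date)
    /// CLUSTERED INDEX (customer_id ASC, order_date DESC)
    /// ```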
8413    pub fn parse_option_clustered(&mut self) -> Result<SqlOption, ParserError> {
8414        if self.parse_keywords(&[
8415            Keyword::CLUSTERED,
8416            Keyword::COLUMNSTORE,
8417            Keyword::INDEX,
8418            Keyword::ORDER,
8419        ]) {
8420            Ok(SqlOption::Clustered(
8421                TableOptionsClustered::ColumnstoreIndexOrder(
8422                    self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
8423                ),
8424            ))
8425        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::COLUMNSTORE, Keyword::INDEX]) {
8426            Ok(SqlOption::Clustered(
8427                TableOptionsClustered::ColumnstoreIndex,
8428            ))
8429        } else if self.parse_keywords(&[Keyword::CLUSTERED, Keyword::INDEX]) {
8430            self.expect_token(&Token::LParen)?;
8431
8432            let columns = self.parse_comma_separated(|p| {
8433                let name = p.parse_identifier()?;
8434                let asc = p.parse_asc_desc();
8435
8436                Ok(ClusteredIndex { name, asc })
8437            })?;
8438
8439            self.expect_token(&Token::RParen)?;
8440
8441            Ok(SqlOption::Clustered(TableOptionsClustered::Index(columns)))
8442        } else {
8443            Err(ParserError::ParserError(
8444                "invalid CLUSTERED sequence".to_string(),
8445            ))
8446        }
8447    }
8448
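    /// Parse an MSSQL `PARTITION` table option, e.g. (column and boundary
    /// values are illustrative):
    /// ```sql
    /// PARTITION (order_date RANGE RIGHT FOR VALUES ('2024-01-01', '2024-02-01'))
    /// ```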
8449    pub fn parse_option_partition(&mut self) -> Result<SqlOption, ParserError> {
8450        self.expect_keyword_is(Keyword::PARTITION)?;
8451        self.expect_token(&Token::LParen)?;
8452        let column_name = self.parse_identifier()?;
8453
8454        self.expect_keyword_is(Keyword::RANGE)?;
8455        let range_direction = if self.parse_keyword(Keyword::LEFT) {
8456            Some(PartitionRangeDirection::Left)
8457        } else if self.parse_keyword(Keyword::RIGHT) {
8458            Some(PartitionRangeDirection::Right)
8459        } else {
8460            None
8461        };
8462
8463        self.expect_keywords(&[Keyword::FOR, Keyword::VALUES])?;
8464        self.expect_token(&Token::LParen)?;
8465
8466        let for_values = self.parse_comma_separated(Parser::parse_expr)?;
8467
8468        self.expect_token(&Token::RParen)?;
8469        self.expect_token(&Token::RParen)?;
8470
8471        Ok(SqlOption::Partition {
8472            column_name,
8473            range_direction,
8474            for_values,
8475        })
8476    }
8477
8478    pub fn parse_partition(&mut self) -> Result<Partition, ParserError> {
8479        self.expect_token(&Token::LParen)?;
8480        let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8481        self.expect_token(&Token::RParen)?;
8482        Ok(Partition::Partitions(partitions))
8483    }
8484
8485    pub fn parse_projection_select(&mut self) -> Result<ProjectionSelect, ParserError> {
8486        self.expect_token(&Token::LParen)?;
8487        self.expect_keyword_is(Keyword::SELECT)?;
8488        let projection = self.parse_projection()?;
8489        let group_by = self.parse_optional_group_by()?;
8490        let order_by = self.parse_optional_order_by()?;
8491        self.expect_token(&Token::RParen)?;
8492        Ok(ProjectionSelect {
8493            projection,
8494            group_by,
8495            order_by,
8496        })
8497    }
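
    /// Parse a ClickHouse-style `ALTER TABLE .. ADD PROJECTION` operation; the
    /// leading `ADD PROJECTION` keywords are consumed by
    /// [Parser::parse_alter_table_operation]. An illustrative example (names
    /// are placeholders):
    /// ```sql
    /// ADD PROJECTION IF NOT EXISTS my_projection (SELECT user_id, count(*) GROUP BY user_id)
    /// ```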
8498    pub fn parse_alter_table_add_projection(&mut self) -> Result<AlterTableOperation, ParserError> {
8499        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8500        let name = self.parse_identifier()?;
8501        let query = self.parse_projection_select()?;
8502        Ok(AlterTableOperation::AddProjection {
8503            if_not_exists,
8504            name,
8505            select: query,
8506        })
8507    }
8508
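    /// Parse a single `ALTER TABLE` operation. Each line below is a separate,
    /// illustrative operation (names are placeholders; some forms are
    /// dialect-specific):
    /// ```sql
    /// ADD COLUMN created_at TIMESTAMP
    /// RENAME COLUMN old_name TO new_name
    /// DROP CONSTRAINT IF EXISTS fk_user
    /// ALTER COLUMN price SET DATA TYPE DECIMAL(10, 2)
    /// ```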
8509    pub fn parse_alter_table_operation(&mut self) -> Result<AlterTableOperation, ParserError> {
8510        let operation = if self.parse_keyword(Keyword::ADD) {
8511            if let Some(constraint) = self.parse_optional_table_constraint()? {
8512                let not_valid = self.parse_keywords(&[Keyword::NOT, Keyword::VALID]);
8513                AlterTableOperation::AddConstraint {
8514                    constraint,
8515                    not_valid,
8516                }
8517            } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8518                && self.parse_keyword(Keyword::PROJECTION)
8519            {
8520                return self.parse_alter_table_add_projection();
8521            } else {
8522                let if_not_exists =
8523                    self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
8524                let mut new_partitions = vec![];
8525                loop {
8526                    if self.parse_keyword(Keyword::PARTITION) {
8527                        new_partitions.push(self.parse_partition()?);
8528                    } else {
8529                        break;
8530                    }
8531                }
8532                if !new_partitions.is_empty() {
8533                    AlterTableOperation::AddPartitions {
8534                        if_not_exists,
8535                        new_partitions,
8536                    }
8537                } else {
8538                    let column_keyword = self.parse_keyword(Keyword::COLUMN);
8539
8540                    let if_not_exists = if dialect_of!(self is PostgreSqlDialect | BigQueryDialect | DuckDbDialect | GenericDialect)
8541                    {
8542                        self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS])
8543                            || if_not_exists
8544                    } else {
8545                        false
8546                    };
8547
8548                    let column_def = self.parse_column_def()?;
8549
8550                    let column_position = self.parse_column_position()?;
8551
8552                    AlterTableOperation::AddColumn {
8553                        column_keyword,
8554                        if_not_exists,
8555                        column_def,
8556                        column_position,
8557                    }
8558                }
8559            }
8560        } else if self.parse_keyword(Keyword::RENAME) {
8561            if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::CONSTRAINT) {
8562                let old_name = self.parse_identifier()?;
8563                self.expect_keyword_is(Keyword::TO)?;
8564                let new_name = self.parse_identifier()?;
8565                AlterTableOperation::RenameConstraint { old_name, new_name }
8566            } else if self.parse_keyword(Keyword::TO) {
8567                let table_name = self.parse_object_name(false)?;
8568                AlterTableOperation::RenameTable { table_name }
8569            } else {
8570                let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8571                let old_column_name = self.parse_identifier()?;
8572                self.expect_keyword_is(Keyword::TO)?;
8573                let new_column_name = self.parse_identifier()?;
8574                AlterTableOperation::RenameColumn {
8575                    old_column_name,
8576                    new_column_name,
8577                }
8578            }
8579        } else if self.parse_keyword(Keyword::DISABLE) {
8580            if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8581                AlterTableOperation::DisableRowLevelSecurity {}
8582            } else if self.parse_keyword(Keyword::RULE) {
8583                let name = self.parse_identifier()?;
8584                AlterTableOperation::DisableRule { name }
8585            } else if self.parse_keyword(Keyword::TRIGGER) {
8586                let name = self.parse_identifier()?;
8587                AlterTableOperation::DisableTrigger { name }
8588            } else {
8589                return self.expected(
8590                    "ROW LEVEL SECURITY, RULE, or TRIGGER after DISABLE",
8591                    self.peek_token(),
8592                );
8593            }
8594        } else if self.parse_keyword(Keyword::ENABLE) {
8595            if self.parse_keywords(&[Keyword::ALWAYS, Keyword::RULE]) {
8596                let name = self.parse_identifier()?;
8597                AlterTableOperation::EnableAlwaysRule { name }
8598            } else if self.parse_keywords(&[Keyword::ALWAYS, Keyword::TRIGGER]) {
8599                let name = self.parse_identifier()?;
8600                AlterTableOperation::EnableAlwaysTrigger { name }
8601            } else if self.parse_keywords(&[Keyword::ROW, Keyword::LEVEL, Keyword::SECURITY]) {
8602                AlterTableOperation::EnableRowLevelSecurity {}
8603            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::RULE]) {
8604                let name = self.parse_identifier()?;
8605                AlterTableOperation::EnableReplicaRule { name }
8606            } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::TRIGGER]) {
8607                let name = self.parse_identifier()?;
8608                AlterTableOperation::EnableReplicaTrigger { name }
8609            } else if self.parse_keyword(Keyword::RULE) {
8610                let name = self.parse_identifier()?;
8611                AlterTableOperation::EnableRule { name }
8612            } else if self.parse_keyword(Keyword::TRIGGER) {
8613                let name = self.parse_identifier()?;
8614                AlterTableOperation::EnableTrigger { name }
8615            } else {
8616                return self.expected(
8617                    "ALWAYS, REPLICA, ROW LEVEL SECURITY, RULE, or TRIGGER after ENABLE",
8618                    self.peek_token(),
8619                );
8620            }
8621        } else if self.parse_keywords(&[Keyword::CLEAR, Keyword::PROJECTION])
8622            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8623        {
8624            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8625            let name = self.parse_identifier()?;
8626            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8627                Some(self.parse_identifier()?)
8628            } else {
8629                None
8630            };
8631            AlterTableOperation::ClearProjection {
8632                if_exists,
8633                name,
8634                partition,
8635            }
8636        } else if self.parse_keywords(&[Keyword::MATERIALIZE, Keyword::PROJECTION])
8637            && dialect_of!(self is ClickHouseDialect|GenericDialect)
8638        {
8639            let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8640            let name = self.parse_identifier()?;
8641            let partition = if self.parse_keywords(&[Keyword::IN, Keyword::PARTITION]) {
8642                Some(self.parse_identifier()?)
8643            } else {
8644                None
8645            };
8646            AlterTableOperation::MaterializeProjection {
8647                if_exists,
8648                name,
8649                partition,
8650            }
8651        } else if self.parse_keyword(Keyword::DROP) {
8652            if self.parse_keywords(&[Keyword::IF, Keyword::EXISTS, Keyword::PARTITION]) {
8653                self.expect_token(&Token::LParen)?;
8654                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8655                self.expect_token(&Token::RParen)?;
8656                AlterTableOperation::DropPartitions {
8657                    partitions,
8658                    if_exists: true,
8659                }
8660            } else if self.parse_keyword(Keyword::PARTITION) {
8661                self.expect_token(&Token::LParen)?;
8662                let partitions = self.parse_comma_separated(Parser::parse_expr)?;
8663                self.expect_token(&Token::RParen)?;
8664                AlterTableOperation::DropPartitions {
8665                    partitions,
8666                    if_exists: false,
8667                }
8668            } else if self.parse_keyword(Keyword::CONSTRAINT) {
8669                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8670                let name = self.parse_identifier()?;
8671                let drop_behavior = self.parse_optional_drop_behavior();
8672                AlterTableOperation::DropConstraint {
8673                    if_exists,
8674                    name,
8675                    drop_behavior,
8676                }
8677            } else if self.parse_keywords(&[Keyword::PRIMARY, Keyword::KEY]) {
8678                AlterTableOperation::DropPrimaryKey
8679            } else if self.parse_keywords(&[Keyword::FOREIGN, Keyword::KEY]) {
8680                let name = self.parse_identifier()?;
8681                AlterTableOperation::DropForeignKey { name }
8682            } else if self.parse_keyword(Keyword::INDEX) {
8683                let name = self.parse_identifier()?;
8684                AlterTableOperation::DropIndex { name }
8685            } else if self.parse_keyword(Keyword::PROJECTION)
8686                && dialect_of!(self is ClickHouseDialect|GenericDialect)
8687            {
8688                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8689                let name = self.parse_identifier()?;
8690                AlterTableOperation::DropProjection { if_exists, name }
8691            } else if self.parse_keywords(&[Keyword::CLUSTERING, Keyword::KEY]) {
8692                AlterTableOperation::DropClusteringKey
8693            } else {
8694                let has_column_keyword = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8695                let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
8696                let column_names = if self.dialect.supports_comma_separated_drop_column_list() {
8697                    self.parse_comma_separated(Parser::parse_identifier)?
8698                } else {
8699                    vec![self.parse_identifier()?]
8700                };
8701                let drop_behavior = self.parse_optional_drop_behavior();
8702                AlterTableOperation::DropColumn {
8703                    has_column_keyword,
8704                    column_names,
8705                    if_exists,
8706                    drop_behavior,
8707                }
8708            }
8709        } else if self.parse_keyword(Keyword::PARTITION) {
8710            self.expect_token(&Token::LParen)?;
8711            let before = self.parse_comma_separated(Parser::parse_expr)?;
8712            self.expect_token(&Token::RParen)?;
8713            self.expect_keyword_is(Keyword::RENAME)?;
8714            self.expect_keywords(&[Keyword::TO, Keyword::PARTITION])?;
8715            self.expect_token(&Token::LParen)?;
8716            let renames = self.parse_comma_separated(Parser::parse_expr)?;
8717            self.expect_token(&Token::RParen)?;
8718            AlterTableOperation::RenamePartitions {
8719                old_partitions: before,
8720                new_partitions: renames,
8721            }
8722        } else if self.parse_keyword(Keyword::CHANGE) {
8723            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8724            let old_name = self.parse_identifier()?;
8725            let new_name = self.parse_identifier()?;
8726            let data_type = self.parse_data_type()?;
8727            let mut options = vec![];
8728            while let Some(option) = self.parse_optional_column_option()? {
8729                options.push(option);
8730            }
8731
8732            let column_position = self.parse_column_position()?;
8733
8734            AlterTableOperation::ChangeColumn {
8735                old_name,
8736                new_name,
8737                data_type,
8738                options,
8739                column_position,
8740            }
8741        } else if self.parse_keyword(Keyword::MODIFY) {
8742            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8743            let col_name = self.parse_identifier()?;
8744            let data_type = self.parse_data_type()?;
8745            let mut options = vec![];
8746            while let Some(option) = self.parse_optional_column_option()? {
8747                options.push(option);
8748            }
8749
8750            let column_position = self.parse_column_position()?;
8751
8752            AlterTableOperation::ModifyColumn {
8753                col_name,
8754                data_type,
8755                options,
8756                column_position,
8757            }
8758        } else if self.parse_keyword(Keyword::ALTER) {
8759            let _ = self.parse_keyword(Keyword::COLUMN); // [ COLUMN ]
8760            let column_name = self.parse_identifier()?;
8761            let is_postgresql = dialect_of!(self is PostgreSqlDialect);
8762
8763            let op: AlterColumnOperation = if self.parse_keywords(&[
8764                Keyword::SET,
8765                Keyword::NOT,
8766                Keyword::NULL,
8767            ]) {
8768                AlterColumnOperation::SetNotNull {}
8769            } else if self.parse_keywords(&[Keyword::DROP, Keyword::NOT, Keyword::NULL]) {
8770                AlterColumnOperation::DropNotNull {}
8771            } else if self.parse_keywords(&[Keyword::SET, Keyword::DEFAULT]) {
8772                AlterColumnOperation::SetDefault {
8773                    value: self.parse_expr()?,
8774                }
8775            } else if self.parse_keywords(&[Keyword::DROP, Keyword::DEFAULT]) {
8776                AlterColumnOperation::DropDefault {}
8777            } else if self.parse_keywords(&[Keyword::SET, Keyword::DATA, Keyword::TYPE]) {
8778                self.parse_set_data_type(true)?
8779            } else if self.parse_keyword(Keyword::TYPE) {
8780                self.parse_set_data_type(false)?
8781            } else if self.parse_keywords(&[Keyword::ADD, Keyword::GENERATED]) {
8782                let generated_as = if self.parse_keyword(Keyword::ALWAYS) {
8783                    Some(GeneratedAs::Always)
8784                } else if self.parse_keywords(&[Keyword::BY, Keyword::DEFAULT]) {
8785                    Some(GeneratedAs::ByDefault)
8786                } else {
8787                    None
8788                };
8789
8790                self.expect_keywords(&[Keyword::AS, Keyword::IDENTITY])?;
8791
8792                let mut sequence_options: Option<Vec<SequenceOptions>> = None;
8793
8794                if self.peek_token().token == Token::LParen {
8795                    self.expect_token(&Token::LParen)?;
8796                    sequence_options = Some(self.parse_create_sequence_options()?);
8797                    self.expect_token(&Token::RParen)?;
8798                }
8799
8800                AlterColumnOperation::AddGenerated {
8801                    generated_as,
8802                    sequence_options,
8803                }
8804            } else {
8805                let message = if is_postgresql {
8806                    "SET/DROP NOT NULL, SET DEFAULT, SET DATA TYPE, or ADD GENERATED after ALTER COLUMN"
8807                } else {
8808                    "SET/DROP NOT NULL, SET DEFAULT, or SET DATA TYPE after ALTER COLUMN"
8809                };
8810
8811                return self.expected(message, self.peek_token());
8812            };
8813            AlterTableOperation::AlterColumn { column_name, op }
8814        } else if self.parse_keyword(Keyword::SWAP) {
8815            self.expect_keyword_is(Keyword::WITH)?;
8816            let table_name = self.parse_object_name(false)?;
8817            AlterTableOperation::SwapWith { table_name }
8818        } else if dialect_of!(self is PostgreSqlDialect | GenericDialect)
8819            && self.parse_keywords(&[Keyword::OWNER, Keyword::TO])
8820        {
8821            let new_owner = self.parse_owner()?;
8822            AlterTableOperation::OwnerTo { new_owner }
8823        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8824            && self.parse_keyword(Keyword::ATTACH)
8825        {
8826            AlterTableOperation::AttachPartition {
8827                partition: self.parse_part_or_partition()?,
8828            }
8829        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8830            && self.parse_keyword(Keyword::DETACH)
8831        {
8832            AlterTableOperation::DetachPartition {
8833                partition: self.parse_part_or_partition()?,
8834            }
8835        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8836            && self.parse_keyword(Keyword::FREEZE)
8837        {
8838            let partition = self.parse_part_or_partition()?;
8839            let with_name = if self.parse_keyword(Keyword::WITH) {
8840                self.expect_keyword_is(Keyword::NAME)?;
8841                Some(self.parse_identifier()?)
8842            } else {
8843                None
8844            };
8845            AlterTableOperation::FreezePartition {
8846                partition,
8847                with_name,
8848            }
8849        } else if dialect_of!(self is ClickHouseDialect|GenericDialect)
8850            && self.parse_keyword(Keyword::UNFREEZE)
8851        {
8852            let partition = self.parse_part_or_partition()?;
8853            let with_name = if self.parse_keyword(Keyword::WITH) {
8854                self.expect_keyword_is(Keyword::NAME)?;
8855                Some(self.parse_identifier()?)
8856            } else {
8857                None
8858            };
8859            AlterTableOperation::UnfreezePartition {
8860                partition,
8861                with_name,
8862            }
8863        } else if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
8864            self.expect_token(&Token::LParen)?;
8865            let exprs = self.parse_comma_separated(|parser| parser.parse_expr())?;
8866            self.expect_token(&Token::RParen)?;
8867            AlterTableOperation::ClusterBy { exprs }
8868        } else if self.parse_keywords(&[Keyword::SUSPEND, Keyword::RECLUSTER]) {
8869            AlterTableOperation::SuspendRecluster
8870        } else if self.parse_keywords(&[Keyword::RESUME, Keyword::RECLUSTER]) {
8871            AlterTableOperation::ResumeRecluster
8872        } else if self.parse_keyword(Keyword::LOCK) {
8873            let equals = self.consume_token(&Token::Eq);
8874            let lock = match self.parse_one_of_keywords(&[
8875                Keyword::DEFAULT,
8876                Keyword::EXCLUSIVE,
8877                Keyword::NONE,
8878                Keyword::SHARED,
8879            ]) {
8880                Some(Keyword::DEFAULT) => AlterTableLock::Default,
8881                Some(Keyword::EXCLUSIVE) => AlterTableLock::Exclusive,
8882                Some(Keyword::NONE) => AlterTableLock::None,
8883                Some(Keyword::SHARED) => AlterTableLock::Shared,
8884                _ => self.expected(
8885                    "DEFAULT, EXCLUSIVE, NONE or SHARED after LOCK [=]",
8886                    self.peek_token(),
8887                )?,
8888            };
8889            AlterTableOperation::Lock { equals, lock }
8890        } else if self.parse_keyword(Keyword::ALGORITHM) {
8891            let equals = self.consume_token(&Token::Eq);
8892            let algorithm = match self.parse_one_of_keywords(&[
8893                Keyword::DEFAULT,
8894                Keyword::INSTANT,
8895                Keyword::INPLACE,
8896                Keyword::COPY,
8897            ]) {
8898                Some(Keyword::DEFAULT) => AlterTableAlgorithm::Default,
8899                Some(Keyword::INSTANT) => AlterTableAlgorithm::Instant,
8900                Some(Keyword::INPLACE) => AlterTableAlgorithm::Inplace,
8901                Some(Keyword::COPY) => AlterTableAlgorithm::Copy,
8902                _ => self.expected(
8903                    "DEFAULT, INSTANT, INPLACE, or COPY after ALGORITHM [=]",
8904                    self.peek_token(),
8905                )?,
8906            };
8907            AlterTableOperation::Algorithm { equals, algorithm }
8908        } else if self.parse_keyword(Keyword::AUTO_INCREMENT) {
8909            let equals = self.consume_token(&Token::Eq);
8910            let value = self.parse_number_value()?;
8911            AlterTableOperation::AutoIncrement { equals, value }
8912        } else if self.parse_keywords(&[Keyword::REPLICA, Keyword::IDENTITY]) {
8913            let identity = if self.parse_keyword(Keyword::NONE) {
8914                ReplicaIdentity::None
8915            } else if self.parse_keyword(Keyword::FULL) {
8916                ReplicaIdentity::Full
8917            } else if self.parse_keyword(Keyword::DEFAULT) {
8918                ReplicaIdentity::Default
8919            } else if self.parse_keywords(&[Keyword::USING, Keyword::INDEX]) {
8920                ReplicaIdentity::Index(self.parse_identifier()?)
8921            } else {
8922                return self.expected(
8923                    "NONE, FULL, DEFAULT, or USING INDEX index_name after REPLICA IDENTITY",
8924                    self.peek_token(),
8925                );
8926            };
8927
8928            AlterTableOperation::ReplicaIdentity { identity }
8929        } else if self.parse_keywords(&[Keyword::VALIDATE, Keyword::CONSTRAINT]) {
8930            let name = self.parse_identifier()?;
8931            AlterTableOperation::ValidateConstraint { name }
8932        } else {
8933            let options: Vec<SqlOption> =
8934                self.parse_options_with_keywords(&[Keyword::SET, Keyword::TBLPROPERTIES])?;
8935            if !options.is_empty() {
8936                AlterTableOperation::SetTblProperties {
8937                    table_properties: options,
8938                }
8939            } else {
8940                return self.expected(
8941                    "ADD, RENAME, PARTITION, SWAP, DROP, REPLICA IDENTITY, or SET TBLPROPERTIES after ALTER TABLE",
8942                    self.peek_token(),
8943                );
8944            }
8945        };
8946        Ok(operation)
8947    }
8948
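    /// Parse the data type, and the optional `USING <expr>` clause where the
    /// dialect supports it, of an `ALTER COLUMN` type change; the leading
    /// `SET DATA TYPE`/`TYPE` keywords are consumed by
    /// [Parser::parse_alter_table_operation]. For example, the
    /// `BIGINT USING price::BIGINT` part of this illustrative operation:
    /// ```sql
    /// ALTER COLUMN price SET DATA TYPE BIGINT USING price::BIGINT
    /// ```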
8949    fn parse_set_data_type(&mut self, had_set: bool) -> Result<AlterColumnOperation, ParserError> {
8950        let data_type = self.parse_data_type()?;
8951        let using = if self.dialect.supports_alter_column_type_using()
8952            && self.parse_keyword(Keyword::USING)
8953        {
8954            Some(self.parse_expr()?)
8955        } else {
8956            None
8957        };
8958        Ok(AlterColumnOperation::SetDataType {
8959            data_type,
8960            using,
8961            had_set,
8962        })
8963    }
8964
8965    fn parse_part_or_partition(&mut self) -> Result<Partition, ParserError> {
8966        let keyword = self.expect_one_of_keywords(&[Keyword::PART, Keyword::PARTITION])?;
8967        match keyword {
8968            Keyword::PART => Ok(Partition::Part(self.parse_expr()?)),
8969            Keyword::PARTITION => Ok(Partition::Expr(self.parse_expr()?)),
8970            // unreachable because `expect_one_of_keywords` only returns one of the keywords above
8971            _ => unreachable!(),
8972        }
8973    }
8974
8975    pub fn parse_alter(&mut self) -> Result<Statement, ParserError> {
8976        let object_type = self.expect_one_of_keywords(&[
8977            Keyword::VIEW,
8978            Keyword::TYPE,
8979            Keyword::TABLE,
8980            Keyword::INDEX,
8981            Keyword::ROLE,
8982            Keyword::POLICY,
8983            Keyword::CONNECTOR,
8984            Keyword::ICEBERG,
8985        ])?;
8986        match object_type {
8987            Keyword::VIEW => self.parse_alter_view(),
8988            Keyword::TYPE => self.parse_alter_type(),
8989            Keyword::TABLE => self.parse_alter_table(false),
8990            Keyword::ICEBERG => {
8991                self.expect_keyword(Keyword::TABLE)?;
8992                self.parse_alter_table(true)
8993            }
8994            Keyword::INDEX => {
8995                let index_name = self.parse_object_name(false)?;
8996                let operation = if self.parse_keyword(Keyword::RENAME) {
8997                    if self.parse_keyword(Keyword::TO) {
8998                        let index_name = self.parse_object_name(false)?;
8999                        AlterIndexOperation::RenameIndex { index_name }
9000                    } else {
9001                        return self.expected("TO after RENAME", self.peek_token());
9002                    }
9003                } else {
9004                    return self.expected("RENAME after ALTER INDEX", self.peek_token());
9005                };
9006
9007                Ok(Statement::AlterIndex {
9008                    name: index_name,
9009                    operation,
9010                })
9011            }
9012            Keyword::ROLE => self.parse_alter_role(),
9013            Keyword::POLICY => self.parse_alter_policy(),
9014            Keyword::CONNECTOR => self.parse_alter_connector(),
9015            // unreachable because `expect_one_of_keywords` only returns one of the keywords above
9016            _ => unreachable!(),
9017        }
9018    }
9019
9020    /// Parse a [Statement::AlterTable]
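    ///
    /// The leading `ALTER [ICEBERG] TABLE` keywords are consumed before this
    /// method is called (see [Parser::parse_alter]). An illustrative example
    /// (names are placeholders):
    /// ```sql
    /// ALTER TABLE IF EXISTS orders ADD COLUMN note TEXT, DROP COLUMN legacy_id
    /// ```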
9021    pub fn parse_alter_table(&mut self, iceberg: bool) -> Result<Statement, ParserError> {
9022        let if_exists = self.parse_keywords(&[Keyword::IF, Keyword::EXISTS]);
9023        let only = self.parse_keyword(Keyword::ONLY); // [ ONLY ]
9024        let table_name = self.parse_object_name(false)?;
9025        let on_cluster = self.parse_optional_on_cluster()?;
9026        let operations = self.parse_comma_separated(Parser::parse_alter_table_operation)?;
9027
9028        let mut location = None;
9029        if self.parse_keyword(Keyword::LOCATION) {
9030            location = Some(HiveSetLocation {
9031                has_set: false,
9032                location: self.parse_identifier()?,
9033            });
9034        } else if self.parse_keywords(&[Keyword::SET, Keyword::LOCATION]) {
9035            location = Some(HiveSetLocation {
9036                has_set: true,
9037                location: self.parse_identifier()?,
9038            });
9039        }
9040
9041        Ok(Statement::AlterTable {
9042            name: table_name,
9043            if_exists,
9044            only,
9045            operations,
9046            location,
9047            on_cluster,
9048            iceberg,
9049        })
9050    }
9051
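    /// Parse a [Statement::AlterView]; the leading `ALTER VIEW` keywords are
    /// consumed before this method is called. An illustrative example (names
    /// are placeholders):
    /// ```sql
    /// ALTER VIEW active_users (id, name) AS SELECT id, name FROM users WHERE active
    /// ```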
9052    pub fn parse_alter_view(&mut self) -> Result<Statement, ParserError> {
9053        let name = self.parse_object_name(false)?;
9054        let columns = self.parse_parenthesized_column_list(Optional, false)?;
9055
9056        let with_options = self.parse_options(Keyword::WITH)?;
9057
9058        self.expect_keyword_is(Keyword::AS)?;
9059        let query = self.parse_query()?;
9060
9061        Ok(Statement::AlterView {
9062            name,
9063            columns,
9064            query,
9065            with_options,
9066        })
9067    }
9068
9069    /// Parse a [Statement::AlterType]
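    ///
    /// The leading `ALTER TYPE` keywords are consumed before this method is
    /// called (see [Parser::parse_alter]). Each line below is a separate,
    /// illustrative statement (names are placeholders):
    /// ```sql
    /// ALTER TYPE mood RENAME TO feeling
    /// ALTER TYPE mood ADD VALUE IF NOT EXISTS 'curious' AFTER 'happy'
    /// ```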
9070    pub fn parse_alter_type(&mut self) -> Result<Statement, ParserError> {
9071        let name = self.parse_object_name(false)?;
9072
9073        if self.parse_keywords(&[Keyword::RENAME, Keyword::TO]) {
9074            let new_name = self.parse_identifier()?;
9075            Ok(Statement::AlterType(AlterType {
9076                name,
9077                operation: AlterTypeOperation::Rename(AlterTypeRename { new_name }),
9078            }))
9079        } else if self.parse_keywords(&[Keyword::ADD, Keyword::VALUE]) {
9080            let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
9081            let new_enum_value = self.parse_identifier()?;
9082            let position = if self.parse_keyword(Keyword::BEFORE) {
9083                Some(AlterTypeAddValuePosition::Before(self.parse_identifier()?))
9084            } else if self.parse_keyword(Keyword::AFTER) {
9085                Some(AlterTypeAddValuePosition::After(self.parse_identifier()?))
9086            } else {
9087                None
9088            };
9089
9090            Ok(Statement::AlterType(AlterType {
9091                name,
9092                operation: AlterTypeOperation::AddValue(AlterTypeAddValue {
9093                    if_not_exists,
9094                    value: new_enum_value,
9095                    position,
9096                }),
9097            }))
9098        } else if self.parse_keywords(&[Keyword::RENAME, Keyword::VALUE]) {
9099            let existing_enum_value = self.parse_identifier()?;
9100            self.expect_keyword(Keyword::TO)?;
9101            let new_enum_value = self.parse_identifier()?;
9102
9103            Ok(Statement::AlterType(AlterType {
9104                name,
9105                operation: AlterTypeOperation::RenameValue(AlterTypeRenameValue {
9106                    from: existing_enum_value,
9107                    to: new_enum_value,
9108                }),
9109            }))
9110        } else {
9111            return self.expected_ref(
9112                "{RENAME TO | { RENAME | ADD } VALUE}",
9113                self.peek_token_ref(),
9114            );
9115        }
9116    }
9117
9118    /// Parse a `CALL procedure_name(arg1, arg2, ...)`
9119    /// or `CALL procedure_name` statement
9120    pub fn parse_call(&mut self) -> Result<Statement, ParserError> {
9121        let object_name = self.parse_object_name(false)?;
9122        if self.peek_token().token == Token::LParen {
9123            match self.parse_function(object_name)? {
9124                Expr::Function(f) => Ok(Statement::Call(f)),
9125                other => parser_err!(
9126                    format!("Expected a simple procedure call but found: {other}"),
9127                    self.peek_token().span.start
9128                ),
9129            }
9130        } else {
9131            Ok(Statement::Call(Function {
9132                name: object_name,
9133                uses_odbc_syntax: false,
9134                parameters: FunctionArguments::None,
9135                args: FunctionArguments::None,
9136                over: None,
9137                filter: None,
9138                null_treatment: None,
9139                within_group: vec![],
9140            }))
9141        }
9142    }
9143
9144    /// Parse a copy statement
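    ///
    /// An illustrative example (table, column, and file names are placeholders):
    /// ```sql
    /// COPY (SELECT id, name FROM users) TO 'users.csv' WITH (FORMAT csv, HEADER true)
    /// ```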
9145    pub fn parse_copy(&mut self) -> Result<Statement, ParserError> {
9146        let source;
9147        if self.consume_token(&Token::LParen) {
9148            source = CopySource::Query(self.parse_query()?);
9149            self.expect_token(&Token::RParen)?;
9150        } else {
9151            let table_name = self.parse_object_name(false)?;
9152            let columns = self.parse_parenthesized_column_list(Optional, false)?;
9153            source = CopySource::Table {
9154                table_name,
9155                columns,
9156            };
9157        }
9158        let to = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::TO]) {
9159            Some(Keyword::FROM) => false,
9160            Some(Keyword::TO) => true,
9161            _ => self.expected("FROM or TO", self.peek_token())?,
9162        };
9163        if !to {
9164            // Use a separate if statement to prevent the Rust compiler from complaining about
9165            // "if statement in this position is unstable: https://github.com/rust-lang/rust/issues/53667"
9166            if let CopySource::Query(_) = source {
9167                return Err(ParserError::ParserError(
9168                    "COPY ... FROM does not support query as a source".to_string(),
9169                ));
9170            }
9171        }
9172        let target = if self.parse_keyword(Keyword::STDIN) {
9173            CopyTarget::Stdin
9174        } else if self.parse_keyword(Keyword::STDOUT) {
9175            CopyTarget::Stdout
9176        } else if self.parse_keyword(Keyword::PROGRAM) {
9177            CopyTarget::Program {
9178                command: self.parse_literal_string()?,
9179            }
9180        } else {
9181            CopyTarget::File {
9182                filename: self.parse_literal_string()?,
9183            }
9184        };
9185        let _ = self.parse_keyword(Keyword::WITH); // [ WITH ]
9186        let mut options = vec![];
9187        if self.consume_token(&Token::LParen) {
9188            options = self.parse_comma_separated(Parser::parse_copy_option)?;
9189            self.expect_token(&Token::RParen)?;
9190        }
9191        let mut legacy_options = vec![];
9192        while let Some(opt) = self.maybe_parse(|parser| parser.parse_copy_legacy_option())? {
9193            legacy_options.push(opt);
9194        }
9195        let values = if let CopyTarget::Stdin = target {
9196            self.expect_token(&Token::SemiColon)?;
9197            self.parse_tsv()
9198        } else {
9199            vec![]
9200        };
9201        Ok(Statement::Copy {
9202            source,
9203            to,
9204            target,
9205            options,
9206            legacy_options,
9207            values,
9208        })
9209    }
9210
9211    /// Parse [Statement::Open]
9212    fn parse_open(&mut self) -> Result<Statement, ParserError> {
9213        self.expect_keyword(Keyword::OPEN)?;
9214        Ok(Statement::Open(OpenStatement {
9215            cursor_name: self.parse_identifier()?,
9216        }))
9217    }
9218
9219    pub fn parse_close(&mut self) -> Result<Statement, ParserError> {
9220        let cursor = if self.parse_keyword(Keyword::ALL) {
9221            CloseCursor::All
9222        } else {
9223            let name = self.parse_identifier()?;
9224
9225            CloseCursor::Specific { name }
9226        };
9227
9228        Ok(Statement::Close { cursor })
9229    }
9230
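    /// Parse a single option from the parenthesized option list of a `COPY`
    /// statement. Each line below is a separate, illustrative option:
    /// ```sql
    /// FORMAT csv
    /// DELIMITER ','
    /// FORCE_QUOTE (name, email)
    /// ```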
9231    fn parse_copy_option(&mut self) -> Result<CopyOption, ParserError> {
9232        let ret = match self.parse_one_of_keywords(&[
9233            Keyword::FORMAT,
9234            Keyword::FREEZE,
9235            Keyword::DELIMITER,
9236            Keyword::NULL,
9237            Keyword::HEADER,
9238            Keyword::QUOTE,
9239            Keyword::ESCAPE,
9240            Keyword::FORCE_QUOTE,
9241            Keyword::FORCE_NOT_NULL,
9242            Keyword::FORCE_NULL,
9243            Keyword::ENCODING,
9244        ]) {
9245            Some(Keyword::FORMAT) => CopyOption::Format(self.parse_identifier()?),
9246            Some(Keyword::FREEZE) => CopyOption::Freeze(!matches!(
9247                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9248                Some(Keyword::FALSE)
9249            )),
9250            Some(Keyword::DELIMITER) => CopyOption::Delimiter(self.parse_literal_char()?),
9251            Some(Keyword::NULL) => CopyOption::Null(self.parse_literal_string()?),
9252            Some(Keyword::HEADER) => CopyOption::Header(!matches!(
9253                self.parse_one_of_keywords(&[Keyword::TRUE, Keyword::FALSE]),
9254                Some(Keyword::FALSE)
9255            )),
9256            Some(Keyword::QUOTE) => CopyOption::Quote(self.parse_literal_char()?),
9257            Some(Keyword::ESCAPE) => CopyOption::Escape(self.parse_literal_char()?),
9258            Some(Keyword::FORCE_QUOTE) => {
9259                CopyOption::ForceQuote(self.parse_parenthesized_column_list(Mandatory, false)?)
9260            }
9261            Some(Keyword::FORCE_NOT_NULL) => {
9262                CopyOption::ForceNotNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9263            }
9264            Some(Keyword::FORCE_NULL) => {
9265                CopyOption::ForceNull(self.parse_parenthesized_column_list(Mandatory, false)?)
9266            }
9267            Some(Keyword::ENCODING) => CopyOption::Encoding(self.parse_literal_string()?),
9268            _ => self.expected("option", self.peek_token())?,
9269        };
9270        Ok(ret)
9271    }
9272
9273    fn parse_copy_legacy_option(&mut self) -> Result<CopyLegacyOption, ParserError> {
9274        let ret = match self.parse_one_of_keywords(&[
9275            Keyword::BINARY,
9276            Keyword::DELIMITER,
9277            Keyword::NULL,
9278            Keyword::CSV,
9279        ]) {
9280            Some(Keyword::BINARY) => CopyLegacyOption::Binary,
9281            Some(Keyword::DELIMITER) => {
9282                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9283                CopyLegacyOption::Delimiter(self.parse_literal_char()?)
9284            }
9285            Some(Keyword::NULL) => {
9286                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9287                CopyLegacyOption::Null(self.parse_literal_string()?)
9288            }
9289            Some(Keyword::CSV) => CopyLegacyOption::Csv({
9290                let mut opts = vec![];
9291                while let Some(opt) =
9292                    self.maybe_parse(|parser| parser.parse_copy_legacy_csv_option())?
9293                {
9294                    opts.push(opt);
9295                }
9296                opts
9297            }),
9298            _ => self.expected("option", self.peek_token())?,
9299        };
9300        Ok(ret)
9301    }
9302
9303    fn parse_copy_legacy_csv_option(&mut self) -> Result<CopyLegacyCsvOption, ParserError> {
9304        let ret = match self.parse_one_of_keywords(&[
9305            Keyword::HEADER,
9306            Keyword::QUOTE,
9307            Keyword::ESCAPE,
9308            Keyword::FORCE,
9309        ]) {
9310            Some(Keyword::HEADER) => CopyLegacyCsvOption::Header,
9311            Some(Keyword::QUOTE) => {
9312                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9313                CopyLegacyCsvOption::Quote(self.parse_literal_char()?)
9314            }
9315            Some(Keyword::ESCAPE) => {
9316                let _ = self.parse_keyword(Keyword::AS); // [ AS ]
9317                CopyLegacyCsvOption::Escape(self.parse_literal_char()?)
9318            }
9319            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::NOT, Keyword::NULL]) => {
9320                CopyLegacyCsvOption::ForceNotNull(
9321                    self.parse_comma_separated(|p| p.parse_identifier())?,
9322                )
9323            }
9324            Some(Keyword::FORCE) if self.parse_keywords(&[Keyword::QUOTE]) => {
9325                CopyLegacyCsvOption::ForceQuote(
9326                    self.parse_comma_separated(|p| p.parse_identifier())?,
9327                )
9328            }
9329            _ => self.expected("csv option", self.peek_token())?,
9330        };
9331        Ok(ret)
9332    }
9333
9334    fn parse_literal_char(&mut self) -> Result<char, ParserError> {
9335        let s = self.parse_literal_string()?;
9336        if s.len() != 1 {
9337            let loc = self
9338                .tokens
9339                .get(self.index - 1)
9340                .map_or(Location { line: 0, column: 0 }, |t| t.span.start);
9341            return parser_err!(format!("Expected a char, found {s:?}"), loc);
9342        }
9343        Ok(s.chars().next().unwrap())
9344    }
9345
9346    /// Parse tab-separated values in a
9347    /// COPY payload.
9348    pub fn parse_tsv(&mut self) -> Vec<Option<String>> {
9349        self.parse_tab_value()
9350    }
9351
9352    pub fn parse_tab_value(&mut self) -> Vec<Option<String>> {
9353        let mut values = vec![];
9354        let mut content = String::from("");
9355        while let Some(t) = self.next_token_no_skip().map(|t| &t.token) {
9356            match t {
9357                Token::Whitespace(Whitespace::Tab) => {
9358                    values.push(Some(content.to_string()));
9359                    content.clear();
9360                }
9361                Token::Whitespace(Whitespace::Newline) => {
9362                    values.push(Some(content.to_string()));
9363                    content.clear();
9364                }
9365                Token::Backslash => {
9366                    if self.consume_token(&Token::Period) {
9367                        return values;
9368                    }
9369                    if let Token::Word(w) = self.next_token().token {
9370                        if w.value == "N" {
9371                            values.push(None);
9372                        }
9373                    }
9374                }
9375                _ => {
9376                    content.push_str(&t.to_string());
9377                }
9378            }
9379        }
9380        values
9381    }
9382
9383    /// Parse a literal value (numbers, strings, date/time, booleans)
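    ///
    /// # Example
    ///
    /// A minimal sketch using [`GenericDialect`]; it assumes the `value` field of
    /// [`ValueWithSpan`] is publicly accessible from `sqlparser::ast`:
    ///
    /// ```rust
    /// use sqlparser::ast::Value;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'hello'").unwrap();
    /// // The literal is returned together with its source span.
    /// let parsed = parser.parse_value().unwrap();
    /// assert_eq!(parsed.value, Value::SingleQuotedString("hello".to_string()));
    /// ```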
9384    pub fn parse_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9385        let next_token = self.next_token();
9386        let span = next_token.span;
9387        let ok_value = |value: Value| Ok(value.with_span(span));
9388        match next_token.token {
9389            Token::Word(w) => match w.keyword {
9390                Keyword::TRUE if self.dialect.supports_boolean_literals() => {
9391                    ok_value(Value::Boolean(true))
9392                }
9393                Keyword::FALSE if self.dialect.supports_boolean_literals() => {
9394                    ok_value(Value::Boolean(false))
9395                }
9396                Keyword::NULL => ok_value(Value::Null),
9397                Keyword::NoKeyword if w.quote_style.is_some() => match w.quote_style {
9398                    Some('"') => ok_value(Value::DoubleQuotedString(w.value)),
9399                    Some('\'') => ok_value(Value::SingleQuotedString(w.value)),
9400                    _ => self.expected(
9401                        "a value",
9402                        TokenWithSpan {
9403                            token: Token::Word(w),
9404                            span,
9405                        },
9406                    )?,
9407                },
9408                _ => self.expected(
9409                    "a concrete value",
9410                    TokenWithSpan {
9411                        token: Token::Word(w),
9412                        span,
9413                    },
9414                ),
9415            },
9416            // The call to n.parse() returns a bigdecimal when the
9417            // bigdecimal feature is enabled, and is otherwise a no-op
9418            // (i.e., it returns the input string).
9419            Token::Number(n, l) => ok_value(Value::Number(Self::parse(n, span.start)?, l)),
9420            Token::SingleQuotedString(ref s) => ok_value(Value::SingleQuotedString(s.to_string())),
9421            Token::DoubleQuotedString(ref s) => ok_value(Value::DoubleQuotedString(s.to_string())),
9422            Token::TripleSingleQuotedString(ref s) => {
9423                ok_value(Value::TripleSingleQuotedString(s.to_string()))
9424            }
9425            Token::TripleDoubleQuotedString(ref s) => {
9426                ok_value(Value::TripleDoubleQuotedString(s.to_string()))
9427            }
9428            Token::DollarQuotedString(ref s) => ok_value(Value::DollarQuotedString(s.clone())),
9429            Token::SingleQuotedByteStringLiteral(ref s) => {
9430                ok_value(Value::SingleQuotedByteStringLiteral(s.clone()))
9431            }
9432            Token::DoubleQuotedByteStringLiteral(ref s) => {
9433                ok_value(Value::DoubleQuotedByteStringLiteral(s.clone()))
9434            }
9435            Token::TripleSingleQuotedByteStringLiteral(ref s) => {
9436                ok_value(Value::TripleSingleQuotedByteStringLiteral(s.clone()))
9437            }
9438            Token::TripleDoubleQuotedByteStringLiteral(ref s) => {
9439                ok_value(Value::TripleDoubleQuotedByteStringLiteral(s.clone()))
9440            }
9441            Token::SingleQuotedRawStringLiteral(ref s) => {
9442                ok_value(Value::SingleQuotedRawStringLiteral(s.clone()))
9443            }
9444            Token::DoubleQuotedRawStringLiteral(ref s) => {
9445                ok_value(Value::DoubleQuotedRawStringLiteral(s.clone()))
9446            }
9447            Token::TripleSingleQuotedRawStringLiteral(ref s) => {
9448                ok_value(Value::TripleSingleQuotedRawStringLiteral(s.clone()))
9449            }
9450            Token::TripleDoubleQuotedRawStringLiteral(ref s) => {
9451                ok_value(Value::TripleDoubleQuotedRawStringLiteral(s.clone()))
9452            }
9453            Token::NationalStringLiteral(ref s) => {
9454                ok_value(Value::NationalStringLiteral(s.to_string()))
9455            }
9456            Token::EscapedStringLiteral(ref s) => {
9457                ok_value(Value::EscapedStringLiteral(s.to_string()))
9458            }
9459            Token::UnicodeStringLiteral(ref s) => {
9460                ok_value(Value::UnicodeStringLiteral(s.to_string()))
9461            }
9462            Token::HexStringLiteral(ref s) => ok_value(Value::HexStringLiteral(s.to_string())),
9463            Token::Placeholder(ref s) => ok_value(Value::Placeholder(s.to_string())),
9464            tok @ Token::Colon | tok @ Token::AtSign => {
9465                // Not calling self.parse_identifier(false)? because only for placeholders do we want to accept numbers as identifiers.
9466                // This is because Snowflake allows numbers as placeholders.
9467                let next_token = self.next_token();
9468                let ident = match next_token.token {
9469                    Token::Word(w) => Ok(w.into_ident(next_token.span)),
9470                    Token::Number(w, false) => Ok(Ident::new(w)),
9471                    _ => self.expected("placeholder", next_token),
9472                }?;
9473                let placeholder = tok.to_string() + &ident.value;
9474                ok_value(Value::Placeholder(placeholder))
9475            }
9476            unexpected => self.expected(
9477                "a value",
9478                TokenWithSpan {
9479                    token: unexpected,
9480                    span,
9481                },
9482            ),
9483        }
9484    }
9485
9486    /// Parse an unsigned numeric literal
9487    pub fn parse_number_value(&mut self) -> Result<ValueWithSpan, ParserError> {
9488        let value_wrapper = self.parse_value()?;
9489        match &value_wrapper.value {
9490            Value::Number(_, _) => Ok(value_wrapper),
9491            Value::Placeholder(_) => Ok(value_wrapper),
9492            _ => {
9493                self.prev_token();
9494                self.expected("literal number", self.peek_token())
9495            }
9496        }
9497    }
9498
9499    /// Parse a numeric literal as an expression. Returns an [`Expr::UnaryOp`] if the number is signed,
9500    /// otherwise returns an [`Expr::Value`].
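    ///
    /// # Example
    ///
    /// A small sketch of the signed case, using [`GenericDialect`]; the literal is
    /// illustrative:
    ///
    /// ```rust
    /// use sqlparser::ast::{Expr, UnaryOperator};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("-1").unwrap();
    /// // A leading sign yields a unary expression wrapping the numeric literal.
    /// match parser.parse_number().unwrap() {
    ///     Expr::UnaryOp { op: UnaryOperator::Minus, .. } => {}
    ///     other => panic!("expected a unary minus, got {other:?}"),
    /// }
    /// ```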
9501    pub fn parse_number(&mut self) -> Result<Expr, ParserError> {
9502        let next_token = self.next_token();
9503        match next_token.token {
9504            Token::Plus => Ok(Expr::UnaryOp {
9505                op: UnaryOperator::Plus,
9506                expr: Box::new(Expr::Value(self.parse_number_value()?)),
9507            }),
9508            Token::Minus => Ok(Expr::UnaryOp {
9509                op: UnaryOperator::Minus,
9510                expr: Box::new(Expr::Value(self.parse_number_value()?)),
9511            }),
9512            _ => {
9513                self.prev_token();
9514                Ok(Expr::Value(self.parse_number_value()?))
9515            }
9516        }
9517    }
9518
9519    fn parse_introduced_string_expr(&mut self) -> Result<Expr, ParserError> {
9520        let next_token = self.next_token();
9521        let span = next_token.span;
9522        match next_token.token {
9523            Token::SingleQuotedString(ref s) => Ok(Expr::Value(
9524                Value::SingleQuotedString(s.to_string()).with_span(span),
9525            )),
9526            Token::DoubleQuotedString(ref s) => Ok(Expr::Value(
9527                Value::DoubleQuotedString(s.to_string()).with_span(span),
9528            )),
9529            Token::HexStringLiteral(ref s) => Ok(Expr::Value(
9530                Value::HexStringLiteral(s.to_string()).with_span(span),
9531            )),
9532            unexpected => self.expected(
9533                "a string value",
9534                TokenWithSpan {
9535                    token: unexpected,
9536                    span,
9537                },
9538            ),
9539        }
9540    }
9541
9542    /// Parse an unsigned literal integer/long
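    ///
    /// # Example
    ///
    /// A minimal sketch using [`GenericDialect`]:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("42").unwrap();
    /// assert_eq!(parser.parse_literal_uint().unwrap(), 42);
    /// ```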
9543    pub fn parse_literal_uint(&mut self) -> Result<u64, ParserError> {
9544        let next_token = self.next_token();
9545        match next_token.token {
9546            Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start),
9547            _ => self.expected("literal int", next_token),
9548        }
9549    }
9550
9551    /// Parse the body of a `CREATE FUNCTION` specified as a string.
9552    /// e.g. `CREATE FUNCTION ... AS $$ body $$`.
9553    fn parse_create_function_body_string(&mut self) -> Result<Expr, ParserError> {
9554        let peek_token = self.peek_token();
9555        let span = peek_token.span;
9556        match peek_token.token {
9557            Token::DollarQuotedString(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) =>
9558            {
9559                self.next_token();
9560                Ok(Expr::Value(Value::DollarQuotedString(s).with_span(span)))
9561            }
9562            _ => Ok(Expr::Value(
9563                Value::SingleQuotedString(self.parse_literal_string()?).with_span(span),
9564            )),
9565        }
9566    }
9567
9568    /// Parse a literal string
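    ///
    /// # Example
    ///
    /// A minimal sketch using [`GenericDialect`]; which quoting styles are accepted
    /// depends on the dialect:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("'abc'").unwrap();
    /// assert_eq!(parser.parse_literal_string().unwrap(), "abc");
    /// ```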
9569    pub fn parse_literal_string(&mut self) -> Result<String, ParserError> {
9570        let next_token = self.next_token();
9571        match next_token.token {
9572            Token::Word(Word {
9573                value,
9574                keyword: Keyword::NoKeyword,
9575                ..
9576            }) => Ok(value),
9577            Token::SingleQuotedString(s) => Ok(s),
9578            Token::DoubleQuotedString(s) => Ok(s),
9579            Token::EscapedStringLiteral(s) if dialect_of!(self is PostgreSqlDialect | GenericDialect) => {
9580                Ok(s)
9581            }
9582            Token::UnicodeStringLiteral(s) => Ok(s),
9583            _ => self.expected("literal string", next_token),
9584        }
9585    }
9586
9587    /// Parse a literal unicode normalization clause
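    ///
    /// # Example
    ///
    /// A sketch of parsing the `[NOT] [form] NORMALIZED` tail of an `IS` expression;
    /// the operand expression is supplied by the caller and is illustrative here:
    ///
    /// ```rust
    /// use sqlparser::ast::{Expr, Ident};
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("NFC NORMALIZED").unwrap();
    /// let operand = Expr::Identifier(Ident::new("txt"));
    /// match parser.parse_unicode_is_normalized(operand).unwrap() {
    ///     Expr::IsNormalized { negated: false, .. } => {}
    ///     other => panic!("expected IsNormalized, got {other:?}"),
    /// }
    /// ```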
9588    pub fn parse_unicode_is_normalized(&mut self, expr: Expr) -> Result<Expr, ParserError> {
9589        let neg = self.parse_keyword(Keyword::NOT);
9590        let normalized_form = self.maybe_parse(|parser| {
9591            match parser.parse_one_of_keywords(&[
9592                Keyword::NFC,
9593                Keyword::NFD,
9594                Keyword::NFKC,
9595                Keyword::NFKD,
9596            ]) {
9597                Some(Keyword::NFC) => Ok(NormalizationForm::NFC),
9598                Some(Keyword::NFD) => Ok(NormalizationForm::NFD),
9599                Some(Keyword::NFKC) => Ok(NormalizationForm::NFKC),
9600                Some(Keyword::NFKD) => Ok(NormalizationForm::NFKD),
9601                _ => parser.expected("unicode normalization form", parser.peek_token()),
9602            }
9603        })?;
9604        if self.parse_keyword(Keyword::NORMALIZED) {
9605            return Ok(Expr::IsNormalized {
9606                expr: Box::new(expr),
9607                form: normalized_form,
9608                negated: neg,
9609            });
9610        }
9611        self.expected("unicode normalization form", self.peek_token())
9612    }
9613
9614    pub fn parse_enum_values(&mut self) -> Result<Vec<EnumMember>, ParserError> {
9615        self.expect_token(&Token::LParen)?;
9616        let values = self.parse_comma_separated(|parser| {
9617            let name = parser.parse_literal_string()?;
9618            let e = if parser.consume_token(&Token::Eq) {
9619                let value = parser.parse_number()?;
9620                EnumMember::NamedValue(name, value)
9621            } else {
9622                EnumMember::Name(name)
9623            };
9624            Ok(e)
9625        })?;
9626        self.expect_token(&Token::RParen)?;
9627
9628        Ok(values)
9629    }
9630
9631    /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
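    ///
    /// # Example
    ///
    /// A minimal sketch using [`GenericDialect`] and the `Display` round-trip of the
    /// resulting [`DataType`]:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("VARCHAR(20)").unwrap();
    /// let data_type = parser.parse_data_type().unwrap();
    /// assert_eq!(data_type.to_string(), "VARCHAR(20)");
    /// ```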
9632    pub fn parse_data_type(&mut self) -> Result<DataType, ParserError> {
9633        let (ty, trailing_bracket) = self.parse_data_type_helper()?;
9634        if trailing_bracket.0 {
9635            return parser_err!(
9636                format!("unmatched > after parsing data type {ty}"),
9637                self.peek_token()
9638            );
9639        }
9640
9641        Ok(ty)
9642    }
9643
9644    fn parse_data_type_helper(
9645        &mut self,
9646    ) -> Result<(DataType, MatchedTrailingBracket), ParserError> {
9647        let dialect = self.dialect;
9648        self.advance_token();
9649        let next_token = self.get_current_token();
9650        let next_token_index = self.get_current_index();
9651
9652        let mut trailing_bracket: MatchedTrailingBracket = false.into();
9653        let mut data = match &next_token.token {
9654            Token::Word(w) => match w.keyword {
9655                Keyword::BOOLEAN => Ok(DataType::Boolean),
9656                Keyword::BOOL => Ok(DataType::Bool),
9657                Keyword::FLOAT => Ok(DataType::Float(self.parse_optional_precision()?)),
9658                Keyword::REAL => Ok(DataType::Real),
9659                Keyword::FLOAT4 => Ok(DataType::Float4),
9660                Keyword::FLOAT32 => Ok(DataType::Float32),
9661                Keyword::FLOAT64 => Ok(DataType::Float64),
9662                Keyword::FLOAT8 => Ok(DataType::Float8),
9663                Keyword::DOUBLE => {
9664                    if self.parse_keyword(Keyword::PRECISION) {
9665                        Ok(DataType::DoublePrecision)
9666                    } else {
9667                        Ok(DataType::Double(
9668                            self.parse_exact_number_optional_precision_scale()?,
9669                        ))
9670                    }
9671                }
9672                Keyword::TINYINT => {
9673                    let optional_precision = self.parse_optional_precision();
9674                    if self.parse_keyword(Keyword::UNSIGNED) {
9675                        Ok(DataType::TinyIntUnsigned(optional_precision?))
9676                    } else {
9677                        Ok(DataType::TinyInt(optional_precision?))
9678                    }
9679                }
9680                Keyword::INT2 => {
9681                    let optional_precision = self.parse_optional_precision();
9682                    if self.parse_keyword(Keyword::UNSIGNED) {
9683                        Ok(DataType::Int2Unsigned(optional_precision?))
9684                    } else {
9685                        Ok(DataType::Int2(optional_precision?))
9686                    }
9687                }
9688                Keyword::SMALLINT => {
9689                    let optional_precision = self.parse_optional_precision();
9690                    if self.parse_keyword(Keyword::UNSIGNED) {
9691                        Ok(DataType::SmallIntUnsigned(optional_precision?))
9692                    } else {
9693                        Ok(DataType::SmallInt(optional_precision?))
9694                    }
9695                }
9696                Keyword::MEDIUMINT => {
9697                    let optional_precision = self.parse_optional_precision();
9698                    if self.parse_keyword(Keyword::UNSIGNED) {
9699                        Ok(DataType::MediumIntUnsigned(optional_precision?))
9700                    } else {
9701                        Ok(DataType::MediumInt(optional_precision?))
9702                    }
9703                }
9704                Keyword::INT => {
9705                    let optional_precision = self.parse_optional_precision();
9706                    if self.parse_keyword(Keyword::UNSIGNED) {
9707                        Ok(DataType::IntUnsigned(optional_precision?))
9708                    } else {
9709                        Ok(DataType::Int(optional_precision?))
9710                    }
9711                }
9712                Keyword::INT4 => {
9713                    let optional_precision = self.parse_optional_precision();
9714                    if self.parse_keyword(Keyword::UNSIGNED) {
9715                        Ok(DataType::Int4Unsigned(optional_precision?))
9716                    } else {
9717                        Ok(DataType::Int4(optional_precision?))
9718                    }
9719                }
9720                Keyword::INT8 => {
9721                    let optional_precision = self.parse_optional_precision();
9722                    if self.parse_keyword(Keyword::UNSIGNED) {
9723                        Ok(DataType::Int8Unsigned(optional_precision?))
9724                    } else {
9725                        Ok(DataType::Int8(optional_precision?))
9726                    }
9727                }
9728                Keyword::INT16 => Ok(DataType::Int16),
9729                Keyword::INT32 => Ok(DataType::Int32),
9730                Keyword::INT64 => Ok(DataType::Int64),
9731                Keyword::INT128 => Ok(DataType::Int128),
9732                Keyword::INT256 => Ok(DataType::Int256),
9733                Keyword::INTEGER => {
9734                    let optional_precision = self.parse_optional_precision();
9735                    if self.parse_keyword(Keyword::UNSIGNED) {
9736                        Ok(DataType::IntegerUnsigned(optional_precision?))
9737                    } else {
9738                        Ok(DataType::Integer(optional_precision?))
9739                    }
9740                }
9741                Keyword::BIGINT => {
9742                    let optional_precision = self.parse_optional_precision();
9743                    if self.parse_keyword(Keyword::UNSIGNED) {
9744                        Ok(DataType::BigIntUnsigned(optional_precision?))
9745                    } else {
9746                        Ok(DataType::BigInt(optional_precision?))
9747                    }
9748                }
9749                Keyword::HUGEINT => Ok(DataType::HugeInt),
9750                Keyword::UBIGINT => Ok(DataType::UBigInt),
9751                Keyword::UHUGEINT => Ok(DataType::UHugeInt),
9752                Keyword::USMALLINT => Ok(DataType::USmallInt),
9753                Keyword::UTINYINT => Ok(DataType::UTinyInt),
9754                Keyword::UINT8 => Ok(DataType::UInt8),
9755                Keyword::UINT16 => Ok(DataType::UInt16),
9756                Keyword::UINT32 => Ok(DataType::UInt32),
9757                Keyword::UINT64 => Ok(DataType::UInt64),
9758                Keyword::UINT128 => Ok(DataType::UInt128),
9759                Keyword::UINT256 => Ok(DataType::UInt256),
9760                Keyword::VARCHAR => Ok(DataType::Varchar(self.parse_optional_character_length()?)),
9761                Keyword::NVARCHAR => {
9762                    Ok(DataType::Nvarchar(self.parse_optional_character_length()?))
9763                }
9764                Keyword::CHARACTER => {
9765                    if self.parse_keyword(Keyword::VARYING) {
9766                        Ok(DataType::CharacterVarying(
9767                            self.parse_optional_character_length()?,
9768                        ))
9769                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
9770                        Ok(DataType::CharacterLargeObject(
9771                            self.parse_optional_precision()?,
9772                        ))
9773                    } else {
9774                        Ok(DataType::Character(self.parse_optional_character_length()?))
9775                    }
9776                }
9777                Keyword::CHAR => {
9778                    if self.parse_keyword(Keyword::VARYING) {
9779                        Ok(DataType::CharVarying(
9780                            self.parse_optional_character_length()?,
9781                        ))
9782                    } else if self.parse_keywords(&[Keyword::LARGE, Keyword::OBJECT]) {
9783                        Ok(DataType::CharLargeObject(self.parse_optional_precision()?))
9784                    } else {
9785                        Ok(DataType::Char(self.parse_optional_character_length()?))
9786                    }
9787                }
9788                Keyword::CLOB => Ok(DataType::Clob(self.parse_optional_precision()?)),
9789                Keyword::BINARY => Ok(DataType::Binary(self.parse_optional_precision()?)),
9790                Keyword::VARBINARY => Ok(DataType::Varbinary(self.parse_optional_binary_length()?)),
9791                Keyword::BLOB => Ok(DataType::Blob(self.parse_optional_precision()?)),
9792                Keyword::TINYBLOB => Ok(DataType::TinyBlob),
9793                Keyword::MEDIUMBLOB => Ok(DataType::MediumBlob),
9794                Keyword::LONGBLOB => Ok(DataType::LongBlob),
9795                Keyword::BYTES => Ok(DataType::Bytes(self.parse_optional_precision()?)),
9796                Keyword::BIT => {
9797                    if self.parse_keyword(Keyword::VARYING) {
9798                        Ok(DataType::BitVarying(self.parse_optional_precision()?))
9799                    } else {
9800                        Ok(DataType::Bit(self.parse_optional_precision()?))
9801                    }
9802                }
9803                Keyword::VARBIT => Ok(DataType::VarBit(self.parse_optional_precision()?)),
9804                Keyword::UUID => Ok(DataType::Uuid),
9805                Keyword::DATE => Ok(DataType::Date),
9806                Keyword::DATE32 => Ok(DataType::Date32),
9807                Keyword::DATETIME => Ok(DataType::Datetime(self.parse_optional_precision()?)),
9808                Keyword::DATETIME64 => {
9809                    self.prev_token();
9810                    let (precision, time_zone) = self.parse_datetime_64()?;
9811                    Ok(DataType::Datetime64(precision, time_zone))
9812                }
9813                Keyword::TIMESTAMP => {
9814                    let precision = self.parse_optional_precision()?;
9815                    let tz = if self.parse_keyword(Keyword::WITH) {
9816                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9817                        TimezoneInfo::WithTimeZone
9818                    } else if self.parse_keyword(Keyword::WITHOUT) {
9819                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9820                        TimezoneInfo::WithoutTimeZone
9821                    } else {
9822                        TimezoneInfo::None
9823                    };
9824                    Ok(DataType::Timestamp(precision, tz))
9825                }
9826                Keyword::TIMESTAMPTZ => Ok(DataType::Timestamp(
9827                    self.parse_optional_precision()?,
9828                    TimezoneInfo::Tz,
9829                )),
9830                Keyword::TIMESTAMP_NTZ => Ok(DataType::TimestampNtz),
9831                Keyword::TIME => {
9832                    let precision = self.parse_optional_precision()?;
9833                    let tz = if self.parse_keyword(Keyword::WITH) {
9834                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9835                        TimezoneInfo::WithTimeZone
9836                    } else if self.parse_keyword(Keyword::WITHOUT) {
9837                        self.expect_keywords(&[Keyword::TIME, Keyword::ZONE])?;
9838                        TimezoneInfo::WithoutTimeZone
9839                    } else {
9840                        TimezoneInfo::None
9841                    };
9842                    Ok(DataType::Time(precision, tz))
9843                }
9844                Keyword::TIMETZ => Ok(DataType::Time(
9845                    self.parse_optional_precision()?,
9846                    TimezoneInfo::Tz,
9847                )),
9848                // Interval types can be followed by a complicated interval
9849                // qualifier that we don't currently support. See
9850                // parse_interval for a taste.
9851                Keyword::INTERVAL => Ok(DataType::Interval),
9852                Keyword::JSON => Ok(DataType::JSON),
9853                Keyword::JSONB => Ok(DataType::JSONB),
9854                Keyword::REGCLASS => Ok(DataType::Regclass),
9855                Keyword::STRING => Ok(DataType::String(self.parse_optional_precision()?)),
9856                Keyword::FIXEDSTRING => {
9857                    self.expect_token(&Token::LParen)?;
9858                    let character_length = self.parse_literal_uint()?;
9859                    self.expect_token(&Token::RParen)?;
9860                    Ok(DataType::FixedString(character_length))
9861                }
9862                Keyword::TEXT => Ok(DataType::Text),
9863                Keyword::TINYTEXT => Ok(DataType::TinyText),
9864                Keyword::MEDIUMTEXT => Ok(DataType::MediumText),
9865                Keyword::LONGTEXT => Ok(DataType::LongText),
9866                Keyword::BYTEA => Ok(DataType::Bytea),
9867                Keyword::NUMERIC => Ok(DataType::Numeric(
9868                    self.parse_exact_number_optional_precision_scale()?,
9869                )),
9870                Keyword::DECIMAL => Ok(DataType::Decimal(
9871                    self.parse_exact_number_optional_precision_scale()?,
9872                )),
9873                Keyword::DEC => Ok(DataType::Dec(
9874                    self.parse_exact_number_optional_precision_scale()?,
9875                )),
9876                Keyword::BIGNUMERIC => Ok(DataType::BigNumeric(
9877                    self.parse_exact_number_optional_precision_scale()?,
9878                )),
9879                Keyword::BIGDECIMAL => Ok(DataType::BigDecimal(
9880                    self.parse_exact_number_optional_precision_scale()?,
9881                )),
9882                Keyword::ENUM => Ok(DataType::Enum(self.parse_enum_values()?, None)),
9883                Keyword::ENUM8 => Ok(DataType::Enum(self.parse_enum_values()?, Some(8))),
9884                Keyword::ENUM16 => Ok(DataType::Enum(self.parse_enum_values()?, Some(16))),
9885                Keyword::SET => Ok(DataType::Set(self.parse_string_values()?)),
9886                Keyword::ARRAY => {
9887                    if dialect_of!(self is SnowflakeDialect) {
9888                        Ok(DataType::Array(ArrayElemTypeDef::None))
9889                    } else if dialect_of!(self is ClickHouseDialect) {
9890                        Ok(self.parse_sub_type(|internal_type| {
9891                            DataType::Array(ArrayElemTypeDef::Parenthesis(internal_type))
9892                        })?)
9893                    } else {
9894                        self.expect_token(&Token::Lt)?;
9895                        let (inside_type, _trailing_bracket) = self.parse_data_type_helper()?;
9896                        trailing_bracket = self.expect_closing_angle_bracket(_trailing_bracket)?;
9897                        Ok(DataType::Array(ArrayElemTypeDef::AngleBracket(Box::new(
9898                            inside_type,
9899                        ))))
9900                    }
9901                }
9902                Keyword::STRUCT if dialect_is!(dialect is DuckDbDialect) => {
9903                    self.prev_token();
9904                    let field_defs = self.parse_duckdb_struct_type_def()?;
9905                    Ok(DataType::Struct(field_defs, StructBracketKind::Parentheses))
9906                }
9907                Keyword::STRUCT if dialect_is!(dialect is BigQueryDialect | GenericDialect) => {
9908                    self.prev_token();
9909                    let (field_defs, _trailing_bracket) =
9910                        self.parse_struct_type_def(Self::parse_struct_field_def)?;
9911                    trailing_bracket = _trailing_bracket;
9912                    Ok(DataType::Struct(
9913                        field_defs,
9914                        StructBracketKind::AngleBrackets,
9915                    ))
9916                }
9917                Keyword::UNION if dialect_is!(dialect is DuckDbDialect | GenericDialect) => {
9918                    self.prev_token();
9919                    let fields = self.parse_union_type_def()?;
9920                    Ok(DataType::Union(fields))
9921                }
9922                Keyword::NULLABLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9923                    Ok(self.parse_sub_type(DataType::Nullable)?)
9924                }
9925                Keyword::LOWCARDINALITY if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9926                    Ok(self.parse_sub_type(DataType::LowCardinality)?)
9927                }
9928                Keyword::MAP if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9929                    self.prev_token();
9930                    let (key_data_type, value_data_type) = self.parse_click_house_map_def()?;
9931                    Ok(DataType::Map(
9932                        Box::new(key_data_type),
9933                        Box::new(value_data_type),
9934                    ))
9935                }
9936                Keyword::NESTED if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9937                    self.expect_token(&Token::LParen)?;
9938                    let field_defs = self.parse_comma_separated(Parser::parse_column_def)?;
9939                    self.expect_token(&Token::RParen)?;
9940                    Ok(DataType::Nested(field_defs))
9941                }
9942                Keyword::TUPLE if dialect_is!(dialect is ClickHouseDialect | GenericDialect) => {
9943                    self.prev_token();
9944                    let field_defs = self.parse_click_house_tuple_def()?;
9945                    Ok(DataType::Tuple(field_defs))
9946                }
9947                Keyword::TRIGGER => Ok(DataType::Trigger),
9948                Keyword::ANY if self.peek_keyword(Keyword::TYPE) => {
9949                    let _ = self.parse_keyword(Keyword::TYPE);
9950                    Ok(DataType::AnyType)
9951                }
9952                Keyword::TABLE => {
9953                    // an LParen after the TABLE keyword indicates that table columns are being defined
9954                    // whereas no LParen indicates an anonymous table expression will be returned
9955                    if self.peek_token() == Token::LParen {
9956                        let columns = self.parse_returns_table_columns()?;
9957                        Ok(DataType::Table(Some(columns)))
9958                    } else {
9959                        Ok(DataType::Table(None))
9960                    }
9961                }
9962                Keyword::SIGNED => {
9963                    if self.parse_keyword(Keyword::INTEGER) {
9964                        Ok(DataType::SignedInteger)
9965                    } else {
9966                        Ok(DataType::Signed)
9967                    }
9968                }
9969                Keyword::UNSIGNED => {
9970                    if self.parse_keyword(Keyword::INTEGER) {
9971                        Ok(DataType::UnsignedInteger)
9972                    } else {
9973                        Ok(DataType::Unsigned)
9974                    }
9975                }
9976                Keyword::TSVECTOR if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
9977                    Ok(DataType::TsVector)
9978                }
9979                Keyword::TSQUERY if dialect_is!(dialect is PostgreSqlDialect | GenericDialect) => {
9980                    Ok(DataType::TsQuery)
9981                }
9982                _ => {
9983                    self.prev_token();
9984                    let type_name = self.parse_object_name(false)?;
9985                    if let Some(modifiers) = self.parse_optional_type_modifiers()? {
9986                        Ok(DataType::Custom(type_name, modifiers))
9987                    } else {
9988                        Ok(DataType::Custom(type_name, vec![]))
9989                    }
9990                }
9991            },
9992            _ => self.expected_at("a data type name", next_token_index),
9993        }?;
9994
9995        if self.dialect.supports_array_typedef_with_brackets() {
9996            while self.consume_token(&Token::LBracket) {
9997                // Parse optional array data type size
9998                let size = self.maybe_parse(|p| p.parse_literal_uint())?;
9999                self.expect_token(&Token::RBracket)?;
10000                data = DataType::Array(ArrayElemTypeDef::SquareBracket(Box::new(data), size))
10001            }
10002        }
10003        Ok((data, trailing_bracket))
10004    }
10005
10006    fn parse_returns_table_column(&mut self) -> Result<ColumnDef, ParserError> {
10007        self.parse_column_def()
10008    }
10009
10010    fn parse_returns_table_columns(&mut self) -> Result<Vec<ColumnDef>, ParserError> {
10011        self.expect_token(&Token::LParen)?;
10012        let columns = self.parse_comma_separated(Parser::parse_returns_table_column)?;
10013        self.expect_token(&Token::RParen)?;
10014        Ok(columns)
10015    }
10016
10017    pub fn parse_string_values(&mut self) -> Result<Vec<String>, ParserError> {
10018        self.expect_token(&Token::LParen)?;
10019        let mut values = Vec::new();
10020        loop {
10021            let next_token = self.next_token();
10022            match next_token.token {
10023                Token::SingleQuotedString(value) => values.push(value),
10024                _ => self.expected("a string", next_token)?,
10025            }
10026            let next_token = self.next_token();
10027            match next_token.token {
10028                Token::Comma => (),
10029                Token::RParen => break,
10030                _ => self.expected(", or )", next_token)?,
10031            }
10032        }
10033        Ok(values)
10034    }
10035
10036    /// Strictly parse `identifier AS identifier`
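    ///
    /// # Example
    ///
    /// A minimal sketch; it assumes the `ident` and `alias` fields of
    /// [`IdentWithAlias`] are publicly accessible:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo AS bar").unwrap();
    /// let aliased = parser.parse_identifier_with_alias().unwrap();
    /// assert_eq!(aliased.ident.value, "foo");
    /// assert_eq!(aliased.alias.value, "bar");
    /// ```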
10037    pub fn parse_identifier_with_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
10038        let ident = self.parse_identifier()?;
10039        self.expect_keyword_is(Keyword::AS)?;
10040        let alias = self.parse_identifier()?;
10041        Ok(IdentWithAlias { ident, alias })
10042    }
10043
10044    /// Parse `identifier [AS] identifier` where the AS keyword is optional
10045    fn parse_identifier_with_optional_alias(&mut self) -> Result<IdentWithAlias, ParserError> {
10046        let ident = self.parse_identifier()?;
10047        let _after_as = self.parse_keyword(Keyword::AS);
10048        let alias = self.parse_identifier()?;
10049        Ok(IdentWithAlias { ident, alias })
10050    }
10051
10052    /// Parse comma-separated list of parenthesized queries for pipe operators
10053    fn parse_pipe_operator_queries(&mut self) -> Result<Vec<Query>, ParserError> {
10054        self.parse_comma_separated(|parser| {
10055            parser.expect_token(&Token::LParen)?;
10056            let query = parser.parse_query()?;
10057            parser.expect_token(&Token::RParen)?;
10058            Ok(*query)
10059        })
10060    }
10061
10062    /// Parse set quantifier for pipe operators that require DISTINCT. E.g. INTERSECT and EXCEPT
10063    fn parse_distinct_required_set_quantifier(
10064        &mut self,
10065        operator_name: &str,
10066    ) -> Result<SetQuantifier, ParserError> {
10067        let quantifier = self.parse_set_quantifier(&Some(SetOperator::Intersect));
10068        match quantifier {
10069            SetQuantifier::Distinct | SetQuantifier::DistinctByName => Ok(quantifier),
10070            _ => Err(ParserError::ParserError(format!(
10071                "{operator_name} pipe operator requires DISTINCT modifier",
10072            ))),
10073        }
10074    }
10075
10076    /// Parse optional identifier alias (with or without AS keyword)
10077    fn parse_identifier_optional_alias(&mut self) -> Result<Option<Ident>, ParserError> {
10078        if self.parse_keyword(Keyword::AS) {
10079            Ok(Some(self.parse_identifier()?))
10080        } else {
10081            // Check if the next token is an identifier (implicit alias)
10082            self.maybe_parse(|parser| parser.parse_identifier())
10083        }
10084    }
10085
10086    /// Optionally parses an alias for a select list item
10087    fn maybe_parse_select_item_alias(&mut self) -> Result<Option<Ident>, ParserError> {
10088        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
10089            parser.dialect.is_select_item_alias(explicit, kw, parser)
10090        }
10091        self.parse_optional_alias_inner(None, validator)
10092    }
10093
10094    /// Optionally parses an alias for a table like in `... FROM generate_series(1, 10) AS t (col)`.
10095    /// In this case, the alias is allowed to optionally name the columns in the table, in
10096    /// addition to the table itself.
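    ///
    /// # Example
    ///
    /// A sketch of the aliasing shape described above; the alias and column names
    /// are illustrative:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t (x, y)").unwrap();
    /// let alias = parser.maybe_parse_table_alias().unwrap().unwrap();
    /// assert_eq!(alias.name.value, "t");
    /// assert_eq!(alias.columns.len(), 2);
    /// ```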
10097    pub fn maybe_parse_table_alias(&mut self) -> Result<Option<TableAlias>, ParserError> {
10098        fn validator(explicit: bool, kw: &Keyword, parser: &mut Parser) -> bool {
10099            parser.dialect.is_table_factor_alias(explicit, kw, parser)
10100        }
10101        match self.parse_optional_alias_inner(None, validator)? {
10102            Some(name) => {
10103                let columns = self.parse_table_alias_column_defs()?;
10104                Ok(Some(TableAlias { name, columns }))
10105            }
10106            None => Ok(None),
10107        }
10108    }
10109
10110    fn parse_table_index_hints(&mut self) -> Result<Vec<TableIndexHints>, ParserError> {
10111        let mut hints = vec![];
10112        while let Some(hint_type) =
10113            self.parse_one_of_keywords(&[Keyword::USE, Keyword::IGNORE, Keyword::FORCE])
10114        {
10115            let hint_type = match hint_type {
10116                Keyword::USE => TableIndexHintType::Use,
10117                Keyword::IGNORE => TableIndexHintType::Ignore,
10118                Keyword::FORCE => TableIndexHintType::Force,
10119                _ => {
10120                    return self.expected(
10121                        "expected to match USE/IGNORE/FORCE keyword",
10122                        self.peek_token(),
10123                    )
10124                }
10125            };
10126            let index_type = match self.parse_one_of_keywords(&[Keyword::INDEX, Keyword::KEY]) {
10127                Some(Keyword::INDEX) => TableIndexType::Index,
10128                Some(Keyword::KEY) => TableIndexType::Key,
10129                _ => {
10130                    return self.expected("expected to match INDEX/KEY keyword", self.peek_token())
10131                }
10132            };
10133            let for_clause = if self.parse_keyword(Keyword::FOR) {
10134                let clause = if self.parse_keyword(Keyword::JOIN) {
10135                    TableIndexHintForClause::Join
10136                } else if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10137                    TableIndexHintForClause::OrderBy
10138                } else if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10139                    TableIndexHintForClause::GroupBy
10140                } else {
10141                    return self.expected(
10142                        "expected to match JOIN/ORDER BY/GROUP BY table hint in FOR clause",
10143                        self.peek_token(),
10144                    );
10145                };
10146                Some(clause)
10147            } else {
10148                None
10149            };
10150
10151            self.expect_token(&Token::LParen)?;
10152            let index_names = if self.peek_token().token != Token::RParen {
10153                self.parse_comma_separated(Parser::parse_identifier)?
10154            } else {
10155                vec![]
10156            };
10157            self.expect_token(&Token::RParen)?;
10158            hints.push(TableIndexHints {
10159                hint_type,
10160                index_type,
10161                for_clause,
10162                index_names,
10163            });
10164        }
10165        Ok(hints)
10166    }
10167
10168    /// Wrapper for parse_optional_alias_inner, kept for backwards-compatibility;
10169    /// new flows should use the context-specific methods such as `maybe_parse_select_item_alias`
10170    /// and `maybe_parse_table_alias`.
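    ///
    /// # Example
    ///
    /// A minimal sketch with an empty reserved-keyword list:
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("AS t").unwrap();
    /// let alias = parser.parse_optional_alias(&[]).unwrap();
    /// assert_eq!(alias, Some(Ident::new("t")));
    /// ```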
10171    pub fn parse_optional_alias(
10172        &mut self,
10173        reserved_kwds: &[Keyword],
10174    ) -> Result<Option<Ident>, ParserError> {
10175        fn validator(_explicit: bool, _kw: &Keyword, _parser: &mut Parser) -> bool {
10176            false
10177        }
10178        self.parse_optional_alias_inner(Some(reserved_kwds), validator)
10179    }
10180
10181    /// Parses an optional alias after a SQL element such as a select list item
10182    /// or a table name.
10183    ///
10184    /// This method accepts an optional list of reserved keywords or a function
10185    /// to call to validate if a keyword should be parsed as an alias, to allow
10186    /// callers to customize the parsing logic based on their context.
10187    fn parse_optional_alias_inner<F>(
10188        &mut self,
10189        reserved_kwds: Option<&[Keyword]>,
10190        validator: F,
10191    ) -> Result<Option<Ident>, ParserError>
10192    where
10193        F: Fn(bool, &Keyword, &mut Parser) -> bool,
10194    {
10195        let after_as = self.parse_keyword(Keyword::AS);
10196
10197        let next_token = self.next_token();
10198        match next_token.token {
10199            // By default, if a word is located after the `AS` keyword we consider it an alias
10200            // as long as it's not reserved.
10201            Token::Word(w)
10202                if after_as || reserved_kwds.is_some_and(|x| !x.contains(&w.keyword)) =>
10203            {
10204                Ok(Some(w.into_ident(next_token.span)))
10205            }
10206            // This pattern allows for customizing the acceptance of words as aliases based on the caller's
10207            // context, such as to what SQL element this word is a potential alias of (select item alias, table name
10208            // alias, etc.) or dialect-specific logic that goes beyond a simple list of reserved keywords.
10209            Token::Word(w) if validator(after_as, &w.keyword, self) => {
10210                Ok(Some(w.into_ident(next_token.span)))
10211            }
10212            // For backwards-compatibility, we accept quoted strings as aliases regardless of the context.
10213            Token::SingleQuotedString(s) => Ok(Some(Ident::with_quote('\'', s))),
10214            Token::DoubleQuotedString(s) => Ok(Some(Ident::with_quote('\"', s))),
10215            _ => {
10216                if after_as {
10217                    return self.expected("an identifier after AS", next_token);
10218                }
10219                self.prev_token();
10220                Ok(None) // no alias found
10221            }
10222        }
10223    }
10224
10225    pub fn parse_optional_group_by(&mut self) -> Result<Option<GroupByExpr>, ParserError> {
10226        if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
10227            let expressions = if self.parse_keyword(Keyword::ALL) {
10228                None
10229            } else {
10230                Some(self.parse_comma_separated(Parser::parse_group_by_expr)?)
10231            };
10232
10233            let mut modifiers = vec![];
10234            if self.dialect.supports_group_by_with_modifier() {
10235                loop {
10236                    if !self.parse_keyword(Keyword::WITH) {
10237                        break;
10238                    }
10239                    let keyword = self.expect_one_of_keywords(&[
10240                        Keyword::ROLLUP,
10241                        Keyword::CUBE,
10242                        Keyword::TOTALS,
10243                    ])?;
10244                    modifiers.push(match keyword {
10245                        Keyword::ROLLUP => GroupByWithModifier::Rollup,
10246                        Keyword::CUBE => GroupByWithModifier::Cube,
10247                        Keyword::TOTALS => GroupByWithModifier::Totals,
10248                        _ => {
10249                            return parser_err!(
10250                                "BUG: expected to match GroupBy modifier keyword",
10251                                self.peek_token().span.start
10252                            )
10253                        }
10254                    });
10255                }
10256            }
10257            if self.parse_keywords(&[Keyword::GROUPING, Keyword::SETS]) {
10258                self.expect_token(&Token::LParen)?;
10259                let result = self.parse_comma_separated(|p| {
10260                    if p.peek_token_ref().token == Token::LParen {
10261                        p.parse_tuple(true, true)
10262                    } else {
10263                        Ok(vec![p.parse_expr()?])
10264                    }
10265                })?;
10266                self.expect_token(&Token::RParen)?;
10267                modifiers.push(GroupByWithModifier::GroupingSets(Expr::GroupingSets(
10268                    result,
10269                )));
10270            };
10271            let group_by = match expressions {
10272                None => GroupByExpr::All(modifiers),
10273                Some(exprs) => GroupByExpr::Expressions(exprs, modifiers),
10274            };
10275            Ok(Some(group_by))
10276        } else {
10277            Ok(None)
10278        }
10279    }
10280
10281    pub fn parse_optional_order_by(&mut self) -> Result<Option<OrderBy>, ParserError> {
10282        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
10283            let order_by =
10284                if self.dialect.supports_order_by_all() && self.parse_keyword(Keyword::ALL) {
10285                    let order_by_options = self.parse_order_by_options()?;
10286                    OrderBy {
10287                        kind: OrderByKind::All(order_by_options),
10288                        interpolate: None,
10289                    }
10290                } else {
10291                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
10292                    let interpolate = if dialect_of!(self is ClickHouseDialect | GenericDialect) {
10293                        self.parse_interpolations()?
10294                    } else {
10295                        None
10296                    };
10297                    OrderBy {
10298                        kind: OrderByKind::Expressions(exprs),
10299                        interpolate,
10300                    }
10301                };
10302            Ok(Some(order_by))
10303        } else {
10304            Ok(None)
10305        }
10306    }
10307
10308    fn parse_optional_limit_clause(&mut self) -> Result<Option<LimitClause>, ParserError> {
10309        let mut offset = if self.parse_keyword(Keyword::OFFSET) {
10310            Some(self.parse_offset()?)
10311        } else {
10312            None
10313        };
10314
10315        let (limit, limit_by) = if self.parse_keyword(Keyword::LIMIT) {
10316            let expr = self.parse_limit()?;
10317
10318            if self.dialect.supports_limit_comma()
10319                && offset.is_none()
10320                && expr.is_some() // ALL not supported with comma
10321                && self.consume_token(&Token::Comma)
10322            {
10323                let offset = expr.ok_or_else(|| {
10324                    ParserError::ParserError(
10325                        "Missing offset for LIMIT <offset>, <limit>".to_string(),
10326                    )
10327                })?;
10328                return Ok(Some(LimitClause::OffsetCommaLimit {
10329                    offset,
10330                    limit: self.parse_expr()?,
10331                }));
10332            }
10333
10334            let limit_by = if dialect_of!(self is ClickHouseDialect | GenericDialect)
10335                && self.parse_keyword(Keyword::BY)
10336            {
10337                Some(self.parse_comma_separated(Parser::parse_expr)?)
10338            } else {
10339                None
10340            };
10341
10342            (Some(expr), limit_by)
10343        } else {
10344            (None, None)
10345        };
10346
10347        if offset.is_none() && limit.is_some() && self.parse_keyword(Keyword::OFFSET) {
10348            offset = Some(self.parse_offset()?);
10349        }
10350
10351        if offset.is_some() || (limit.is_some() && limit != Some(None)) || limit_by.is_some() {
10352            Ok(Some(LimitClause::LimitOffset {
10353                limit: limit.unwrap_or_default(),
10354                offset,
10355                limit_by: limit_by.unwrap_or_default(),
10356            }))
10357        } else {
10358            Ok(None)
10359        }
10360    }
10361
10362    /// Parse a table object for insertion
10363    /// e.g. `some_database.some_table` or `FUNCTION some_table_func(...)`
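    ///
    /// # Example
    ///
    /// A minimal sketch of the plain table-name case:
    ///
    /// ```rust
    /// use sqlparser::ast::TableObject;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("some_database.some_table").unwrap();
    /// match parser.parse_table_object().unwrap() {
    ///     TableObject::TableName(name) => assert_eq!(name.to_string(), "some_database.some_table"),
    ///     other => panic!("expected a table name, got {other:?}"),
    /// }
    /// ```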
10364    pub fn parse_table_object(&mut self) -> Result<TableObject, ParserError> {
10365        if self.dialect.supports_insert_table_function() && self.parse_keyword(Keyword::FUNCTION) {
10366            let fn_name = self.parse_object_name(false)?;
10367            self.parse_function_call(fn_name)
10368                .map(TableObject::TableFunction)
10369        } else {
10370            self.parse_object_name(false).map(TableObject::TableName)
10371        }
10372    }
10373
10374    /// Parse a possibly qualified, possibly quoted identifier, e.g.
10375    /// `foo` or `myschema."table"`
10376    ///
10377    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
10378    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
10379    /// in this context on BigQuery.
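    ///
    /// # Example
    ///
    /// A minimal sketch mirroring the qualified, quoted form mentioned above:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql(r#"myschema."table""#).unwrap();
    /// let name = parser.parse_object_name(false).unwrap();
    /// assert_eq!(name.to_string(), r#"myschema."table""#);
    /// ```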
10380    pub fn parse_object_name(&mut self, in_table_clause: bool) -> Result<ObjectName, ParserError> {
10381        self.parse_object_name_inner(in_table_clause, false)
10382    }
10383
10384    /// Parse a possibly qualified, possibly quoted identifier, e.g.
10385    /// `foo` or `myschema."table"`
10386    ///
10387    /// The `in_table_clause` parameter indicates whether the object name is a table in a FROM, JOIN,
10388    /// or similar table clause. Currently, this is used only to support unquoted hyphenated identifiers
10389    /// in this context on BigQuery.
10390    ///
10391    /// The `allow_wildcards` parameter indicates whether to allow for wildcards in the object name
10392    /// e.g. *, *.*, `foo`.*, or "foo"."bar"
10393    fn parse_object_name_inner(
10394        &mut self,
10395        in_table_clause: bool,
10396        allow_wildcards: bool,
10397    ) -> Result<ObjectName, ParserError> {
10398        let mut parts = vec![];
10399        if dialect_of!(self is BigQueryDialect) && in_table_clause {
10400            loop {
10401                let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
10402                parts.push(ObjectNamePart::Identifier(ident));
10403                if !self.consume_token(&Token::Period) && !end_with_period {
10404                    break;
10405                }
10406            }
10407        } else {
10408            loop {
10409                if allow_wildcards && self.peek_token().token == Token::Mul {
10410                    let span = self.next_token().span;
10411                    parts.push(ObjectNamePart::Identifier(Ident {
10412                        value: Token::Mul.to_string(),
10413                        quote_style: None,
10414                        span,
10415                    }));
10416                } else if dialect_of!(self is BigQueryDialect) && in_table_clause {
10417                    let (ident, end_with_period) = self.parse_unquoted_hyphenated_identifier()?;
10418                    parts.push(ObjectNamePart::Identifier(ident));
10419                    if !self.consume_token(&Token::Period) && !end_with_period {
10420                        break;
10421                    }
10422                } else if self.dialect.supports_object_name_double_dot_notation()
10423                    && parts.len() == 1
10424                    && matches!(self.peek_token().token, Token::Period)
10425                {
10426                    // Empty string here means default schema
10427                    parts.push(ObjectNamePart::Identifier(Ident::new("")));
10428                } else {
10429                    let ident = self.parse_identifier()?;
10430                    let part = if self
10431                        .dialect
10432                        .is_identifier_generating_function_name(&ident, &parts)
10433                    {
10434                        self.expect_token(&Token::LParen)?;
10435                        let args: Vec<FunctionArg> =
10436                            self.parse_comma_separated0(Self::parse_function_args, Token::RParen)?;
10437                        self.expect_token(&Token::RParen)?;
10438                        ObjectNamePart::Function(ObjectNamePartFunction { name: ident, args })
10439                    } else {
10440                        ObjectNamePart::Identifier(ident)
10441                    };
10442                    parts.push(part);
10443                }
10444
10445                if !self.consume_token(&Token::Period) {
10446                    break;
10447                }
10448            }
10449        }
10450
10451        // BigQuery accepts any number of quoted identifiers of a table name.
10452        // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#quoted_identifiers
10453        if dialect_of!(self is BigQueryDialect)
10454            && parts.iter().any(|part| {
10455                part.as_ident()
10456                    .is_some_and(|ident| ident.value.contains('.'))
10457            })
10458        {
10459            parts = parts
10460                .into_iter()
10461                .flat_map(|part| match part.as_ident() {
10462                    Some(ident) => ident
10463                        .value
10464                        .split('.')
10465                        .map(|value| {
10466                            ObjectNamePart::Identifier(Ident {
10467                                value: value.into(),
10468                                quote_style: ident.quote_style,
10469                                span: ident.span,
10470                            })
10471                        })
10472                        .collect::<Vec<_>>(),
10473                    None => vec![part],
10474                })
10475                .collect()
10476        }
10477
10478        Ok(ObjectName(parts))
10479    }
10480
10481    /// Parse identifiers
10482    pub fn parse_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
10483        let mut idents = vec![];
10484        loop {
10485            match &self.peek_token_ref().token {
10486                Token::Word(w) => {
10487                    idents.push(w.clone().into_ident(self.peek_token_ref().span));
10488                }
10489                Token::EOF | Token::Eq => break,
10490                _ => {}
10491            }
10492            self.advance_token();
10493        }
10494        Ok(idents)
10495    }
10496
10497    /// Parse identifiers of the form `ident1[.identN]*`
10498    ///
10499    /// Similar in functionality to [parse_identifiers], except that this function is
10500    /// much stricter: it only accepts a valid multipart identifier and fails if any
10501    /// extraneous tokens are encountered.
10502    ///
10503    /// For example:
10504    ///
10505    /// ```rust
10506    /// use sqlparser::ast::Ident;
10507    /// use sqlparser::dialect::GenericDialect;
10508    /// use sqlparser::parser::Parser;
10509    ///
10510    /// let dialect = GenericDialect {};
10511    /// let expected = vec![Ident::new("one"), Ident::new("two")];
10512    ///
10513    /// // expected usage
10514    /// let sql = "one.two";
10515    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10516    /// let actual = parser.parse_multipart_identifier().unwrap();
10517    /// assert_eq!(&actual, &expected);
10518    ///
10519    /// // parse_identifiers is looser about what it allows and parses successfully
10520    /// let sql = "one + two";
10521    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10522    /// let actual = parser.parse_identifiers().unwrap();
10523    /// assert_eq!(&actual, &expected);
10524    ///
10525    /// // expected to strictly fail due to + separator
10526    /// let sql = "one + two";
10527    /// let mut parser = Parser::new(&dialect).try_with_sql(sql).unwrap();
10528    /// let actual = parser.parse_multipart_identifier().unwrap_err();
10529    /// assert_eq!(
10530    ///     actual.to_string(),
10531    ///     "sql parser error: Unexpected token in identifier: +"
10532    /// );
10533    /// ```
10534    ///
10535    /// [parse_identifiers]: Parser::parse_identifiers
10536    pub fn parse_multipart_identifier(&mut self) -> Result<Vec<Ident>, ParserError> {
10537        let mut idents = vec![];
10538
10539        // expect at least one word for the identifier
10540        let next_token = self.next_token();
10541        match next_token.token {
10542            Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10543            Token::EOF => {
10544                return Err(ParserError::ParserError(
10545                    "Empty input when parsing identifier".to_string(),
10546                ))?
10547            }
10548            token => {
10549                return Err(ParserError::ParserError(format!(
10550                    "Unexpected token in identifier: {token}"
10551                )))?
10552            }
10553        };
10554
10555        // parse the optional subsequent parts, if any
10556        loop {
10557            match self.next_token().token {
10558                // ensure that optional period is succeeded by another identifier
10559                Token::Period => {
10560                    let next_token = self.next_token();
10561                    match next_token.token {
10562                        Token::Word(w) => idents.push(w.into_ident(next_token.span)),
10563                        Token::EOF => {
10564                            return Err(ParserError::ParserError(
10565                                "Trailing period in identifier".to_string(),
10566                            ))?
10567                        }
10568                        token => {
10569                            return Err(ParserError::ParserError(format!(
10570                                "Unexpected token following period in identifier: {token}"
10571                            )))?
10572                        }
10573                    }
10574                }
10575                Token::EOF => break,
10576                token => {
10577                    return Err(ParserError::ParserError(format!(
10578                        "Unexpected token in identifier: {token}"
10579                    )))?
10580                }
10581            }
10582        }
10583
10584        Ok(idents)
10585    }
10586
10587    /// Parse a simple one-word identifier (possibly quoted, possibly a keyword)
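    ///
    /// A minimal usage sketch showing both an unquoted and a single-quoted identifier:
    ///
    /// ```rust
    /// use sqlparser::ast::Ident;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    ///
    /// // unquoted word (keywords are also accepted here)
    /// let mut parser = Parser::new(&dialect).try_with_sql("foo").unwrap();
    /// assert_eq!(parser.parse_identifier().unwrap(), Ident::new("foo"));
    ///
    /// // a single-quoted string is returned as a quoted identifier
    /// let mut parser = Parser::new(&dialect).try_with_sql("'bar'").unwrap();
    /// assert_eq!(parser.parse_identifier().unwrap(), Ident::with_quote('\'', "bar"));
    /// ```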
10588    pub fn parse_identifier(&mut self) -> Result<Ident, ParserError> {
10589        let next_token = self.next_token();
10590        match next_token.token {
10591            Token::Word(w) => Ok(w.into_ident(next_token.span)),
10592            Token::SingleQuotedString(s) => Ok(Ident::with_quote('\'', s)),
10593            Token::DoubleQuotedString(s) => Ok(Ident::with_quote('\"', s)),
10594            _ => self.expected("identifier", next_token),
10595        }
10596    }
10597
10598    /// On BigQuery, hyphens are permitted in unquoted identifiers inside a FROM or
10599    /// TABLE clause.
10600    ///
10601    /// The first segment must be an ordinary unquoted identifier, e.g. it must not start
10602    /// with a digit. Subsequent segments must be either valid identifiers or
10603    /// integers, e.g. foo-123 is allowed, but foo-123a is not.
10604    ///
10605    /// [BigQuery-lexical](https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical)
10606    ///
10607    /// Returns a tuple of the identifier and a boolean indicating whether it ends with a period.
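    ///
    /// For example, in a BigQuery `FROM` clause the project part of a table
    /// reference may contain hyphens:
    ///
    /// ```sql
    /// SELECT * FROM my-project.my_dataset.my_table
    /// ```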
10608    fn parse_unquoted_hyphenated_identifier(&mut self) -> Result<(Ident, bool), ParserError> {
10609        match self.peek_token().token {
10610            Token::Word(w) => {
10611                let quote_style_is_none = w.quote_style.is_none();
10612                let mut requires_whitespace = false;
10613                let mut ident = w.into_ident(self.next_token().span);
10614                if quote_style_is_none {
10615                    while matches!(self.peek_token_no_skip().token, Token::Minus) {
10616                        self.next_token();
10617                        ident.value.push('-');
10618
10619                        let token = self
10620                            .next_token_no_skip()
10621                            .cloned()
10622                            .unwrap_or(TokenWithSpan::wrap(Token::EOF));
10623                        requires_whitespace = match token.token {
10624                            Token::Word(next_word) if next_word.quote_style.is_none() => {
10625                                ident.value.push_str(&next_word.value);
10626                                false
10627                            }
10628                            Token::Number(s, false) => {
10629                                // A number token can represent a decimal value ending with a period, e.g., `Number('123.')`.
10630                                // However, for an [ObjectName], it is part of a hyphenated identifier, e.g., `foo-123.bar`.
10631                                //
10632                                // If a number token is followed by a period, it is part of an [ObjectName].
10633                                // Return the identifier with `true` if the number token is followed by a period, indicating that
10634                                // parsing should continue for the next part of the hyphenated identifier.
10635                                if s.ends_with('.') {
10636                                    let Some(s) = s.split('.').next().filter(|s| {
10637                                        !s.is_empty() && s.chars().all(|c| c.is_ascii_digit())
10638                                    }) else {
10639                                        return self.expected(
10640                                            "continuation of hyphenated identifier",
10641                                            TokenWithSpan::new(Token::Number(s, false), token.span),
10642                                        );
10643                                    };
10644                                    ident.value.push_str(s);
10645                                    return Ok((ident, true));
10646                                } else {
10647                                    ident.value.push_str(&s);
10648                                }
10649                                // If next token is period, then it is part of an ObjectName and we don't expect whitespace
10650                                // after the number.
10651                                !matches!(self.peek_token().token, Token::Period)
10652                            }
10653                            _ => {
10654                                return self
10655                                    .expected("continuation of hyphenated identifier", token);
10656                            }
10657                        }
10658                    }
10659
10660                    // If the last segment was a number, we must check that it's followed by whitespace,
10661                    // otherwise foo-123a will be parsed as `foo-123` with the alias `a`.
10662                    if requires_whitespace {
10663                        let token = self.next_token();
10664                        if !matches!(token.token, Token::EOF | Token::Whitespace(_)) {
10665                            return self
10666                                .expected("whitespace following hyphenated identifier", token);
10667                        }
10668                    }
10669                }
10670                Ok((ident, false))
10671            }
10672            _ => Ok((self.parse_identifier()?, false)),
10673        }
10674    }
10675
10676    /// Parses a parenthesized, comma-separated list of column definitions within a view.
10677    fn parse_view_columns(&mut self) -> Result<Vec<ViewColumnDef>, ParserError> {
10678        if self.consume_token(&Token::LParen) {
10679            if self.peek_token().token == Token::RParen {
10680                self.next_token();
10681                Ok(vec![])
10682            } else {
10683                let cols = self.parse_comma_separated_with_trailing_commas(
10684                    Parser::parse_view_column,
10685                    self.dialect.supports_column_definition_trailing_commas(),
10686                    Self::is_reserved_for_column_alias,
10687                )?;
10688                self.expect_token(&Token::RParen)?;
10689                Ok(cols)
10690            }
10691        } else {
10692            Ok(vec![])
10693        }
10694    }
10695
10696    /// Parses a column definition within a view.
10697    fn parse_view_column(&mut self) -> Result<ViewColumnDef, ParserError> {
10698        let name = self.parse_identifier()?;
10699        let options = self.parse_view_column_options()?;
10700        let data_type = if dialect_of!(self is ClickHouseDialect) {
10701            Some(self.parse_data_type()?)
10702        } else {
10703            None
10704        };
10705        Ok(ViewColumnDef {
10706            name,
10707            data_type,
10708            options,
10709        })
10710    }
10711
10712    fn parse_view_column_options(&mut self) -> Result<Option<ColumnOptions>, ParserError> {
10713        let mut options = Vec::new();
10714        loop {
10715            let option = self.parse_optional_column_option()?;
10716            if let Some(option) = option {
10717                options.push(option);
10718            } else {
10719                break;
10720            }
10721        }
10722        if options.is_empty() {
10723            Ok(None)
10724        } else if self.dialect.supports_space_separated_column_options() {
10725            Ok(Some(ColumnOptions::SpaceSeparated(options)))
10726        } else {
10727            Ok(Some(ColumnOptions::CommaSeparated(options)))
10728        }
10729    }
10730
10731    /// Parses a parenthesized comma-separated list of unqualified, possibly quoted identifiers.
10732    /// For example: `(col1, "col 2", ...)`
10733    pub fn parse_parenthesized_column_list(
10734        &mut self,
10735        optional: IsOptional,
10736        allow_empty: bool,
10737    ) -> Result<Vec<Ident>, ParserError> {
10738        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| p.parse_identifier())
10739    }
10740
10741    /// Parses a parenthesized comma-separated list of index columns, which can be arbitrary
10742    /// expressions with ordering information (and an opclass in some dialects).
10743    fn parse_parenthesized_index_column_list(&mut self) -> Result<Vec<IndexColumn>, ParserError> {
10744        self.parse_parenthesized_column_list_inner(Mandatory, false, |p| {
10745            p.parse_create_index_expr()
10746        })
10747    }
10748
10749    /// Parses a parenthesized comma-separated list of qualified, possibly quoted identifiers.
10750    /// For example: `(db1.sc1.tbl1.col1, db1.sc1.tbl1."col 2", ...)`
10751    pub fn parse_parenthesized_qualified_column_list(
10752        &mut self,
10753        optional: IsOptional,
10754        allow_empty: bool,
10755    ) -> Result<Vec<ObjectName>, ParserError> {
10756        self.parse_parenthesized_column_list_inner(optional, allow_empty, |p| {
10757            p.parse_object_name(true)
10758        })
10759    }
10760
10761    /// Parses a parenthesized comma-separated list of columns using
10762    /// the provided function to parse each element.
10763    fn parse_parenthesized_column_list_inner<F, T>(
10764        &mut self,
10765        optional: IsOptional,
10766        allow_empty: bool,
10767        mut f: F,
10768    ) -> Result<Vec<T>, ParserError>
10769    where
10770        F: FnMut(&mut Parser) -> Result<T, ParserError>,
10771    {
10772        if self.consume_token(&Token::LParen) {
10773            if allow_empty && self.peek_token().token == Token::RParen {
10774                self.next_token();
10775                Ok(vec![])
10776            } else {
10777                let cols = self.parse_comma_separated(|p| f(p))?;
10778                self.expect_token(&Token::RParen)?;
10779                Ok(cols)
10780            }
10781        } else if optional == Optional {
10782            Ok(vec![])
10783        } else {
10784            self.expected("a list of columns in parentheses", self.peek_token())
10785        }
10786    }
10787
10788    /// Parses a parenthesized comma-separated list of table alias column definitions.
10789    fn parse_table_alias_column_defs(&mut self) -> Result<Vec<TableAliasColumnDef>, ParserError> {
10790        if self.consume_token(&Token::LParen) {
10791            let cols = self.parse_comma_separated(|p| {
10792                let name = p.parse_identifier()?;
10793                let data_type = p.maybe_parse(|p| p.parse_data_type())?;
10794                Ok(TableAliasColumnDef { name, data_type })
10795            })?;
10796            self.expect_token(&Token::RParen)?;
10797            Ok(cols)
10798        } else {
10799            Ok(vec![])
10800        }
10801    }
10802
10803    pub fn parse_precision(&mut self) -> Result<u64, ParserError> {
10804        self.expect_token(&Token::LParen)?;
10805        let n = self.parse_literal_uint()?;
10806        self.expect_token(&Token::RParen)?;
10807        Ok(n)
10808    }
10809
10810    pub fn parse_optional_precision(&mut self) -> Result<Option<u64>, ParserError> {
10811        if self.consume_token(&Token::LParen) {
10812            let n = self.parse_literal_uint()?;
10813            self.expect_token(&Token::RParen)?;
10814            Ok(Some(n))
10815        } else {
10816            Ok(None)
10817        }
10818    }
10819
10820    /// Parse datetime64 [1]
10821    /// Syntax
10822    /// ```sql
10823    /// DateTime64(precision[, timezone])
10824    /// ```
10825    ///
10826    /// [1]: https://clickhouse.com/docs/en/sql-reference/data-types/datetime64
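    ///
    /// A minimal usage sketch:
    ///
    /// ```rust
    /// use sqlparser::dialect::ClickHouseDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = ClickHouseDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("DATETIME64(3, 'UTC')")
    ///     .unwrap();
    /// assert_eq!(
    ///     parser.parse_datetime_64().unwrap(),
    ///     (3, Some("UTC".to_string()))
    /// );
    /// ```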
10827    pub fn parse_datetime_64(&mut self) -> Result<(u64, Option<String>), ParserError> {
10828        self.expect_keyword_is(Keyword::DATETIME64)?;
10829        self.expect_token(&Token::LParen)?;
10830        let precision = self.parse_literal_uint()?;
10831        let time_zone = if self.consume_token(&Token::Comma) {
10832            Some(self.parse_literal_string()?)
10833        } else {
10834            None
10835        };
10836        self.expect_token(&Token::RParen)?;
10837        Ok((precision, time_zone))
10838    }
10839
10840    pub fn parse_optional_character_length(
10841        &mut self,
10842    ) -> Result<Option<CharacterLength>, ParserError> {
10843        if self.consume_token(&Token::LParen) {
10844            let character_length = self.parse_character_length()?;
10845            self.expect_token(&Token::RParen)?;
10846            Ok(Some(character_length))
10847        } else {
10848            Ok(None)
10849        }
10850    }
10851
10852    pub fn parse_optional_binary_length(&mut self) -> Result<Option<BinaryLength>, ParserError> {
10853        if self.consume_token(&Token::LParen) {
10854            let binary_length = self.parse_binary_length()?;
10855            self.expect_token(&Token::RParen)?;
10856            Ok(Some(binary_length))
10857        } else {
10858            Ok(None)
10859        }
10860    }
10861
10862    pub fn parse_character_length(&mut self) -> Result<CharacterLength, ParserError> {
10863        if self.parse_keyword(Keyword::MAX) {
10864            return Ok(CharacterLength::Max);
10865        }
10866        let length = self.parse_literal_uint()?;
10867        let unit = if self.parse_keyword(Keyword::CHARACTERS) {
10868            Some(CharLengthUnits::Characters)
10869        } else if self.parse_keyword(Keyword::OCTETS) {
10870            Some(CharLengthUnits::Octets)
10871        } else {
10872            None
10873        };
10874        Ok(CharacterLength::IntegerLength { length, unit })
10875    }
10876
10877    pub fn parse_binary_length(&mut self) -> Result<BinaryLength, ParserError> {
10878        if self.parse_keyword(Keyword::MAX) {
10879            return Ok(BinaryLength::Max);
10880        }
10881        let length = self.parse_literal_uint()?;
10882        Ok(BinaryLength::IntegerLength { length })
10883    }
10884
10885    pub fn parse_optional_precision_scale(
10886        &mut self,
10887    ) -> Result<(Option<u64>, Option<u64>), ParserError> {
10888        if self.consume_token(&Token::LParen) {
10889            let n = self.parse_literal_uint()?;
10890            let scale = if self.consume_token(&Token::Comma) {
10891                Some(self.parse_literal_uint()?)
10892            } else {
10893                None
10894            };
10895            self.expect_token(&Token::RParen)?;
10896            Ok((Some(n), scale))
10897        } else {
10898            Ok((None, None))
10899        }
10900    }
10901
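    /// Parses an optional `(precision [, scale])` suffix for exact numeric
    /// types such as `NUMERIC(10, 2)`.
    ///
    /// A minimal usage sketch:
    ///
    /// ```rust
    /// use sqlparser::ast::ExactNumberInfo;
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect).try_with_sql("(10, 2)").unwrap();
    /// assert_eq!(
    ///     parser.parse_exact_number_optional_precision_scale().unwrap(),
    ///     ExactNumberInfo::PrecisionAndScale(10, 2)
    /// );
    /// ```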
10902    pub fn parse_exact_number_optional_precision_scale(
10903        &mut self,
10904    ) -> Result<ExactNumberInfo, ParserError> {
10905        if self.consume_token(&Token::LParen) {
10906            let precision = self.parse_literal_uint()?;
10907            let scale = if self.consume_token(&Token::Comma) {
10908                Some(self.parse_literal_uint()?)
10909            } else {
10910                None
10911            };
10912
10913            self.expect_token(&Token::RParen)?;
10914
10915            match scale {
10916                None => Ok(ExactNumberInfo::Precision(precision)),
10917                Some(scale) => Ok(ExactNumberInfo::PrecisionAndScale(precision, scale)),
10918            }
10919        } else {
10920            Ok(ExactNumberInfo::None)
10921        }
10922    }
10923
10924    pub fn parse_optional_type_modifiers(&mut self) -> Result<Option<Vec<String>>, ParserError> {
10925        if self.consume_token(&Token::LParen) {
10926            let mut modifiers = Vec::new();
10927            loop {
10928                let next_token = self.next_token();
10929                match next_token.token {
10930                    Token::Word(w) => modifiers.push(w.to_string()),
10931                    Token::Number(n, _) => modifiers.push(n),
10932                    Token::SingleQuotedString(s) => modifiers.push(s),
10933
10934                    Token::Comma => {
10935                        continue;
10936                    }
10937                    Token::RParen => {
10938                        break;
10939                    }
10940                    _ => self.expected("type modifiers", next_token)?,
10941                }
10942            }
10943
10944            Ok(Some(modifiers))
10945        } else {
10946            Ok(None)
10947        }
10948    }
10949
10950    /// Parse a parenthesized sub data type
10951    fn parse_sub_type<F>(&mut self, parent_type: F) -> Result<DataType, ParserError>
10952    where
10953        F: FnOnce(Box<DataType>) -> DataType,
10954    {
10955        self.expect_token(&Token::LParen)?;
10956        let inside_type = self.parse_data_type()?;
10957        self.expect_token(&Token::RParen)?;
10958        Ok(parent_type(inside_type.into()))
10959    }
10960
10961    /// Parse a DELETE statement, returning a `Box`ed SetExpr
10962    ///
10963    /// This is used to reduce the size of the stack frames in debug builds
10964    fn parse_delete_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
10965        Ok(Box::new(SetExpr::Delete(self.parse_delete()?)))
10966    }
10967
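    /// Parse the body of a `DELETE` statement; the `DELETE` keyword itself is
    /// expected to have been consumed already.
    ///
    /// Roughly the following shape is accepted (the available clauses vary by
    /// dialect, and BigQuery allows the `FROM` keyword to be omitted):
    ///
    /// ```sql
    /// DELETE [ table_name [, ...] ] FROM table_with_joins [, ...]
    ///     [ USING table_with_joins [, ...] ]
    ///     [ WHERE condition ]
    ///     [ RETURNING select_item [, ...] ]
    ///     [ ORDER BY order_by_expr [, ...] ]
    ///     [ LIMIT expr ]
    /// ```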
10968    pub fn parse_delete(&mut self) -> Result<Statement, ParserError> {
10969        let (tables, with_from_keyword) = if !self.parse_keyword(Keyword::FROM) {
10970            // `FROM` keyword is optional in BigQuery SQL.
10971            // https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#delete_statement
10972            if dialect_of!(self is BigQueryDialect | GenericDialect) {
10973                (vec![], false)
10974            } else {
10975                let tables = self.parse_comma_separated(|p| p.parse_object_name(false))?;
10976                self.expect_keyword_is(Keyword::FROM)?;
10977                (tables, true)
10978            }
10979        } else {
10980            (vec![], true)
10981        };
10982
10983        let from = self.parse_comma_separated(Parser::parse_table_and_joins)?;
10984        let using = if self.parse_keyword(Keyword::USING) {
10985            Some(self.parse_comma_separated(Parser::parse_table_and_joins)?)
10986        } else {
10987            None
10988        };
10989        let selection = if self.parse_keyword(Keyword::WHERE) {
10990            Some(self.parse_expr()?)
10991        } else {
10992            None
10993        };
10994        let returning = if self.parse_keyword(Keyword::RETURNING) {
10995            Some(self.parse_comma_separated(Parser::parse_select_item)?)
10996        } else {
10997            None
10998        };
10999        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11000            self.parse_comma_separated(Parser::parse_order_by_expr)?
11001        } else {
11002            vec![]
11003        };
11004        let limit = if self.parse_keyword(Keyword::LIMIT) {
11005            self.parse_limit()?
11006        } else {
11007            None
11008        };
11009
11010        Ok(Statement::Delete(Delete {
11011            tables,
11012            from: if with_from_keyword {
11013                FromTable::WithFromKeyword(from)
11014            } else {
11015                FromTable::WithoutKeyword(from)
11016            },
11017            using,
11018            selection,
11019            returning,
11020            order_by,
11021            limit,
11022        }))
11023    }
11024
11025    /// Parse a `KILL [CONNECTION | QUERY | MUTATION] processlist_id` statement.
11026    pub fn parse_kill(&mut self) -> Result<Statement, ParserError> {
11027        let modifier_keyword =
11028            self.parse_one_of_keywords(&[Keyword::CONNECTION, Keyword::QUERY, Keyword::MUTATION]);
11029
11030        let id = self.parse_literal_uint()?;
11031
11032        let modifier = match modifier_keyword {
11033            Some(Keyword::CONNECTION) => Some(KillType::Connection),
11034            Some(Keyword::QUERY) => Some(KillType::Query),
11035            Some(Keyword::MUTATION) => {
11036                if dialect_of!(self is ClickHouseDialect | GenericDialect) {
11037                    Some(KillType::Mutation)
11038                } else {
11039                    self.expected(
11040                        "Unsupported type for KILL, allowed: CONNECTION | QUERY",
11041                        self.peek_token(),
11042                    )?
11043                }
11044            }
11045            _ => None,
11046        };
11047
11048        Ok(Statement::Kill { modifier, id })
11049    }
11050
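    /// Parse an `EXPLAIN`/`DESCRIBE` statement; the initial keyword is expected
    /// to have been consumed already and is passed in as `describe_alias`.
    ///
    /// Some illustrative forms (the supported options vary by dialect):
    ///
    /// ```sql
    /// EXPLAIN ANALYZE VERBOSE SELECT * FROM t
    /// EXPLAIN QUERY PLAN SELECT * FROM t
    /// DESCRIBE TABLE t
    /// ```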
11051    pub fn parse_explain(
11052        &mut self,
11053        describe_alias: DescribeAlias,
11054    ) -> Result<Statement, ParserError> {
11055        let mut analyze = false;
11056        let mut verbose = false;
11057        let mut query_plan = false;
11058        let mut estimate = false;
11059        let mut format = None;
11060        let mut options = None;
11061
11062        // Note: DuckDB is compatible with PostgreSQL syntax for this statement,
11063        // although not all features may be implemented.
11064        if describe_alias == DescribeAlias::Explain
11065            && self.dialect.supports_explain_with_utility_options()
11066            && self.peek_token().token == Token::LParen
11067        {
11068            options = Some(self.parse_utility_options()?)
11069        } else if self.parse_keywords(&[Keyword::QUERY, Keyword::PLAN]) {
11070            query_plan = true;
11071        } else if self.parse_keyword(Keyword::ESTIMATE) {
11072            estimate = true;
11073        } else {
11074            analyze = self.parse_keyword(Keyword::ANALYZE);
11075            verbose = self.parse_keyword(Keyword::VERBOSE);
11076            if self.parse_keyword(Keyword::FORMAT) {
11077                format = Some(self.parse_analyze_format()?);
11078            }
11079        }
11080
11081        match self.maybe_parse(|parser| parser.parse_statement())? {
11082            Some(Statement::Explain { .. }) | Some(Statement::ExplainTable { .. }) => Err(
11083                ParserError::ParserError("Explain must be root of the plan".to_string()),
11084            ),
11085            Some(statement) => Ok(Statement::Explain {
11086                describe_alias,
11087                analyze,
11088                verbose,
11089                query_plan,
11090                estimate,
11091                statement: Box::new(statement),
11092                format,
11093                options,
11094            }),
11095            _ => {
11096                let hive_format =
11097                    match self.parse_one_of_keywords(&[Keyword::EXTENDED, Keyword::FORMATTED]) {
11098                        Some(Keyword::EXTENDED) => Some(HiveDescribeFormat::Extended),
11099                        Some(Keyword::FORMATTED) => Some(HiveDescribeFormat::Formatted),
11100                        _ => None,
11101                    };
11102
11103                let has_table_keyword = if self.dialect.describe_requires_table_keyword() {
11104                    // only allow the TABLE keyword in DESC|DESCRIBE statements
11105                    self.parse_keyword(Keyword::TABLE)
11106                } else {
11107                    false
11108                };
11109
11110                let table_name = self.parse_object_name(false)?;
11111                Ok(Statement::ExplainTable {
11112                    describe_alias,
11113                    hive_format,
11114                    has_table_keyword,
11115                    table_name,
11116                })
11117            }
11118        }
11119    }
11120
11121    /// Parse a query expression, i.e. a `SELECT` statement optionally
11122    /// preceded by some `WITH` CTE declarations and optionally followed
11123    /// by `ORDER BY`. Unlike some other `parse_...` methods, this one doesn't
11124    /// expect the initial keyword to have been consumed already.
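    ///
    /// A minimal usage sketch:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("WITH cte AS (SELECT 1) SELECT * FROM cte ORDER BY 1")
    ///     .unwrap();
    /// let query = parser.parse_query().unwrap();
    /// assert!(query.with.is_some());
    /// assert!(query.order_by.is_some());
    /// ```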
11125    pub fn parse_query(&mut self) -> Result<Box<Query>, ParserError> {
11126        let _guard = self.recursion_counter.try_decrease()?;
11127        let with = if self.parse_keyword(Keyword::WITH) {
11128            let with_token = self.get_current_token();
11129            Some(With {
11130                with_token: with_token.clone().into(),
11131                recursive: self.parse_keyword(Keyword::RECURSIVE),
11132                cte_tables: self.parse_comma_separated(Parser::parse_cte)?,
11133            })
11134        } else {
11135            None
11136        };
11137        if self.parse_keyword(Keyword::INSERT) {
11138            Ok(Query {
11139                with,
11140                body: self.parse_insert_setexpr_boxed()?,
11141                order_by: None,
11142                limit_clause: None,
11143                fetch: None,
11144                locks: vec![],
11145                for_clause: None,
11146                settings: None,
11147                format_clause: None,
11148                pipe_operators: vec![],
11149            }
11150            .into())
11151        } else if self.parse_keyword(Keyword::UPDATE) {
11152            Ok(Query {
11153                with,
11154                body: self.parse_update_setexpr_boxed()?,
11155                order_by: None,
11156                limit_clause: None,
11157                fetch: None,
11158                locks: vec![],
11159                for_clause: None,
11160                settings: None,
11161                format_clause: None,
11162                pipe_operators: vec![],
11163            }
11164            .into())
11165        } else if self.parse_keyword(Keyword::DELETE) {
11166            Ok(Query {
11167                with,
11168                body: self.parse_delete_setexpr_boxed()?,
11169                limit_clause: None,
11170                order_by: None,
11171                fetch: None,
11172                locks: vec![],
11173                for_clause: None,
11174                settings: None,
11175                format_clause: None,
11176                pipe_operators: vec![],
11177            }
11178            .into())
11179        } else {
11180            let body = self.parse_query_body(self.dialect.prec_unknown())?;
11181
11182            let order_by = self.parse_optional_order_by()?;
11183
11184            let limit_clause = self.parse_optional_limit_clause()?;
11185
11186            let settings = self.parse_settings()?;
11187
11188            let fetch = if self.parse_keyword(Keyword::FETCH) {
11189                Some(self.parse_fetch()?)
11190            } else {
11191                None
11192            };
11193
11194            let mut for_clause = None;
11195            let mut locks = Vec::new();
11196            while self.parse_keyword(Keyword::FOR) {
11197                if let Some(parsed_for_clause) = self.parse_for_clause()? {
11198                    for_clause = Some(parsed_for_clause);
11199                    break;
11200                } else {
11201                    locks.push(self.parse_lock()?);
11202                }
11203            }
11204            let format_clause = if dialect_of!(self is ClickHouseDialect | GenericDialect)
11205                && self.parse_keyword(Keyword::FORMAT)
11206            {
11207                if self.parse_keyword(Keyword::NULL) {
11208                    Some(FormatClause::Null)
11209                } else {
11210                    let ident = self.parse_identifier()?;
11211                    Some(FormatClause::Identifier(ident))
11212                }
11213            } else {
11214                None
11215            };
11216
11217            let pipe_operators = if self.dialect.supports_pipe_operator() {
11218                self.parse_pipe_operators()?
11219            } else {
11220                Vec::new()
11221            };
11222
11223            Ok(Query {
11224                with,
11225                body,
11226                order_by,
11227                limit_clause,
11228                fetch,
11229                locks,
11230                for_clause,
11231                settings,
11232                format_clause,
11233                pipe_operators,
11234            }
11235            .into())
11236        }
11237    }
11238
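    /// Parses a sequence of pipe operators (`|>`) that may follow a query body
    /// (e.g. BigQuery's pipe query syntax), such as:
    ///
    /// ```sql
    /// FROM t
    /// |> WHERE x > 0
    /// |> AGGREGATE COUNT(*) AS c GROUP BY x
    /// |> ORDER BY c
    /// ```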
11239    fn parse_pipe_operators(&mut self) -> Result<Vec<PipeOperator>, ParserError> {
11240        let mut pipe_operators = Vec::new();
11241
11242        while self.consume_token(&Token::VerticalBarRightAngleBracket) {
11243            let kw = self.expect_one_of_keywords(&[
11244                Keyword::SELECT,
11245                Keyword::EXTEND,
11246                Keyword::SET,
11247                Keyword::DROP,
11248                Keyword::AS,
11249                Keyword::WHERE,
11250                Keyword::LIMIT,
11251                Keyword::AGGREGATE,
11252                Keyword::ORDER,
11253                Keyword::TABLESAMPLE,
11254                Keyword::RENAME,
11255                Keyword::UNION,
11256                Keyword::INTERSECT,
11257                Keyword::EXCEPT,
11258                Keyword::CALL,
11259                Keyword::PIVOT,
11260                Keyword::UNPIVOT,
11261                Keyword::JOIN,
11262                Keyword::INNER,
11263                Keyword::LEFT,
11264                Keyword::RIGHT,
11265                Keyword::FULL,
11266                Keyword::CROSS,
11267            ])?;
11268            match kw {
11269                Keyword::SELECT => {
11270                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
11271                    pipe_operators.push(PipeOperator::Select { exprs })
11272                }
11273                Keyword::EXTEND => {
11274                    let exprs = self.parse_comma_separated(Parser::parse_select_item)?;
11275                    pipe_operators.push(PipeOperator::Extend { exprs })
11276                }
11277                Keyword::SET => {
11278                    let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
11279                    pipe_operators.push(PipeOperator::Set { assignments })
11280                }
11281                Keyword::DROP => {
11282                    let columns = self.parse_identifiers()?;
11283                    pipe_operators.push(PipeOperator::Drop { columns })
11284                }
11285                Keyword::AS => {
11286                    let alias = self.parse_identifier()?;
11287                    pipe_operators.push(PipeOperator::As { alias })
11288                }
11289                Keyword::WHERE => {
11290                    let expr = self.parse_expr()?;
11291                    pipe_operators.push(PipeOperator::Where { expr })
11292                }
11293                Keyword::LIMIT => {
11294                    let expr = self.parse_expr()?;
11295                    let offset = if self.parse_keyword(Keyword::OFFSET) {
11296                        Some(self.parse_expr()?)
11297                    } else {
11298                        None
11299                    };
11300                    pipe_operators.push(PipeOperator::Limit { expr, offset })
11301                }
11302                Keyword::AGGREGATE => {
11303                    let full_table_exprs = if self.peek_keyword(Keyword::GROUP) {
11304                        vec![]
11305                    } else {
11306                        self.parse_comma_separated(|parser| {
11307                            parser.parse_expr_with_alias_and_order_by()
11308                        })?
11309                    };
11310
11311                    let group_by_expr = if self.parse_keywords(&[Keyword::GROUP, Keyword::BY]) {
11312                        self.parse_comma_separated(|parser| {
11313                            parser.parse_expr_with_alias_and_order_by()
11314                        })?
11315                    } else {
11316                        vec![]
11317                    };
11318
11319                    pipe_operators.push(PipeOperator::Aggregate {
11320                        full_table_exprs,
11321                        group_by_expr,
11322                    })
11323                }
11324                Keyword::ORDER => {
11325                    self.expect_one_of_keywords(&[Keyword::BY])?;
11326                    let exprs = self.parse_comma_separated(Parser::parse_order_by_expr)?;
11327                    pipe_operators.push(PipeOperator::OrderBy { exprs })
11328                }
11329                Keyword::TABLESAMPLE => {
11330                    let sample = self.parse_table_sample(TableSampleModifier::TableSample)?;
11331                    pipe_operators.push(PipeOperator::TableSample { sample });
11332                }
11333                Keyword::RENAME => {
11334                    let mappings =
11335                        self.parse_comma_separated(Parser::parse_identifier_with_optional_alias)?;
11336                    pipe_operators.push(PipeOperator::Rename { mappings });
11337                }
11338                Keyword::UNION => {
11339                    let set_quantifier = self.parse_set_quantifier(&Some(SetOperator::Union));
11340                    let queries = self.parse_pipe_operator_queries()?;
11341                    pipe_operators.push(PipeOperator::Union {
11342                        set_quantifier,
11343                        queries,
11344                    });
11345                }
11346                Keyword::INTERSECT => {
11347                    let set_quantifier =
11348                        self.parse_distinct_required_set_quantifier("INTERSECT")?;
11349                    let queries = self.parse_pipe_operator_queries()?;
11350                    pipe_operators.push(PipeOperator::Intersect {
11351                        set_quantifier,
11352                        queries,
11353                    });
11354                }
11355                Keyword::EXCEPT => {
11356                    let set_quantifier = self.parse_distinct_required_set_quantifier("EXCEPT")?;
11357                    let queries = self.parse_pipe_operator_queries()?;
11358                    pipe_operators.push(PipeOperator::Except {
11359                        set_quantifier,
11360                        queries,
11361                    });
11362                }
11363                Keyword::CALL => {
11364                    let function_name = self.parse_object_name(false)?;
11365                    let function_expr = self.parse_function(function_name)?;
11366                    if let Expr::Function(function) = function_expr {
11367                        let alias = self.parse_identifier_optional_alias()?;
11368                        pipe_operators.push(PipeOperator::Call { function, alias });
11369                    } else {
11370                        return Err(ParserError::ParserError(
11371                            "Expected function call after CALL".to_string(),
11372                        ));
11373                    }
11374                }
11375                Keyword::PIVOT => {
11376                    self.expect_token(&Token::LParen)?;
11377                    let aggregate_functions =
11378                        self.parse_comma_separated(Self::parse_aliased_function_call)?;
11379                    self.expect_keyword_is(Keyword::FOR)?;
11380                    let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
11381                    self.expect_keyword_is(Keyword::IN)?;
11382
11383                    self.expect_token(&Token::LParen)?;
11384                    let value_source = if self.parse_keyword(Keyword::ANY) {
11385                        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
11386                            self.parse_comma_separated(Parser::parse_order_by_expr)?
11387                        } else {
11388                            vec![]
11389                        };
11390                        PivotValueSource::Any(order_by)
11391                    } else if self.peek_sub_query() {
11392                        PivotValueSource::Subquery(self.parse_query()?)
11393                    } else {
11394                        PivotValueSource::List(
11395                            self.parse_comma_separated(Self::parse_expr_with_alias)?,
11396                        )
11397                    };
11398                    self.expect_token(&Token::RParen)?;
11399                    self.expect_token(&Token::RParen)?;
11400
11401                    let alias = self.parse_identifier_optional_alias()?;
11402
11403                    pipe_operators.push(PipeOperator::Pivot {
11404                        aggregate_functions,
11405                        value_column,
11406                        value_source,
11407                        alias,
11408                    });
11409                }
11410                Keyword::UNPIVOT => {
11411                    self.expect_token(&Token::LParen)?;
11412                    let value_column = self.parse_identifier()?;
11413                    self.expect_keyword(Keyword::FOR)?;
11414                    let name_column = self.parse_identifier()?;
11415                    self.expect_keyword(Keyword::IN)?;
11416
11417                    self.expect_token(&Token::LParen)?;
11418                    let unpivot_columns = self.parse_comma_separated(Parser::parse_identifier)?;
11419                    self.expect_token(&Token::RParen)?;
11420
11421                    self.expect_token(&Token::RParen)?;
11422
11423                    let alias = self.parse_identifier_optional_alias()?;
11424
11425                    pipe_operators.push(PipeOperator::Unpivot {
11426                        value_column,
11427                        name_column,
11428                        unpivot_columns,
11429                        alias,
11430                    });
11431                }
11432                Keyword::JOIN
11433                | Keyword::INNER
11434                | Keyword::LEFT
11435                | Keyword::RIGHT
11436                | Keyword::FULL
11437                | Keyword::CROSS => {
11438                    self.prev_token();
11439                    let mut joins = self.parse_joins()?;
11440                    if joins.len() != 1 {
11441                        return Err(ParserError::ParserError(
11442                            "Join pipe operator must have a single join".to_string(),
11443                        ));
11444                    }
11445                    let join = joins.swap_remove(0);
11446                    pipe_operators.push(PipeOperator::Join(join))
11447                }
11448                unhandled => {
11449                    return Err(ParserError::ParserError(format!(
11450                    "`expect_one_of_keywords` further up allowed unhandled keyword: {unhandled:?}"
11451                )))
11452                }
11453            }
11454        }
11455        Ok(pipe_operators)
11456    }
11457
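    /// Parses an optional trailing `SETTINGS key = value [, ...]` clause
    /// (ClickHouse and the generic dialect), e.g.:
    ///
    /// ```sql
    /// SELECT * FROM t SETTINGS max_threads = 1, max_block_size = 10000
    /// ```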
11458    fn parse_settings(&mut self) -> Result<Option<Vec<Setting>>, ParserError> {
11459        let settings = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11460            && self.parse_keyword(Keyword::SETTINGS)
11461        {
11462            let key_values = self.parse_comma_separated(|p| {
11463                let key = p.parse_identifier()?;
11464                p.expect_token(&Token::Eq)?;
11465                let value = p.parse_expr()?;
11466                Ok(Setting { key, value })
11467            })?;
11468            Some(key_values)
11469        } else {
11470            None
11471        };
11472        Ok(settings)
11473    }
11474
11475    /// Parse a mssql `FOR [XML | JSON | BROWSE]` clause
11476    pub fn parse_for_clause(&mut self) -> Result<Option<ForClause>, ParserError> {
11477        if self.parse_keyword(Keyword::XML) {
11478            Ok(Some(self.parse_for_xml()?))
11479        } else if self.parse_keyword(Keyword::JSON) {
11480            Ok(Some(self.parse_for_json()?))
11481        } else if self.parse_keyword(Keyword::BROWSE) {
11482            Ok(Some(ForClause::Browse))
11483        } else {
11484            Ok(None)
11485        }
11486    }
11487
11488    /// Parse a mssql `FOR XML` clause
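    ///
    /// For example:
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR XML PATH('row'), ROOT('rows'), ELEMENTS
    /// ```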
11489    pub fn parse_for_xml(&mut self) -> Result<ForClause, ParserError> {
11490        let for_xml = if self.parse_keyword(Keyword::RAW) {
11491            let mut element_name = None;
11492            if self.peek_token().token == Token::LParen {
11493                self.expect_token(&Token::LParen)?;
11494                element_name = Some(self.parse_literal_string()?);
11495                self.expect_token(&Token::RParen)?;
11496            }
11497            ForXml::Raw(element_name)
11498        } else if self.parse_keyword(Keyword::AUTO) {
11499            ForXml::Auto
11500        } else if self.parse_keyword(Keyword::EXPLICIT) {
11501            ForXml::Explicit
11502        } else if self.parse_keyword(Keyword::PATH) {
11503            let mut element_name = None;
11504            if self.peek_token().token == Token::LParen {
11505                self.expect_token(&Token::LParen)?;
11506                element_name = Some(self.parse_literal_string()?);
11507                self.expect_token(&Token::RParen)?;
11508            }
11509            ForXml::Path(element_name)
11510        } else {
11511            return Err(ParserError::ParserError(
11512                "Expected FOR XML [RAW | AUTO | EXPLICIT | PATH ]".to_string(),
11513            ));
11514        };
11515        let mut elements = false;
11516        let mut binary_base64 = false;
11517        let mut root = None;
11518        let mut r#type = false;
11519        while self.peek_token().token == Token::Comma {
11520            self.next_token();
11521            if self.parse_keyword(Keyword::ELEMENTS) {
11522                elements = true;
11523            } else if self.parse_keyword(Keyword::BINARY) {
11524                self.expect_keyword_is(Keyword::BASE64)?;
11525                binary_base64 = true;
11526            } else if self.parse_keyword(Keyword::ROOT) {
11527                self.expect_token(&Token::LParen)?;
11528                root = Some(self.parse_literal_string()?);
11529                self.expect_token(&Token::RParen)?;
11530            } else if self.parse_keyword(Keyword::TYPE) {
11531                r#type = true;
11532            }
11533        }
11534        Ok(ForClause::Xml {
11535            for_xml,
11536            elements,
11537            binary_base64,
11538            root,
11539            r#type,
11540        })
11541    }
11542
11543    /// Parse a mssql `FOR JSON` clause
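    ///
    /// For example:
    ///
    /// ```sql
    /// SELECT id, name FROM t FOR JSON PATH, ROOT('rows'), INCLUDE_NULL_VALUES
    /// ```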
11544    pub fn parse_for_json(&mut self) -> Result<ForClause, ParserError> {
11545        let for_json = if self.parse_keyword(Keyword::AUTO) {
11546            ForJson::Auto
11547        } else if self.parse_keyword(Keyword::PATH) {
11548            ForJson::Path
11549        } else {
11550            return Err(ParserError::ParserError(
11551                "Expected FOR JSON [AUTO | PATH ]".to_string(),
11552            ));
11553        };
11554        let mut root = None;
11555        let mut include_null_values = false;
11556        let mut without_array_wrapper = false;
11557        while self.peek_token().token == Token::Comma {
11558            self.next_token();
11559            if self.parse_keyword(Keyword::ROOT) {
11560                self.expect_token(&Token::LParen)?;
11561                root = Some(self.parse_literal_string()?);
11562                self.expect_token(&Token::RParen)?;
11563            } else if self.parse_keyword(Keyword::INCLUDE_NULL_VALUES) {
11564                include_null_values = true;
11565            } else if self.parse_keyword(Keyword::WITHOUT_ARRAY_WRAPPER) {
11566                without_array_wrapper = true;
11567            }
11568        }
11569        Ok(ForClause::Json {
11570            for_json,
11571            root,
11572            include_null_values,
11573            without_array_wrapper,
11574        })
11575    }
11576
11577    /// Parse a CTE (`alias [( col1, col2, ... )] AS (subquery)`)
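    ///
    /// A minimal usage sketch; note that the `WITH` keyword is expected to have
    /// been consumed already:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("numbers (n) AS (SELECT 1)")
    ///     .unwrap();
    /// let cte = parser.parse_cte().unwrap();
    /// assert_eq!(cte.alias.name.value, "numbers");
    /// assert_eq!(cte.alias.columns.len(), 1);
    /// ```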
11578    pub fn parse_cte(&mut self) -> Result<Cte, ParserError> {
11579        let name = self.parse_identifier()?;
11580
11581        let mut cte = if self.parse_keyword(Keyword::AS) {
11582            let mut is_materialized = None;
11583            if dialect_of!(self is PostgreSqlDialect) {
11584                if self.parse_keyword(Keyword::MATERIALIZED) {
11585                    is_materialized = Some(CteAsMaterialized::Materialized);
11586                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11587                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
11588                }
11589            }
11590            self.expect_token(&Token::LParen)?;
11591
11592            let query = self.parse_query()?;
11593            let closing_paren_token = self.expect_token(&Token::RParen)?;
11594
11595            let alias = TableAlias {
11596                name,
11597                columns: vec![],
11598            };
11599            Cte {
11600                alias,
11601                query,
11602                from: None,
11603                materialized: is_materialized,
11604                closing_paren_token: closing_paren_token.into(),
11605            }
11606        } else {
11607            let columns = self.parse_table_alias_column_defs()?;
11608            self.expect_keyword_is(Keyword::AS)?;
11609            let mut is_materialized = None;
11610            if dialect_of!(self is PostgreSqlDialect) {
11611                if self.parse_keyword(Keyword::MATERIALIZED) {
11612                    is_materialized = Some(CteAsMaterialized::Materialized);
11613                } else if self.parse_keywords(&[Keyword::NOT, Keyword::MATERIALIZED]) {
11614                    is_materialized = Some(CteAsMaterialized::NotMaterialized);
11615                }
11616            }
11617            self.expect_token(&Token::LParen)?;
11618
11619            let query = self.parse_query()?;
11620            let closing_paren_token = self.expect_token(&Token::RParen)?;
11621
11622            let alias = TableAlias { name, columns };
11623            Cte {
11624                alias,
11625                query,
11626                from: None,
11627                materialized: is_materialized,
11628                closing_paren_token: closing_paren_token.into(),
11629            }
11630        };
11631        if self.parse_keyword(Keyword::FROM) {
11632            cte.from = Some(self.parse_identifier()?);
11633        }
11634        Ok(cte)
11635    }
11636
11637    /// Parse a "query body", which is an expression with roughly the
11638    /// following grammar:
11639    /// ```sql
11640    ///   query_body ::= restricted_select | '(' subquery ')' | set_operation
11641    ///   restricted_select ::= 'SELECT' [expr_list] [ from ] [ where ] [ groupby_having ]
11642    ///   subquery ::= query_body [ order_by_limit ]
11643    ///   set_operation ::= query_body { 'UNION' | 'EXCEPT' | 'INTERSECT' } [ 'ALL' ] query_body
11644    /// ```
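    ///
    /// Set operators are parsed with precedence: `INTERSECT` binds tighter than
    /// `UNION` and `EXCEPT`, which are left-associative. For example, the
    /// following is parsed as `a UNION (b INTERSECT c)`:
    ///
    /// ```sql
    /// SELECT * FROM a UNION SELECT * FROM b INTERSECT SELECT * FROM c
    /// ```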
11645    pub fn parse_query_body(&mut self, precedence: u8) -> Result<Box<SetExpr>, ParserError> {
11646        // We parse the expression using a Pratt parser, as in `parse_expr()`.
11647        // Start by parsing a restricted SELECT or a `(subquery)`:
11648        let expr = if self.peek_keyword(Keyword::SELECT)
11649            || (self.peek_keyword(Keyword::FROM) && self.dialect.supports_from_first_select())
11650        {
11651            SetExpr::Select(self.parse_select().map(Box::new)?)
11652        } else if self.consume_token(&Token::LParen) {
11653            // CTEs are not allowed here, but the parser currently accepts them
11654            let subquery = self.parse_query()?;
11655            self.expect_token(&Token::RParen)?;
11656            SetExpr::Query(subquery)
11657        } else if self.parse_keyword(Keyword::VALUES) {
11658            let is_mysql = dialect_of!(self is MySqlDialect);
11659            SetExpr::Values(self.parse_values(is_mysql)?)
11660        } else if self.parse_keyword(Keyword::TABLE) {
11661            SetExpr::Table(Box::new(self.parse_as_table()?))
11662        } else {
11663            return self.expected(
11664                "SELECT, VALUES, or a subquery in the query body",
11665                self.peek_token(),
11666            );
11667        };
11668
11669        self.parse_remaining_set_exprs(expr, precedence)
11670    }
11671
11672    /// Parse any extra set expressions that may be present in a query body
11673    ///
11674    /// (this is its own function to reduce required stack size in debug builds)
11675    fn parse_remaining_set_exprs(
11676        &mut self,
11677        mut expr: SetExpr,
11678        precedence: u8,
11679    ) -> Result<Box<SetExpr>, ParserError> {
11680        loop {
11681            // The query can be optionally followed by a set operator:
11682            let op = self.parse_set_operator(&self.peek_token().token);
11683            let next_precedence = match op {
11684                // UNION and EXCEPT have the same binding power and evaluate left-to-right
11685                Some(SetOperator::Union) | Some(SetOperator::Except) | Some(SetOperator::Minus) => {
11686                    10
11687                }
11688                // INTERSECT has higher precedence than UNION/EXCEPT
11689                Some(SetOperator::Intersect) => 20,
11690                // Unexpected token or EOF => stop parsing the query body
11691                None => break,
11692            };
11693            if precedence >= next_precedence {
11694                break;
11695            }
11696            self.next_token(); // skip past the set operator
11697            let set_quantifier = self.parse_set_quantifier(&op);
11698            expr = SetExpr::SetOperation {
11699                left: Box::new(expr),
11700                op: op.unwrap(),
11701                set_quantifier,
11702                right: self.parse_query_body(next_precedence)?,
11703            };
11704        }
11705
11706        Ok(expr.into())
11707    }
11708
11709    pub fn parse_set_operator(&mut self, token: &Token) -> Option<SetOperator> {
11710        match token {
11711            Token::Word(w) if w.keyword == Keyword::UNION => Some(SetOperator::Union),
11712            Token::Word(w) if w.keyword == Keyword::EXCEPT => Some(SetOperator::Except),
11713            Token::Word(w) if w.keyword == Keyword::INTERSECT => Some(SetOperator::Intersect),
11714            Token::Word(w) if w.keyword == Keyword::MINUS => Some(SetOperator::Minus),
11715            _ => None,
11716        }
11717    }
11718
11719    pub fn parse_set_quantifier(&mut self, op: &Option<SetOperator>) -> SetQuantifier {
11720        match op {
11721            Some(
11722                SetOperator::Except
11723                | SetOperator::Intersect
11724                | SetOperator::Union
11725                | SetOperator::Minus,
11726            ) => {
11727                if self.parse_keywords(&[Keyword::DISTINCT, Keyword::BY, Keyword::NAME]) {
11728                    SetQuantifier::DistinctByName
11729                } else if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11730                    SetQuantifier::ByName
11731                } else if self.parse_keyword(Keyword::ALL) {
11732                    if self.parse_keywords(&[Keyword::BY, Keyword::NAME]) {
11733                        SetQuantifier::AllByName
11734                    } else {
11735                        SetQuantifier::All
11736                    }
11737                } else if self.parse_keyword(Keyword::DISTINCT) {
11738                    SetQuantifier::Distinct
11739                } else {
11740                    SetQuantifier::None
11741                }
11742            }
11743            _ => SetQuantifier::None,
11744        }
11745    }
11746
11747    /// Parse a restricted `SELECT` statement (no CTEs / `UNION` / `ORDER BY`)
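    ///
    /// A minimal usage sketch:
    ///
    /// ```rust
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// let mut parser = Parser::new(&dialect)
    ///     .try_with_sql("SELECT a, b FROM t WHERE a > 1")
    ///     .unwrap();
    /// let select = parser.parse_select().unwrap();
    /// assert_eq!(select.projection.len(), 2);
    /// assert!(select.selection.is_some());
    /// ```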
11748    pub fn parse_select(&mut self) -> Result<Select, ParserError> {
11749        let mut from_first = None;
11750
11751        if self.dialect.supports_from_first_select() && self.peek_keyword(Keyword::FROM) {
11752            let from_token = self.expect_keyword(Keyword::FROM)?;
11753            let from = self.parse_table_with_joins()?;
11754            if !self.peek_keyword(Keyword::SELECT) {
11755                return Ok(Select {
11756                    select_token: AttachedToken(from_token),
11757                    distinct: None,
11758                    top: None,
11759                    top_before_distinct: false,
11760                    projection: vec![],
11761                    exclude: None,
11762                    into: None,
11763                    from,
11764                    lateral_views: vec![],
11765                    prewhere: None,
11766                    selection: None,
11767                    group_by: GroupByExpr::Expressions(vec![], vec![]),
11768                    cluster_by: vec![],
11769                    distribute_by: vec![],
11770                    sort_by: vec![],
11771                    having: None,
11772                    named_window: vec![],
11773                    window_before_qualify: false,
11774                    qualify: None,
11775                    value_table_mode: None,
11776                    connect_by: None,
11777                    flavor: SelectFlavor::FromFirstNoSelect,
11778                });
11779            }
11780            from_first = Some(from);
11781        }
11782
11783        let select_token = self.expect_keyword(Keyword::SELECT)?;
11784        let value_table_mode = self.parse_value_table_mode()?;
11785
11786        let mut top_before_distinct = false;
11787        let mut top = None;
11788        if self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11789            top = Some(self.parse_top()?);
11790            top_before_distinct = true;
11791        }
11792        let distinct = self.parse_all_or_distinct()?;
11793        if !self.dialect.supports_top_before_distinct() && self.parse_keyword(Keyword::TOP) {
11794            top = Some(self.parse_top()?);
11795        }
11796
11797        let projection =
11798            if self.dialect.supports_empty_projections() && self.peek_keyword(Keyword::FROM) {
11799                vec![]
11800            } else {
11801                self.parse_projection()?
11802            };
11803
11804        let exclude = if self.dialect.supports_select_exclude() {
11805            self.parse_optional_select_item_exclude()?
11806        } else {
11807            None
11808        };
11809
11810        let into = if self.parse_keyword(Keyword::INTO) {
11811            Some(self.parse_select_into()?)
11812        } else {
11813            None
11814        };
11815
11816        // Note that for keywords to be properly handled here, they need to be
11817        // added to `RESERVED_FOR_COLUMN_ALIAS` / `RESERVED_FOR_TABLE_ALIAS`;
11818        // otherwise they may be parsed as an alias as part of the `projection`
11819        // or `from`.
11820
11821        let (from, from_first) = if let Some(from) = from_first.take() {
11822            (from, true)
11823        } else if self.parse_keyword(Keyword::FROM) {
11824            (self.parse_table_with_joins()?, false)
11825        } else {
11826            (vec![], false)
11827        };
11828
11829        let mut lateral_views = vec![];
11830        loop {
11831            if self.parse_keywords(&[Keyword::LATERAL, Keyword::VIEW]) {
11832                let outer = self.parse_keyword(Keyword::OUTER);
11833                let lateral_view = self.parse_expr()?;
11834                let lateral_view_name = self.parse_object_name(false)?;
11835                let lateral_col_alias = self
11836                    .parse_comma_separated(|parser| {
11837                        parser.parse_optional_alias(&[
11838                            Keyword::WHERE,
11839                            Keyword::GROUP,
11840                            Keyword::CLUSTER,
11841                            Keyword::HAVING,
11842                            Keyword::LATERAL,
11843                        ]) // Keywords that terminate the alias list (they must not be parsed as aliases)
11844                    })?
11845                    .into_iter()
11846                    .flatten()
11847                    .collect();
11848
11849                lateral_views.push(LateralView {
11850                    lateral_view,
11851                    lateral_view_name,
11852                    lateral_col_alias,
11853                    outer,
11854                });
11855            } else {
11856                break;
11857            }
11858        }
11859
11860        let prewhere = if dialect_of!(self is ClickHouseDialect|GenericDialect)
11861            && self.parse_keyword(Keyword::PREWHERE)
11862        {
11863            Some(self.parse_expr()?)
11864        } else {
11865            None
11866        };
11867
11868        let selection = if self.parse_keyword(Keyword::WHERE) {
11869            Some(self.parse_expr()?)
11870        } else {
11871            None
11872        };
11873
11874        let group_by = self
11875            .parse_optional_group_by()?
11876            .unwrap_or_else(|| GroupByExpr::Expressions(vec![], vec![]));
11877
11878        let cluster_by = if self.parse_keywords(&[Keyword::CLUSTER, Keyword::BY]) {
11879            self.parse_comma_separated(Parser::parse_expr)?
11880        } else {
11881            vec![]
11882        };
11883
11884        let distribute_by = if self.parse_keywords(&[Keyword::DISTRIBUTE, Keyword::BY]) {
11885            self.parse_comma_separated(Parser::parse_expr)?
11886        } else {
11887            vec![]
11888        };
11889
11890        let sort_by = if self.parse_keywords(&[Keyword::SORT, Keyword::BY]) {
11891            self.parse_comma_separated(Parser::parse_order_by_expr)?
11892        } else {
11893            vec![]
11894        };
11895
11896        let having = if self.parse_keyword(Keyword::HAVING) {
11897            Some(self.parse_expr()?)
11898        } else {
11899            None
11900        };
11901
11902        // Accept QUALIFY and WINDOW in any order and flag accordingly.
11903        let (named_windows, qualify, window_before_qualify) = if self.parse_keyword(Keyword::WINDOW)
11904        {
11905            let named_windows = self.parse_comma_separated(Parser::parse_named_window)?;
11906            if self.parse_keyword(Keyword::QUALIFY) {
11907                (named_windows, Some(self.parse_expr()?), true)
11908            } else {
11909                (named_windows, None, true)
11910            }
11911        } else if self.parse_keyword(Keyword::QUALIFY) {
11912            let qualify = Some(self.parse_expr()?);
11913            if self.parse_keyword(Keyword::WINDOW) {
11914                (
11915                    self.parse_comma_separated(Parser::parse_named_window)?,
11916                    qualify,
11917                    false,
11918                )
11919            } else {
11920                (Default::default(), qualify, false)
11921            }
11922        } else {
11923            Default::default()
11924        };
11925
11926        let connect_by = if self.dialect.supports_connect_by()
11927            && self
11928                .parse_one_of_keywords(&[Keyword::START, Keyword::CONNECT])
11929                .is_some()
11930        {
11931            self.prev_token();
11932            Some(self.parse_connect_by()?)
11933        } else {
11934            None
11935        };
11936
11937        Ok(Select {
11938            select_token: AttachedToken(select_token),
11939            distinct,
11940            top,
11941            top_before_distinct,
11942            projection,
11943            exclude,
11944            into,
11945            from,
11946            lateral_views,
11947            prewhere,
11948            selection,
11949            group_by,
11950            cluster_by,
11951            distribute_by,
11952            sort_by,
11953            having,
11954            named_window: named_windows,
11955            window_before_qualify,
11956            qualify,
11957            value_table_mode,
11958            connect_by,
11959            flavor: if from_first {
11960                SelectFlavor::FromFirst
11961            } else {
11962                SelectFlavor::Standard
11963            },
11964        })
11965    }
11966
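    /// BigQuery only: parses the optional value-table modifier that can follow
    /// `SELECT`, e.g. `SELECT AS STRUCT ...`, `SELECT AS VALUE ...`, or
    /// `SELECT DISTINCT AS STRUCT ...`. Returns `None` for all other dialects.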
11967    fn parse_value_table_mode(&mut self) -> Result<Option<ValueTableMode>, ParserError> {
11968        if !dialect_of!(self is BigQueryDialect) {
11969            return Ok(None);
11970        }
11971
11972        let mode = if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::VALUE]) {
11973            Some(ValueTableMode::DistinctAsValue)
11974        } else if self.parse_keywords(&[Keyword::DISTINCT, Keyword::AS, Keyword::STRUCT]) {
11975            Some(ValueTableMode::DistinctAsStruct)
11976        } else if self.parse_keywords(&[Keyword::AS, Keyword::VALUE])
11977            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::VALUE])
11978        {
11979            Some(ValueTableMode::AsValue)
11980        } else if self.parse_keywords(&[Keyword::AS, Keyword::STRUCT])
11981            || self.parse_keywords(&[Keyword::ALL, Keyword::AS, Keyword::STRUCT])
11982        {
11983            Some(ValueTableMode::AsStruct)
11984        } else if self.parse_keyword(Keyword::AS) {
11985            self.expected("VALUE or STRUCT", self.peek_token())?
11986        } else {
11987            None
11988        };
11989
11990        Ok(mode)
11991    }
11992
11993    /// Invoke `f` after first setting the parser's `ParserState` to `state`.
11994    ///
11995    /// Upon return, restores the parser's state to what it started at.
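    ///
    /// For example, `parse_connect_by` below wraps expression parsing in
    /// `self.with_state(ParserState::ConnectBy, |parser| ...)` so that the
    /// `CONNECT BY` relationships are parsed with the dedicated parser state.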
11996    fn with_state<T, F>(&mut self, state: ParserState, mut f: F) -> Result<T, ParserError>
11997    where
11998        F: FnMut(&mut Parser) -> Result<T, ParserError>,
11999    {
12000        let current_state = self.state;
12001        self.state = state;
12002        let res = f(self);
12003        self.state = current_state;
12004        res
12005    }
12006
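    /// Parses a hierarchical-query clause in either order:
    /// `CONNECT BY ... START WITH ...` or `START WITH ... CONNECT BY ...`.
    ///
    /// A hedged sketch (marked `ignore`), assuming `SnowflakeDialect` (a dialect
    /// whose `supports_connect_by` is expected to be true) and the crate's
    /// public `Parser::parse_sql` helper:
    ///
    /// ```ignore
    /// use sqlparser::dialect::SnowflakeDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Illustrative only; the exact AST shape is defined by `ConnectBy`.
    /// let _ast = Parser::parse_sql(
    ///     &SnowflakeDialect {},
    ///     "SELECT emp_id FROM emp CONNECT BY PRIOR emp_id = mgr_id START WITH mgr_id IS NULL",
    /// );
    /// ```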
12007    pub fn parse_connect_by(&mut self) -> Result<ConnectBy, ParserError> {
12008        let (condition, relationships) = if self.parse_keywords(&[Keyword::CONNECT, Keyword::BY]) {
12009            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
12010                parser.parse_comma_separated(Parser::parse_expr)
12011            })?;
12012            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
12013            let condition = self.parse_expr()?;
12014            (condition, relationships)
12015        } else {
12016            self.expect_keywords(&[Keyword::START, Keyword::WITH])?;
12017            let condition = self.parse_expr()?;
12018            self.expect_keywords(&[Keyword::CONNECT, Keyword::BY])?;
12019            let relationships = self.with_state(ParserState::ConnectBy, |parser| {
12020                parser.parse_comma_separated(Parser::parse_expr)
12021            })?;
12022            (condition, relationships)
12023        };
12024        Ok(ConnectBy {
12025            condition,
12026            relationships,
12027        })
12028    }
12029
12030    /// Parse `CREATE TABLE x AS TABLE y`
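    ///
    /// A hedged sketch (marked `ignore`) of the statement shape this helper is
    /// invoked for, assuming the crate's public `Parser::parse_sql` helper and
    /// `GenericDialect`:
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // The name after `AS TABLE` may be `table` or `schema.table`.
    /// let _ast = Parser::parse_sql(&GenericDialect {}, "CREATE TABLE t2 AS TABLE s1.t1");
    /// ```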
12031    pub fn parse_as_table(&mut self) -> Result<Table, ParserError> {
12032        let token1 = self.next_token();
12033        let token2 = self.next_token();
12034        let token3 = self.next_token();
12035
12036        let table_name;
12037        let schema_name;
12038        if token2 == Token::Period {
12039            match token1.token {
12040                Token::Word(w) => {
12041                    schema_name = w.value;
12042                }
12043                _ => {
12044                    return self.expected("Schema name", token1);
12045                }
12046            }
12047            match token3.token {
12048                Token::Word(w) => {
12049                    table_name = w.value;
12050                }
12051                _ => {
12052                    return self.expected("Table name", token3);
12053                }
12054            }
12055            Ok(Table {
12056                table_name: Some(table_name),
12057                schema_name: Some(schema_name),
12058            })
12059        } else {
12060            match token1.token {
12061                Token::Word(w) => {
12062                    table_name = w.value;
12063                }
12064                _ => {
12065                    return self.expected("Table name", token1);
12066                }
12067            }
12068            Ok(Table {
12069                table_name: Some(table_name),
12070                schema_name: None,
12071            })
12072        }
12073    }
12074
12075    /// Parse a `SET ROLE` statement. Assumes the `SET` keyword has already been consumed.
12076    fn parse_set_role(
12077        &mut self,
12078        modifier: Option<ContextModifier>,
12079    ) -> Result<Statement, ParserError> {
12080        self.expect_keyword_is(Keyword::ROLE)?;
12081
12082        let role_name = if self.parse_keyword(Keyword::NONE) {
12083            None
12084        } else {
12085            Some(self.parse_identifier()?)
12086        };
12087        Ok(Statement::Set(Set::SetRole {
12088            context_modifier: modifier,
12089            role_name,
12090        }))
12091    }
12092
12093    fn parse_set_values(
12094        &mut self,
12095        parenthesized_assignment: bool,
12096    ) -> Result<Vec<Expr>, ParserError> {
12097        let mut values = vec![];
12098
12099        if parenthesized_assignment {
12100            self.expect_token(&Token::LParen)?;
12101        }
12102
12103        loop {
12104            let value = if let Some(expr) = self.try_parse_expr_sub_query()? {
12105                expr
12106            } else if let Ok(expr) = self.parse_expr() {
12107                expr
12108            } else {
12109                self.expected("variable value", self.peek_token())?
12110            };
12111
12112            values.push(value);
12113            if self.consume_token(&Token::Comma) {
12114                continue;
12115            }
12116
12117            if parenthesized_assignment {
12118                self.expect_token(&Token::RParen)?;
12119            }
12120            return Ok(values);
12121        }
12122    }
12123
12124    fn parse_context_modifier(&mut self) -> Option<ContextModifier> {
12125        let modifier =
12126            self.parse_one_of_keywords(&[Keyword::SESSION, Keyword::LOCAL, Keyword::GLOBAL])?;
12127
12128        Self::keyword_to_modifier(modifier)
12129    }
12130
12131    /// Parse a single SET statement assignment `var = expr`.
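    ///
    /// Accepts an optional scope keyword (`SESSION`, `LOCAL`, `GLOBAL`) before
    /// the variable name, and either `=` or `TO` as the assignment operator,
    /// e.g. `SESSION sql_mode = 'TRADITIONAL'` or `search_path TO 'public'`.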
12132    fn parse_set_assignment(&mut self) -> Result<SetAssignment, ParserError> {
12133        let scope = self.parse_context_modifier();
12134
12135        let name = if self.dialect.supports_parenthesized_set_variables()
12136            && self.consume_token(&Token::LParen)
12137        {
12138            // Parenthesized assignments are handled in the `parse_set` function after
12139            // trying to parse a list of assignments using this function.
12140            // If a dialect supports both and we find a `(`, we exit this function early.
12141            self.expected("Unparenthesized assignment", self.peek_token())?
12142        } else {
12143            self.parse_object_name(false)?
12144        };
12145
12146        if !(self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO)) {
12147            return self.expected("assignment operator", self.peek_token());
12148        }
12149
12150        let value = self.parse_expr()?;
12151
12152        Ok(SetAssignment { scope, name, value })
12153    }
12154
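    /// Parses the remainder of a `SET` statement; the `SET` keyword itself is
    /// expected to have been consumed already.
    ///
    /// A hedged sketch (marked `ignore`) of two of the forms handled below,
    /// assuming the crate's public `Parser::parse_sql` helper and `GenericDialect`:
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// // Without an assignment operator this maps to `Set::SetTimeZone` ...
    /// let _tz = Parser::parse_sql(&GenericDialect {}, "SET TIME ZONE 'UTC'");
    /// // ... while a single `name = value` pair maps to `Set::SingleAssignment`.
    /// let _var = Parser::parse_sql(&GenericDialect {}, "SET search_path = 'public'");
    /// ```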
12155    fn parse_set(&mut self) -> Result<Statement, ParserError> {
12156        let hivevar = self.parse_keyword(Keyword::HIVEVAR);
12157
12158        // The modifier is either `HIVEVAR:` or a context modifier (LOCAL, SESSION, etc.), never both
12159        let scope = if !hivevar {
12160            self.parse_context_modifier()
12161        } else {
12162            None
12163        };
12164
12165        if hivevar {
12166            self.expect_token(&Token::Colon)?;
12167        }
12168
12169        if let Some(set_role_stmt) = self.maybe_parse(|parser| parser.parse_set_role(scope))? {
12170            return Ok(set_role_stmt);
12171        }
12172
12173        // Handle special cases first
12174        if self.parse_keywords(&[Keyword::TIME, Keyword::ZONE])
12175            || self.parse_keyword(Keyword::TIMEZONE)
12176        {
12177            if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
12178                return Ok(Set::SingleAssignment {
12179                    scope,
12180                    hivevar,
12181                    variable: ObjectName::from(vec!["TIMEZONE".into()]),
12182                    values: self.parse_set_values(false)?,
12183                }
12184                .into());
12185            } else {
12186                // A shorthand form of SET TIME ZONE that doesn't require
12187                // the assignment operator. It's originally PostgreSQL-specific,
12188                // but we allow it for all dialects.
12189                return Ok(Set::SetTimeZone {
12190                    local: scope == Some(ContextModifier::Local),
12191                    value: self.parse_expr()?,
12192                }
12193                .into());
12194            }
12195        } else if self.dialect.supports_set_names() && self.parse_keyword(Keyword::NAMES) {
12196            if self.parse_keyword(Keyword::DEFAULT) {
12197                return Ok(Set::SetNamesDefault {}.into());
12198            }
12199            let charset_name = self.parse_identifier()?;
12200            let collation_name = if self.parse_one_of_keywords(&[Keyword::COLLATE]).is_some() {
12201                Some(self.parse_literal_string()?)
12202            } else {
12203                None
12204            };
12205
12206            return Ok(Set::SetNames {
12207                charset_name,
12208                collation_name,
12209            }
12210            .into());
12211        } else if self.parse_keyword(Keyword::CHARACTERISTICS) {
12212            self.expect_keywords(&[Keyword::AS, Keyword::TRANSACTION])?;
12213            return Ok(Set::SetTransaction {
12214                modes: self.parse_transaction_modes()?,
12215                snapshot: None,
12216                session: true,
12217            }
12218            .into());
12219        } else if self.parse_keyword(Keyword::TRANSACTION) {
12220            if self.parse_keyword(Keyword::SNAPSHOT) {
12221                let snapshot_id = self.parse_value()?.value;
12222                return Ok(Set::SetTransaction {
12223                    modes: vec![],
12224                    snapshot: Some(snapshot_id),
12225                    session: false,
12226                }
12227                .into());
12228            }
12229            return Ok(Set::SetTransaction {
12230                modes: self.parse_transaction_modes()?,
12231                snapshot: None,
12232                session: false,
12233            }
12234            .into());
12235        }
12236
12237        if self.dialect.supports_comma_separated_set_assignments() {
12238            if scope.is_some() {
12239                self.prev_token();
12240            }
12241
12242            if let Some(assignments) = self
12243                .maybe_parse(|parser| parser.parse_comma_separated(Parser::parse_set_assignment))?
12244            {
12245                return if assignments.len() > 1 {
12246                    Ok(Set::MultipleAssignments { assignments }.into())
12247                } else {
12248                    let SetAssignment { scope, name, value } =
12249                        assignments.into_iter().next().ok_or_else(|| {
12250                            ParserError::ParserError("Expected at least one assignment".to_string())
12251                        })?;
12252
12253                    Ok(Set::SingleAssignment {
12254                        scope,
12255                        hivevar,
12256                        variable: name,
12257                        values: vec![value],
12258                    }
12259                    .into())
12260                };
12261            }
12262        }
12263
12264        let variables = if self.dialect.supports_parenthesized_set_variables()
12265            && self.consume_token(&Token::LParen)
12266        {
12267            let vars = OneOrManyWithParens::Many(
12268                self.parse_comma_separated(|parser: &mut Parser<'a>| parser.parse_identifier())?
12269                    .into_iter()
12270                    .map(|ident| ObjectName::from(vec![ident]))
12271                    .collect(),
12272            );
12273            self.expect_token(&Token::RParen)?;
12274            vars
12275        } else {
12276            OneOrManyWithParens::One(self.parse_object_name(false)?)
12277        };
12278
12279        if self.consume_token(&Token::Eq) || self.parse_keyword(Keyword::TO) {
12280            let stmt = match variables {
12281                OneOrManyWithParens::One(var) => Set::SingleAssignment {
12282                    scope,
12283                    hivevar,
12284                    variable: var,
12285                    values: self.parse_set_values(false)?,
12286                },
12287                OneOrManyWithParens::Many(vars) => Set::ParenthesizedAssignments {
12288                    variables: vars,
12289                    values: self.parse_set_values(true)?,
12290                },
12291            };
12292
12293            return Ok(stmt.into());
12294        }
12295
12296        if self.dialect.supports_set_stmt_without_operator() {
12297            self.prev_token();
12298            return self.parse_set_session_params();
12299        };
12300
12301        self.expected("equals sign or TO", self.peek_token())
12302    }
12303
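    /// Parses the MSSQL-style session parameters that may follow a bare `SET`
    /// without an assignment operator, e.g. `SET STATISTICS IO ON`,
    /// `SET IDENTITY_INSERT my_table ON`, `SET OFFSETS SELECT, FROM OFF`, or
    /// the generic `SET <names> <expr>` fallback handled in the last branch.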
12304    pub fn parse_set_session_params(&mut self) -> Result<Statement, ParserError> {
12305        if self.parse_keyword(Keyword::STATISTICS) {
12306            let topic = match self.parse_one_of_keywords(&[
12307                Keyword::IO,
12308                Keyword::PROFILE,
12309                Keyword::TIME,
12310                Keyword::XML,
12311            ]) {
12312                Some(Keyword::IO) => SessionParamStatsTopic::IO,
12313                Some(Keyword::PROFILE) => SessionParamStatsTopic::Profile,
12314                Some(Keyword::TIME) => SessionParamStatsTopic::Time,
12315                Some(Keyword::XML) => SessionParamStatsTopic::Xml,
12316                _ => return self.expected("IO, PROFILE, TIME or XML", self.peek_token()),
12317            };
12318            let value = self.parse_session_param_value()?;
12319            Ok(
12320                Set::SetSessionParam(SetSessionParamKind::Statistics(SetSessionParamStatistics {
12321                    topic,
12322                    value,
12323                }))
12324                .into(),
12325            )
12326        } else if self.parse_keyword(Keyword::IDENTITY_INSERT) {
12327            let obj = self.parse_object_name(false)?;
12328            let value = self.parse_session_param_value()?;
12329            Ok(Set::SetSessionParam(SetSessionParamKind::IdentityInsert(
12330                SetSessionParamIdentityInsert { obj, value },
12331            ))
12332            .into())
12333        } else if self.parse_keyword(Keyword::OFFSETS) {
12334            let keywords = self.parse_comma_separated(|parser| {
12335                let next_token = parser.next_token();
12336                match &next_token.token {
12337                    Token::Word(w) => Ok(w.to_string()),
12338                    _ => parser.expected("SQL keyword", next_token),
12339                }
12340            })?;
12341            let value = self.parse_session_param_value()?;
12342            Ok(
12343                Set::SetSessionParam(SetSessionParamKind::Offsets(SetSessionParamOffsets {
12344                    keywords,
12345                    value,
12346                }))
12347                .into(),
12348            )
12349        } else {
12350            let names = self.parse_comma_separated(|parser| {
12351                let next_token = parser.next_token();
12352                match next_token.token {
12353                    Token::Word(w) => Ok(w.to_string()),
12354                    _ => parser.expected("Session param name", next_token),
12355                }
12356            })?;
12357            let value = self.parse_expr()?.to_string();
12358            Ok(
12359                Set::SetSessionParam(SetSessionParamKind::Generic(SetSessionParamGeneric {
12360                    names,
12361                    value,
12362                }))
12363                .into(),
12364            )
12365        }
12366    }
12367
12368    fn parse_session_param_value(&mut self) -> Result<SessionParamValue, ParserError> {
12369        if self.parse_keyword(Keyword::ON) {
12370            Ok(SessionParamValue::On)
12371        } else if self.parse_keyword(Keyword::OFF) {
12372            Ok(SessionParamValue::Off)
12373        } else {
12374            self.expected("ON or OFF", self.peek_token())
12375        }
12376    }
12377
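    /// Parses the remainder of a `SHOW` statement; `SHOW` itself is expected to
    /// have been consumed already.
    ///
    /// A hedged sketch (marked `ignore`), assuming `MySqlDialect` and the
    /// crate's public `Parser::parse_sql` helper:
    ///
    /// ```ignore
    /// use sqlparser::dialect::MySqlDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let _tables = Parser::parse_sql(&MySqlDialect {}, "SHOW TABLES");
    /// let _columns = Parser::parse_sql(&MySqlDialect {}, "SHOW COLUMNS FROM my_table");
    /// ```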
12378    pub fn parse_show(&mut self) -> Result<Statement, ParserError> {
12379        let terse = self.parse_keyword(Keyword::TERSE);
12380        let extended = self.parse_keyword(Keyword::EXTENDED);
12381        let full = self.parse_keyword(Keyword::FULL);
12382        let session = self.parse_keyword(Keyword::SESSION);
12383        let global = self.parse_keyword(Keyword::GLOBAL);
12384        let external = self.parse_keyword(Keyword::EXTERNAL);
12385        if self
12386            .parse_one_of_keywords(&[Keyword::COLUMNS, Keyword::FIELDS])
12387            .is_some()
12388        {
12389            Ok(self.parse_show_columns(extended, full)?)
12390        } else if self.parse_keyword(Keyword::TABLES) {
12391            Ok(self.parse_show_tables(terse, extended, full, external)?)
12392        } else if self.parse_keywords(&[Keyword::MATERIALIZED, Keyword::VIEWS]) {
12393            Ok(self.parse_show_views(terse, true)?)
12394        } else if self.parse_keyword(Keyword::VIEWS) {
12395            Ok(self.parse_show_views(terse, false)?)
12396        } else if self.parse_keyword(Keyword::FUNCTIONS) {
12397            Ok(self.parse_show_functions()?)
12398        } else if extended || full {
12399            Err(ParserError::ParserError(
12400                "EXTENDED/FULL are not supported with this type of SHOW query".to_string(),
12401            ))
12402        } else if self.parse_one_of_keywords(&[Keyword::CREATE]).is_some() {
12403            Ok(self.parse_show_create()?)
12404        } else if self.parse_keyword(Keyword::COLLATION) {
12405            Ok(self.parse_show_collation()?)
12406        } else if self.parse_keyword(Keyword::VARIABLES)
12407            && dialect_of!(self is MySqlDialect | GenericDialect)
12408        {
12409            Ok(Statement::ShowVariables {
12410                filter: self.parse_show_statement_filter()?,
12411                session,
12412                global,
12413            })
12414        } else if self.parse_keyword(Keyword::STATUS)
12415            && dialect_of!(self is MySqlDialect | GenericDialect)
12416        {
12417            Ok(Statement::ShowStatus {
12418                filter: self.parse_show_statement_filter()?,
12419                session,
12420                global,
12421            })
12422        } else if self.parse_keyword(Keyword::DATABASES) {
12423            self.parse_show_databases(terse)
12424        } else if self.parse_keyword(Keyword::SCHEMAS) {
12425            self.parse_show_schemas(terse)
12426        } else {
12427            Ok(Statement::ShowVariable {
12428                variable: self.parse_identifiers()?,
12429            })
12430        }
12431    }
12432
12433    fn parse_show_databases(&mut self, terse: bool) -> Result<Statement, ParserError> {
12434        let history = self.parse_keyword(Keyword::HISTORY);
12435        let show_options = self.parse_show_stmt_options()?;
12436        Ok(Statement::ShowDatabases {
12437            terse,
12438            history,
12439            show_options,
12440        })
12441    }
12442
12443    fn parse_show_schemas(&mut self, terse: bool) -> Result<Statement, ParserError> {
12444        let history = self.parse_keyword(Keyword::HISTORY);
12445        let show_options = self.parse_show_stmt_options()?;
12446        Ok(Statement::ShowSchemas {
12447            terse,
12448            history,
12449            show_options,
12450        })
12451    }
12452
12453    pub fn parse_show_create(&mut self) -> Result<Statement, ParserError> {
12454        let obj_type = match self.expect_one_of_keywords(&[
12455            Keyword::TABLE,
12456            Keyword::TRIGGER,
12457            Keyword::FUNCTION,
12458            Keyword::PROCEDURE,
12459            Keyword::EVENT,
12460            Keyword::VIEW,
12461        ])? {
12462            Keyword::TABLE => Ok(ShowCreateObject::Table),
12463            Keyword::TRIGGER => Ok(ShowCreateObject::Trigger),
12464            Keyword::FUNCTION => Ok(ShowCreateObject::Function),
12465            Keyword::PROCEDURE => Ok(ShowCreateObject::Procedure),
12466            Keyword::EVENT => Ok(ShowCreateObject::Event),
12467            Keyword::VIEW => Ok(ShowCreateObject::View),
12468            keyword => Err(ParserError::ParserError(format!(
12469                "Unable to map keyword to ShowCreateObject: {keyword:?}"
12470            ))),
12471        }?;
12472
12473        let obj_name = self.parse_object_name(false)?;
12474
12475        Ok(Statement::ShowCreate { obj_type, obj_name })
12476    }
12477
12478    pub fn parse_show_columns(
12479        &mut self,
12480        extended: bool,
12481        full: bool,
12482    ) -> Result<Statement, ParserError> {
12483        let show_options = self.parse_show_stmt_options()?;
12484        Ok(Statement::ShowColumns {
12485            extended,
12486            full,
12487            show_options,
12488        })
12489    }
12490
12491    fn parse_show_tables(
12492        &mut self,
12493        terse: bool,
12494        extended: bool,
12495        full: bool,
12496        external: bool,
12497    ) -> Result<Statement, ParserError> {
12498        let history = !external && self.parse_keyword(Keyword::HISTORY);
12499        let show_options = self.parse_show_stmt_options()?;
12500        Ok(Statement::ShowTables {
12501            terse,
12502            history,
12503            extended,
12504            full,
12505            external,
12506            show_options,
12507        })
12508    }
12509
12510    fn parse_show_views(
12511        &mut self,
12512        terse: bool,
12513        materialized: bool,
12514    ) -> Result<Statement, ParserError> {
12515        let show_options = self.parse_show_stmt_options()?;
12516        Ok(Statement::ShowViews {
12517            materialized,
12518            terse,
12519            show_options,
12520        })
12521    }
12522
12523    pub fn parse_show_functions(&mut self) -> Result<Statement, ParserError> {
12524        let filter = self.parse_show_statement_filter()?;
12525        Ok(Statement::ShowFunctions { filter })
12526    }
12527
12528    pub fn parse_show_collation(&mut self) -> Result<Statement, ParserError> {
12529        let filter = self.parse_show_statement_filter()?;
12530        Ok(Statement::ShowCollation { filter })
12531    }
12532
12533    pub fn parse_show_statement_filter(
12534        &mut self,
12535    ) -> Result<Option<ShowStatementFilter>, ParserError> {
12536        if self.parse_keyword(Keyword::LIKE) {
12537            Ok(Some(ShowStatementFilter::Like(
12538                self.parse_literal_string()?,
12539            )))
12540        } else if self.parse_keyword(Keyword::ILIKE) {
12541            Ok(Some(ShowStatementFilter::ILike(
12542                self.parse_literal_string()?,
12543            )))
12544        } else if self.parse_keyword(Keyword::WHERE) {
12545            Ok(Some(ShowStatementFilter::Where(self.parse_expr()?)))
12546        } else {
12547            self.maybe_parse(|parser| -> Result<String, ParserError> {
12548                parser.parse_literal_string()
12549            })?
12550            .map_or(Ok(None), |filter| {
12551                Ok(Some(ShowStatementFilter::NoKeyword(filter)))
12552            })
12553        }
12554    }
12555
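    /// Parses the object reference that follows `USE`, honoring the
    /// dialect-specific keywords handled below (e.g. Snowflake's
    /// `USE WAREHOUSE my_wh`, Databricks' `USE CATALOG my_catalog`, Hive's bare
    /// `USE DEFAULT`). Other dialects fall back to `Use::Object`.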
12556    pub fn parse_use(&mut self) -> Result<Statement, ParserError> {
12557        // Determine which keywords are recognized by the current dialect
12558        let parsed_keyword = if dialect_of!(self is HiveDialect) {
12559            // HiveDialect accepts a `USE DEFAULT;` statement without any database specified
12560            if self.parse_keyword(Keyword::DEFAULT) {
12561                return Ok(Statement::Use(Use::Default));
12562            }
12563            None // HiveDialect doesn't expect any other specific keyword after `USE`
12564        } else if dialect_of!(self is DatabricksDialect) {
12565            self.parse_one_of_keywords(&[Keyword::CATALOG, Keyword::DATABASE, Keyword::SCHEMA])
12566        } else if dialect_of!(self is SnowflakeDialect) {
12567            self.parse_one_of_keywords(&[
12568                Keyword::DATABASE,
12569                Keyword::SCHEMA,
12570                Keyword::WAREHOUSE,
12571                Keyword::ROLE,
12572                Keyword::SECONDARY,
12573            ])
12574        } else {
12575            None // No specific keywords for other dialects, including GenericDialect
12576        };
12577
12578        let result = if matches!(parsed_keyword, Some(Keyword::SECONDARY)) {
12579            self.parse_secondary_roles()?
12580        } else {
12581            let obj_name = self.parse_object_name(false)?;
12582            match parsed_keyword {
12583                Some(Keyword::CATALOG) => Use::Catalog(obj_name),
12584                Some(Keyword::DATABASE) => Use::Database(obj_name),
12585                Some(Keyword::SCHEMA) => Use::Schema(obj_name),
12586                Some(Keyword::WAREHOUSE) => Use::Warehouse(obj_name),
12587                Some(Keyword::ROLE) => Use::Role(obj_name),
12588                _ => Use::Object(obj_name),
12589            }
12590        };
12591
12592        Ok(Statement::Use(result))
12593    }
12594
12595    fn parse_secondary_roles(&mut self) -> Result<Use, ParserError> {
12596        self.expect_one_of_keywords(&[Keyword::ROLES, Keyword::ROLE])?;
12597        if self.parse_keyword(Keyword::NONE) {
12598            Ok(Use::SecondaryRoles(SecondaryRoles::None))
12599        } else if self.parse_keyword(Keyword::ALL) {
12600            Ok(Use::SecondaryRoles(SecondaryRoles::All))
12601        } else {
12602            let roles = self.parse_comma_separated(|parser| parser.parse_identifier())?;
12603            Ok(Use::SecondaryRoles(SecondaryRoles::List(roles)))
12604        }
12605    }
12606
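    /// Parses a table factor followed by any number of joins, e.g.
    /// `t1 LEFT JOIN t2 ON t1.id = t2.id JOIN t3 USING (id)`.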
12607    pub fn parse_table_and_joins(&mut self) -> Result<TableWithJoins, ParserError> {
12608        let relation = self.parse_table_factor()?;
12609        // Note that for keywords to be properly handled here, they need to be
12610        // added to `RESERVED_FOR_TABLE_ALIAS`; otherwise they may be parsed as
12611        // a table alias.
12612        let joins = self.parse_joins()?;
12613        Ok(TableWithJoins { relation, joins })
12614    }
12615
12616    fn parse_joins(&mut self) -> Result<Vec<Join>, ParserError> {
12617        let mut joins = vec![];
12618        loop {
12619            let global = self.parse_keyword(Keyword::GLOBAL);
12620            let join = if self.parse_keyword(Keyword::CROSS) {
12621                let join_operator = if self.parse_keyword(Keyword::JOIN) {
12622                    JoinOperator::CrossJoin
12623                } else if self.parse_keyword(Keyword::APPLY) {
12624                    // MSSQL extension, similar to CROSS JOIN LATERAL
12625                    JoinOperator::CrossApply
12626                } else {
12627                    return self.expected("JOIN or APPLY after CROSS", self.peek_token());
12628                };
12629                Join {
12630                    relation: self.parse_table_factor()?,
12631                    global,
12632                    join_operator,
12633                }
12634            } else if self.parse_keyword(Keyword::OUTER) {
12635                // MSSQL extension, similar to LEFT JOIN LATERAL .. ON 1=1
12636                self.expect_keyword_is(Keyword::APPLY)?;
12637                Join {
12638                    relation: self.parse_table_factor()?,
12639                    global,
12640                    join_operator: JoinOperator::OuterApply,
12641                }
12642            } else if self.parse_keyword(Keyword::ASOF) {
12643                self.expect_keyword_is(Keyword::JOIN)?;
12644                let relation = self.parse_table_factor()?;
12645                self.expect_keyword_is(Keyword::MATCH_CONDITION)?;
12646                let match_condition = self.parse_parenthesized(Self::parse_expr)?;
12647                Join {
12648                    relation,
12649                    global,
12650                    join_operator: JoinOperator::AsOf {
12651                        match_condition,
12652                        constraint: self.parse_join_constraint(false)?,
12653                    },
12654                }
12655            } else {
12656                let natural = self.parse_keyword(Keyword::NATURAL);
12657                let peek_keyword = if let Token::Word(w) = self.peek_token().token {
12658                    w.keyword
12659                } else {
12660                    Keyword::NoKeyword
12661                };
12662
12663                let join_operator_type = match peek_keyword {
12664                    Keyword::INNER | Keyword::JOIN => {
12665                        let inner = self.parse_keyword(Keyword::INNER); // [ INNER ]
12666                        self.expect_keyword_is(Keyword::JOIN)?;
12667                        if inner {
12668                            JoinOperator::Inner
12669                        } else {
12670                            JoinOperator::Join
12671                        }
12672                    }
12673                    kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
12674                        let _ = self.next_token(); // consume LEFT/RIGHT
12675                        let is_left = kw == Keyword::LEFT;
12676                        let join_type = self.parse_one_of_keywords(&[
12677                            Keyword::OUTER,
12678                            Keyword::SEMI,
12679                            Keyword::ANTI,
12680                            Keyword::JOIN,
12681                        ]);
12682                        match join_type {
12683                            Some(Keyword::OUTER) => {
12684                                self.expect_keyword_is(Keyword::JOIN)?;
12685                                if is_left {
12686                                    JoinOperator::LeftOuter
12687                                } else {
12688                                    JoinOperator::RightOuter
12689                                }
12690                            }
12691                            Some(Keyword::SEMI) => {
12692                                self.expect_keyword_is(Keyword::JOIN)?;
12693                                if is_left {
12694                                    JoinOperator::LeftSemi
12695                                } else {
12696                                    JoinOperator::RightSemi
12697                                }
12698                            }
12699                            Some(Keyword::ANTI) => {
12700                                self.expect_keyword_is(Keyword::JOIN)?;
12701                                if is_left {
12702                                    JoinOperator::LeftAnti
12703                                } else {
12704                                    JoinOperator::RightAnti
12705                                }
12706                            }
12707                            Some(Keyword::JOIN) => {
12708                                if is_left {
12709                                    JoinOperator::Left
12710                                } else {
12711                                    JoinOperator::Right
12712                                }
12713                            }
12714                            _ => {
12715                                return Err(ParserError::ParserError(format!(
12716                                    "expected OUTER, SEMI, ANTI or JOIN after {kw:?}"
12717                                )))
12718                            }
12719                        }
12720                    }
12721                    Keyword::ANTI => {
12722                        let _ = self.next_token(); // consume ANTI
12723                        self.expect_keyword_is(Keyword::JOIN)?;
12724                        JoinOperator::Anti
12725                    }
12726                    Keyword::SEMI => {
12727                        let _ = self.next_token(); // consume SEMI
12728                        self.expect_keyword_is(Keyword::JOIN)?;
12729                        JoinOperator::Semi
12730                    }
12731                    Keyword::FULL => {
12732                        let _ = self.next_token(); // consume FULL
12733                        let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
12734                        self.expect_keyword_is(Keyword::JOIN)?;
12735                        JoinOperator::FullOuter
12736                    }
12737                    Keyword::OUTER => {
12738                        return self.expected("LEFT, RIGHT, or FULL", self.peek_token());
12739                    }
12740                    Keyword::STRAIGHT_JOIN => {
12741                        let _ = self.next_token(); // consume STRAIGHT_JOIN
12742                        JoinOperator::StraightJoin
12743                    }
12744                    _ if natural => {
12745                        return self.expected("a join type after NATURAL", self.peek_token());
12746                    }
12747                    _ => break,
12748                };
12749                let mut relation = self.parse_table_factor()?;
12750
12751                if !self
12752                    .dialect
12753                    .supports_left_associative_joins_without_parens()
12754                    && self.peek_parens_less_nested_join()
12755                {
12756                    let joins = self.parse_joins()?;
12757                    relation = TableFactor::NestedJoin {
12758                        table_with_joins: Box::new(TableWithJoins { relation, joins }),
12759                        alias: None,
12760                    };
12761                }
12762
12763                let join_constraint = self.parse_join_constraint(natural)?;
12764                Join {
12765                    relation,
12766                    global,
12767                    join_operator: join_operator_type(join_constraint),
12768                }
12769            };
12770            joins.push(join);
12771        }
12772        Ok(joins)
12773    }
12774
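    /// Returns true if the next token starts another join, which is used above
    /// to detect parenthesis-less nested joins for dialects that do not treat
    /// such joins as left-associative.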
12775    fn peek_parens_less_nested_join(&self) -> bool {
12776        matches!(
12777            self.peek_token_ref().token,
12778            Token::Word(Word {
12779                keyword: Keyword::JOIN
12780                    | Keyword::INNER
12781                    | Keyword::LEFT
12782                    | Keyword::RIGHT
12783                    | Keyword::FULL,
12784                ..
12785            })
12786        )
12787    }
12788
12789    /// A table name or a parenthesized subquery, followed by optional `[AS] alias`
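    ///
    /// A hedged sketch (marked `ignore`) of a few of the forms handled below,
    /// assuming the crate's public `Parser::parse_sql` helper and `GenericDialect`:
    ///
    /// ```ignore
    /// use sqlparser::dialect::GenericDialect;
    /// use sqlparser::parser::Parser;
    ///
    /// let dialect = GenericDialect {};
    /// // A plain table name with an alias.
    /// let _t = Parser::parse_sql(&dialect, "SELECT * FROM mytable AS m");
    /// // A derived table (subquery) in parentheses.
    /// let _d = Parser::parse_sql(&dialect, "SELECT * FROM (SELECT 1) AS t");
    /// // UNNEST as a table factor.
    /// let _u = Parser::parse_sql(&dialect, "SELECT * FROM UNNEST(some_array) AS x");
    /// ```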
12790    pub fn parse_table_factor(&mut self) -> Result<TableFactor, ParserError> {
12791        if self.parse_keyword(Keyword::LATERAL) {
12792            // LATERAL must always be followed by a subquery or table function.
12793            if self.consume_token(&Token::LParen) {
12794                self.parse_derived_table_factor(Lateral)
12795            } else {
12796                let name = self.parse_object_name(false)?;
12797                self.expect_token(&Token::LParen)?;
12798                let args = self.parse_optional_args()?;
12799                let alias = self.maybe_parse_table_alias()?;
12800                Ok(TableFactor::Function {
12801                    lateral: true,
12802                    name,
12803                    args,
12804                    alias,
12805                })
12806            }
12807        } else if self.parse_keyword(Keyword::TABLE) {
12808            // parse table function (SELECT * FROM TABLE (<expr>) [ AS <alias> ])
12809            self.expect_token(&Token::LParen)?;
12810            let expr = self.parse_expr()?;
12811            self.expect_token(&Token::RParen)?;
12812            let alias = self.maybe_parse_table_alias()?;
12813            Ok(TableFactor::TableFunction { expr, alias })
12814        } else if self.consume_token(&Token::LParen) {
12815            // A left paren introduces either a derived table (i.e., a subquery)
12816            // or a nested join. It's nearly impossible to determine ahead of
12817            // time which it is... so we just try to parse both.
12818            //
12819            // Here's an example that demonstrates the complexity:
12820            //                     /-------------------------------------------------------\
12821            //                     | /-----------------------------------\                 |
12822            //     SELECT * FROM ( ( ( (SELECT 1) UNION (SELECT 2) ) AS t1 NATURAL JOIN t2 ) )
12823            //                   ^ ^ ^ ^
12824            //                   | | | |
12825            //                   | | | |
12826            //                   | | | (4) belongs to a SetExpr::Query inside the subquery
12827            //                   | | (3) starts a derived table (subquery)
12828            //                   | (2) starts a nested join
12829            //                   (1) an additional set of parens around a nested join
12830            //
12831
12832            // If the recently consumed '(' starts a derived table, the call to
12833            // `parse_derived_table_factor` below will return success after parsing the
12834            // subquery, followed by the closing ')', and the alias of the derived table.
12835            // In the example above this is case (3).
12836            if let Some(mut table) =
12837                self.maybe_parse(|parser| parser.parse_derived_table_factor(NotLateral))?
12838            {
12839                while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT])
12840                {
12841                    table = match kw {
12842                        Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
12843                        Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
12844                        _ => unreachable!(),
12845                    }
12846                }
12847                return Ok(table);
12848            }
12849
12850            // A parsing error from `parse_derived_table_factor` indicates that the '(' we've
12851            // recently consumed does not start a derived table (cases 1, 2, or 4).
12852            // `maybe_parse` will ignore such an error and rewind to be after the opening '('.
12853
12854            // Inside the parentheses we expect to find an (A) table factor
12855            // followed by some joins or (B) another level of nesting.
12856            let mut table_and_joins = self.parse_table_and_joins()?;
12857
12858            #[allow(clippy::if_same_then_else)]
12859            if !table_and_joins.joins.is_empty() {
12860                self.expect_token(&Token::RParen)?;
12861                let alias = self.maybe_parse_table_alias()?;
12862                Ok(TableFactor::NestedJoin {
12863                    table_with_joins: Box::new(table_and_joins),
12864                    alias,
12865                }) // (A)
12866            } else if let TableFactor::NestedJoin {
12867                table_with_joins: _,
12868                alias: _,
12869            } = &table_and_joins.relation
12870            {
12871                // (B): `table_and_joins` (what we found inside the parentheses)
12872                // is a nested join `(foo JOIN bar)`, not followed by other joins.
12873                self.expect_token(&Token::RParen)?;
12874                let alias = self.maybe_parse_table_alias()?;
12875                Ok(TableFactor::NestedJoin {
12876                    table_with_joins: Box::new(table_and_joins),
12877                    alias,
12878                })
12879            } else if dialect_of!(self is SnowflakeDialect | GenericDialect) {
12880                // Dialect-specific behavior: Snowflake diverges from the
12881                // standard and from most of the other implementations by
12882                // allowing extra parentheses not only around a join (B), but
12883                // around lone table names (e.g. `FROM (mytable [AS alias])`)
12884                // and around derived tables (e.g. `FROM ((SELECT ...)
12885                // [AS alias])`) as well.
12886                self.expect_token(&Token::RParen)?;
12887
12888                if let Some(outer_alias) = self.maybe_parse_table_alias()? {
12889                    // Snowflake also allows specifying an alias *after* parens
12890                    // e.g. `FROM (mytable) AS alias`
12891                    match &mut table_and_joins.relation {
12892                        TableFactor::Derived { alias, .. }
12893                        | TableFactor::Table { alias, .. }
12894                        | TableFactor::Function { alias, .. }
12895                        | TableFactor::UNNEST { alias, .. }
12896                        | TableFactor::JsonTable { alias, .. }
12897                        | TableFactor::XmlTable { alias, .. }
12898                        | TableFactor::OpenJsonTable { alias, .. }
12899                        | TableFactor::TableFunction { alias, .. }
12900                        | TableFactor::Pivot { alias, .. }
12901                        | TableFactor::Unpivot { alias, .. }
12902                        | TableFactor::MatchRecognize { alias, .. }
12903                        | TableFactor::NestedJoin { alias, .. } => {
12904                            // but not `FROM (mytable AS alias1) AS alias2`.
12905                            if let Some(inner_alias) = alias {
12906                                return Err(ParserError::ParserError(format!(
12907                                    "duplicate alias {inner_alias}"
12908                                )));
12909                            }
12910                            // Act as if the alias was specified normally next
12911                            // to the table name: `(mytable) AS alias` ->
12912                            // `(mytable AS alias)`
12913                            alias.replace(outer_alias);
12914                        }
12915                    };
12916                }
12917                // Do not store the extra set of parens in the AST
12918                Ok(table_and_joins.relation)
12919            } else {
12920                // The SQL spec prohibits derived tables and bare tables from
12921                // appearing alone in parentheses (e.g. `FROM (mytable)`)
12922                self.expected("joined table", self.peek_token())
12923            }
12924        } else if dialect_of!(self is SnowflakeDialect | DatabricksDialect | GenericDialect)
12925            && matches!(
12926                self.peek_tokens(),
12927                [
12928                    Token::Word(Word {
12929                        keyword: Keyword::VALUES,
12930                        ..
12931                    }),
12932                    Token::LParen
12933                ]
12934            )
12935        {
12936            self.expect_keyword_is(Keyword::VALUES)?;
12937
12938            // Snowflake and Databricks allow syntax like below:
12939            // SELECT * FROM VALUES (1, 'a'), (2, 'b') AS t (col1, col2)
12940            // where there are no parentheses around the VALUES clause.
12941            let values = SetExpr::Values(self.parse_values(false)?);
12942            let alias = self.maybe_parse_table_alias()?;
12943            Ok(TableFactor::Derived {
12944                lateral: false,
12945                subquery: Box::new(Query {
12946                    with: None,
12947                    body: Box::new(values),
12948                    order_by: None,
12949                    limit_clause: None,
12950                    fetch: None,
12951                    locks: vec![],
12952                    for_clause: None,
12953                    settings: None,
12954                    format_clause: None,
12955                    pipe_operators: vec![],
12956                }),
12957                alias,
12958            })
12959        } else if dialect_of!(self is BigQueryDialect | PostgreSqlDialect | GenericDialect)
12960            && self.parse_keyword(Keyword::UNNEST)
12961        {
12962            self.expect_token(&Token::LParen)?;
12963            let array_exprs = self.parse_comma_separated(Parser::parse_expr)?;
12964            self.expect_token(&Token::RParen)?;
12965
12966            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
12967            let alias = self.maybe_parse_table_alias()?;
12972
12973            let with_offset = self.expect_keywords(&[Keyword::WITH, Keyword::OFFSET]).is_ok();
12977
12978            let with_offset_alias = if with_offset {
12979                self.parse_optional_alias(keywords::RESERVED_FOR_COLUMN_ALIAS)?
12980            } else {
12981                None
12982            };
12987
12988            Ok(TableFactor::UNNEST {
12989                alias,
12990                array_exprs,
12991                with_offset,
12992                with_offset_alias,
12993                with_ordinality,
12994            })
12995        } else if self.parse_keyword_with_tokens(Keyword::JSON_TABLE, &[Token::LParen]) {
12996            let json_expr = self.parse_expr()?;
12997            self.expect_token(&Token::Comma)?;
12998            let json_path = self.parse_value()?.value;
12999            self.expect_keyword_is(Keyword::COLUMNS)?;
13000            self.expect_token(&Token::LParen)?;
13001            let columns = self.parse_comma_separated(Parser::parse_json_table_column_def)?;
13002            self.expect_token(&Token::RParen)?;
13003            self.expect_token(&Token::RParen)?;
13004            let alias = self.maybe_parse_table_alias()?;
13005            Ok(TableFactor::JsonTable {
13006                json_expr,
13007                json_path,
13008                columns,
13009                alias,
13010            })
13011        } else if self.parse_keyword_with_tokens(Keyword::OPENJSON, &[Token::LParen]) {
13012            self.prev_token();
13013            self.parse_open_json_table_factor()
13014        } else if self.parse_keyword_with_tokens(Keyword::XMLTABLE, &[Token::LParen]) {
13015            self.prev_token();
13016            self.parse_xml_table_factor()
13017        } else {
13018            let name = self.parse_object_name(true)?;
13019
13020            let json_path = match self.peek_token().token {
13021                Token::LBracket if self.dialect.supports_partiql() => Some(self.parse_json_path()?),
13022                _ => None,
13023            };
13024
13025            let partitions: Vec<Ident> = if dialect_of!(self is MySqlDialect | GenericDialect)
13026                && self.parse_keyword(Keyword::PARTITION)
13027            {
13028                self.parse_parenthesized_identifiers()?
13029            } else {
13030                vec![]
13031            };
13032
13033            // Parse potential version qualifier
13034            let version = self.maybe_parse_table_version()?;
13035
13036            // Postgres, MSSQL, ClickHouse: table-valued functions:
13037            let args = if self.consume_token(&Token::LParen) {
13038                Some(self.parse_table_function_args()?)
13039            } else {
13040                None
13041            };
13042
13043            let with_ordinality = self.parse_keywords(&[Keyword::WITH, Keyword::ORDINALITY]);
13044
13045            let mut sample = None;
13046            if self.dialect.supports_table_sample_before_alias() {
13047                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
13048                    sample = Some(TableSampleKind::BeforeTableAlias(parsed_sample));
13049                }
13050            }
13051
13052            let alias = self.maybe_parse_table_alias()?;
13053
            // MySQL-specific table hints:
13055            let index_hints = if self.dialect.supports_table_hints() {
13056                self.maybe_parse(|p| p.parse_table_index_hints())?
13057                    .unwrap_or(vec![])
13058            } else {
13059                vec![]
13060            };
13061
13062            // MSSQL-specific table hints:
13063            let mut with_hints = vec![];
13064            if self.parse_keyword(Keyword::WITH) {
13065                if self.consume_token(&Token::LParen) {
13066                    with_hints = self.parse_comma_separated(Parser::parse_expr)?;
13067                    self.expect_token(&Token::RParen)?;
13068                } else {
13069                    // rewind, as WITH may belong to the next statement's CTE
13070                    self.prev_token();
13071                }
13072            };
13073
13074            if !self.dialect.supports_table_sample_before_alias() {
13075                if let Some(parsed_sample) = self.maybe_parse_table_sample()? {
13076                    sample = Some(TableSampleKind::AfterTableAlias(parsed_sample));
13077                }
13078            }
13079
13080            let mut table = TableFactor::Table {
13081                name,
13082                alias,
13083                args,
13084                with_hints,
13085                version,
13086                partitions,
13087                with_ordinality,
13088                json_path,
13089                sample,
13090                index_hints,
13091            };
13092
13093            while let Some(kw) = self.parse_one_of_keywords(&[Keyword::PIVOT, Keyword::UNPIVOT]) {
13094                table = match kw {
13095                    Keyword::PIVOT => self.parse_pivot_table_factor(table)?,
13096                    Keyword::UNPIVOT => self.parse_unpivot_table_factor(table)?,
13097                    _ => unreachable!(),
13098                }
13099            }
13100
13101            if self.dialect.supports_match_recognize()
13102                && self.parse_keyword(Keyword::MATCH_RECOGNIZE)
13103            {
13104                table = self.parse_match_recognize(table)?;
13105            }
13106
13107            Ok(table)
13108        }
13109    }
13110
13111    fn maybe_parse_table_sample(&mut self) -> Result<Option<Box<TableSample>>, ParserError> {
13112        let modifier = if self.parse_keyword(Keyword::TABLESAMPLE) {
13113            TableSampleModifier::TableSample
13114        } else if self.parse_keyword(Keyword::SAMPLE) {
13115            TableSampleModifier::Sample
13116        } else {
13117            return Ok(None);
13118        };
13119        self.parse_table_sample(modifier).map(Some)
13120    }
13121
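    /// Parses the body of a `TABLESAMPLE`/`SAMPLE` clause, assuming the modifier
    /// keyword was already consumed by `maybe_parse_table_sample`.
    ///
    /// An illustrative example (values are placeholders):
    ///
    /// ```sql
    /// TABLESAMPLE BERNOULLI (10 PERCENT) REPEATABLE (42)
    /// ```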
13122    fn parse_table_sample(
13123        &mut self,
13124        modifier: TableSampleModifier,
13125    ) -> Result<Box<TableSample>, ParserError> {
13126        let name = match self.parse_one_of_keywords(&[
13127            Keyword::BERNOULLI,
13128            Keyword::ROW,
13129            Keyword::SYSTEM,
13130            Keyword::BLOCK,
13131        ]) {
13132            Some(Keyword::BERNOULLI) => Some(TableSampleMethod::Bernoulli),
13133            Some(Keyword::ROW) => Some(TableSampleMethod::Row),
13134            Some(Keyword::SYSTEM) => Some(TableSampleMethod::System),
13135            Some(Keyword::BLOCK) => Some(TableSampleMethod::Block),
13136            _ => None,
13137        };
13138
13139        let parenthesized = self.consume_token(&Token::LParen);
13140
13141        let (quantity, bucket) = if parenthesized && self.parse_keyword(Keyword::BUCKET) {
13142            let selected_bucket = self.parse_number_value()?.value;
13143            self.expect_keywords(&[Keyword::OUT, Keyword::OF])?;
13144            let total = self.parse_number_value()?.value;
13145            let on = if self.parse_keyword(Keyword::ON) {
13146                Some(self.parse_expr()?)
13147            } else {
13148                None
13149            };
13150            (
13151                None,
13152                Some(TableSampleBucket {
13153                    bucket: selected_bucket,
13154                    total,
13155                    on,
13156                }),
13157            )
13158        } else {
13159            let value = match self.maybe_parse(|p| p.parse_expr())? {
13160                Some(num) => num,
13161                None => {
13162                    let next_token = self.next_token();
13163                    if let Token::Word(w) = next_token.token {
13164                        Expr::Value(Value::Placeholder(w.value).with_span(next_token.span))
13165                    } else {
13166                        return parser_err!(
13167                            "Expecting number or byte length e.g. 100M",
13168                            self.peek_token().span.start
13169                        );
13170                    }
13171                }
13172            };
13173            let unit = if self.parse_keyword(Keyword::ROWS) {
13174                Some(TableSampleUnit::Rows)
13175            } else if self.parse_keyword(Keyword::PERCENT) {
13176                Some(TableSampleUnit::Percent)
13177            } else {
13178                None
13179            };
13180            (
13181                Some(TableSampleQuantity {
13182                    parenthesized,
13183                    value,
13184                    unit,
13185                }),
13186                None,
13187            )
13188        };
13189        if parenthesized {
13190            self.expect_token(&Token::RParen)?;
13191        }
13192
13193        let seed = if self.parse_keyword(Keyword::REPEATABLE) {
13194            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Repeatable)?)
13195        } else if self.parse_keyword(Keyword::SEED) {
13196            Some(self.parse_table_sample_seed(TableSampleSeedModifier::Seed)?)
13197        } else {
13198            None
13199        };
13200
13201        let offset = if self.parse_keyword(Keyword::OFFSET) {
13202            Some(self.parse_expr()?)
13203        } else {
13204            None
13205        };
13206
13207        Ok(Box::new(TableSample {
13208            modifier,
13209            name,
13210            quantity,
13211            seed,
13212            bucket,
13213            offset,
13214        }))
13215    }
13216
13217    fn parse_table_sample_seed(
13218        &mut self,
13219        modifier: TableSampleSeedModifier,
13220    ) -> Result<TableSampleSeed, ParserError> {
13221        self.expect_token(&Token::LParen)?;
13222        let value = self.parse_number_value()?.value;
13223        self.expect_token(&Token::RParen)?;
13224        Ok(TableSampleSeed { modifier, value })
13225    }
13226
    /// Parses an `OPENJSON( jsonExpression [ , path ] ) [ <with_clause> ]` clause,
13228    /// assuming the `OPENJSON` keyword was already consumed.
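    ///
    /// An illustrative example (identifiers and JSON paths are placeholders):
    ///
    /// ```sql
    /// OPENJSON(json_col, '$.items') WITH (id INT '$.id', name VARCHAR(50) '$.name')
    /// ```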
13229    fn parse_open_json_table_factor(&mut self) -> Result<TableFactor, ParserError> {
13230        self.expect_token(&Token::LParen)?;
13231        let json_expr = self.parse_expr()?;
13232        let json_path = if self.consume_token(&Token::Comma) {
13233            Some(self.parse_value()?.value)
13234        } else {
13235            None
13236        };
13237        self.expect_token(&Token::RParen)?;
13238        let columns = if self.parse_keyword(Keyword::WITH) {
13239            self.expect_token(&Token::LParen)?;
13240            let columns = self.parse_comma_separated(Parser::parse_openjson_table_column_def)?;
13241            self.expect_token(&Token::RParen)?;
13242            columns
13243        } else {
13244            Vec::new()
13245        };
13246        let alias = self.maybe_parse_table_alias()?;
13247        Ok(TableFactor::OpenJsonTable {
13248            json_expr,
13249            json_path,
13250            columns,
13251            alias,
13252        })
13253    }
13254
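    /// Parses an `XMLTABLE( ... )` table factor (as supported by e.g. PostgreSQL),
    /// assuming the `XMLTABLE` keyword was already consumed.
    ///
    /// An illustrative example (row pattern, column names and paths are placeholders):
    ///
    /// ```sql
    /// XMLTABLE('//ROWS/ROW'
    ///     PASSING data
    ///     COLUMNS id INT PATH '@id',
    ///             name TEXT PATH 'NAME' DEFAULT 'unnamed'
    /// )
    /// ```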
13255    fn parse_xml_table_factor(&mut self) -> Result<TableFactor, ParserError> {
13256        self.expect_token(&Token::LParen)?;
13257        let namespaces = if self.parse_keyword(Keyword::XMLNAMESPACES) {
13258            self.expect_token(&Token::LParen)?;
13259            let namespaces = self.parse_comma_separated(Parser::parse_xml_namespace_definition)?;
13260            self.expect_token(&Token::RParen)?;
13261            self.expect_token(&Token::Comma)?;
13262            namespaces
13263        } else {
13264            vec![]
13265        };
13266        let row_expression = self.parse_expr()?;
13267        let passing = self.parse_xml_passing_clause()?;
13268        self.expect_keyword_is(Keyword::COLUMNS)?;
13269        let columns = self.parse_comma_separated(Parser::parse_xml_table_column)?;
13270        self.expect_token(&Token::RParen)?;
13271        let alias = self.maybe_parse_table_alias()?;
13272        Ok(TableFactor::XmlTable {
13273            namespaces,
13274            row_expression,
13275            passing,
13276            columns,
13277            alias,
13278        })
13279    }
13280
13281    fn parse_xml_namespace_definition(&mut self) -> Result<XmlNamespaceDefinition, ParserError> {
13282        let uri = self.parse_expr()?;
13283        self.expect_keyword_is(Keyword::AS)?;
13284        let name = self.parse_identifier()?;
13285        Ok(XmlNamespaceDefinition { uri, name })
13286    }
13287
13288    fn parse_xml_table_column(&mut self) -> Result<XmlTableColumn, ParserError> {
13289        let name = self.parse_identifier()?;
13290
13291        let option = if self.parse_keyword(Keyword::FOR) {
13292            self.expect_keyword(Keyword::ORDINALITY)?;
13293            XmlTableColumnOption::ForOrdinality
13294        } else {
13295            let r#type = self.parse_data_type()?;
13296            let mut path = None;
13297            let mut default = None;
13298
13299            if self.parse_keyword(Keyword::PATH) {
13300                path = Some(self.parse_expr()?);
13301            }
13302
13303            if self.parse_keyword(Keyword::DEFAULT) {
13304                default = Some(self.parse_expr()?);
13305            }
13306
13307            let not_null = self.parse_keywords(&[Keyword::NOT, Keyword::NULL]);
13308            if !not_null {
13309                // NULL is the default but can be specified explicitly
13310                let _ = self.parse_keyword(Keyword::NULL);
13311            }
13312
13313            XmlTableColumnOption::NamedInfo {
13314                r#type,
13315                path,
13316                default,
13317                nullable: !not_null,
13318            }
13319        };
13320        Ok(XmlTableColumn { name, option })
13321    }
13322
13323    fn parse_xml_passing_clause(&mut self) -> Result<XmlPassingClause, ParserError> {
13324        let mut arguments = vec![];
13325        if self.parse_keyword(Keyword::PASSING) {
13326            loop {
13327                let by_value =
13328                    self.parse_keyword(Keyword::BY) && self.expect_keyword(Keyword::VALUE).is_ok();
13329                let expr = self.parse_expr()?;
13330                let alias = if self.parse_keyword(Keyword::AS) {
13331                    Some(self.parse_identifier()?)
13332                } else {
13333                    None
13334                };
13335                arguments.push(XmlPassingArgument {
13336                    expr,
13337                    alias,
13338                    by_value,
13339                });
13340                if !self.consume_token(&Token::Comma) {
13341                    break;
13342                }
13343            }
13344        }
13345        Ok(XmlPassingClause { arguments })
13346    }
13347
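    /// Parses a `MATCH_RECOGNIZE( ... )` clause applied to `table`, assuming the
    /// `MATCH_RECOGNIZE` keyword was already consumed by the caller.
    ///
    /// An illustrative example (Snowflake-style syntax; identifiers are placeholders):
    ///
    /// ```sql
    /// MATCH_RECOGNIZE(
    ///     PARTITION BY symbol
    ///     ORDER BY order_date
    ///     MEASURES FIRST(price) AS start_price
    ///     ONE ROW PER MATCH
    ///     AFTER MATCH SKIP PAST LAST ROW
    ///     PATTERN (A B+)
    ///     DEFINE B AS price > PREV(price)
    /// )
    /// ```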
13348    fn parse_match_recognize(&mut self, table: TableFactor) -> Result<TableFactor, ParserError> {
13349        self.expect_token(&Token::LParen)?;
13350
13351        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
13352            self.parse_comma_separated(Parser::parse_expr)?
13353        } else {
13354            vec![]
13355        };
13356
13357        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13358            self.parse_comma_separated(Parser::parse_order_by_expr)?
13359        } else {
13360            vec![]
13361        };
13362
13363        let measures = if self.parse_keyword(Keyword::MEASURES) {
13364            self.parse_comma_separated(|p| {
13365                let expr = p.parse_expr()?;
13366                let _ = p.parse_keyword(Keyword::AS);
13367                let alias = p.parse_identifier()?;
13368                Ok(Measure { expr, alias })
13369            })?
13370        } else {
13371            vec![]
13372        };
13373
13374        let rows_per_match =
13375            if self.parse_keywords(&[Keyword::ONE, Keyword::ROW, Keyword::PER, Keyword::MATCH]) {
13376                Some(RowsPerMatch::OneRow)
13377            } else if self.parse_keywords(&[
13378                Keyword::ALL,
13379                Keyword::ROWS,
13380                Keyword::PER,
13381                Keyword::MATCH,
13382            ]) {
13383                Some(RowsPerMatch::AllRows(
13384                    if self.parse_keywords(&[Keyword::SHOW, Keyword::EMPTY, Keyword::MATCHES]) {
13385                        Some(EmptyMatchesMode::Show)
13386                    } else if self.parse_keywords(&[
13387                        Keyword::OMIT,
13388                        Keyword::EMPTY,
13389                        Keyword::MATCHES,
13390                    ]) {
13391                        Some(EmptyMatchesMode::Omit)
13392                    } else if self.parse_keywords(&[
13393                        Keyword::WITH,
13394                        Keyword::UNMATCHED,
13395                        Keyword::ROWS,
13396                    ]) {
13397                        Some(EmptyMatchesMode::WithUnmatched)
13398                    } else {
13399                        None
13400                    },
13401                ))
13402            } else {
13403                None
13404            };
13405
13406        let after_match_skip =
13407            if self.parse_keywords(&[Keyword::AFTER, Keyword::MATCH, Keyword::SKIP]) {
13408                if self.parse_keywords(&[Keyword::PAST, Keyword::LAST, Keyword::ROW]) {
13409                    Some(AfterMatchSkip::PastLastRow)
13410                } else if self.parse_keywords(&[Keyword::TO, Keyword::NEXT, Keyword::ROW]) {
13411                    Some(AfterMatchSkip::ToNextRow)
13412                } else if self.parse_keywords(&[Keyword::TO, Keyword::FIRST]) {
13413                    Some(AfterMatchSkip::ToFirst(self.parse_identifier()?))
13414                } else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
13415                    Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
13416                } else {
13417                    let found = self.next_token();
13418                    return self.expected("after match skip option", found);
13419                }
13420            } else {
13421                None
13422            };
13423
13424        self.expect_keyword_is(Keyword::PATTERN)?;
13425        let pattern = self.parse_parenthesized(Self::parse_pattern)?;
13426
13427        self.expect_keyword_is(Keyword::DEFINE)?;
13428
13429        let symbols = self.parse_comma_separated(|p| {
13430            let symbol = p.parse_identifier()?;
13431            p.expect_keyword_is(Keyword::AS)?;
13432            let definition = p.parse_expr()?;
13433            Ok(SymbolDefinition { symbol, definition })
13434        })?;
13435
13436        self.expect_token(&Token::RParen)?;
13437
13438        let alias = self.maybe_parse_table_alias()?;
13439
13440        Ok(TableFactor::MatchRecognize {
13441            table: Box::new(table),
13442            partition_by,
13443            order_by,
13444            measures,
13445            rows_per_match,
13446            after_match_skip,
13447            pattern,
13448            symbols,
13449            alias,
13450        })
13451    }
13452
13453    fn parse_base_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13454        match self.next_token().token {
13455            Token::Caret => Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::Start)),
13456            Token::Placeholder(s) if s == "$" => {
13457                Ok(MatchRecognizePattern::Symbol(MatchRecognizeSymbol::End))
13458            }
13459            Token::LBrace => {
13460                self.expect_token(&Token::Minus)?;
13461                let symbol = self.parse_identifier().map(MatchRecognizeSymbol::Named)?;
13462                self.expect_token(&Token::Minus)?;
13463                self.expect_token(&Token::RBrace)?;
13464                Ok(MatchRecognizePattern::Exclude(symbol))
13465            }
13466            Token::Word(Word {
13467                value,
13468                quote_style: None,
13469                ..
13470            }) if value == "PERMUTE" => {
13471                self.expect_token(&Token::LParen)?;
13472                let symbols = self.parse_comma_separated(|p| {
13473                    p.parse_identifier().map(MatchRecognizeSymbol::Named)
13474                })?;
13475                self.expect_token(&Token::RParen)?;
13476                Ok(MatchRecognizePattern::Permute(symbols))
13477            }
13478            Token::LParen => {
13479                let pattern = self.parse_pattern()?;
13480                self.expect_token(&Token::RParen)?;
13481                Ok(MatchRecognizePattern::Group(Box::new(pattern)))
13482            }
13483            _ => {
13484                self.prev_token();
13485                self.parse_identifier()
13486                    .map(MatchRecognizeSymbol::Named)
13487                    .map(MatchRecognizePattern::Symbol)
13488            }
13489        }
13490    }
13491
13492    fn parse_repetition_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13493        let mut pattern = self.parse_base_pattern()?;
13494        loop {
13495            let token = self.next_token();
13496            let quantifier = match token.token {
13497                Token::Mul => RepetitionQuantifier::ZeroOrMore,
13498                Token::Plus => RepetitionQuantifier::OneOrMore,
13499                Token::Placeholder(s) if s == "?" => RepetitionQuantifier::AtMostOne,
13500                Token::LBrace => {
13501                    // quantifier is a range like {n} or {n,} or {,m} or {n,m}
13502                    let token = self.next_token();
13503                    match token.token {
13504                        Token::Comma => {
13505                            let next_token = self.next_token();
13506                            let Token::Number(n, _) = next_token.token else {
13507                                return self.expected("literal number", next_token);
13508                            };
13509                            self.expect_token(&Token::RBrace)?;
13510                            RepetitionQuantifier::AtMost(Self::parse(n, token.span.start)?)
13511                        }
13512                        Token::Number(n, _) if self.consume_token(&Token::Comma) => {
13513                            let next_token = self.next_token();
13514                            match next_token.token {
13515                                Token::Number(m, _) => {
13516                                    self.expect_token(&Token::RBrace)?;
13517                                    RepetitionQuantifier::Range(
13518                                        Self::parse(n, token.span.start)?,
13519                                        Self::parse(m, token.span.start)?,
13520                                    )
13521                                }
13522                                Token::RBrace => {
13523                                    RepetitionQuantifier::AtLeast(Self::parse(n, token.span.start)?)
13524                                }
13525                                _ => {
13526                                    return self.expected("} or upper bound", next_token);
13527                                }
13528                            }
13529                        }
13530                        Token::Number(n, _) => {
13531                            self.expect_token(&Token::RBrace)?;
13532                            RepetitionQuantifier::Exactly(Self::parse(n, token.span.start)?)
13533                        }
13534                        _ => return self.expected("quantifier range", token),
13535                    }
13536                }
13537                _ => {
13538                    self.prev_token();
13539                    break;
13540                }
13541            };
13542            pattern = MatchRecognizePattern::Repetition(Box::new(pattern), quantifier);
13543        }
13544        Ok(pattern)
13545    }
13546
13547    fn parse_concat_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13548        let mut patterns = vec![self.parse_repetition_pattern()?];
13549        while !matches!(self.peek_token().token, Token::RParen | Token::Pipe) {
13550            patterns.push(self.parse_repetition_pattern()?);
13551        }
13552        match <[MatchRecognizePattern; 1]>::try_from(patterns) {
13553            Ok([pattern]) => Ok(pattern),
13554            Err(patterns) => Ok(MatchRecognizePattern::Concat(patterns)),
13555        }
13556    }
13557
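    /// Parses a `MATCH_RECOGNIZE` row pattern (the body of `PATTERN ( ... )`):
    /// an alternation (`|`) of concatenations of optionally quantified base patterns.
    ///
    /// An illustrative example (symbols are placeholders):
    ///
    /// ```sql
    /// A (B | C)+ D{2,4}
    /// ```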
13558    fn parse_pattern(&mut self) -> Result<MatchRecognizePattern, ParserError> {
13559        let pattern = self.parse_concat_pattern()?;
13560        if self.consume_token(&Token::Pipe) {
13561            match self.parse_pattern()? {
13562                // flatten nested alternations
13563                MatchRecognizePattern::Alternation(mut patterns) => {
13564                    patterns.insert(0, pattern);
13565                    Ok(MatchRecognizePattern::Alternation(patterns))
13566                }
13567                next => Ok(MatchRecognizePattern::Alternation(vec![pattern, next])),
13568            }
13569        } else {
13570            Ok(pattern)
13571        }
13572    }
13573
    /// Parses the timestamp version specifier (i.e. to query historical data).
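    ///
    /// An illustrative example (the timestamp literal is a placeholder):
    ///
    /// ```sql
    /// FOR SYSTEM_TIME AS OF '2019-01-29 00:00:00'
    /// ```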
13575    pub fn maybe_parse_table_version(&mut self) -> Result<Option<TableVersion>, ParserError> {
13576        if self.dialect.supports_timestamp_versioning() {
13577            if self.parse_keywords(&[Keyword::FOR, Keyword::SYSTEM_TIME, Keyword::AS, Keyword::OF])
13578            {
13579                let expr = self.parse_expr()?;
13580                return Ok(Some(TableVersion::ForSystemTimeAsOf(expr)));
13581            } else if self.peek_keyword(Keyword::AT) || self.peek_keyword(Keyword::BEFORE) {
13582                let func_name = self.parse_object_name(true)?;
13583                let func = self.parse_function(func_name)?;
13584                return Ok(Some(TableVersion::Function(func)));
13585            }
13586        }
13587        Ok(None)
13588    }
13589
13590    /// Parses MySQL's JSON_TABLE column definition.
13591    /// For example: `id INT EXISTS PATH '$' DEFAULT '0' ON EMPTY ERROR ON ERROR`
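    ///
    /// Nested column definitions are also accepted, for example (paths are
    /// placeholders): `NESTED PATH '$.items[*]' COLUMNS (price INT PATH '$.price')`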
13592    pub fn parse_json_table_column_def(&mut self) -> Result<JsonTableColumn, ParserError> {
13593        if self.parse_keyword(Keyword::NESTED) {
13594            let _has_path_keyword = self.parse_keyword(Keyword::PATH);
13595            let path = self.parse_value()?.value;
13596            self.expect_keyword_is(Keyword::COLUMNS)?;
13597            let columns = self.parse_parenthesized(|p| {
13598                p.parse_comma_separated(Self::parse_json_table_column_def)
13599            })?;
13600            return Ok(JsonTableColumn::Nested(JsonTableNestedColumn {
13601                path,
13602                columns,
13603            }));
13604        }
13605        let name = self.parse_identifier()?;
13606        if self.parse_keyword(Keyword::FOR) {
13607            self.expect_keyword_is(Keyword::ORDINALITY)?;
13608            return Ok(JsonTableColumn::ForOrdinality(name));
13609        }
13610        let r#type = self.parse_data_type()?;
13611        let exists = self.parse_keyword(Keyword::EXISTS);
13612        self.expect_keyword_is(Keyword::PATH)?;
13613        let path = self.parse_value()?.value;
13614        let mut on_empty = None;
13615        let mut on_error = None;
13616        while let Some(error_handling) = self.parse_json_table_column_error_handling()? {
13617            if self.parse_keyword(Keyword::EMPTY) {
13618                on_empty = Some(error_handling);
13619            } else {
13620                self.expect_keyword_is(Keyword::ERROR)?;
13621                on_error = Some(error_handling);
13622            }
13623        }
13624        Ok(JsonTableColumn::Named(JsonTableNamedColumn {
13625            name,
13626            r#type,
13627            path,
13628            exists,
13629            on_empty,
13630            on_error,
13631        }))
13632    }
13633
13634    /// Parses MSSQL's `OPENJSON WITH` column definition.
13635    ///
13636    /// ```sql
13637    /// colName type [ column_path ] [ AS JSON ]
13638    /// ```
13639    ///
13640    /// Reference: <https://learn.microsoft.com/en-us/sql/t-sql/functions/openjson-transact-sql?view=sql-server-ver16#syntax>
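    ///
    /// An illustrative example (column name and path are placeholders):
    ///
    /// ```sql
    /// info NVARCHAR(MAX) '$.info' AS JSON
    /// ```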
13641    pub fn parse_openjson_table_column_def(&mut self) -> Result<OpenJsonTableColumn, ParserError> {
13642        let name = self.parse_identifier()?;
13643        let r#type = self.parse_data_type()?;
13644        let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
13645            self.next_token();
13646            Some(path)
13647        } else {
13648            None
13649        };
13650        let as_json = self.parse_keyword(Keyword::AS);
13651        if as_json {
13652            self.expect_keyword_is(Keyword::JSON)?;
13653        }
13654        Ok(OpenJsonTableColumn {
13655            name,
13656            r#type,
13657            path,
13658            as_json,
13659        })
13660    }
13661
13662    fn parse_json_table_column_error_handling(
13663        &mut self,
13664    ) -> Result<Option<JsonTableColumnErrorHandling>, ParserError> {
13665        let res = if self.parse_keyword(Keyword::NULL) {
13666            JsonTableColumnErrorHandling::Null
13667        } else if self.parse_keyword(Keyword::ERROR) {
13668            JsonTableColumnErrorHandling::Error
13669        } else if self.parse_keyword(Keyword::DEFAULT) {
13670            JsonTableColumnErrorHandling::Default(self.parse_value()?.value)
13671        } else {
13672            return Ok(None);
13673        };
13674        self.expect_keyword_is(Keyword::ON)?;
13675        Ok(Some(res))
13676    }
13677
13678    pub fn parse_derived_table_factor(
13679        &mut self,
13680        lateral: IsLateral,
13681    ) -> Result<TableFactor, ParserError> {
13682        let subquery = self.parse_query()?;
13683        self.expect_token(&Token::RParen)?;
13684        let alias = self.maybe_parse_table_alias()?;
13685        Ok(TableFactor::Derived {
13686            lateral: match lateral {
13687                Lateral => true,
13688                NotLateral => false,
13689            },
13690            subquery,
13691            alias,
13692        })
13693    }
13694
13695    fn parse_aliased_function_call(&mut self) -> Result<ExprWithAlias, ParserError> {
13696        let function_name = match self.next_token().token {
13697            Token::Word(w) => Ok(w.value),
13698            _ => self.expected("a function identifier", self.peek_token()),
13699        }?;
13700        let expr = self.parse_function(ObjectName::from(vec![Ident::new(function_name)]))?;
13701        let alias = if self.parse_keyword(Keyword::AS) {
13702            Some(self.parse_identifier()?)
13703        } else {
13704            None
13705        };
13706
13707        Ok(ExprWithAlias { expr, alias })
13708    }
13709    /// Parses an expression with an optional alias
13710    ///
13711    /// Examples:
13712    ///
13713    /// ```sql
13714    /// SUM(price) AS total_price
13715    /// ```
13716    /// ```sql
13717    /// SUM(price)
13718    /// ```
13719    ///
13720    /// Example
13721    /// ```
13722    /// # use sqlparser::parser::{Parser, ParserError};
13723    /// # use sqlparser::dialect::GenericDialect;
    /// # fn main() -> Result<(), ParserError> {
13725    /// let sql = r#"SUM("a") as "b""#;
13726    /// let mut parser = Parser::new(&GenericDialect).try_with_sql(sql)?;
13727    /// let expr_with_alias = parser.parse_expr_with_alias()?;
13728    /// assert_eq!(Some("b".to_string()), expr_with_alias.alias.map(|x|x.value));
13729    /// # Ok(())
    /// # }
    /// ```
13731    pub fn parse_expr_with_alias(&mut self) -> Result<ExprWithAlias, ParserError> {
13732        let expr = self.parse_expr()?;
13733        let alias = if self.parse_keyword(Keyword::AS) {
13734            Some(self.parse_identifier()?)
13735        } else {
13736            None
13737        };
13738
13739        Ok(ExprWithAlias { expr, alias })
13740    }
13741
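    /// Parses a `PIVOT( ... )` clause applied to `table`, assuming the `PIVOT`
    /// keyword was already consumed by the caller.
    ///
    /// An illustrative example (identifiers and values are placeholders):
    ///
    /// ```sql
    /// PIVOT(SUM(amount) FOR month IN ('JAN', 'FEB'))
    /// ```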
13742    pub fn parse_pivot_table_factor(
13743        &mut self,
13744        table: TableFactor,
13745    ) -> Result<TableFactor, ParserError> {
13746        self.expect_token(&Token::LParen)?;
13747        let aggregate_functions = self.parse_comma_separated(Self::parse_aliased_function_call)?;
13748        self.expect_keyword_is(Keyword::FOR)?;
13749        let value_column = self.parse_period_separated(|p| p.parse_identifier())?;
13750        self.expect_keyword_is(Keyword::IN)?;
13751
13752        self.expect_token(&Token::LParen)?;
13753        let value_source = if self.parse_keyword(Keyword::ANY) {
13754            let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
13755                self.parse_comma_separated(Parser::parse_order_by_expr)?
13756            } else {
13757                vec![]
13758            };
13759            PivotValueSource::Any(order_by)
13760        } else if self.peek_sub_query() {
13761            PivotValueSource::Subquery(self.parse_query()?)
13762        } else {
13763            PivotValueSource::List(self.parse_comma_separated(Self::parse_expr_with_alias)?)
13764        };
13765        self.expect_token(&Token::RParen)?;
13766
13767        let default_on_null =
13768            if self.parse_keywords(&[Keyword::DEFAULT, Keyword::ON, Keyword::NULL]) {
13769                self.expect_token(&Token::LParen)?;
13770                let expr = self.parse_expr()?;
13771                self.expect_token(&Token::RParen)?;
13772                Some(expr)
13773            } else {
13774                None
13775            };
13776
13777        self.expect_token(&Token::RParen)?;
13778        let alias = self.maybe_parse_table_alias()?;
13779        Ok(TableFactor::Pivot {
13780            table: Box::new(table),
13781            aggregate_functions,
13782            value_column,
13783            value_source,
13784            default_on_null,
13785            alias,
13786        })
13787    }
13788
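    /// Parses an `UNPIVOT( ... )` clause applied to `table`, assuming the
    /// `UNPIVOT` keyword was already consumed by the caller.
    ///
    /// An illustrative example (identifiers are placeholders):
    ///
    /// ```sql
    /// UNPIVOT(quantity FOR quarter IN (Q1, Q2, Q3, Q4))
    /// ```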
13789    pub fn parse_unpivot_table_factor(
13790        &mut self,
13791        table: TableFactor,
13792    ) -> Result<TableFactor, ParserError> {
13793        let null_inclusion = if self.parse_keyword(Keyword::INCLUDE) {
13794            self.expect_keyword_is(Keyword::NULLS)?;
13795            Some(NullInclusion::IncludeNulls)
13796        } else if self.parse_keyword(Keyword::EXCLUDE) {
13797            self.expect_keyword_is(Keyword::NULLS)?;
13798            Some(NullInclusion::ExcludeNulls)
13799        } else {
13800            None
13801        };
13802        self.expect_token(&Token::LParen)?;
13803        let value = self.parse_identifier()?;
13804        self.expect_keyword_is(Keyword::FOR)?;
13805        let name = self.parse_identifier()?;
13806        self.expect_keyword_is(Keyword::IN)?;
13807        let columns = self.parse_parenthesized_column_list(Mandatory, false)?;
13808        self.expect_token(&Token::RParen)?;
13809        let alias = self.maybe_parse_table_alias()?;
13810        Ok(TableFactor::Unpivot {
13811            table: Box::new(table),
13812            value,
13813            null_inclusion,
13814            name,
13815            columns,
13816            alias,
13817        })
13818    }
13819
13820    pub fn parse_join_constraint(&mut self, natural: bool) -> Result<JoinConstraint, ParserError> {
13821        if natural {
13822            Ok(JoinConstraint::Natural)
13823        } else if self.parse_keyword(Keyword::ON) {
13824            let constraint = self.parse_expr()?;
13825            Ok(JoinConstraint::On(constraint))
13826        } else if self.parse_keyword(Keyword::USING) {
13827            let columns = self.parse_parenthesized_qualified_column_list(Mandatory, false)?;
13828            Ok(JoinConstraint::Using(columns))
13829        } else {
13830            Ok(JoinConstraint::None)
13831            //self.expected("ON, or USING after JOIN", self.peek_token())
13832        }
13833    }
13834
13835    /// Parse a GRANT statement.
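    ///
    /// An illustrative example of a statement this handles (the leading `GRANT`
    /// keyword has already been consumed by the caller; names are placeholders):
    ///
    /// ```sql
    /// GRANT SELECT, INSERT ON mytable TO myrole WITH GRANT OPTION
    /// ```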
13836    pub fn parse_grant(&mut self) -> Result<Statement, ParserError> {
13837        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
13838
13839        self.expect_keyword_is(Keyword::TO)?;
13840        let grantees = self.parse_grantees()?;
13841
13842        let with_grant_option =
13843            self.parse_keywords(&[Keyword::WITH, Keyword::GRANT, Keyword::OPTION]);
13844
13845        let current_grants =
13846            if self.parse_keywords(&[Keyword::COPY, Keyword::CURRENT, Keyword::GRANTS]) {
13847                Some(CurrentGrantsKind::CopyCurrentGrants)
13848            } else if self.parse_keywords(&[Keyword::REVOKE, Keyword::CURRENT, Keyword::GRANTS]) {
13849                Some(CurrentGrantsKind::RevokeCurrentGrants)
13850            } else {
13851                None
13852            };
13853
        let as_grantor = if self.parse_keyword(Keyword::AS) {
13855            Some(self.parse_identifier()?)
13856        } else {
13857            None
13858        };
13859
13860        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
13861            Some(self.parse_identifier()?)
13862        } else {
13863            None
13864        };
13865
13866        Ok(Statement::Grant {
13867            privileges,
13868            objects,
13869            grantees,
13870            with_grant_option,
13871            as_grantor,
13872            granted_by,
13873            current_grants,
13874        })
13875    }
13876
13877    fn parse_grantees(&mut self) -> Result<Vec<Grantee>, ParserError> {
13878        let mut values = vec![];
13879        let mut grantee_type = GranteesType::None;
13880        loop {
13881            let new_grantee_type = if self.parse_keyword(Keyword::ROLE) {
13882                GranteesType::Role
13883            } else if self.parse_keyword(Keyword::USER) {
13884                GranteesType::User
13885            } else if self.parse_keyword(Keyword::SHARE) {
13886                GranteesType::Share
13887            } else if self.parse_keyword(Keyword::GROUP) {
13888                GranteesType::Group
13889            } else if self.parse_keyword(Keyword::PUBLIC) {
13890                GranteesType::Public
13891            } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
13892                GranteesType::DatabaseRole
13893            } else if self.parse_keywords(&[Keyword::APPLICATION, Keyword::ROLE]) {
13894                GranteesType::ApplicationRole
13895            } else if self.parse_keyword(Keyword::APPLICATION) {
13896                GranteesType::Application
13897            } else {
                grantee_type.clone() // keep from previous iteration, if not specified
13899            };
13900
13901            if self
13902                .dialect
13903                .get_reserved_grantees_types()
13904                .contains(&new_grantee_type)
13905            {
13906                self.prev_token();
13907            } else {
13908                grantee_type = new_grantee_type;
13909            }
13910
13911            let grantee = if grantee_type == GranteesType::Public {
13912                Grantee {
13913                    grantee_type: grantee_type.clone(),
13914                    name: None,
13915                }
13916            } else {
13917                let mut name = self.parse_grantee_name()?;
13918                if self.consume_token(&Token::Colon) {
13919                    // Redshift supports namespace prefix for external users and groups:
13920                    // <Namespace>:<GroupName> or <Namespace>:<UserName>
13921                    // https://docs.aws.amazon.com/redshift/latest/mgmt/redshift-iam-access-control-native-idp.html
13922                    let ident = self.parse_identifier()?;
13923                    if let GranteeName::ObjectName(namespace) = name {
13924                        name = GranteeName::ObjectName(ObjectName::from(vec![Ident::new(
13925                            format!("{namespace}:{ident}"),
13926                        )]));
13927                    };
13928                }
13929                Grantee {
13930                    grantee_type: grantee_type.clone(),
13931                    name: Some(name),
13932                }
13933            };
13934
13935            values.push(grantee);
13936
13937            if !self.consume_token(&Token::Comma) {
13938                break;
13939            }
13940        }
13941
13942        Ok(values)
13943    }
13944
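    /// Parses the privilege list and the optional `ON <objects>` portion shared by
    /// `GRANT`, `DENY` and `REVOKE` statements.
    ///
    /// An illustrative example (schema name is a placeholder):
    ///
    /// ```sql
    /// SELECT, INSERT ON ALL TABLES IN SCHEMA public
    /// ```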
13945    pub fn parse_grant_deny_revoke_privileges_objects(
13946        &mut self,
13947    ) -> Result<(Privileges, Option<GrantObjects>), ParserError> {
13948        let privileges = if self.parse_keyword(Keyword::ALL) {
13949            Privileges::All {
13950                with_privileges_keyword: self.parse_keyword(Keyword::PRIVILEGES),
13951            }
13952        } else {
13953            let actions = self.parse_actions_list()?;
13954            Privileges::Actions(actions)
13955        };
13956
13957        let objects = if self.parse_keyword(Keyword::ON) {
13958            if self.parse_keywords(&[Keyword::ALL, Keyword::TABLES, Keyword::IN, Keyword::SCHEMA]) {
13959                Some(GrantObjects::AllTablesInSchema {
13960                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13961                })
13962            } else if self.parse_keywords(&[
13963                Keyword::ALL,
13964                Keyword::EXTERNAL,
13965                Keyword::TABLES,
13966                Keyword::IN,
13967                Keyword::SCHEMA,
13968            ]) {
13969                Some(GrantObjects::AllExternalTablesInSchema {
13970                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13971                })
13972            } else if self.parse_keywords(&[
13973                Keyword::ALL,
13974                Keyword::VIEWS,
13975                Keyword::IN,
13976                Keyword::SCHEMA,
13977            ]) {
13978                Some(GrantObjects::AllViewsInSchema {
13979                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13980                })
13981            } else if self.parse_keywords(&[
13982                Keyword::ALL,
13983                Keyword::MATERIALIZED,
13984                Keyword::VIEWS,
13985                Keyword::IN,
13986                Keyword::SCHEMA,
13987            ]) {
13988                Some(GrantObjects::AllMaterializedViewsInSchema {
13989                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13990                })
13991            } else if self.parse_keywords(&[
13992                Keyword::FUTURE,
13993                Keyword::SCHEMAS,
13994                Keyword::IN,
13995                Keyword::DATABASE,
13996            ]) {
13997                Some(GrantObjects::FutureSchemasInDatabase {
13998                    databases: self.parse_comma_separated(|p| p.parse_object_name(false))?,
13999                })
14000            } else if self.parse_keywords(&[
14001                Keyword::FUTURE,
14002                Keyword::TABLES,
14003                Keyword::IN,
14004                Keyword::SCHEMA,
14005            ]) {
14006                Some(GrantObjects::FutureTablesInSchema {
14007                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14008                })
14009            } else if self.parse_keywords(&[
14010                Keyword::FUTURE,
14011                Keyword::EXTERNAL,
14012                Keyword::TABLES,
14013                Keyword::IN,
14014                Keyword::SCHEMA,
14015            ]) {
14016                Some(GrantObjects::FutureExternalTablesInSchema {
14017                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14018                })
14019            } else if self.parse_keywords(&[
14020                Keyword::FUTURE,
14021                Keyword::VIEWS,
14022                Keyword::IN,
14023                Keyword::SCHEMA,
14024            ]) {
14025                Some(GrantObjects::FutureViewsInSchema {
14026                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14027                })
14028            } else if self.parse_keywords(&[
14029                Keyword::FUTURE,
14030                Keyword::MATERIALIZED,
14031                Keyword::VIEWS,
14032                Keyword::IN,
14033                Keyword::SCHEMA,
14034            ]) {
14035                Some(GrantObjects::FutureMaterializedViewsInSchema {
14036                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14037                })
14038            } else if self.parse_keywords(&[
14039                Keyword::ALL,
14040                Keyword::SEQUENCES,
14041                Keyword::IN,
14042                Keyword::SCHEMA,
14043            ]) {
14044                Some(GrantObjects::AllSequencesInSchema {
14045                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14046                })
14047            } else if self.parse_keywords(&[
14048                Keyword::FUTURE,
14049                Keyword::SEQUENCES,
14050                Keyword::IN,
14051                Keyword::SCHEMA,
14052            ]) {
14053                Some(GrantObjects::FutureSequencesInSchema {
14054                    schemas: self.parse_comma_separated(|p| p.parse_object_name(false))?,
14055                })
14056            } else if self.parse_keywords(&[Keyword::RESOURCE, Keyword::MONITOR]) {
14057                Some(GrantObjects::ResourceMonitors(
14058                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14059                ))
14060            } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
14061                Some(GrantObjects::ComputePools(
14062                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14063                ))
14064            } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
14065                Some(GrantObjects::FailoverGroup(
14066                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14067                ))
14068            } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
14069                Some(GrantObjects::ReplicationGroup(
14070                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14071                ))
14072            } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
14073                Some(GrantObjects::ExternalVolumes(
14074                    self.parse_comma_separated(|p| p.parse_object_name(false))?,
14075                ))
14076            } else {
14077                let object_type = self.parse_one_of_keywords(&[
14078                    Keyword::SEQUENCE,
14079                    Keyword::DATABASE,
14080                    Keyword::SCHEMA,
14081                    Keyword::TABLE,
14082                    Keyword::VIEW,
14083                    Keyword::WAREHOUSE,
14084                    Keyword::INTEGRATION,
14088                    Keyword::USER,
14089                    Keyword::CONNECTION,
14090                    Keyword::PROCEDURE,
14091                    Keyword::FUNCTION,
14092                ]);
14093                let objects =
14094                    self.parse_comma_separated(|p| p.parse_object_name_inner(false, true));
14095                match object_type {
14096                    Some(Keyword::DATABASE) => Some(GrantObjects::Databases(objects?)),
14097                    Some(Keyword::SCHEMA) => Some(GrantObjects::Schemas(objects?)),
14098                    Some(Keyword::SEQUENCE) => Some(GrantObjects::Sequences(objects?)),
14099                    Some(Keyword::WAREHOUSE) => Some(GrantObjects::Warehouses(objects?)),
14100                    Some(Keyword::INTEGRATION) => Some(GrantObjects::Integrations(objects?)),
14101                    Some(Keyword::VIEW) => Some(GrantObjects::Views(objects?)),
14102                    Some(Keyword::USER) => Some(GrantObjects::Users(objects?)),
14103                    Some(Keyword::CONNECTION) => Some(GrantObjects::Connections(objects?)),
14104                    kw @ (Some(Keyword::PROCEDURE) | Some(Keyword::FUNCTION)) => {
14105                        if let Some(name) = objects?.first() {
14106                            self.parse_grant_procedure_or_function(name, &kw)?
14107                        } else {
14108                            self.expected("procedure or function name", self.peek_token())?
14109                        }
14110                    }
14111                    Some(Keyword::TABLE) | None => Some(GrantObjects::Tables(objects?)),
14112                    _ => unreachable!(),
14113                }
14114            }
14115        } else {
14116            None
14117        };
14118
14119        Ok((privileges, objects))
14120    }
14121
14122    fn parse_grant_procedure_or_function(
14123        &mut self,
14124        name: &ObjectName,
14125        kw: &Option<Keyword>,
14126    ) -> Result<Option<GrantObjects>, ParserError> {
14127        let arg_types = if self.consume_token(&Token::LParen) {
14128            let list = self.parse_comma_separated0(Self::parse_data_type, Token::RParen)?;
14129            self.expect_token(&Token::RParen)?;
14130            list
14131        } else {
14132            vec![]
14133        };
14134        match kw {
14135            Some(Keyword::PROCEDURE) => Ok(Some(GrantObjects::Procedure {
14136                name: name.clone(),
14137                arg_types,
14138            })),
14139            Some(Keyword::FUNCTION) => Ok(Some(GrantObjects::Function {
14140                name: name.clone(),
14141                arg_types,
14142            })),
14143            _ => self.expected("procedure or function keywords", self.peek_token())?,
14144        }
14145    }
14146
14147    pub fn parse_grant_permission(&mut self) -> Result<Action, ParserError> {
14148        fn parse_columns(parser: &mut Parser) -> Result<Option<Vec<Ident>>, ParserError> {
14149            let columns = parser.parse_parenthesized_column_list(Optional, false)?;
14150            if columns.is_empty() {
14151                Ok(None)
14152            } else {
14153                Ok(Some(columns))
14154            }
14155        }
14156
14157        // Multi-word privileges
14158        if self.parse_keywords(&[Keyword::IMPORTED, Keyword::PRIVILEGES]) {
14159            Ok(Action::ImportedPrivileges)
14160        } else if self.parse_keywords(&[Keyword::ADD, Keyword::SEARCH, Keyword::OPTIMIZATION]) {
14161            Ok(Action::AddSearchOptimization)
14162        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::LISTING]) {
14163            Ok(Action::AttachListing)
14164        } else if self.parse_keywords(&[Keyword::ATTACH, Keyword::POLICY]) {
14165            Ok(Action::AttachPolicy)
14166        } else if self.parse_keywords(&[Keyword::BIND, Keyword::SERVICE, Keyword::ENDPOINT]) {
14167            Ok(Action::BindServiceEndpoint)
14168        } else if self.parse_keywords(&[Keyword::DATABASE, Keyword::ROLE]) {
14169            let role = self.parse_object_name(false)?;
14170            Ok(Action::DatabaseRole { role })
14171        } else if self.parse_keywords(&[Keyword::EVOLVE, Keyword::SCHEMA]) {
14172            Ok(Action::EvolveSchema)
14173        } else if self.parse_keywords(&[Keyword::IMPORT, Keyword::SHARE]) {
14174            Ok(Action::ImportShare)
14175        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::VERSIONS]) {
14176            Ok(Action::ManageVersions)
14177        } else if self.parse_keywords(&[Keyword::MANAGE, Keyword::RELEASES]) {
14178            Ok(Action::ManageReleases)
14179        } else if self.parse_keywords(&[Keyword::OVERRIDE, Keyword::SHARE, Keyword::RESTRICTIONS]) {
14180            Ok(Action::OverrideShareRestrictions)
14181        } else if self.parse_keywords(&[
14182            Keyword::PURCHASE,
14183            Keyword::DATA,
14184            Keyword::EXCHANGE,
14185            Keyword::LISTING,
14186        ]) {
14187            Ok(Action::PurchaseDataExchangeListing)
14188        } else if self.parse_keywords(&[Keyword::RESOLVE, Keyword::ALL]) {
14189            Ok(Action::ResolveAll)
14190        } else if self.parse_keywords(&[Keyword::READ, Keyword::SESSION]) {
14191            Ok(Action::ReadSession)
14192
14193        // Single-word privileges
14194        } else if self.parse_keyword(Keyword::APPLY) {
14195            let apply_type = self.parse_action_apply_type()?;
14196            Ok(Action::Apply { apply_type })
14197        } else if self.parse_keyword(Keyword::APPLYBUDGET) {
14198            Ok(Action::ApplyBudget)
14199        } else if self.parse_keyword(Keyword::AUDIT) {
14200            Ok(Action::Audit)
14201        } else if self.parse_keyword(Keyword::CONNECT) {
14202            Ok(Action::Connect)
14203        } else if self.parse_keyword(Keyword::CREATE) {
14204            let obj_type = self.maybe_parse_action_create_object_type();
14205            Ok(Action::Create { obj_type })
14206        } else if self.parse_keyword(Keyword::DELETE) {
14207            Ok(Action::Delete)
14208        } else if self.parse_keyword(Keyword::EXEC) {
14209            let obj_type = self.maybe_parse_action_execute_obj_type();
14210            Ok(Action::Exec { obj_type })
14211        } else if self.parse_keyword(Keyword::EXECUTE) {
14212            let obj_type = self.maybe_parse_action_execute_obj_type();
14213            Ok(Action::Execute { obj_type })
14214        } else if self.parse_keyword(Keyword::FAILOVER) {
14215            Ok(Action::Failover)
14216        } else if self.parse_keyword(Keyword::INSERT) {
14217            Ok(Action::Insert {
14218                columns: parse_columns(self)?,
14219            })
14220        } else if self.parse_keyword(Keyword::MANAGE) {
14221            let manage_type = self.parse_action_manage_type()?;
14222            Ok(Action::Manage { manage_type })
14223        } else if self.parse_keyword(Keyword::MODIFY) {
14224            let modify_type = self.parse_action_modify_type();
14225            Ok(Action::Modify { modify_type })
14226        } else if self.parse_keyword(Keyword::MONITOR) {
14227            let monitor_type = self.parse_action_monitor_type();
14228            Ok(Action::Monitor { monitor_type })
14229        } else if self.parse_keyword(Keyword::OPERATE) {
14230            Ok(Action::Operate)
14231        } else if self.parse_keyword(Keyword::REFERENCES) {
14232            Ok(Action::References {
14233                columns: parse_columns(self)?,
14234            })
14235        } else if self.parse_keyword(Keyword::READ) {
14236            Ok(Action::Read)
14237        } else if self.parse_keyword(Keyword::REPLICATE) {
14238            Ok(Action::Replicate)
14239        } else if self.parse_keyword(Keyword::ROLE) {
14240            let role = self.parse_identifier()?;
14241            Ok(Action::Role { role })
14242        } else if self.parse_keyword(Keyword::SELECT) {
14243            Ok(Action::Select {
14244                columns: parse_columns(self)?,
14245            })
14246        } else if self.parse_keyword(Keyword::TEMPORARY) {
14247            Ok(Action::Temporary)
14248        } else if self.parse_keyword(Keyword::TRIGGER) {
14249            Ok(Action::Trigger)
14250        } else if self.parse_keyword(Keyword::TRUNCATE) {
14251            Ok(Action::Truncate)
14252        } else if self.parse_keyword(Keyword::UPDATE) {
14253            Ok(Action::Update {
14254                columns: parse_columns(self)?,
14255            })
14256        } else if self.parse_keyword(Keyword::USAGE) {
14257            Ok(Action::Usage)
14258        } else if self.parse_keyword(Keyword::OWNERSHIP) {
14259            Ok(Action::Ownership)
14260        } else {
14261            self.expected("a privilege keyword", self.peek_token())?
14262        }
14263    }
14264
14265    fn maybe_parse_action_create_object_type(&mut self) -> Option<ActionCreateObjectType> {
14266        // Multi-word object types
14267        if self.parse_keywords(&[Keyword::APPLICATION, Keyword::PACKAGE]) {
14268            Some(ActionCreateObjectType::ApplicationPackage)
14269        } else if self.parse_keywords(&[Keyword::COMPUTE, Keyword::POOL]) {
14270            Some(ActionCreateObjectType::ComputePool)
14271        } else if self.parse_keywords(&[Keyword::DATA, Keyword::EXCHANGE, Keyword::LISTING]) {
14272            Some(ActionCreateObjectType::DataExchangeListing)
14273        } else if self.parse_keywords(&[Keyword::EXTERNAL, Keyword::VOLUME]) {
14274            Some(ActionCreateObjectType::ExternalVolume)
14275        } else if self.parse_keywords(&[Keyword::FAILOVER, Keyword::GROUP]) {
14276            Some(ActionCreateObjectType::FailoverGroup)
14277        } else if self.parse_keywords(&[Keyword::NETWORK, Keyword::POLICY]) {
14278            Some(ActionCreateObjectType::NetworkPolicy)
14279        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::LISTING]) {
14280            Some(ActionCreateObjectType::OrganiationListing)
14281        } else if self.parse_keywords(&[Keyword::REPLICATION, Keyword::GROUP]) {
14282            Some(ActionCreateObjectType::ReplicationGroup)
14283        }
14284        // Single-word object types
14285        else if self.parse_keyword(Keyword::ACCOUNT) {
14286            Some(ActionCreateObjectType::Account)
14287        } else if self.parse_keyword(Keyword::APPLICATION) {
14288            Some(ActionCreateObjectType::Application)
14289        } else if self.parse_keyword(Keyword::DATABASE) {
14290            Some(ActionCreateObjectType::Database)
14291        } else if self.parse_keyword(Keyword::INTEGRATION) {
14292            Some(ActionCreateObjectType::Integration)
14293        } else if self.parse_keyword(Keyword::ROLE) {
14294            Some(ActionCreateObjectType::Role)
14295        } else if self.parse_keyword(Keyword::SHARE) {
14296            Some(ActionCreateObjectType::Share)
14297        } else if self.parse_keyword(Keyword::USER) {
14298            Some(ActionCreateObjectType::User)
14299        } else if self.parse_keyword(Keyword::WAREHOUSE) {
14300            Some(ActionCreateObjectType::Warehouse)
14301        } else {
14302            None
14303        }
14304    }
14305
14306    fn parse_action_apply_type(&mut self) -> Result<ActionApplyType, ParserError> {
14307        if self.parse_keywords(&[Keyword::AGGREGATION, Keyword::POLICY]) {
14308            Ok(ActionApplyType::AggregationPolicy)
14309        } else if self.parse_keywords(&[Keyword::AUTHENTICATION, Keyword::POLICY]) {
14310            Ok(ActionApplyType::AuthenticationPolicy)
14311        } else if self.parse_keywords(&[Keyword::JOIN, Keyword::POLICY]) {
14312            Ok(ActionApplyType::JoinPolicy)
14313        } else if self.parse_keywords(&[Keyword::MASKING, Keyword::POLICY]) {
14314            Ok(ActionApplyType::MaskingPolicy)
14315        } else if self.parse_keywords(&[Keyword::PACKAGES, Keyword::POLICY]) {
14316            Ok(ActionApplyType::PackagesPolicy)
14317        } else if self.parse_keywords(&[Keyword::PASSWORD, Keyword::POLICY]) {
14318            Ok(ActionApplyType::PasswordPolicy)
14319        } else if self.parse_keywords(&[Keyword::PROJECTION, Keyword::POLICY]) {
14320            Ok(ActionApplyType::ProjectionPolicy)
14321        } else if self.parse_keywords(&[Keyword::ROW, Keyword::ACCESS, Keyword::POLICY]) {
14322            Ok(ActionApplyType::RowAccessPolicy)
14323        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::POLICY]) {
14324            Ok(ActionApplyType::SessionPolicy)
14325        } else if self.parse_keyword(Keyword::TAG) {
14326            Ok(ActionApplyType::Tag)
14327        } else {
14328            self.expected("GRANT APPLY type", self.peek_token())
14329        }
14330    }
14331
14332    fn maybe_parse_action_execute_obj_type(&mut self) -> Option<ActionExecuteObjectType> {
14333        if self.parse_keywords(&[Keyword::DATA, Keyword::METRIC, Keyword::FUNCTION]) {
14334            Some(ActionExecuteObjectType::DataMetricFunction)
14335        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::ALERT]) {
14336            Some(ActionExecuteObjectType::ManagedAlert)
14337        } else if self.parse_keywords(&[Keyword::MANAGED, Keyword::TASK]) {
14338            Some(ActionExecuteObjectType::ManagedTask)
14339        } else if self.parse_keyword(Keyword::ALERT) {
14340            Some(ActionExecuteObjectType::Alert)
14341        } else if self.parse_keyword(Keyword::TASK) {
14342            Some(ActionExecuteObjectType::Task)
14343        } else {
14344            None
14345        }
14346    }
14347
14348    fn parse_action_manage_type(&mut self) -> Result<ActionManageType, ParserError> {
14349        if self.parse_keywords(&[Keyword::ACCOUNT, Keyword::SUPPORT, Keyword::CASES]) {
14350            Ok(ActionManageType::AccountSupportCases)
14351        } else if self.parse_keywords(&[Keyword::EVENT, Keyword::SHARING]) {
14352            Ok(ActionManageType::EventSharing)
14353        } else if self.parse_keywords(&[Keyword::LISTING, Keyword::AUTO, Keyword::FULFILLMENT]) {
14354            Ok(ActionManageType::ListingAutoFulfillment)
14355        } else if self.parse_keywords(&[Keyword::ORGANIZATION, Keyword::SUPPORT, Keyword::CASES]) {
14356            Ok(ActionManageType::OrganizationSupportCases)
14357        } else if self.parse_keywords(&[Keyword::USER, Keyword::SUPPORT, Keyword::CASES]) {
14358            Ok(ActionManageType::UserSupportCases)
14359        } else if self.parse_keyword(Keyword::GRANTS) {
14360            Ok(ActionManageType::Grants)
14361        } else if self.parse_keyword(Keyword::WAREHOUSES) {
14362            Ok(ActionManageType::Warehouses)
14363        } else {
14364            self.expected("GRANT MANAGE type", self.peek_token())
14365        }
14366    }
14367
14368    fn parse_action_modify_type(&mut self) -> Option<ActionModifyType> {
14369        if self.parse_keywords(&[Keyword::LOG, Keyword::LEVEL]) {
14370            Some(ActionModifyType::LogLevel)
14371        } else if self.parse_keywords(&[Keyword::TRACE, Keyword::LEVEL]) {
14372            Some(ActionModifyType::TraceLevel)
14373        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::LOG, Keyword::LEVEL]) {
14374            Some(ActionModifyType::SessionLogLevel)
14375        } else if self.parse_keywords(&[Keyword::SESSION, Keyword::TRACE, Keyword::LEVEL]) {
14376            Some(ActionModifyType::SessionTraceLevel)
14377        } else {
14378            None
14379        }
14380    }
14381
14382    fn parse_action_monitor_type(&mut self) -> Option<ActionMonitorType> {
14383        if self.parse_keyword(Keyword::EXECUTION) {
14384            Some(ActionMonitorType::Execution)
14385        } else if self.parse_keyword(Keyword::SECURITY) {
14386            Some(ActionMonitorType::Security)
14387        } else if self.parse_keyword(Keyword::USAGE) {
14388            Some(ActionMonitorType::Usage)
14389        } else {
14390            None
14391        }
14392    }
14393
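    /// Parse a single grantee name.
    ///
    /// Example (illustrative; a MySQL-style `user@host` grantee is only
    /// recognized when the dialect supports user-host grantees):
    /// ```sql
    /// GRANT SELECT ON db.* TO user1@localhost
    /// ```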
14394    pub fn parse_grantee_name(&mut self) -> Result<GranteeName, ParserError> {
14395        let mut name = self.parse_object_name(false)?;
14396        if self.dialect.supports_user_host_grantee()
14397            && name.0.len() == 1
14398            && name.0[0].as_ident().is_some()
14399            && self.consume_token(&Token::AtSign)
14400        {
14401            let user = name.0.pop().unwrap().as_ident().unwrap().clone();
14402            let host = self.parse_identifier()?;
14403            Ok(GranteeName::UserHost { user, host })
14404        } else {
14405            Ok(GranteeName::ObjectName(name))
14406        }
14407    }
14408
14409    /// Parse [`Statement::Deny`]
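    ///
    /// Example (illustrative T-SQL form):
    /// ```sql
    /// DENY SELECT ON users TO analyst CASCADE
    /// ```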
14410    pub fn parse_deny(&mut self) -> Result<Statement, ParserError> {
14411        self.expect_keyword(Keyword::DENY)?;
14412
14413        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
14414        let objects = match objects {
14415            Some(o) => o,
14416            None => {
14417                return parser_err!(
14418                    "DENY statements must specify an object",
14419                    self.peek_token().span.start
14420                )
14421            }
14422        };
14423
14424        self.expect_keyword_is(Keyword::TO)?;
14425        let grantees = self.parse_grantees()?;
14426        let cascade = self.parse_cascade_option();
14427        let granted_by = if self.parse_keyword(Keyword::AS) {
14428            Some(self.parse_identifier()?)
14429        } else {
14430            None
14431        };
14432
14433        Ok(Statement::Deny(DenyStatement {
14434            privileges,
14435            objects,
14436            grantees,
14437            cascade,
14438            granted_by,
14439        }))
14440    }
14441
14442    /// Parse a REVOKE statement
14443    pub fn parse_revoke(&mut self) -> Result<Statement, ParserError> {
14444        let (privileges, objects) = self.parse_grant_deny_revoke_privileges_objects()?;
14445
14446        self.expect_keyword_is(Keyword::FROM)?;
14447        let grantees = self.parse_grantees()?;
14448
14449        let granted_by = if self.parse_keywords(&[Keyword::GRANTED, Keyword::BY]) {
14450            Some(self.parse_identifier()?)
14451        } else {
14452            None
14453        };
14454
14455        let cascade = self.parse_cascade_option();
14456
14457        Ok(Statement::Revoke {
14458            privileges,
14459            objects,
14460            grantees,
14461            granted_by,
14462            cascade,
14463        })
14464    }
14465
14466    /// Parse a REPLACE statement
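    ///
    /// Example (MySQL):
    /// ```sql
    /// REPLACE INTO t (a, b) VALUES (1, 2)
    /// ```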
14467    pub fn parse_replace(&mut self) -> Result<Statement, ParserError> {
14468        if !dialect_of!(self is MySqlDialect | GenericDialect) {
14469            return parser_err!(
14470                "Unsupported statement REPLACE",
14471                self.peek_token().span.start
14472            );
14473        }
14474
14475        let mut insert = self.parse_insert()?;
14476        if let Statement::Insert(Insert { replace_into, .. }) = &mut insert {
14477            *replace_into = true;
14478        }
14479
14480        Ok(insert)
14481    }
14482
14483    /// Parse an INSERT statement, returning a `Box`ed SetExpr
14484    ///
14485    /// This is used to reduce the size of the stack frames in debug builds
14486    fn parse_insert_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14487        Ok(Box::new(SetExpr::Insert(self.parse_insert()?)))
14488    }
14489
14490    /// Parse an INSERT statement
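    ///
    /// Example (illustrative PostgreSQL-style form):
    /// ```sql
    /// INSERT INTO t (a, b)
    /// VALUES (1, 2)
    /// ON CONFLICT (a) DO UPDATE SET b = EXCLUDED.b
    /// RETURNING a
    /// ```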
14491    pub fn parse_insert(&mut self) -> Result<Statement, ParserError> {
14492        let or = self.parse_conflict_clause();
14493        let priority = if !dialect_of!(self is MySqlDialect | GenericDialect) {
14494            None
14495        } else if self.parse_keyword(Keyword::LOW_PRIORITY) {
14496            Some(MysqlInsertPriority::LowPriority)
14497        } else if self.parse_keyword(Keyword::DELAYED) {
14498            Some(MysqlInsertPriority::Delayed)
14499        } else if self.parse_keyword(Keyword::HIGH_PRIORITY) {
14500            Some(MysqlInsertPriority::HighPriority)
14501        } else {
14502            None
14503        };
14504
14505        let ignore = dialect_of!(self is MySqlDialect | GenericDialect)
14506            && self.parse_keyword(Keyword::IGNORE);
14507
14508        let replace_into = false;
14509
14510        let overwrite = self.parse_keyword(Keyword::OVERWRITE);
14511        let into = self.parse_keyword(Keyword::INTO);
14512
14513        let local = self.parse_keyword(Keyword::LOCAL);
14514
14515        if self.parse_keyword(Keyword::DIRECTORY) {
14516            let path = self.parse_literal_string()?;
14517            let file_format = if self.parse_keywords(&[Keyword::STORED, Keyword::AS]) {
14518                Some(self.parse_file_format()?)
14519            } else {
14520                None
14521            };
14522            let source = self.parse_query()?;
14523            Ok(Statement::Directory {
14524                local,
14525                path,
14526                overwrite,
14527                file_format,
14528                source,
14529            })
14530        } else {
14531            // Hive lets you include the TABLE keyword here regardless
14532            let table = self.parse_keyword(Keyword::TABLE);
14533            let table_object = self.parse_table_object()?;
14534
14535            let table_alias =
14536                if dialect_of!(self is PostgreSqlDialect) && self.parse_keyword(Keyword::AS) {
14537                    Some(self.parse_identifier()?)
14538                } else {
14539                    None
14540                };
14541
14542            let is_mysql = dialect_of!(self is MySqlDialect);
14543
14544            let (columns, partitioned, after_columns, source, assignments) = if self
14545                .parse_keywords(&[Keyword::DEFAULT, Keyword::VALUES])
14546            {
14547                (vec![], None, vec![], None, vec![])
14548            } else {
14549                let (columns, partitioned, after_columns) = if !self.peek_subquery_start() {
14550                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
14551
14552                    let partitioned = self.parse_insert_partition()?;
14553                    // Hive allows you to specify columns after partitions as well if you want.
14554                    let after_columns = if dialect_of!(self is HiveDialect) {
14555                        self.parse_parenthesized_column_list(Optional, false)?
14556                    } else {
14557                        vec![]
14558                    };
14559                    (columns, partitioned, after_columns)
14560                } else {
14561                    Default::default()
14562                };
14563
14564                let (source, assignments) = if self.peek_keyword(Keyword::FORMAT)
14565                    || self.peek_keyword(Keyword::SETTINGS)
14566                {
14567                    (None, vec![])
14568                } else if self.dialect.supports_insert_set() && self.parse_keyword(Keyword::SET) {
14569                    (None, self.parse_comma_separated(Parser::parse_assignment)?)
14570                } else {
14571                    (Some(self.parse_query()?), vec![])
14572                };
14573
14574                (columns, partitioned, after_columns, source, assignments)
14575            };
14576
14577            let (format_clause, settings) = if self.dialect.supports_insert_format() {
14578                // The SETTINGS clause always comes before `FORMAT` for ClickHouse:
14579                // <https://clickhouse.com/docs/en/sql-reference/statements/insert-into>
14580                let settings = self.parse_settings()?;
14581
14582                let format = if self.parse_keyword(Keyword::FORMAT) {
14583                    Some(self.parse_input_format_clause()?)
14584                } else {
14585                    None
14586                };
14587
14588                (format, settings)
14589            } else {
14590                Default::default()
14591            };
14592
14593            let insert_alias = if dialect_of!(self is MySqlDialect | GenericDialect)
14594                && self.parse_keyword(Keyword::AS)
14595            {
14596                let row_alias = self.parse_object_name(false)?;
14597                let col_aliases = Some(self.parse_parenthesized_column_list(Optional, false)?);
14598                Some(InsertAliases {
14599                    row_alias,
14600                    col_aliases,
14601                })
14602            } else {
14603                None
14604            };
14605
14606            let on = if self.parse_keyword(Keyword::ON) {
14607                if self.parse_keyword(Keyword::CONFLICT) {
14608                    let conflict_target =
14609                        if self.parse_keywords(&[Keyword::ON, Keyword::CONSTRAINT]) {
14610                            Some(ConflictTarget::OnConstraint(self.parse_object_name(false)?))
14611                        } else if self.peek_token() == Token::LParen {
14612                            Some(ConflictTarget::Columns(
14613                                self.parse_parenthesized_column_list(IsOptional::Mandatory, false)?,
14614                            ))
14615                        } else {
14616                            None
14617                        };
14618
14619                    self.expect_keyword_is(Keyword::DO)?;
14620                    let action = if self.parse_keyword(Keyword::NOTHING) {
14621                        OnConflictAction::DoNothing
14622                    } else {
14623                        self.expect_keyword_is(Keyword::UPDATE)?;
14624                        self.expect_keyword_is(Keyword::SET)?;
14625                        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14626                        let selection = if self.parse_keyword(Keyword::WHERE) {
14627                            Some(self.parse_expr()?)
14628                        } else {
14629                            None
14630                        };
14631                        OnConflictAction::DoUpdate(DoUpdate {
14632                            assignments,
14633                            selection,
14634                        })
14635                    };
14636
14637                    Some(OnInsert::OnConflict(OnConflict {
14638                        conflict_target,
14639                        action,
14640                    }))
14641                } else {
14642                    self.expect_keyword_is(Keyword::DUPLICATE)?;
14643                    self.expect_keyword_is(Keyword::KEY)?;
14644                    self.expect_keyword_is(Keyword::UPDATE)?;
14645                    let l = self.parse_comma_separated(Parser::parse_assignment)?;
14646
14647                    Some(OnInsert::DuplicateKeyUpdate(l))
14648                }
14649            } else {
14650                None
14651            };
14652
14653            let returning = if self.parse_keyword(Keyword::RETURNING) {
14654                Some(self.parse_comma_separated(Parser::parse_select_item)?)
14655            } else {
14656                None
14657            };
14658
14659            Ok(Statement::Insert(Insert {
14660                or,
14661                table: table_object,
14662                table_alias,
14663                ignore,
14664                into,
14665                overwrite,
14666                partitioned,
14667                columns,
14668                after_columns,
14669                source,
14670                assignments,
14671                has_table_keyword: table,
14672                on,
14673                returning,
14674                replace_into,
14675                priority,
14676                insert_alias,
14677                settings,
14678                format_clause,
14679            }))
14680        }
14681    }
14682
14683    // Parses the input format clause used by [ClickHouse].
14684    //
14685    // <https://clickhouse.com/docs/en/interfaces/formats>
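    //
    // Example (illustrative ClickHouse form):
    //
    // ```sql
    // INSERT INTO t FORMAT JSONEachRow
    // ```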
14686    pub fn parse_input_format_clause(&mut self) -> Result<InputFormatClause, ParserError> {
14687        let ident = self.parse_identifier()?;
14688        let values = self
14689            .maybe_parse(|p| p.parse_comma_separated(|p| p.parse_expr()))?
14690            .unwrap_or_default();
14691
14692        Ok(InputFormatClause { ident, values })
14693    }
14694
14695    /// Returns true if the immediate tokens look like the
14696    /// beginning of a subquery. `(SELECT ...`
14697    fn peek_subquery_start(&mut self) -> bool {
14698        let [maybe_lparen, maybe_select] = self.peek_tokens();
14699        Token::LParen == maybe_lparen
14700            && matches!(maybe_select, Token::Word(w) if w.keyword == Keyword::SELECT)
14701    }
14702
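    /// Parse an optional SQLite-style conflict resolution clause, e.g. the
    /// `OR IGNORE` in:
    ///
    /// ```sql
    /// INSERT OR IGNORE INTO t (a) VALUES (1)
    /// ```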
14703    fn parse_conflict_clause(&mut self) -> Option<SqliteOnConflict> {
14704        if self.parse_keywords(&[Keyword::OR, Keyword::REPLACE]) {
14705            Some(SqliteOnConflict::Replace)
14706        } else if self.parse_keywords(&[Keyword::OR, Keyword::ROLLBACK]) {
14707            Some(SqliteOnConflict::Rollback)
14708        } else if self.parse_keywords(&[Keyword::OR, Keyword::ABORT]) {
14709            Some(SqliteOnConflict::Abort)
14710        } else if self.parse_keywords(&[Keyword::OR, Keyword::FAIL]) {
14711            Some(SqliteOnConflict::Fail)
14712        } else if self.parse_keywords(&[Keyword::OR, Keyword::IGNORE]) {
14713            Some(SqliteOnConflict::Ignore)
14714        } else if self.parse_keyword(Keyword::REPLACE) {
14715            Some(SqliteOnConflict::Replace)
14716        } else {
14717            None
14718        }
14719    }
14720
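    /// Parse an optional Hive-style `PARTITION (...)` clause of an INSERT.
    ///
    /// Example (illustrative Hive form):
    /// ```sql
    /// INSERT INTO TABLE page_views PARTITION (dt = '2024-01-01')
    /// SELECT * FROM staging
    /// ```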
14721    pub fn parse_insert_partition(&mut self) -> Result<Option<Vec<Expr>>, ParserError> {
14722        if self.parse_keyword(Keyword::PARTITION) {
14723            self.expect_token(&Token::LParen)?;
14724            let partition_cols = Some(self.parse_comma_separated(Parser::parse_expr)?);
14725            self.expect_token(&Token::RParen)?;
14726            Ok(partition_cols)
14727        } else {
14728            Ok(None)
14729        }
14730    }
14731
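    /// Parse an optional Hive `INPUTFORMAT ... SERDE ...` clause of a LOAD DATA
    /// statement.
    ///
    /// Example (illustrative; a Hive 3.x style form):
    /// ```sql
    /// LOAD DATA INPATH '/tmp/data' INTO TABLE t
    ///     INPUTFORMAT 'org.apache.hadoop.mapred.TextInputFormat'
    ///     SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
    /// ```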
14732    pub fn parse_load_data_table_format(
14733        &mut self,
14734    ) -> Result<Option<HiveLoadDataFormat>, ParserError> {
14735        if self.parse_keyword(Keyword::INPUTFORMAT) {
14736            let input_format = self.parse_expr()?;
14737            self.expect_keyword_is(Keyword::SERDE)?;
14738            let serde = self.parse_expr()?;
14739            Ok(Some(HiveLoadDataFormat {
14740                input_format,
14741                serde,
14742            }))
14743        } else {
14744            Ok(None)
14745        }
14746    }
14747
14748    /// Parse an UPDATE statement, returning a `Box`ed SetExpr
14749    ///
14750    /// This is used to reduce the size of the stack frames in debug builds
14751    fn parse_update_setexpr_boxed(&mut self) -> Result<Box<SetExpr>, ParserError> {
14752        Ok(Box::new(SetExpr::Update(self.parse_update()?)))
14753    }
14754
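    /// Parse an UPDATE statement.
    ///
    /// Example (illustrative PostgreSQL-style form with a FROM clause):
    /// ```sql
    /// UPDATE t SET a = u.a FROM u WHERE t.id = u.id RETURNING t.id
    /// ```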
14755    pub fn parse_update(&mut self) -> Result<Statement, ParserError> {
14756        let or = self.parse_conflict_clause();
14757        let table = self.parse_table_and_joins()?;
14758        let from_before_set = if self.parse_keyword(Keyword::FROM) {
14759            Some(UpdateTableFromKind::BeforeSet(
14760                self.parse_table_with_joins()?,
14761            ))
14762        } else {
14763            None
14764        };
14765        self.expect_keyword(Keyword::SET)?;
14766        let assignments = self.parse_comma_separated(Parser::parse_assignment)?;
14767        let from = if from_before_set.is_none() && self.parse_keyword(Keyword::FROM) {
14768            Some(UpdateTableFromKind::AfterSet(
14769                self.parse_table_with_joins()?,
14770            ))
14771        } else {
14772            from_before_set
14773        };
14774        let selection = if self.parse_keyword(Keyword::WHERE) {
14775            Some(self.parse_expr()?)
14776        } else {
14777            None
14778        };
14779        let returning = if self.parse_keyword(Keyword::RETURNING) {
14780            Some(self.parse_comma_separated(Parser::parse_select_item)?)
14781        } else {
14782            None
14783        };
14784        Ok(Statement::Update {
14785            table,
14786            assignments,
14787            from,
14788            selection,
14789            returning,
14790            or,
14791        })
14792    }
14793
14794    /// Parse a `var = expr` assignment, used in an UPDATE statement
14795    pub fn parse_assignment(&mut self) -> Result<Assignment, ParserError> {
14796        let target = self.parse_assignment_target()?;
14797        self.expect_token(&Token::Eq)?;
14798        let value = self.parse_expr()?;
14799        Ok(Assignment { target, value })
14800    }
14801
14802    /// Parse the left-hand side of an assignment, used in an UPDATE statement
14803    pub fn parse_assignment_target(&mut self) -> Result<AssignmentTarget, ParserError> {
14804        if self.consume_token(&Token::LParen) {
14805            let columns = self.parse_comma_separated(|p| p.parse_object_name(false))?;
14806            self.expect_token(&Token::RParen)?;
14807            Ok(AssignmentTarget::Tuple(columns))
14808        } else {
14809            let column = self.parse_object_name(false)?;
14810            Ok(AssignmentTarget::ColumnName(column))
14811        }
14812    }
14813
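    /// Parse a single function call argument, which may be named or unnamed.
    ///
    /// Example (illustrative Snowflake form with an `=>` named argument):
    /// ```sql
    /// SELECT * FROM TABLE(FLATTEN(input => PARSE_JSON('[1, 2]')))
    /// ```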
14814    pub fn parse_function_args(&mut self) -> Result<FunctionArg, ParserError> {
14815        let arg = if self.dialect.supports_named_fn_args_with_expr_name() {
14816            self.maybe_parse(|p| {
14817                let name = p.parse_expr()?;
14818                let operator = p.parse_function_named_arg_operator()?;
14819                let arg = p.parse_wildcard_expr()?.into();
14820                Ok(FunctionArg::ExprNamed {
14821                    name,
14822                    arg,
14823                    operator,
14824                })
14825            })?
14826        } else {
14827            self.maybe_parse(|p| {
14828                let name = p.parse_identifier()?;
14829                let operator = p.parse_function_named_arg_operator()?;
14830                let arg = p.parse_wildcard_expr()?.into();
14831                Ok(FunctionArg::Named {
14832                    name,
14833                    arg,
14834                    operator,
14835                })
14836            })?
14837        };
14838        if let Some(arg) = arg {
14839            return Ok(arg);
14840        }
14841        Ok(FunctionArg::Unnamed(self.parse_wildcard_expr()?.into()))
14842    }
14843
14844    fn parse_function_named_arg_operator(&mut self) -> Result<FunctionArgOperator, ParserError> {
14845        if self.parse_keyword(Keyword::VALUE) {
14846            return Ok(FunctionArgOperator::Value);
14847        }
14848        let tok = self.next_token();
14849        match tok.token {
14850            Token::RArrow if self.dialect.supports_named_fn_args_with_rarrow_operator() => {
14851                Ok(FunctionArgOperator::RightArrow)
14852            }
14853            Token::Eq if self.dialect.supports_named_fn_args_with_eq_operator() => {
14854                Ok(FunctionArgOperator::Equals)
14855            }
14856            Token::Assignment
14857                if self
14858                    .dialect
14859                    .supports_named_fn_args_with_assignment_operator() =>
14860            {
14861                Ok(FunctionArgOperator::Assignment)
14862            }
14863            Token::Colon if self.dialect.supports_named_fn_args_with_colon_operator() => {
14864                Ok(FunctionArgOperator::Colon)
14865            }
14866            _ => {
14867                self.prev_token();
14868                self.expected("argument operator", tok)
14869            }
14870        }
14871    }
14872
14873    pub fn parse_optional_args(&mut self) -> Result<Vec<FunctionArg>, ParserError> {
14874        if self.consume_token(&Token::RParen) {
14875            Ok(vec![])
14876        } else {
14877            let args = self.parse_comma_separated(Parser::parse_function_args)?;
14878            self.expect_token(&Token::RParen)?;
14879            Ok(args)
14880        }
14881    }
14882
14883    fn parse_table_function_args(&mut self) -> Result<TableFunctionArgs, ParserError> {
14884        if self.consume_token(&Token::RParen) {
14885            return Ok(TableFunctionArgs {
14886                args: vec![],
14887                settings: None,
14888            });
14889        }
14890        let mut args = vec![];
14891        let settings = loop {
14892            if let Some(settings) = self.parse_settings()? {
14893                break Some(settings);
14894            }
14895            args.push(self.parse_function_args()?);
14896            if self.is_parse_comma_separated_end() {
14897                break None;
14898            }
14899        };
14900        self.expect_token(&Token::RParen)?;
14901        Ok(TableFunctionArgs { args, settings })
14902    }
14903
14904    /// Parses a potentially empty list of arguments to a window function
14905    /// (including the closing parenthesis).
14906    ///
14907    /// Examples:
14908    /// ```sql
14909    /// FIRST_VALUE(x ORDER BY 1,2,3);
14910    /// FIRST_VALUE(x IGNORE NULLS);
14911    /// ```
14912    fn parse_function_argument_list(&mut self) -> Result<FunctionArgumentList, ParserError> {
14913        let mut clauses = vec![];
14914
14915        // Handle MSSQL's case of an empty argument list with a json-null-clause, e.g. `JSON_ARRAY(NULL ON NULL)`
14916        if let Some(null_clause) = self.parse_json_null_clause() {
14917            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
14918        }
14919
14920        if self.consume_token(&Token::RParen) {
14921            return Ok(FunctionArgumentList {
14922                duplicate_treatment: None,
14923                args: vec![],
14924                clauses,
14925            });
14926        }
14927
14928        let duplicate_treatment = self.parse_duplicate_treatment()?;
14929        let args = self.parse_comma_separated(Parser::parse_function_args)?;
14930
14931        if self.dialect.supports_window_function_null_treatment_arg() {
14932            if let Some(null_treatment) = self.parse_null_treatment()? {
14933                clauses.push(FunctionArgumentClause::IgnoreOrRespectNulls(null_treatment));
14934            }
14935        }
14936
14937        if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
14938            clauses.push(FunctionArgumentClause::OrderBy(
14939                self.parse_comma_separated(Parser::parse_order_by_expr)?,
14940            ));
14941        }
14942
14943        if self.parse_keyword(Keyword::LIMIT) {
14944            clauses.push(FunctionArgumentClause::Limit(self.parse_expr()?));
14945        }
14946
14947        if dialect_of!(self is GenericDialect | BigQueryDialect)
14948            && self.parse_keyword(Keyword::HAVING)
14949        {
14950            let kind = match self.expect_one_of_keywords(&[Keyword::MIN, Keyword::MAX])? {
14951                Keyword::MIN => HavingBoundKind::Min,
14952                Keyword::MAX => HavingBoundKind::Max,
14953                _ => unreachable!(),
14954            };
14955            clauses.push(FunctionArgumentClause::Having(HavingBound(
14956                kind,
14957                self.parse_expr()?,
14958            )))
14959        }
14960
14961        if dialect_of!(self is GenericDialect | MySqlDialect)
14962            && self.parse_keyword(Keyword::SEPARATOR)
14963        {
14964            clauses.push(FunctionArgumentClause::Separator(self.parse_value()?.value));
14965        }
14966
14967        if let Some(on_overflow) = self.parse_listagg_on_overflow()? {
14968            clauses.push(FunctionArgumentClause::OnOverflow(on_overflow));
14969        }
14970
14971        if let Some(null_clause) = self.parse_json_null_clause() {
14972            clauses.push(FunctionArgumentClause::JsonNullClause(null_clause));
14973        }
14974
14975        self.expect_token(&Token::RParen)?;
14976        Ok(FunctionArgumentList {
14977            duplicate_treatment,
14978            args,
14979            clauses,
14980        })
14981    }
14982
14983    /// Parses MSSQL's json-null-clause
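    ///
    /// Example (illustrative SQL Server form):
    /// ```sql
    /// SELECT JSON_ARRAY(1, NULL, 2 NULL ON NULL)
    /// ```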
14984    fn parse_json_null_clause(&mut self) -> Option<JsonNullClause> {
14985        if self.parse_keywords(&[Keyword::ABSENT, Keyword::ON, Keyword::NULL]) {
14986            Some(JsonNullClause::AbsentOnNull)
14987        } else if self.parse_keywords(&[Keyword::NULL, Keyword::ON, Keyword::NULL]) {
14988            Some(JsonNullClause::NullOnNull)
14989        } else {
14990            None
14991        }
14992    }
14993
14994    fn parse_duplicate_treatment(&mut self) -> Result<Option<DuplicateTreatment>, ParserError> {
14995        let loc = self.peek_token().span.start;
14996        match (
14997            self.parse_keyword(Keyword::ALL),
14998            self.parse_keyword(Keyword::DISTINCT),
14999        ) {
15000            (true, false) => Ok(Some(DuplicateTreatment::All)),
15001            (false, true) => Ok(Some(DuplicateTreatment::Distinct)),
15002            (false, false) => Ok(None),
15003            (true, true) => parser_err!("Cannot specify both ALL and DISTINCT".to_string(), loc),
15004        }
15005    }
15006
15007    /// Parse a comma-delimited list of projections after SELECT
15008    pub fn parse_select_item(&mut self) -> Result<SelectItem, ParserError> {
15009        let prefix = self
15010            .parse_one_of_keywords(
15011                self.dialect
15012                    .get_reserved_keywords_for_select_item_operator(),
15013            )
15014            .map(|keyword| Ident::new(format!("{keyword:?}")));
15015
15016        match self.parse_wildcard_expr()? {
15017            Expr::QualifiedWildcard(prefix, token) => Ok(SelectItem::QualifiedWildcard(
15018                SelectItemQualifiedWildcardKind::ObjectName(prefix),
15019                self.parse_wildcard_additional_options(token.0)?,
15020            )),
15021            Expr::Wildcard(token) => Ok(SelectItem::Wildcard(
15022                self.parse_wildcard_additional_options(token.0)?,
15023            )),
15024            Expr::Identifier(v) if v.value.to_lowercase() == "from" && v.quote_style.is_none() => {
15025                parser_err!(
15026                    format!("Expected an expression, found: {}", v),
15027                    self.peek_token().span.start
15028                )
15029            }
15030            Expr::BinaryOp {
15031                left,
15032                op: BinaryOperator::Eq,
15033                right,
15034            } if self.dialect.supports_eq_alias_assignment()
15035                && matches!(left.as_ref(), Expr::Identifier(_)) =>
15036            {
15037                let Expr::Identifier(alias) = *left else {
15038                    return parser_err!(
15039                        "BUG: expected identifier expression as alias",
15040                        self.peek_token().span.start
15041                    );
15042                };
15043                Ok(SelectItem::ExprWithAlias {
15044                    expr: *right,
15045                    alias,
15046                })
15047            }
15048            expr if self.dialect.supports_select_expr_star()
15049                && self.consume_tokens(&[Token::Period, Token::Mul]) =>
15050            {
15051                let wildcard_token = self.get_previous_token().clone();
15052                Ok(SelectItem::QualifiedWildcard(
15053                    SelectItemQualifiedWildcardKind::Expr(expr),
15054                    self.parse_wildcard_additional_options(wildcard_token)?,
15055                ))
15056            }
15057            expr => self
15058                .maybe_parse_select_item_alias()
15059                .map(|alias| match alias {
15060                    Some(alias) => SelectItem::ExprWithAlias {
15061                        expr: maybe_prefixed_expr(expr, prefix),
15062                        alias,
15063                    },
15064                    None => SelectItem::UnnamedExpr(maybe_prefixed_expr(expr, prefix)),
15065                }),
15066        }
15067    }
15068
15069    /// Parse the [`WildcardAdditionalOptions`] that may follow a wildcard in a select item.
15070    ///
15071    /// Any option that is not present is returned as `None`.
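    ///
    /// Example (illustrative Snowflake form combining two options):
    /// ```sql
    /// SELECT * EXCLUDE (ssn) RENAME (id AS user_id) FROM users
    /// ```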
15072    pub fn parse_wildcard_additional_options(
15073        &mut self,
15074        wildcard_token: TokenWithSpan,
15075    ) -> Result<WildcardAdditionalOptions, ParserError> {
15076        let opt_ilike = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
15077            self.parse_optional_select_item_ilike()?
15078        } else {
15079            None
15080        };
15081        let opt_exclude = if opt_ilike.is_none() && self.dialect.supports_select_wildcard_exclude()
15082        {
15083            self.parse_optional_select_item_exclude()?
15084        } else {
15085            None
15086        };
15087        let opt_except = if self.dialect.supports_select_wildcard_except() {
15088            self.parse_optional_select_item_except()?
15089        } else {
15090            None
15091        };
15092        let opt_replace = if dialect_of!(self is GenericDialect | BigQueryDialect | ClickHouseDialect | DuckDbDialect | SnowflakeDialect)
15093        {
15094            self.parse_optional_select_item_replace()?
15095        } else {
15096            None
15097        };
15098        let opt_rename = if dialect_of!(self is GenericDialect | SnowflakeDialect) {
15099            self.parse_optional_select_item_rename()?
15100        } else {
15101            None
15102        };
15103
15104        Ok(WildcardAdditionalOptions {
15105            wildcard_token: wildcard_token.into(),
15106            opt_ilike,
15107            opt_exclude,
15108            opt_except,
15109            opt_rename,
15110            opt_replace,
15111        })
15112    }
15113
15114    /// Parse an optional [`Ilike`](IlikeSelectItem) clause for wildcard select items.
15115    ///
15116    /// Returns `Ok(None)` if the clause is not present.
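    ///
    /// Example (illustrative Snowflake form):
    /// ```sql
    /// SELECT * ILIKE '%id%' FROM users
    /// ```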
15117    pub fn parse_optional_select_item_ilike(
15118        &mut self,
15119    ) -> Result<Option<IlikeSelectItem>, ParserError> {
15120        let opt_ilike = if self.parse_keyword(Keyword::ILIKE) {
15121            let next_token = self.next_token();
15122            let pattern = match next_token.token {
15123                Token::SingleQuotedString(s) => s,
15124                _ => return self.expected("ilike pattern", next_token),
15125            };
15126            Some(IlikeSelectItem { pattern })
15127        } else {
15128            None
15129        };
15130        Ok(opt_ilike)
15131    }
15132
15133    /// Parse an optional [`Exclude`](ExcludeSelectItem) clause for wildcard select items.
15134    ///
15135    /// Returns `Ok(None)` if the clause is not present.
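    ///
    /// Example (illustrative Snowflake/DuckDB form):
    /// ```sql
    /// SELECT * EXCLUDE (created_at, updated_at) FROM users
    /// ```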
15136    pub fn parse_optional_select_item_exclude(
15137        &mut self,
15138    ) -> Result<Option<ExcludeSelectItem>, ParserError> {
15139        let opt_exclude = if self.parse_keyword(Keyword::EXCLUDE) {
15140            if self.consume_token(&Token::LParen) {
15141                let columns = self.parse_comma_separated(|parser| parser.parse_identifier())?;
15142                self.expect_token(&Token::RParen)?;
15143                Some(ExcludeSelectItem::Multiple(columns))
15144            } else {
15145                let column = self.parse_identifier()?;
15146                Some(ExcludeSelectItem::Single(column))
15147            }
15148        } else {
15149            None
15150        };
15151
15152        Ok(opt_exclude)
15153    }
15154
15155    /// Parse an optional [`Except`](ExceptSelectItem) clause for wildcard select items.
15156    ///
15157    /// Returns `Ok(None)` if the clause is not present.
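    ///
    /// Example (illustrative BigQuery form):
    /// ```sql
    /// SELECT * EXCEPT (order_id) FROM orders
    /// ```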
15158    pub fn parse_optional_select_item_except(
15159        &mut self,
15160    ) -> Result<Option<ExceptSelectItem>, ParserError> {
15161        let opt_except = if self.parse_keyword(Keyword::EXCEPT) {
15162            if self.peek_token().token == Token::LParen {
15163                let idents = self.parse_parenthesized_column_list(Mandatory, false)?;
15164                match &idents[..] {
15165                    [] => {
15166                        return self.expected(
15167                            "at least one column should be parsed by the EXCEPT clause",
15168                            self.peek_token(),
15169                        )?;
15170                    }
15171                    [first, idents @ ..] => Some(ExceptSelectItem {
15172                        first_element: first.clone(),
15173                        additional_elements: idents.to_vec(),
15174                    }),
15175                }
15176            } else {
15177                // ClickHouse allows a bare EXCEPT column_name
15178                let ident = self.parse_identifier()?;
15179                Some(ExceptSelectItem {
15180                    first_element: ident,
15181                    additional_elements: vec![],
15182                })
15183            }
15184        } else {
15185            None
15186        };
15187
15188        Ok(opt_except)
15189    }
15190
15191    /// Parse an optional [`Rename`](RenameSelectItem) clause for wildcard select items.
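    ///
    /// Example (illustrative Snowflake form):
    /// ```sql
    /// SELECT * RENAME (id AS user_id) FROM users
    /// ```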
15192    pub fn parse_optional_select_item_rename(
15193        &mut self,
15194    ) -> Result<Option<RenameSelectItem>, ParserError> {
15195        let opt_rename = if self.parse_keyword(Keyword::RENAME) {
15196            if self.consume_token(&Token::LParen) {
15197                let idents =
15198                    self.parse_comma_separated(|parser| parser.parse_identifier_with_alias())?;
15199                self.expect_token(&Token::RParen)?;
15200                Some(RenameSelectItem::Multiple(idents))
15201            } else {
15202                let ident = self.parse_identifier_with_alias()?;
15203                Some(RenameSelectItem::Single(ident))
15204            }
15205        } else {
15206            None
15207        };
15208
15209        Ok(opt_rename)
15210    }
15211
15212    /// Parse an optional [`Replace`](ReplaceSelectItem) clause for wildcard select items.
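    ///
    /// Example (illustrative BigQuery form):
    /// ```sql
    /// SELECT * REPLACE (quantity * 2 AS quantity) FROM orders
    /// ```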
15213    pub fn parse_optional_select_item_replace(
15214        &mut self,
15215    ) -> Result<Option<ReplaceSelectItem>, ParserError> {
15216        let opt_replace = if self.parse_keyword(Keyword::REPLACE) {
15217            if self.consume_token(&Token::LParen) {
15218                let items = self.parse_comma_separated(|parser| {
15219                    Ok(Box::new(parser.parse_replace_elements()?))
15220                })?;
15221                self.expect_token(&Token::RParen)?;
15222                Some(ReplaceSelectItem { items })
15223            } else {
15224                let tok = self.next_token();
15225                return self.expected("( after REPLACE", tok);
15226            }
15227        } else {
15228            None
15229        };
15230
15231        Ok(opt_replace)
15232    }
15233    pub fn parse_replace_elements(&mut self) -> Result<ReplaceSelectElement, ParserError> {
15234        let expr = self.parse_expr()?;
15235        let as_keyword = self.parse_keyword(Keyword::AS);
15236        let ident = self.parse_identifier()?;
15237        Ok(ReplaceSelectElement {
15238            expr,
15239            column_name: ident,
15240            as_keyword,
15241        })
15242    }
15243
15244    /// Parse ASC or DESC, returning `Some(true)` for ASC, `Some(false)` for DESC, or `None`
15245    /// if neither is present.
15246    pub fn parse_asc_desc(&mut self) -> Option<bool> {
15247        if self.parse_keyword(Keyword::ASC) {
15248            Some(true)
15249        } else if self.parse_keyword(Keyword::DESC) {
15250            Some(false)
15251        } else {
15252            None
15253        }
15254    }
15255
15256    /// Parse an [OrderByExpr] expression.
15257    pub fn parse_order_by_expr(&mut self) -> Result<OrderByExpr, ParserError> {
15258        self.parse_order_by_expr_inner(false)
15259            .map(|(order_by, _)| order_by)
15260    }
15261
15262    /// Parse an [IndexColumn].
15263    pub fn parse_create_index_expr(&mut self) -> Result<IndexColumn, ParserError> {
15264        self.parse_order_by_expr_inner(true)
15265            .map(|(column, operator_class)| IndexColumn {
15266                column,
15267                operator_class,
15268            })
15269    }
15270
15271    fn parse_order_by_expr_inner(
15272        &mut self,
15273        with_operator_class: bool,
15274    ) -> Result<(OrderByExpr, Option<Ident>), ParserError> {
15275        let expr = self.parse_expr()?;
15276
15277        let operator_class: Option<Ident> = if with_operator_class {
15278            // If none of the following keywords is present, parse an
15279            // identifier as the operator class.
15280            if self
15281                .peek_one_of_keywords(&[Keyword::ASC, Keyword::DESC, Keyword::NULLS, Keyword::WITH])
15282                .is_some()
15283            {
15284                None
15285            } else {
15286                self.maybe_parse(|parser| parser.parse_identifier())?
15287            }
15288        } else {
15289            None
15290        };
15291
15292        let options = self.parse_order_by_options()?;
15293
15294        let with_fill = if dialect_of!(self is ClickHouseDialect | GenericDialect)
15295            && self.parse_keywords(&[Keyword::WITH, Keyword::FILL])
15296        {
15297            Some(self.parse_with_fill()?)
15298        } else {
15299            None
15300        };
15301
15302        Ok((
15303            OrderByExpr {
15304                expr,
15305                options,
15306                with_fill,
15307            },
15308            operator_class,
15309        ))
15310    }
15311
15312    fn parse_order_by_options(&mut self) -> Result<OrderByOptions, ParserError> {
15313        let asc = self.parse_asc_desc();
15314
15315        let nulls_first = if self.parse_keywords(&[Keyword::NULLS, Keyword::FIRST]) {
15316            Some(true)
15317        } else if self.parse_keywords(&[Keyword::NULLS, Keyword::LAST]) {
15318            Some(false)
15319        } else {
15320            None
15321        };
15322
15323        Ok(OrderByOptions { asc, nulls_first })
15324    }
15325
15326    // Parse a WITH FILL clause (ClickHouse dialect)
15327    // that follows the WITH FILL keywords in an ORDER BY clause
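    //
    // Example (illustrative ClickHouse form):
    //
    // ```sql
    // SELECT n FROM nums ORDER BY n WITH FILL FROM 1 TO 10 STEP 2
    // ```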
15328    pub fn parse_with_fill(&mut self) -> Result<WithFill, ParserError> {
15329        let from = if self.parse_keyword(Keyword::FROM) {
15330            Some(self.parse_expr()?)
15331        } else {
15332            None
15333        };
15334
15335        let to = if self.parse_keyword(Keyword::TO) {
15336            Some(self.parse_expr()?)
15337        } else {
15338            None
15339        };
15340
15341        let step = if self.parse_keyword(Keyword::STEP) {
15342            Some(self.parse_expr()?)
15343        } else {
15344            None
15345        };
15346
15347        Ok(WithFill { from, to, step })
15348    }
15349
15350    // Parse a set of comma-separated INTERPOLATE expressions (ClickHouse dialect)
15351    // that follow the INTERPOLATE keyword in an ORDER BY clause with the WITH FILL modifier
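    //
    // Example (illustrative ClickHouse form):
    //
    // ```sql
    // SELECT n, v FROM t ORDER BY n WITH FILL FROM 1 TO 5 INTERPOLATE (v AS v + 1)
    // ```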
15352    pub fn parse_interpolations(&mut self) -> Result<Option<Interpolate>, ParserError> {
15353        if !self.parse_keyword(Keyword::INTERPOLATE) {
15354            return Ok(None);
15355        }
15356
15357        if self.consume_token(&Token::LParen) {
15358            let interpolations =
15359                self.parse_comma_separated0(|p| p.parse_interpolation(), Token::RParen)?;
15360            self.expect_token(&Token::RParen)?;
15361            // INTERPOLATE () and INTERPOLATE ( ... ) variants
15362            return Ok(Some(Interpolate {
15363                exprs: Some(interpolations),
15364            }));
15365        }
15366
15367        // INTERPOLATE
15368        Ok(Some(Interpolate { exprs: None }))
15369    }
15370
15371    // Parse an INTERPOLATE expression (ClickHouse dialect)
15372    pub fn parse_interpolation(&mut self) -> Result<InterpolateExpr, ParserError> {
15373        let column = self.parse_identifier()?;
15374        let expr = if self.parse_keyword(Keyword::AS) {
15375            Some(self.parse_expr()?)
15376        } else {
15377            None
15378        };
15379        Ok(InterpolateExpr { column, expr })
15380    }
15381
15382    /// Parse a TOP clause, the MSSQL equivalent of LIMIT,
15383    /// which follows `SELECT [DISTINCT]`.
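    ///
    /// Example (illustrative SQL Server form):
    /// ```sql
    /// SELECT TOP (10) PERCENT name FROM employees ORDER BY name
    /// ```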
15384    pub fn parse_top(&mut self) -> Result<Top, ParserError> {
15385        let quantity = if self.consume_token(&Token::LParen) {
15386            let quantity = self.parse_expr()?;
15387            self.expect_token(&Token::RParen)?;
15388            Some(TopQuantity::Expr(quantity))
15389        } else {
15390            let next_token = self.next_token();
15391            let quantity = match next_token.token {
15392                Token::Number(s, _) => Self::parse::<u64>(s, next_token.span.start)?,
15393                _ => self.expected("literal int", next_token)?,
15394            };
15395            Some(TopQuantity::Constant(quantity))
15396        };
15397
15398        let percent = self.parse_keyword(Keyword::PERCENT);
15399
15400        let with_ties = self.parse_keywords(&[Keyword::WITH, Keyword::TIES]);
15401
15402        Ok(Top {
15403            with_ties,
15404            percent,
15405            quantity,
15406        })
15407    }
15408
15409    /// Parse a LIMIT clause
15410    pub fn parse_limit(&mut self) -> Result<Option<Expr>, ParserError> {
15411        if self.parse_keyword(Keyword::ALL) {
15412            Ok(None)
15413        } else {
15414            Ok(Some(self.parse_expr()?))
15415        }
15416    }
15417
15418    /// Parse an OFFSET clause
15419    pub fn parse_offset(&mut self) -> Result<Offset, ParserError> {
15420        let value = self.parse_expr()?;
15421        let rows = if self.parse_keyword(Keyword::ROW) {
15422            OffsetRows::Row
15423        } else if self.parse_keyword(Keyword::ROWS) {
15424            OffsetRows::Rows
15425        } else {
15426            OffsetRows::None
15427        };
15428        Ok(Offset { value, rows })
15429    }
15430
15431    /// Parse a FETCH clause
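    ///
    /// Example (illustrative ANSI/SQL Server form):
    /// ```sql
    /// SELECT * FROM t ORDER BY id OFFSET 5 ROWS FETCH NEXT 10 ROWS ONLY
    /// ```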
15432    pub fn parse_fetch(&mut self) -> Result<Fetch, ParserError> {
15433        let _ = self.parse_one_of_keywords(&[Keyword::FIRST, Keyword::NEXT]);
15434
15435        let (quantity, percent) = if self
15436            .parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS])
15437            .is_some()
15438        {
15439            (None, false)
15440        } else {
15441            let quantity = Expr::Value(self.parse_value()?);
15442            let percent = self.parse_keyword(Keyword::PERCENT);
15443            let _ = self.parse_one_of_keywords(&[Keyword::ROW, Keyword::ROWS]);
15444            (Some(quantity), percent)
15445        };
15446
15447        let with_ties = if self.parse_keyword(Keyword::ONLY) {
15448            false
15449        } else {
15450            self.parse_keywords(&[Keyword::WITH, Keyword::TIES])
15451        };
15452
15453        Ok(Fetch {
15454            with_ties,
15455            percent,
15456            quantity,
15457        })
15458    }
15459
15460    /// Parse a FOR UPDATE/FOR SHARE clause
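    ///
    /// Example (illustrative PostgreSQL/MySQL form):
    /// ```sql
    /// SELECT * FROM jobs FOR UPDATE SKIP LOCKED
    /// ```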
15461    pub fn parse_lock(&mut self) -> Result<LockClause, ParserError> {
15462        let lock_type = match self.expect_one_of_keywords(&[Keyword::UPDATE, Keyword::SHARE])? {
15463            Keyword::UPDATE => LockType::Update,
15464            Keyword::SHARE => LockType::Share,
15465            _ => unreachable!(),
15466        };
15467        let of = if self.parse_keyword(Keyword::OF) {
15468            Some(self.parse_object_name(false)?)
15469        } else {
15470            None
15471        };
15472        let nonblock = if self.parse_keyword(Keyword::NOWAIT) {
15473            Some(NonBlock::Nowait)
15474        } else if self.parse_keywords(&[Keyword::SKIP, Keyword::LOCKED]) {
15475            Some(NonBlock::SkipLocked)
15476        } else {
15477            None
15478        };
15479        Ok(LockClause {
15480            lock_type,
15481            of,
15482            nonblock,
15483        })
15484    }
15485
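    /// Parse a `VALUES` row list.
    ///
    /// Example (illustrative MySQL form using explicit `ROW`):
    /// ```sql
    /// VALUES ROW(1, 'one'), ROW(2, 'two')
    /// ```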
15486    pub fn parse_values(&mut self, allow_empty: bool) -> Result<Values, ParserError> {
15487        let mut explicit_row = false;
15488
15489        let rows = self.parse_comma_separated(|parser| {
15490            if parser.parse_keyword(Keyword::ROW) {
15491                explicit_row = true;
15492            }
15493
15494            parser.expect_token(&Token::LParen)?;
15495            if allow_empty && parser.peek_token().token == Token::RParen {
15496                parser.next_token();
15497                Ok(vec![])
15498            } else {
15499                let exprs = parser.parse_comma_separated(Parser::parse_expr)?;
15500                parser.expect_token(&Token::RParen)?;
15501                Ok(exprs)
15502            }
15503        })?;
15504        Ok(Values { explicit_row, rows })
15505    }
15506
15507    pub fn parse_start_transaction(&mut self) -> Result<Statement, ParserError> {
15508        self.expect_keyword_is(Keyword::TRANSACTION)?;
15509        Ok(Statement::StartTransaction {
15510            modes: self.parse_transaction_modes()?,
15511            begin: false,
15512            transaction: Some(BeginTransactionKind::Transaction),
15513            modifier: None,
15514            statements: vec![],
15515            exception: None,
15516            has_end_keyword: false,
15517        })
15518    }
15519
15520    pub fn parse_begin(&mut self) -> Result<Statement, ParserError> {
15521        let modifier = if !self.dialect.supports_start_transaction_modifier() {
15522            None
15523        } else if self.parse_keyword(Keyword::DEFERRED) {
15524            Some(TransactionModifier::Deferred)
15525        } else if self.parse_keyword(Keyword::IMMEDIATE) {
15526            Some(TransactionModifier::Immediate)
15527        } else if self.parse_keyword(Keyword::EXCLUSIVE) {
15528            Some(TransactionModifier::Exclusive)
15529        } else if self.parse_keyword(Keyword::TRY) {
15530            Some(TransactionModifier::Try)
15531        } else if self.parse_keyword(Keyword::CATCH) {
15532            Some(TransactionModifier::Catch)
15533        } else {
15534            None
15535        };
15536        let transaction = match self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]) {
15537            Some(Keyword::TRANSACTION) => Some(BeginTransactionKind::Transaction),
15538            Some(Keyword::WORK) => Some(BeginTransactionKind::Work),
15539            _ => None,
15540        };
15541        Ok(Statement::StartTransaction {
15542            modes: self.parse_transaction_modes()?,
15543            begin: true,
15544            transaction,
15545            modifier,
15546            statements: vec![],
15547            exception: None,
15548            has_end_keyword: false,
15549        })
15550    }
15551
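    /// Parse the statements of a `BEGIN ... [EXCEPTION WHEN ... THEN ...] END` block.
    ///
    /// Example (illustrative BigQuery scripting form):
    /// ```sql
    /// BEGIN
    ///     SELECT 1;
    /// EXCEPTION WHEN ERROR THEN
    ///     SELECT 2;
    /// END
    /// ```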
15552    pub fn parse_begin_exception_end(&mut self) -> Result<Statement, ParserError> {
15553        let statements = self.parse_statement_list(&[Keyword::EXCEPTION, Keyword::END])?;
15554
15555        let exception = if self.parse_keyword(Keyword::EXCEPTION) {
15556            let mut when = Vec::new();
15557
15558            // We can have multiple `WHEN` arms, so we consume all cases until `END`
15559            while !self.peek_keyword(Keyword::END) {
15560                self.expect_keyword(Keyword::WHEN)?;
15561
15562                // Each `WHEN` case can have one or more conditions, e.g.
15563                // WHEN EXCEPTION_1 [OR EXCEPTION_2] THEN
15564                // So we parse identifiers until the `THEN` keyword.
15565                let mut idents = Vec::new();
15566
15567                while !self.parse_keyword(Keyword::THEN) {
15568                    let ident = self.parse_identifier()?;
15569                    idents.push(ident);
15570
15571                    self.maybe_parse(|p| p.expect_keyword(Keyword::OR))?;
15572                }
15573
15574                let statements = self.parse_statement_list(&[Keyword::WHEN, Keyword::END])?;
15575
15576                when.push(ExceptionWhen { idents, statements });
15577            }
15578
15579            Some(when)
15580        } else {
15581            None
15582        };
15583
15584        self.expect_keyword(Keyword::END)?;
15585
15586        Ok(Statement::StartTransaction {
15587            begin: true,
15588            statements,
15589            exception,
15590            has_end_keyword: true,
15591            transaction: None,
15592            modifier: None,
15593            modes: Default::default(),
15594        })
15595    }
15596
15597    pub fn parse_end(&mut self) -> Result<Statement, ParserError> {
15598        let modifier = if !self.dialect.supports_end_transaction_modifier() {
15599            None
15600        } else if self.parse_keyword(Keyword::TRY) {
15601            Some(TransactionModifier::Try)
15602        } else if self.parse_keyword(Keyword::CATCH) {
15603            Some(TransactionModifier::Catch)
15604        } else {
15605            None
15606        };
15607        Ok(Statement::Commit {
15608            chain: self.parse_commit_rollback_chain()?,
15609            end: true,
15610            modifier,
15611        })
15612    }
15613
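    /// Parse an optional list of transaction modes (isolation level and/or
    /// access mode).
    ///
    /// Example (illustrative PostgreSQL form):
    /// ```sql
    /// START TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ ONLY
    /// ```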
15614    pub fn parse_transaction_modes(&mut self) -> Result<Vec<TransactionMode>, ParserError> {
15615        let mut modes = vec![];
15616        let mut required = false;
15617        loop {
15618            let mode = if self.parse_keywords(&[Keyword::ISOLATION, Keyword::LEVEL]) {
15619                let iso_level = if self.parse_keywords(&[Keyword::READ, Keyword::UNCOMMITTED]) {
15620                    TransactionIsolationLevel::ReadUncommitted
15621                } else if self.parse_keywords(&[Keyword::READ, Keyword::COMMITTED]) {
15622                    TransactionIsolationLevel::ReadCommitted
15623                } else if self.parse_keywords(&[Keyword::REPEATABLE, Keyword::READ]) {
15624                    TransactionIsolationLevel::RepeatableRead
15625                } else if self.parse_keyword(Keyword::SERIALIZABLE) {
15626                    TransactionIsolationLevel::Serializable
15627                } else if self.parse_keyword(Keyword::SNAPSHOT) {
15628                    TransactionIsolationLevel::Snapshot
15629                } else {
15630                    self.expected("isolation level", self.peek_token())?
15631                };
15632                TransactionMode::IsolationLevel(iso_level)
15633            } else if self.parse_keywords(&[Keyword::READ, Keyword::ONLY]) {
15634                TransactionMode::AccessMode(TransactionAccessMode::ReadOnly)
15635            } else if self.parse_keywords(&[Keyword::READ, Keyword::WRITE]) {
15636                TransactionMode::AccessMode(TransactionAccessMode::ReadWrite)
15637            } else if required {
15638                self.expected("transaction mode", self.peek_token())?
15639            } else {
15640                break;
15641            };
15642            modes.push(mode);
15643            // ANSI requires a comma after each transaction mode, but
15644            // PostgreSQL, for historical reasons, does not. We follow
15645            // PostgreSQL in making the comma optional, since that is strictly
15646            // more general.
15647            required = self.consume_token(&Token::Comma);
15648        }
15649        Ok(modes)
15650    }
15651
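    /// Parse a `COMMIT [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ]` statement.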
15652    pub fn parse_commit(&mut self) -> Result<Statement, ParserError> {
15653        Ok(Statement::Commit {
15654            chain: self.parse_commit_rollback_chain()?,
15655            end: false,
15656            modifier: None,
15657        })
15658    }
15659
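    /// Parse a `ROLLBACK [ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ]
    /// [ TO [ SAVEPOINT ] savepoint_name ]` statement.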
15660    pub fn parse_rollback(&mut self) -> Result<Statement, ParserError> {
15661        let chain = self.parse_commit_rollback_chain()?;
15662        let savepoint = self.parse_rollback_savepoint()?;
15663
15664        Ok(Statement::Rollback { chain, savepoint })
15665    }
15666
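    /// Parse the optional `[ TRANSACTION | WORK ] [ AND [ NO ] CHAIN ]` suffix
    /// shared by `COMMIT`, `ROLLBACK` and `END`, returning `true` if `AND CHAIN`
    /// was specified.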
15667    pub fn parse_commit_rollback_chain(&mut self) -> Result<bool, ParserError> {
15668        let _ = self.parse_one_of_keywords(&[Keyword::TRANSACTION, Keyword::WORK]);
15669        if self.parse_keyword(Keyword::AND) {
15670            let chain = !self.parse_keyword(Keyword::NO);
15671            self.expect_keyword_is(Keyword::CHAIN)?;
15672            Ok(chain)
15673        } else {
15674            Ok(false)
15675        }
15676    }
15677
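    /// Parse the optional `TO [ SAVEPOINT ] savepoint_name` clause of a
    /// `ROLLBACK` statement.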
15678    pub fn parse_rollback_savepoint(&mut self) -> Result<Option<Ident>, ParserError> {
15679        if self.parse_keyword(Keyword::TO) {
15680            let _ = self.parse_keyword(Keyword::SAVEPOINT);
15681            let savepoint = self.parse_identifier()?;
15682
15683            Ok(Some(savepoint))
15684        } else {
15685            Ok(None)
15686        }
15687    }
15688
15689    /// Parse a `RAISERROR` statement.
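    ///
    /// The form accepted by this parser is:
    ///
    /// ```sql
    /// RAISERROR ( message , severity , state [ , argument [ , ...n ] ] )
    ///     [ WITH option [ , ...n ] ]
    /// ```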
15690    pub fn parse_raiserror(&mut self) -> Result<Statement, ParserError> {
15691        self.expect_token(&Token::LParen)?;
15692        let message = Box::new(self.parse_expr()?);
15693        self.expect_token(&Token::Comma)?;
15694        let severity = Box::new(self.parse_expr()?);
15695        self.expect_token(&Token::Comma)?;
15696        let state = Box::new(self.parse_expr()?);
15697        let arguments = if self.consume_token(&Token::Comma) {
15698            self.parse_comma_separated(Parser::parse_expr)?
15699        } else {
15700            vec![]
15701        };
15702        self.expect_token(&Token::RParen)?;
15703        let options = if self.parse_keyword(Keyword::WITH) {
15704            self.parse_comma_separated(Parser::parse_raiserror_option)?
15705        } else {
15706            vec![]
15707        };
15708        Ok(Statement::RaisError {
15709            message,
15710            severity,
15711            state,
15712            arguments,
15713            options,
15714        })
15715    }
15716
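    /// Parse a single `RAISERROR` option: `LOG`, `NOWAIT` or `SETERROR`.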
15717    pub fn parse_raiserror_option(&mut self) -> Result<RaisErrorOption, ParserError> {
15718        match self.expect_one_of_keywords(&[Keyword::LOG, Keyword::NOWAIT, Keyword::SETERROR])? {
15719            Keyword::LOG => Ok(RaisErrorOption::Log),
15720            Keyword::NOWAIT => Ok(RaisErrorOption::NoWait),
15721            Keyword::SETERROR => Ok(RaisErrorOption::SetError),
15722            _ => self.expected(
15723                "LOG, NOWAIT OR SETERROR raiserror option",
15724                self.peek_token(),
15725            ),
15726        }
15727    }
15728
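    /// Parse a `DEALLOCATE [ PREPARE ] name` statement.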
15729    pub fn parse_deallocate(&mut self) -> Result<Statement, ParserError> {
15730        let prepare = self.parse_keyword(Keyword::PREPARE);
15731        let name = self.parse_identifier()?;
15732        Ok(Statement::Deallocate { name, prepare })
15733    }
15734
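    /// Parse an `EXECUTE` statement: `EXECUTE IMMEDIATE` on dialects that
    /// support it, or `EXECUTE name [ ( parameter [, ...] ) ]`, with optional
    /// `INTO`, `USING`, `OUTPUT` and `DEFAULT` clauses.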
15735    pub fn parse_execute(&mut self) -> Result<Statement, ParserError> {
15736        let name = if self.dialect.supports_execute_immediate()
15737            && self.parse_keyword(Keyword::IMMEDIATE)
15738        {
15739            None
15740        } else {
15741            let name = self.parse_object_name(false)?;
15742            Some(name)
15743        };
15744
15745        let has_parentheses = self.consume_token(&Token::LParen);
15746
15747        let end_kws = &[Keyword::USING, Keyword::OUTPUT, Keyword::DEFAULT];
15748        let end_token = match (has_parentheses, self.peek_token().token) {
15749            (true, _) => Token::RParen,
15750            (false, Token::EOF) => Token::EOF,
15751            (false, Token::Word(w)) if end_kws.contains(&w.keyword) => Token::Word(w),
15752            (false, _) => Token::SemiColon,
15753        };
15754
15755        let parameters = self.parse_comma_separated0(Parser::parse_expr, end_token)?;
15756
15757        if has_parentheses {
15758            self.expect_token(&Token::RParen)?;
15759        }
15760
15761        let into = if self.parse_keyword(Keyword::INTO) {
15762            self.parse_comma_separated(Self::parse_identifier)?
15763        } else {
15764            vec![]
15765        };
15766
15767        let using = if self.parse_keyword(Keyword::USING) {
15768            self.parse_comma_separated(Self::parse_expr_with_alias)?
15769        } else {
15770            vec![]
15771        };
15772
15773        let output = self.parse_keyword(Keyword::OUTPUT);
15774
15775        let default = self.parse_keyword(Keyword::DEFAULT);
15776
15777        Ok(Statement::Execute {
15778            immediate: name.is_none(),
15779            name,
15780            parameters,
15781            has_parentheses,
15782            into,
15783            using,
15784            output,
15785            default,
15786        })
15787    }
15788
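    /// Parse a `PREPARE name [ ( data_type [, ...] ) ] AS statement` statement.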
15789    pub fn parse_prepare(&mut self) -> Result<Statement, ParserError> {
15790        let name = self.parse_identifier()?;
15791
15792        let mut data_types = vec![];
15793        if self.consume_token(&Token::LParen) {
15794            data_types = self.parse_comma_separated(Parser::parse_data_type)?;
15795            self.expect_token(&Token::RParen)?;
15796        }
15797
15798        self.expect_keyword_is(Keyword::AS)?;
15799        let statement = Box::new(self.parse_statement()?);
15800        Ok(Statement::Prepare {
15801            name,
15802            data_types,
15803            statement,
15804        })
15805    }
15806
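    /// Parse an `UNLOAD ( query ) TO destination [ WITH ( option [, ...] ) ]`
    /// statement.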
15807    pub fn parse_unload(&mut self) -> Result<Statement, ParserError> {
15808        self.expect_token(&Token::LParen)?;
15809        let query = self.parse_query()?;
15810        self.expect_token(&Token::RParen)?;
15811
15812        self.expect_keyword_is(Keyword::TO)?;
15813        let to = self.parse_identifier()?;
15814
15815        let with_options = self.parse_options(Keyword::WITH)?;
15816
15817        Ok(Statement::Unload {
15818            query,
15819            to,
15820            with: with_options,
15821        })
15822    }
15823
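    /// Parse the `WHEN` clauses of a `MERGE` statement. Each clause has the form
    ///
    /// ```sql
    /// WHEN [ NOT ] MATCHED [ BY { SOURCE | TARGET } ] [ AND predicate ] THEN
    ///     { UPDATE SET <assignments> | DELETE | INSERT [ ( <columns> ) ] { VALUES ( ... ) | ROW } }
    /// ```
    ///
    /// `UPDATE` and `DELETE` are rejected in `NOT MATCHED [ BY TARGET ]` clauses,
    /// while `INSERT` is only allowed there.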
15824    pub fn parse_merge_clauses(&mut self) -> Result<Vec<MergeClause>, ParserError> {
15825        let mut clauses = vec![];
15826        loop {
15827            if !(self.parse_keyword(Keyword::WHEN)) {
15828                break;
15829            }
15830
15831            let mut clause_kind = MergeClauseKind::Matched;
15832            if self.parse_keyword(Keyword::NOT) {
15833                clause_kind = MergeClauseKind::NotMatched;
15834            }
15835            self.expect_keyword_is(Keyword::MATCHED)?;
15836
15837            if matches!(clause_kind, MergeClauseKind::NotMatched)
15838                && self.parse_keywords(&[Keyword::BY, Keyword::SOURCE])
15839            {
15840                clause_kind = MergeClauseKind::NotMatchedBySource;
15841            } else if matches!(clause_kind, MergeClauseKind::NotMatched)
15842                && self.parse_keywords(&[Keyword::BY, Keyword::TARGET])
15843            {
15844                clause_kind = MergeClauseKind::NotMatchedByTarget;
15845            }
15846
15847            let predicate = if self.parse_keyword(Keyword::AND) {
15848                Some(self.parse_expr()?)
15849            } else {
15850                None
15851            };
15852
15853            self.expect_keyword_is(Keyword::THEN)?;
15854
15855            let merge_clause = match self.parse_one_of_keywords(&[
15856                Keyword::UPDATE,
15857                Keyword::INSERT,
15858                Keyword::DELETE,
15859            ]) {
15860                Some(Keyword::UPDATE) => {
15861                    if matches!(
15862                        clause_kind,
15863                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15864                    ) {
15865                        return Err(ParserError::ParserError(format!(
15866                            "UPDATE is not allowed in a {clause_kind} merge clause"
15867                        )));
15868                    }
15869                    self.expect_keyword_is(Keyword::SET)?;
15870                    MergeAction::Update {
15871                        assignments: self.parse_comma_separated(Parser::parse_assignment)?,
15872                    }
15873                }
15874                Some(Keyword::DELETE) => {
15875                    if matches!(
15876                        clause_kind,
15877                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15878                    ) {
15879                        return Err(ParserError::ParserError(format!(
15880                            "DELETE is not allowed in a {clause_kind} merge clause"
15881                        )));
15882                    }
15883                    MergeAction::Delete
15884                }
15885                Some(Keyword::INSERT) => {
15886                    if !matches!(
15887                        clause_kind,
15888                        MergeClauseKind::NotMatched | MergeClauseKind::NotMatchedByTarget
15889                    ) {
15890                        return Err(ParserError::ParserError(format!(
15891                            "INSERT is not allowed in a {clause_kind} merge clause"
15892                        )));
15893                    }
15894                    let is_mysql = dialect_of!(self is MySqlDialect);
15895
15896                    let columns = self.parse_parenthesized_column_list(Optional, is_mysql)?;
15897                    let kind = if dialect_of!(self is BigQueryDialect | GenericDialect)
15898                        && self.parse_keyword(Keyword::ROW)
15899                    {
15900                        MergeInsertKind::Row
15901                    } else {
15902                        self.expect_keyword_is(Keyword::VALUES)?;
15903                        let values = self.parse_values(is_mysql)?;
15904                        MergeInsertKind::Values(values)
15905                    };
15906                    MergeAction::Insert(MergeInsertExpr { columns, kind })
15907                }
15908                _ => {
15909                    return Err(ParserError::ParserError(
15910                        "expected UPDATE, DELETE or INSERT in merge clause".to_string(),
15911                    ));
15912                }
15913            };
15914            clauses.push(MergeClause {
15915                clause_kind,
15916                predicate,
15917                action: merge_clause,
15918            });
15919        }
15920        Ok(clauses)
15921    }
15922
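    /// Parse an `OUTPUT <select list> INTO <table>` clause, as used by `MERGE`.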
15923    fn parse_output(&mut self) -> Result<OutputClause, ParserError> {
15924        self.expect_keyword_is(Keyword::OUTPUT)?;
15925        let select_items = self.parse_projection()?;
15926        self.expect_keyword_is(Keyword::INTO)?;
15927        let into_table = self.parse_select_into()?;
15928
15929        Ok(OutputClause {
15930            select_items,
15931            into_table,
15932        })
15933    }
15934
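    /// Parse the target of an `INTO` clause:
    /// `[ TEMP | TEMPORARY ] [ UNLOGGED ] [ TABLE ] name`.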
15935    fn parse_select_into(&mut self) -> Result<SelectInto, ParserError> {
15936        let temporary = self
15937            .parse_one_of_keywords(&[Keyword::TEMP, Keyword::TEMPORARY])
15938            .is_some();
15939        let unlogged = self.parse_keyword(Keyword::UNLOGGED);
15940        let table = self.parse_keyword(Keyword::TABLE);
15941        let name = self.parse_object_name(false)?;
15942
15943        Ok(SelectInto {
15944            temporary,
15945            unlogged,
15946            table,
15947            name,
15948        })
15949    }
15950
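    /// Parse a `MERGE [ INTO ] target USING source ON condition <when clauses>
    /// [ OUTPUT ... ]` statement.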
15951    pub fn parse_merge(&mut self) -> Result<Statement, ParserError> {
15952        let into = self.parse_keyword(Keyword::INTO);
15953
15954        let table = self.parse_table_factor()?;
15955
15956        self.expect_keyword_is(Keyword::USING)?;
15957        let source = self.parse_table_factor()?;
15958        self.expect_keyword_is(Keyword::ON)?;
15959        let on = self.parse_expr()?;
15960        let clauses = self.parse_merge_clauses()?;
15961        let output = if self.peek_keyword(Keyword::OUTPUT) {
15962            Some(self.parse_output()?)
15963        } else {
15964            None
15965        };
15966
15967        Ok(Statement::Merge {
15968            into,
15969            table,
15970            source,
15971            on: Box::new(on),
15972            clauses,
15973            output,
15974        })
15975    }
15976
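    /// Parse a `PRAGMA` value: a number, a single- or double-quoted string, or
    /// a `?` placeholder.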
15977    fn parse_pragma_value(&mut self) -> Result<Value, ParserError> {
15978        match self.parse_value()?.value {
15979            v @ Value::SingleQuotedString(_) => Ok(v),
15980            v @ Value::DoubleQuotedString(_) => Ok(v),
15981            v @ Value::Number(_, _) => Ok(v),
15982            v @ Value::Placeholder(_) => Ok(v),
15983            _ => {
15984                self.prev_token();
15985                self.expected("number or string or ? placeholder", self.peek_token())
15986            }
15987        }
15988    }
15989
15990    // PRAGMA [schema-name '.'] pragma-name [('=' pragma-value) | '(' pragma-value ')']
15991    pub fn parse_pragma(&mut self) -> Result<Statement, ParserError> {
15992        let name = self.parse_object_name(false)?;
15993        if self.consume_token(&Token::LParen) {
15994            let value = self.parse_pragma_value()?;
15995            self.expect_token(&Token::RParen)?;
15996            Ok(Statement::Pragma {
15997                name,
15998                value: Some(value),
15999                is_eq: false,
16000            })
16001        } else if self.consume_token(&Token::Eq) {
16002            Ok(Statement::Pragma {
16003                name,
16004                value: Some(self.parse_pragma_value()?),
16005                is_eq: true,
16006            })
16007        } else {
16008            Ok(Statement::Pragma {
16009                name,
16010                value: None,
16011                is_eq: false,
16012            })
16013        }
16014    }
16015
16016    /// `INSTALL [extension_name]`
16017    pub fn parse_install(&mut self) -> Result<Statement, ParserError> {
16018        let extension_name = self.parse_identifier()?;
16019
16020        Ok(Statement::Install { extension_name })
16021    }
16022
16023    /// Parse a SQL LOAD statement
16024    pub fn parse_load(&mut self) -> Result<Statement, ParserError> {
16025        if self.dialect.supports_load_extension() {
16026            let extension_name = self.parse_identifier()?;
16027            Ok(Statement::Load { extension_name })
16028        } else if self.parse_keyword(Keyword::DATA) && self.dialect.supports_load_data() {
16029            let local = self.parse_one_of_keywords(&[Keyword::LOCAL]).is_some();
16030            self.expect_keyword_is(Keyword::INPATH)?;
16031            let inpath = self.parse_literal_string()?;
16032            let overwrite = self.parse_one_of_keywords(&[Keyword::OVERWRITE]).is_some();
16033            self.expect_keyword_is(Keyword::INTO)?;
16034            self.expect_keyword_is(Keyword::TABLE)?;
16035            let table_name = self.parse_object_name(false)?;
16036            let partitioned = self.parse_insert_partition()?;
16037            let table_format = self.parse_load_data_table_format()?;
16038            Ok(Statement::LoadData {
16039                local,
16040                inpath,
16041                overwrite,
16042                table_name,
16043                partitioned,
16044                table_format,
16045            })
16046        } else {
16047            self.expected(
16048                "`DATA` or an extension name after `LOAD`",
16049                self.peek_token(),
16050            )
16051        }
16052    }
16053
16054    /// ```sql
16055    /// OPTIMIZE TABLE [db.]name [ON CLUSTER cluster] [PARTITION partition | PARTITION ID 'partition_id'] [FINAL] [DEDUPLICATE [BY expression]]
16056    /// ```
16057    /// [ClickHouse](https://clickhouse.com/docs/en/sql-reference/statements/optimize)
16058    pub fn parse_optimize_table(&mut self) -> Result<Statement, ParserError> {
16059        self.expect_keyword_is(Keyword::TABLE)?;
16060        let name = self.parse_object_name(false)?;
16061        let on_cluster = self.parse_optional_on_cluster()?;
16062
16063        let partition = if self.parse_keyword(Keyword::PARTITION) {
16064            if self.parse_keyword(Keyword::ID) {
16065                Some(Partition::Identifier(self.parse_identifier()?))
16066            } else {
16067                Some(Partition::Expr(self.parse_expr()?))
16068            }
16069        } else {
16070            None
16071        };
16072
16073        let include_final = self.parse_keyword(Keyword::FINAL);
16074        let deduplicate = if self.parse_keyword(Keyword::DEDUPLICATE) {
16075            if self.parse_keyword(Keyword::BY) {
16076                Some(Deduplicate::ByExpression(self.parse_expr()?))
16077            } else {
16078                Some(Deduplicate::All)
16079            }
16080        } else {
16081            None
16082        };
16083
16084        Ok(Statement::OptimizeTable {
16085            name,
16086            on_cluster,
16087            partition,
16088            include_final,
16089            deduplicate,
16090        })
16091    }
16092
16093    /// ```sql
16094    /// CREATE [ { TEMPORARY | TEMP } ] SEQUENCE [ IF NOT EXISTS ] <sequence_name>
16095    /// ```
16096    ///
16097    /// See [Postgres docs](https://www.postgresql.org/docs/current/sql-createsequence.html) for more details.
16098    pub fn parse_create_sequence(&mut self, temporary: bool) -> Result<Statement, ParserError> {
16099        // [ IF NOT EXISTS ]
16100        let if_not_exists = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
16101        // name
16102        let name = self.parse_object_name(false)?;
16103        // [ AS data_type ]
16104        let mut data_type: Option<DataType> = None;
16105        if self.parse_keywords(&[Keyword::AS]) {
16106            data_type = Some(self.parse_data_type()?)
16107        }
16108        let sequence_options = self.parse_create_sequence_options()?;
16109        // [ OWNED BY { table_name.column_name | NONE } ]
16110        let owned_by = if self.parse_keywords(&[Keyword::OWNED, Keyword::BY]) {
16111            if self.parse_keywords(&[Keyword::NONE]) {
16112                Some(ObjectName::from(vec![Ident::new("NONE")]))
16113            } else {
16114                Some(self.parse_object_name(false)?)
16115            }
16116        } else {
16117            None
16118        };
16119        Ok(Statement::CreateSequence {
16120            temporary,
16121            if_not_exists,
16122            name,
16123            data_type,
16124            sequence_options,
16125            owned_by,
16126        })
16127    }
16128
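    /// Parse the options of a `CREATE SEQUENCE` statement: `INCREMENT [ BY ]`,
    /// `MINVALUE` / `NO MINVALUE`, `MAXVALUE` / `NO MAXVALUE`, `START [ WITH ]`,
    /// `CACHE` and `[ NO ] CYCLE`.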
16129    fn parse_create_sequence_options(&mut self) -> Result<Vec<SequenceOptions>, ParserError> {
16130        let mut sequence_options = vec![];
16131        // [ INCREMENT [ BY ] increment ]
16132        if self.parse_keywords(&[Keyword::INCREMENT]) {
16133            if self.parse_keywords(&[Keyword::BY]) {
16134                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, true));
16135            } else {
16136                sequence_options.push(SequenceOptions::IncrementBy(self.parse_number()?, false));
16137            }
16138        }
16139        // [ MINVALUE minvalue | NO MINVALUE ]
16140        if self.parse_keyword(Keyword::MINVALUE) {
16141            sequence_options.push(SequenceOptions::MinValue(Some(self.parse_number()?)));
16142        } else if self.parse_keywords(&[Keyword::NO, Keyword::MINVALUE]) {
16143            sequence_options.push(SequenceOptions::MinValue(None));
16144        }
16145        // [ MAXVALUE maxvalue | NO MAXVALUE ]
16146        if self.parse_keywords(&[Keyword::MAXVALUE]) {
16147            sequence_options.push(SequenceOptions::MaxValue(Some(self.parse_number()?)));
16148        } else if self.parse_keywords(&[Keyword::NO, Keyword::MAXVALUE]) {
16149            sequence_options.push(SequenceOptions::MaxValue(None));
16150        }
16151
16152        // [ START [ WITH ] start ]
16153        if self.parse_keywords(&[Keyword::START]) {
16154            if self.parse_keywords(&[Keyword::WITH]) {
16155                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, true));
16156            } else {
16157                sequence_options.push(SequenceOptions::StartWith(self.parse_number()?, false));
16158            }
16159        }
16160        // [ CACHE cache ]
16161        if self.parse_keywords(&[Keyword::CACHE]) {
16162            sequence_options.push(SequenceOptions::Cache(self.parse_number()?));
16163        }
16164        // [ [ NO ] CYCLE ]
16165        if self.parse_keywords(&[Keyword::NO, Keyword::CYCLE]) {
16166            sequence_options.push(SequenceOptions::Cycle(true));
16167        } else if self.parse_keywords(&[Keyword::CYCLE]) {
16168            sequence_options.push(SequenceOptions::Cycle(false));
16169        }
16170
16171        Ok(sequence_options)
16172    }
16173
16174    /// Parse a `CREATE SERVER` statement.
16175    ///
16176    /// See [Statement::CreateServer]
16177    pub fn parse_pg_create_server(&mut self) -> Result<Statement, ParserError> {
16178        let ine = self.parse_keywords(&[Keyword::IF, Keyword::NOT, Keyword::EXISTS]);
16179        let name = self.parse_object_name(false)?;
16180
16181        let server_type = if self.parse_keyword(Keyword::TYPE) {
16182            Some(self.parse_identifier()?)
16183        } else {
16184            None
16185        };
16186
16187        let version = if self.parse_keyword(Keyword::VERSION) {
16188            Some(self.parse_identifier()?)
16189        } else {
16190            None
16191        };
16192
16193        self.expect_keywords(&[Keyword::FOREIGN, Keyword::DATA, Keyword::WRAPPER])?;
16194        let foreign_data_wrapper = self.parse_object_name(false)?;
16195
16196        let mut options = None;
16197        if self.parse_keyword(Keyword::OPTIONS) {
16198            self.expect_token(&Token::LParen)?;
16199            options = Some(self.parse_comma_separated(|p| {
16200                let key = p.parse_identifier()?;
16201                let value = p.parse_identifier()?;
16202                Ok(CreateServerOption { key, value })
16203            })?);
16204            self.expect_token(&Token::RParen)?;
16205        }
16206
16207        Ok(Statement::CreateServer(CreateServerStatement {
16208            name,
16209            if_not_exists: ine,
16210            server_type,
16211            version,
16212            foreign_data_wrapper,
16213            options,
16214        }))
16215    }
16216
16217    /// The index of the first unprocessed token.
16218    pub fn index(&self) -> usize {
16219        self.index
16220    }
16221
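    /// Parse a single named window definition in a `WINDOW` clause:
    /// `name AS ( <window spec> )`, or `name AS other_window` on dialects that
    /// support named window references.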
16222    pub fn parse_named_window(&mut self) -> Result<NamedWindowDefinition, ParserError> {
16223        let ident = self.parse_identifier()?;
16224        self.expect_keyword_is(Keyword::AS)?;
16225
16226        let window_expr = if self.consume_token(&Token::LParen) {
16227            NamedWindowExpr::WindowSpec(self.parse_window_spec()?)
16228        } else if self.dialect.supports_window_clause_named_window_reference() {
16229            NamedWindowExpr::NamedWindow(self.parse_identifier()?)
16230        } else {
16231            return self.expected("(", self.peek_token());
16232        };
16233
16234        Ok(NamedWindowDefinition(ident, window_expr))
16235    }
16236
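    /// Parse the remainder of a `CREATE [ OR ALTER ] PROCEDURE name
    /// [ <parameters> ] [ LANGUAGE lang ] AS <body>` statement.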
16237    pub fn parse_create_procedure(&mut self, or_alter: bool) -> Result<Statement, ParserError> {
16238        let name = self.parse_object_name(false)?;
16239        let params = self.parse_optional_procedure_parameters()?;
16240
16241        let language = if self.parse_keyword(Keyword::LANGUAGE) {
16242            Some(self.parse_identifier()?)
16243        } else {
16244            None
16245        };
16246
16247        self.expect_keyword_is(Keyword::AS)?;
16248
16249        let body = self.parse_conditional_statements(&[Keyword::END])?;
16250
16251        Ok(Statement::CreateProcedure {
16252            name,
16253            or_alter,
16254            params,
16255            language,
16256            body,
16257        })
16258    }
16259
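    /// Parse a window specification following an opening parenthesis:
    /// `[ window_name ] [ PARTITION BY ... ] [ ORDER BY ... ] [ <window frame> ]`,
    /// consuming the closing parenthesis.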
16260    pub fn parse_window_spec(&mut self) -> Result<WindowSpec, ParserError> {
16261        let window_name = match self.peek_token().token {
16262            Token::Word(word) if word.keyword == Keyword::NoKeyword => {
16263                self.parse_optional_ident()?
16264            }
16265            _ => None,
16266        };
16267
16268        let partition_by = if self.parse_keywords(&[Keyword::PARTITION, Keyword::BY]) {
16269            self.parse_comma_separated(Parser::parse_expr)?
16270        } else {
16271            vec![]
16272        };
16273        let order_by = if self.parse_keywords(&[Keyword::ORDER, Keyword::BY]) {
16274            self.parse_comma_separated(Parser::parse_order_by_expr)?
16275        } else {
16276            vec![]
16277        };
16278
16279        let window_frame = if !self.consume_token(&Token::RParen) {
16280            let window_frame = self.parse_window_frame()?;
16281            self.expect_token(&Token::RParen)?;
16282            Some(window_frame)
16283        } else {
16284            None
16285        };
16286        Ok(WindowSpec {
16287            window_name,
16288            partition_by,
16289            order_by,
16290            window_frame,
16291        })
16292    }
16293
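    /// Parse a `CREATE TYPE name AS ...` statement: either `AS ENUM ( <labels> )`
    /// or a composite type `AS ( attr_name data_type [ COLLATE ... ] [, ...] )`.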
16294    pub fn parse_create_type(&mut self) -> Result<Statement, ParserError> {
16295        let name = self.parse_object_name(false)?;
16296        self.expect_keyword_is(Keyword::AS)?;
16297
16298        if self.parse_keyword(Keyword::ENUM) {
16299            return self.parse_create_type_enum(name);
16300        }
16301
16302        let mut attributes = vec![];
16303        if !self.consume_token(&Token::LParen) || self.consume_token(&Token::RParen) {
16304            return Ok(Statement::CreateType {
16305                name,
16306                representation: UserDefinedTypeRepresentation::Composite { attributes },
16307            });
16308        }
16309
16310        loop {
16311            let attr_name = self.parse_identifier()?;
16312            let attr_data_type = self.parse_data_type()?;
16313            let attr_collation = if self.parse_keyword(Keyword::COLLATE) {
16314                Some(self.parse_object_name(false)?)
16315            } else {
16316                None
16317            };
16318            attributes.push(UserDefinedTypeCompositeAttributeDef {
16319                name: attr_name,
16320                data_type: attr_data_type,
16321                collation: attr_collation,
16322            });
16323            let comma = self.consume_token(&Token::Comma);
16324            if self.consume_token(&Token::RParen) {
16325                // allow a trailing comma
16326                break;
16327            } else if !comma {
16328                return self.expected("',' or ')' after attribute definition", self.peek_token());
16329            }
16330        }
16331
16332        Ok(Statement::CreateType {
16333            name,
16334            representation: UserDefinedTypeRepresentation::Composite { attributes },
16335        })
16336    }
16337
16338    /// Parse remainder of `CREATE TYPE AS ENUM` statement (see [Statement::CreateType] and [Self::parse_create_type])
16339    ///
16340    /// See [PostgreSQL](https://www.postgresql.org/docs/current/sql-createtype.html)
16341    pub fn parse_create_type_enum(&mut self, name: ObjectName) -> Result<Statement, ParserError> {
16342        self.expect_token(&Token::LParen)?;
16343        let labels = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
16344        self.expect_token(&Token::RParen)?;
16345
16346        Ok(Statement::CreateType {
16347            name,
16348            representation: UserDefinedTypeRepresentation::Enum { labels },
16349        })
16350    }
16351
16352    fn parse_parenthesized_identifiers(&mut self) -> Result<Vec<Ident>, ParserError> {
16353        self.expect_token(&Token::LParen)?;
16354        let idents = self.parse_comma_separated0(|p| p.parse_identifier(), Token::RParen)?;
16355        self.expect_token(&Token::RParen)?;
16356        Ok(idents)
16357    }
16358
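    /// Parse the MySQL-style column position clause `FIRST` or
    /// `AFTER column_name`, if present.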
16359    fn parse_column_position(&mut self) -> Result<Option<MySQLColumnPosition>, ParserError> {
16360        if dialect_of!(self is MySqlDialect | GenericDialect) {
16361            if self.parse_keyword(Keyword::FIRST) {
16362                Ok(Some(MySQLColumnPosition::First))
16363            } else if self.parse_keyword(Keyword::AFTER) {
16364                let ident = self.parse_identifier()?;
16365                Ok(Some(MySQLColumnPosition::After(ident)))
16366            } else {
16367                Ok(None)
16368            }
16369        } else {
16370            Ok(None)
16371        }
16372    }
16373
16374    /// Parse [Statement::Print]
16375    fn parse_print(&mut self) -> Result<Statement, ParserError> {
16376        Ok(Statement::Print(PrintStatement {
16377            message: Box::new(self.parse_expr()?),
16378        }))
16379    }
16380
16381    /// Parse [Statement::Return]
16382    fn parse_return(&mut self) -> Result<Statement, ParserError> {
16383        match self.maybe_parse(|p| p.parse_expr())? {
16384            Some(expr) => Ok(Statement::Return(ReturnStatement {
16385                value: Some(ReturnStatementValue::Expr(expr)),
16386            })),
16387            None => Ok(Statement::Return(ReturnStatement { value: None })),
16388        }
16389    }
16390
16391    /// Consume the parser and return its underlying token buffer
16392    pub fn into_tokens(self) -> Vec<TokenWithSpan> {
16393        self.tokens
16394    }
16395
16396    /// Returns true if the next keyword indicates a subquery, i.e. `SELECT` or `WITH`
16397    fn peek_sub_query(&mut self) -> bool {
16398        if self
16399            .parse_one_of_keywords(&[Keyword::SELECT, Keyword::WITH])
16400            .is_some()
16401        {
16402            self.prev_token();
16403            return true;
16404        }
16405        false
16406    }
16407
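    /// Parse the optional trailing clauses of a `SHOW` statement: a statement
    /// filter (placed before or after the `IN`/`FROM` clause depending on the
    /// dialect), `STARTS WITH`, `LIMIT` and `FROM`.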
16408    pub(crate) fn parse_show_stmt_options(&mut self) -> Result<ShowStatementOptions, ParserError> {
16409        let show_in;
16410        let mut filter_position = None;
16411        if self.dialect.supports_show_like_before_in() {
16412            if let Some(filter) = self.parse_show_statement_filter()? {
16413                filter_position = Some(ShowStatementFilterPosition::Infix(filter));
16414            }
16415            show_in = self.maybe_parse_show_stmt_in()?;
16416        } else {
16417            show_in = self.maybe_parse_show_stmt_in()?;
16418            if let Some(filter) = self.parse_show_statement_filter()? {
16419                filter_position = Some(ShowStatementFilterPosition::Suffix(filter));
16420            }
16421        }
16422        let starts_with = self.maybe_parse_show_stmt_starts_with()?;
16423        let limit = self.maybe_parse_show_stmt_limit()?;
16424        let from = self.maybe_parse_show_stmt_from()?;
16425        Ok(ShowStatementOptions {
16426            filter_position,
16427            show_in,
16428            starts_with,
16429            limit,
16430            limit_from: from,
16431        })
16432    }
16433
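    /// Parse the optional `{ FROM | IN } [ ACCOUNT | DATABASE | SCHEMA | TABLE | VIEW ] [ name ]`
    /// clause of a `SHOW` statement, including the MySQL-style
    /// `FROM tbl_name FROM db_name` form.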
16434    fn maybe_parse_show_stmt_in(&mut self) -> Result<Option<ShowStatementIn>, ParserError> {
16435        let clause = match self.parse_one_of_keywords(&[Keyword::FROM, Keyword::IN]) {
16436            Some(Keyword::FROM) => ShowStatementInClause::FROM,
16437            Some(Keyword::IN) => ShowStatementInClause::IN,
16438            None => return Ok(None),
16439            _ => return self.expected("FROM or IN", self.peek_token()),
16440        };
16441
16442        let (parent_type, parent_name) = match self.parse_one_of_keywords(&[
16443            Keyword::ACCOUNT,
16444            Keyword::DATABASE,
16445            Keyword::SCHEMA,
16446            Keyword::TABLE,
16447            Keyword::VIEW,
16448        ]) {
16449            // If we see these next keywords it means we don't have a parent name
16450            Some(Keyword::DATABASE)
16451                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
16452                    | self.peek_keyword(Keyword::LIMIT) =>
16453            {
16454                (Some(ShowStatementInParentType::Database), None)
16455            }
16456            Some(Keyword::SCHEMA)
16457                if self.peek_keywords(&[Keyword::STARTS, Keyword::WITH])
16458                    | self.peek_keyword(Keyword::LIMIT) =>
16459            {
16460                (Some(ShowStatementInParentType::Schema), None)
16461            }
16462            Some(parent_kw) => {
16463                // The parent name here is still optional, for example:
16464                // SHOW TABLES IN ACCOUNT, so parsing the object name
16465                // may fail because the statement ends.
16466                let parent_name = self.maybe_parse(|p| p.parse_object_name(false))?;
16467                match parent_kw {
16468                    Keyword::ACCOUNT => (Some(ShowStatementInParentType::Account), parent_name),
16469                    Keyword::DATABASE => (Some(ShowStatementInParentType::Database), parent_name),
16470                    Keyword::SCHEMA => (Some(ShowStatementInParentType::Schema), parent_name),
16471                    Keyword::TABLE => (Some(ShowStatementInParentType::Table), parent_name),
16472                    Keyword::VIEW => (Some(ShowStatementInParentType::View), parent_name),
16473                    _ => {
16474                        return self.expected(
16475                            "one of ACCOUNT, DATABASE, SCHEMA, TABLE or VIEW",
16476                            self.peek_token(),
16477                        )
16478                    }
16479                }
16480            }
16481            None => {
16482                // Parsing MySQL style FROM tbl_name FROM db_name
16483                // which is equivalent to FROM db_name.tbl_name
16484                let mut parent_name = self.parse_object_name(false)?;
16485                if self
16486                    .parse_one_of_keywords(&[Keyword::FROM, Keyword::IN])
16487                    .is_some()
16488                {
16489                    parent_name
16490                        .0
16491                        .insert(0, ObjectNamePart::Identifier(self.parse_identifier()?));
16492                }
16493                (None, Some(parent_name))
16494            }
16495        };
16496
16497        Ok(Some(ShowStatementIn {
16498            clause,
16499            parent_type,
16500            parent_name,
16501        }))
16502    }
16503
16504    fn maybe_parse_show_stmt_starts_with(&mut self) -> Result<Option<Value>, ParserError> {
16505        if self.parse_keywords(&[Keyword::STARTS, Keyword::WITH]) {
16506            Ok(Some(self.parse_value()?.value))
16507        } else {
16508            Ok(None)
16509        }
16510    }
16511
16512    fn maybe_parse_show_stmt_limit(&mut self) -> Result<Option<Expr>, ParserError> {
16513        if self.parse_keyword(Keyword::LIMIT) {
16514            Ok(self.parse_limit()?)
16515        } else {
16516            Ok(None)
16517        }
16518    }
16519
16520    fn maybe_parse_show_stmt_from(&mut self) -> Result<Option<Value>, ParserError> {
16521        if self.parse_keyword(Keyword::FROM) {
16522            Ok(Some(self.parse_value()?.value))
16523        } else {
16524            Ok(None)
16525        }
16526    }
16527}
16528
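/// Wrap `expr` in [`Expr::Prefixed`] when a prefix identifier is present;
/// otherwise return the expression unchanged.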
16529fn maybe_prefixed_expr(expr: Expr, prefix: Option<Ident>) -> Expr {
16530    if let Some(prefix) = prefix {
16531        Expr::Prefixed {
16532            prefix,
16533            value: Box::new(expr),
16534        }
16535    } else {
16536        expr
16537    }
16538}
16539
16540impl Word {
16541    #[deprecated(since = "0.54.0", note = "please use `into_ident` instead")]
16542    pub fn to_ident(&self, span: Span) -> Ident {
16543        Ident {
16544            value: self.value.clone(),
16545            quote_style: self.quote_style,
16546            span,
16547        }
16548    }
16549
16550    /// Convert this word into an [`Ident`] identifier
16551    pub fn into_ident(self, span: Span) -> Ident {
16552        Ident {
16553            value: self.value,
16554            quote_style: self.quote_style,
16555            span,
16556        }
16557    }
16558}
16559
16560#[cfg(test)]
16561mod tests {
16562    use crate::test_utils::{all_dialects, TestedDialects};
16563
16564    use super::*;
16565
16566    #[test]
16567    fn test_prev_index() {
16568        let sql = "SELECT version";
16569        all_dialects().run_parser_method(sql, |parser| {
16570            assert_eq!(parser.peek_token(), Token::make_keyword("SELECT"));
16571            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
16572            parser.prev_token();
16573            assert_eq!(parser.next_token(), Token::make_keyword("SELECT"));
16574            assert_eq!(parser.next_token(), Token::make_word("version", None));
16575            parser.prev_token();
16576            assert_eq!(parser.peek_token(), Token::make_word("version", None));
16577            assert_eq!(parser.next_token(), Token::make_word("version", None));
16578            assert_eq!(parser.peek_token(), Token::EOF);
16579            parser.prev_token();
16580            assert_eq!(parser.next_token(), Token::make_word("version", None));
16581            assert_eq!(parser.next_token(), Token::EOF);
16582            assert_eq!(parser.next_token(), Token::EOF);
16583            parser.prev_token();
16584        });
16585    }
16586
16587    #[test]
16588    fn test_peek_tokens() {
16589        all_dialects().run_parser_method("SELECT foo AS bar FROM baz", |parser| {
16590            assert!(matches!(
16591                parser.peek_tokens(),
16592                [Token::Word(Word {
16593                    keyword: Keyword::SELECT,
16594                    ..
16595                })]
16596            ));
16597
16598            assert!(matches!(
16599                parser.peek_tokens(),
16600                [
16601                    Token::Word(Word {
16602                        keyword: Keyword::SELECT,
16603                        ..
16604                    }),
16605                    Token::Word(_),
16606                    Token::Word(Word {
16607                        keyword: Keyword::AS,
16608                        ..
16609                    }),
16610                ]
16611            ));
16612
16613            for _ in 0..4 {
16614                parser.next_token();
16615            }
16616
16617            assert!(matches!(
16618                parser.peek_tokens(),
16619                [
16620                    Token::Word(Word {
16621                        keyword: Keyword::FROM,
16622                        ..
16623                    }),
16624                    Token::Word(_),
16625                    Token::EOF,
16626                    Token::EOF,
16627                ]
16628            ))
16629        })
16630    }
16631
16632    #[cfg(test)]
16633    mod test_parse_data_type {
16634        use crate::ast::{
16635            CharLengthUnits, CharacterLength, DataType, ExactNumberInfo, ObjectName, TimezoneInfo,
16636        };
16637        use crate::dialect::{AnsiDialect, GenericDialect};
16638        use crate::test_utils::TestedDialects;
16639
16640        macro_rules! test_parse_data_type {
16641            ($dialect:expr, $input:expr, $expected_type:expr $(,)?) => {{
16642                $dialect.run_parser_method(&*$input, |parser| {
16643                    let data_type = parser.parse_data_type().unwrap();
16644                    assert_eq!($expected_type, data_type);
16645                    assert_eq!($input.to_string(), data_type.to_string());
16646                });
16647            }};
16648        }
16649
16650        #[test]
16651        fn test_ansii_character_string_types() {
16652            // Character string types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-string-type>
16653            let dialect =
16654                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16655
16656            test_parse_data_type!(dialect, "CHARACTER", DataType::Character(None));
16657
16658            test_parse_data_type!(
16659                dialect,
16660                "CHARACTER(20)",
16661                DataType::Character(Some(CharacterLength::IntegerLength {
16662                    length: 20,
16663                    unit: None
16664                }))
16665            );
16666
16667            test_parse_data_type!(
16668                dialect,
16669                "CHARACTER(20 CHARACTERS)",
16670                DataType::Character(Some(CharacterLength::IntegerLength {
16671                    length: 20,
16672                    unit: Some(CharLengthUnits::Characters)
16673                }))
16674            );
16675
16676            test_parse_data_type!(
16677                dialect,
16678                "CHARACTER(20 OCTETS)",
16679                DataType::Character(Some(CharacterLength::IntegerLength {
16680                    length: 20,
16681                    unit: Some(CharLengthUnits::Octets)
16682                }))
16683            );
16684
16685            test_parse_data_type!(dialect, "CHAR", DataType::Char(None));
16686
16687            test_parse_data_type!(
16688                dialect,
16689                "CHAR(20)",
16690                DataType::Char(Some(CharacterLength::IntegerLength {
16691                    length: 20,
16692                    unit: None
16693                }))
16694            );
16695
16696            test_parse_data_type!(
16697                dialect,
16698                "CHAR(20 CHARACTERS)",
16699                DataType::Char(Some(CharacterLength::IntegerLength {
16700                    length: 20,
16701                    unit: Some(CharLengthUnits::Characters)
16702                }))
16703            );
16704
16705            test_parse_data_type!(
16706                dialect,
16707                "CHAR(20 OCTETS)",
16708                DataType::Char(Some(CharacterLength::IntegerLength {
16709                    length: 20,
16710                    unit: Some(CharLengthUnits::Octets)
16711                }))
16712            );
16713
16714            test_parse_data_type!(
16715                dialect,
16716                "CHARACTER VARYING(20)",
16717                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16718                    length: 20,
16719                    unit: None
16720                }))
16721            );
16722
16723            test_parse_data_type!(
16724                dialect,
16725                "CHARACTER VARYING(20 CHARACTERS)",
16726                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16727                    length: 20,
16728                    unit: Some(CharLengthUnits::Characters)
16729                }))
16730            );
16731
16732            test_parse_data_type!(
16733                dialect,
16734                "CHARACTER VARYING(20 OCTETS)",
16735                DataType::CharacterVarying(Some(CharacterLength::IntegerLength {
16736                    length: 20,
16737                    unit: Some(CharLengthUnits::Octets)
16738                }))
16739            );
16740
16741            test_parse_data_type!(
16742                dialect,
16743                "CHAR VARYING(20)",
16744                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16745                    length: 20,
16746                    unit: None
16747                }))
16748            );
16749
16750            test_parse_data_type!(
16751                dialect,
16752                "CHAR VARYING(20 CHARACTERS)",
16753                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16754                    length: 20,
16755                    unit: Some(CharLengthUnits::Characters)
16756                }))
16757            );
16758
16759            test_parse_data_type!(
16760                dialect,
16761                "CHAR VARYING(20 OCTETS)",
16762                DataType::CharVarying(Some(CharacterLength::IntegerLength {
16763                    length: 20,
16764                    unit: Some(CharLengthUnits::Octets)
16765                }))
16766            );
16767
16768            test_parse_data_type!(
16769                dialect,
16770                "VARCHAR(20)",
16771                DataType::Varchar(Some(CharacterLength::IntegerLength {
16772                    length: 20,
16773                    unit: None
16774                }))
16775            );
16776        }
16777
16778        #[test]
16779        fn test_ansii_character_large_object_types() {
16780            // Character large object types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#character-large-object-length>
16781            let dialect =
16782                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16783
16784            test_parse_data_type!(
16785                dialect,
16786                "CHARACTER LARGE OBJECT",
16787                DataType::CharacterLargeObject(None)
16788            );
16789            test_parse_data_type!(
16790                dialect,
16791                "CHARACTER LARGE OBJECT(20)",
16792                DataType::CharacterLargeObject(Some(20))
16793            );
16794
16795            test_parse_data_type!(
16796                dialect,
16797                "CHAR LARGE OBJECT",
16798                DataType::CharLargeObject(None)
16799            );
16800            test_parse_data_type!(
16801                dialect,
16802                "CHAR LARGE OBJECT(20)",
16803                DataType::CharLargeObject(Some(20))
16804            );
16805
16806            test_parse_data_type!(dialect, "CLOB", DataType::Clob(None));
16807            test_parse_data_type!(dialect, "CLOB(20)", DataType::Clob(Some(20)));
16808        }
16809
16810        #[test]
16811        fn test_parse_custom_types() {
16812            let dialect =
16813                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16814
16815            test_parse_data_type!(
16816                dialect,
16817                "GEOMETRY",
16818                DataType::Custom(ObjectName::from(vec!["GEOMETRY".into()]), vec![])
16819            );
16820
16821            test_parse_data_type!(
16822                dialect,
16823                "GEOMETRY(POINT)",
16824                DataType::Custom(
16825                    ObjectName::from(vec!["GEOMETRY".into()]),
16826                    vec!["POINT".to_string()]
16827                )
16828            );
16829
16830            test_parse_data_type!(
16831                dialect,
16832                "GEOMETRY(POINT, 4326)",
16833                DataType::Custom(
16834                    ObjectName::from(vec!["GEOMETRY".into()]),
16835                    vec!["POINT".to_string(), "4326".to_string()]
16836                )
16837            );
16838        }
16839
16840        #[test]
16841        fn test_ansii_exact_numeric_types() {
16842            // Exact numeric types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#exact-numeric-type>
16843            let dialect =
16844                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16845
16846            test_parse_data_type!(dialect, "NUMERIC", DataType::Numeric(ExactNumberInfo::None));
16847
16848            test_parse_data_type!(
16849                dialect,
16850                "NUMERIC(2)",
16851                DataType::Numeric(ExactNumberInfo::Precision(2))
16852            );
16853
16854            test_parse_data_type!(
16855                dialect,
16856                "NUMERIC(2,10)",
16857                DataType::Numeric(ExactNumberInfo::PrecisionAndScale(2, 10))
16858            );
16859
16860            test_parse_data_type!(dialect, "DECIMAL", DataType::Decimal(ExactNumberInfo::None));
16861
16862            test_parse_data_type!(
16863                dialect,
16864                "DECIMAL(2)",
16865                DataType::Decimal(ExactNumberInfo::Precision(2))
16866            );
16867
16868            test_parse_data_type!(
16869                dialect,
16870                "DECIMAL(2,10)",
16871                DataType::Decimal(ExactNumberInfo::PrecisionAndScale(2, 10))
16872            );
16873
16874            test_parse_data_type!(dialect, "DEC", DataType::Dec(ExactNumberInfo::None));
16875
16876            test_parse_data_type!(
16877                dialect,
16878                "DEC(2)",
16879                DataType::Dec(ExactNumberInfo::Precision(2))
16880            );
16881
16882            test_parse_data_type!(
16883                dialect,
16884                "DEC(2,10)",
16885                DataType::Dec(ExactNumberInfo::PrecisionAndScale(2, 10))
16886            );
16887        }
16888
16889        #[test]
16890        fn test_ansii_date_type() {
16891            // Datetime types: <https://jakewheat.github.io/sql-overview/sql-2016-foundation-grammar.html#datetime-type>
16892            let dialect =
16893                TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(AnsiDialect {})]);
16894
16895            test_parse_data_type!(dialect, "DATE", DataType::Date);
16896
16897            test_parse_data_type!(dialect, "TIME", DataType::Time(None, TimezoneInfo::None));
16898
16899            test_parse_data_type!(
16900                dialect,
16901                "TIME(6)",
16902                DataType::Time(Some(6), TimezoneInfo::None)
16903            );
16904
16905            test_parse_data_type!(
16906                dialect,
16907                "TIME WITH TIME ZONE",
16908                DataType::Time(None, TimezoneInfo::WithTimeZone)
16909            );
16910
16911            test_parse_data_type!(
16912                dialect,
16913                "TIME(6) WITH TIME ZONE",
16914                DataType::Time(Some(6), TimezoneInfo::WithTimeZone)
16915            );
16916
16917            test_parse_data_type!(
16918                dialect,
16919                "TIME WITHOUT TIME ZONE",
16920                DataType::Time(None, TimezoneInfo::WithoutTimeZone)
16921            );
16922
16923            test_parse_data_type!(
16924                dialect,
16925                "TIME(6) WITHOUT TIME ZONE",
16926                DataType::Time(Some(6), TimezoneInfo::WithoutTimeZone)
16927            );
16928
16929            test_parse_data_type!(
16930                dialect,
16931                "TIMESTAMP",
16932                DataType::Timestamp(None, TimezoneInfo::None)
16933            );
16934
16935            test_parse_data_type!(
16936                dialect,
16937                "TIMESTAMP(22)",
16938                DataType::Timestamp(Some(22), TimezoneInfo::None)
16939            );
16940
16941            test_parse_data_type!(
16942                dialect,
16943                "TIMESTAMP(22) WITH TIME ZONE",
16944                DataType::Timestamp(Some(22), TimezoneInfo::WithTimeZone)
16945            );
16946
16947            test_parse_data_type!(
16948                dialect,
16949                "TIMESTAMP(33) WITHOUT TIME ZONE",
16950                DataType::Timestamp(Some(33), TimezoneInfo::WithoutTimeZone)
16951            );
16952        }
16953    }
16954
16955    #[test]
16956    fn test_parse_schema_name() {
16957        // The expected name should be identical to the input name, so no separate expected string needs to be passed to the macro
16958        macro_rules! test_parse_schema_name {
16959            ($input:expr, $expected_name:expr $(,)?) => {{
16960                all_dialects().run_parser_method(&*$input, |parser| {
16961                    let schema_name = parser.parse_schema_name().unwrap();
16962                    // Validate that the structure is the same as expected
16963                    assert_eq!(schema_name, $expected_name);
16964                    // Validate that the input and the expected structure serialization are the same
16965                    assert_eq!(schema_name.to_string(), $input.to_string());
16966                });
16967            }};
16968        }
16969
16970        let dummy_name = ObjectName::from(vec![Ident::new("dummy_name")]);
16971        let dummy_authorization = Ident::new("dummy_authorization");
16972
16973        test_parse_schema_name!(
16974            format!("{dummy_name}"),
16975            SchemaName::Simple(dummy_name.clone())
16976        );
16977
16978        test_parse_schema_name!(
16979            format!("AUTHORIZATION {dummy_authorization}"),
16980            SchemaName::UnnamedAuthorization(dummy_authorization.clone()),
16981        );
16982        test_parse_schema_name!(
16983            format!("{dummy_name} AUTHORIZATION {dummy_authorization}"),
16984            SchemaName::NamedAuthorization(dummy_name.clone(), dummy_authorization.clone()),
16985        );
16986    }
16987
16988    #[test]
16989    fn mysql_parse_index_table_constraint() {
16990        macro_rules! test_parse_table_constraint {
16991            ($dialect:expr, $input:expr, $expected:expr $(,)?) => {{
16992                $dialect.run_parser_method(&*$input, |parser| {
16993                    let constraint = parser.parse_optional_table_constraint().unwrap().unwrap();
16994                    // Validate that the structure is the same as expected
16995                    assert_eq!(constraint, $expected);
16996                    // Validate that the input and the expected structure serialization are the same
16997                    assert_eq!(constraint.to_string(), $input.to_string());
16998                });
16999            }};
17000        }

        fn mk_expected_col(name: &str) -> IndexColumn {
            IndexColumn {
                column: OrderByExpr {
                    expr: Expr::Identifier(name.into()),
                    options: OrderByOptions {
                        asc: None,
                        nulls_first: None,
                    },
                    with_fill: None,
                },
                operator_class: None,
            }
        }

        let dialect =
            TestedDialects::new(vec![Box::new(GenericDialect {}), Box::new(MySqlDialect {})]);

        test_parse_table_constraint!(
            dialect,
            "INDEX (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "KEY (c1)",
            TableConstraint::Index {
                display_as_key: true,
                name: None,
                index_type: None,
                columns: vec![mk_expected_col("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX 'index' (c1, c2)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::with_quote('\'', "index")),
                index_type: None,
                columns: vec![mk_expected_col("c1"), mk_expected_col("c2")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: None,
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING BTREE (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::BTree),
                columns: vec![mk_expected_col("c1")],
            }
        );

        test_parse_table_constraint!(
            dialect,
            "INDEX idx_name USING HASH (c1)",
            TableConstraint::Index {
                display_as_key: false,
                name: Some(Ident::new("idx_name")),
                index_type: Some(IndexType::Hash),
                columns: vec![mk_expected_col("c1")],
            }
        );
    }
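
    // NOTE: illustrative sketch, not part of the original test suite. It assumes the
    // MySQL dialect accepts an inline INDEX constraint inside CREATE TABLE (the cases
    // above exercise `parse_optional_table_constraint` in isolation) and only checks
    // that the parsed statement survives a serialize/re-parse round trip.
    #[test]
    fn mysql_create_table_with_index_constraint_sketch() {
        let sql = "CREATE TABLE t (c1 INT, INDEX USING BTREE (c1))";
        let ast = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(ast.len(), 1);
        // Re-parse the serialized statement; both ASTs should be structurally equal.
        let reparsed = Parser::parse_sql(&MySqlDialect {}, &ast[0].to_string()).unwrap();
        assert_eq!(ast, reparsed);
    }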

    #[test]
    fn test_tokenizer_error_loc() {
        let sql = "foo '";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::TokenizerError(
                "Unterminated string literal at Line: 1, Column: 5".to_string()
            ))
        );
    }

    #[test]
    fn test_parser_error_loc() {
        let sql = "SELECT this is a syntax error";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Expected: [NOT] NULL | TRUE | FALSE | DISTINCT | [form] NORMALIZED FROM after IS, found: a at Line: 1, Column: 16"
                    .to_string()
            ))
        );
    }
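
    // NOTE: illustrative sketch, not part of the original test suite. It assumes
    // `ParserError`'s `Display` impl forwards the wrapped message (as the
    // multipart-identifier error tests below rely on), so callers can surface the
    // line/column information from the two cases above without matching variants.
    #[test]
    fn test_error_display_includes_location_sketch() {
        let err = Parser::parse_sql(&GenericDialect, "foo '").unwrap_err();
        let msg = err.to_string();
        assert!(msg.contains("Line: 1"));
        assert!(msg.contains("Column: 5"));
    }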

    #[test]
    fn test_nested_explain_error() {
        let sql = "EXPLAIN EXPLAIN SELECT 1";
        let ast = Parser::parse_sql(&GenericDialect, sql);
        assert_eq!(
            ast,
            Err(ParserError::ParserError(
                "Explain must be root of the plan".to_string()
            ))
        );
    }

    #[test]
    fn test_parse_multipart_identifier_positive() {
        let dialect = TestedDialects::new(vec![Box::new(GenericDialect {})]);

        // parse multipart with quotes
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "F(o)o. \"bar".to_string(),
                quote_style: Some('"'),
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method(r#"CATALOG."F(o)o. ""bar".table"#, |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });

        // allow whitespace between ident parts
        let expected = vec![
            Ident {
                value: "CATALOG".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
            Ident {
                value: "table".to_string(),
                quote_style: None,
                span: Span::empty(),
            },
        ];
        dialect.run_parser_method("CATALOG . table", |parser| {
            let actual = parser.parse_multipart_identifier().unwrap();
            assert_eq!(expected, actual);
        });
    }

    #[test]
    fn test_parse_multipart_identifier_negative() {
        macro_rules! test_parse_multipart_identifier_error {
            ($input:expr, $expected_err:expr $(,)?) => {{
                all_dialects().run_parser_method(&*$input, |parser| {
                    let actual_err = parser.parse_multipart_identifier().unwrap_err();
                    assert_eq!(actual_err.to_string(), $expected_err);
                });
            }};
        }

        test_parse_multipart_identifier_error!(
            "",
            "sql parser error: Empty input when parsing identifier",
        );

        test_parse_multipart_identifier_error!(
            "*schema.table",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table*",
            "sql parser error: Unexpected token in identifier: *",
        );

        test_parse_multipart_identifier_error!(
            "schema.table.",
            "sql parser error: Trailing period in identifier",
        );

        test_parse_multipart_identifier_error!(
            "schema.*",
            "sql parser error: Unexpected token following period in identifier: *",
        );
    }

    #[test]
    fn test_mysql_partition_selection() {
        let sql = "SELECT * FROM employees PARTITION (p0, p2)";
        let expected = vec!["p0", "p2"];

        let ast: Vec<Statement> = Parser::parse_sql(&MySqlDialect {}, sql).unwrap();
        assert_eq!(ast.len(), 1);
        if let Statement::Query(v) = &ast[0] {
            if let SetExpr::Select(select) = &*v.body {
                assert_eq!(select.from.len(), 1);
                let from: &TableWithJoins = &select.from[0];
                let table_factor = &from.relation;
                if let TableFactor::Table { partitions, .. } = table_factor {
                    let actual: Vec<&str> = partitions
                        .iter()
                        .map(|ident| ident.value.as_str())
                        .collect();
                    assert_eq!(expected, actual);
                } else {
                    panic!("expected a plain table factor with partitions");
                }
            } else {
                panic!("expected a SELECT body");
            }
        } else {
            panic!("failed to parse MySQL partition selection as a query");
        }
    }

    #[test]
    fn test_replace_into_placeholders() {
        let sql = "REPLACE INTO t (a) VALUES (&a)";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

    #[test]
    fn test_replace_into_set_placeholder() {
        let sql = "REPLACE INTO t SET ?";

        assert!(Parser::parse_sql(&GenericDialect {}, sql).is_err());
    }

    #[test]
    fn test_replace_incomplete() {
        let sql = r#"REPLACE"#;

        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_err());
    }
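
    // NOTE: illustrative sketch, not part of the original test suite. As a positive
    // counterpart to the malformed REPLACE cases above, it assumes a well-formed
    // REPLACE INTO statement parses under the MySQL dialect.
    #[test]
    fn test_replace_into_well_formed_sketch() {
        let sql = "REPLACE INTO t (a) VALUES (1)";

        assert!(Parser::parse_sql(&MySqlDialect {}, sql).is_ok());
    }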
}